fix: databricks test config
donotpush committed Jan 15, 2025
1 parent 9d560d9 commit 902c49d
Showing 1 changed file with 22 additions and 5 deletions.
27 changes: 22 additions & 5 deletions tests/load/pipeline/test_databricks_pipeline.py
@@ -159,14 +159,22 @@ def test_databricks_gcs_external_location(destination_config: DestinationTestConfiguration
 )
 def test_databricks_auth_oauth(destination_config: DestinationTestConfiguration) -> None:
     os.environ["DESTINATION__DATABRICKS__CREDENTIALS__ACCESS_TOKEN"] = ""
-    bricks = databricks()
+
+    from dlt.destinations import databricks, filesystem
+    from dlt.destinations.impl.databricks.databricks import DatabricksLoadJob
+
+    abfss_bucket_url = DatabricksLoadJob.ensure_databricks_abfss_url(AZ_BUCKET, "dltdata")
+    stage = filesystem(abfss_bucket_url)
+
+    bricks = databricks(is_staging_external_location=False)
     config = bricks.configuration(None, accept_partial=True)
+
     assert config.credentials.client_id and config.credentials.client_secret
     assert not config.credentials.access_token
 
     dataset_name = "test_databricks_oauth" + uniq_id()
     pipeline = destination_config.setup_pipeline(
-        "test_databricks_oauth", dataset_name=dataset_name, destination=bricks
+        "test_databricks_oauth", dataset_name=dataset_name, destination=bricks, staging=stage
     )
 
     info = pipeline.run([1, 2, 3], table_name="digits", **destination_config.run_kwargs)
@@ -179,20 +187,29 @@ def test_databricks_auth_oauth(destination_config: DestinationTestConfiguration)
 
 @pytest.mark.parametrize(
     "destination_config",
-    destinations_configs(default_sql_configs=True, subset=("databricks",)),
+    destinations_configs(
+        default_sql_configs=True, bucket_subset=(AZ_BUCKET,), subset=("databricks",)
+    ),
     ids=lambda x: x.name,
 )
 def test_databricks_auth_token(destination_config: DestinationTestConfiguration) -> None:
     os.environ["DESTINATION__DATABRICKS__CREDENTIALS__CLIENT_ID"] = ""
     os.environ["DESTINATION__DATABRICKS__CREDENTIALS__CLIENT_SECRET"] = ""
-    bricks = databricks()
+
+    from dlt.destinations import databricks, filesystem
+    from dlt.destinations.impl.databricks.databricks import DatabricksLoadJob
+
+    abfss_bucket_url = DatabricksLoadJob.ensure_databricks_abfss_url(AZ_BUCKET, "dltdata")
+    stage = filesystem(abfss_bucket_url)
+
+    bricks = databricks(is_staging_external_location=False)
     config = bricks.configuration(None, accept_partial=True)
     assert config.credentials.access_token
     assert not (config.credentials.client_secret and config.credentials.client_id)
 
     dataset_name = "test_databricks_token" + uniq_id()
     pipeline = destination_config.setup_pipeline(
-        "test_databricks_token", dataset_name=dataset_name, destination=bricks
+        "test_databricks_token", dataset_name=dataset_name, destination=bricks, staging=stage
     )
 
     info = pipeline.run([1, 2, 3], table_name="digits", **destination_config.run_kwargs)
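
For context, this is the staging setup both tests now exercise, pulled out of the test harness: rewrite the Azure bucket URL into the abfss:// form Databricks expects, then pass an explicit filesystem stage alongside the Databricks destination instead of relying on a pre-registered external location. A minimal sketch assuming dlt's public pipeline API; the AZ_BUCKET value and the pipeline/dataset names below are illustrative placeholders, while ensure_databricks_abfss_url, "dltdata", and is_staging_external_location=False are taken directly from the diff.

    import dlt
    from dlt.destinations import databricks, filesystem
    from dlt.destinations.impl.databricks.databricks import DatabricksLoadJob

    # Placeholder for the test suite's AZ_BUCKET fixture (an az:// bucket URL).
    AZ_BUCKET = "az://dlt-ci-test-bucket"

    # Rewrite the az:// URL into abfss://<container>@<account>.dfs.core.windows.net
    # form; "dltdata" is the second argument used by the tests (the Azure storage
    # account value, per the abfss URL layout).
    abfss_bucket_url = DatabricksLoadJob.ensure_databricks_abfss_url(AZ_BUCKET, "dltdata")

    pipeline = dlt.pipeline(
        pipeline_name="databricks_abfss_demo",  # illustrative name
        # is_staging_external_location=False treats the stage as a plain staging
        # bucket rather than a Databricks-managed external location.
        destination=databricks(is_staging_external_location=False),
        staging=filesystem(abfss_bucket_url),
        dataset_name="demo_dataset",  # illustrative name
    )

    # Credentials (OAuth client id/secret or an access token) are resolved from
    # secrets/env vars, e.g. DESTINATION__DATABRICKS__CREDENTIALS__ACCESS_TOKEN,
    # which is exactly what the two tests above toggle between.
    info = pipeline.run([1, 2, 3], table_name="digits")
    print(info)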