Update Delta Tests for Spark 3.5.0 (rtdip#549)
* Update Delta Tests for Spark 3.5.0

Signed-off-by: GBBBAS <[email protected]>

* Fix black formatting

Signed-off-by: GBBBAS <[email protected]>

---------

Signed-off-by: GBBBAS <[email protected]>
GBBBAS authored Oct 18, 2023
1 parent bb2cd54 commit 10e18af
Showing 6 changed files with 81 additions and 11 deletions.
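
The changed files all follow one pattern: Delta Lake 3.0.0, the release line that targets Spark 3.5.0, publishes its Maven artifact as delta-spark_2.12 instead of delta-core_2.12, so each test now derives the expected artifact id from the installed delta-spark Python package version. Below is a minimal sketch of that selection logic, written directly against semver and importlib_metadata; the helper name resolve_delta_artifact_id is illustrative and not part of the RTDIP SDK.

# Minimal sketch of the version-gated artifact selection used in these tests.
# Assumes the semver and importlib-metadata packages are installed; the helper
# name below is hypothetical and not part of the RTDIP SDK.
from importlib_metadata import version
from semver.version import Version


def resolve_delta_artifact_id(package: str = "delta-spark") -> str:
    """Return the Maven artifact id matching the installed Delta release."""
    installed = Version.parse(version(package))
    # Delta Lake 3.0.0 renamed the Scala artifact from delta-core to delta-spark.
    if installed >= Version.parse("3.0.0"):
        return "delta-spark_2.12"
    return "delta-core_2.12"


print(resolve_delta_artifact_id())  # e.g. "delta-spark_2.12" when delta-spark 3.x is installed

In the hunks below, the tests make the same decision with the SDK helper _get_python_package_version and Version.compare.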
@@ -15,8 +15,12 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
import pytest
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import _get_package_version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_package_version,
    _get_python_package_version,
)
from src.sdk.python.rtdip_sdk.pipelines.destinations.spark.delta import (
    SparkDeltaDestination,
)
@@ -38,11 +42,19 @@ def test_spark_delta_write_setup():
        None, {}, "test_delta_destination_setup", "overwrite"
    )
    assert delta_destination.system_type().value == 2
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert delta_destination.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
@@ -15,9 +15,11 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
import pytest
from importlib_metadata import version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_python_package_version,
    _package_version_meets_minimum,
    _get_package_version,
)
@@ -51,11 +53,19 @@ def test_spark_delta_merge_write_setup(spark_session: SparkSession):
        spark_session, None, "test_delta_merge_destination_setup", {}, "1=2"
    )
    assert delta_merge_destination.system_type().value == 2
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert delta_merge_destination.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
@@ -15,8 +15,12 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
import pytest
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import _get_package_version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_package_version,
    _get_python_package_version,
)
from src.sdk.python.rtdip_sdk.pipelines.destinations import (
    SparkPCDMLatestToDeltaDestination,
)
@@ -73,11 +77,19 @@ def test_spark_pcdm_latest_to_delta_write_setup(spark_session: SparkSession):
        "test_delta_latest_destination_setup",
    )
    assert pcdm_latest_o_delta_destination.system_type().value == 2
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert pcdm_latest_o_delta_destination.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
@@ -15,8 +15,12 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
import pytest
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import _get_package_version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_package_version,
    _get_python_package_version,
)
from src.sdk.python.rtdip_sdk.pipelines.destinations.spark.delta import (
    SparkDeltaDestination,
)
@@ -81,11 +85,19 @@ def test_spark_pcdm_to_delta_write_setup(spark_session: SparkSession):
        "append",
    )
    assert pcdm_to_delta_destination.system_type().value == 2
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert pcdm_to_delta_destination.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
@@ -15,9 +15,13 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
import pytest
from pytest_mock import MockerFixture
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import _get_package_version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_package_version,
    _get_python_package_version,
)
from src.sdk.python.rtdip_sdk.pipelines.sources.spark.autoloader import (
    DataBricksAutoLoaderSource,
)
@@ -33,11 +37,19 @@
def test_databricks_autoloader_setup(spark_session: SparkSession):
    autoloader_source = DataBricksAutoLoaderSource(spark_session, {}, path, "parquet")
    assert autoloader_source.system_type().value == 3
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert autoloader_source.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
16 changes: 14 additions & 2 deletions tests/sdk/python/rtdip_sdk/pipelines/sources/spark/test_delta.py
@@ -15,9 +15,13 @@
import sys

sys.path.insert(0, ".")
from semver.version import Version
from importlib_metadata import version
import pytest
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import _get_package_version
from src.sdk.python.rtdip_sdk._sdk_utils.compare_versions import (
    _get_package_version,
    _get_python_package_version,
)
from src.sdk.python.rtdip_sdk.pipelines.destinations.spark.delta import (
    SparkDeltaDestination,
)
@@ -34,11 +38,19 @@
def test_spark_delta_read_setup(spark_session: SparkSession):
    delta_source = SparkDeltaSource(spark_session, {}, "test_spark_delta_read_setup")
    assert delta_source.system_type().value == 2
    delta_spark_artifact_id = "delta-core_2.12"
    if (
        Version.compare(
            _get_python_package_version("delta-spark"), Version.parse("3.0.0")
        )
        >= 0
    ):
        delta_spark_artifact_id = "delta-spark_2.12"
    assert delta_source.libraries() == Libraries(
        maven_libraries=[
            MavenLibrary(
                group_id="io.delta",
                artifact_id="delta-core_2.12",
                artifact_id=delta_spark_artifact_id,
                version=_get_package_version("delta-spark"),
            )
        ],
