enable kerberos support for non-GPU-driver-dependent init action tests (#1246)

* enable kerberos support for init action tests

* removing kerberos parameter from unsupported init actions

* removing kerberos parameter from unsupported init actions

* removing kerberos parameter from unsupported init actions

* removing kerberos parameter from unsupported init actions

* unpinning libsystemd0 as the fix is patched in open source

* revert the changes to the BUILD file
prince-cs authored Oct 11, 2024
1 parent fe302d3 commit 6f76b25
Showing 48 changed files with 92 additions and 49 deletions.
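The change follows one pattern throughout: each test module gains a ("KERBEROS", ...) tuple in its @parameterized.parameters decorator, and the corresponding BUILD target's shard_count is raised to match the larger case count. A minimal sketch of the test-side pattern, assuming the DataprocTestCase base class from this repo (the class name and test body here are illustrative only):

```python
from absl.testing import parameterized


class ExampleTestCase(parameterized.TestCase):
  """Sketch of the parameterization pattern this commit applies."""

  @parameterized.parameters(
      ("STANDARD", ["m"]),
      ("KERBEROS", ["m"]),  # new: rerun the same checks on a kerberized cluster
  )
  def test_example(self, configuration, machine_suffixes):
    # In the real tests, `configuration` picks the cluster-create flags
    # ("KERBEROS" maps to --enable-kerberos; see dataproc_test_case.py below)
    # and `machine_suffixes` names the instances to verify afterwards.
    self.assertIn(configuration, ("SINGLE", "STANDARD", "HA", "KERBEROS"))
```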
9 changes: 4 additions & 5 deletions BUILD
@@ -6,7 +6,6 @@ test_suite(
         ":test_cloud_sql_proxy",
         ":test_dr_elephant",
         ":test_hive_hcatalog",
-        ":test_hive_llap",
         ":test_starburst_presto",
         ":test_spark_rapids",
         "//alluxio:test_alluxio",
@@ -63,7 +62,7 @@ py_test(
     srcs = ["cloud-sql-proxy/test_cloud_sql_proxy.py"],
     data = ["cloud-sql-proxy/cloud-sql-proxy.sh", "cloud-sql-proxy/hivetest.hive"],
     local = True,
-    shard_count = 3,
+    shard_count = 4,
     deps = [
         ":pyspark_metastore_test",
         "//integration_tests:dataproc_test_case",
@@ -77,7 +76,7 @@ py_test(
     srcs = ["dr-elephant/test_dr_elephant.py"],
     data = ["dr-elephant/dr-elephant.sh"],
     local = True,
-    shard_count = 2,
+    shard_count = 3,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
@@ -90,7 +89,7 @@ py_test(
     srcs = ["hive-hcatalog/test_hive_hcatalog.py"],
     data = ["hive-hcatalog/hive-hcatalog.sh"],
     local = True,
-    shard_count = 6,
+    shard_count = 8,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
@@ -103,7 +102,7 @@ py_test(
     srcs = ["starburst-presto/test_starburst_presto.py"],
     data = ["starburst-presto/presto.sh"],
     local = True,
-    shard_count = 4,
+    shard_count = 6,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
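The shard_count bumps above track the larger parameter matrix: Bazel runs a sharded py_test as shard_count parallel executions, so keeping the count at or above the number of generated test cases keeps roughly one case per shard. A hedged sketch of the relationship (target and file names are illustrative):

```
# BUILD sketch: shard_count sized to the parameterized case count.
# With 4 configurations on one test method, shard_count = 4 lets Bazel
# run each generated case in its own shard.
py_test(
    name = "test_example",
    srcs = ["test_example.py"],
    local = True,
    shard_count = 4,
    deps = [
        "//integration_tests:dataproc_test_case",
        "@io_abseil_py//absl/testing:parameterized",
    ],
)
```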
2 changes: 1 addition & 1 deletion alluxio/BUILD
@@ -8,7 +8,7 @@ py_test(
         "alluxio.sh",
     ],
     local = True,
-    shard_count = 2,
+    shard_count = 4,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
8 changes: 6 additions & 2 deletions alluxio/test_alluxio.py
@@ -16,7 +16,9 @@ def verify_instance(self, name):
     self.assert_instance_command(name, "alluxio fs leader")
 
   @parameterized.parameters(
-      ("STANDARD", ["m"]),)
+      ("STANDARD", ["m"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_alluxio(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
       self.skipTest("Not supported in Rocky Linux-based images")
@@ -32,7 +34,9 @@ def test_alluxio(self, configuration, machine_suffixes):
             machine_suffix))
 
   @parameterized.parameters(
-      ("STANDARD", ["m"]),)
+      ("STANDARD", ["m"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_alluxio_with_presto(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
       self.skipTest("Not supported in Rocky Linux-based images")
2 changes: 1 addition & 1 deletion atlas/BUILD
@@ -10,7 +10,7 @@ py_test(
         "//kafka:kafka.sh",
     ],
     local = True,
-    shard_count = 7,
+    shard_count = 9,
     deps = [
         ":validate_atlas",
         "//integration_tests:dataproc_test_case",
6 changes: 5 additions & 1 deletion atlas/test_atlas.py
@@ -94,6 +94,7 @@ def verify_instance(self, instance, username='admin', password='admin'):
   @parameterized.parameters(
       ("SINGLE", ["m"]),
       ("STANDARD", ["m"]),
+      ("KERBEROS", ["m"]),
   )
   def test_atlas(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
@@ -145,7 +146,10 @@ def test_atlas(self, configuration, machine_suffixes):
     if configuration == "HA":
       self.assertEqual(2, atlas_statuses.count("PASSIVE"))
 
-  @parameterized.parameters(("SINGLE", ["m"]))
+  @parameterized.parameters(
+      ("SINGLE", ["m"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_atlas_overrides_admin_credentials(self, configuration,
                                              machine_suffixes):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion bigtable/BUILD
@@ -6,7 +6,7 @@ py_test(
     srcs = ["test_bigtable.py"],
     data = ["bigtable.sh"],
     local = True,
-    shard_count = 3,
+    shard_count = 4,
     deps = [
         ":run_hbase_commands",
         "//integration_tests:dataproc_test_case",
1 change: 1 addition & 0 deletions bigtable/test_bigtable.py
@@ -67,6 +67,7 @@ def verify_instance(self, name):
       ("SINGLE", ["m"]),
       ("STANDARD", ["m"]),
       ("HA", ["m-0"]),
+      ("KERBEROS", ["m"]),
   )
   def test_bigtable(self, configuration, machine_suffixes):
     self.createCluster(
1 change: 1 addition & 0 deletions cloud-sql-proxy/test_cloud_sql_proxy.py
@@ -59,6 +59,7 @@ def __submit_pyspark_job(self, cluster_name):
       'SINGLE',
       'STANDARD',
       'HA',
+      'KERBEROS',
   )
   def test_cloud_sql_proxy(self, configuration):
     metadata = 'hive-metastore-instance={}:{},hive-metastore-db=metastore'.format(self.PROJECT_METADATA,
2 changes: 1 addition & 1 deletion conda/BUILD
@@ -11,7 +11,7 @@ py_test(
         "install-conda-env.sh",
     ],
     local = True,
-    shard_count = 2,
+    shard_count = 4,
     deps = [
         ":get_sys_exec",
         "//integration_tests:dataproc_test_case",
2 changes: 2 additions & 0 deletions conda/test_conda.py
@@ -56,6 +56,8 @@ def _parse_packages(stdout):
   @parameterized.parameters(
       ("STANDARD", [], []),
       ("STANDARD", CONDA_PKGS, PIP_PKGS),
+      ("KERBEROS", [], []),
+      ("KERBEROS", CONDA_PKGS, PIP_PKGS),
   )
   def test_conda(self, configuration, conda_packages, pip_packages):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion connectors/BUILD
@@ -6,7 +6,7 @@ py_test(
     srcs = ["test_connectors.py"],
     data = ["connectors.sh"],
     local = True,
-    shard_count = 10,
+    shard_count = 12,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
28 changes: 20 additions & 8 deletions connectors/test_connectors.py
@@ -50,8 +50,11 @@ def _hadoop_version(self):
   def _scala_version(self):
     return "2.12"
 
-  @parameterized.parameters(("SINGLE", ["m"]),
-                            ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]))
+  @parameterized.parameters(
+      ("SINGLE", ["m"]),
+      ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_bq_connector_version(self, configuration, instances):
     if self.getImageOs() == 'rocky':
       self.skipTest("Not supported in Rocky Linux-based images")
@@ -63,8 +66,11 @@ def test_bq_connector_version(self, configuration, instances):
     self.verify_instances(self.getClusterName(), instances,
                           "bigquery-connector", self.BQ_CONNECTOR_VERSION)
 
-  @parameterized.parameters(("SINGLE", ["m"]),
-                            ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]))
+  @parameterized.parameters(
+      ("SINGLE", ["m"]),
+      ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_spark_bq_connector_version(self, configuration, instances):
     if self.getImageVersion() < pkg_resources.parse_version("1.5"):
       self.SPARK_BQ_CONNECTOR_VERSION = "0.29.0"
@@ -83,8 +89,11 @@ def test_spark_bq_connector_version(self, configuration, instances):
                           "spark-bigquery-connector",
                           self.SPARK_BQ_CONNECTOR_VERSION)
 
-  @parameterized.parameters(("SINGLE", ["m"]),
-                            ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]))
+  @parameterized.parameters(
+      ("SINGLE", ["m"]),
+      ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_bq_connector_url(self, configuration, instances):
     if self.getImageVersion() < pkg_resources.parse_version("1.5"):
       self.SPARK_BQ_CONNECTOR_VERSION = "0.29.0"
@@ -102,8 +111,11 @@ def test_bq_connector_url(self, configuration, instances):
     self.verify_instances(self.getClusterName(), instances,
                           "bigquery-connector", self.BQ_CONNECTOR_VERSION)
 
-  @parameterized.parameters(("SINGLE", ["m"]),
-                            ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]))
+  @parameterized.parameters(
+      ("SINGLE", ["m"]),
+      ("HA", ["m-0", "m-1", "m-2", "w-0", "w-1"]),
+      ("KERBEROS", ["m"]),
+  )
   def test_spark_bq_connector_url(self, configuration, instances):
     if self.getImageVersion() < pkg_resources.parse_version("1.5"):
       self.SPARK_BQ_CONNECTOR_VERSION = "0.29.0"
2 changes: 1 addition & 1 deletion dask/BUILD
@@ -12,7 +12,7 @@ py_test(
         "verify_dask_standalone.py",
     ],
     local = True,
-    shard_count = 3,
+    shard_count = 4,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
4 changes: 3 additions & 1 deletion dask/test_dask.py
@@ -39,7 +39,9 @@ def _run_dask_test_script(self, name, script):
 
   @parameterized.parameters(
       ("STANDARD", ["m", "w-0"], "yarn"),
-      ("STANDARD", ["m"], "standalone"))
+      ("STANDARD", ["m"], "standalone"),
+      ("KERBEROS", ["m"], "standalone"),
+  )
   def test_dask(self, configuration, instances, runtime):
 
     if self.getImageVersion() < pkg_resources.parse_version("2.0"):
1 change: 1 addition & 0 deletions dr-elephant/test_dr_elephant.py
@@ -30,6 +30,7 @@ def verify_instance(self, instance_name):
   @parameterized.parameters(
       ("STANDARD", ["m"]),
       ("HA", ["m-0"]),
+      ("KERBEROS", ["m"]),
   )
   def test_dr_elephant(self, configuration, machine_suffixes):
     if self.getImageVersion() >= pkg_resources.parse_version("1.3"):
2 changes: 1 addition & 1 deletion drill/BUILD
@@ -10,7 +10,7 @@ py_test(
         "//zookeeper:zookeeper.sh",
     ],
     local = True,
-    shard_count = 3,
+    shard_count = 4,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
1 change: 1 addition & 0 deletions drill/test_drill.py
@@ -30,6 +30,7 @@ def __run_bash_test_file(self, name, drill_mode, target_node):
       ("SINGLE", [("m", "m")]),
       ("STANDARD", [("m", "w-0"), ("m", "m")]),
       ("HA", [("m-0", "w-0"), ("w-0", "m-1")]),
+      ("KERBEROS", ["m"]),
   )
   def test_drill(self, configuration, verify_options):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion flink/BUILD
@@ -9,7 +9,7 @@ py_test(
         "validate.sh",
     ],
     local = True,
-    shard_count = 5,
+    shard_count = 7,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
2 changes: 2 additions & 0 deletions flink/test_flink.py
@@ -28,6 +28,7 @@ def __run_test_file(self, name, yarn_session):
   @parameterized.parameters(
       ("STANDARD", ["m"]),
       ("HA", ["m-0", "m-1", "m-2"]),
+      ("KERBEROS", ["m"]),
   )
   def test_flink(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
@@ -49,6 +50,7 @@ def test_flink(self, configuration, machine_suffixes):
       ("SINGLE", ["m"]),
       ("STANDARD", ["m"]),
       ("HA", ["m-0", "m-1", "m-2"]),
+      ("KERBEROS", ["m"]),
   )
   def test_flink_with_optional_metadata(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion ganglia/BUILD
@@ -8,7 +8,7 @@ py_test(
         "ganglia.sh",
     ],
     local = True,
-    shard_count = 3,
+    shard_count = 4,
     deps = [
         ":verify_ganglia_running",
         "//integration_tests:dataproc_test_case",
1 change: 1 addition & 0 deletions ganglia/test_ganglia.py
@@ -31,6 +31,7 @@ def verify_instance(self, name):
       ("SINGLE", ["m"]),
       ("STANDARD", ["m", "w-0"]),
       ("HA", ["m-0", "m-1", "m-2", "w-0"]),
+      ("KERBEROS", ["m"]),
   )
   def test_ganglia(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion h2o/BUILD
@@ -10,7 +10,7 @@ py_test(
         "//conda:bootstrap-conda.sh",
     ],
     local = True,
-    shard_count = 2,
+    shard_count = 3,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
2 changes: 1 addition & 1 deletion h2o/test_h2o.py
@@ -10,7 +10,7 @@ class H2OTestCase(DataprocTestCase):
   INIT_ACTIONS = ["h2o/h2o.sh"]
   SAMPLE_H2O_JOB_PATH = "h2o/sample-script.py"
 
-  @parameterized.parameters("STANDARD", "HA")
+  @parameterized.parameters("STANDARD", "HA", "KERBEROS")
   def test_h2o(self, configuration):
     if self.getImageVersion() < pkg_resources.parse_version("2.0"):
       self.skipTest("Not supported in pre-2.0 images")
2 changes: 1 addition & 1 deletion hbase/BUILD
@@ -9,7 +9,7 @@ py_test(
         "//zookeeper:zookeeper.sh",
     ],
     local = True,
-    shard_count = 6,
+    shard_count = 8,
    deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
2 changes: 2 additions & 0 deletions hbase/test_hbase.py
@@ -32,6 +32,7 @@ def verify_instance(self, name):
       ("SINGLE", ["m"]),
       ("STANDARD", ["m"]),
       ("HA", ["m-0"]),
+      ("KERBEROS", ["m"]),
   )
   def test_hbase(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
@@ -54,6 +55,7 @@ def test_hbase(self, configuration, machine_suffixes):
       ("SINGLE", ["m"]),
       ("STANDARD", ["m"]),
       ("HA", ["m-0"]),
+      ("KERBEROS", ["m"]),
   )
   def test_hbase_on_gcs(self, configuration, machine_suffixes):
     if self.getImageOs() == 'rocky':
2 changes: 2 additions & 0 deletions hive-hcatalog/test_hive_hcatalog.py
@@ -61,9 +61,11 @@ def __submit_hive_job(self, cluster_name, job, should_repeat_job=False):
       ("SINGLE", False),
       ("STANDARD", False),
       ("HA", False),
+      ("KERBEROS", False),
       ("SINGLE", True),
       ("STANDARD", True),
       ("HA", True),
+      ("KERBEROS", True),
   )
   def test_hive_hcatalog(self, configuration, should_repeat_job):
     if self.getImageOs() == 'rocky':
2 changes: 1 addition & 1 deletion hue/BUILD
@@ -10,7 +10,7 @@ py_test(
         "run_queries.py"],
     data = ["hue.sh"],
     local = True,
-    shard_count = 3,
+    shard_count = 7,
     deps = [
         "//integration_tests:dataproc_test_case",
         "@io_abseil_py//absl/testing:parameterized",
3 changes: 3 additions & 0 deletions integration_tests/dataproc_test_case.py
@@ -38,6 +38,9 @@ class DataprocTestCase(parameterized.TestCase):
       "HA": [
           "--num-masters=3",
           "--num-workers=2",
       ],
+      "KERBEROS": [
+          "--enable-kerberos"
+      ]
   }
 
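The configuration names in this dict map straight to gcloud dataproc clusters create flags, so KERBEROS simply appends --enable-kerberos. A minimal sketch of how a configuration name could expand into a create command, assuming flag lists like the ones above (the real createCluster helper in dataproc_test_case.py handles many more options, and the non-KERBEROS entries here are illustrative):

```python
# Hedged sketch: expanding a configuration name into cluster-create flags.
# The KERBEROS entry mirrors the dict above; the others are illustrative.
CONFIGURATIONS = {
    "STANDARD": ["--num-workers=2"],
    "HA": ["--num-masters=3", "--num-workers=2"],
    "KERBEROS": ["--enable-kerberos"],  # added by this commit
}


def build_create_command(cluster_name, configuration):
  """Returns an illustrative gcloud invocation for the given configuration."""
  args = ["gcloud", "dataproc", "clusters", "create", cluster_name]
  args.extend(CONFIGURATIONS[configuration])
  return " ".join(args)


print(build_create_command("test-cluster", "KERBEROS"))
# -> gcloud dataproc clusters create test-cluster --enable-kerberos
```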
3 changes: 3 additions & 0 deletions kafka/test_kafka.py
@@ -46,6 +46,7 @@ def __submit_pyspark_job(self, cluster_name):
   @parameterized.parameters(
       'STANDARD',
       'HA',
+      'KERBEROS',
   )
   def test_kafka_job(self, configuration):
     if self.getImageOs() == 'rocky':
@@ -60,6 +61,7 @@ def test_kafka_job(self, configuration):
   @parameterized.parameters(
       'STANDARD',
       'HA',
+      'KERBEROS',
   )
   def test_kafka_cruise_control_job(self, configuration):
     if self.getImageOs() == 'rocky':
@@ -74,6 +76,7 @@ def test_kafka_cruise_control_job(self, configuration):
   @parameterized.parameters(
       'STANDARD',
       'HA',
+      'KERBEROS',
   )
   def test_kafka_manager_job(self, configuration):
     if self.getImageOs() == 'rocky':