Cleanup deprecation messages and tests (#806)
medb authored Sep 10, 2020
1 parent d059a6c commit f61a6aa
Showing 25 changed files with 78 additions and 77 deletions.
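Every file below follows the same two patterns. In the Python integration tests, version guards that previously bailed out with a bare return now call self.skipTest(...), so unsupported image versions are reported as skipped instead of silently counted as passing. In the init-action shell scripts, the deprecation guard changes from matching only 2.x images ([[ $DATAPROC_VERSION = 2.* ]]) to rejecting anything that is not 1.x ([[ $DATAPROC_VERSION != 1.* ]]), and the actual image version is interpolated into NOT_SUPPORTED_MESSAGE. A minimal sketch of the test-side change, using a hypothetical ExampleTestCase rather than the repository's DataprocTestCase base class:

import unittest

import pkg_resources


class ExampleTestCase(unittest.TestCase):
    # Hypothetical stand-in for DataprocTestCase, for illustration only.
    image_version = pkg_resources.parse_version("2.0")  # assumed image under test

    def test_old_style(self):
        # Old pattern: on an unsupported image the test just returns,
        # so unittest records it as a pass even though nothing was verified.
        if self.image_version >= pkg_resources.parse_version("2.0"):
            return

    def test_new_style(self):
        # New pattern: skipTest() raises unittest.SkipTest, so the result
        # is reported as skipped with the given reason.
        if self.image_version >= pkg_resources.parse_version("2.0"):
            self.skipTest("Not supported in 2.0+ images")


if __name__ == '__main__':
    unittest.main(verbosity=2)  # expect one "ok" and one "skipped"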
68 changes: 34 additions & 34 deletions atlas/test_atlas.py
@@ -95,8 +95,8 @@ def verify_instance(self, instance, username='admin', password='admin'):
def test_atlas(self, configuration, machine_suffixes):
image_version = self.getImageVersion()
if image_version < pkg_resources.parse_version("1.5") or \
-image_version > pkg_resources.parse_version("2.0"):
-return
+image_version >= pkg_resources.parse_version("2.0"):
+self.skipTest("Not supported in pre 1.5 and 2.0+ images")

init_actions = self.INIT_ACTIONS
optional_components = self.OPTIONAL_COMPONENTS
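Note that the guard's upper bound also changes from > to >=. With pkg_resources version comparison, an image that is exactly 2.0 fails the old > check but passes the new >= check, so 2.0 images are now skipped as well. A quick illustration of the comparison semantics (standalone snippet, not part of the diff):

from pkg_resources import parse_version

v = parse_version("2.0")
print(v > parse_version("2.0"))   # False: the old guard let an exactly-2.0 image run the test
print(v >= parse_version("2.0"))  # True: the new guard skips it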
@@ -146,46 +146,46 @@ def test_atlas_overrides_admin_credentials(self, configuration,
machine_suffixes):
image_version = self.getImageVersion()
if image_version < pkg_resources.parse_version("1.5") or \
-image_version > pkg_resources.parse_version("2.0"):
-return
-
-username = 'dataproc-user'
-password = 'dataproc-password'
-password_sha256 = hashlib.sha256(password.encode('utf-8')).hexdigest()
-metadata = \
-"ATLAS_ADMIN_USERNAME={},ATLAS_ADMIN_PASSWORD_SHA256={}".format(
-username, password_sha256)
-self.createCluster(configuration,
-self.INIT_ACTIONS,
-beta=True,
-timeout_in_minutes=30,
-metadata=metadata,
-optional_components=self.OPTIONAL_COMPONENTS,
-machine_type="e2-standard-4")
-for machine_suffix in machine_suffixes:
-self.verify_instance(
-"{}-{}".format(self.getClusterName(), machine_suffix),
-username, password)
+image_version >= pkg_resources.parse_version("2.0"):
+self.skipTest("Not supported in pre 1.5 and 2.0+ images")
+
+username = 'dataproc-user'
+password = 'dataproc-password'
+password_sha256 = hashlib.sha256(password.encode('utf-8')).hexdigest()
+metadata = \
+"ATLAS_ADMIN_USERNAME={},ATLAS_ADMIN_PASSWORD_SHA256={}".format(
+username, password_sha256)
+self.createCluster(configuration,
+self.INIT_ACTIONS,
+beta=True,
+timeout_in_minutes=30,
+metadata=metadata,
+optional_components=self.OPTIONAL_COMPONENTS,
+machine_type="e2-standard-4")
+for machine_suffix in machine_suffixes:
+self.verify_instance(
+"{}-{}".format(self.getClusterName(), machine_suffix),
+username, password)

@parameterized.parameters("ZOOKEEPER", "HBASE", "SOLR")
def test_atlas_fails_without_component(self, component):
image_version = self.getImageVersion()
if image_version < pkg_resources.parse_version("1.5") or \
-image_version > pkg_resources.parse_version("2.0"):
-return
-
-with self.assertRaises(AssertionError):
-self.createCluster(
-"SINGLE",
-self.INIT_ACTIONS,
-beta=True,
-timeout_in_minutes=30,
-machine_type="e2-standard-4",
-optional_components=self.OPTIONAL_COMPONENTS.remove(component))
+image_version >= pkg_resources.parse_version("2.0"):
+self.skipTest("Not supported in pre 1.5 and 2.0+ images")
+
+with self.assertRaises(AssertionError):
+self.createCluster(
+"SINGLE",
+self.INIT_ACTIONS,
+beta=True,
+timeout_in_minutes=30,
+machine_type="e2-standard-4",
+optional_components=self.OPTIONAL_COMPONENTS.remove(component))

def test_atlas_ha_fails_without_kafka(self):
if self.getImageVersion() < pkg_resources.parse_version("1.5"):
-return
+self.skipTest("Not supported in pre 1.5 images")

with self.assertRaises(AssertionError):
self.createCluster("HA",
4 changes: 2 additions & 2 deletions conda/bootstrap-conda.sh
@@ -2,9 +2,9 @@

set -exo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Conda initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Conda initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Anaconda Component instead: https://cloud.google.com/dataproc/docs/concepts/components/anaconda"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

# Modified from bootstrap-conda.sh script, see:
# https://bitbucket.org/bombora-datascience/bootstrap-conda
4 changes: 2 additions & 2 deletions conda/install-conda-env.sh
@@ -1,9 +1,9 @@
#!/bin/bash
set -exo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Conda initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Conda initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Anaconda Component instead: https://cloud.google.com/dataproc/docs/concepts/components/anaconda"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

if [[ -f /etc/profile.d/effective-python.sh ]]; then
PROFILE_SCRIPT_PATH=/etc/profile.d/effective-python.sh
2 changes: 1 addition & 1 deletion conda/test_conda.py
@@ -60,7 +60,7 @@ def _parse_packages(stdout):
def test_conda(self, configuration, conda_packages, pip_packages):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

metadata = "'CONDA_PACKAGES={},PIP_PACKAGES={}'".format(
" ".join(conda_packages), " ".join(pip_packages))
4 changes: 2 additions & 2 deletions datalab/datalab.sh
@@ -18,9 +18,9 @@

set -exo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Datalab initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Datalab initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Jupyter Component instead: https://cloud.google.com/dataproc/docs/concepts/components/jupyter"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly ROLE="$(/usr/share/google/get_metadata_value attributes/dataproc-role)"
readonly PROJECT="$(/usr/share/google/get_metadata_value ../project/project-id)"
2 changes: 1 addition & 1 deletion datalab/test_datalab.py
@@ -22,7 +22,7 @@ def verify_instance(self, name):
def test_datalab(self, configuration, machine_suffixes, python):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

init_actions = self.INIT_ACTIONS
metadata = 'INIT_ACTIONS_REPO={}'.format(self.INIT_ACTIONS_REPO)
3 changes: 2 additions & 1 deletion h2o/test_h2o.py
@@ -14,7 +14,8 @@ class H2OTestCase(DataprocTestCase):
def test_h2o(self, configuration):
# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")
+
init_actions = self.INIT_ACTIONS
optional_components = ["ANACONDA"]
if self.getImageVersion() < pkg_resources.parse_version("1.4"):
4 changes: 2 additions & 2 deletions hbase/hbase.sh
@@ -17,9 +17,9 @@

set -Eeuxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="HBase initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="HBase initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use HBase Component instead: https://cloud.google.com/dataproc/docs/concepts/components/hbase"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly HBASE_HOME='/etc/hbase'
readonly CLUSTER_NAME=$(/usr/share/google/get_metadata_value attributes/dataproc-cluster-name)
4 changes: 2 additions & 2 deletions hbase/test_hbase.py
@@ -36,7 +36,7 @@ def verify_instance(self, name):
def test_hbase(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

init_actions = self.INIT_ACTIONS
if configuration != "HA":
@@ -55,7 +55,7 @@ def test_hbase(self, configuration, machine_suffixes):
def test_hbase_on_gcs(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

init_actions = self.INIT_ACTIONS
if configuration != "HA":
4 changes: 2 additions & 2 deletions jupyter/jupyter.sh
@@ -8,9 +8,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Jupyter initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Jupyter initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Jupyter Component instead: https://cloud.google.com/dataproc/docs/concepts/components/jupyter"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly ROLE="$(/usr/share/google/get_metadata_value attributes/dataproc-role)"

4 changes: 2 additions & 2 deletions jupyter/test_jupyter.py
@@ -22,7 +22,7 @@ def verify_instance(self, name, jupyter_port):
def test_jupyter(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

metadata = 'INIT_ACTIONS_REPO={}'.format(self.INIT_ACTIONS_REPO)
self.createCluster(
@@ -42,7 +42,7 @@ def test_jupyter(self, configuration, machine_suffixes):
def test_jupyter_with_metadata(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

jupyter_port = "8125"

4 changes: 2 additions & 2 deletions jupyter2/jupyter2.sh
@@ -2,9 +2,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Jupyter 2 initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Jupyter 2 initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Jupyter Component instead: https://cloud.google.com/dataproc/docs/concepts/components/jupyter"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly DATAPROC_MASTER="$(/usr/share/google/get_metadata_value attributes/dataproc-master)"
readonly DATAPROC_BUCKET="$(/usr/share/google/get_metadata_value attributes/dataproc-bucket)"
4 changes: 2 additions & 2 deletions knox/test_knox.py
@@ -31,7 +31,7 @@ def _run_test_script(self, name, cert_type):
def test_knox_localhost_cert(self, configuration, machine_suffixes):
# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")

self.createCluster(
configuration,
@@ -54,7 +54,7 @@ def test_knox_localhost_cert(self, configuration, machine_suffixes):
def test_knox_hostname_cert(self, configuration, machine_suffixes):
# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")

self.createCluster(
configuration,
2 changes: 1 addition & 1 deletion mlvm/test_mlvm.py
@@ -67,7 +67,7 @@ def verify_rapids_dask(self):
def test_mlvm(self, configuration, accelerator, gpu_provider, rapids_runtime):
# Supported on Dataproc 1.5+
if self.getImageVersion() < pkg_resources.parse_version("1.5"):
-return
+self.skipTest("Not supported in pre 1.5 images")

metadata = "init-actions-repo={}".format(self.INIT_ACTIONS_REPO)
if accelerator:
4 changes: 2 additions & 2 deletions presto/presto.sh
@@ -16,9 +16,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Presto initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Presto initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Presto Component instead: https://cloud.google.com/dataproc/docs/concepts/components/presto"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

# Use Python from /usr/bin instead of /opt/conda.
export PATH=/usr/bin:$PATH
4 changes: 2 additions & 2 deletions presto/test_presto.py
@@ -88,7 +88,7 @@ def test_presto(self, configuration, machine_suffixes, coordinators,
workers):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

self.createCluster(configuration, self.INIT_ACTIONS)
for machine_suffix in machine_suffixes:
@@ -101,7 +101,7 @@ def test_presto_custom_port(self, configuration, machine_suffixes,
coordinators, workers):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

self.createCluster(
configuration,
4 changes: 2 additions & 2 deletions ranger/ranger.sh
@@ -17,9 +17,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Ranger initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Ranger initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Ranger Component instead: https://cloud.google.com/dataproc/docs/concepts/components/ranger"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

# Use Python from /usr/bin instead of /opt/conda.
export PATH=/usr/bin:$PATH
4 changes: 2 additions & 2 deletions ranger/test_ranger.py
@@ -34,11 +34,11 @@ def __run_test_script(self, name):
def test_ranger(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")

self.createCluster(
configuration,
4 changes: 2 additions & 2 deletions rapids/test_rapids.py
@@ -39,7 +39,7 @@ def verify_spark_job(self):
@parameterized.parameters(("STANDARD", ["m", "w-0"], GPU_P100))
def test_rapids_dask(self, configuration, machine_suffixes, accelerator):
if self.getImageVersion() < pkg_resources.parse_version("1.5"):
-return
+self.skipTest("Not supported in pre 1.5 images")

self.createCluster(configuration,
self.INIT_ACTIONS,
@@ -57,7 +57,7 @@ def test_rapids_dask(self, configuration, machine_suffixes, accelerator):
@parameterized.parameters(("STANDARD", ["w-0"], GPU_P100))
def test_rapids_spark(self, configuration, machine_suffixes, accelerator):
if self.getImageVersion() < pkg_resources.parse_version("1.5"):
-return
+self.skipTest("Not supported in pre 1.5 images")

metadata = 'gpu-driver-provider=NVIDIA,rapids-runtime=SPARK'
if self.getImageVersion() < pkg_resources.parse_version("2.0"):
4 changes: 2 additions & 2 deletions solr/solr.sh
@@ -18,9 +18,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Solr initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Solr initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Solr Component instead: https://cloud.google.com/dataproc/docs/concepts/components/solr"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly MASTER_ADDITIONAL="$(/usr/share/google/get_metadata_value attributes/dataproc-master-additional)"
readonly CLUSTER_NAME="$(/usr/share/google/get_metadata_value attributes/dataproc-cluster-name)"
2 changes: 1 addition & 1 deletion solr/test_solr.py
@@ -36,7 +36,7 @@ def __run_test_script(self, name):
def test_solr(self, configuration, machine_suffixes):
# Skip on 2.0+ version of Dataproc because it's not supported
if self.getImageVersion() >= pkg_resources.parse_version("2.0"):
-return
+self.skipTest("Not supported in 2.0+ images")

self.createCluster(configuration, self.INIT_ACTIONS)
for machine_suffix in machine_suffixes:
4 changes: 2 additions & 2 deletions tez/tez.sh
@@ -18,9 +18,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Tez initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Tez initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Tez is configured by default in Dataproc 1.3+"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

# Use Python from /usr/bin instead of /opt/conda.
export PATH=/usr/bin:$PATH
4 changes: 2 additions & 2 deletions tony/test_tony.py
@@ -16,7 +16,7 @@ class TonYTestCase(DataprocTestCase):
def test_tony_tf(self, configuration):
# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")

self.createCluster(
configuration,
@@ -41,7 +41,7 @@ def test_tony_tf(self, configuration):
def test_tony_torch(self):
# Init action supported on Dataproc 1.3+
if self.getImageVersion() < pkg_resources.parse_version("1.3"):
-return
+self.skipTest("Not supported in pre 1.3 images")

self.createCluster(
"STANDARD",
4 changes: 2 additions & 2 deletions zeppelin/zeppelin.sh
@@ -19,9 +19,9 @@

set -euxo pipefail

-readonly NOT_SUPPORTED_MESSAGE="Zeppelin initialization action is not supported on Dataproc 2.0+.
+readonly NOT_SUPPORTED_MESSAGE="Zeppelin initialization action is not supported on Dataproc ${DATAPROC_VERSION}.
Use Zeppelin Component instead: https://cloud.google.com/dataproc/docs/concepts/components/zeppelin"
-[[ $DATAPROC_VERSION = 2.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1
+[[ $DATAPROC_VERSION != 1.* ]] && echo "$NOT_SUPPORTED_MESSAGE" && exit 1

readonly ROLE="$(/usr/share/google/get_metadata_value attributes/dataproc-role)"
readonly INTERPRETER_FILE='/etc/zeppelin/conf/interpreter.json'