diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py b/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py index 03a58aee22b2f..95de4bd06aa3e 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/package_spec.py @@ -232,7 +232,7 @@ def build_steps(self) -> List[BuildkiteTopLevelStep]: @property def requirements(self): # First try to infer requirements from the python package - package = PythonPackages.get(self.name) + package = PythonPackages.get(self.name) # pyright: ignore[reportArgumentType] if package: return set.union(package.install_requires, *package.extras_require.values()) diff --git a/.buildkite/dagster-buildkite/dagster_buildkite/utils.py b/.buildkite/dagster-buildkite/dagster_buildkite/utils.py index d83b5d67a7785..6150522e8bd02 100644 --- a/.buildkite/dagster-buildkite/dagster_buildkite/utils.py +++ b/.buildkite/dagster-buildkite/dagster_buildkite/utils.py @@ -363,7 +363,7 @@ def skip_if_no_docs_changes(): if message_contains("NO_SKIP"): return None - if not is_feature_branch(os.getenv("BUILDKITE_BRANCH")): + if not is_feature_branch(os.getenv("BUILDKITE_BRANCH")): # pyright: ignore[reportArgumentType] return None # If anything changes in the docs directory diff --git a/docs/content/integrations/pandas.mdx b/docs/content/integrations/pandas.mdx index fd458e4f8b3a6..9df0968e2bcc0 100644 --- a/docs/content/integrations/pandas.mdx +++ b/docs/content/integrations/pandas.mdx @@ -43,10 +43,10 @@ TripDataFrame = create_dagster_pandas_dataframe_type( PandasColumn.integer_column("bike_id", min_value=0), PandasColumn.categorical_column("color", categories={"red", "green", "blue"}), PandasColumn.datetime_column( - "start_time", min_datetime=datetime(year=2020, month=2, day=10) + "start_time", min_datetime=Timestamp(year=2020, month=2, day=10) ), PandasColumn.datetime_column( - "end_time", min_datetime=datetime(year=2020, month=2, day=10) + "end_time", 
min_datetime=Timestamp(year=2020, month=2, day=10) ), PandasColumn.string_column("station"), PandasColumn.exists("amount_paid"), diff --git a/examples/docs_snippets/docs_snippets/legacy/dagster_pandas_guide/core_trip.py b/examples/docs_snippets/docs_snippets/legacy/dagster_pandas_guide/core_trip.py index c636184a92cd3..2d0f28073e9bc 100644 --- a/examples/docs_snippets/docs_snippets/legacy/dagster_pandas_guide/core_trip.py +++ b/examples/docs_snippets/docs_snippets/legacy/dagster_pandas_guide/core_trip.py @@ -1,7 +1,7 @@ from datetime import datetime from dagster_pandas import PandasColumn, create_dagster_pandas_dataframe_type -from pandas import DataFrame, read_csv +from pandas import DataFrame, Timestamp, read_csv from dagster import Out, file_relative_path, job, op @@ -12,10 +12,10 @@ PandasColumn.integer_column("bike_id", min_value=0), PandasColumn.categorical_column("color", categories={"red", "green", "blue"}), PandasColumn.datetime_column( - "start_time", min_datetime=datetime(year=2020, month=2, day=10) + "start_time", min_datetime=Timestamp(year=2020, month=2, day=10) ), PandasColumn.datetime_column( - "end_time", min_datetime=datetime(year=2020, month=2, day=10) + "end_time", min_datetime=Timestamp(year=2020, month=2, day=10) ), PandasColumn.string_column("station"), PandasColumn.exists("amount_paid"), diff --git a/examples/experimental/assets_yaml_dsl/assets_yaml_dsl_tests/test_stocks_dsl.py b/examples/experimental/assets_yaml_dsl/assets_yaml_dsl_tests/test_stocks_dsl.py index edda0f45e44bf..0647f54c3e9e8 100644 --- a/examples/experimental/assets_yaml_dsl/assets_yaml_dsl_tests/test_stocks_dsl.py +++ b/examples/experimental/assets_yaml_dsl/assets_yaml_dsl_tests/test_stocks_dsl.py @@ -19,7 +19,7 @@ from dagster import AssetKey from dagster._core.definitions import materialize from dagster._core.pipes.subprocess import PipesSubprocessClient -from examples.experimental.assets_yaml_dsl.assets_yaml_dsl.domain_specific_dsl.stocks_dsl import ( +from 
examples.experimental.assets_yaml_dsl.assets_yaml_dsl.domain_specific_dsl.stocks_dsl import ( # type: ignore build_stock_assets_object, ) diff --git a/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py b/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py index 4958c6098460a..d2b55b92e040e 100644 --- a/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py +++ b/integration_tests/python_modules/dagster-k8s-test-infra/dagster_k8s_test_infra/cluster.py @@ -71,7 +71,7 @@ def _cluster_provider(request): f"Found existing image tagged {docker_image}, skipping image build. To rebuild," f" first run: docker rmi {docker_image}" ) - except docker.errors.ImageNotFound: + except docker.errors.ImageNotFound: # pyright: ignore[reportAttributeAccessIssue] build_and_tag_test_image(docker_image) kind_load_images( cluster_name=cluster_config.name, @@ -231,7 +231,7 @@ def check_export_runs(instance): # example PYTEST_CURRENT_TEST: test_user_code_deployments.py::test_execute_on_celery_k8s (teardown) current_test = ( - os.environ.get("PYTEST_CURRENT_TEST").split()[0].replace("::", "-").replace(".", "-") + os.environ.get("PYTEST_CURRENT_TEST").split()[0].replace("::", "-").replace(".", "-") # pyright: ignore[reportOptionalMemberAccess] ) for run in instance.get_runs(): diff --git a/integration_tests/test_suites/backcompat-test-suite/webserver_service/defs_for_earliest_tested_release.py b/integration_tests/test_suites/backcompat-test-suite/webserver_service/defs_for_earliest_tested_release.py index 4099eff3f9eae..1610643fa5208 100644 --- a/integration_tests/test_suites/backcompat-test-suite/webserver_service/defs_for_earliest_tested_release.py +++ b/integration_tests/test_suites/backcompat-test-suite/webserver_service/defs_for_earliest_tested_release.py @@ -1,5 +1,3 @@ -# type: ignore - # Backcompat test definitions intended for use with our oldest testest release of Dagster. 
Does not # use `Definitions` because it is not available in our oldest supported releases. diff --git a/integration_tests/test_suites/celery-k8s-test-suite/conftest.py b/integration_tests/test_suites/celery-k8s-test-suite/conftest.py index 2c012e44cf21a..e03a69851698c 100644 --- a/integration_tests/test_suites/celery-k8s-test-suite/conftest.py +++ b/integration_tests/test_suites/celery-k8s-test-suite/conftest.py @@ -30,7 +30,7 @@ def dagster_docker_image(): f"Found existing image tagged {docker_image}, skipping image build. To rebuild, first run: " f"docker rmi {docker_image}" ) - except docker.errors.ImageNotFound: + except docker.errors.ImageNotFound: # pyright: ignore[reportAttributeAccessIssue] build_and_tag_test_image(docker_image) return docker_image diff --git a/integration_tests/test_suites/daemon-test-suite/monitoring_daemon_tests/test_monitoring.py b/integration_tests/test_suites/daemon-test-suite/monitoring_daemon_tests/test_monitoring.py index 9cae084b3e6c3..61624a80b1daa 100644 --- a/integration_tests/test_suites/daemon-test-suite/monitoring_daemon_tests/test_monitoring.py +++ b/integration_tests/test_suites/daemon-test-suite/monitoring_daemon_tests/test_monitoring.py @@ -96,7 +96,7 @@ def test_docker_monitoring(aws_env): find_local_test_image(docker_image) run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "ops": { "multiply_the_word_slow": { @@ -139,21 +139,21 @@ def test_docker_monitoring(aws_env): start_time = time.time() while time.time() - start_time < 60: - run = instance.get_run_by_id(run.run_id) - if run.status == DagsterRunStatus.STARTED: + run = instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] + if run.status == DagsterRunStatus.STARTED: # pyright: ignore[reportOptionalMemberAccess] break - assert run.status == 
DagsterRunStatus.STARTING + assert run.status == DagsterRunStatus.STARTING # pyright: ignore[reportOptionalMemberAccess] time.sleep(1) time.sleep(3) - instance.run_launcher._get_container( # noqa: SLF001 - instance.get_run_by_id(run.run_id) + instance.run_launcher._get_container( # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] ).stop() # daemon resumes the run - poll_for_finished_run(instance, run.run_id, timeout=300) - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS + poll_for_finished_run(instance, run.run_id, timeout=300) # pyright: ignore[reportOptionalMemberAccess] + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] @pytest.fixture @@ -183,7 +183,7 @@ def test_docker_monitoring_run_out_of_attempts(aws_env): find_local_test_image(docker_image) run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "ops": { "multiply_the_word_slow": { @@ -230,17 +230,17 @@ def test_docker_monitoring_run_out_of_attempts(aws_env): start_time = time.time() while time.time() - start_time < 60: - run = instance.get_run_by_id(run.run_id) - if run.status == DagsterRunStatus.STARTED: + run = instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] + if run.status == DagsterRunStatus.STARTED: # pyright: ignore[reportOptionalMemberAccess] break - assert run.status == DagsterRunStatus.STARTING + assert run.status == DagsterRunStatus.STARTING # pyright: ignore[reportOptionalMemberAccess] time.sleep(1) time.sleep(3) - instance.run_launcher._get_container( # noqa: SLF001 - instance.get_run_by_id(run.run_id) + instance.run_launcher._get_container( # noqa: SLF001 # pyright: 
ignore[reportAttributeAccessIssue] + instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] ).stop(timeout=0) - poll_for_finished_run(instance, run.run_id, timeout=60) - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.FAILURE + poll_for_finished_run(instance, run.run_id, timeout=60) # pyright: ignore[reportOptionalMemberAccess] + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.FAILURE # pyright: ignore[reportOptionalMemberAccess] diff --git a/integration_tests/test_suites/daemon-test-suite/test_dagster_daemon_health.py b/integration_tests/test_suites/daemon-test-suite/test_dagster_daemon_health.py index f2d98678d7d23..df6c076bed62b 100644 --- a/integration_tests/test_suites/daemon-test-suite/test_dagster_daemon_health.py +++ b/integration_tests/test_suites/daemon-test-suite/test_dagster_daemon_health.py @@ -242,14 +242,14 @@ def _get_error_number(error): )[SensorDaemon.daemon_type()] # Errors build up until there are > 5, then pull off the last - if status.healthy is False and len(status.last_heartbeat.errors) >= 5: - first_error_number = _get_error_number(status.last_heartbeat.errors[0]) + if status.healthy is False and len(status.last_heartbeat.errors) >= 5: # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] + first_error_number = _get_error_number(status.last_heartbeat.errors[0]) # pyright: ignore[reportOptionalSubscript,reportOptionalMemberAccess] if first_error_number > 5: # Verify error numbers decrease consecutively assert [ _get_error_number(error) - for error in status.last_heartbeat.errors + for error in status.last_heartbeat.errors # pyright: ignore[reportOptionalIterable,reportOptionalMemberAccess] ] == list(range(first_error_number, first_error_number - 5, -1)) assert not get_daemon_statuses( @@ -276,10 +276,10 @@ def _get_error_number(error): )[SensorDaemon.daemon_type()] # Error count does not rise above 5, continues to increase - assert 
len(status.last_heartbeat.errors) == 5 + assert len(status.last_heartbeat.errors) == 5 # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] new_first_error_number = _get_error_number( - status.last_heartbeat.errors[0] + status.last_heartbeat.errors[0] # pyright: ignore[reportOptionalSubscript,reportOptionalMemberAccess] ) assert new_first_error_number > first_error_number @@ -307,7 +307,7 @@ def _get_error_number(error): )[SensorDaemon.daemon_type()] # Error count does not rise above 5 - if len(status.last_heartbeat.errors) == 0: + if len(status.last_heartbeat.errors) == 0: # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] break if (now - init_time).total_seconds() > 15: @@ -322,8 +322,8 @@ def test_multiple_error_daemon(monkeypatch): def run_loop_error(_, _ctx, _shutdown_event): # ?message stack cls_name cause" - yield SerializableErrorInfo("foobar", None, None, None) - yield SerializableErrorInfo("bizbuz", None, None, None) + yield SerializableErrorInfo("foobar", None, None, None) # pyright: ignore[reportArgumentType] + yield SerializableErrorInfo("bizbuz", None, None, None) # pyright: ignore[reportArgumentType] while True: yield @@ -354,9 +354,9 @@ def run_loop_error(_, _ctx, _shutdown_event): instance, [SensorDaemon.daemon_type()], now.timestamp() )[SensorDaemon.daemon_type()] - if status.healthy is False and len(status.last_heartbeat.errors) == 2: - assert status.last_heartbeat.errors[0].message.strip() == "bizbuz" - assert status.last_heartbeat.errors[1].message.strip() == "foobar" + if status.healthy is False and len(status.last_heartbeat.errors) == 2: # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] + assert status.last_heartbeat.errors[0].message.strip() == "bizbuz" # pyright: ignore[reportOptionalSubscript,reportOptionalMemberAccess] + assert status.last_heartbeat.errors[1].message.strip() == "foobar" # pyright: ignore[reportOptionalSubscript,reportOptionalMemberAccess] break if (now - init_time).total_seconds() 
> 10: @@ -403,7 +403,7 @@ def test_warn_multiple_daemons(capsys): now.timestamp(), heartbeat_interval_seconds=heartbeat_interval_seconds, )[SensorDaemon.daemon_type()] - last_heartbeat_time = status.last_heartbeat.timestamp + last_heartbeat_time = status.last_heartbeat.timestamp # pyright: ignore[reportOptionalMemberAccess] # No warning when a second controller starts up again with daemon_controller_from_instance( @@ -437,7 +437,7 @@ def test_warn_multiple_daemons(capsys): now.timestamp(), heartbeat_interval_seconds=heartbeat_interval_seconds, )[SensorDaemon.daemon_type()] - last_heartbeat_time = status.last_heartbeat.timestamp + last_heartbeat_time = status.last_heartbeat.timestamp # pyright: ignore[reportOptionalMemberAccess] # Starting up a controller while one is running produces the warning though with daemon_controller_from_instance( diff --git a/integration_tests/test_suites/daemon-test-suite/test_memory.py b/integration_tests/test_suites/daemon-test-suite/test_memory.py index 03eee8a9d7bcb..df2c82c1f3b3f 100644 --- a/integration_tests/test_suites/daemon-test-suite/test_memory.py +++ b/integration_tests/test_suites/daemon-test-suite/test_memory.py @@ -92,7 +92,7 @@ def test_no_memory_leaks(): growth = objgraph.growth( limit=10, filter=lambda obj: inspect.getmodule(obj) - and "dagster" in inspect.getmodule(obj).__name__, + and "dagster" in inspect.getmodule(obj).__name__, # pyright: ignore[reportOptionalMemberAccess] ) while True: time.sleep(30) @@ -103,7 +103,7 @@ def test_no_memory_leaks(): growth = objgraph.growth( limit=10, filter=lambda obj: inspect.getmodule(obj) - and "dagster" in inspect.getmodule(obj).__name__, + and "dagster" in inspect.getmodule(obj).__name__, # pyright: ignore[reportOptionalMemberAccess] ) if not growth: print( # noqa: T201 diff --git a/integration_tests/test_suites/k8s-test-suite/tests/test_executor.py b/integration_tests/test_suites/k8s-test-suite/tests/test_executor.py index 210cbbe449456..f05ed217fce65 100644 --- 
a/integration_tests/test_suites/k8s-test-suite/tests/test_executor.py +++ b/integration_tests/test_suites/k8s-test-suite/tests/test_executor.py @@ -51,8 +51,8 @@ def test_k8s_run_launcher_default( webserver_url_for_k8s_run_launcher, ): run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), # pyright: ignore[reportArgumentType] + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -79,7 +79,7 @@ def test_k8s_run_launcher_volume_mounts( webserver_url_for_k8s_run_launcher, ): run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -109,8 +109,8 @@ def test_k8s_executor_get_config_from_run_launcher( ): # Verify that if you do not specify executor config it is delegated by the run launcher run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), # pyright: ignore[reportArgumentType] + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": {"config": {"job_image": dagster_docker_image}}, }, @@ -134,8 +134,8 @@ def test_k8s_executor_combine_configs( # from run launcher config and executor config. 
Also includes each executor secret # twice to verify that duplicates within the combined config are acceptable run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), # pyright: ignore[reportArgumentType] + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -242,8 +242,8 @@ def test_k8s_run_launcher_image_from_origin( check.invariant(not celery_pod_names) run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env.yaml")), # pyright: ignore[reportArgumentType] + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -280,7 +280,7 @@ def test_k8s_run_launcher_terminate( job_name = "slow_job_k8s" run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -344,7 +344,7 @@ def test_k8s_executor_resource_requirements( check.invariant(not celery_pod_names) run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -382,7 +382,7 @@ def test_execute_on_k8s_retry_job( 
webserver_url_for_k8s_run_launcher, ): run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { diff --git a/integration_tests/test_suites/k8s-test-suite/tests/test_integration.py b/integration_tests/test_suites/k8s-test-suite/tests/test_integration.py index 69ebce83a916a..3fef25abbd71e 100644 --- a/integration_tests/test_suites/k8s-test-suite/tests/test_integration.py +++ b/integration_tests/test_suites/k8s-test-suite/tests/test_integration.py @@ -69,7 +69,9 @@ def test_k8s_run_launcher_default( job_name = "demo_job" run_id = launch_run_over_graphql( - webserver_url_for_k8s_run_launcher, run_config=run_config, job_name=job_name + webserver_url_for_k8s_run_launcher, + run_config=run_config, # pyright: ignore[reportArgumentType] + job_name=job_name, ) result = wait_for_job_and_get_raw_logs( @@ -158,7 +160,9 @@ def test_failing_k8s_run_launcher( job_name = "always_fail_job" run_id = launch_run_over_graphql( - webserver_url_for_k8s_run_launcher, run_config=run_config, job_name=job_name + webserver_url_for_k8s_run_launcher, + run_config=run_config, # pyright: ignore[reportArgumentType] + job_name=job_name, ) result = wait_for_job_and_get_raw_logs( @@ -185,7 +189,9 @@ def test_k8s_run_launcher_terminate( ) run_id = launch_run_over_graphql( - webserver_url_for_k8s_run_launcher, run_config=run_config, job_name=job_name + webserver_url_for_k8s_run_launcher, + run_config=run_config, # pyright: ignore[reportArgumentType] + job_name=job_name, ) DagsterKubernetesClient.production_client().wait_for_job( @@ -225,7 +231,9 @@ def test_k8s_run_launcher_secret_from_deployment( job_name = "demo_job" run_id = launch_run_over_graphql( - webserver_url_for_k8s_run_launcher, run_config=run_config, job_name=job_name + webserver_url_for_k8s_run_launcher, + run_config=run_config, 
# pyright: ignore[reportArgumentType] + job_name=job_name, ) result = wait_for_job_and_get_raw_logs( diff --git a/integration_tests/test_suites/k8s-test-suite/tests/test_k8s_monitoring.py b/integration_tests/test_suites/k8s-test-suite/tests/test_k8s_monitoring.py index 74ba3b345e820..2eec79fb8f627 100644 --- a/integration_tests/test_suites/k8s-test-suite/tests/test_k8s_monitoring.py +++ b/integration_tests/test_suites/k8s-test-suite/tests/test_k8s_monitoring.py @@ -25,7 +25,7 @@ def test_k8s_run_monitoring_startup_fail( webserver_url_for_k8s_run_launcher, ): run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { @@ -70,7 +70,7 @@ def test_k8s_run_monitoring_resume( webserver_url_for_k8s_run_launcher, ): run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "execution": { "config": { diff --git a/pyproject.toml b/pyproject.toml index bc20513f7feb4..a20e246f32b43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,10 +42,6 @@ exclude = [ venv = ".venv" venvPath = "pyright/master" -# Set to false to help us during the transition from mypy to pyright. Mypy does -# not analyze unannotated functions by default, and so as of 2023-02 the codebase contains a large -# number of type errors in unannotated functions. Eventually we can turn off this setting. -analyzeUnannotatedFunctions = false # Minimum version of Python on which code must run. This determines the standard library stubs used by # pyright. @@ -71,6 +67,13 @@ reportPrivateImportUsage = false # recognize. 
reportUnnecessaryTypeIgnoreComment = "warning" +# Skip analyzing unannotated code in examples to facilitate terse code. +executionEnvironments = [ + { root = "examples", analyzeUnannotatedFunctions=false }, + { root = "python_modules" }, + { root = "integration_tests" } +] + # ######################## # ##### PYTEST # ######################## diff --git a/python_modules/automation/automation/docker/dagster_docker.py b/python_modules/automation/automation/docker/dagster_docker.py index b19174093778d..f66a19fc3e46d 100644 --- a/python_modules/automation/automation/docker/dagster_docker.py +++ b/python_modules/automation/automation/docker/dagster_docker.py @@ -65,7 +65,7 @@ def __new__( return super(DagsterDockerImage, cls).__new__( cls, check.str_param(image, "image"), - check.opt_str_param( # type: ignore + check.opt_str_param( images_path, "images_path", default_images_path(), diff --git a/python_modules/dagster-graphql/dagster_graphql/schema/pipelines/pipeline.py b/python_modules/dagster-graphql/dagster_graphql/schema/pipelines/pipeline.py index 1bd77792ee7e3..be0b522e2da82 100644 --- a/python_modules/dagster-graphql/dagster_graphql/schema/pipelines/pipeline.py +++ b/python_modules/dagster-graphql/dagster_graphql/schema/pipelines/pipeline.py @@ -1018,7 +1018,7 @@ class Meta: # doesn't inherit from base class def __init__(self, remote_job): - super().__init__() + super().__init__() # pyright: ignore[reportCallIssue] self._remote_job = check.inst_param(remote_job, "remote_job", RemoteJob) diff --git a/python_modules/dagster-graphql/dagster_graphql/schema/roots/mutation.py b/python_modules/dagster-graphql/dagster_graphql/schema/roots/mutation.py index 687d1fd70ba60..31d51fc9be512 100644 --- a/python_modules/dagster-graphql/dagster_graphql/schema/roots/mutation.py +++ b/python_modules/dagster-graphql/dagster_graphql/schema/roots/mutation.py @@ -150,8 +150,9 @@ def create_execution_params(graphene_info, graphql_execution_params): def 
execution_params_from_graphql(graphql_execution_params): return ExecutionParams( selector=pipeline_selector_from_graphql(graphql_execution_params.get("selector")), - run_config=parse_run_config_input( - graphql_execution_params.get("runConfigData") or {}, raise_on_error=True + run_config=parse_run_config_input( # pyright: ignore[reportArgumentType] + graphql_execution_params.get("runConfigData") or {}, + raise_on_error=True, ), mode=graphql_execution_params.get("mode"), execution_metadata=create_execution_metadata( diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/graphql_context_test_suite.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/graphql_context_test_suite.py index 9f9df41a02b64..d8667cf266e9b 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/graphql_context_test_suite.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/graphql_context_test_suite.py @@ -288,7 +288,7 @@ def _mgr_fn(instance, read_only): ) if loadable_target_origin.python_file else ModuleTarget( - module_name=loadable_target_origin.module_name, + module_name=loadable_target_origin.module_name, # pyright: ignore[reportArgumentType] attribute=loadable_target_origin.attribute, working_directory=loadable_target_origin.working_directory, location_name=location_name, @@ -321,7 +321,7 @@ def _mgr_fn(instance, read_only): GrpcServerTarget( port=api_client.port, socket=api_client.socket, - host=api_client.host, + host=api_client.host, # pyright: ignore[reportArgumentType] location_name=location_name, ), version="", @@ -341,7 +341,7 @@ def code_server_cli_grpc(target=None, location_name="test_location"): def _mgr_fn(instance, read_only): loadable_target_origin = target or get_main_loadable_target_origin() with safe_tempfile_path() as socket: - subprocess_args = [ + subprocess_args = [ # pyright: ignore[reportOperatorIssue] "dagster", "code-server", "start", diff --git 
a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/repo.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/repo.py index 81b656daad511..01e2923af1089 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/repo.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/repo.py @@ -502,7 +502,7 @@ def return_int(): def return_bool(): return True - @op(out=Out(Any)) + @op(out=Out(Any)) # pyright: ignore[reportArgumentType] def return_any(): return "dkjfkdjfe" @@ -620,7 +620,7 @@ def foo_logger(init_context): return logger_ -@logger({"log_level": Field(str), "prefix": Field(str)}) +@logger({"log_level": Field(str), "prefix": Field(str)}) # pyright: ignore[reportArgumentType] def bar_logger(init_context): class BarLogger(logging.Logger): def __init__(self, name, prefix, *args, **kwargs): diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_asset_backfill.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_asset_backfill.py index 8430f3dcf2da5..a5245216955de 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_asset_backfill.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_asset_backfill.py @@ -628,7 +628,7 @@ def test_remove_partitions_defs_after_backfill_backcompat(): backfill = instance.get_backfills()[0] backcompat_backfill = backfill._replace( asset_backfill_data=None, - serialized_asset_backfill_data=backfill.asset_backfill_data.serialize( + serialized_asset_backfill_data=backfill.asset_backfill_data.serialize( # pyright: ignore[reportOptionalMemberAccess] instance, asset_graph=repo.asset_graph ), ) @@ -788,10 +788,10 @@ def test_launch_asset_backfill_with_upstream_anchor_asset(): partitions_subsets_by_asset_key={ AssetKey("hourly"): asset_graph.get( AssetKey("hourly") - ).partitions_def.subset_with_partition_keys(hourly_partitions), + ).partitions_def.subset_with_partition_keys(hourly_partitions), # pyright: 
ignore[reportOptionalMemberAccess] AssetKey("daily"): asset_graph.get( AssetKey("daily") - ).partitions_def.subset_with_partition_keys(["2020-01-02", "2020-01-03"]), + ).partitions_def.subset_with_partition_keys(["2020-01-02", "2020-01-03"]), # pyright: ignore[reportOptionalMemberAccess] }, ) @@ -853,13 +853,13 @@ def test_launch_asset_backfill_with_two_anchor_assets(): partitions_subsets_by_asset_key={ AssetKey("hourly1"): asset_graph.get( AssetKey("hourly1") - ).partitions_def.subset_with_partition_keys(hourly_partitions), + ).partitions_def.subset_with_partition_keys(hourly_partitions), # pyright: ignore[reportOptionalMemberAccess] AssetKey("hourly2"): asset_graph.get( AssetKey("hourly2") - ).partitions_def.subset_with_partition_keys(hourly_partitions), + ).partitions_def.subset_with_partition_keys(hourly_partitions), # pyright: ignore[reportOptionalMemberAccess] AssetKey("daily"): asset_graph.get( AssetKey("daily") - ).partitions_def.subset_with_partition_keys(["2020-01-02", "2020-01-03"]), + ).partitions_def.subset_with_partition_keys(["2020-01-02", "2020-01-03"]), # pyright: ignore[reportOptionalMemberAccess] }, ) @@ -908,12 +908,12 @@ def test_launch_asset_backfill_with_upstream_anchor_asset_and_non_partitioned_as partitions_subsets_by_asset_key={ AssetKey("hourly"): ( asset_graph.get(AssetKey("hourly")) - .partitions_def.empty_subset() + .partitions_def.empty_subset() # pyright: ignore[reportOptionalMemberAccess] .with_partition_keys(hourly_partitions) ), AssetKey("daily"): ( asset_graph.get(AssetKey("daily")) - .partitions_def.empty_subset() + .partitions_def.empty_subset() # pyright: ignore[reportOptionalMemberAccess] .with_partition_keys(["2020-01-02", "2020-01-03"]) ), }, diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_instance.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_instance.py index 8693729498a1a..813b156c5eebb 100644 --- 
a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_instance.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_instance.py @@ -108,51 +108,51 @@ def _set_limits(key: str, limit: int): # set a limit _set_limits("foo", 10) foo = _fetch_limits("foo") - assert foo["concurrencyKey"] == "foo" - assert foo["slotCount"] == 10 - assert foo["activeSlotCount"] == 0 - assert foo["activeRunIds"] == [] - assert foo["claimedSlots"] == [] - assert foo["pendingSteps"] == [] + assert foo["concurrencyKey"] == "foo" # pyright: ignore[reportOptionalSubscript] + assert foo["slotCount"] == 10 # pyright: ignore[reportOptionalSubscript] + assert foo["activeSlotCount"] == 0 # pyright: ignore[reportOptionalSubscript] + assert foo["activeRunIds"] == [] # pyright: ignore[reportOptionalSubscript] + assert foo["claimedSlots"] == [] # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"] == [] # pyright: ignore[reportOptionalSubscript] # claim a slot run_id = make_new_run_id() instance.event_log_storage.claim_concurrency_slot("foo", run_id, "fake_step_key") foo = _fetch_limits("foo") - assert foo["concurrencyKey"] == "foo" - assert foo["slotCount"] == 10 - assert foo["activeSlotCount"] == 1 - assert foo["activeRunIds"] == [run_id] - assert foo["claimedSlots"] == [{"runId": run_id, "stepKey": "fake_step_key"}] - assert len(foo["pendingSteps"]) == 1 - assert foo["pendingSteps"][0]["runId"] == run_id - assert foo["pendingSteps"][0]["stepKey"] == "fake_step_key" - assert foo["pendingSteps"][0]["assignedTimestamp"] is not None - assert foo["pendingSteps"][0]["priority"] == 0 + assert foo["concurrencyKey"] == "foo" # pyright: ignore[reportOptionalSubscript] + assert foo["slotCount"] == 10 # pyright: ignore[reportOptionalSubscript] + assert foo["activeSlotCount"] == 1 # pyright: ignore[reportOptionalSubscript] + assert foo["activeRunIds"] == [run_id] # pyright: ignore[reportOptionalSubscript] + assert foo["claimedSlots"] == [{"runId": run_id, 
"stepKey": "fake_step_key"}] # pyright: ignore[reportOptionalSubscript] + assert len(foo["pendingSteps"]) == 1 # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["runId"] == run_id # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["stepKey"] == "fake_step_key" # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["assignedTimestamp"] is not None # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["priority"] == 0 # pyright: ignore[reportOptionalSubscript] # set a new limit _set_limits("foo", 5) foo = _fetch_limits("foo") - assert foo["concurrencyKey"] == "foo" - assert foo["slotCount"] == 5 - assert foo["activeSlotCount"] == 1 - assert foo["activeRunIds"] == [run_id] - assert foo["claimedSlots"] == [{"runId": run_id, "stepKey": "fake_step_key"}] - assert len(foo["pendingSteps"]) == 1 - assert foo["pendingSteps"][0]["runId"] == run_id - assert foo["pendingSteps"][0]["stepKey"] == "fake_step_key" - assert foo["pendingSteps"][0]["assignedTimestamp"] is not None - assert foo["pendingSteps"][0]["priority"] == 0 + assert foo["concurrencyKey"] == "foo" # pyright: ignore[reportOptionalSubscript] + assert foo["slotCount"] == 5 # pyright: ignore[reportOptionalSubscript] + assert foo["activeSlotCount"] == 1 # pyright: ignore[reportOptionalSubscript] + assert foo["activeRunIds"] == [run_id] # pyright: ignore[reportOptionalSubscript] + assert foo["claimedSlots"] == [{"runId": run_id, "stepKey": "fake_step_key"}] # pyright: ignore[reportOptionalSubscript] + assert len(foo["pendingSteps"]) == 1 # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["runId"] == run_id # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["stepKey"] == "fake_step_key" # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["assignedTimestamp"] is not None # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"][0]["priority"] == 0 # 
pyright: ignore[reportOptionalSubscript] # free a slot instance.event_log_storage.free_concurrency_slots_for_run(run_id) foo = _fetch_limits("foo") - assert foo["concurrencyKey"] == "foo" - assert foo["slotCount"] == 5 - assert foo["activeSlotCount"] == 0 - assert foo["activeRunIds"] == [] - assert foo["claimedSlots"] == [] - assert foo["pendingSteps"] == [] + assert foo["concurrencyKey"] == "foo" # pyright: ignore[reportOptionalSubscript] + assert foo["slotCount"] == 5 # pyright: ignore[reportOptionalSubscript] + assert foo["activeSlotCount"] == 0 # pyright: ignore[reportOptionalSubscript] + assert foo["activeRunIds"] == [] # pyright: ignore[reportOptionalSubscript] + assert foo["claimedSlots"] == [] # pyright: ignore[reportOptionalSubscript] + assert foo["pendingSteps"] == [] # pyright: ignore[reportOptionalSubscript] def test_concurrency_free(self, graphql_context): storage = graphql_context.instance.event_log_storage diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_misc.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_misc.py index 8c411059c2c1e..0133c595c2bf5 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_misc.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_misc.py @@ -139,8 +139,8 @@ def define_circular_dependency_job(): node_defs=[ OpDefinition( name="csolid", - ins={"num": In("num", PoorMansDataFrame)}, - outs={"result": OutputDefinition(PoorMansDataFrame)}, + ins={"num": In("num", PoorMansDataFrame)}, # pyright: ignore[reportArgumentType] + outs={"result": OutputDefinition(PoorMansDataFrame)}, # pyright: ignore[reportArgumentType] compute_fn=lambda *_args: None, ) ], @@ -149,7 +149,7 @@ def define_circular_dependency_job(): ) -@repository +@repository # pyright: ignore[reportArgumentType] def test_repository(): return {"jobs": {"circular_dependency_job": define_circular_dependency_job}} diff --git 
a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reload_repository_location.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reload_repository_location.py index 801c23152d52a..c947f1b3d3c8a 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reload_repository_location.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_reload_repository_location.py @@ -155,7 +155,7 @@ def test_reload_workspace(self, graphql_context): # Simulate adding an origin with an error, reload - original_origins.append( + original_origins.append( # pyright: ignore[reportAttributeAccessIssue] ManagedGrpcPythonEnvCodeLocationOrigin( location_name="error_location", loadable_target_origin=LoadableTargetOrigin( @@ -196,7 +196,7 @@ def test_reload_workspace(self, graphql_context): # Add another origin without an error, reload - original_origins.append(original_origins[0]._replace(location_name="location_copy")) + original_origins.append(original_origins[0]._replace(location_name="location_copy")) # pyright: ignore[reportAttributeAccessIssue] origins_mock.return_value = original_origins result = execute_dagster_graphql(graphql_context, RELOAD_WORKSPACE_QUERY) @@ -224,7 +224,7 @@ def test_reload_workspace(self, graphql_context): # Finally, update one of the origins' location names - original_origins[0] = original_origins[0]._replace(location_name="new_location_name") + original_origins[0] = original_origins[0]._replace(location_name="new_location_name") # pyright: ignore[reportIndexIssue,reportAttributeAccessIssue] result = execute_dagster_graphql(graphql_context, RELOAD_WORKSPACE_QUERY) diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_runs.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_runs.py index 17c6095dba23d..afbdf9bc35750 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_runs.py +++ 
b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_runs.py @@ -862,7 +862,7 @@ def test_run_group(): tags={PARENT_RUN_ID_TAG: root_run_id, ROOT_RUN_ID_TAG: root_run_id}, ) execute_run(InMemoryJob(foo_job), run, instance) - runs.append(run) + runs.append(run) # pyright: ignore[reportArgumentType] with define_out_of_process_context( __file__, "get_repo_at_time_1", instance @@ -937,7 +937,7 @@ def test_asset_batching(): assert len(materializations) == 3 counter = traced_counter.get() - counts = counter.counts() + counts = counter.counts() # pyright: ignore[reportOptionalMemberAccess] assert counts assert counts.get("DagsterInstance.get_run_records") == 1 diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_scheduler.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_scheduler.py index 57571fa2a2b84..44f1c2abe0401 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_scheduler.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_scheduler.py @@ -874,7 +874,7 @@ def test_repository_batching(graphql_context): assert "repositoryOrError" in result.data assert "schedules" in result.data["repositoryOrError"] counter = traced_counter.get() - counts = counter.counts() + counts = counter.counts() # pyright: ignore[reportOptionalMemberAccess] assert counts assert len(counts) == 3 diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_sync_run_launcher.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_sync_run_launcher.py index d6115dbe1d808..6638b7a0d1593 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_sync_run_launcher.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_sync_run_launcher.py @@ -44,4 +44,4 @@ def test_sync_run_launcher_run(): run = instance.launch_run(run_id=run.run_id, workspace=workspace) completed_run = instance.get_run_by_id(run.run_id) - assert 
completed_run.is_success + assert completed_run.is_success # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_workspace.py b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_workspace.py index 6fb588b26e34f..8102d3b5069cc 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_workspace.py +++ b/python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_workspace.py @@ -145,7 +145,7 @@ def test_load_workspace(self, graphql_context): with mock.patch( "dagster._core.workspace.load_target.location_origins_from_yaml_paths", ) as origins_mock: - original_origins.append( + original_origins.append( # pyright: ignore[reportAttributeAccessIssue] ManagedGrpcPythonEnvCodeLocationOrigin( location_name="error_location", loadable_target_origin=LoadableTargetOrigin( @@ -228,7 +228,7 @@ def test_load_location_statuses(self, graphql_context): "dagster._core.workspace.load_target.location_origins_from_yaml_paths", ) as origins_mock: # Add an error origin - original_origins.append( + original_origins.append( # pyright: ignore[reportAttributeAccessIssue] ManagedGrpcPythonEnvCodeLocationOrigin( location_name="error_location", loadable_target_origin=LoadableTargetOrigin( @@ -273,7 +273,7 @@ def test_load_workspace_masked(self, graphql_context, enable_masking_user_code_e with mock.patch( "dagster._core.workspace.load_target.location_origins_from_yaml_paths", ) as origins_mock: - original_origins.append( + original_origins.append( # pyright: ignore[reportAttributeAccessIssue] ManagedGrpcPythonEnvCodeLocationOrigin( location_name="error_location", loadable_target_origin=LoadableTargetOrigin( diff --git a/python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py b/python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py index f1d9fe3cca46a..c6db1a1898a8c 100644 --- a/python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py +++ 
b/python_modules/dagster-graphql/dagster_graphql_tests/test_cli.py @@ -354,7 +354,7 @@ def test_logs_in_start_execution_predefined(): # assert that the watching run storage captured the run correctly from the other process run = instance.get_run_by_id(run_id) - assert run.status == DagsterRunStatus.SUCCESS + assert run.status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] def _is_done(instance, run_id): diff --git a/python_modules/dagster-pipes/dagster_pipes/__init__.py b/python_modules/dagster-pipes/dagster_pipes/__init__.py index 0057cab5a18b7..4d67bae088097 100644 --- a/python_modules/dagster-pipes/dagster_pipes/__init__.py +++ b/python_modules/dagster-pipes/dagster_pipes/__init__.py @@ -778,7 +778,7 @@ def join(self, *args, **kwargs): while not self.exceptions.empty(): exc_info = self.exceptions.get() - sys.stderr.write(traceback.format_exception(*exc_info)) + sys.stderr.write(traceback.format_exception(*exc_info)) # pyright: ignore[reportCallIssue,reportArgumentType] # log writers can potentially capture other type sof logs (for example, from Spark workers) diff --git a/python_modules/dagster-test/dagster_test/fixtures/utils.py b/python_modules/dagster-test/dagster_test/fixtures/utils.py index 81119ee33a7b3..91204aa427495 100644 --- a/python_modules/dagster-test/dagster_test/fixtures/utils.py +++ b/python_modules/dagster-test/dagster_test/fixtures/utils.py @@ -25,7 +25,8 @@ def sigterm_handler(): def retrying_requests(): session = requests.Session() session.mount( - "http://", requests.adapters.HTTPAdapter(max_retries=Retry(total=5, backoff_factor=1)) + "http://", + requests.adapters.HTTPAdapter(max_retries=Retry(total=5, backoff_factor=1)), # pyright: ignore[reportAttributeAccessIssue] ) yield session diff --git a/python_modules/dagster-test/dagster_test/test_project/__init__.py b/python_modules/dagster-test/dagster_test/test_project/__init__.py index b1d4595b6d931..206ce6ce62b1a 100644 --- 
a/python_modules/dagster-test/dagster_test/test_project/__init__.py +++ b/python_modules/dagster-test/dagster_test/test_project/__init__.py @@ -72,7 +72,7 @@ def find_local_test_image(docker_image): f"Found existing image tagged {docker_image}, skipping image build. To rebuild, first run: " f"docker rmi {docker_image}" ) - except docker.errors.ImageNotFound: + except docker.errors.ImageNotFound: # pyright: ignore[reportAttributeAccessIssue] build_and_tag_test_image(docker_image) @@ -224,7 +224,7 @@ def selector_id(self): """Hack! Inject a selector that matches the one that the k8s helm chart will use.""" return create_snapshot_id( InstigatorSelector( - "user-code-deployment-1", + "user-code-deployment-1", # pyright: ignore[reportCallIssue] "demo_execution_repo", self.name, ) diff --git a/python_modules/dagster-test/dagster_test/toys/error_monster.py b/python_modules/dagster-test/dagster_test/toys/error_monster.py index bf234f8fa6948..705448c1eb900 100644 --- a/python_modules/dagster-test/dagster_test/toys/error_monster.py +++ b/python_modules/dagster-test/dagster_test/toys/error_monster.py @@ -37,7 +37,7 @@ def load_input(self, context): if self._throw_input: raise ExampleException("throwing up trying to load input") - keys = tuple(context.upstream_output.get_identifier()) + keys = tuple(context.upstream_output.get_identifier()) # pyright: ignore[reportOptionalMemberAccess] return self._values[keys] diff --git a/python_modules/dagster-test/dagster_test/toys/input_managers.py b/python_modules/dagster-test/dagster_test/toys/input_managers.py index 05ac0427c6b70..1a6f1abfbec0a 100644 --- a/python_modules/dagster-test/dagster_test/toys/input_managers.py +++ b/python_modules/dagster-test/dagster_test/toys/input_managers.py @@ -10,8 +10,10 @@ def __init__(self, base_dir=None): self.base_dir = os.getenv("DAGSTER_HOME") if base_dir is None else base_dir def _get_path(self, output_context): - return os.path.join( - self.base_dir, "storage", 
f"{output_context.step_key}_{output_context.name}.csv" + return os.path.join( # pyright: ignore[reportCallIssue] + self.base_dir, # pyright: ignore[reportArgumentType] + "storage", + f"{output_context.step_key}_{output_context.name}.csv", ) def handle_output(self, context, obj: pd.DataFrame): diff --git a/python_modules/dagster-test/dagster_test/toys/partitioned_assets/dynamic_asset_partitions.py b/python_modules/dagster-test/dagster_test/toys/partitioned_assets/dynamic_asset_partitions.py index c01fdd1e94814..115e64ae25bcd 100644 --- a/python_modules/dagster-test/dagster_test/toys/partitioned_assets/dynamic_asset_partitions.py +++ b/python_modules/dagster-test/dagster_test/toys/partitioned_assets/dynamic_asset_partitions.py @@ -55,7 +55,7 @@ def ints_dynamic_asset(): def add_partitions(num_partitions): with DagsterInstance.get() as instance: partition_keys = [f"customer_{i}" for i in range(num_partitions)] - instance.add_dynamic_partitions(customers_partitions_def.name, partition_keys) + instance.add_dynamic_partitions(customers_partitions_def.name, partition_keys) # pyright: ignore[reportArgumentType] if __name__ == "__main__": diff --git a/python_modules/dagster-test/dagster_test/toys/partitioned_assets/partitioned_run_request_sensors.py b/python_modules/dagster-test/dagster_test/toys/partitioned_assets/partitioned_run_request_sensors.py index 1b41ed83b8a39..c22e01dfb1f3e 100644 --- a/python_modules/dagster-test/dagster_test/toys/partitioned_assets/partitioned_run_request_sensors.py +++ b/python_modules/dagster-test/dagster_test/toys/partitioned_assets/partitioned_run_request_sensors.py @@ -47,7 +47,7 @@ def ints_dynamic_partitions_job_sensor(): @sensor(asset_selection=AssetSelection.assets(upstream_daily_partitioned_asset)) def upstream_daily_partitioned_asset_sensor(context): - latest_partition = upstream_daily_partitioned_asset.partitions_def.get_partition_keys()[-1] + latest_partition = upstream_daily_partitioned_asset.partitions_def.get_partition_keys()[-1] 
# pyright: ignore[reportOptionalMemberAccess] yield RunRequest(partition_key=latest_partition) yield define_asset_job( "upstream_daily_partitioned_asset_job", diff --git a/python_modules/dagster-test/dagster_test/toys/pyspark_assets/pyspark_assets_job.py b/python_modules/dagster-test/dagster_test/toys/pyspark_assets/pyspark_assets_job.py index cb49c06d45b0d..047c28760ddc3 100644 --- a/python_modules/dagster-test/dagster_test/toys/pyspark_assets/pyspark_assets_job.py +++ b/python_modules/dagster-test/dagster_test/toys/pyspark_assets/pyspark_assets_job.py @@ -11,7 +11,7 @@ def create_spark_session(): - return SparkSession.builder.getOrCreate() + return SparkSession.builder.getOrCreate() # pyright: ignore[reportAttributeAccessIssue] def df_from_csv(path): diff --git a/python_modules/dagster-test/dagster_test/toys/repo.py b/python_modules/dagster-test/dagster_test/toys/repo.py index a57444771fbbf..317d0f7339652 100644 --- a/python_modules/dagster-test/dagster_test/toys/repo.py +++ b/python_modules/dagster-test/dagster_test/toys/repo.py @@ -153,7 +153,7 @@ def partitioned_assets_repository(): ] -@repository +@repository # pyright: ignore[reportArgumentType] def column_schema_repository(): from dagster_test.toys import column_schema @@ -174,7 +174,7 @@ def long_asset_keys_repository(): return load_assets_from_modules([long_asset_keys]) -@repository +@repository # pyright: ignore[reportArgumentType] def big_honkin_assets_repository(): return [load_assets_from_modules([big_honkin_asset_graph_module])] diff --git a/python_modules/dagster-test/dagster_test/toys/sensors.py b/python_modules/dagster-test/dagster_test/toys/sensors.py index 13a2776d4a944..48dcfe367bd3c 100644 --- a/python_modules/dagster-test/dagster_test/toys/sensors.py +++ b/python_modules/dagster-test/dagster_test/toys/sensors.py @@ -27,7 +27,7 @@ def get_directory_files(directory_name, since=None): return [] try: - since = float(since) + since = float(since) # pyright: ignore[reportArgumentType] except 
(TypeError, ValueError): since = None @@ -121,7 +121,7 @@ def custom_slack_on_job_failure(context: RunFailureSensorContext): built_in_slack_on_run_failure_sensor = make_slack_on_run_failure_sensor( name="built_in_slack_on_run_failure_sensor", channel="#toy-test", - slack_token=os.environ.get("SLACK_DAGSTER_ETL_BOT_TOKEN"), + slack_token=os.environ.get("SLACK_DAGSTER_ETL_BOT_TOKEN"), # pyright: ignore[reportArgumentType] monitored_jobs=[error_monster_failing_job], webserver_base_url="http://localhost:3000", ) diff --git a/python_modules/dagster-test/dagster_test_tests/test_toys.py b/python_modules/dagster-test/dagster_test_tests/test_toys.py index ca745a1868422..0a74359409fcc 100644 --- a/python_modules/dagster-test/dagster_test_tests/test_toys.py +++ b/python_modules/dagster-test/dagster_test_tests/test_toys.py @@ -92,8 +92,8 @@ def test_longitudinal_job(executor_def): result = longitudinal.to_job( resource_defs={"io_manager": fs_io_manager}, executor_def=executor_def, - config=longitudinal_schedule().job.partitioned_config, - ).execute_in_process(partition_key=partitions_def.get_partition_keys()[0]) + config=longitudinal_schedule().job.partitioned_config, # pyright: ignore[reportAttributeAccessIssue] + ).execute_in_process(partition_key=partitions_def.get_partition_keys()[0]) # pyright: ignore[reportOptionalMemberAccess] assert result.success except IntentionalRandomFailure: pass diff --git a/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_dynamic_asset_partitions.py b/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_dynamic_asset_partitions.py index 5524b1b315690..46ec227814c8f 100644 --- a/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_dynamic_asset_partitions.py +++ b/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_dynamic_asset_partitions.py @@ -10,7 +10,7 @@ def test_assets(): with 
DagsterInstance.ephemeral() as instance: - instance.add_dynamic_partitions(customers_partitions_def.name, ["pepsi", "coca_cola"]) + instance.add_dynamic_partitions(customers_partitions_def.name, ["pepsi", "coca_cola"]) # pyright: ignore[reportArgumentType] assert materialize_to_memory( [customers_dynamic_partitions_asset1, customers_dynamic_partitions_asset2], @@ -26,7 +26,7 @@ def test_assets(): def test_job(): with DagsterInstance.ephemeral() as instance: - instance.add_dynamic_partitions(customers_partitions_def.name, ["pepsi", "coca_cola"]) + instance.add_dynamic_partitions(customers_partitions_def.name, ["pepsi", "coca_cola"]) # pyright: ignore[reportArgumentType] assert ( partitioned_assets_repository.get_job("customers_dynamic_partitions_job") .execute_in_process(partition_key="pepsi", instance=instance) diff --git a/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_sensors.py b/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_sensors.py index 455c54e2563b0..86567b0bf72c2 100644 --- a/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_sensors.py +++ b/python_modules/dagster-test/dagster_test_tests/toys_tests/partitioned_assets_tests/test_sensors.py @@ -20,8 +20,8 @@ def test_ints_sensors(): ) as context: for ints_sensor in ints_sensors: result = ints_sensor(context) - assert len(result.run_requests) == 1 - assert len(result.dynamic_partitions_requests) == 3 + assert len(result.run_requests) == 1 # pyright: ignore[reportOptionalMemberAccess,reportArgumentType,reportAttributeAccessIssue] + assert len(result.dynamic_partitions_requests) == 3 # pyright: ignore[reportOptionalMemberAccess,reportArgumentType,reportAttributeAccessIssue] def test_daily_partitioned_sensor(): @@ -29,4 +29,4 @@ def test_daily_partitioned_sensor(): repository_def=partitioned_assets_repository, ) as context: result = 
upstream_daily_partitioned_asset_sensor.evaluate_tick(context) - assert len(result.run_requests) == 2 + assert len(result.run_requests) == 2 # pyright: ignore[reportArgumentType] diff --git a/python_modules/dagster-test/dagster_test_tests/toys_tests/test_asset_sensors.py b/python_modules/dagster-test/dagster_test_tests/toys_tests/test_asset_sensors.py index 278ea528ef09a..d2bb474884f8e 100644 --- a/python_modules/dagster-test/dagster_test_tests/toys_tests/test_asset_sensors.py +++ b/python_modules/dagster-test/dagster_test_tests/toys_tests/test_asset_sensors.py @@ -15,4 +15,4 @@ def test_partitioned_multi_asset_sensor(): materialize([partitioned_asset], partition_key="1", instance=instance) result = partitioned_multi_asset_sensor(context) - assert len(result) == 1 + assert len(result) == 1 # pyright: ignore[reportArgumentType] diff --git a/python_modules/dagster-webserver/dagster_webserver_tests/webserver/conftest.py b/python_modules/dagster-webserver/dagster_webserver_tests/webserver/conftest.py index 745ba0c7db013..30e64a4e8167e 100644 --- a/python_modules/dagster-webserver/dagster_webserver_tests/webserver/conftest.py +++ b/python_modules/dagster-webserver/dagster_webserver_tests/webserver/conftest.py @@ -16,7 +16,7 @@ def test_client(instance): instance=instance, version=__version__, read_only=False, - kwargs={"empty_workspace": True}, + kwargs={"empty_workspace": True}, # pyright: ignore[reportArgumentType] ) app = DagsterWebserver(process_context).create_asgi_app(debug=True) return TestClient(app) diff --git a/python_modules/dagster/dagster/_cli/asset.py b/python_modules/dagster/dagster/_cli/asset.py index 0ac3fc36cab26..e1589f6514876 100644 --- a/python_modules/dagster/dagster/_cli/asset.py +++ b/python_modules/dagster/dagster/_cli/asset.py @@ -208,7 +208,7 @@ def asset_wipe_command(key, **cli_args): confirmation = click.prompt(prompt) if confirmation == "DELETE": - instance.wipe_assets(asset_keys) + instance.wipe_assets(asset_keys) # pyright: 
ignore[reportArgumentType] click.echo("Removed asset indexes from event logs") else: click.echo("Exiting without removing asset indexes") @@ -266,7 +266,7 @@ def asset_wipe_cache_command(key, **cli_args): confirmation = click.prompt(prompt) if confirmation == "DELETE": - instance.wipe_asset_cached_status(asset_keys) + instance.wipe_asset_cached_status(asset_keys) # pyright: ignore[reportArgumentType] click.echo("Cleared the partitions status cache") else: click.echo("Exiting without wiping the partitions status cache") diff --git a/python_modules/dagster/dagster/_cli/schedule.py b/python_modules/dagster/dagster/_cli/schedule.py index e74c9a50c0cd1..85ddf2e96dcdb 100644 --- a/python_modules/dagster/dagster/_cli/schedule.py +++ b/python_modules/dagster/dagster/_cli/schedule.py @@ -255,14 +255,14 @@ def execute_start_command(schedule_name, all_flag, cli_args, print_fn): try: instance.start_schedule(remote_schedule) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Started all schedules for repository {repository_name}") else: try: instance.start_schedule(repo.get_schedule(schedule_name)) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Started schedule {schedule_name}") @@ -290,7 +290,7 @@ def execute_stop_command(schedule_name, cli_args, print_fn, instance=None): remote_schedule, ) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Stopped schedule {schedule_name}") @@ -400,7 +400,7 @@ def execute_restart_command(schedule_name, all_running_flag, cli_args, print_fn) ) instance.start_schedule(remote_schedule) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Restarted 
all running schedules for repository {repository_name}") else: @@ -416,13 +416,13 @@ def execute_restart_command(schedule_name, all_running_flag, cli_args, print_fn) try: instance.stop_schedule( - schedule_state.instigator_origin_id, + schedule_state.instigator_origin_id, # pyright: ignore[reportOptionalMemberAccess] remote_schedule.selector_id, remote_schedule, ) instance.start_schedule(remote_schedule) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Restarted schedule {schedule_name}") diff --git a/python_modules/dagster/dagster/_cli/sensor.py b/python_modules/dagster/dagster/_cli/sensor.py index b799ae6cac412..995b20dfcb45f 100644 --- a/python_modules/dagster/dagster/_cli/sensor.py +++ b/python_modules/dagster/dagster/_cli/sensor.py @@ -204,13 +204,13 @@ def execute_start_command(sensor_name, all_flag, cli_args, print_fn): instance.start_sensor(sensor) print_fn(f"Started all sensors for repository {repository_name}") except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] else: try: sensor = repo.get_sensor(sensor_name) instance.start_sensor(sensor) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Started sensor {sensor_name}") @@ -237,7 +237,7 @@ def execute_stop_command(sensor_name, cli_args, print_fn): sensor, ) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] print_fn(f"Stopped sensor {sensor_name}") @@ -319,7 +319,7 @@ def execute_preview_command( ) except DagsterInvariantViolationError as ex: - raise click.UsageError(ex) + raise click.UsageError(ex) # pyright: ignore[reportArgumentType] @sensor_cli.command(name="cursor", help="Set the cursor value for an existing sensor.") @@ 
-374,12 +374,12 @@ def execute_cursor_command(sensor_name, cli_args, print_fn): instance.update_instigator_state( job_state.with_data( SensorInstigatorData( - last_tick_timestamp=job_state.instigator_data.last_tick_timestamp, - last_run_key=job_state.instigator_data.last_run_key, + last_tick_timestamp=job_state.instigator_data.last_tick_timestamp, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + last_run_key=job_state.instigator_data.last_run_key, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] min_interval=sensor.min_interval_seconds, cursor=cursor_value, - last_tick_start_timestamp=job_state.instigator_data.last_tick_start_timestamp, - last_sensor_start_timestamp=job_state.instigator_data.last_sensor_start_timestamp, + last_tick_start_timestamp=job_state.instigator_data.last_tick_start_timestamp, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + last_sensor_start_timestamp=job_state.instigator_data.last_sensor_start_timestamp, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] sensor_type=sensor.sensor_type, ), ) diff --git a/python_modules/dagster/dagster/_config/pythonic_config/config.py b/python_modules/dagster/dagster/_config/pythonic_config/config.py index 9512eb960aab2..28cfb74cd559b 100644 --- a/python_modules/dagster/dagster/_config/pythonic_config/config.py +++ b/python_modules/dagster/dagster/_config/pythonic_config/config.py @@ -312,7 +312,7 @@ def to_fields_dict(cls) -> Dict[str, DagsterField]: This is useful when interacting with legacy code that expects a dictionary of fields but you want the source of truth to be a config class. 
""" - return cast(Shape, cls.to_config_schema().as_field().config_type).fields + return cast(Shape, cls.to_config_schema().as_field().config_type).fields # pyright: ignore[reportReturnType] def _discriminated_union_config_dict_to_selector_config_dict( diff --git a/python_modules/dagster/dagster/_config/pythonic_config/resource.py b/python_modules/dagster/dagster/_config/pythonic_config/resource.py index 37fc6573ad44d..64e5cbda35a23 100644 --- a/python_modules/dagster/dagster/_config/pythonic_config/resource.py +++ b/python_modules/dagster/dagster/_config/pythonic_config/resource.py @@ -469,7 +469,8 @@ def process_config_and_initialize(self) -> TResValue: return self.from_resource_context( build_init_resource_context( config=post_process_config( - self._config_schema.config_type, self._convert_to_config_dictionary() + self._config_schema.config_type, # pyright: ignore[reportArgumentType] + self._convert_to_config_dictionary(), # pyright: ignore[reportArgumentType] ).value, ), nested_resources=self.nested_resources, @@ -485,7 +486,8 @@ def process_config_and_initialize_cm(self) -> Generator[TResValue, None, None]: with self.from_resource_context_cm( build_init_resource_context( config=post_process_config( - self._config_schema.config_type, self._convert_to_config_dictionary() + self._config_schema.config_type, # pyright: ignore[reportArgumentType] + self._convert_to_config_dictionary(), # pyright: ignore[reportArgumentType] ).value ), nested_resources=self.nested_resources, diff --git a/python_modules/dagster/dagster/_core/definitions/events.py b/python_modules/dagster/dagster/_core/definitions/events.py index f5ceb9bf1b0f9..d9db32c882b55 100644 --- a/python_modules/dagster/dagster/_core/definitions/events.py +++ b/python_modules/dagster/dagster/_core/definitions/events.py @@ -712,7 +712,7 @@ def __new__( def serializable(cls, inst, **kwargs): return cls( **dict( - { + { # pyright: ignore[reportArgumentType] "op": inst.op.value, "key": inst.key, "dest_key": 
inst.dest_key, diff --git a/python_modules/dagster/dagster/_core/definitions/graph_definition.py b/python_modules/dagster/dagster/_core/definitions/graph_definition.py index a6f86b1947f00..96a4040b8ca88 100644 --- a/python_modules/dagster/dagster/_core/definitions/graph_definition.py +++ b/python_modules/dagster/dagster/_core/definitions/graph_definition.py @@ -612,7 +612,7 @@ def to_job( config: Optional[ Union["RunConfig", ConfigMapping, Mapping[str, object], "PartitionedConfig"] ] = None, - tags: Optional[Mapping[str, str]] = None, + tags: Optional[Mapping[str, object]] = None, metadata: Optional[Mapping[str, RawMetadataValue]] = None, logger_defs: Optional[Mapping[str, LoggerDefinition]] = None, executor_def: Optional["ExecutorDefinition"] = None, diff --git a/python_modules/dagster/dagster/_core/definitions/multi_asset_sensor_definition.py b/python_modules/dagster/dagster/_core/definitions/multi_asset_sensor_definition.py index 2c9e8a89c5d4c..b98f6ba5a8253 100644 --- a/python_modules/dagster/dagster/_core/definitions/multi_asset_sensor_definition.py +++ b/python_modules/dagster/dagster/_core/definitions/multi_asset_sensor_definition.py @@ -1241,7 +1241,7 @@ def _check_cursor_not_set(sensor_result: SensorResult): " context.advance_all_cursors to update the cursor." 
) - multi_asset_sensor_context.update_cursor_after_evaluation() + multi_asset_sensor_context.update_cursor_after_evaluation() # pyright: ignore[reportAttributeAccessIssue] context.update_cursor(multi_asset_sensor_context.cursor) return _fn diff --git a/python_modules/dagster/dagster/_core/definitions/partitioned_schedule.py b/python_modules/dagster/dagster/_core/definitions/partitioned_schedule.py index 7f7a242b045d1..6765a483b879b 100644 --- a/python_modules/dagster/dagster/_core/definitions/partitioned_schedule.py +++ b/python_modules/dagster/dagster/_core/definitions/partitioned_schedule.py @@ -226,7 +226,7 @@ def schedule_fn(context): ] else: check.invariant(isinstance(partitions_def, MultiPartitionsDefinition)) - time_window_dimension = partitions_def.time_window_dimension + time_window_dimension = partitions_def.time_window_dimension # pyright: ignore[reportAttributeAccessIssue] partition_key = time_window_dimension.partitions_def.get_last_partition_key( context.scheduled_execution_time ) @@ -241,14 +241,14 @@ def schedule_fn(context): current_time=context.scheduled_execution_time, dynamic_partitions_store=context.instance if context.instance_ref else None, ) - for key in partitions_def.get_multipartition_keys_with_dimension_value( + for key in partitions_def.get_multipartition_keys_with_dimension_value( # pyright: ignore[reportAttributeAccessIssue] time_window_dimension.name, partition_key, dynamic_partitions_store=context.instance if context.instance_ref else None, ) ] - return schedule_fn + return schedule_fn # pyright: ignore[reportReturnType] def _check_valid_schedule_partitions_def( diff --git a/python_modules/dagster/dagster/_core/definitions/reconstruct.py b/python_modules/dagster/dagster/_core/definitions/reconstruct.py index 6548f957277d8..ae019129ae508 100644 --- a/python_modules/dagster/dagster/_core/definitions/reconstruct.py +++ b/python_modules/dagster/dagster/_core/definitions/reconstruct.py @@ -185,7 +185,7 @@ def before_unpack(self, _, 
unpacked_dict: Dict[str, Any]) -> Dict[str, Any]: def pack_items(self, *args, **kwargs): for k, v in super().pack_items(*args, **kwargs): if k == "op_selection": - new_v = json.dumps(v["__set__"]) if v else None + new_v = json.dumps(v["__set__"]) if v else None # pyright: ignore[reportCallIssue,reportArgumentType,reportIndexIssue] yield "solid_selection_str", new_v else: yield k, v diff --git a/python_modules/dagster/dagster/_core/definitions/run_request.py b/python_modules/dagster/dagster/_core/definitions/run_request.py index dd99e422bc333..6ddfc75def35c 100644 --- a/python_modules/dagster/dagster/_core/definitions/run_request.py +++ b/python_modules/dagster/dagster/_core/definitions/run_request.py @@ -180,7 +180,7 @@ def with_replaced_attrs(self, **kwargs: Any) -> "RunRequest": fields = self._asdict() for k in fields.keys(): if k in kwargs: - fields[k] = kwargs[k] + fields[k] = kwargs[k] # pyright: ignore[reportIndexIssue] return RunRequest(**fields) def with_resolved_tags_and_config( diff --git a/python_modules/dagster/dagster/_core/execution/compute_logs.py b/python_modules/dagster/dagster/_core/execution/compute_logs.py index 31a1035f47a61..98aead160176e 100644 --- a/python_modules/dagster/dagster/_core/execution/compute_logs.py +++ b/python_modules/dagster/dagster/_core/execution/compute_logs.py @@ -25,7 +25,7 @@ def create_compute_log_file_key(): @contextmanager def redirect_to_file(stream, filepath): with open(filepath, "a+", buffering=1, encoding="utf8") as file_stream: - with redirect_stream(file_stream, stream): + with redirect_stream(file_stream, stream): # pyright: ignore[reportArgumentType] yield @@ -62,7 +62,7 @@ def redirect_stream(to_stream=os.devnull, from_stream=sys.stdout): with os.fdopen(os.dup(from_fd), "wb") as copied: from_stream.flush() try: - os.dup2(_fileno(to_stream), from_fd) + os.dup2(_fileno(to_stream), from_fd) # pyright: ignore[reportArgumentType] except ValueError: with open(to_stream, "wb") as to_file: os.dup2(to_file.fileno(), 
from_fd) @@ -70,7 +70,7 @@ def redirect_stream(to_stream=os.devnull, from_stream=sys.stdout): yield from_stream finally: from_stream.flush() - to_stream.flush() + to_stream.flush() # pyright: ignore[reportAttributeAccessIssue] os.dup2(copied.fileno(), from_fd) diff --git a/python_modules/dagster/dagster/_core/execution/context/invocation.py b/python_modules/dagster/dagster/_core/execution/context/invocation.py index 5d4940fb30383..7f16e5cfaab35 100644 --- a/python_modules/dagster/dagster/_core/execution/context/invocation.py +++ b/python_modules/dagster/dagster/_core/execution/context/invocation.py @@ -738,7 +738,7 @@ def add_metadata_two_outputs(context) -> Tuple[str, int]: self._execution_properties.output_metadata[output_name][mapping_key] = metadata else: - self._execution_properties.output_metadata[output_name] = metadata + self._execution_properties.output_metadata[output_name] = metadata # pyright: ignore[reportArgumentType] # In bound mode no conversion is done on returned values and missing but expected outputs are not # allowed. 
diff --git a/python_modules/dagster/dagster/_core/execution/plan/state.py b/python_modules/dagster/dagster/_core/execution/plan/state.py index 5fed41487ab10..34f5042186502 100644 --- a/python_modules/dagster/dagster/_core/execution/plan/state.py +++ b/python_modules/dagster/dagster/_core/execution/plan/state.py @@ -333,11 +333,11 @@ def _derive_state_of_past_run( continue for output in step_snap.outputs: - if output.properties.is_dynamic: + if output.properties.is_dynamic: # pyright: ignore[reportOptionalMemberAccess] if step_key in dynamic_outputs and output.name in dynamic_outputs[step_key]: continue elif step_key in successful_steps_in_parent_run_logs: - if output.properties.is_required: + if output.properties.is_required: # pyright: ignore[reportOptionalMemberAccess] dynamic_outputs[step_key][output.name] = [] else: dynamic_outputs[step_key][output.name] = None diff --git a/python_modules/dagster/dagster/_core/execution/resources_init.py b/python_modules/dagster/dagster/_core/execution/resources_init.py index aee1ba8642f9b..0ecc7395c01d5 100644 --- a/python_modules/dagster/dagster/_core/execution/resources_init.py +++ b/python_modules/dagster/dagster/_core/execution/resources_init.py @@ -453,7 +453,7 @@ def _wrapped_resource_iterator( if isinstance(resource_or_gen, ContextDecorator): def _gen_resource(): - with resource_or_gen as resource: + with resource_or_gen as resource: # pyright: ignore[reportGeneralTypeIssues] yield resource return _gen_resource() diff --git a/python_modules/dagster/dagster/_core/executor/multiprocess.py b/python_modules/dagster/dagster/_core/executor/multiprocess.py index 2d333ff382017..9958250712f35 100644 --- a/python_modules/dagster/dagster/_core/executor/multiprocess.py +++ b/python_modules/dagster/dagster/_core/executor/multiprocess.py @@ -273,7 +273,7 @@ def execute( step_context, get_run_crash_explanation( prefix=f"Multiprocess executor: child process for step {key}", - exit_code=crash.exit_code, + exit_code=crash.exit_code, # 
pyright: ignore[reportArgumentType] ), EngineEventData.engine_error(serializable_error), ) diff --git a/python_modules/dagster/dagster/_core/instance/ref.py b/python_modules/dagster/dagster/_core/instance/ref.py index 30d5f17869ac2..6a160b8d217a4 100644 --- a/python_modules/dagster/dagster/_core/instance/ref.py +++ b/python_modules/dagster/dagster/_core/instance/ref.py @@ -483,7 +483,7 @@ def value_for_ref_item(k, v): return v return ConfigurableClassData(*v) - return InstanceRef(**{k: value_for_ref_item(k, v) for k, v in instance_ref_dict.items()}) + return InstanceRef(**{k: value_for_ref_item(k, v) for k, v in instance_ref_dict.items()}) # pyright: ignore[reportArgumentType] @property def local_artifact_storage(self) -> "LocalArtifactStorage": diff --git a/python_modules/dagster/dagster/_core/launcher/default_run_launcher.py b/python_modules/dagster/dagster/_core/launcher/default_run_launcher.py index 6984598891f9d..4688f56ec11cd 100644 --- a/python_modules/dagster/dagster/_core/launcher/default_run_launcher.py +++ b/python_modules/dagster/dagster/_core/launcher/default_run_launcher.py @@ -138,7 +138,7 @@ def _get_grpc_client_for_termination(self, run_id): if GRPC_INFO_TAG not in tags: return None - grpc_info = seven.json.loads(tags.get(GRPC_INFO_TAG)) + grpc_info = seven.json.loads(tags.get(GRPC_INFO_TAG)) # pyright: ignore[reportArgumentType] return DagsterGrpcClient( port=grpc_info.get("port"), @@ -194,7 +194,7 @@ def join(self, timeout=30): for run_id in self._run_ids if ( self._instance.get_run_by_id(run_id) - and not self._instance.get_run_by_id(run_id).is_finished + and not self._instance.get_run_by_id(run_id).is_finished # pyright: ignore[reportOptionalMemberAccess] ) ] diff --git a/python_modules/dagster/dagster/_core/remote_representation/external_data.py b/python_modules/dagster/dagster/_core/remote_representation/external_data.py index 3aaed3faed871..3afe97064d9af 100644 --- a/python_modules/dagster/dagster/_core/remote_representation/external_data.py 
+++ b/python_modules/dagster/dagster/_core/remote_representation/external_data.py @@ -835,21 +835,21 @@ def get_partitions_definition(self): if self.cron_schedule is not None: return TimeWindowPartitionsDefinition( cron_schedule=self.cron_schedule, - start=datetime_from_timestamp(self.start, tz=self.timezone), + start=datetime_from_timestamp(self.start, tz=self.timezone), # pyright: ignore[reportArgumentType] timezone=self.timezone, fmt=self.fmt, end_offset=self.end_offset, - end=(datetime_from_timestamp(self.end, tz=self.timezone) if self.end else None), + end=(datetime_from_timestamp(self.end, tz=self.timezone) if self.end else None), # pyright: ignore[reportArgumentType] ) else: # backcompat case return TimeWindowPartitionsDefinition( schedule_type=self.schedule_type, - start=datetime_from_timestamp(self.start, tz=self.timezone), + start=datetime_from_timestamp(self.start, tz=self.timezone), # pyright: ignore[reportArgumentType] timezone=self.timezone, fmt=self.fmt, end_offset=self.end_offset, - end=(datetime_from_timestamp(self.end, tz=self.timezone) if self.end else None), + end=(datetime_from_timestamp(self.end, tz=self.timezone) if self.end else None), # pyright: ignore[reportArgumentType] minute_offset=self.minute_offset, hour_offset=self.hour_offset, day_offset=self.day_offset, diff --git a/python_modules/dagster/dagster/_core/storage/alembic/versions/006_scheduler_update_postgres.py b/python_modules/dagster/dagster/_core/storage/alembic/versions/006_scheduler_update_postgres.py index 9351477a6a45f..a235554e87205 100644 --- a/python_modules/dagster/dagster/_core/storage/alembic/versions/006_scheduler_update_postgres.py +++ b/python_modules/dagster/dagster/_core/storage/alembic/versions/006_scheduler_update_postgres.py @@ -27,7 +27,7 @@ def upgrade(): return instance = get_currently_upgrading_instance() if instance.scheduler: - instance.scheduler.wipe(instance) + instance.scheduler.wipe(instance) # pyright: ignore[reportAttributeAccessIssue] # No longer 
dropping the "schedules" table here, since # the 0.10.0 migration checks for the presence of the "schedules" diff --git a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_postgres.py b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_postgres.py index bad66647700f9..990fb409ed165 100644 --- a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_postgres.py +++ b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_postgres.py @@ -28,7 +28,7 @@ def upgrade(): instance = get_currently_upgrading_instance() if instance.scheduler: - instance.scheduler.wipe(instance) + instance.scheduler.wipe(instance) # pyright: ignore[reportAttributeAccessIssue] # No longer dropping the "schedules" table here, since # the 0.10.0 migration checks for the presence of the "schedules" diff --git a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_1.py b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_1.py index d8445ae872b63..49b582a2fe73e 100644 --- a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_1.py +++ b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_1.py @@ -28,7 +28,7 @@ def upgrade(): instance = get_currently_upgrading_instance() if instance.scheduler: - instance.scheduler.wipe(instance) + instance.scheduler.wipe(instance) # pyright: ignore[reportAttributeAccessIssue] # No longer dropping the "schedules" table here, since # the 0.10.0 migration checks for the presence of the "schedules" diff --git a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_2.py 
b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_2.py index 897b25b6f4b0d..432ee90f1af7a 100644 --- a/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_2.py +++ b/python_modules/dagster/dagster/_core/storage/alembic/versions/011_wipe_schedules_table_for_0_10_0_sqlite_2.py @@ -28,7 +28,7 @@ def upgrade(): instance = get_currently_upgrading_instance() if instance.scheduler: - instance.scheduler.wipe(instance) + instance.scheduler.wipe(instance) # pyright: ignore[reportAttributeAccessIssue] # No longer dropping the "schedules" table here, since # the 0.10.0 migration checks for the presence of the "schedules" diff --git a/python_modules/dagster/dagster/_core/storage/event_log/migration.py b/python_modules/dagster/dagster/_core/storage/event_log/migration.py index da5006937fbcc..1ef0527655435 100644 --- a/python_modules/dagster/dagster/_core/storage/event_log/migration.py +++ b/python_modules/dagster/dagster/_core/storage/event_log/migration.py @@ -26,12 +26,12 @@ def migrate_event_log_data(instance=None): """ from dagster._core.storage.event_log.sql_event_log import SqlEventLogStorage - event_log_storage = instance._event_storage # noqa: SLF001 + event_log_storage = instance._event_storage # noqa: SLF001 # pyright: ignore[reportOptionalMemberAccess] if not isinstance(event_log_storage, SqlEventLogStorage): return - for run in instance.get_runs(): + for run in instance.get_runs(): # pyright: ignore[reportOptionalMemberAccess] for record in event_log_storage.get_records_for_run(run.run_id).records: event_log_storage.update_event_log_record(record.storage_id, record.event_log_entry) @@ -64,10 +64,10 @@ def migrate_asset_key_data(event_log_storage, print_fn=None): try: conn.execute( AssetKeyTable.insert().values( - asset_key=AssetKey.from_db_string(asset_key).to_string() + asset_key=AssetKey.from_db_string(asset_key).to_string() # pyright: 
ignore[reportOptionalMemberAccess] ) ) - except db.exc.IntegrityError: + except db.exc.IntegrityError: # pyright: ignore[reportAttributeAccessIssue] # asset key already present pass @@ -119,14 +119,14 @@ def migrate_asset_keys_index_columns(event_log_storage, print_fn=None): materialization_query = ( db_select([SqlEventLogStorageTable.c.event]) .where( - SqlEventLogStorageTable.c.asset_key == asset_key.to_string(), + SqlEventLogStorageTable.c.asset_key == asset_key.to_string(), # pyright: ignore[reportOptionalMemberAccess] ) .order_by(SqlEventLogStorageTable.c.timestamp.desc()) .limit(1) ) materialization_row = conn.execute(materialization_query).fetchone() if materialization_row: - event = deserialize_value(materialization_row[0], NamedTuple) + event = deserialize_value(materialization_row[0], NamedTuple) # pyright: ignore[reportCallIssue,reportArgumentType] if not event: # this must be a wiped asset @@ -140,7 +140,7 @@ def migrate_asset_keys_index_columns(event_log_storage, print_fn=None): ), ) .where( - AssetKeyTable.c.asset_key == asset_key.to_string(), + AssetKeyTable.c.asset_key == asset_key.to_string(), # pyright: ignore[reportOptionalMemberAccess] ) ) else: @@ -154,7 +154,7 @@ def migrate_asset_keys_index_columns(event_log_storage, print_fn=None): ), ) .where( - AssetKeyTable.c.asset_key == asset_key.to_string(), + AssetKeyTable.c.asset_key == asset_key.to_string(), # pyright: ignore[reportOptionalMemberAccess] ) ) diff --git a/python_modules/dagster/dagster/_core/storage/file_manager.py b/python_modules/dagster/dagster/_core/storage/file_manager.py index 4c125919db87f..2759db87de47c 100644 --- a/python_modules/dagster/dagster/_core/storage/file_manager.py +++ b/python_modules/dagster/dagster/_core/storage/file_manager.py @@ -256,7 +256,7 @@ def copy_handle_to_local_temp(self, file_handle: FileHandle) -> str: check.inst_param(file_handle, "file_handle", FileHandle) with self.read(file_handle, "rb") as handle_obj: # type: ignore # (??) 
temp_file_obj = self._temp_file_manager.tempfile() - temp_file_obj.write(handle_obj.read()) + temp_file_obj.write(handle_obj.read()) # pyright: ignore[reportCallIssue,reportArgumentType] temp_name = temp_file_obj.name temp_file_obj.close() return temp_name diff --git a/python_modules/dagster/dagster/_core/storage/legacy_storage.py b/python_modules/dagster/dagster/_core/storage/legacy_storage.py index 10cd5e60b06cb..8873d4546b8a1 100644 --- a/python_modules/dagster/dagster/_core/storage/legacy_storage.py +++ b/python_modules/dagster/dagster/_core/storage/legacy_storage.py @@ -385,7 +385,7 @@ def _instance(self) -> Optional["DagsterInstance"]: return self._storage._instance # noqa: SLF001 def index_connection(self): - return self._storage.event_log_storage.index_connection() + return self._storage.event_log_storage.index_connection() # pyright: ignore[reportAttributeAccessIssue] def register_instance(self, instance: "DagsterInstance") -> None: if not self._storage.has_instance: diff --git a/python_modules/dagster/dagster/_core/storage/migration/bigint_migration.py b/python_modules/dagster/dagster/_core/storage/migration/bigint_migration.py index cc242b684e109..4a5a0ee65222e 100644 --- a/python_modules/dagster/dagster/_core/storage/migration/bigint_migration.py +++ b/python_modules/dagster/dagster/_core/storage/migration/bigint_migration.py @@ -94,7 +94,7 @@ def _migrate_storage(conn, tables_to_migrate, print_fn): # restore the foreign key on the asset event tags table if needed, even if we did not just # migrate the event logs table in case we hit some error and exited in a bad state if table == "event_logs" and "asset_event_tags" in all_table_names: - _restore_asset_event_tags_foreign_key(conn, print_fn) + _restore_asset_event_tags_foreign_key(conn, print_fn) # pyright: ignore[reportCallIssue] def run_bigint_migration(instance: DagsterInstance, print_fn: Callable[..., Any] = print): diff --git a/python_modules/dagster/dagster/_core/storage/output_manager.py 
b/python_modules/dagster/dagster/_core/storage/output_manager.py index 6feda9d52f0c4..10f58db1cc65a 100644 --- a/python_modules/dagster/dagster/_core/storage/output_manager.py +++ b/python_modules/dagster/dagster/_core/storage/output_manager.py @@ -35,7 +35,7 @@ def __init__( output_config_schema ) super(OutputManagerDefinition, self).__init__( - resource_fn=resource_fn, + resource_fn=resource_fn, # pyright: ignore[reportArgumentType] config_schema=config_schema, description=description, required_resource_keys=required_resource_keys, diff --git a/python_modules/dagster/dagster/_core/storage/upath_io_manager.py b/python_modules/dagster/dagster/_core/storage/upath_io_manager.py index 08a704f5e03e5..79a5d6abbb4d3 100644 --- a/python_modules/dagster/dagster/_core/storage/upath_io_manager.py +++ b/python_modules/dagster/dagster/_core/storage/upath_io_manager.py @@ -116,7 +116,7 @@ def storage_options(self) -> Dict[str, Any]: from upath import UPath if isinstance(self._base_path, UPath): - return self._base_path._kwargs.copy() # noqa + return self._base_path._kwargs.copy() # noqa # pyright: ignore[reportAttributeAccessIssue] elif isinstance(self._base_path, Path): return {} else: diff --git a/python_modules/dagster/dagster/_core/types/dagster_type.py b/python_modules/dagster/dagster/_core/types/dagster_type.py index 26842c86c5b81..3c85cf16ff541 100644 --- a/python_modules/dagster/dagster/_core/types/dagster_type.py +++ b/python_modules/dagster/dagster/_core/types/dagster_type.py @@ -619,7 +619,7 @@ def type_check_method(self, context, value): @property def inner_types(self): - return [self.inner_type] + self.inner_type.inner_types + return [self.inner_type] + self.inner_type.inner_types # pyright: ignore[reportOperatorIssue] @property def type_param_keys(self): @@ -688,7 +688,7 @@ def type_check_method(self, context, value): @property def inner_types(self): - return [self.inner_type] + self.inner_type.inner_types + return [self.inner_type] + self.inner_type.inner_types # 
pyright: ignore[reportOperatorIssue] @property def type_param_keys(self): diff --git a/python_modules/dagster/dagster/_core/types/python_tuple.py b/python_modules/dagster/dagster/_core/types/python_tuple.py index 7abc0b9509eb3..9ccdf12377d9f 100644 --- a/python_modules/dagster/dagster/_core/types/python_tuple.py +++ b/python_modules/dagster/dagster/_core/types/python_tuple.py @@ -24,7 +24,7 @@ def schema_type(self): def construct_from_config_value(self, context, config_value): return tuple( ( - self._dagster_types[idx].loader.construct_from_config_value(context, item) + self._dagster_types[idx].loader.construct_from_config_value(context, item) # pyright: ignore[reportOptionalMemberAccess] for idx, item in enumerate(config_value) ) ) @@ -36,7 +36,7 @@ def __init__(self, dagster_types): self.dagster_types = dagster_types typing_types = tuple(t.typing_type for t in dagster_types) super(_TypedPythonTuple, self).__init__( - key="TypedPythonTuple" + ".".join(map(lambda t: t.key, dagster_types)), + key="TypedPythonTuple" + ".".join(map(lambda t: t.key, dagster_types)), # pyright: ignore[reportAttributeAccessIssue] name=None, loader=(TypedTupleDagsterTypeLoader(dagster_types) if all_have_input_configs else None), type_check_fn=self.type_check_method, diff --git a/python_modules/dagster/dagster/_daemon/types.py b/python_modules/dagster/dagster/_daemon/types.py index 365a5077efbd5..18a434256ed10 100644 --- a/python_modules/dagster/dagster/_daemon/types.py +++ b/python_modules/dagster/dagster/_daemon/types.py @@ -12,7 +12,7 @@ def before_unpack(self, context, unpacked_dict): # just extract the name, which is the string we want. 
if isinstance(unpacked_dict.get("daemon_type"), UnknownSerdesValue): unknown = unpacked_dict["daemon_type"] - unpacked_dict["daemon_type"] = unknown.value["__enum__"].split(".")[-1] + unpacked_dict["daemon_type"] = unknown.value["__enum__"].split(".")[-1] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] context.clear_ignored_unknown_values(unknown) if unpacked_dict.get("error"): unpacked_dict["errors"] = [unpacked_dict["error"]] diff --git a/python_modules/dagster/dagster/_grpc/proxy_server.py b/python_modules/dagster/dagster/_grpc/proxy_server.py index 32d0a9983d4aa..bc21179efcfc3 100644 --- a/python_modules/dagster/dagster/_grpc/proxy_server.py +++ b/python_modules/dagster/dagster/_grpc/proxy_server.py @@ -347,5 +347,5 @@ def StartRun(self, request, context): client = self._client - self._run_clients[run_id] = client - return client._get_response("StartRun", request) # noqa + self._run_clients[run_id] = client # pyright: ignore[reportArgumentType] + return client._get_response("StartRun", request) # noqa # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/dagster/dagster/_grpc/server.py b/python_modules/dagster/dagster/_grpc/server.py index a6e1a3c38dd7c..bf4b7264f7d4d 100644 --- a/python_modules/dagster/dagster/_grpc/server.py +++ b/python_modules/dagster/dagster/_grpc/server.py @@ -1284,7 +1284,7 @@ def serve(self): try: self.server.wait_for_termination() finally: - self._api_servicer.cleanup() + self._api_servicer.cleanup() # pyright: ignore[reportAttributeAccessIssue] server_termination_thread.join() diff --git a/python_modules/dagster/dagster/_serdes/config_class.py b/python_modules/dagster/dagster/_serdes/config_class.py index 11e62492d627b..819530f4cd19f 100644 --- a/python_modules/dagster/dagster/_serdes/config_class.py +++ b/python_modules/dagster/dagster/_serdes/config_class.py @@ -34,7 +34,7 @@ class ConfigurableClassDataSerializer(NamedTupleSerializer["ConfigurableClassDat def pack_items(self, *args, 
**kwargs): for k, v in super().pack_items(*args, **kwargs): if k == "module_name": - yield k, convert_dagster_submodule_name(v, "public") + yield k, convert_dagster_submodule_name(v, "public") # pyright: ignore[reportArgumentType] else: yield k, v diff --git a/python_modules/dagster/dagster/_time/__init__.py b/python_modules/dagster/dagster/_time/__init__.py index 12e5631b46a6e..49ede7ebe4722 100644 --- a/python_modules/dagster/dagster/_time/__init__.py +++ b/python_modules/dagster/dagster/_time/__init__.py @@ -7,7 +7,7 @@ try: # zoneinfo is python >= 3.9 - from zoneinfo import ZoneInfo as _timezone_from_string # type: ignore + from zoneinfo import ZoneInfo as _timezone_from_string except: from dagster._vendored.dateutil.tz import gettz as _timezone_from_string @@ -115,7 +115,7 @@ def parse_time_string(datetime_str) -> datetime: """ dt = parser.parse(datetime_str) - if not dt.tzinfo: - dt = dt.replace(tzinfo=timezone.utc) + if not dt.tzinfo: # pyright: ignore[reportAttributeAccessIssue] + dt = dt.replace(tzinfo=timezone.utc) # pyright: ignore[reportAttributeAccessIssue] - return dt + return dt # pyright: ignore[reportReturnType] diff --git a/python_modules/dagster/dagster/_utils/schedules.py b/python_modules/dagster/dagster/_utils/schedules.py index b51cec36abf99..e9cc5acd327bf 100644 --- a/python_modules/dagster/dagster/_utils/schedules.py +++ b/python_modules/dagster/dagster/_utils/schedules.py @@ -666,7 +666,7 @@ def cron_string_iterator( if ( all(is_numeric[0:3]) and all(is_wildcard[3:]) - and cron_parts[2][0] <= MAX_DAY_OF_MONTH_WITH_GUARANTEED_MONTHLY_INTERVAL + and cron_parts[2][0] <= MAX_DAY_OF_MONTH_WITH_GUARANTEED_MONTHLY_INTERVAL # pyright: ignore[reportOperatorIssue] ): # monthly known_schedule_type = ScheduleType.MONTHLY elif all(is_numeric[0:2]) and is_numeric[4] and all(is_wildcard[2:4]): # weekly @@ -701,10 +701,10 @@ def cron_string_iterator( yield start_datetime else: next_date = _find_schedule_time( - expected_minutes, - expected_hour, - 
expected_day, - expected_day_of_week, + expected_minutes, # pyright: ignore[reportArgumentType] + expected_hour, # pyright: ignore[reportArgumentType] + expected_day, # pyright: ignore[reportArgumentType] + expected_day_of_week, # pyright: ignore[reportArgumentType] known_schedule_type, start_datetime, ascending=not ascending, # Going in the reverse direction @@ -713,10 +713,10 @@ def cron_string_iterator( check.invariant(start_offset <= 0) for _ in range(-start_offset): next_date = _find_schedule_time( - expected_minutes, - expected_hour, - expected_day, - expected_day_of_week, + expected_minutes, # pyright: ignore[reportArgumentType] + expected_hour, # pyright: ignore[reportArgumentType] + expected_day, # pyright: ignore[reportArgumentType] + expected_day_of_week, # pyright: ignore[reportArgumentType] known_schedule_type, next_date, ascending=not ascending, # Going in the reverse direction @@ -725,10 +725,10 @@ def cron_string_iterator( while True: next_date = _find_schedule_time( - expected_minutes, - expected_hour, - expected_day, - expected_day_of_week, + expected_minutes, # pyright: ignore[reportArgumentType] + expected_hour, # pyright: ignore[reportArgumentType] + expected_day, # pyright: ignore[reportArgumentType] + expected_day_of_week, # pyright: ignore[reportArgumentType] known_schedule_type, next_date, ascending=ascending, diff --git a/python_modules/dagster/dagster/_utils/security.py b/python_modules/dagster/dagster/_utils/security.py index 3cf93ff4744ab..9bb467e130957 100644 --- a/python_modules/dagster/dagster/_utils/security.py +++ b/python_modules/dagster/dagster/_utils/security.py @@ -9,4 +9,4 @@ def non_secure_md5_hash_str(s: Union[bytes, bytearray, memoryview]) -> str: if sys.version_info[0] <= 3 and sys.version_info[1] <= 8: return hashlib.md5(s).hexdigest() else: - return hashlib.md5(s, usedforsecurity=False).hexdigest() # type: ignore + return hashlib.md5(s, usedforsecurity=False).hexdigest() diff --git 
a/python_modules/dagster/dagster/_utils/test/postgres_instance.py b/python_modules/dagster/dagster/_utils/test/postgres_instance.py index c8165083d6892..e2d1622fe6912 100644 --- a/python_modules/dagster/dagster/_utils/test/postgres_instance.py +++ b/python_modules/dagster/dagster/_utils/test/postgres_instance.py @@ -75,15 +75,15 @@ def conn_string(env_name="POSTGRES_TEST_DB_HOST", **kwargs): from dagster_postgres.utils import get_conn_string return get_conn_string( - **dict( - dict( - username="test", - password="test", - hostname=TestPostgresInstance.get_hostname(env_name=env_name), - db_name="test", - ), - **kwargs, - ) + **dict( # pyright: ignore[reportArgumentType] + dict( # pyright: ignore[reportArgumentType] + username="test", # pyright: ignore[reportArgumentType] + password="test", # pyright: ignore[reportArgumentType] + hostname=TestPostgresInstance.get_hostname(env_name=env_name), # pyright: ignore[reportArgumentType] + db_name="test", # pyright: ignore[reportArgumentType] + ), # pyright: ignore[reportArgumentType] + **kwargs, # pyright: ignore[reportArgumentType] + ) # pyright: ignore[reportArgumentType] ) @staticmethod diff --git a/python_modules/dagster/dagster/_utils/test/schedule_storage.py b/python_modules/dagster/dagster/_utils/test/schedule_storage.py index 3706a29744b72..6c21deb8f54dd 100644 --- a/python_modules/dagster/dagster/_utils/test/schedule_storage.py +++ b/python_modules/dagster/dagster/_utils/test/schedule_storage.py @@ -212,7 +212,7 @@ def test_update_schedule(self, storage): new_schedule = schedule.with_status(InstigatorStatus.RUNNING).with_data( ScheduleInstigatorData( - cron_schedule=schedule.instigator_data.cron_schedule, + cron_schedule=schedule.instigator_data.cron_schedule, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] start_timestamp=now_time, ) ) diff --git a/python_modules/dagster/dagster_tests/api_tests/api_tests_repo.py b/python_modules/dagster/dagster_tests/api_tests/api_tests_repo.py index 
3652b57a87be6..8ad4e79a19c2d 100644 --- a/python_modules/dagster/dagster_tests/api_tests/api_tests_repo.py +++ b/python_modules/dagster/dagster_tests/api_tests/api_tests_repo.py @@ -215,6 +215,6 @@ def bar_repo(): } -@repository +@repository # pyright: ignore[reportArgumentType] def other_repo(): return {"jobs": {"other_foo": define_other_foo_job}} diff --git a/python_modules/dagster/dagster_tests/api_tests/test_api_launch_run.py b/python_modules/dagster/dagster_tests/api_tests/test_api_launch_run.py index 54f895ef1c1e4..60084d042f17d 100644 --- a/python_modules/dagster/dagster_tests/api_tests/test_api_launch_run.py +++ b/python_modules/dagster/dagster_tests/api_tests/test_api_launch_run.py @@ -142,5 +142,5 @@ def test_launch_unloadable_run_grpc(): assert ( f"gRPC server could not load run {run_id} in order to execute it. " "Make sure that the gRPC server has access to your run storage." - in res.serializable_error_info.message + in res.serializable_error_info.message # pyright: ignore[reportOptionalMemberAccess] ) diff --git a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_job.py b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_job.py index 6c4f918c471b4..430ce9f440f6f 100644 --- a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_job.py +++ b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_job.py @@ -29,7 +29,7 @@ def test_job_snapshot_api_grpc(instance): remote_job_subset_result = _test_job_subset_grpc(job_handle, api_client) assert isinstance(remote_job_subset_result, RemoteJobSubsetResult) assert remote_job_subset_result.success is True - assert remote_job_subset_result.job_data_snap.name == "foo" + assert remote_job_subset_result.job_data_snap.name == "foo" # pyright: ignore[reportOptionalMemberAccess] def test_job_snapshot_deserialize_error(instance): @@ -60,9 +60,9 @@ def test_job_with_valid_subset_snapshot_api_grpc(instance): remote_job_subset_result = _test_job_subset_grpc(job_handle, 
api_client, ["do_something"]) assert isinstance(remote_job_subset_result, RemoteJobSubsetResult) assert remote_job_subset_result.success is True - assert remote_job_subset_result.job_data_snap.name == "foo" + assert remote_job_subset_result.job_data_snap.name == "foo" # pyright: ignore[reportOptionalMemberAccess] assert ( - remote_job_subset_result.job_data_snap.parent_job + remote_job_subset_result.job_data_snap.parent_job # pyright: ignore[reportOptionalMemberAccess] == code_location.get_repository("bar_repo").get_full_job("foo").job_snapshot ) @@ -77,8 +77,8 @@ def test_job_with_valid_subset_snapshot_without_parent_snapshot(instance): ) assert isinstance(remote_job_subset_result, RemoteJobSubsetResult) assert remote_job_subset_result.success is True - assert remote_job_subset_result.job_data_snap.name == "foo" - assert not remote_job_subset_result.job_data_snap.parent_job + assert remote_job_subset_result.job_data_snap.name == "foo" # pyright: ignore[reportOptionalMemberAccess] + assert not remote_job_subset_result.job_data_snap.parent_job # pyright: ignore[reportOptionalMemberAccess] def test_job_with_invalid_subset_snapshot_api_grpc(instance): diff --git a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_repository.py b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_repository.py index 2335602198c98..dc854e851f0f1 100644 --- a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_repository.py +++ b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_repository.py @@ -82,7 +82,7 @@ def giant_job(): do_something() -@repository +@repository # pyright: ignore[reportArgumentType] def giant_repo(): return { "jobs": { diff --git a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_schedule_execution_data.py b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_schedule_execution_data.py index 089d3ca405835..15e770d6292a5 100644 --- 
a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_schedule_execution_data.py +++ b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_schedule_execution_data.py @@ -32,8 +32,8 @@ def test_external_schedule_execution_data_api_grpc(): None, ) assert isinstance(execution_data, ScheduleExecutionData) - assert len(execution_data.run_requests) == 1 - to_launch = execution_data.run_requests[0] + assert len(execution_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + to_launch = execution_data.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert to_launch.run_config == {"fizz": "buzz"} assert to_launch.tags == {"dagster/schedule_name": "foo_schedule"} @@ -76,8 +76,8 @@ def test_external_schedule_execution_data_api_grpc_fallback_to_streaming(): None, ) assert isinstance(execution_data, ScheduleExecutionData) - assert len(execution_data.run_requests) == 1 - to_launch = execution_data.run_requests[0] + assert len(execution_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + to_launch = execution_data.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert to_launch.run_config == {"fizz": "buzz"} assert to_launch.tags == {"dagster/schedule_name": "foo_schedule"} @@ -93,7 +93,7 @@ def test_external_schedule_execution_data_api_never_execute_grpc(): None, ) assert isinstance(execution_data, ScheduleExecutionData) - assert len(execution_data.run_requests) == 0 + assert len(execution_data.run_requests) == 0 # pyright: ignore[reportArgumentType] def test_external_schedule_execution_deserialize_error(): @@ -130,8 +130,8 @@ def test_include_execution_time_grpc(): ) assert isinstance(execution_data, ScheduleExecutionData) - assert len(execution_data.run_requests) == 1 - to_launch = execution_data.run_requests[0] + assert len(execution_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + to_launch = execution_data.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert 
to_launch.run_config == {"passed_in_time": execution_time.isoformat()} assert to_launch.tags == {"dagster/schedule_name": "foo_schedule_echo_time"} @@ -148,7 +148,7 @@ def test_run_request_partition_key_schedule_grpc(): ) assert isinstance(execution_data, ScheduleExecutionData) - assert len(execution_data.run_requests) == 1 - to_launch = execution_data.run_requests[0] + assert len(execution_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + to_launch = execution_data.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert to_launch.tags["dagster/schedule_name"] == "partitioned_run_request_schedule" assert to_launch.tags["dagster/partition"] == "a" diff --git a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_sensor.py b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_sensor.py index d712b74ef8040..9054a85e8e82f 100644 --- a/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_sensor.py +++ b/python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_sensor.py @@ -23,8 +23,8 @@ def test_remote_sensor_grpc(instance): instance, repository_handle, "sensor_foo", None, None, None, None ) assert isinstance(result, SensorExecutionData) - assert len(result.run_requests) == 2 - run_request = result.run_requests[0] + assert len(result.run_requests) == 2 # pyright: ignore[reportArgumentType] + run_request = result.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert run_request.run_config == {"foo": "FOO"} assert run_request.tags == {"foo": "foo_tag", "dagster/sensor_name": "sensor_foo"} @@ -54,8 +54,8 @@ def test_remote_sensor_grpc_fallback_to_streaming(instance): None, ) assert isinstance(result, SensorExecutionData) - assert len(result.run_requests) == 2 - run_request = result.run_requests[0] + assert len(result.run_requests) == 2 # pyright: ignore[reportArgumentType] + run_request = result.run_requests[0] # pyright: ignore[reportOptionalSubscript] assert run_request.run_config == {"foo": 
"FOO"} assert run_request.tags == { "foo": "foo_tag", diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_deps.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_deps.py index 9676544ab708b..8b9f9056b95cd 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_deps.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_deps.py @@ -45,7 +45,7 @@ def test_instantiation_with_asset_dep(): og_dep = AssetDep("upstream", partition_mapping=partition_mapping) with pytest.raises(ParameterCheckError): - assert AssetDep(og_dep) == AssetDep("upstream") + assert AssetDep(og_dep) == AssetDep("upstream") # pyright: ignore[reportArgumentType] def test_multi_asset_errors(): @@ -97,7 +97,7 @@ def a_multi_asset(): # Test bad type with pytest.raises(ParameterCheckError, match='Param "asset" is not one of'): # full error msg: Param "asset" is not one of ['AssetKey', 'AssetSpec', 'AssetsDefinition', 'SourceAsset', 'str']. Got 1 which is type <class 'int'>. 
- AssetDep.from_coercible(1) + AssetDep.from_coercible(1) # pyright: ignore[reportArgumentType] ### Tests for deps parameter on @asset and @multi_asset @@ -121,8 +121,8 @@ def asset_1(): def asset_2(): return None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) @@ -138,8 +138,8 @@ def asset_1(): def asset_2(): return None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) @@ -155,8 +155,8 @@ def asset_1(): def asset_2(): return None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) @@ -172,8 +172,8 @@ def asset_1(): def asset_2(): return None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) assert res.success @@ -196,10 +196,10 @@ def via_asset_key(): def downstream(): return None - assert len(downstream.input_names) == 3 - assert 
downstream.op.ins["via_definition"].dagster_type.is_nothing - assert downstream.op.ins["via_string"].dagster_type.is_nothing - assert downstream.op.ins["via_asset_key"].dagster_type.is_nothing + assert len(downstream.input_names) == 3 # pyright: ignore[reportArgumentType] + assert downstream.op.ins["via_definition"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream.op.ins["via_string"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream.op.ins["via_asset_key"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [via_definition, via_string, via_asset_key, downstream], @@ -222,16 +222,16 @@ def a_multi_asset(): def depends_on_one_sub_asset(): return None - assert len(depends_on_one_sub_asset.input_names) == 1 - assert depends_on_one_sub_asset.op.ins["asset_1"].dagster_type.is_nothing + assert len(depends_on_one_sub_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert depends_on_one_sub_asset.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] @asset(deps=["asset_1", "asset_2"]) def depends_on_both_sub_assets(): return None - assert len(depends_on_both_sub_assets.input_names) == 2 - assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing - assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing + assert len(depends_on_both_sub_assets.input_names) == 2 # pyright: ignore[reportArgumentType] + assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [a_multi_asset, depends_on_one_sub_asset, depends_on_both_sub_assets], @@ -254,16 +254,16 @@ def a_multi_asset(): def depends_on_one_sub_asset(): return None - assert len(depends_on_one_sub_asset.input_names) == 1 - 
assert depends_on_one_sub_asset.op.ins["asset_1"].dagster_type.is_nothing + assert len(depends_on_one_sub_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert depends_on_one_sub_asset.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] @asset(deps=[AssetKey("asset_1"), AssetKey("asset_2")]) def depends_on_both_sub_assets(): return None - assert len(depends_on_both_sub_assets.input_names) == 2 - assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing - assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing + assert len(depends_on_both_sub_assets.input_names) == 2 # pyright: ignore[reportArgumentType] + assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [a_multi_asset, depends_on_one_sub_asset, depends_on_both_sub_assets], @@ -286,9 +286,9 @@ def a_multi_asset(): def depends_on_both_sub_assets(): return None - assert len(depends_on_both_sub_assets.input_names) == 2 - assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing - assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing + assert len(depends_on_both_sub_assets.input_names) == 2 # pyright: ignore[reportArgumentType] + assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [a_multi_asset, depends_on_both_sub_assets], resources={"io_manager": TestingIOManager()} @@ -310,9 +310,9 @@ def a_multi_asset(): def depends_on_both_sub_assets(): return None - assert len(depends_on_both_sub_assets.input_names) == 2 - assert 
depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing - assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing + assert len(depends_on_both_sub_assets.input_names) == 2 # pyright: ignore[reportArgumentType] + assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [a_multi_asset, depends_on_both_sub_assets], resources={"io_manager": TestingIOManager()} @@ -334,9 +334,9 @@ def a_multi_asset(): def depends_on_both_sub_assets(): return None - assert len(depends_on_both_sub_assets.input_names) == 2 - assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing - assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing + assert len(depends_on_both_sub_assets.input_names) == 2 # pyright: ignore[reportArgumentType] + assert depends_on_both_sub_assets.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert depends_on_both_sub_assets.op.ins["asset_2"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [a_multi_asset, depends_on_both_sub_assets], resources={"io_manager": TestingIOManager()} @@ -353,8 +353,8 @@ def asset_1(): def asset_2(): return None, None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) @@ -370,8 +370,8 @@ def asset_1(): def asset_2(): return None, None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # 
pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) @@ -387,8 +387,8 @@ def asset_1(): def asset_2(): return None, None - assert len(asset_2.input_names) == 1 - assert asset_2.op.ins["asset_1"].dagster_type.is_nothing + assert len(asset_2.input_names) == 1 # pyright: ignore[reportArgumentType] + assert asset_2.op.ins["asset_1"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([asset_1, asset_2], resources={"io_manager": TestingIOManager()}) assert res.success @@ -414,10 +414,10 @@ def via_asset_key(): def downstream(): return None, None - assert len(downstream.input_names) == 3 - assert downstream.op.ins["via_definition"].dagster_type.is_nothing - assert downstream.op.ins["via_string"].dagster_type.is_nothing - assert downstream.op.ins["via_asset_key"].dagster_type.is_nothing + assert len(downstream.input_names) == 3 # pyright: ignore[reportArgumentType] + assert downstream.op.ins["via_definition"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream.op.ins["via_string"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream.op.ins["via_asset_key"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [via_definition, via_string, via_asset_key, downstream], @@ -433,8 +433,8 @@ def test_source_asset_deps_via_assets_definition(): def depends_on_source_asset(): return None - assert len(depends_on_source_asset.input_names) == 1 - assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing + assert len(depends_on_source_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([depends_on_source_asset], 
resources={"io_manager": TestingIOManager()}) assert res.success @@ -447,8 +447,8 @@ def test_source_asset_deps_via_string(): def depends_on_source_asset(): return None - assert len(depends_on_source_asset.input_names) == 1 - assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing + assert len(depends_on_source_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([depends_on_source_asset], resources={"io_manager": TestingIOManager()}) assert res.success @@ -461,8 +461,8 @@ def test_source_asset_deps_via_key(): def depends_on_source_asset(): return None - assert len(depends_on_source_asset.input_names) == 1 - assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing + assert len(depends_on_source_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert depends_on_source_asset.op.ins["a_key"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize([depends_on_source_asset], resources={"io_manager": TestingIOManager()}) assert res.success @@ -483,9 +483,9 @@ def value_asset() -> int: def interop_asset(value_asset: int): assert value_asset == 1 - assert len(interop_asset.input_names) == 2 - assert interop_asset.op.ins["no_value_asset"].dagster_type.is_nothing - assert interop_asset.op.ins["value_asset"].dagster_type.kind == DagsterTypeKind.SCALAR + assert len(interop_asset.input_names) == 2 # pyright: ignore[reportArgumentType] + assert interop_asset.op.ins["no_value_asset"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert interop_asset.op.ins["value_asset"].dagster_type.kind == DagsterTypeKind.SCALAR # pyright: ignore[reportAttributeAccessIssue] res = materialize( [no_value_asset, value_asset, interop_asset], @@ -516,7 +516,7 @@ def __init__(self): match='Param "asset" is not one of ', ): - @asset(deps=[not_an_asset]) + 
@asset(deps=[not_an_asset]) # pyright: ignore[reportArgumentType] def my_asset(): return None @@ -545,8 +545,8 @@ def the_upstream_asset(): def the_downstream_asset(): return None - assert len(the_downstream_asset.input_names) == 1 - assert the_downstream_asset.op.ins["the_upstream_asset"].dagster_type.is_nothing + assert len(the_downstream_asset.input_names) == 1 # pyright: ignore[reportArgumentType] + assert the_downstream_asset.op.ins["the_upstream_asset"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] res = materialize( [the_downstream_asset, the_upstream_asset], diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_graph.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_graph.py index ce7d2348050b7..41bdf56da9c7d 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_graph.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_graph.py @@ -708,10 +708,10 @@ def test_required_assets_and_checks_by_key_check_decorator( @asset def asset0(): ... - @asset_check(asset=asset0) + @asset_check(asset=asset0) # pyright: ignore[reportArgumentType] def check0(): ... - @asset_check( + @asset_check( # pyright: ignore[reportArgumentType] asset=asset0, blocking=True, automation_condition=AutomationCondition.cron_tick_passed("*/15 * * * *"), @@ -748,10 +748,10 @@ def A(): ... @asset(deps=[A]) def B(): ... - @asset_check(asset=A) + @asset_check(asset=A) # pyright: ignore[reportArgumentType] def Ac(): ... - @asset_check(asset=B) + @asset_check(asset=B) # pyright: ignore[reportArgumentType] def Bc(): ... asset_graph = asset_graph_from_assets([A, B, Ac, Bc]) @@ -773,7 +773,7 @@ def test_required_assets_and_checks_by_key_asset_decorator( @asset(check_specs=[foo_check, bar_check]) def asset0(): ... - @asset_check(asset=asset0) + @asset_check(asset=asset0) # pyright: ignore[reportArgumentType] def check0(): ... 
asset_graph = asset_graph_from_assets([asset0, check0]) @@ -960,7 +960,7 @@ def test_serdes() -> None: @asset def a(): ... - @asset_check(asset=a) + @asset_check(asset=a) # pyright: ignore[reportArgumentType] def c(): ... @repository diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_job.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_job.py index 9dd2f593315f8..07ff2d7bc2f65 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_job.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_job.py @@ -266,14 +266,14 @@ def handle_output(self, context, obj): pass def load_input(self, context): - assert context.resource_config["a"] == 7 + assert context.resource_config["a"] == 7 # pyright: ignore[reportOptionalSubscript] assert context.resources.subresource == 9 - assert context.upstream_output.resources.subresource == 9 - assert context.upstream_output.asset_key == AssetKey("source1") - assert context.upstream_output.definition_metadata["a"] == "b" - assert context.upstream_output.resource_config["a"] == 7 - assert context.upstream_output.log is not None - context.upstream_output.log.info("hullo") + assert context.upstream_output.resources.subresource == 9 # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.asset_key == AssetKey("source1") # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.definition_metadata["a"] == "b" # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.resource_config["a"] == 7 # pyright: ignore[reportOptionalSubscript,reportOptionalMemberAccess] + assert context.upstream_output.log is not None # pyright: ignore[reportOptionalMemberAccess] + context.upstream_output.log.info("hullo") # pyright: ignore[reportOptionalMemberAccess] assert context.asset_key == AssetKey("source1") return 5 @@ -1042,7 +1042,7 @@ def get_transformed_string(string): @graph(out={"o1": GraphOut(), "o3": 
GraphOut()}) def thing(): - o1, o2 = inside_thing() + o1, o2 = inside_thing() # pyright: ignore[reportGeneralTypeIssues] o3 = get_transformed_string(o2) return (o1, o3) @@ -1089,13 +1089,13 @@ def combiner(s1, s2): @graph(out={"n1": GraphOut(), "n2": GraphOut(), "unused": GraphOut()}) def middle_thing(): - n1, unused_output = innermost_thing() + n1, unused_output = innermost_thing() # pyright: ignore[reportGeneralTypeIssues] n2 = get_string() return {"n1": n1, "n2": n2, "unused": unused_output} @graph(out={"n1": GraphOut(), "n2": GraphOut(), "unused": GraphOut()}) def outer_thing(foo_asset): - n1, output, unused_output = middle_thing() + n1, output, unused_output = middle_thing() # pyright: ignore[reportGeneralTypeIssues] n2 = transformer(output) unused_output = combiner(unused_output, transformer(foo_asset)) return {"n1": n1, "n2": n2, "unused": unused_output} @@ -1257,7 +1257,7 @@ def test_connected_subset(): ) materialization_events = sorted( [event for event in result.all_events if event.is_step_materialization], - key=lambda event: event.asset_key, + key=lambda event: event.asset_key, # type: ignore ) assert len(materialization_events) == 3 @@ -1276,7 +1276,7 @@ def test_subset_of_asset_job(): ) materialization_events = sorted( [event for event in result.all_events if event.is_step_materialization], - key=lambda event: event.asset_key, + key=lambda event: event.asset_key, # type: ignore ) assert len(materialization_events) == 3 assert materialization_events[0].asset_key == AssetKey("bar") @@ -1299,7 +1299,7 @@ def test_subset_of_assets_job(): ) materialization_events = sorted( [event for event in result.all_events if event.is_step_materialization], - key=lambda event: event.asset_key, + key=lambda event: event.asset_key, # type: ignore ) assert len(materialization_events) == 3 assert materialization_events[0].asset_key == AssetKey("bar") @@ -1525,7 +1525,7 @@ def test_multi_subset(): ) materialization_events = sorted( [event for event in result.all_events if 
event.is_step_materialization], - key=lambda event: event.asset_key, + key=lambda event: event.asset_key, # type: ignore ) assert len(materialization_events) == 2 @@ -1543,7 +1543,7 @@ def test_multi_all(): ) materialization_events = sorted( [event for event in result.all_events if event.is_step_materialization], - key=lambda event: event.asset_key, + key=lambda event: event.asset_key, # type: ignore ) assert len(materialization_events) == 3 @@ -1655,7 +1655,7 @@ def nested(): }, ) def complicated_graph(): - one, two = nested() + one, two = nested() # pyright: ignore[reportGeneralTypeIssues] return one, two, transform(two) defs = Definitions( @@ -1855,7 +1855,7 @@ def the_asset(): @ignore_warning("Class `SourceAsset` is deprecated and will be removed in 2.0.0.") @ignore_warning("Parameter `io_manager_def` .* is experimental") def test_transitive_io_manager_dep_not_provided(): - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass @@ -2380,7 +2380,7 @@ def test_asset_group_build_subset_job(job_selection, expected_assets, use_multi, with instance_for_test() as instance: result = job.execute_in_process(instance=instance) planned_asset_keys = { - record.event_log_entry.dagster_event.event_specific_data.asset_key + record.event_log_entry.dagster_event.event_specific_data.asset_key # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] for record in instance.get_records_for_run( run_id=result.run_id, of_type=DagsterEventType.ASSET_MATERIALIZATION_PLANNED, @@ -2879,7 +2879,7 @@ def __init__(self): self.values: Dict[AssetKey, int] = {} def handle_output(self, context: OutputContext, obj: object): - self.values[context.asset_key] = obj + self.values[context.asset_key] = obj # pyright: ignore[reportArgumentType] def load_input(self, context: InputContext) -> object: return self.values[context.asset_key] diff --git 
a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_key.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_key.py index 0955acf553050..d32dfe6852066 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_key.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_key.py @@ -51,7 +51,7 @@ def asset_2(): ... def test_forward_slashes_allowed(): - keys1 = [a.key for a in load_assets_from_current_module(key_prefix="foo/bar")] + keys1 = [a.key for a in load_assets_from_current_module(key_prefix="foo/bar")] # pyright: ignore[reportAttributeAccessIssue] assert AssetKey(["foo/bar", "baz"]) in keys1 assert AssetKey(["foo/bar", "baz", "quix"]) in keys1 diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_selection.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_selection.py index 439b9a495aee9..f552519b46e93 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_selection.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_asset_selection.py @@ -509,47 +509,47 @@ def test_asset_selection_type_checking(): invalid_argument = "invalid_argument" with pytest.raises(CheckError): - AssetChecksForAssetKeysSelection(selected_asset_keys=invalid_argument) + AssetChecksForAssetKeysSelection(selected_asset_keys=invalid_argument) # pyright: ignore[reportArgumentType] test = AssetChecksForAssetKeysSelection(selected_asset_keys=valid_asset_key_sequence) assert isinstance(test, AssetChecksForAssetKeysSelection) with pytest.raises(CheckError): - AssetCheckKeysSelection(selected_asset_check_keys=invalid_argument) + AssetCheckKeysSelection(selected_asset_check_keys=invalid_argument) # pyright: ignore[reportArgumentType] test = AssetCheckKeysSelection(selected_asset_check_keys=valid_asset_check_key_sequence) assert isinstance(test, AssetCheckKeysSelection) with pytest.raises(CheckError): - AndAssetSelection(operands=invalid_argument) + 
AndAssetSelection(operands=invalid_argument) # pyright: ignore[reportArgumentType] test = AndAssetSelection(operands=valid_asset_selection_sequence) assert isinstance(test, AndAssetSelection) with pytest.raises(CheckError): - OrAssetSelection(operands=invalid_argument) + OrAssetSelection(operands=invalid_argument) # pyright: ignore[reportArgumentType] test = OrAssetSelection(operands=valid_asset_selection_sequence) assert isinstance(test, OrAssetSelection) with pytest.raises(CheckError): - SubtractAssetSelection(left=invalid_argument, right=invalid_argument) + SubtractAssetSelection(left=invalid_argument, right=invalid_argument) # pyright: ignore[reportArgumentType] test = SubtractAssetSelection(left=valid_asset_selection, right=valid_asset_selection) assert isinstance(test, SubtractAssetSelection) with pytest.raises(CheckError): - SinksAssetSelection(child=invalid_argument) + SinksAssetSelection(child=invalid_argument) # pyright: ignore[reportArgumentType] test = SinksAssetSelection(child=valid_asset_selection) assert isinstance(test, SinksAssetSelection) with pytest.raises(CheckError): - RequiredNeighborsAssetSelection(child=invalid_argument) + RequiredNeighborsAssetSelection(child=invalid_argument) # pyright: ignore[reportArgumentType] test = RequiredNeighborsAssetSelection(child=valid_asset_selection) assert isinstance(test, RequiredNeighborsAssetSelection) with pytest.raises(CheckError): - RootsAssetSelection(child=invalid_argument) + RootsAssetSelection(child=invalid_argument) # pyright: ignore[reportArgumentType] test = RootsAssetSelection(child=valid_asset_selection) assert isinstance(test, RootsAssetSelection) with pytest.raises(CheckError): - DownstreamAssetSelection(child=invalid_argument, depth=0, include_self=False) + DownstreamAssetSelection(child=invalid_argument, depth=0, include_self=False) # pyright: ignore[reportArgumentType] test = DownstreamAssetSelection(child=valid_asset_selection, depth=0, include_self=False) assert isinstance(test, 
DownstreamAssetSelection) @@ -557,7 +557,7 @@ def test_asset_selection_type_checking(): assert isinstance(test, GroupsAssetSelection) with pytest.raises(CheckError): - KeysAssetSelection(selected_keys=invalid_argument) + KeysAssetSelection(selected_keys=invalid_argument) # pyright: ignore[reportArgumentType] test = KeysAssetSelection(selected_keys=valid_asset_key_sequence) assert isinstance(test, KeysAssetSelection) @@ -573,12 +573,12 @@ def test_asset_selection_type_checking(): assert isinstance(test, KeyPrefixesAssetSelection) with pytest.raises(CheckError): - UpstreamAssetSelection(child=invalid_argument, depth=0, include_self=False) + UpstreamAssetSelection(child=invalid_argument, depth=0, include_self=False) # pyright: ignore[reportArgumentType] test = UpstreamAssetSelection(child=valid_asset_selection, depth=0, include_self=False) assert isinstance(test, UpstreamAssetSelection) with pytest.raises(CheckError): - ParentSourcesAssetSelection(child=invalid_argument) + ParentSourcesAssetSelection(child=invalid_argument) # pyright: ignore[reportArgumentType] test = ParentSourcesAssetSelection(child=valid_asset_selection) assert isinstance(test, ParentSourcesAssetSelection) @@ -616,7 +616,7 @@ def asset1(): ... @asset def asset2(): ... - @asset_check(asset=asset1) + @asset_check(asset=asset1) # pyright: ignore[reportArgumentType] def check1(): ... 
asset_graph = AssetGraph.from_assets([asset1, asset2, check1]) diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets.py index fcaa9f3cdede0..1f29844b42a83 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets.py @@ -575,7 +575,7 @@ def the_asset(): def test_asset_both_io_manager_args_provided(): - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_io_manager(): pass @@ -810,7 +810,7 @@ def handle_output(self, context, _obj): events.append(f"entered handle_output for {context.step_key}") def load_input(self, context): - events.append(f"entered handle_input for {context.upstream_output.step_key}") + events.append(f"entered handle_input for {context.upstream_output.step_key}") # pyright: ignore[reportOptionalMemberAccess] asset_provided_resources = AssetsDefinition.from_graph( graph_def=basic, @@ -1310,7 +1310,7 @@ def assets(): result = materialize([assets], selection=["asset2"]) assert result.success materialized_assets = [ - event.event_specific_data.materialization.asset_key + event.event_specific_data.materialization.asset_key # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] for event in result.get_asset_materialization_events() ] assert materialized_assets == [AssetKey("asset2")] @@ -1339,7 +1339,7 @@ def assets(): result = materialize([assets], selection=["asset2"]) assert result.success materialized_assets = [ - event.event_specific_data.materialization.asset_key + event.event_specific_data.materialization.asset_key # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] for event in result.get_asset_materialization_events() ] assert materialized_assets == [AssetKey("asset2")] @@ -1468,8 +1468,8 @@ def downstream_graph(b): @graph(out={"a": GraphOut(), "b": GraphOut(), "c": GraphOut(), "d": 
GraphOut()}) def nested_graph(): - a, b = two_outputs_graph() - c, d = downstream_graph(b) + a, b = two_outputs_graph() # pyright: ignore[reportGeneralTypeIssues] + c, d = downstream_graph(b) # pyright: ignore[reportGeneralTypeIssues] return {"a": a, "b": b, "c": c, "d": d} with instance_for_test() as instance: @@ -1667,7 +1667,7 @@ def test_asset_key_with_prefix(): ) with pytest.raises(CheckError): - AssetKey("foo").with_prefix(1) + AssetKey("foo").with_prefix(1) # pyright: ignore[reportArgumentType] def _exec_asset(asset_def, selection=None): @@ -2231,8 +2231,8 @@ def my_multi_asset(): def test_asset_spec_with_code_versions(): @multi_asset(specs=[AssetSpec(key="a", code_version="1"), AssetSpec(key="b", code_version="2")]) def multi_asset_with_versions(): - yield MaterializeResult("a") - yield MaterializeResult("b") + yield MaterializeResult("a") # pyright: ignore[reportCallIssue] + yield MaterializeResult("b") # pyright: ignore[reportCallIssue] code_versions_by_key = {spec.key: spec.code_version for spec in multi_asset_with_versions.specs} assert code_versions_by_key == {AssetKey(["a"]): "1", AssetKey(["b"]): "2"} @@ -2243,8 +2243,8 @@ def test_asset_spec_with_metadata(): specs=[AssetSpec(key="a", metadata={"foo": "1"}), AssetSpec(key="b", metadata={"bar": "2"})] ) def multi_asset_with_metadata(): - yield MaterializeResult("a") - yield MaterializeResult("b") + yield MaterializeResult("a") # pyright: ignore[reportCallIssue] + yield MaterializeResult("b") # pyright: ignore[reportCallIssue] metadata_by_key = {spec.key: spec.metadata for spec in multi_asset_with_metadata.specs} assert metadata_by_key == {AssetKey(["a"]): {"foo": "1"}, AssetKey(["b"]): {"bar": "2"}} diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules.py index c3a12c37039c6..7fc579483ac2e 100644 --- 
a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules.py @@ -179,7 +179,7 @@ def test_load_assets_from_modules_with_group_name(): def test_respect_existing_groups(): assets = load_assets_from_current_module() - assert assets[0].group_names_by_key.get(AssetKey("asset_in_current_module")) == "my_group" + assert assets[0].group_names_by_key.get(AssetKey("asset_in_current_module")) == "my_group" # pyright: ignore[reportAttributeAccessIssue] with pytest.raises(DagsterInvalidDefinitionError): load_assets_from_current_module(group_name="yay") @@ -229,10 +229,10 @@ def test_prefix(prefix): from dagster_tests.asset_defs_tests.asset_package import module_with_assets assets = load_assets_from_modules([asset_package, module_with_assets], key_prefix=prefix) - assert_assets_have_prefix(prefix, assets) + assert_assets_have_prefix(prefix, assets) # pyright: ignore[reportArgumentType] assets = load_assets_from_package_module(asset_package, key_prefix=prefix) - assert_assets_have_prefix(prefix, assets) + assert_assets_have_prefix(prefix, assets) # pyright: ignore[reportArgumentType] def _load_assets_from_module_with_assets(**kwargs): diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules_with_checks.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules_with_checks.py index e23da1081c489..a8d87c5f72205 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules_with_checks.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_assets_from_modules_with_checks.py @@ -20,13 +20,13 @@ def test_load(): assets = load_assets_from_current_module() assert len(assets) == 1 - assert assets[0].key == AssetKey(["my_asset"]) - assert len(assets[0].check_specs) == 1 - assert next(iter(assets[0].check_specs)).asset_key == AssetKey(["my_asset"]) + assert assets[0].key == 
AssetKey(["my_asset"]) # pyright: ignore[reportAttributeAccessIssue] + assert len(assets[0].check_specs) == 1 # pyright: ignore[reportArgumentType,reportAttributeAccessIssue] + assert next(iter(assets[0].check_specs)).asset_key == AssetKey(["my_asset"]) # pyright: ignore[reportAttributeAccessIssue] def test_materialize(): - result = materialize(load_assets_from_current_module()) + result = materialize(load_assets_from_current_module()) # pyright: ignore[reportArgumentType] assert len(result.get_asset_materialization_events()) == 1 assert result.get_asset_materialization_events()[0].asset_key == AssetKey(["my_asset"]) @@ -38,13 +38,13 @@ def test_prefix_load(): assets = load_assets_from_current_module(key_prefix="foo") assert len(assets) == 1 - assert assets[0].key == AssetKey(["foo", "my_asset"]) - assert len(assets[0].check_specs) == 1 - assert next(iter(assets[0].check_specs)).asset_key == AssetKey(["foo", "my_asset"]) + assert assets[0].key == AssetKey(["foo", "my_asset"]) # pyright: ignore[reportAttributeAccessIssue] + assert len(assets[0].check_specs) == 1 # pyright: ignore[reportArgumentType,reportAttributeAccessIssue] + assert next(iter(assets[0].check_specs)).asset_key == AssetKey(["foo", "my_asset"]) # pyright: ignore[reportAttributeAccessIssue] def test_prefix_materialize(): - result = materialize(load_assets_from_current_module(key_prefix="foo")) + result = materialize(load_assets_from_current_module(key_prefix="foo")) # pyright: ignore[reportArgumentType] assert len(result.get_asset_materialization_events()) == 1 assert result.get_asset_materialization_events()[0].asset_key == AssetKey(["foo", "my_asset"]) diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_decorators.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_decorators.py index 94c29a5265752..b495ad9fefc0c 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_decorators.py +++ 
b/python_modules/dagster/dagster_tests/asset_defs_tests/test_decorators.py @@ -298,7 +298,7 @@ def _my_asset(): def test_asset_with_dagster_type(): - @asset(dagster_type=String) + @asset(dagster_type=String) # pyright: ignore[reportArgumentType] def my_asset(arg1): return arg1 @@ -485,8 +485,8 @@ def test_infer_output_dagster_type(): def my_asset() -> str: return "foo" - assert my_asset.op.outs["result"].dagster_type.display_name == "String" - assert my_asset.op.outs["result"].dagster_type.typing_type == str + assert my_asset.op.outs["result"].dagster_type.display_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert my_asset.op.outs["result"].dagster_type.typing_type == str # pyright: ignore[reportAttributeAccessIssue] def test_infer_output_dagster_type_none(): @@ -494,8 +494,8 @@ def test_infer_output_dagster_type_none(): def my_asset() -> None: pass - assert my_asset.op.outs["result"].dagster_type.typing_type == type(None) - assert my_asset.op.outs["result"].dagster_type.display_name == "Nothing" + assert my_asset.op.outs["result"].dagster_type.typing_type == type(None) # pyright: ignore[reportAttributeAccessIssue] + assert my_asset.op.outs["result"].dagster_type.display_name == "Nothing" # pyright: ignore[reportAttributeAccessIssue] def test_infer_output_dagster_type_empty(): @@ -503,8 +503,8 @@ def test_infer_output_dagster_type_empty(): def my_asset(): pass - assert my_asset.op.outs["result"].dagster_type.typing_type is Any - assert my_asset.op.outs["result"].dagster_type.display_name == "Any" + assert my_asset.op.outs["result"].dagster_type.typing_type is Any # pyright: ignore[reportAttributeAccessIssue] + assert my_asset.op.outs["result"].dagster_type.display_name == "Any" # pyright: ignore[reportAttributeAccessIssue] def test_asset_with_docstring_description(): @@ -686,11 +686,11 @@ def test_multi_asset_resource_defs(): def baz_resource(): pass - @io_manager(required_resource_keys={"baz"}) + @io_manager(required_resource_keys={"baz"}) # 
pyright: ignore[reportArgumentType] def foo_manager(): pass - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def bar_manager(): pass @@ -717,11 +717,11 @@ def test_multi_asset_resource_defs_specs() -> None: def baz_resource(): pass - @io_manager(required_resource_keys={"baz"}) + @io_manager(required_resource_keys={"baz"}) # pyright: ignore[reportArgumentType] def foo_manager(): pass - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def bar_manager(): pass @@ -761,7 +761,7 @@ def my_asset(): @ignore_warning("Parameter `io_manager_def` .* is experimental") @ignore_warning("Parameter `resource_defs` .* is experimental") def test_asset_io_manager_def(): - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_manager(): pass @@ -771,7 +771,7 @@ def the_asset(): # If IO manager def is passed directly, then it doesn't appear as a # required resource key on the underlying op. - assert set(the_asset.node_def.required_resource_keys) == set() + assert set(the_asset.node_def.required_resource_keys) == set() # pyright: ignore[reportAttributeAccessIssue] @asset(io_manager_key="blah", resource_defs={"blah": the_manager}) def other_asset(): @@ -779,7 +779,7 @@ def other_asset(): # If IO manager def is provided as a resource def, it appears in required # resource keys on the underlying op. 
- assert set(other_asset.node_def.required_resource_keys) == {"blah"} + assert set(other_asset.node_def.required_resource_keys) == {"blah"} # pyright: ignore[reportAttributeAccessIssue] def test_asset_retry_policy(): diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize.py index fa5f221c5aa58..1df11770896b6 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize.py @@ -193,7 +193,7 @@ def the_asset(): ... @ignore_warning("Parameter `io_manager_def` .* is experimental") @ignore_warning("Class `SourceAsset` is deprecated and will be removed in 2.0.0.") def test_materialize_source_asset_conflicts(): - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize_to_memory.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize_to_memory.py index 31a787d9e671f..bcd039fc53d4e 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize_to_memory.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_materialize_to_memory.py @@ -278,7 +278,7 @@ def partitioned(context): def test_materialize_to_memory_provided_io_manager_instance(): - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_manager(): pass diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_partitioned_assets.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_partitioned_assets.py index 22f6f7c398571..8b446f2b36baa 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_partitioned_assets.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_partitioned_assets.py @@ -136,7 
+136,7 @@ def downstream_asset(upstream_asset): assert get_upstream_partitions_for_partition_range( downstream_asset, - upstream_asset.partitions_def, + upstream_asset.partitions_def, # pyright: ignore[reportArgumentType] AssetKey("upstream_asset"), PartitionKeyRange("a", "c"), ) == PartitionKeyRange("a", "c") @@ -427,14 +427,14 @@ def downstream_asset_2(upstream_asset_2: int): assert get_upstream_partitions_for_partition_range( downstream_asset_1, - upstream_asset.partitions_def, + upstream_asset.partitions_def, # pyright: ignore[reportArgumentType] AssetKey("upstream_asset_1"), PartitionKeyRange("a", "c"), ) == PartitionKeyRange("a", "c") assert get_upstream_partitions_for_partition_range( downstream_asset_2, - upstream_asset.partitions_def, + upstream_asset.partitions_def, # pyright: ignore[reportArgumentType] AssetKey("upstream_asset_2"), PartitionKeyRange("a", "c"), ) == PartitionKeyRange("a", "c") diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_source_asset_observation_job.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_source_asset_observation_job.py index fa98b500f6fd3..87939eb723211 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_source_asset_observation_job.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_source_asset_observation_job.py @@ -77,11 +77,11 @@ def baz(): job_def = Definitions(assets=[foo, bar, baz]).get_implicit_job_def_for_assets([foo.key]) # If the asset selection contains any materializable assets, source assets observations will not run - job_def.execute_in_process(partition_key="A", instance=instance) + job_def.execute_in_process(partition_key="A", instance=instance) # pyright: ignore[reportOptionalMemberAccess] assert called == {"bar"} # If the asset selection contains only observable source assets, source assets are observed - job_def.execute_in_process(partition_key="A", asset_selection=[foo.key], instance=instance) + 
job_def.execute_in_process(partition_key="A", asset_selection=[foo.key], instance=instance) # pyright: ignore[reportOptionalMemberAccess] assert called == {"bar", "foo"} record = instance.get_latest_data_version_record(AssetKey(["foo"])) assert record and extract_data_version_from_entry(record.event_log_entry) == DataVersion( diff --git a/python_modules/dagster/dagster_tests/asset_defs_tests/test_unresolved_asset_job.py b/python_modules/dagster/dagster_tests/asset_defs_tests/test_unresolved_asset_job.py index 1cb19f3e0721e..563d5d46cf59c 100644 --- a/python_modules/dagster/dagster_tests/asset_defs_tests/test_unresolved_asset_job.py +++ b/python_modules/dagster/dagster_tests/asset_defs_tests/test_unresolved_asset_job.py @@ -369,7 +369,7 @@ def test_define_selection_job(job_selection, expected_assets, use_multi, prefixe with instance_for_test() as instance: result = job.execute_in_process(instance=instance) planned_asset_keys = { - record.event_log_entry.dagster_event.event_specific_data.asset_key + record.event_log_entry.dagster_event.event_specific_data.asset_key # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] for record in instance.get_records_for_run( run_id=result.run_id, of_type=DagsterEventType.ASSET_MATERIALIZATION_PLANNED, diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/extra_repo.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/extra_repo.py index 1ed88cb39d7cd..19bdd44fc3a05 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/extra_repo.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/extra_repo.py @@ -11,6 +11,6 @@ def extra_job(): do_something() -@repository +@repository # pyright: ignore[reportArgumentType] def extra(): return {"jobs": {"extra_job": extra_job}} diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_cli_commands.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_cli_commands.py index 
33594494fd0d6..61a54ed87f75a 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_cli_commands.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_cli_commands.py @@ -307,7 +307,7 @@ def grpc_server_bar_kwargs(instance, job_name: Optional[str] = None): if job_name: args["job_name"] = "foo" if client.port: - args["grpc_port"] = client.port + args["grpc_port"] = client.port # pyright: ignore[reportArgumentType] if client.socket: args["grpc_socket"] = client.socket yield args diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_core_test_utils.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_core_test_utils.py index 5b20eda447f91..b1f657ee05c0b 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_core_test_utils.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_core_test_utils.py @@ -11,7 +11,7 @@ def test_environ(): assert os.environ.get(env_var1) == "1.1" assert os.environ.get(env_var2) == "2.0" - with environ({env_var1: None}): + with environ({env_var1: None}): # pyright: ignore[reportArgumentType] assert os.environ.get(env_var1) is None assert os.environ.get(env_var2) == "2.0" diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_execute_command.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_execute_command.py index 0b42d537a3493..1a3e87541d571 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_execute_command.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_execute_command.py @@ -203,7 +203,7 @@ def test_more_than_one_job(): match=re.escape("Must provide --job as there is more than one job in bar"), ): execute_execute_command( - kwargs={ + kwargs={ # pyright: ignore[reportArgumentType] "repository_yaml": None, "job_name": None, "python_file": file_relative_path(__file__, "test_cli_commands.py"), @@ -218,7 +218,7 @@ 
def test_more_than_one_job(): match=re.escape("Must provide --job as there is more than one job in bar. "), ): execute_execute_command( - kwargs={ + kwargs={ # pyright: ignore[reportArgumentType] "repository_yaml": None, "job_name": None, "python_file": file_relative_path(__file__, "test_cli_commands.py"), @@ -274,7 +274,7 @@ def test_attribute_not_found(): match=re.escape("nope not found at module scope in file"), ): execute_execute_command( - kwargs={ + kwargs={ # pyright: ignore[reportArgumentType] "repository_yaml": None, "job_name": None, "python_file": file_relative_path(__file__, "test_cli_commands.py"), @@ -295,7 +295,7 @@ def test_attribute_is_wrong_thing(): ), ): execute_execute_command( - kwargs={ + kwargs={ # pyright: ignore[reportArgumentType] "repository_yaml": None, "job_name": None, "python_file": file_relative_path(__file__, "test_cli_commands.py"), @@ -316,7 +316,7 @@ def test_attribute_fn_returns_wrong_thing(): ), ): execute_execute_command( - kwargs={ + kwargs={ # pyright: ignore[reportArgumentType] "repository_yaml": None, "job_name": None, "python_file": file_relative_path(__file__, "test_cli_commands.py"), diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_list_command.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_list_command.py index 59e03c73233b0..535c9333ecdd4 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_list_command.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_list_command.py @@ -89,12 +89,12 @@ def test_list_command_grpc_socket(): no_print, ) - result = runner.invoke(job_list_command, ["--grpc-socket", api_client.socket]) + result = runner.invoke(job_list_command, ["--grpc-socket", api_client.socket]) # pyright: ignore[reportArgumentType] assert_correct_bar_repository_output(result) result = runner.invoke( job_list_command, - ["--grpc-socket", api_client.socket, "--grpc-host", api_client.host], + ["--grpc-socket", 
api_client.socket, "--grpc-host", api_client.host], # pyright: ignore[reportArgumentType] ) assert_correct_bar_repository_output(result) @@ -115,21 +115,21 @@ def test_list_command_deployed_grpc(): ) as server_process: api_client = server_process.create_client() - result = runner.invoke(job_list_command, ["--grpc-port", api_client.port]) + result = runner.invoke(job_list_command, ["--grpc-port", api_client.port]) # pyright: ignore[reportArgumentType] assert_correct_bar_repository_output(result) result = runner.invoke( job_list_command, - ["--grpc-port", api_client.port, "--grpc-host", api_client.host], + ["--grpc-port", api_client.port, "--grpc-host", api_client.host], # pyright: ignore[reportArgumentType] ) assert_correct_bar_repository_output(result) - result = runner.invoke(job_list_command, ["--grpc-port", api_client.port]) + result = runner.invoke(job_list_command, ["--grpc-port", api_client.port]) # pyright: ignore[reportArgumentType] assert_correct_bar_repository_output(result) result = runner.invoke( job_list_command, - ["--grpc-port", api_client.port, "--grpc-socket", "foonamedsocket"], + ["--grpc-port", api_client.port, "--grpc-socket", "foonamedsocket"], # pyright: ignore[reportArgumentType] ) assert result.exit_code != 0 diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_materialize_command.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_materialize_command.py index 04d6586b068f5..4d16a01ade5eb 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_materialize_command.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_materialize_command.py @@ -89,7 +89,7 @@ def test_partition(): assert "RUN_SUCCESS" in result.output event = instance.get_latest_materialization_event(AssetKey("partitioned_asset")) assert event is not None - assert event.asset_materialization.partition == "one" + assert event.asset_materialization.partition == "one" # pyright: 
ignore[reportOptionalMemberAccess] def test_partition_option_with_non_partitioned_asset(): diff --git a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_telemetry.py b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_telemetry.py index d20cca282a43d..c245be5eff6c9 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_telemetry.py +++ b/python_modules/dagster/dagster_tests/cli_tests/command_tests/test_telemetry.py @@ -319,10 +319,10 @@ def test_get_stats_from_remote_repo_code_checks(instance): @asset def my_asset(): ... - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_check(): ... - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_check_2(): ... @asset @@ -473,11 +473,11 @@ def asset1(): ... def test_get_stats_from_remote_repo_functional_io_managers(instance): @dagster_maintained_io_manager - @io_manager(config_schema={"foo": str}) + @io_manager(config_schema={"foo": str}) # pyright: ignore[reportArgumentType] def my_io_manager(): return 1 - @io_manager(config_schema={"baz": str}) + @io_manager(config_schema={"baz": str}) # pyright: ignore[reportArgumentType] def custom_io_manager(): return 2 @@ -550,7 +550,7 @@ def my_resource(): return 1 @dagster_maintained_io_manager - @io_manager(config_schema={"foo": str}) + @io_manager(config_schema={"foo": str}) # pyright: ignore[reportArgumentType] def my_io_manager(): return 1 diff --git a/python_modules/dagster/dagster_tests/cli_tests/test_api_commands.py b/python_modules/dagster/dagster_tests/cli_tests/test_api_commands.py index 2a7cff073930f..1c483394bd1c2 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/test_api_commands.py +++ b/python_modules/dagster/dagster_tests/cli_tests/test_api_commands.py @@ -91,7 +91,7 @@ def test_execute_run_with_secrets_loader(capfd): runner = CliRunner() # Restore original env after test - with environ({"FOO": 
None}): + with environ({"FOO": None}): # pyright: ignore[reportArgumentType] with instance_for_test( overrides={ "compute_logs": { @@ -370,7 +370,7 @@ def test_execute_step_with_secrets_loader(): runner = CliRunner() # Restore original env after test - with environ({"FOO": None}): + with environ({"FOO": None}): # pyright: ignore[reportArgumentType] with instance_for_test( overrides={ "compute_logs": { @@ -676,5 +676,5 @@ def test_execute_step_verify_step_framework_error(mock_verify_step): assert log_entry.step_key == "fake_step" assert "Unexpected framework error text" in str( - log_entry.dagster_event.event_specific_data.error + log_entry.dagster_event.event_specific_data.error # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] ) diff --git a/python_modules/dagster/dagster_tests/cli_tests/test_config_scaffolder.py b/python_modules/dagster/dagster_tests/cli_tests/test_config_scaffolder.py index 709fc7a01fd13..3b17686326801 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/test_config_scaffolder.py +++ b/python_modules/dagster/dagster_tests/cli_tests/test_config_scaffolder.py @@ -34,15 +34,15 @@ def test_basic_ops_config(snapshot): env_config_type = job_def.run_config_schema.config_type - assert env_config_type.fields["ops"].is_required - ops_config_type = env_config_type.fields["ops"].config_type + assert env_config_type.fields["ops"].is_required # pyright: ignore[reportAttributeAccessIssue] + ops_config_type = env_config_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] assert ops_config_type.fields["required_field_op"].is_required required_op_config_type = ops_config_type.fields["required_field_op"].config_type assert required_op_config_type.fields["config"].is_required - assert set(env_config_type.fields["loggers"].config_type.fields.keys()) == set(["console"]) + assert set(env_config_type.fields["loggers"].config_type.fields.keys()) == set(["console"]) # pyright: ignore[reportAttributeAccessIssue] - 
console_logger_config_type = env_config_type.fields["loggers"].config_type.fields["console"] + console_logger_config_type = env_config_type.fields["loggers"].config_type.fields["console"] # pyright: ignore[reportAttributeAccessIssue] assert set(console_logger_config_type.config_type.fields.keys()) == set(["config"]) diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_file_target_workspace/example_two/ops.py b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_file_target_workspace/example_two/ops.py index b294ce8ce2b96..0c0f45d7ea299 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_file_target_workspace/example_two/ops.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_file_target_workspace/example_two/ops.py @@ -1,4 +1,3 @@ -# type: ignore from dagster._core.definitions import op diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_load_failure_workspace.py b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_load_failure_workspace.py index 88bde06425da6..26ca226c8d81c 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_load_failure_workspace.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_load_failure_workspace.py @@ -31,7 +31,7 @@ def test_multi_location_error(instance): assert not request_context.has_code_location("completely_unknown_location") assert ( - "No module named" in request_context.get_code_location_error("broken_location").message + "No module named" in request_context.get_code_location_error("broken_location").message # pyright: ignore[reportOptionalMemberAccess] ) @@ -51,5 +51,5 @@ def test_workspace_with_only_error(instance): assert request_context.has_code_location_error("broken_location") assert ( - "No module named" in 
request_context.get_code_location_error("broken_location").message + "No module named" in request_context.get_code_location_error("broken_location").message # pyright: ignore[reportOptionalMemberAccess] ) diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_multi_location_workspace.py b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_multi_location_workspace.py index 1c184cb6054a9..eb6230e1ecc3a 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_multi_location_workspace.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/multi_location/test_multi_location_workspace.py @@ -191,7 +191,7 @@ def test_grpc_multi_location_workspace(config_source): with ExitStack() as stack: instance = stack.enter_context(instance_for_test()) code_locations = { - name: stack.enter_context(origin.create_single_location(instance)) + name: stack.enter_context(origin.create_single_location(instance)) # pyright: ignore[reportAttributeAccessIssue] for name, origin in origins.items() } @@ -209,17 +209,17 @@ def test_grpc_multi_location_workspace(config_source): assert loaded_from_module_location.repository_names == {"hello_world_repository"} named_loaded_from_file_location = code_locations.get("named_loaded_from_file") - assert named_loaded_from_file_location.repository_names == {"hello_world_repository_name"} + assert named_loaded_from_file_location.repository_names == {"hello_world_repository_name"} # pyright: ignore[reportOptionalMemberAccess] assert isinstance(named_loaded_from_file_location, GrpcServerCodeLocation) named_loaded_from_module_location = code_locations.get("named_loaded_from_module") - assert named_loaded_from_module_location.repository_names == {"hello_world_repository_name"} + assert named_loaded_from_module_location.repository_names == {"hello_world_repository_name"} # pyright: ignore[reportOptionalMemberAccess] assert 
isinstance(named_loaded_from_module_location, GrpcServerCodeLocation) named_loaded_from_module_attribute_location = code_locations.get( "named_loaded_from_module_attribute" ) - assert named_loaded_from_module_attribute_location.repository_names == { + assert named_loaded_from_module_attribute_location.repository_names == { # pyright: ignore[reportOptionalMemberAccess] "hello_world_repository_name" } assert isinstance(named_loaded_from_module_attribute_location, GrpcServerCodeLocation) @@ -227,7 +227,7 @@ def test_grpc_multi_location_workspace(config_source): named_loaded_from_file_attribute_location = code_locations.get( "named_loaded_from_file_attribute" ) - assert named_loaded_from_file_attribute_location.repository_names == { + assert named_loaded_from_file_attribute_location.repository_names == { # pyright: ignore[reportOptionalMemberAccess] "hello_world_repository_name" } assert isinstance(named_loaded_from_file_attribute_location, GrpcServerCodeLocation) diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_grpc_server_workspace.py b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_grpc_server_workspace.py index 88d8edd8c186e..d44e991ef65ec 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_grpc_server_workspace.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_grpc_server_workspace.py @@ -57,16 +57,16 @@ def test_grpc_socket_workspace(instance): assert code_locations.get(default_location_name) local_port = code_locations.get(default_location_name) - assert local_port.socket == first_socket - assert local_port.host == "localhost" - assert local_port.port is None + assert local_port.socket == first_socket # pyright: ignore[reportOptionalMemberAccess] + assert local_port.host == "localhost" # pyright: ignore[reportOptionalMemberAccess] + assert local_port.port is None # pyright: ignore[reportOptionalMemberAccess] assert 
code_locations.get("local_port_default_host") local_port_default_host = code_locations.get("local_port_default_host") - assert local_port_default_host.socket == second_socket - assert local_port_default_host.host == "localhost" - assert local_port_default_host.port is None + assert local_port_default_host.socket == second_socket # pyright: ignore[reportOptionalMemberAccess] + assert local_port_default_host.host == "localhost" # pyright: ignore[reportOptionalMemberAccess] + assert local_port_default_host.port is None # pyright: ignore[reportOptionalMemberAccess] assert all(map(lambda x: x.name, code_locations.values())) @@ -105,14 +105,14 @@ def test_grpc_server_env_vars(): port_origin = origins["my_grpc_server_port"] assert isinstance(origins["my_grpc_server_port"], GrpcServerCodeLocationOrigin) - assert port_origin.port == 1234 - assert port_origin.host == "barhost" + assert port_origin.port == 1234 # pyright: ignore[reportAttributeAccessIssue] + assert port_origin.host == "barhost" # pyright: ignore[reportAttributeAccessIssue] socket_origin = origins["my_grpc_server_socket"] assert isinstance(origins["my_grpc_server_socket"], GrpcServerCodeLocationOrigin) - assert socket_origin.socket == "barsocket" - assert socket_origin.host == "barhost" + assert socket_origin.socket == "barsocket" # pyright: ignore[reportAttributeAccessIssue] + assert socket_origin.host == "barhost" # pyright: ignore[reportAttributeAccessIssue] def test_ssl_grpc_server_workspace(instance): @@ -136,7 +136,7 @@ def test_ssl_grpc_server_workspace(instance): file_relative_path(__file__, "not_a_real.yaml"), ) origin = next(iter(origins.values())) - assert origin.use_ssl + assert origin.use_ssl # pyright: ignore[reportAttributeAccessIssue] # Actually connecting to the server will fail since it's expecting SSL # and we didn't set up the server with SSL @@ -185,16 +185,16 @@ def test_grpc_server_workspace(instance): assert code_locations.get(default_location_name) local_port = 
code_locations.get(default_location_name) - assert local_port.port == first_port - assert local_port.host == "localhost" - assert local_port.socket is None + assert local_port.port == first_port # pyright: ignore[reportOptionalMemberAccess] + assert local_port.host == "localhost" # pyright: ignore[reportOptionalMemberAccess] + assert local_port.socket is None # pyright: ignore[reportOptionalMemberAccess] assert code_locations.get("local_port_default_host") local_port_default_host = code_locations.get("local_port_default_host") - assert local_port_default_host.port == second_port - assert local_port_default_host.host == "localhost" - assert local_port_default_host.socket is None + assert local_port_default_host.port == second_port # pyright: ignore[reportOptionalMemberAccess] + assert local_port_default_host.host == "localhost" # pyright: ignore[reportOptionalMemberAccess] + assert local_port_default_host.socket is None # pyright: ignore[reportOptionalMemberAccess] assert all(map(lambda x: x.name, code_locations.values())) diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_job_load.py b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_job_load.py index c898115f7e20e..7932097625bc3 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_job_load.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/test_job_load.py @@ -15,7 +15,7 @@ def load_pipeline_via_cli_runner(cli_args): @job_target_argument def command(**kwargs): with get_remote_job_from_kwargs(DagsterInstance.get(), "", kwargs) as remote_job: - capture_result["external_pipeline"] = remote_job + capture_result["external_pipeline"] = remote_job # pyright: ignore[reportArgumentType] with instance_for_test(): runner = CliRunner() diff --git a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/toml_tests/test_toml_loading.py 
b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/toml_tests/test_toml_loading.py index 4b3fbf4d4744e..09edf4238e4b0 100644 --- a/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/toml_tests/test_toml_loading.py +++ b/python_modules/dagster/dagster_tests/cli_tests/workspace_tests/toml_tests/test_toml_loading.py @@ -58,13 +58,13 @@ def test_is_valid_modules_list_from_toml(): def test_is_valid_modules_list_not_a_list(): with raises(ValueError, match="Modules should be a list."): - is_valid_modules_list("not a list") + is_valid_modules_list("not a list") # pyright: ignore[reportArgumentType] def test_is_valid_modules_list_item_not_dict(): modules = ["not a dictionary"] with raises(ValueError, match="Item at index 0 is not a dictionary."): - is_valid_modules_list(modules) + is_valid_modules_list(modules) # pyright: ignore[reportArgumentType] def test_is_valid_modules_list_missing_type(): diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_defaults.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_defaults.py index 4e8b3bd85ceeb..1d766134f4dff 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_defaults.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_defaults.py @@ -39,15 +39,15 @@ def test_post_process_config(): assert post_process_config(nullable_list_config_type, None).value == [] map_config_type = resolve_to_config_type({str: int}) - assert post_process_config(map_config_type, {"foo": 5}).value == {"foo": 5} - assert post_process_config(map_config_type, None).value == {} + assert post_process_config(map_config_type, {"foo": 5}).value == {"foo": 5} # pyright: ignore[reportArgumentType] + assert post_process_config(map_config_type, None).value == {} # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="Null map member not caught"): 
- assert post_process_config(map_config_type, {"foo": None}).value == {"foo": None} + assert post_process_config(map_config_type, {"foo": None}).value == {"foo": None} # pyright: ignore[reportArgumentType] nullable_map_config_type = resolve_to_config_type({str: Noneable(int)}) - assert post_process_config(nullable_map_config_type, {"foo": 5}).value == {"foo": 5} - assert post_process_config(nullable_map_config_type, {"foo": None}).value == {"foo": None} - assert post_process_config(nullable_map_config_type, None).value == {} + assert post_process_config(nullable_map_config_type, {"foo": 5}).value == {"foo": 5} # pyright: ignore[reportArgumentType] + assert post_process_config(nullable_map_config_type, {"foo": None}).value == {"foo": None} # pyright: ignore[reportArgumentType] + assert post_process_config(nullable_map_config_type, None).value == {} # pyright: ignore[reportArgumentType] composite_config_type = resolve_to_config_type( { @@ -123,7 +123,7 @@ def test_post_process_config(): any_config_type = resolve_to_config_type(Any) - assert post_process_config(any_config_type, {"foo": "bar"}).value == {"foo": "bar"} + assert post_process_config(any_config_type, {"foo": "bar"}).value == {"foo": "bar"} # pyright: ignore[reportArgumentType] assert post_process_config( ConfigType("gargle", given_name="bargle", kind=ConfigTypeKind.ANY), 3 @@ -185,11 +185,12 @@ def test_post_process_config(): noneable_permissive_config_type = resolve_to_config_type( {"args": Field(Noneable(Permissive()), is_required=False, default_value=None)} ) - assert post_process_config( - noneable_permissive_config_type, {"args": {"foo": "wow", "mau": "mau"}} + assert post_process_config( # pyright: ignore[reportOptionalSubscript] + noneable_permissive_config_type, + {"args": {"foo": "wow", "mau": "mau"}}, ).value["args"] == { "foo": "wow", "mau": "mau", } - assert post_process_config(noneable_permissive_config_type, {"args": {}}).value["args"] == {} - assert 
post_process_config(noneable_permissive_config_type, None).value["args"] is None + assert post_process_config(noneable_permissive_config_type, {"args": {}}).value["args"] == {} # pyright: ignore[reportOptionalSubscript] + assert post_process_config(noneable_permissive_config_type, None).value["args"] is None # pyright: ignore[reportOptionalSubscript] diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_evaluator.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_evaluator.py index 2d858f2894d1e..bc640e054effd 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_evaluator.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_evaluator.py @@ -30,12 +30,12 @@ def test_evaluate_scalar_failure(): result = eval_config_value_from_dagster_type(String, 2343) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert not error.stack.entries - assert error.error_data.config_type_snap.given_name == "String" - assert error.error_data.value_rep == "2343" + assert error.error_data.config_type_snap.given_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert error.error_data.value_rep == "2343" # pyright: ignore[reportAttributeAccessIssue] SingleLevelShape = Shape({"level_one": Field(String)}) @@ -53,11 +53,11 @@ def test_single_level_scalar_mismatch(): result = eval_config_value_from_dagster_type(SingleLevelShape, value) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = 
result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 1 - assert error.stack.entries[0].field_name == "level_one" + assert error.stack.entries[0].field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] def test_single_level_dict_not_a_dict(): @@ -65,8 +65,8 @@ def test_single_level_dict_not_a_dict(): result = eval_config_value_from_dagster_type(SingleLevelShape, value) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert not error.stack.entries @@ -75,11 +75,11 @@ def test_root_missing_field(): result = eval_config_value_from_dagster_type(SingleLevelShape, {}) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD assert len(result.errors_at_level()) == 1 - assert error.error_data.field_name == "level_one" + assert error.error_data.field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] DoubleLevelShape = Shape( @@ -109,7 +109,7 @@ def test_nested_success(): assert isinstance(result, EvaluateValueResult) assert result.success - assert result.value["level_one"]["int_field"] == 989 + assert result.value["level_one"]["int_field"] == 989 # pyright: ignore[reportOptionalSubscript] def test_nested_error_one_field_not_defined(): @@ -125,13 +125,13 @@ def test_nested_error_one_field_not_defined(): result = eval_config_value_from_dagster_type(DoubleLevelShape, value) assert not 
result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED - assert error.error_data.field_name == "no_field_one" + assert error.error_data.field_name == "no_field_one" # pyright: ignore[reportAttributeAccessIssue] assert len(error.stack.entries) == 1 stack_entry = error.stack.entries[0] - assert stack_entry.field_name == "level_one" + assert stack_entry.field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] def get_field_name_error(result, field_name): @@ -155,13 +155,13 @@ def test_nested_error_two_fields_not_defined(): result = eval_config_value_from_dagster_type(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] - fields_error = result.errors[0] + fields_error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert fields_error.reason == DagsterEvaluationErrorReason.FIELDS_NOT_DEFINED - assert fields_error.error_data.field_names == ["no_field_one", "no_field_two"] + assert fields_error.error_data.field_names == ["no_field_one", "no_field_two"] # pyright: ignore[reportAttributeAccessIssue] def test_nested_error_missing_fields(): @@ -169,10 +169,10 @@ def test_nested_error_missing_fields(): result = eval_config_value_from_dagster_type(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD - assert error.error_data.field_name == "bool_field" + assert error.error_data.field_name == "bool_field" # pyright: ignore[reportAttributeAccessIssue] def 
test_nested_error_multiple_missing_fields(): @@ -180,11 +180,11 @@ def test_nested_error_multiple_missing_fields(): result = eval_config_value_from_dagster_type(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] - fields_error = result.errors[0] + fields_error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert fields_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS - assert fields_error.error_data.field_names == ["bool_field", "string_field"] + assert fields_error.error_data.field_names == ["bool_field", "string_field"] # pyright: ignore[reportAttributeAccessIssue] def test_nested_missing_and_not_defined(): @@ -192,16 +192,16 @@ def test_nested_missing_and_not_defined(): result = eval_config_value_from_dagster_type(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 2 + assert len(result.errors) == 2 # pyright: ignore[reportArgumentType] fields_error = next( error - for error in result.errors + for error in result.errors # pyright: ignore[reportOptionalIterable] if error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS ) assert fields_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS - assert fields_error.error_data.field_names == ["bool_field", "string_field"] + assert fields_error.error_data.field_names == ["bool_field", "string_field"] # pyright: ignore[reportAttributeAccessIssue] assert ( get_field_name_error(result, "not_defined").reason @@ -245,14 +245,14 @@ def test_deep_scalar(): result = eval_config_value_from_dagster_type(MultiLevelShapeType, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH - assert 
error.error_data.config_type_snap.given_name == "String" - assert error.error_data.value_rep == "123" + assert error.error_data.config_type_snap.given_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert error.error_data.value_rep == "123" # pyright: ignore[reportAttributeAccessIssue] assert len(error.stack.entries) == 3 - assert [entry.field_name for entry in error.stack.entries] == [ + assert [entry.field_name for entry in error.stack.entries] == [ # pyright: ignore[reportAttributeAccessIssue] "level_two_dict", "level_three_dict", "level_three_string", @@ -278,19 +278,19 @@ def test_deep_mixed_level_errors(): result = eval_config_value_from_dagster_type(MultiLevelShapeType, value) assert not result.success - assert len(result.errors) == 3 + assert len(result.errors) == 3 # pyright: ignore[reportArgumentType] root_errors = result.errors_at_level() assert len(root_errors) == 1 root_error = root_errors[0] assert root_error.reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED - assert root_error.error_data.field_name == "level_one_not_defined" + assert root_error.error_data.field_name == "level_one_not_defined" # pyright: ignore[reportAttributeAccessIssue] level_two_errors = result.errors_at_level("level_two_dict") assert len(level_two_errors) == 1 level_two_error = level_two_errors[0] assert level_two_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD - assert level_two_error.error_data.field_name == "level_two_int_field" + assert level_two_error.error_data.field_name == "level_two_int_field" # pyright: ignore[reportAttributeAccessIssue] assert not result.errors_at_level("level_two_dict", "level_three_dict") @@ -321,16 +321,16 @@ def test_example_selector_error_top_level_type(): result = eval_config_value_from_dagster_type(ExampleSelector, "kjsdkf") assert not result.success assert result.value is None - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + 
assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_example_selector_wrong_field(): result = eval_config_value_from_dagster_type(ExampleSelector, {"nope": 234}) assert not result.success assert result.value is None - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED # pyright: ignore[reportOptionalSubscript] def test_example_selector_multiple_fields(): @@ -339,8 +339,8 @@ def test_example_selector_multiple_fields(): ) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.SELECTOR_FIELD_ERROR + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.SELECTOR_FIELD_ERROR # pyright: ignore[reportOptionalSubscript] def test_selector_within_dict_no_subfields(): @@ -348,9 +348,9 @@ def test_selector_within_dict_no_subfields(): Shape({"selector": Field(ExampleSelector)}), {"selector": {}} ) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] assert ( - result.errors[0].message + result.errors[0].message # pyright: ignore[reportOptionalSubscript] == "Must specify a field at path root:selector if more than one field " "is defined. 
Defined fields: ['option_one', 'option_two']" ) @@ -396,16 +396,16 @@ def test_evaluate_map_float(): def test_evaluate_map_error_item_mismatch(): result = eval_config_value_from_dagster_type({str: str}, {"a": 1}) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_map_error_top_level_mismatch(): string_map = {str: str} result = eval_config_value_from_dagster_type(string_map, 1) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_double_map(): @@ -430,8 +430,8 @@ def test_config_map_in_dict_error(): value = {"nested_map": {"a": 1, "b": "bar", "c": 3}} result = eval_config_value_from_dagster_type(nested_map, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -448,8 +448,8 @@ def test_config_map_in_dict_error_two_errors(): value = {"nested_map": {"a": 1, 5: 3, "c": "bar"}} result = eval_config_value_from_dagster_type(nested_map, value) assert not result.success - assert len(result.errors) == 2 - error = result.errors[0] + assert len(result.errors) == 2 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: 
ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -458,7 +458,7 @@ def test_config_map_in_dict_error_two_errors(): map_entry = error.stack.entries[1] assert isinstance(map_entry, EvaluationStackMapKeyEntry) assert map_entry.map_key == 5 - map_entry = result.errors[1].stack.entries[1] + map_entry = result.errors[1].stack.entries[1] # pyright: ignore[reportOptionalSubscript] assert isinstance(map_entry, EvaluationStackMapValueEntry) assert map_entry.map_key == "c" @@ -499,7 +499,7 @@ def test_config_double_map_double_error(): } error_result = eval_config_value_from_dagster_type(nested_maps, error_value) assert not error_result.success - assert len(error_result.errors) == 2 + assert len(error_result.errors) == 2 # pyright: ignore[reportArgumentType] def test_evaluate_list_string(): @@ -512,16 +512,16 @@ def test_evaluate_list_string(): def test_evaluate_list_error_item_mismatch(): result = eval_config_value_from_dagster_type([str], [1]) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_list_error_top_level_mismatch(): string_list = [str] result = eval_config_value_from_dagster_type(string_list, 1) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_double_list(): @@ -546,8 +546,8 @@ def test_config_list_in_dict_error(): 
value = {"nested_list": [1, "bar", 3]} result = eval_config_value_from_dagster_type(nested_list, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -579,7 +579,7 @@ def test_config_double_list_double_error(): error_value = {"nested_list_one": "kjdfkdj", "nested_list_two": ["bar", 2]} error_result = eval_config_value_from_dagster_type(nested_lists, error_value) assert not error_result.success - assert len(error_result.errors) == 2 + assert len(error_result.errors) == 2 # pyright: ignore[reportArgumentType] def test_nullable_int(): @@ -677,7 +677,7 @@ def test_post_process_error(): Shape({"foo": StringSource}), {"foo": {"env": "THIS_ENV_VAR_DOES_NOT_EXIST"}} ) assert not error_result.success - assert len(error_result.errors) == 1 - error = error_result.errors[0] + assert len(error_result.errors) == 1 # pyright: ignore[reportArgumentType] + error = error_result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.FAILED_POST_PROCESSING assert len(error.stack.entries) == 1 diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_validate.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_validate.py index 474125c4fe6de..7851c8b26a818 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_validate.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/evaluator_tests/test_validate.py @@ -20,12 +20,12 @@ def test_parse_scalar_failure(): result = validate_config(str, 2343) assert not result.success assert result.value is None - assert 
len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert not error.stack.entries - assert error.error_data.config_type_snap.given_name == "String" - assert error.error_data.value_rep == "2343" + assert error.error_data.config_type_snap.given_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert error.error_data.value_rep == "2343" # pyright: ignore[reportAttributeAccessIssue] SingleLevelShape = Shape({"level_one": Field(str)}) @@ -41,22 +41,22 @@ def test_single_level_scalar_mismatch(): result = validate_config(SingleLevelShape, value) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 1 - assert error.stack.entries[0].field_name == "level_one" + assert error.stack.entries[0].field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] def test_root_missing_field(): result = validate_config(SingleLevelShape, {}) assert not result.success assert result.value is None - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD assert len(result.errors_at_level()) == 1 - assert error.error_data.field_name == "level_one" + assert error.error_data.field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] DoubleLevelShape = Shape( @@ -94,13 +94,13 @@ def test_nested_error_one_field_not_defined(): 
result = validate_config(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED - assert error.error_data.field_name == "no_field_one" + assert error.error_data.field_name == "no_field_one" # pyright: ignore[reportAttributeAccessIssue] assert len(error.stack.entries) == 1 stack_entry = error.stack.entries[0] - assert stack_entry.field_name == "level_one" + assert stack_entry.field_name == "level_one" # pyright: ignore[reportAttributeAccessIssue] def test_nested_error_two_fields_not_defined(): @@ -117,13 +117,13 @@ def test_nested_error_two_fields_not_defined(): result = validate_config(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] - fields_error = result.errors[0] + fields_error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert fields_error.reason == DagsterEvaluationErrorReason.FIELDS_NOT_DEFINED - assert fields_error.error_data.field_names == ["no_field_one", "no_field_two"] + assert fields_error.error_data.field_names == ["no_field_one", "no_field_two"] # pyright: ignore[reportAttributeAccessIssue] def test_nested_error_missing_fields(): @@ -131,10 +131,10 @@ def test_nested_error_missing_fields(): result = validate_config(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD - assert error.error_data.field_name == "bool_field" + assert error.error_data.field_name == "bool_field" # pyright: 
ignore[reportAttributeAccessIssue] def test_nested_error_multiple_missing_fields(): @@ -142,11 +142,11 @@ def test_nested_error_multiple_missing_fields(): result = validate_config(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] - fields_error = result.errors[0] + fields_error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert fields_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS - assert fields_error.error_data.field_names == ["bool_field", "string_field"] + assert fields_error.error_data.field_names == ["bool_field", "string_field"] # pyright: ignore[reportAttributeAccessIssue] def test_nested_missing_and_not_defined(): @@ -154,16 +154,16 @@ def test_nested_missing_and_not_defined(): result = validate_config(DoubleLevelShape, value) assert not result.success - assert len(result.errors) == 2 + assert len(result.errors) == 2 # pyright: ignore[reportArgumentType] fields_error = next( error - for error in result.errors + for error in result.errors # pyright: ignore[reportOptionalIterable] if error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS ) assert fields_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELDS - assert fields_error.error_data.field_names == ["bool_field", "string_field"] + assert fields_error.error_data.field_names == ["bool_field", "string_field"] # pyright: ignore[reportAttributeAccessIssue] assert ( get_field_name_error(result, "not_defined").reason @@ -191,9 +191,9 @@ def test_shape_with_field_substitutions_collisions(): value = {"foo_field": {}, "bar_field": "world", "foo": {}} result = validate_config(FieldSubShape, value) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] - collision_error = result.errors[0] + collision_error = result.errors[0] # pyright: ignore[reportOptionalSubscript] 
assert collision_error.reason == DagsterEvaluationErrorReason.FIELD_ALIAS_COLLISION @@ -231,14 +231,14 @@ def test_deep_scalar(): result = validate_config(MultiLevelShapeType, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH - assert error.error_data.config_type_snap.given_name == "String" - assert error.error_data.value_rep == "123" + assert error.error_data.config_type_snap.given_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert error.error_data.value_rep == "123" # pyright: ignore[reportAttributeAccessIssue] assert len(error.stack.entries) == 3 - assert [entry.field_name for entry in error.stack.entries] == [ + assert [entry.field_name for entry in error.stack.entries] == [ # pyright: ignore[reportAttributeAccessIssue] "level_two_dict", "level_three_dict", "level_three_string", @@ -264,19 +264,19 @@ def test_deep_mixed_level_errors(): result = validate_config(MultiLevelShapeType, value) assert not result.success - assert len(result.errors) == 3 + assert len(result.errors) == 3 # pyright: ignore[reportArgumentType] root_errors = result.errors_at_level() assert len(root_errors) == 1 root_error = root_errors[0] assert root_error.reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED - assert root_error.error_data.field_name == "level_one_not_defined" + assert root_error.error_data.field_name == "level_one_not_defined" # pyright: ignore[reportAttributeAccessIssue] level_two_errors = result.errors_at_level("level_two_dict") assert len(level_two_errors) == 1 level_two_error = level_two_errors[0] assert level_two_error.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD - assert level_two_error.error_data.field_name == "level_two_int_field" + assert 
level_two_error.error_data.field_name == "level_two_int_field" # pyright: ignore[reportAttributeAccessIssue] assert not result.errors_at_level("level_two_dict", "level_three_dict") @@ -307,32 +307,32 @@ def test_example_selector_error_top_level_type(): result = validate_config(ExampleSelector, "kjsdkf") assert not result.success assert result.value is None - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_example_selector_wrong_field(): result = validate_config(ExampleSelector, {"nope": 234}) assert not result.success assert result.value is None - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.FIELD_NOT_DEFINED # pyright: ignore[reportOptionalSubscript] def test_example_selector_multiple_fields(): result = validate_config(ExampleSelector, {"option_one": "foo", "option_two": "boo"}) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.SELECTOR_FIELD_ERROR + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.SELECTOR_FIELD_ERROR # pyright: ignore[reportOptionalSubscript] def test_selector_within_dict_no_subfields(): result = validate_config(Shape({"selector": Field(ExampleSelector)}), {"selector": {}}) assert not result.success - assert len(result.errors) == 1 + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] assert ( - result.errors[0].message + result.errors[0].message # pyright: ignore[reportOptionalSubscript] == "Must 
specify a field at path root:selector if more than one field " "is defined. Defined fields: ['option_one', 'option_two']" ) @@ -365,22 +365,22 @@ def test_evaluate_map_float(): def test_evaluate_map_error_item_mismatch(): result = validate_config({str: str}, {"x": 5}) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_map_error_key_mismatch(): result = validate_config({str: str}, {5: "foo"}) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_map_error_top_level_mismatch(): result = validate_config({str: str}, 1) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_double_map(): @@ -404,8 +404,8 @@ def test_config_map_in_dict_error(): value = {"nested_map": {"a": 1, "b": "bar", "c": 3}} result = validate_config(nested_map, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert 
len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -422,8 +422,8 @@ def test_config_map_in_dict_error_double_error(): value = {"nested_map": {"a": 1, 3: 3, "c": "asdf"}} result = validate_config(nested_map, value) assert not result.success - assert len(result.errors) == 2 - error = result.errors[0] + assert len(result.errors) == 2 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -432,7 +432,7 @@ def test_config_map_in_dict_error_double_error(): map_entry = error.stack.entries[1] assert isinstance(map_entry, EvaluationStackMapKeyEntry) assert map_entry.map_key == 3 - map_entry = result.errors[1].stack.entries[1] + map_entry = result.errors[1].stack.entries[1] # pyright: ignore[reportOptionalSubscript] assert isinstance(map_entry, EvaluationStackMapValueEntry) assert map_entry.map_key == "c" @@ -446,15 +446,15 @@ def test_evaluate_list_string(): def test_evaluate_list_error_item_mismatch(): result = validate_config([str], [1]) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_list_error_top_level_mismatch(): result = validate_config([str], 1) assert not result.success - assert len(result.errors) == 1 - assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + assert result.errors[0].reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH # pyright: ignore[reportOptionalSubscript] def test_evaluate_double_list(): @@ -478,8 +478,8 
@@ def test_config_list_in_dict_error(): value = {"nested_list": [1, "bar", 3]} result = validate_config(nested_list, value) assert not result.success - assert len(result.errors) == 1 - error = result.errors[0] + assert len(result.errors) == 1 # pyright: ignore[reportArgumentType] + error = result.errors[0] # pyright: ignore[reportOptionalSubscript] assert error.reason == DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH assert len(error.stack.entries) == 2 stack_entry = error.stack.entries[0] @@ -511,7 +511,7 @@ def test_config_double_list_double_error(): error_value = {"nested_list_one": "kjdfkdj", "nested_list_two": ["bar", 2]} error_result = validate_config(nested_lists, error_value) assert not error_result.success - assert len(error_result.errors) == 2 + assert len(error_result.errors) == 2 # pyright: ignore[reportArgumentType] def test_nullable_int(): diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_default_required.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_default_required.py index 0cc7787dd5c33..d8d2722268d1e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_default_required.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_default_required.py @@ -7,7 +7,7 @@ def test_default_implies_not_required_field_correct(): def return_default_to_one(context): return context.op_config["default_to_one"] - default_to_one_field = return_default_to_one.config_schema.as_field().config_type.fields[ + default_to_one_field = return_default_to_one.config_schema.as_field().config_type.fields[ # pyright: ignore[reportAttributeAccessIssue] "default_to_one" ] assert default_to_one_field.is_required is False diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_type_system.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_type_system.py index 
d67b5e7139b81..1767c3701205c 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_type_system.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_config_type_system.py @@ -139,7 +139,7 @@ def _multiple_required_fields_config_permissive_dict(): def _validate(config_field, value): res = process_config(config_field.config_type, value) - assert res.success, res.errors[0].message + assert res.success, res.errors[0].message # pyright: ignore[reportOptionalSubscript] return res.value @@ -670,8 +670,8 @@ def test_build_optionality(): } ).config_type - assert optional_test_type.fields["required"].is_required - assert optional_test_type.fields["optional"].is_required is False + assert optional_test_type.fields["required"].is_required # pyright: ignore[reportAttributeAccessIssue] + assert optional_test_type.fields["optional"].is_required is False # pyright: ignore[reportAttributeAccessIssue] def test_wrong_op_name(): @@ -927,7 +927,7 @@ def test_list_in_config_error(): with pytest.raises(DagsterInvalidDefinitionError, match=re.escape(error_msg)): - @op(config_schema=List[int]) + @op(config_schema=List[int]) # pyright: ignore[reportArgumentType] def _no_runtime_list_in_config(_): pass @@ -935,7 +935,7 @@ def _no_runtime_list_in_config(_): def test_working_map_path(): called = {} - @op(config_schema={str: int}) + @op(config_schema={str: int}) # pyright: ignore[reportArgumentType] def required_map_int_op(context): assert context.op_config == {"foo": 1, "bar": 2} called["yup"] = True @@ -955,7 +955,7 @@ def job_def(): def test_item_error_map_path(): called = {} - @op(config_schema={str: int}) + @op(config_schema={str: int}) # pyright: ignore[reportArgumentType] def required_map_int_op(context): assert context.op_config == {"foo": 1, "bar": 2} called["yup"] = True @@ -1146,7 +1146,7 @@ def _bare_open_set(_): with pytest.raises(DagsterInvalidDefinitionError, match=set_error_msg): - @op(config_schema=Set) + 
@op(config_schema=Set) # pyright: ignore[reportArgumentType] def _bare_open_set(_): pass @@ -1158,7 +1158,7 @@ def _bare_closed_set(_): with pytest.raises(DagsterInvalidDefinitionError, match=set_error_msg): - @op(config_schema=Set[int]) + @op(config_schema=Set[int]) # pyright: ignore[reportArgumentType] def _bare_closed_set(_): pass diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_error_messages.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_error_messages.py index 1851aded7f112..bec51a0e570f1 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_error_messages.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_error_messages.py @@ -11,7 +11,7 @@ def test_invalid_optional_in_config(): match=re.escape("You have passed an instance of DagsterType Int? to the config system"), ): - @op(config_schema=Optional[int]) + @op(config_schema=Optional[int]) # pyright: ignore[reportArgumentType] def _op(_): pass @@ -20,7 +20,7 @@ def test_invalid_dict_call(): # prior to 0.7.0 dicts in config contexts were callable with pytest.raises(TypeError, match=re.escape("'DagsterDictApi' object is not callable")): - @op(config_schema=Dict({"foo": int})) + @op(config_schema=Dict({"foo": int})) # pyright: ignore[reportCallIssue] def _op(_): pass @@ -34,7 +34,7 @@ def test_list_in_config(): ), ): - @op(config_schema=List[int]) + @op(config_schema=List[int]) # pyright: ignore[reportArgumentType] def _op(_): pass @@ -55,6 +55,6 @@ def test_non_scalar_key_map(): match=re.escape("Map dict must have a scalar type as its only key."), ): - @op(config_schema={Noneable(int): str}) + @op(config_schema={Noneable(int): str}) # pyright: ignore[reportArgumentType] def _op(_): pass diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_new_config_types.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_new_config_types.py index 
5985085a3a82a..a3694050f2db4 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_new_config_types.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_new_config_types.py @@ -7,7 +7,7 @@ def test_config_any(): assert validate_config(any_inst, 1).success assert validate_config(any_inst, None).success assert validate_config(any_inst, "r").success - assert any_inst.kind == ConfigTypeKind.ANY + assert any_inst.kind == ConfigTypeKind.ANY # pyright: ignore[reportAttributeAccessIssue] def test_config_int(): diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py index 1bf398122ac52..b0f1fb8df4c89 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_source_types.py @@ -6,8 +6,8 @@ def test_string_source(): - assert process_config(StringSource, "foo").success - assert not process_config(StringSource, 1).success + assert process_config(StringSource, "foo").success # pyright: ignore[reportArgumentType] + assert not process_config(StringSource, 1).success # pyright: ignore[reportArgumentType] assert not process_config(StringSource, {"env": 1}).success @@ -18,7 +18,7 @@ def test_string_source(): 'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" ' "which is not set. In order for this execution to succeed it must be set in " "this environment." 
- in process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message + in process_config(StringSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message # pyright: ignore[reportOptionalSubscript] ) with environ({"DAGSTER_TEST_ENV_VAR": "baz"}): @@ -27,8 +27,8 @@ def test_string_source(): def test_int_source(): - assert process_config(IntSource, 1).success - assert not process_config(IntSource, "foo").success + assert process_config(IntSource, 1).success # pyright: ignore[reportArgumentType] + assert not process_config(IntSource, "foo").success # pyright: ignore[reportArgumentType] assert not process_config(IntSource, {"env": 1}).success @@ -39,7 +39,7 @@ def test_int_source(): 'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" ' "which is not set. In order for this execution to succeed it must be set in " "this environment." - in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message + in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message # pyright: ignore[reportOptionalSubscript] ) with environ({"DAGSTER_TEST_ENV_VAR": "4"}): @@ -51,34 +51,35 @@ def test_int_source(): assert ( 'Value "four" stored in env variable "DAGSTER_TEST_ENV_VAR" cannot ' "be coerced into an int." - in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message + in process_config(IntSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message # pyright: ignore[reportOptionalSubscript] ) def test_noneable_string_source_array(): - assert process_config(Noneable(Array(StringSource)), []).success - assert process_config(Noneable(Array(StringSource)), None).success + assert process_config(Noneable(Array(StringSource)), []).success # pyright: ignore[reportArgumentType] + assert process_config(Noneable(Array(StringSource)), None).success # pyright: ignore[reportArgumentType] assert ( 'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" ' "which is not set. 
In order for this execution to succeed it must be set in " "this environment." - in process_config(Noneable(Array(StringSource)), ["test", {"env": "DAGSTER_TEST_ENV_VAR"}]) + in process_config(Noneable(Array(StringSource)), ["test", {"env": "DAGSTER_TEST_ENV_VAR"}]) # pyright: ignore[reportArgumentType,reportOptionalSubscript] .errors[0] .message ) with environ({"DAGSTER_TEST_ENV_VAR": "baz"}): assert process_config( - Noneable(Array(StringSource)), ["test", {"env": "DAGSTER_TEST_ENV_VAR"}] + Noneable(Array(StringSource)), + ["test", {"env": "DAGSTER_TEST_ENV_VAR"}], # pyright: ignore[reportArgumentType] ).success def test_bool_source(): - assert process_config(BoolSource, True).success - assert process_config(BoolSource, False).success - assert not process_config(BoolSource, "False").success - assert not process_config(BoolSource, "foo").success - assert not process_config(BoolSource, 1).success + assert process_config(BoolSource, True).success # pyright: ignore[reportArgumentType] + assert process_config(BoolSource, False).success # pyright: ignore[reportArgumentType] + assert not process_config(BoolSource, "False").success # pyright: ignore[reportArgumentType] + assert not process_config(BoolSource, "foo").success # pyright: ignore[reportArgumentType] + assert not process_config(BoolSource, 1).success # pyright: ignore[reportArgumentType] assert not process_config(BoolSource, {"env": 1}).success @@ -89,7 +90,7 @@ def test_bool_source(): 'You have attempted to fetch the environment variable "DAGSTER_TEST_ENV_VAR" ' "which is not set. In order for this execution to succeed it must be set in " "this environment." 
- in process_config(BoolSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message + in process_config(BoolSource, {"env": "DAGSTER_TEST_ENV_VAR"}).errors[0].message # pyright: ignore[reportOptionalSubscript] ) with environ({"DAGSTER_TEST_ENV_VAR": ""}): diff --git a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_type_printer.py b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_type_printer.py index 1ecd9fabcb6a7..57b7e7e15beb8 100644 --- a/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_type_printer.py +++ b/python_modules/dagster/dagster_tests/core_tests/config_types_tests/test_type_printer.py @@ -9,10 +9,11 @@ def assert_inner_types(parent_type, *dagster_types): config_type = resolve_to_config_type(parent_type) - config_schema_snapshot = config_type.get_schema_snapshot() + config_schema_snapshot = config_type.get_schema_snapshot() # pyright: ignore[reportAttributeAccessIssue] all_type_keys = get_recursive_type_keys( - snap_from_config_type(config_type), config_schema_snapshot + snap_from_config_type(config_type), # pyright: ignore[reportArgumentType] + config_schema_snapshot, ) assert set(all_type_keys) == set( diff --git a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/decorators_tests/test_sensor_decorator.py b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/decorators_tests/test_sensor_decorator.py index ec43e003bfc70..587e1f2a2221a 100644 --- a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/decorators_tests/test_sensor_decorator.py +++ b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/decorators_tests/test_sensor_decorator.py @@ -17,12 +17,12 @@ def asset3(): ... @sensor(asset_selection=["asset1", "asset2"]) def sensor1(): ... 
- assert sensor1.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} + assert sensor1.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} # pyright: ignore[reportOptionalMemberAccess] @sensor(asset_selection=[asset1, asset2]) def sensor2(): ... - assert sensor2.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} + assert sensor2.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} # pyright: ignore[reportOptionalMemberAccess] def test_jobless_sensor_uses_eval_fn_name(): diff --git a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_dependency.py b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_dependency.py index c1cfae169d045..a2b9c3f2c3989 100644 --- a/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_dependency.py +++ b/python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_dependency.py @@ -2,16 +2,16 @@ def test_node_input_handle_str(): - assert str(NodeInputHandle(NodeHandle("foo", parent=None), "bar")) == "foo:bar" + assert str(NodeInputHandle(NodeHandle("foo", parent=None), "bar")) == "foo:bar" # pyright: ignore[reportCallIssue,reportArgumentType] assert ( - str(NodeInputHandle(NodeHandle("foo", parent=NodeHandle("baz", parent=None)), "bar")) + str(NodeInputHandle(NodeHandle("foo", parent=NodeHandle("baz", parent=None)), "bar")) # pyright: ignore[reportCallIssue,reportArgumentType] == "baz.foo:bar" ) def test_node_output_handle_str(): - assert str(NodeOutputHandle(NodeHandle("foo", parent=None), "bar")) == "foo:bar" + assert str(NodeOutputHandle(NodeHandle("foo", parent=None), "bar")) == "foo:bar" # pyright: ignore[reportCallIssue,reportArgumentType] assert ( - str(NodeOutputHandle(NodeHandle("foo", parent=NodeHandle("baz", parent=None)), "bar")) + str(NodeOutputHandle(NodeHandle("foo", parent=NodeHandle("baz", parent=None)), "bar")) # pyright: 
ignore[reportCallIssue,reportArgumentType] == "baz.foo:bar" ) diff --git a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill.py b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill.py index e3ebe7a6ae057..b0c71d4084069 100644 --- a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill.py +++ b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill.py @@ -398,7 +398,7 @@ def test_materializations_outside_of_backfill(): instance=instance, asset_graph=asset_graph, assets_by_repo_name=assets_by_repo_name, - backfill_data=make_backfill_data("all", asset_graph, instance, None), + backfill_data=make_backfill_data("all", asset_graph, instance, None), # pyright: ignore[reportArgumentType] fail_asset_partitions=set(), ) @@ -959,19 +959,19 @@ def downstream_weekly_partitioned_asset( counts = completed_backfill_data.get_backfill_status_per_asset_key(asset_graph) assert counts[0].asset_key == unpartitioned_upstream_of_partitioned.key - assert counts[0].backfill_status == AssetBackfillStatus.MATERIALIZED + assert counts[0].backfill_status == AssetBackfillStatus.MATERIALIZED # pyright: ignore[reportAttributeAccessIssue] assert counts[1].asset_key == upstream_daily_partitioned_asset.key - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[1].num_targeted_partitions == 1 + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 # pyright: ignore[reportAttributeAccessIssue] + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: 
ignore[reportAttributeAccessIssue] + assert counts[1].num_targeted_partitions == 1 # pyright: ignore[reportAttributeAccessIssue] assert counts[2].asset_key == downstream_weekly_partitioned_asset.key - assert counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 - assert counts[2].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 - assert counts[2].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[2].num_targeted_partitions == 1 + assert counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[2].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 # pyright: ignore[reportAttributeAccessIssue] + assert counts[2].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[2].num_targeted_partitions == 1 # pyright: ignore[reportAttributeAccessIssue] def test_asset_backfill_status_counts_with_reexecution(): @@ -1011,9 +1011,9 @@ def upstream_success(): counts = backfill_data.get_backfill_status_per_asset_key(asset_graph) assert counts[0].asset_key == upstream_fail.key - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 1 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] materialize( [upstream_success], @@ -1027,9 +1027,9 @@ def upstream_success(): ) counts = backfill_data.get_backfill_status_per_asset_key(asset_graph) 
assert counts[0].asset_key == upstream_fail.key - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 1 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 1 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] def test_asset_backfill_selects_only_existent_partitions(): @@ -1654,7 +1654,7 @@ def first_partitioned(): repo_with_partitioned_root = {"repo": [first_partitioned, second]} assert asset_backfill_data.get_target_root_partitions_subset( get_asset_graph(repo_with_partitioned_root) - ).get_partition_keys() == ["2024-01-01"] + ).get_partition_keys() == ["2024-01-01"] # pyright: ignore[reportOptionalMemberAccess] def test_multi_asset_internal_deps_asset_backfill(): diff --git a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill_with_backfill_policies.py b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill_with_backfill_policies.py index 39d568684830b..68a9a2cb714a3 100644 --- a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill_with_backfill_policies.py +++ b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_asset_backfill_with_backfill_policies.py @@ -182,8 +182,8 @@ def downstream_daily_partitioned_asset(upstream_daily_partitioned_asset): # single run request for partitioned asset, both parent and the children somce they share same # partitions def and backfill policy assert run_request.partition_key is None - assert upstream_daily_partitioned_asset.key in 
run_request.asset_selection - assert downstream_daily_partitioned_asset.key in run_request.asset_selection + assert upstream_daily_partitioned_asset.key in run_request.asset_selection # pyright: ignore[reportOperatorIssue] + assert downstream_daily_partitioned_asset.key in run_request.asset_selection # pyright: ignore[reportOperatorIssue] assert run_request.tags.get(ASSET_PARTITION_RANGE_START_TAG) == "2023-01-01" assert ( run_request.tags.get(ASSET_PARTITION_RANGE_END_TAG) @@ -251,9 +251,9 @@ def downstream_daily_partitioned_asset(upstream_daily_partitioned_asset): assert len(result.run_requests) == 2 for run_request in result.run_requests: - if upstream_daily_partitioned_asset.key in run_request.asset_selection: - assert downstream_daily_partitioned_asset.key in run_request.asset_selection - assert has_different_backfill_policy.key not in run_request.asset_selection + if upstream_daily_partitioned_asset.key in run_request.asset_selection: # pyright: ignore[reportOperatorIssue] + assert downstream_daily_partitioned_asset.key in run_request.asset_selection # pyright: ignore[reportOperatorIssue] + assert has_different_backfill_policy.key not in run_request.asset_selection # pyright: ignore[reportOperatorIssue] def test_asset_backfill_return_single_run_request_for_non_partitioned(): @@ -445,21 +445,21 @@ def downstream_weekly_partitioned_asset(): counts = completed_backfill_data.get_backfill_status_per_asset_key(asset_graph) assert counts[0].asset_key == unpartitioned_upstream_of_partitioned.key - assert counts[0].backfill_status == AssetBackfillStatus.MATERIALIZED + assert counts[0].backfill_status == AssetBackfillStatus.MATERIALIZED # pyright: ignore[reportAttributeAccessIssue] assert counts[1].asset_key == upstream_daily_partitioned_asset.key assert ( - counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == 
num_of_daily_partitions ) - assert counts[1].num_targeted_partitions == num_of_daily_partitions + assert counts[1].num_targeted_partitions == num_of_daily_partitions # pyright: ignore[reportAttributeAccessIssue] assert counts[2].asset_key == downstream_weekly_partitioned_asset.key assert ( - counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == num_of_weekly_partitions ) - assert counts[2].num_targeted_partitions == num_of_weekly_partitions + assert counts[2].num_targeted_partitions == num_of_weekly_partitions # pyright: ignore[reportAttributeAccessIssue] def test_backfill_run_contains_more_than_one_asset(): @@ -528,39 +528,39 @@ def downstream_b(): assert counts[0].asset_key == upstream_a.key assert ( - counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == upstream_num_of_partitions ) - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[0].num_targeted_partitions == upstream_num_of_partitions + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].num_targeted_partitions == upstream_num_of_partitions # pyright: ignore[reportAttributeAccessIssue] assert counts[1].asset_key == upstream_b.key assert ( - counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == upstream_num_of_partitions ) - assert 
counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[1].num_targeted_partitions == upstream_num_of_partitions + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[1].num_targeted_partitions == upstream_num_of_partitions # pyright: ignore[reportAttributeAccessIssue] assert counts[2].asset_key == downstream_a.key assert ( - counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[2].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == downstream_num_of_partitions ) - assert counts[2].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[2].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[2].num_targeted_partitions == downstream_num_of_partitions + assert counts[2].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[2].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[2].num_targeted_partitions == downstream_num_of_partitions # pyright: ignore[reportAttributeAccessIssue] assert counts[3].asset_key == downstream_b.key assert ( - counts[3].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] + counts[3].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] # pyright: ignore[reportAttributeAccessIssue] == downstream_num_of_partitions ) - assert counts[3].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[3].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 - assert counts[3].num_targeted_partitions 
== downstream_num_of_partitions + assert counts[3].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[3].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[3].num_targeted_partitions == downstream_num_of_partitions # pyright: ignore[reportAttributeAccessIssue] def test_dynamic_partitions_multi_run_backfill_policy(): @@ -699,9 +699,9 @@ def downstream_b(): assert len(result.run_requests) == 1 if same_partitions: - assert set(result.run_requests[0].asset_selection) == {upstream_a.key, downstream_b.key} + assert set(result.run_requests[0].asset_selection) == {upstream_a.key, downstream_b.key} # pyright: ignore[reportArgumentType] else: - assert set(result.run_requests[0].asset_selection) == {upstream_a.key} + assert set(result.run_requests[0].asset_selection) == {upstream_a.key} # pyright: ignore[reportArgumentType] assert result.run_requests[0].tags.get(ASSET_PARTITION_RANGE_START_TAG) == "2023-03-01" assert result.run_requests[0].tags.get(ASSET_PARTITION_RANGE_END_TAG) == "2023-03-03" @@ -771,18 +771,18 @@ def downstream_b(): counts = completed_backfill_data.get_backfill_status_per_asset_key(asset_graph) assert counts[0].asset_key == upstream_a.key - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 3 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 3 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[0].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] assert counts[1].asset_key == 
downstream_b.key assert ( - counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 3 + counts[1].partitions_counts_by_status[AssetBackfillStatus.MATERIALIZED] == 3 # pyright: ignore[reportAttributeAccessIssue] if same_partitions else 6 ) - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 - assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.FAILED] == 0 # pyright: ignore[reportAttributeAccessIssue] + assert counts[1].partitions_counts_by_status[AssetBackfillStatus.IN_PROGRESS] == 0 # pyright: ignore[reportAttributeAccessIssue] def test_assets_backfill_with_partition_mapping_without_backfill_policy(): @@ -838,9 +838,9 @@ def downstream_b(): for run_request in result.run_requests: # b should not be materialized in the same run as a if run_request.partition_key == "2023-03-02": - assert set(run_request.asset_selection) == {upstream_a.key} + assert set(run_request.asset_selection) == {upstream_a.key} # pyright: ignore[reportArgumentType] elif run_request.partition_key == "2023-03-03": - assert set(run_request.asset_selection) == {upstream_a.key} + assert set(run_request.asset_selection) == {upstream_a.key} # pyright: ignore[reportArgumentType] else: # should only have the above 2 partitions assert False @@ -959,7 +959,7 @@ def downstream_b(): for run_request in result.run_requests: # there is no parallel runs for downstream_b before upstream_a's targeted partitions are materialized - assert set(run_request.asset_selection) == {upstream_a.key} + assert set(run_request.asset_selection) == {upstream_a.key} # pyright: ignore[reportArgumentType] def test_assets_backfill_with_partition_mapping_with_single_run_backfill_policy(): @@ -1021,7 +1021,7 @@ def downstream_b(): ) assert len(result.run_requests) == 1 - assert set(result.run_requests[0].asset_selection) == {upstream_a.key, downstream_b.key} + assert 
set(result.run_requests[0].asset_selection) == {upstream_a.key, downstream_b.key} # pyright: ignore[reportArgumentType] assert result.run_requests[0].tags.get(ASSET_PARTITION_RANGE_START_TAG) == "2023-03-02" assert result.run_requests[0].tags.get(ASSET_PARTITION_RANGE_END_TAG) == "2023-03-09" diff --git a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_context.py b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_context.py index acc691913d8d8..e980094ecf824 100644 --- a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_context.py +++ b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_context.py @@ -162,21 +162,21 @@ def op_annotation_job(): def test_context_provided_to_multi_asset(): - @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) + @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) # pyright: ignore[reportArgumentType] def no_annotation(context): assert isinstance(context, AssetExecutionContext) return None, None materialize([no_annotation]) - @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) + @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) # pyright: ignore[reportArgumentType] def asset_annotation(context: AssetExecutionContext): assert isinstance(context, AssetExecutionContext) return None, None materialize([asset_annotation]) - @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) + @multi_asset(outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)}) # pyright: ignore[reportArgumentType] def op_annotation(context: OpExecutionContext): assert isinstance(context, OpExecutionContext) # AssetExecutionContext is an instance of OpExecutionContext, so add this additional check @@ -246,7 +246,7 @@ def no_annotation_op(context): return 1 
@graph_multi_asset( - outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} + outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} # pyright: ignore[reportArgumentType] ) def no_annotation_asset(): return layered_op(no_annotation_op()), layered_op(no_annotation_op()) @@ -259,7 +259,7 @@ def asset_annotation_op(context: AssetExecutionContext): return 1 @graph_multi_asset( - outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} + outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} # pyright: ignore[reportArgumentType] ) def asset_annotation_asset(): return layered_op(asset_annotation_op()), layered_op(asset_annotation_op()) @@ -274,7 +274,7 @@ def op_annotation_op(context: OpExecutionContext): return 1 @graph_multi_asset( - outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} + outs={"out1": AssetOut(dagster_type=None), "out2": AssetOut(dagster_type=None)} # pyright: ignore[reportArgumentType] ) def op_annotation_asset(): return layered_op(op_annotation_op()), layered_op(op_annotation_op()) @@ -334,7 +334,7 @@ def execute_assets_and_checks(assets=None, asset_checks=None, raise_on_error: bo def to_check(): return 1 - @asset_check(asset=to_check) + @asset_check(asset=to_check) # pyright: ignore[reportArgumentType] def no_annotation(context): assert isinstance(context, AssetCheckExecutionContext) assert context.check_specs == [ @@ -346,7 +346,7 @@ def no_annotation(context): execute_assets_and_checks(assets=[to_check], asset_checks=[no_annotation]) - @asset_check(asset=to_check) + @asset_check(asset=to_check) # pyright: ignore[reportArgumentType] def asset_annotation(context: AssetExecutionContext): pass @@ -356,7 +356,7 @@ def asset_annotation(context: AssetExecutionContext): ): execute_assets_and_checks(assets=[to_check], asset_checks=[asset_annotation]) - @asset_check(asset=to_check) + @asset_check(asset=to_check) # pyright: 
ignore[reportArgumentType] def op_annotation(context: OpExecutionContext): assert isinstance(context, OpExecutionContext) # AssetExecutionContext is an instance of OpExecutionContext, so add this additional check diff --git a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_run_metrics_thread.py b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_run_metrics_thread.py index d346427c032e4..2dded9566da05 100644 --- a/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_run_metrics_thread.py +++ b/python_modules/dagster/dagster_tests/core_tests/execution_tests/test_run_metrics_thread.py @@ -191,14 +191,14 @@ def test_start_run_metrics_thread(dagster_instance, dagster_run, mock_container_ time.sleep(0.1) - assert thread.is_alive() + assert thread.is_alive() # pyright: ignore[reportOptionalMemberAccess] assert "Starting run metrics thread" in caplog.messages[0] time.sleep(0.1) - shutdown.set() + shutdown.set() # pyright: ignore[reportOptionalMemberAccess] - thread.join() - assert thread.is_alive() is False + thread.join() # pyright: ignore[reportOptionalMemberAccess] + assert thread.is_alive() is False # pyright: ignore[reportOptionalMemberAccess] assert "Shutting down metrics capture thread" in caplog.messages[-1] diff --git a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph.py b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph.py index 641a97ed35658..0e9f9790a1262 100644 --- a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph.py +++ b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph.py @@ -344,11 +344,11 @@ def my_op(_): def my_graph(): my_op() - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def my_logger(_): pass - my_job = my_graph.to_job(logger_defs={"abc": my_logger}) + my_job = my_graph.to_job(logger_defs={"abc": my_logger}) # pyright: ignore[reportArgumentType] assert my_job.loggers == {"abc": 
my_logger} diff --git a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_ins_out.py b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_ins_out.py index 71bd7710e9fdb..83b8009e69400 100644 --- a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_ins_out.py +++ b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_ins_out.py @@ -85,7 +85,7 @@ def echo(in_one, in_two): @graph def my_graph(): - one, two = composite_return_mult() + one, two = composite_return_mult() # pyright: ignore[reportGeneralTypeIssues] echo(one, two) result = composite_return_mult.execute_in_process() diff --git a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_source_asset_input.py b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_source_asset_input.py index 557c5e4977fe7..52724637d1419 100644 --- a/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_source_asset_input.py +++ b/python_modules/dagster/dagster_tests/core_tests/graph_tests/test_graph_source_asset_input.py @@ -23,7 +23,7 @@ def load_input(self, context): self.loaded_input = True assert context.asset_key == asset.key for key, value in expected_metadata.items(): - assert context.upstream_output.definition_metadata[key] == value + assert context.upstream_output.definition_metadata[key] == value # pyright: ignore[reportOptionalMemberAccess] return input_value return MyIOManager() diff --git a/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_def.py b/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_def.py index 1672f0f3c75d0..94f454a32424e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_def.py +++ b/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_def.py @@ -92,19 +92,19 @@ def a_op(_): lambda event: event.event_type == DagsterEventType.HOOK_ERRORED ) assert len(hook_errored_events) == 1 - assert 
hook_errored_events[0].node_handle.name == "a_op_with_hook" + assert hook_errored_events[0].node_handle.name == "a_op_with_hook" # pyright: ignore[reportOptionalMemberAccess] def test_hook_decorator_arg_error(): with pytest.raises(DagsterInvalidDefinitionError, match="does not have required positional"): - @success_hook + @success_hook # pyright: ignore[reportArgumentType] def _(): pass with pytest.raises(DagsterInvalidDefinitionError, match="does not have required positional"): - @failure_hook + @failure_hook # pyright: ignore[reportCallIssue,reportArgumentType] def _(): pass diff --git a/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_invocation.py b/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_invocation.py index 43cc82c3d12f2..36b4425d5c34c 100644 --- a/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_invocation.py +++ b/python_modules/dagster/dagster_tests/core_tests/hook_tests/test_hook_invocation.py @@ -114,12 +114,12 @@ def my_hook(_): DagsterInvalidInvocationError, match="Decorated function expects one parameter, _, but 0 were provided.", ): - my_hook() + my_hook() # pyright: ignore[reportCallIssue] with pytest.raises( DagsterInvalidInvocationError, match="Could not find expected argument '_'." 
): - my_hook(foo=None) + my_hook(foo=None) # pyright: ignore[reportCallIssue] @pytest.mark.parametrize( @@ -137,7 +137,7 @@ def my_hook_reqs_resources(context, _): # type: ignore # (test rename) hook = decorator(my_hook_reqs_resources) else: - def my_hook_reqs_resources(context): # type: ignore # (test rename) + def my_hook_reqs_resources(context): assert context.resources.foo == "foo" assert context.resources.bar == "bar" diff --git a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_custom_repository_data.py b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_custom_repository_data.py index c5e29713cfcd2..d6de017d383c9 100644 --- a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_custom_repository_data.py +++ b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_custom_repository_data.py @@ -52,7 +52,7 @@ def get_env_vars_by_top_level_resource(self): return {} -@repository +@repository # pyright: ignore[reportArgumentType] def bar_repo(): return TestDynamicRepositoryData() diff --git a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_external_data.py b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_external_data.py index 599c93fa47937..bad196e8ebce9 100644 --- a/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_external_data.py +++ b/python_modules/dagster/dagster_tests/core_tests/host_representation_tests/test_external_data.py @@ -1343,4 +1343,4 @@ def test_back_compat_team_owners(): } asset_node_snap = unpack_value(packed_1_7_7_external_asset) - assert asset_node_snap.owners == ["team:foo", "hi@me.com"] + assert asset_node_snap.owners == ["team:foo", "hi@me.com"] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/core_tests/instance_tests/test_instance.py 
b/python_modules/dagster/dagster_tests/core_tests/instance_tests/test_instance.py index d3d4528715538..11d6c94ddafea 100644 --- a/python_modules/dagster/dagster_tests/core_tests/instance_tests/test_instance.py +++ b/python_modules/dagster/dagster_tests/core_tests/instance_tests/test_instance.py @@ -132,12 +132,12 @@ def test_unified_storage_env_var(tmpdir): } } ) as instance: - assert _runs_directory(str(tmpdir)) in instance.run_storage._conn_string # noqa: SLF001 + assert _runs_directory(str(tmpdir)) in instance.run_storage._conn_string # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] assert ( - _event_logs_directory(str(tmpdir)) == instance.event_log_storage._base_dir + "/" # noqa: SLF001 + _event_logs_directory(str(tmpdir)) == instance.event_log_storage._base_dir + "/" # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) assert ( - _schedule_directory(str(tmpdir)) in instance.schedule_storage._conn_string # noqa: SLF001 + _schedule_directory(str(tmpdir)) in instance.schedule_storage._conn_string # noqa: SLF001 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) @@ -262,7 +262,7 @@ def test_create_job_snapshot(): run = instance.get_run_by_id(result.run_id) - assert run.job_snapshot_id == noop_job.get_job_snapshot().snapshot_id + assert run.job_snapshot_id == noop_job.get_job_snapshot().snapshot_id # pyright: ignore[reportOptionalMemberAccess] def test_create_execution_plan_snapshot(): @@ -277,8 +277,8 @@ def test_create_execution_plan_snapshot(): run = instance.get_run_by_id(result.run_id) - assert run.execution_plan_snapshot_id == ep_snapshot_id - assert run.execution_plan_snapshot_id == create_execution_plan_snapshot_id(ep_snapshot) + assert run.execution_plan_snapshot_id == ep_snapshot_id # pyright: ignore[reportOptionalMemberAccess] + assert run.execution_plan_snapshot_id == create_execution_plan_snapshot_id(ep_snapshot) # pyright: ignore[reportOptionalMemberAccess] def test_submit_run(): @@ -306,8 +306,8 @@ def 
test_submit_run(): instance.submit_run(run.run_id, workspace) - assert len(instance.run_coordinator.queue()) == 1 - assert instance.run_coordinator.queue()[0].run_id == run.run_id + assert len(instance.run_coordinator.queue()) == 1 # pyright: ignore[reportAttributeAccessIssue] + assert instance.run_coordinator.queue()[0].run_id == run.run_id # pyright: ignore[reportAttributeAccessIssue] def test_create_run_with_asset_partitions(): @@ -597,7 +597,7 @@ def test_dagster_env_vars_from_dotenv_file(): ) with new_cwd(working_dir): - with environ({"DAGSTER_HOME": None}): + with environ({"DAGSTER_HOME": None}): # pyright: ignore[reportArgumentType] # without .env file with a DAGSTER_HOME, loading fails with pytest.raises(DagsterHomeNotSetError): with get_instance_for_cli(): @@ -611,7 +611,7 @@ def test_dagster_env_vars_from_dotenv_file(): with get_instance_for_cli() as instance: assert ( - _runs_directory(str(storage_dir)) in instance.run_storage._conn_string # noqa: SLF001 + _runs_directory(str(storage_dir)) in instance.run_storage._conn_string # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) @@ -638,7 +638,7 @@ def config_schema(cls): @staticmethod def config_defaults(base_dir): defaults = InstanceRef.config_defaults(base_dir) - defaults["run_coordinator"] = ConfigurableClassData( + defaults["run_coordinator"] = ConfigurableClassData( # pyright: ignore[reportIndexIssue] "dagster._core.run_coordinator.queued_run_coordinator", "QueuedRunCoordinator", yaml.dump({}), @@ -662,8 +662,8 @@ def test_instance_subclass(): # Likely because the imported/dynamically loaded class is different from the local one assert subclass_instance.__class__.__name__ == "TestInstanceSubclass" - assert subclass_instance.foo() == "bar" - assert subclass_instance.baz is None + assert subclass_instance.foo() == "bar" # pyright: ignore[reportAttributeAccessIssue] + assert subclass_instance.baz is None # pyright: ignore[reportAttributeAccessIssue] assert 
isinstance(subclass_instance.run_coordinator, QueuedRunCoordinator) @@ -680,8 +680,8 @@ def test_instance_subclass(): assert isinstance(subclass_instance, DagsterInstance) assert subclass_instance.__class__.__name__ == "TestInstanceSubclass" - assert subclass_instance.foo() == "bar" - assert subclass_instance.baz == "quux" + assert subclass_instance.foo() == "bar" # pyright: ignore[reportAttributeAccessIssue] + assert subclass_instance.baz == "quux" # pyright: ignore[reportAttributeAccessIssue] # omitting foo leads to a config schema validation error diff --git a/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partition.py b/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partition.py index 710c9bb9a2c23..43807e430ee7e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partition.py +++ b/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partition.py @@ -102,7 +102,7 @@ def test_unique_identifier(): identifier1 = dynamic_def.get_serializable_unique_identifier( dynamic_partitions_store=instance ) - instance.add_dynamic_partitions(dynamic_def.name, ["bar"]) + instance.add_dynamic_partitions(dynamic_def.name, ["bar"]) # pyright: ignore[reportArgumentType] assert identifier1 != dynamic_def.get_serializable_unique_identifier( dynamic_partitions_store=instance ) @@ -112,7 +112,7 @@ def test_unique_identifier(): {"a": StaticPartitionsDefinition(["a", "b", "c"]), "b": dynamic_dimension_def} ) serializable_unique_id = multipartitions_def.get_serializable_unique_identifier(instance) - instance.add_dynamic_partitions(dynamic_dimension_def.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_dimension_def.name, ["apple"]) # pyright: ignore[reportArgumentType] assert serializable_unique_id != multipartitions_def.get_serializable_unique_identifier( instance ) @@ -172,7 +172,7 @@ def test_static_partitions_subset_identical_serialization(): reverse_order_subset = 
partitions.subset_with_partition_keys(reversed(subset)) assert in_order_subset.serialize() == reverse_order_subset.serialize() - assert serialize_value(in_order_subset) == serialize_value(reverse_order_subset) + assert serialize_value(in_order_subset) == serialize_value(reverse_order_subset) # pyright: ignore[reportArgumentType] def test_static_partitions_invalid_chars(): diff --git a/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partitioned_job.py b/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partitioned_job.py index f2d1f5c1315cc..d2d216e5f5793 100644 --- a/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partitioned_job.py +++ b/python_modules/dagster/dagster_tests/core_tests/partition_tests/test_partitioned_job.py @@ -136,7 +136,8 @@ def partition_fn(_current_time=None): return ["blah"] @dynamic_partitioned_config( - partition_fn, tags_for_partition_key_fn=lambda key: {"foo": {"bar": key}} + partition_fn, + tags_for_partition_key_fn=lambda key: {"foo": {"bar": key}}, # pyright: ignore[reportArgumentType] ) def my_dynamic_partitioned_config(_partition_key): return RUN_CONFIG diff --git a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py index cbe03e803a493..0098cb7588ac2 100644 --- a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py +++ b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py @@ -112,8 +112,8 @@ def a_struct_config_op(config: ANewConfigOpConfig): assert DecoratedOpFunction(a_struct_config_op).has_config_arg() # test fields are inferred correctly - assert a_struct_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE - assert list(a_struct_config_op.config_schema.config_type.fields.keys()) == [ + assert 
a_struct_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE # pyright: ignore[reportOptionalMemberAccess] + assert list(a_struct_config_op.config_schema.config_type.fields.keys()) == [ # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] "a_string", "an_int", ] @@ -289,8 +289,8 @@ def a_struct_config_op(config: ANewConfigOpConfig): assert DecoratedOpFunction(a_struct_config_op).has_config_arg() # test fields are inferred correctly - assert a_struct_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE - assert list(a_struct_config_op.config_schema.config_type.fields.keys()) == [ + assert a_struct_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE # pyright: ignore[reportOptionalMemberAccess] + assert list(a_struct_config_op.config_schema.config_type.fields.keys()) == [ # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] "a_nested_value", "a_bool", ] diff --git a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_descriptions.py b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_descriptions.py index 1adcb8ed97a97..f434c0140cd66 100644 --- a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_descriptions.py +++ b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_descriptions.py @@ -20,15 +20,15 @@ def a_new_config_op(config: AnOpConfig): pass # test fields are inferred correctly - assert a_new_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE - assert list(a_new_config_op.config_schema.config_type.fields.keys()) == ["a_string", "nested"] + assert a_new_config_op.config_schema.config_type.kind == ConfigTypeKind.STRICT_SHAPE # pyright: ignore[reportOptionalMemberAccess] + assert list(a_new_config_op.config_schema.config_type.fields.keys()) == ["a_string", "nested"] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert 
a_new_config_op.config_schema.description == "Config for my new op." - assert a_new_config_op.config_schema.config_type.fields["a_string"].description == "A string" + assert a_new_config_op.config_schema.config_type.fields["a_string"].description == "A string" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert ( - a_new_config_op.config_schema.config_type.fields["nested"].description == "A nested config" + a_new_config_op.config_schema.config_type.fields["nested"].description == "A nested config" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) assert ( - a_new_config_op.config_schema.config_type.fields["nested"] + a_new_config_op.config_schema.config_type.fields["nested"] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] .config_type.fields["an_int"] .description == "An int" diff --git a/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_enum.py b/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_enum.py index 809958f7a6cff..394b880aced09 100644 --- a/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_enum.py +++ b/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_enum.py @@ -47,7 +47,7 @@ class MyResource(ConfigurableResource): def my_asset(my_resource: MyResource): return my_resource.enum.value - materialize([my_asset], resources={"my_resource": MyResource(enum=MyEnum.TYPE_A.name)}) + materialize([my_asset], resources={"my_resource": MyResource(enum=MyEnum.TYPE_A.name)}) # pyright: ignore[reportArgumentType] def test_enum_name_resource_override_enum(): diff --git a/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_general_pythonic_resources.py b/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_general_pythonic_resources.py index 6e9a9b1c1aacd..3e0925e66a213 100644 --- 
a/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_general_pythonic_resources.py +++ b/python_modules/dagster/dagster_tests/core_tests/resource_tests/pythonic_resources/test_general_pythonic_resources.py @@ -192,7 +192,7 @@ def hello_world_op(writer: Writer): # Can't instantiate abstract class with pytest.raises(TypeError): - Writer() + Writer() # pyright: ignore[reportAbstractUsage] @job(resource_defs={"writer": PrefixedWriterResource(prefix="greeting: ")}) def prefixed_job(): @@ -517,7 +517,7 @@ def my_asset(my_resource: MyResource): assert completed["yes"] str_resource_partial = StringResource.configure_at_launch() - my_resource = MyResource(string_from_resource=str_resource_partial) + my_resource = MyResource(string_from_resource=str_resource_partial) # pyright: ignore[reportArgumentType] defs = Definitions( assets=[my_asset], diff --git a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_build_init_resource_context.py b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_build_init_resource_context.py index b8980b0da8a95..f004eb8359260 100644 --- a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_build_init_resource_context.py +++ b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_build_init_resource_context.py @@ -51,7 +51,7 @@ def reqs_cm_resource(context): del context assert entered == ["true"] - with build_init_resource_context(resources={"foo": foo}) as context: + with build_init_resource_context(resources={"foo": foo}) as context: # pyright: ignore[reportGeneralTypeIssues] assert context.resources.foo == "foo" assert reqs_cm_resource(context) == "foobar" diff --git a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_resource_definition.py b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_resource_definition.py index 11d46d143ec13..cf345ad15e87d 100644 --- 
a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_resource_definition.py +++ b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_resource_definition.py @@ -621,7 +621,7 @@ def _correct_resource_no_context(): ), ): - @resource + @resource # pyright: ignore[reportCallIssue,reportArgumentType] def _incorrect_resource_2(_a, _b, _c, _d=4): pass @@ -855,7 +855,7 @@ def resource_op(_): error_events = [ event for event in result.all_events - if event.event_type == DagsterEventType.ENGINE_EVENT and event.event_specific_data.error + if event.event_type == DagsterEventType.ENGINE_EVENT and event.event_specific_data.error # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ] assert len(error_events) == 1 assert called == ["A", "B"] @@ -905,7 +905,7 @@ def test_multiprocessing_resource_teardown_failure(): error_events = [ event for event in result.all_events - if event.event_type == DagsterEventType.ENGINE_EVENT and event.event_specific_data.error + if event.event_type == DagsterEventType.ENGINE_EVENT and event.event_specific_data.error # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ] assert len(error_events) == 1 diff --git a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_with_resources.py b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_with_resources.py index 135e91c77b689..c4a2973afb5c7 100644 --- a/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_with_resources.py +++ b/python_modules/dagster/dagster_tests/core_tests/resource_tests/test_with_resources.py @@ -136,7 +136,7 @@ def my_derived_asset(my_source_asset): # When an io manager definition is provided using the generic key, that # generic key is used as the io manager key for the source asset. 
- assert transformed_source.get_io_manager_key() == "io_manager" + assert transformed_source.get_io_manager_key() == "io_manager" # pyright: ignore[reportAttributeAccessIssue] result = materialize([transformed_derived, transformed_source], selection=[transformed_derived]) assert result.success @@ -167,7 +167,7 @@ def my_derived_asset(my_source_asset): # When an io manager definition is provided using the generic key, that # generic key is used as the io manager key for the source asset. - assert transformed_source.get_io_manager_key() == "the_manager" + assert transformed_source.get_io_manager_key() == "the_manager" # pyright: ignore[reportAttributeAccessIssue] result = materialize([transformed_derived, transformed_source], selection=[transformed_derived]) assert result.success @@ -198,7 +198,7 @@ def my_derived_asset(my_source_asset): # When an io manager definition has already been provided, it will use an # override key. - assert transformed_source.io_manager_def == the_manager + assert transformed_source.io_manager_def == the_manager # pyright: ignore[reportAttributeAccessIssue] result = materialize([transformed_derived, transformed_source], selection=[transformed_derived]) assert result.success @@ -456,7 +456,7 @@ def test_bad_config_provided(): def test_overlapping_io_manager_asset(): - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_io_manager(): pass @@ -516,7 +516,7 @@ def the_asset(): def test_overlapping_io_manager_source_asset(): - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_io_manager(): pass @@ -541,7 +541,7 @@ def the_io_manager(): def test_overlapping_resources_source_asset(): foo_resource = ResourceDefinition.hardcoded_resource("blah") - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_io_manager(): pass @@ -586,7 +586,7 @@ def test_with_resources_no_exp_warnings(): def blah(): 
pass - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def the_manager(): pass diff --git a/python_modules/dagster/dagster_tests/core_tests/run_coordinator_tests/test_queued_run_coordinator.py b/python_modules/dagster/dagster_tests/core_tests/run_coordinator_tests/test_queued_run_coordinator.py index 44ce8153dae8b..f78090f083ce8 100644 --- a/python_modules/dagster/dagster_tests/core_tests/run_coordinator_tests/test_queued_run_coordinator.py +++ b/python_modules/dagster/dagster_tests/core_tests/run_coordinator_tests/test_queued_run_coordinator.py @@ -189,4 +189,4 @@ def test_thread_config(): } } ) as instance: - assert instance.run_coordinator.dequeue_num_workers == num + assert instance.run_coordinator.dequeue_num_workers == num # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_builtin_schemas.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_builtin_schemas.py index 1047ce8db148e..54b42b5516eca 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_builtin_schemas.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_builtin_schemas.py @@ -55,11 +55,11 @@ def take_bool(bool_value): def produce_bool(): return True - @op(ins={"any_value": In(Any)}) + @op(ins={"any_value": In(Any)}) # pyright: ignore[reportArgumentType] def take_any(any_value): return any_value - @op(out=Out(Any)) + @op(out=Out(Any)) # pyright: ignore[reportArgumentType] def produce_any(): return True diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_config_schema.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_config_schema.py index feaa4671a7cf7..cfe7ad570d5d9 100644 --- 
a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_config_schema.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/config_schema_tests/test_config_schema.py @@ -13,7 +13,7 @@ def _foo(_, hello): def test_dagster_type_loader_missing_context(): with pytest.raises(DagsterInvalidDefinitionError): - @dagster_type_loader(String) + @dagster_type_loader(String) # pyright: ignore[reportArgumentType] def _foo(hello): return hello @@ -21,6 +21,6 @@ def _foo(hello): def test_dagster_type_loader_missing_variable(): with pytest.raises(DagsterInvalidDefinitionError): - @dagster_type_loader(String) + @dagster_type_loader(String) # pyright: ignore[reportArgumentType] def _foo(_): return 1 diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_dict.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_dict.py index b5c32d1b5fab1..2a6cd7f3f6dfd 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_dict.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_dict.py @@ -77,7 +77,7 @@ def emit_dict(): def test_basic_dagster_dictionary_input(): - @op(ins={"data": In(Dict)}, out=Out(str)) + @op(ins={"data": In(Dict)}, out=Out(str)) # pyright: ignore[reportArgumentType] def input_dict(data): return data["key"] @@ -129,8 +129,8 @@ def emit_dict(): assert wrap_op_in_graph_and_execute(emit_dict).output_value() == {"key": "value"} assert emit_dict.output_defs[0].dagster_type.key == "TypedPythonDict.String.String" - assert emit_dict.output_defs[0].dagster_type.key_type.unique_name == "String" - assert emit_dict.output_defs[0].dagster_type.value_type.unique_name == "String" + assert emit_dict.output_defs[0].dagster_type.key_type.unique_name == "String" # pyright: ignore[reportAttributeAccessIssue] + assert emit_dict.output_defs[0].dagster_type.value_type.unique_name == "String" # 
pyright: ignore[reportAttributeAccessIssue] def test_basic_closed_typing_dictionary_input(): diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_set.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_set.py index 31164ac8479da..25778386b8374 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_set.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_set.py @@ -84,30 +84,30 @@ def take_set(tt): def test_runtime_set_of_int(): set_dagster_type = create_typed_runtime_set(int) - set_dagster_type.type_check(None, {1}) - set_dagster_type.type_check(None, set()) + set_dagster_type.type_check(None, {1}) # pyright: ignore[reportArgumentType] + set_dagster_type.type_check(None, set()) # pyright: ignore[reportArgumentType] - res = set_dagster_type.type_check(None, None) + res = set_dagster_type.type_check(None, None) # pyright: ignore[reportArgumentType] assert not res.success - res = set_dagster_type.type_check(None, "nope") + res = set_dagster_type.type_check(None, "nope") # pyright: ignore[reportArgumentType] assert not res.success - res = set_dagster_type.type_check(None, {"nope"}) + res = set_dagster_type.type_check(None, {"nope"}) # pyright: ignore[reportArgumentType] assert not res.success def test_runtime_optional_set(): set_dagster_type = resolve_dagster_type(Optional[create_typed_runtime_set(int)]) - set_dagster_type.type_check(None, {1}) - set_dagster_type.type_check(None, set()) - set_dagster_type.type_check(None, None) + set_dagster_type.type_check(None, {1}) # pyright: ignore[reportArgumentType] + set_dagster_type.type_check(None, set()) # pyright: ignore[reportArgumentType] + set_dagster_type.type_check(None, None) # pyright: ignore[reportArgumentType] - res = set_dagster_type.type_check(None, "nope") + res = set_dagster_type.type_check(None, "nope") # pyright: ignore[reportArgumentType] assert not res.success - 
res = set_dagster_type.type_check(None, {"nope"}) + res = set_dagster_type.type_check(None, {"nope"}) # pyright: ignore[reportArgumentType] assert not res.success diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_tuple.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_tuple.py index eb6bb8d3e0cdf..23e5acd064657 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_tuple.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_python_tuple.py @@ -83,18 +83,18 @@ def take_tuple(tt): def test_typed_python_tuple_directly(): int_str_tuple = create_typed_tuple(int, str) - int_str_tuple.type_check(None, (1, "foo")) + int_str_tuple.type_check(None, (1, "foo")) # pyright: ignore[reportArgumentType] - res = int_str_tuple.type_check(None, None) + res = int_str_tuple.type_check(None, None) # pyright: ignore[reportArgumentType] assert not res.success - res = int_str_tuple.type_check(None, "bar") + res = int_str_tuple.type_check(None, "bar") # pyright: ignore[reportArgumentType] assert not res.success - res = int_str_tuple.type_check(None, (1, 2, 3)) + res = int_str_tuple.type_check(None, (1, 2, 3)) # pyright: ignore[reportArgumentType] assert not res.success - res = int_str_tuple.type_check(None, ("1", 2)) + res = int_str_tuple.type_check(None, ("1", 2)) # pyright: ignore[reportArgumentType] assert not res.success @@ -103,15 +103,15 @@ def test_nested_python_tuple_directly(): nested_tuple = create_typed_tuple(bool, list, int_str_tuple_kls) - nested_tuple.type_check(None, (True, [1], (1, "foo"))) + nested_tuple.type_check(None, (True, [1], (1, "foo"))) # pyright: ignore[reportArgumentType] - res = nested_tuple.type_check(None, None) + res = nested_tuple.type_check(None, None) # pyright: ignore[reportArgumentType] assert not res.success - res = nested_tuple.type_check(None, "bar") + res = nested_tuple.type_check(None, "bar") # pyright: 
ignore[reportArgumentType] assert not res.success - res = nested_tuple.type_check(None, (True, [1], (1, 2))) + res = nested_tuple.type_check(None, (True, [1], (1, 2))) # pyright: ignore[reportArgumentType] assert not res.success diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_runtime_types.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_runtime_types.py index d809fa26d5fba..a3a1a93e5c972 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_runtime_types.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_runtime_types.py @@ -117,13 +117,13 @@ def add_one(num): assert add_one.input_defs[0].dagster_type.unique_name == "Int" runtime = resolve_dagster_type(float) - runtime.type_check(None, 1.0) - res = runtime.type_check(None, 1) + runtime.type_check(None, 1.0) # pyright: ignore[reportArgumentType] + res = runtime.type_check(None, 1) # pyright: ignore[reportArgumentType] assert not res.success runtime = resolve_dagster_type(bool) - runtime.type_check(None, True) - res = runtime.type_check(None, 1) + runtime.type_check(None, True) # pyright: ignore[reportArgumentType] + res = runtime.type_check(None, 1) # pyright: ignore[reportArgumentType] assert not res.success diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py index f42c98c56f23b..466e3ac992890 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_typed_python_dict.py @@ -6,13 +6,13 @@ def test_typed_python_dict(): int_to_int = Dict[int, int] - int_to_int.type_check(None, {1: 1}) + int_to_int.type_check(None, {1: 1}) # pyright: ignore[reportArgumentType] def test_typed_python_dict_failure(): int_to_int = 
Dict[int, int] - res = int_to_int.type_check(None, {1: "1"}) + res = int_to_int.type_check(None, {1: "1"}) # pyright: ignore[reportArgumentType] assert not res.success diff --git a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_types.py b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_types.py index f9d248751b334..95dfb6598e41c 100644 --- a/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_types.py +++ b/python_modules/dagster/dagster_tests/core_tests/runtime_types_tests/test_types.py @@ -184,8 +184,8 @@ def pipe(): event for event in events_for_node if event.event_type == DagsterEventType.STEP_OUTPUT ].pop() - type_check_data = output_event.event_specific_data.type_check_data - assert type_check_data.success + type_check_data = output_event.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert type_check_data.success # pyright: ignore[reportOptionalMemberAccess] def test_input_types_fail_in_job(): @@ -219,7 +219,7 @@ def pipe(): for event in result.events_for_node("take_string") if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert failure_event.step_failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" + assert failure_event.step_failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" # pyright: ignore[reportOptionalMemberAccess] def test_output_types_fail_in_job(): @@ -243,9 +243,9 @@ def pipe(): event for event in events_for_node if event.event_type == DagsterEventType.STEP_OUTPUT ].pop() - type_check_data = output_event.event_specific_data.type_check_data - assert not type_check_data.success - assert type_check_data.description == 'Value "1" of python type "int" must be a string.' 
+ type_check_data = output_event.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert not type_check_data.success # pyright: ignore[reportOptionalMemberAccess] + assert type_check_data.description == 'Value "1" of python type "int" must be a string.' # pyright: ignore[reportOptionalMemberAccess] failure_event = [ event @@ -253,7 +253,7 @@ def pipe(): if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert failure_event.step_failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" + assert failure_event.step_failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" # pyright: ignore[reportOptionalMemberAccess] # TODO add more step output use cases @@ -278,7 +278,7 @@ def _return_bad_value(_, _value): return "foo" -BadType = DagsterType(name="BadType", type_check_fn=_return_bad_value) +BadType = DagsterType(name="BadType", type_check_fn=_return_bad_value) # pyright: ignore[reportArgumentType] def test_input_type_returns_wrong_thing(): @@ -314,7 +314,7 @@ def pipe(): if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert failure_event.step_failure_data.error.cls_name == "DagsterInvariantViolationError" + assert failure_event.step_failure_data.error.cls_name == "DagsterInvariantViolationError" # pyright: ignore[reportOptionalMemberAccess] def test_output_type_returns_wrong_thing(): @@ -337,7 +337,7 @@ def pipe(): for event in result.events_for_node("return_one_bad_thing") if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert failure_event.step_failure_data.error.cls_name == "DagsterInvariantViolationError" + assert failure_event.step_failure_data.error.cls_name == "DagsterInvariantViolationError" # pyright: ignore[reportOptionalMemberAccess] def test_input_type_throw_arbitrary_exception(): @@ -363,7 +363,7 @@ def pipe(): for event in result.events_for_node("take_throws") if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert 
failure_event.step_failure_data.error.cause.cls_name == "AlwaysFailsException" + assert failure_event.step_failure_data.error.cause.cls_name == "AlwaysFailsException" # pyright: ignore[reportOptionalMemberAccess] def test_output_type_throw_arbitrary_exception(): @@ -385,8 +385,8 @@ def pipe(): for event in result.events_for_node("return_one_throws") if event.event_type == DagsterEventType.STEP_FAILURE ].pop() - assert failure_event.step_failure_data.error.cause.cls_name == "AlwaysFailsException" - assert "kdjfkjd" in failure_event.step_failure_data.error.cause.message + assert failure_event.step_failure_data.error.cause.cls_name == "AlwaysFailsException" # pyright: ignore[reportOptionalMemberAccess] + assert "kdjfkjd" in failure_event.step_failure_data.error.cause.message # pyright: ignore[reportOptionalMemberAccess] def define_custom_dict(name, permitted_key_names): @@ -401,7 +401,7 @@ def type_check_method(_, value): return TypeCheck( False, description=( - f"Key {value.name} is not a permitted value, values can only be of: {permitted_key_names}" + f"Key {value.name} is not a permitted value, values can only be of: {permitted_key_names}" # pyright: ignore[reportAttributeAccessIssue] ), ) return TypeCheck( @@ -477,7 +477,7 @@ def foo_job(): ] for event in result.all_node_events: if event.event_type_value == DagsterEventType.STEP_FAILURE.value: - assert event.event_specific_data.error.cls_name == "DagsterTypeCheckDidNotPass" + assert event.event_specific_data.error.cls_name == "DagsterTypeCheckDidNotPass" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_raise_on_error_true_type_check_returns_unsuccessful_type_check(): @@ -509,7 +509,7 @@ def foo_job(): ] for event in result.all_node_events: if event.event_type_value == DagsterEventType.STEP_FAILURE.value: - assert event.event_specific_data.error.cls_name == "DagsterTypeCheckDidNotPass" + assert event.event_specific_data.error.cls_name == "DagsterTypeCheckDidNotPass" # pyright: 
ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_raise_on_error_true_type_check_raises_exception(): @@ -537,7 +537,7 @@ def foo_job(): ] for event in result.all_node_events: if event.event_type_value == DagsterEventType.STEP_FAILURE.value: - assert event.event_specific_data.error.cause.cls_name == "Failure" + assert event.event_specific_data.error.cause.cls_name == "Failure" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_raise_on_error_true_type_check_returns_true(): @@ -584,8 +584,8 @@ def foo_job(): assert result.success for event in result.all_node_events: if event.event_type_value == DagsterEventType.STEP_OUTPUT.value: - assert event.event_specific_data.type_check_data - assert event.event_specific_data.type_check_data.metadata["bar"].text == "foo" + assert event.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert event.event_specific_data.type_check_data.metadata["bar"].text == "foo" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] result = foo_job.execute_in_process(raise_on_error=False) assert result.success diff --git a/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_execute.py b/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_execute.py index 699134dc91826..58af66a45155e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_execute.py +++ b/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_execute.py @@ -57,7 +57,7 @@ def test_reexecute_asset_subset(): assert materializations[0].asset_key == AssetKey("my_asset") run = instance.get_run_by_id(result.run_id) - assert run.asset_selection == {AssetKey("my_asset")} + assert run.asset_selection == {AssetKey("my_asset")} # pyright: ignore[reportOptionalMemberAccess] reexecution_result = execute_job( reconstructable(get_asset_selection_job), @@ -70,7 +70,7 @@ def 
test_reexecute_asset_subset(): assert len(materializations) == 1 assert materializations[0].asset_key == AssetKey("my_asset") run = instance.get_run_by_id(reexecution_result.run_id) - assert run.asset_selection == {AssetKey("my_asset")} + assert run.asset_selection == {AssetKey("my_asset")} # pyright: ignore[reportOptionalMemberAccess] def test_execute_job_with_op_selection_single_clause(): diff --git a/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_subset_selector.py b/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_subset_selector.py index ecbfc961d9964..9f3ac1b68d6f8 100644 --- a/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_subset_selector.py +++ b/python_modules/dagster/dagster_tests/core_tests/selector_tests/test_subset_selector.py @@ -46,7 +46,7 @@ def foo_job(): def test_generate_dep_graph(): - graph = generate_dep_graph(foo_job) + graph = generate_dep_graph(foo_job) # pyright: ignore[reportArgumentType] assert graph == { "upstream": { "return_one": set(), @@ -66,7 +66,7 @@ def test_generate_dep_graph(): def test_traverser(): - graph = generate_dep_graph(foo_job) + graph = generate_dep_graph(foo_job) # pyright: ignore[reportArgumentType] traverser = Traverser(graph) assert traverser.fetch_upstream(item_name="return_one", depth=1) == set() @@ -81,7 +81,7 @@ def test_traverser(): def test_traverser_invalid(): - graph = generate_dep_graph(foo_job) + graph = generate_dep_graph(foo_job) # pyright: ignore[reportArgumentType] traverser = Traverser(graph) assert traverser.fetch_upstream(item_name="some_solid", depth=1) == set() @@ -100,15 +100,15 @@ def test_parse_clause_invalid(): def test_parse_op_selection_single(): - op_selection_single = parse_op_queries(foo_job, ["add_nums"]) + op_selection_single = parse_op_queries(foo_job, ["add_nums"]) # pyright: ignore[reportArgumentType] assert len(op_selection_single) == 1 assert op_selection_single == {"add_nums"} - op_selection_star = 
parse_op_queries(foo_job, ["add_nums*"]) + op_selection_star = parse_op_queries(foo_job, ["add_nums*"]) # pyright: ignore[reportArgumentType] assert len(op_selection_star) == 3 assert set(op_selection_star) == {"add_nums", "multiply_two", "add_one"} - op_selection_both = parse_op_queries(foo_job, ["*add_nums+"]) + op_selection_both = parse_op_queries(foo_job, ["*add_nums+"]) # pyright: ignore[reportArgumentType] assert len(op_selection_both) == 4 assert set(op_selection_both) == { "return_one", @@ -119,7 +119,7 @@ def test_parse_op_selection_single(): def test_parse_op_selection_multi(): - op_selection_multi_disjoint = parse_op_queries(foo_job, ["return_one", "add_nums+"]) + op_selection_multi_disjoint = parse_op_queries(foo_job, ["return_one", "add_nums+"]) # pyright: ignore[reportArgumentType] assert len(op_selection_multi_disjoint) == 3 assert set(op_selection_multi_disjoint) == { "return_one", @@ -127,7 +127,7 @@ def test_parse_op_selection_multi(): "multiply_two", } - op_selection_multi_overlap = parse_op_queries(foo_job, ["*add_nums", "return_one+"]) + op_selection_multi_overlap = parse_op_queries(foo_job, ["*add_nums", "return_one+"]) # pyright: ignore[reportArgumentType] assert len(op_selection_multi_overlap) == 3 assert set(op_selection_multi_overlap) == { "return_one", @@ -139,7 +139,7 @@ def test_parse_op_selection_multi(): DagsterInvalidSubsetError, match="No qualified ops to execute found for op_selection", ): - parse_op_queries(foo_job, ["*add_nums", "a"]) + parse_op_queries(foo_job, ["*add_nums", "a"]) # pyright: ignore[reportArgumentType] def test_parse_op_selection_invalid(): @@ -147,7 +147,7 @@ def test_parse_op_selection_invalid(): DagsterInvalidSubsetError, match="No qualified ops to execute found for op_selection", ): - parse_op_queries(foo_job, ["some,solid"]) + parse_op_queries(foo_job, ["some,solid"]) # pyright: ignore[reportArgumentType] step_deps = { @@ -192,7 +192,7 @@ def test_clause_to_subset(clause, expected_subset): "e": {"f"}, }, } - 
assert set(clause_to_subset(graph, clause, lambda x: x)) == set(expected_subset.split(",")) + assert set(clause_to_subset(graph, clause, lambda x: x)) == set(expected_subset.split(",")) # pyright: ignore[reportArgumentType] def test_parse_step_selection_single(): diff --git a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_config_schema_snapshot.py b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_config_schema_snapshot.py index c63aaecb226cc..04f4313a5115a 100644 --- a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_config_schema_snapshot.py +++ b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_config_schema_snapshot.py @@ -17,7 +17,7 @@ def snap_from_dagster_type(dagster_type: DagsterType) -> ConfigTypeSnap: def test_enum_snap(): enum_snap = snap_from_dagster_type( - Enum( + Enum( # pyright: ignore[reportArgumentType] "CowboyType", [ EnumValue("good"), @@ -36,7 +36,7 @@ def test_enum_snap(): def test_basic_int_snap(): - int_snap = snap_from_dagster_type(int) + int_snap = snap_from_dagster_type(int) # pyright: ignore[reportArgumentType] assert int_snap.given_name == "Int" assert int_snap.key == "Int" assert int_snap.kind == ConfigTypeKind.SCALAR @@ -45,7 +45,7 @@ def test_basic_int_snap(): def test_basic_dict(): - dict_snap = snap_from_dagster_type({"foo": int}) + dict_snap = snap_from_dagster_type({"foo": int}) # pyright: ignore[reportArgumentType] assert dict_snap.key.startswith("Shape.") assert dict_snap.given_name is None child_type_keys = dict_snap.get_child_type_keys() @@ -62,7 +62,7 @@ def test_basic_dict(): def test_field_things(): dict_snap = snap_from_dagster_type( - { + { # pyright: ignore[reportArgumentType] "req": int, "opt": Field(int, is_required=False), "opt_with_default": Field(int, is_required=False, default_value=2), @@ -81,14 +81,14 @@ def test_field_things(): assert field_snap_dict["opt"].default_value_as_json_str is None assert field_snap_dict["opt_with_default"].is_required is 
False assert field_snap_dict["opt_with_default"].default_provided is True - assert deserialize_value(field_snap_dict["opt_with_default"].default_value_as_json_str) == 2 + assert deserialize_value(field_snap_dict["opt_with_default"].default_value_as_json_str) == 2 # pyright: ignore[reportArgumentType] assert field_snap_dict["req_with_desc"].is_required is True assert field_snap_dict["req_with_desc"].description == "A desc" def test_basic_list(): - list_snap = snap_from_dagster_type(Array(int)) + list_snap = snap_from_dagster_type(Array(int)) # pyright: ignore[reportArgumentType] assert list_snap.key.startswith("Array") child_type_keys = list_snap.get_child_type_keys() assert child_type_keys @@ -97,7 +97,7 @@ def test_basic_list(): def test_basic_optional(): - optional_snap = snap_from_dagster_type(Noneable(int)) + optional_snap = snap_from_dagster_type(Noneable(int)) # pyright: ignore[reportArgumentType] assert optional_snap.key.startswith("Noneable") child_type_keys = optional_snap.get_child_type_keys() @@ -109,7 +109,7 @@ def test_basic_optional(): def test_basic_list_list(): - list_snap = snap_from_dagster_type([[int]]) + list_snap = snap_from_dagster_type([[int]]) # pyright: ignore[reportArgumentType] assert list_snap.key.startswith("Array") child_type_keys = list_snap.get_child_type_keys() assert child_type_keys @@ -120,7 +120,7 @@ def test_basic_list_list(): def test_list_of_dict(): inner_dict_dagster_type = Shape({"foo": Field(str)}) - list_of_dict_snap = snap_from_dagster_type([inner_dict_dagster_type]) + list_of_dict_snap = snap_from_dagster_type([inner_dict_dagster_type]) # pyright: ignore[reportArgumentType] assert list_of_dict_snap.key.startswith("Array") child_type_keys = list_of_dict_snap.get_child_type_keys() @@ -130,7 +130,7 @@ def test_list_of_dict(): def test_selector_of_things(): - selector_snap = snap_from_dagster_type(Selector({"bar": Field(int)})) + selector_snap = snap_from_dagster_type(Selector({"bar": Field(int)})) # pyright: 
ignore[reportArgumentType] assert selector_snap.key.startswith("Selector") assert selector_snap.kind == ConfigTypeKind.SELECTOR assert selector_snap.fields and len(selector_snap.fields) == 1 @@ -140,7 +140,7 @@ def test_selector_of_things(): def test_basic_map(): - map_snap = snap_from_dagster_type(Map(str, int)) + map_snap = snap_from_dagster_type(Map(str, int)) # pyright: ignore[reportArgumentType] assert map_snap.key.startswith("Map") child_type_keys = map_snap.get_child_type_keys() assert child_type_keys @@ -150,7 +150,7 @@ def test_basic_map(): def test_named_map(): - map_snap = snap_from_dagster_type(Map(str, float, key_label_name="title")) + map_snap = snap_from_dagster_type(Map(str, float, key_label_name="title")) # pyright: ignore[reportArgumentType] assert map_snap.key.startswith("Map") assert map_snap.given_name == "title" child_type_keys = map_snap.get_child_type_keys() @@ -161,7 +161,7 @@ def test_named_map(): def test_basic_map_nested(): - map_snap = snap_from_dagster_type({int: {str: int}}) + map_snap = snap_from_dagster_type({int: {str: int}}) # pyright: ignore[reportArgumentType] assert map_snap.key.startswith("Map") child_type_keys = map_snap.get_child_type_keys() assert child_type_keys @@ -173,7 +173,7 @@ def test_basic_map_nested(): def test_map_of_dict(): inner_dict_dagster_type = Shape({"foo": Field(str)}) - map_of_dict_snap = snap_from_dagster_type({str: inner_dict_dagster_type}) + map_of_dict_snap = snap_from_dagster_type({str: inner_dict_dagster_type}) # pyright: ignore[reportArgumentType] assert map_of_dict_snap.key.startswith("Map") child_type_keys = map_of_dict_snap.get_child_type_keys() @@ -201,7 +201,7 @@ def test_kitchen_sink(): ] ) - kitchen_sink_snap = snap_from_dagster_type(kitchen_sink) + kitchen_sink_snap = snap_from_dagster_type(kitchen_sink) # pyright: ignore[reportArgumentType] rehydrated_snap = deserialize_value(serialize_value(kitchen_sink_snap), ConfigTypeSnap) assert kitchen_sink_snap == rehydrated_snap @@ -233,7 +233,7 @@ 
def op_with_config(_): def single_op_job(): op_with_config() - op_config_key = op_with_config.config_schema.config_type.key + op_config_key = op_with_config.config_schema.config_type.key # pyright: ignore[reportOptionalMemberAccess] config_snaps = build_config_schema_snapshot(single_op_job).all_config_snaps_by_key @@ -242,9 +242,9 @@ def single_op_job(): op_config_snap = config_snaps[op_config_key] assert op_config_snap.kind == ConfigTypeKind.STRICT_SHAPE - assert len(op_config_snap.fields) == 1 + assert len(op_config_snap.fields) == 1 # pyright: ignore[reportArgumentType] - foo_field = op_config_snap.fields[0] + foo_field = op_config_snap.fields[0] # pyright: ignore[reportOptionalSubscript] assert foo_field.name == "foo" assert foo_field.type_key == "String" @@ -259,16 +259,16 @@ def op_with_config(_): def single_op_job(): op_with_config() - op_config_key = op_with_config.config_schema.config_type.key + op_config_key = op_with_config.config_schema.config_type.key # pyright: ignore[reportOptionalMemberAccess] config_snaps = build_config_schema_snapshot(single_op_job).all_config_snaps_by_key assert op_config_key in config_snaps op_config_snap = config_snaps[op_config_key] assert op_config_snap.kind == ConfigTypeKind.STRICT_SHAPE - assert len(op_config_snap.fields) == 1 + assert len(op_config_snap.fields) == 1 # pyright: ignore[reportArgumentType] - list_list_field = op_config_snap.fields[0] + list_list_field = op_config_snap.fields[0] # pyright: ignore[reportOptionalSubscript] list_list_type_key = list_list_field.type_key @@ -308,7 +308,7 @@ def single_op_job(): config_snaps = build_config_schema_snapshot(single_op_job).all_config_snaps_by_key - op_config_key = op_with_kitchen_sink_config.config_schema.config_type.key + op_config_key = op_with_kitchen_sink_config.config_schema.config_type.key # pyright: ignore[reportOptionalMemberAccess] assert op_config_key in config_snaps op_config_snap = config_snaps[op_config_key] @@ -316,7 +316,7 @@ def single_op_job(): 
dict_within_list = config_snaps[op_config_snap.inner_type_key] - assert len(dict_within_list.fields) == 3 + assert len(dict_within_list.fields) == 3 # pyright: ignore[reportArgumentType] opt_field = dict_within_list.get_field("opt_list_of_int") @@ -324,7 +324,7 @@ def single_op_job(): assert config_snaps[opt_field.type_key].kind == ConfigTypeKind.ARRAY nested_dict = config_snaps[dict_within_list.get_field("nested_dict").type_key] - assert len(nested_dict.fields) == 2 + assert len(nested_dict.fields) == 2 # pyright: ignore[reportArgumentType] nested_selector = config_snaps[nested_dict.get_field("nested_selector").type_key] noneable_list_bool = config_snaps[nested_selector.get_field("noneable_list").type_key] assert noneable_list_bool.kind == ConfigTypeKind.NONEABLE @@ -334,7 +334,7 @@ def single_op_job(): amap = config_snaps[dict_within_list.get_field("map").type_key] assert amap.kind == ConfigTypeKind.MAP map_dict = config_snaps[amap.inner_type_key] - assert len(map_dict.fields) == 2 + assert len(map_dict.fields) == 2 # pyright: ignore[reportArgumentType] map_a = config_snaps[map_dict.get_field("map_a").type_key] assert map_a.kind == ConfigTypeKind.SCALAR @@ -357,7 +357,7 @@ def single_op_job(): config_snaps = build_config_schema_snapshot(single_op_job).all_config_snaps_by_key - scalar_union_key = op_with_config.config_schema.config_type.key + scalar_union_key = op_with_config.config_schema.config_type.key # pyright: ignore[reportOptionalMemberAccess] assert scalar_union_key in config_snaps diff --git a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_job_snap.py b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_job_snap.py index 797b4ee90605f..ab6f501b29a9e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_job_snap.py +++ b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_job_snap.py @@ -472,7 +472,7 @@ def noop_job(): job_snapshot = JobSnap.from_job_def(noop_job) node_def_snap = 
job_snapshot.get_node_def_snap("fancy_op") recevied_config_type = job_snapshot.get_config_type_from_node_def_snap(node_def_snap) - snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) + snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) # pyright: ignore[reportArgumentType] _dict_has_stable_hashes( recevied_config_type, job_snapshot.config_schema_snapshot.all_config_snaps_by_key, @@ -492,7 +492,7 @@ def noop_job(): job_snapshot = JobSnap.from_job_def(noop_job) node_def_snap = job_snapshot.get_node_def_snap("fancy_op") recevied_config_type = job_snapshot.get_config_type_from_node_def_snap(node_def_snap) - snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) + snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) # pyright: ignore[reportArgumentType] _array_has_stable_hashes( recevied_config_type, job_snapshot.config_schema_snapshot.all_config_snaps_by_key, @@ -511,7 +511,7 @@ def noop_job(): job_snapshot = JobSnap.from_job_def(noop_job) node_def_snap = job_snapshot.get_node_def_snap("fancy_op") recevied_config_type = job_snapshot.get_config_type_from_node_def_snap(node_def_snap) - snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) + snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) # pyright: ignore[reportArgumentType] _array_has_stable_hashes( recevied_config_type, job_snapshot.config_schema_snapshot.all_config_snaps_by_key, @@ -536,7 +536,7 @@ def noop_job(): job_snapshot = JobSnap.from_job_def(noop_job) node_def_snap = job_snapshot.get_node_def_snap("fancy_op") recevied_config_type = job_snapshot.get_config_type_from_node_def_snap(node_def_snap) - snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) + snapshot.assert_match(serialize_pp(snap_from_config_type(recevied_config_type))) # pyright: ignore[reportArgumentType] _dict_has_stable_hashes( recevied_config_type, 
job_snapshot.config_schema_snapshot.all_config_snaps_by_key, diff --git a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_legacy_mode_def_snap.py b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_legacy_mode_def_snap.py index edb425fe982f0..3862c9a7b0f30 100644 --- a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_legacy_mode_def_snap.py +++ b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_legacy_mode_def_snap.py @@ -14,11 +14,11 @@ def a_resource(_): def no_config_resource(_): pass - @logger(config_schema={"bar": str}) + @logger(config_schema={"bar": str}) # pyright: ignore[reportArgumentType] def a_logger(_): pass - @logger(description="logger_description") + @logger(description="logger_description") # pyright: ignore[reportCallIssue] def no_config_logger(_): pass @@ -27,7 +27,7 @@ def no_config_logger(_): "some_resource": a_resource, "no_config_resource": no_config_resource, }, - logger_defs={ + logger_defs={ # pyright: ignore[reportArgumentType] "some_logger": a_logger, "no_config_logger": no_config_logger, }, diff --git a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_repository_snap.py b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_repository_snap.py index eba7bb4d92bdd..83946c2b63db0 100644 --- a/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_repository_snap.py +++ b/python_modules/dagster/dagster_tests/core_tests/snap_tests/test_repository_snap.py @@ -48,10 +48,10 @@ def noop_repo(): repo_snap = RepositorySnap.from_def(noop_repo) assert repo_snap.name == "noop_repo" - assert len(repo_snap.job_datas) == 1 - assert isinstance(repo_snap.job_datas[0], JobDataSnap) + assert len(repo_snap.job_datas) == 1 # pyright: ignore[reportArgumentType] + assert isinstance(repo_snap.job_datas[0], JobDataSnap) # pyright: ignore[reportOptionalSubscript] - job_snapshot = repo_snap.job_datas[0].job + job_snapshot = repo_snap.job_datas[0].job # pyright: 
ignore[reportOptionalSubscript] assert isinstance(job_snapshot, JobSnap) assert job_snapshot.name == "noop_job" assert job_snapshot.description is None @@ -71,11 +71,11 @@ def my_asset(foo: ResourceParam[str]): repo = defs.get_repository_def() repo_snap = RepositorySnap.from_def(repo) - assert len(repo_snap.resources) == 1 - assert repo_snap.resources[0].name == "foo" - assert repo_snap.resources[0].resource_snapshot.name == "foo" - assert repo_snap.resources[0].resource_snapshot.description is None - assert repo_snap.resources[0].configured_values == {} + assert len(repo_snap.resources) == 1 # pyright: ignore[reportArgumentType] + assert repo_snap.resources[0].name == "foo" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.resources[0].resource_snapshot.name == "foo" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.resources[0].resource_snapshot.description is None # pyright: ignore[reportOptionalSubscript] + assert repo_snap.resources[0].configured_values == {} # pyright: ignore[reportOptionalSubscript] def test_repository_snap_definitions_resources_nested() -> None: @@ -260,20 +260,20 @@ def my_asset(foo: MyStringResource): repo = defs.get_repository_def() repo_snap = RepositorySnap.from_def(repo) - assert len(repo_snap.resources) == 1 - assert repo_snap.resources[0].name == "foo" - assert repo_snap.resources[0].resource_snapshot.name == "foo" - assert repo_snap.resources[0].resource_snapshot.description == "My description." + assert len(repo_snap.resources) == 1 # pyright: ignore[reportArgumentType] + assert repo_snap.resources[0].name == "foo" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.resources[0].resource_snapshot.name == "foo" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.resources[0].resource_snapshot.description == "My description." 
# pyright: ignore[reportOptionalSubscript] # Ensure we get config snaps for the resource's fields - assert len(repo_snap.resources[0].config_field_snaps) == 1 - snap = repo_snap.resources[0].config_field_snaps[0] + assert len(repo_snap.resources[0].config_field_snaps) == 1 # pyright: ignore[reportOptionalSubscript] + snap = repo_snap.resources[0].config_field_snaps[0] # pyright: ignore[reportOptionalSubscript] assert snap.name == "my_string" assert not snap.is_required assert snap.default_value_as_json_str == '"bar"' # Ensure we get the configured values for the resource - assert repo_snap.resources[0].configured_values == { + assert repo_snap.resources[0].configured_values == { # pyright: ignore[reportOptionalSubscript] "my_string": '"baz"', } @@ -285,8 +285,8 @@ def empty_repo(): repo_snap = RepositorySnap.from_def(empty_repo) assert repo_snap.name == "empty_repo" - assert len(repo_snap.job_datas) == 0 - assert len(repo_snap.resources) == 0 + assert len(repo_snap.job_datas) == 0 # pyright: ignore[reportArgumentType] + assert len(repo_snap.resources) == 0 # pyright: ignore[reportArgumentType] def test_repository_snap_definitions_env_vars() -> None: @@ -587,10 +587,10 @@ def test_asset_check(): def my_asset(): pass - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_asset_check(): ... - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_asset_check_2(): ... defs = Definitions( @@ -601,9 +601,9 @@ def my_asset_check_2(): ... 
repo = defs.get_repository_def() repo_snap = RepositorySnap.from_def(repo) - assert len(repo_snap.asset_check_nodes) == 2 - assert repo_snap.asset_check_nodes[0].name == "my_asset_check" - assert repo_snap.asset_check_nodes[1].name == "my_asset_check_2" + assert len(repo_snap.asset_check_nodes) == 2 # pyright: ignore[reportArgumentType] + assert repo_snap.asset_check_nodes[0].name == "my_asset_check" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.asset_check_nodes[1].name == "my_asset_check_2" # pyright: ignore[reportOptionalSubscript] def test_asset_check_in_asset_op(): @@ -616,7 +616,7 @@ def test_asset_check_in_asset_op(): def my_asset(): pass - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_asset_check(): ... defs = Definitions( @@ -627,10 +627,10 @@ def my_asset_check(): ... repo = defs.get_repository_def() repo_snap = RepositorySnap.from_def(repo) - assert len(repo_snap.asset_check_nodes) == 3 - assert repo_snap.asset_check_nodes[0].name == "my_asset_check" - assert repo_snap.asset_check_nodes[1].name == "my_other_asset_check" - assert repo_snap.asset_check_nodes[2].name == "my_other_asset_check_2" + assert len(repo_snap.asset_check_nodes) == 3 # pyright: ignore[reportArgumentType] + assert repo_snap.asset_check_nodes[0].name == "my_asset_check" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.asset_check_nodes[1].name == "my_other_asset_check" # pyright: ignore[reportOptionalSubscript] + assert repo_snap.asset_check_nodes[2].name == "my_other_asset_check_2" # pyright: ignore[reportOptionalSubscript] def test_asset_check_multiple_jobs(): @@ -642,7 +642,7 @@ def test_asset_check_multiple_jobs(): def my_asset(): pass - @asset_check(asset=my_asset) + @asset_check(asset=my_asset) # pyright: ignore[reportArgumentType] def my_asset_check(): ... 
my_job = define_asset_job("my_job", [my_asset]) diff --git a/python_modules/dagster/dagster_tests/core_tests/system_config_tests/test_system_config.py b/python_modules/dagster/dagster_tests/core_tests/system_config_tests/test_system_config.py index bc05a368daaf8..1887fdfe8bc47 100644 --- a/python_modules/dagster/dagster_tests/core_tests/system_config_tests/test_system_config.py +++ b/python_modules/dagster/dagster_tests/core_tests/system_config_tests/test_system_config.py @@ -26,14 +26,14 @@ def create_creation_data(job_def): - return RunConfigSchemaCreationData( + return RunConfigSchemaCreationData( # pyright: ignore[reportCallIssue] job_def.name, job_def.nodes, job_def.dependency_structure, logger_defs=default_loggers(), ignored_nodes=[], required_resources=set(), - direct_inputs=job_def._input_values, # noqa: SLF001 + direct_inputs=job_def._input_values, # noqa [SLF001] asset_layer=job_def.asset_layer, ) @@ -64,7 +64,7 @@ def test_all_types_provided(): matching_types = [ tt for tt in all_types - if tt.kind == ConfigTypeKind.STRICT_SHAPE and "with_default_int" in tt.fields.keys() + if tt.kind == ConfigTypeKind.STRICT_SHAPE and "with_default_int" in tt.fields.keys() # pyright: ignore[reportAttributeAccessIssue] ] assert len(matching_types) == 1 @@ -93,7 +93,7 @@ def job_def(): some_op() env_type = create_run_config_schema_type(job_def) - some_resource_field = env_type.fields["resources"].config_type.fields["some_resource"] + some_resource_field = env_type.fields["resources"].config_type.fields["some_resource"] # pyright: ignore[reportAttributeAccessIssue] assert some_resource_field.is_required is False some_resource_config_field = some_resource_field.config_type.fields["config"] @@ -123,7 +123,7 @@ def job_def(): def test_op_config(): solid_config_type = Shape({"config": Field(Int)}) solid_inst = process_config(solid_config_type, {"config": 1}) - assert solid_inst.value["config"] == 1 + assert solid_inst.value["config"] == 1 # pyright: 
ignore[reportOptionalSubscript] def test_op_dictionary_type(): @@ -182,7 +182,7 @@ def assert_has_fields(dtype, *fields): def test_op_configs_defaults(): env_type = create_run_config_schema_type(define_test_solids_config_pipeline()) - solids_field = env_type.fields["ops"] + solids_field = env_type.fields["ops"] # pyright: ignore[reportAttributeAccessIssue] assert_has_fields(solids_field.config_type, "int_config_op", "string_config_op") @@ -245,7 +245,7 @@ def test_whole_environment(): ], ).to_job( resource_defs={ - "test_resource": ResourceDefinition(resource_fn=lambda _: None, config_schema=Any) + "test_resource": ResourceDefinition(resource_fn=lambda _: None, config_schema=Any) # pyright: ignore[reportArgumentType] }, ) @@ -284,9 +284,9 @@ def test_op_config_error(): res = process_config(int_solid_config_type, {"notconfig": 1}) assert not res.success - assert re.match('Received unexpected config entry "notconfig"', res.errors[0].message) + assert re.match('Received unexpected config entry "notconfig"', res.errors[0].message) # pyright: ignore[reportOptionalSubscript] - res = process_config(int_solid_config_type, 1) + res = process_config(int_solid_config_type, 1) # pyright: ignore[reportArgumentType] assert not res.success @@ -335,9 +335,9 @@ def _assert_config_none(context, value): env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is False + assert env_type.fields["ops"].is_required is False # pyright: ignore[reportAttributeAccessIssue] - solids_type = env_type.fields["ops"].config_type + solids_type = env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] assert solids_type.fields["int_config_op"].is_required is False @@ -365,9 +365,9 @@ def _assert_config_none(context, value): env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is True + assert env_type.fields["ops"].is_required is True # pyright: ignore[reportAttributeAccessIssue] - solids_type = 
env_type.fields["ops"].config_type + solids_type = env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] assert solids_type.fields["int_config_op"].is_required is True @@ -398,22 +398,22 @@ def test_required_op_with_required_subfield(): env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is True - assert env_type.fields["ops"].config_type + assert env_type.fields["ops"].is_required is True # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] - solids_type = env_type.fields["ops"].config_type + solids_type = env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] assert solids_type.fields["int_config_op"].is_required is True int_config_solid_type = solids_type.fields["int_config_op"].config_type assert int_config_solid_type.fields["config"].is_required is True - assert env_type.fields["execution"].is_required is False + assert env_type.fields["execution"].is_required is False # pyright: ignore[reportAttributeAccessIssue] env_obj = ResolvedRunConfig.build( job_def, {"ops": {"int_config_op": {"config": {"required_field": "foobar"}}}}, ) - assert env_obj.ops["int_config_op"].config["required_field"] == "foobar" + assert env_obj.ops["int_config_op"].config["required_field"] == "foobar" # pyright: ignore[reportIndexIssue] res = process_config(env_type, {"ops": {}}) assert not res.success @@ -440,8 +440,8 @@ def test_optional_op_with_optional_subfield(): ).to_job() env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is False - assert env_type.fields["execution"].is_required is False + assert env_type.fields["ops"].is_required is False # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["execution"].is_required is False # pyright: ignore[reportAttributeAccessIssue] def nested_field(config_type, *field_names): @@ -473,9 +473,9 @@ def 
needs_resource(_): ) env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is False - assert env_type.fields["execution"].is_required is False - assert env_type.fields["resources"].is_required + assert env_type.fields["ops"].is_required is False # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["execution"].is_required is False # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["resources"].is_required # pyright: ignore[reportAttributeAccessIssue] assert nested_field(env_type, "resources", "with_required").is_required assert nested_field(env_type, "resources", "with_required", "config").is_required assert nested_field( @@ -497,9 +497,9 @@ def test_all_optional_field_on_single_resource(): ) env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is False - assert env_type.fields["execution"].is_required is False - assert env_type.fields["resources"].is_required is False + assert env_type.fields["ops"].is_required is False # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["execution"].is_required is False # pyright: ignore[reportAttributeAccessIssue] + assert env_type.fields["resources"].is_required is False # pyright: ignore[reportAttributeAccessIssue] assert nested_field(env_type, "resources", "with_optional").is_required is False assert nested_field(env_type, "resources", "with_optional", "config").is_required is False assert ( @@ -530,9 +530,9 @@ def needs_resource(_): ) env_type = create_run_config_schema_type(job_def) - assert env_type.fields["ops"].is_required is False + assert env_type.fields["ops"].is_required is False # pyright: ignore[reportAttributeAccessIssue] - assert env_type.fields["execution"].is_required is False + assert env_type.fields["execution"].is_required is False # pyright: ignore[reportAttributeAccessIssue] assert nested_field(env_type, "resources").is_required assert nested_field(env_type, "resources", 
"optional_resource").is_required is False @@ -577,7 +577,7 @@ def add_one(num): env_type = create_run_config_schema_type(job_def) - solids_type = env_type.fields["ops"].config_type + solids_type = env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] first_add_fields = solids_type.fields["first_add"].config_type.fields @@ -612,7 +612,7 @@ def return_three(): ).to_job() env_type = create_run_config_schema_type(job_def) - solids_type = env_type.fields["ops"].config_type + solids_type = env_type.fields["ops"].config_type # pyright: ignore[reportAttributeAccessIssue] add_numbers_type = solids_type.fields["add_numbers"].config_type inputs_fields_dict = add_numbers_type.fields["inputs"].config_type.fields diff --git a/python_modules/dagster/dagster_tests/core_tests/test_asset_events.py b/python_modules/dagster/dagster_tests/core_tests/test_asset_events.py index 3f88d47818652..c3d32766dcca8 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_asset_events.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_asset_events.py @@ -32,8 +32,8 @@ def my_asset(): planned_events = _get_planned_events(instance, result.run_id) assert len(planned_events) == 1 planned_event = planned_events[0] - assert planned_event.event_specific_data.asset_key == AssetKey("my_asset") - assert planned_event.step_key == "my_asset" + assert planned_event.event_specific_data.asset_key == AssetKey("my_asset") # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] + assert planned_event.step_key == "my_asset" # pyright: ignore[reportOptionalMemberAccess] def test_multi_asset_mat_planned_event_step_key(): @@ -51,9 +51,9 @@ def my_asset(): result = materialize([my_asset], instance=instance) planned_events = _get_planned_events(instance, result.run_id) assert len(planned_events) == 2 - assert all(event.is_asset_materialization_planned for event in planned_events) - assert all(event.step_key == "my_asset" for event in planned_events) - assert 
set(event.asset_key for event in planned_events) == { + assert all(event.is_asset_materialization_planned for event in planned_events) # pyright: ignore[reportOptionalMemberAccess] + assert all(event.step_key == "my_asset" for event in planned_events) # pyright: ignore[reportOptionalMemberAccess] + assert set(event.asset_key for event in planned_events) == { # pyright: ignore[reportOptionalMemberAccess] AssetKey("my_asset_name"), AssetKey("my_other_asset"), } @@ -173,13 +173,17 @@ def my_other_asset(my_asset): planned_events = _get_planned_events(instance, result.run_id) assert len(planned_events) == 2 [my_asset_event] = [ - event for event in planned_events if event.asset_key == AssetKey("my_asset") + event + for event in planned_events + if event.asset_key == AssetKey("my_asset") # pyright: ignore[reportOptionalMemberAccess] ] [my_other_asset_event] = [ - event for event in planned_events if event.asset_key == AssetKey("my_other_asset") + event + for event in planned_events + if event.asset_key == AssetKey("my_other_asset") # pyright: ignore[reportOptionalMemberAccess] ] - assert my_asset_event.event_specific_data.partition == "b" - assert my_other_asset_event.event_specific_data.partition is None + assert my_asset_event.event_specific_data.partition == "b" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] + assert my_other_asset_event.event_specific_data.partition is None # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] def test_subset_on_asset_materialization_planned_event_for_single_run_backfill_allowed(): @@ -199,9 +203,9 @@ def my_asset(): planned_events = _get_planned_events(instance, result.run_id) assert len(planned_events) == 1 planned_event = planned_events[0] - assert planned_event.asset_key == AssetKey("my_asset") + assert planned_event.asset_key == AssetKey("my_asset") # pyright: ignore[reportOptionalMemberAccess] assert ( - planned_event.event_specific_data.partitions_subset + 
planned_event.event_specific_data.partitions_subset # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] == partitions_def.subset_with_partition_keys(["a", "b"]) ) @@ -227,13 +231,17 @@ def unpartitioned(): planned_events = _get_planned_events(instance, result.run_id) assert len(planned_events) == 2 [partitioned_event] = [ - event for event in planned_events if event.asset_key == partitioned.key + event + for event in planned_events + if event.asset_key == partitioned.key # pyright: ignore[reportOptionalMemberAccess] ] [unpartitioned_event] = [ - event for event in planned_events if event.asset_key == unpartitioned.key + event + for event in planned_events + if event.asset_key == unpartitioned.key # pyright: ignore[reportOptionalMemberAccess] ] assert ( - partitioned_event.event_specific_data.partitions_subset + partitioned_event.event_specific_data.partitions_subset # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] == partitions_def.subset_with_partition_keys(["a", "b"]) ) - assert unpartitioned_event.event_specific_data.partitions_subset is None + assert unpartitioned_event.event_specific_data.partitions_subset is None # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] diff --git a/python_modules/dagster/dagster_tests/core_tests/test_data_time.py b/python_modules/dagster/dagster_tests/core_tests/test_data_time.py index 88ddac5213718..ab70f8a095fa4 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_data_time.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_data_time.py @@ -131,7 +131,7 @@ def f(): for entry in instance.all_logs( result.run_id, of_type=DagsterEventType.ASSET_MATERIALIZATION ): - asset_key = entry.dagster_event.event_specific_data.materialization.asset_key + asset_key = entry.dagster_event.event_specific_data.materialization.asset_key # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] materialization_times_index[asset_key][idx] = 
datetime.datetime.fromtimestamp( entry.timestamp, tz=datetime.timezone.utc ) @@ -148,11 +148,11 @@ def f(): ) as tags_property: tags_property.return_value = None upstream_data_times = data_time_queryer.get_data_time_by_key_for_record( - record=latest_asset_record, + record=latest_asset_record, # pyright: ignore[reportArgumentType] ) else: upstream_data_times = data_time_queryer.get_data_time_by_key_for_record( - record=latest_asset_record, + record=latest_asset_record, # pyright: ignore[reportArgumentType] ) assert upstream_data_times == { AssetKey(k): materialization_times_index[AssetKey(k)][v] diff --git a/python_modules/dagster/dagster_tests/core_tests/test_data_versions.py b/python_modules/dagster/dagster_tests/core_tests/test_data_versions.py index 8f6a2a6175585..bbdf67b36fdf7 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_data_versions.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_data_versions.py @@ -8,7 +8,7 @@ def test_data_version_construction(): assert ver.value == "foo" with pytest.raises(ParameterCheckError): - DataVersion(100) + DataVersion(100) # pyright: ignore[reportArgumentType] def test_data_version_equality(): diff --git a/python_modules/dagster/dagster_tests/core_tests/test_event_logging.py b/python_modules/dagster/dagster_tests/core_tests/test_event_logging.py index 8ea991c4e9f4f..6e6efae908d40 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_event_logging.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_event_logging.py @@ -53,7 +53,7 @@ def test_empty_job(): def _event_callback(record): assert isinstance(record, EventLogEntry) if record.is_dagster_event: - events[record.dagster_event.event_type].append(record) + events[record.dagster_event.event_type].append(record) # pyright: ignore[reportOptionalMemberAccess] job_def = JobDefinition( graph_def=GraphDefinition( @@ -97,22 +97,22 @@ def _event_callback(record): start_event = single_dagster_event(events, 
DagsterEventType.STEP_START) assert start_event.job_name == "single_op_job" - assert start_event.dagster_event.node_name == "op_one" + assert start_event.dagster_event.node_name == "op_one" # pyright: ignore[reportAttributeAccessIssue] # persisted logging tags contain pipeline_name but not pipeline_tags - assert start_event.dagster_event.logging_tags["job_name"] == "single_op_job" - assert "pipeline_tags" not in start_event.dagster_event.logging_tags + assert start_event.dagster_event.logging_tags["job_name"] == "single_op_job" # pyright: ignore[reportAttributeAccessIssue] + assert "pipeline_tags" not in start_event.dagster_event.logging_tags # pyright: ignore[reportAttributeAccessIssue] output_event = single_dagster_event(events, DagsterEventType.STEP_OUTPUT) assert output_event - assert output_event.dagster_event.step_output_data.output_name == "result" + assert output_event.dagster_event.step_output_data.output_name == "result" # pyright: ignore[reportAttributeAccessIssue] success_event = single_dagster_event(events, DagsterEventType.STEP_SUCCESS) assert success_event.job_name == "single_op_job" - assert success_event.dagster_event.node_name == "op_one" + assert success_event.dagster_event.node_name == "op_one" # pyright: ignore[reportAttributeAccessIssue] - assert isinstance(success_event.dagster_event.step_success_data.duration_ms, float) - assert success_event.dagster_event.step_success_data.duration_ms > 0.0 + assert isinstance(success_event.dagster_event.step_success_data.duration_ms, float) # pyright: ignore[reportAttributeAccessIssue] + assert success_event.dagster_event.step_success_data.duration_ms > 0.0 # pyright: ignore[reportAttributeAccessIssue] def test_single_op_job_failure(): @@ -140,14 +140,14 @@ def _event_callback(record): start_event = single_dagster_event(events, DagsterEventType.STEP_START) assert start_event.job_name == "single_op_job" - assert start_event.dagster_event.node_name == "op_one" - assert start_event.level == logging.DEBUG + 
assert start_event.dagster_event.node_name == "op_one" # pyright: ignore[reportAttributeAccessIssue] + assert start_event.level == logging.DEBUG # pyright: ignore[reportAttributeAccessIssue] failure_event = single_dagster_event(events, DagsterEventType.STEP_FAILURE) assert failure_event.job_name == "single_op_job" - assert failure_event.dagster_event.node_name == "op_one" - assert failure_event.level == logging.ERROR + assert failure_event.dagster_event.node_name == "op_one" # pyright: ignore[reportAttributeAccessIssue] + assert failure_event.level == logging.ERROR # pyright: ignore[reportAttributeAccessIssue] def define_simple(): @@ -197,7 +197,7 @@ def test_event_forward_compat_with_event_specific_data(): assert result.step_key == "future_step" assert ( 'Attempted to deserialize class "FutureEventData" which is not in the whitelist.' - in result.event_specific_data.error.message + in result.event_specific_data.error.message # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) @@ -216,7 +216,7 @@ def test_event_forward_compat_without_event_specific_data(): assert result.step_key == "future_step" assert ( "'EVENT_TYPE_FROM_THE_FUTURE' is not a valid DagsterEventType" - in result.event_specific_data.error.message + in result.event_specific_data.error.message # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) diff --git a/python_modules/dagster/dagster_tests/core_tests/test_external_execution_plan.py b/python_modules/dagster/dagster_tests/core_tests/test_external_execution_plan.py index e317ffc18c16c..5441dcc74a909 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_external_execution_plan.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_external_execution_plan.py @@ -235,7 +235,7 @@ def test_using_file_system_for_subplan_missing_input(): failures = [event for event in events if event.event_type_value == "STEP_FAILURE"] assert len(failures) == 1 assert failures[0].step_key == "add_one" - assert 
"DagsterExecutionLoadInputError" in failures[0].event_specific_data.error.message + assert "DagsterExecutionLoadInputError" in failures[0].event_specific_data.error.message # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_using_file_system_for_subplan_invalid_step(): diff --git a/python_modules/dagster/dagster_tests/core_tests/test_job_errors.py b/python_modules/dagster/dagster_tests/core_tests/test_job_errors.py index 1b281b40aed4e..a97a2aa81e19d 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_job_errors.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_job_errors.py @@ -105,7 +105,7 @@ def job_def(): ) assert len(failure_events) == 1 - failure_event = failure_events.pop() + failure_event = failure_events.pop() # pyright: ignore[reportAttributeAccessIssue] assert failure_event.step_failure_data.error.cls_name == "DagsterExecutionStepExecutionError" @@ -172,7 +172,7 @@ def job_def(): ) assert len(failure_events) == 1 - failure_event = failure_events.pop() + failure_event = failure_events.pop() # pyright: ignore[reportAttributeAccessIssue] assert failure_event.step_failure_data.error.cause.cls_name == "CheckError" diff --git a/python_modules/dagster/dagster_tests/core_tests/test_job_execution.py b/python_modules/dagster/dagster_tests/core_tests/test_job_execution.py index 593f04e59dd1c..9b3cc4b6b0743 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_job_execution.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_job_execution.py @@ -624,7 +624,7 @@ def failing_init_job(): event = result.all_events[-1] assert event.event_type_value == "PIPELINE_FAILURE" assert event.job_failure_data - assert mem_instance.get_run_by_id(result.run_id).is_failure_or_canceled + assert mem_instance.get_run_by_id(result.run_id).is_failure_or_canceled # pyright: ignore[reportOptionalMemberAccess] with instance_for_test() as fs_instance: result = failing_init_job.execute_in_process( @@ -636,7 +636,7 @@ 
def failing_init_job(): event = result.all_events[-1] assert event.event_type_value == "PIPELINE_FAILURE" assert event.job_failure_data - assert fs_instance.get_run_by_id(result.run_id).is_failure_or_canceled + assert fs_instance.get_run_by_id(result.run_id).is_failure_or_canceled # pyright: ignore[reportOptionalMemberAccess] def get_retry_job() -> JobDefinition: @@ -926,7 +926,7 @@ def pipe(): def_one() def_two() - assert pipe.get_subset(op_selection=["def_two"]).op_selection_data.resolved_op_selection == { + assert pipe.get_subset(op_selection=["def_two"]).op_selection_data.resolved_op_selection == { # pyright: ignore[reportOptionalMemberAccess] "def_two" } diff --git a/python_modules/dagster/dagster_tests/core_tests/test_metadata.py b/python_modules/dagster/dagster_tests/core_tests/test_metadata.py index 97a86a6f2df7c..642ed75ce8872 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_metadata.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_metadata.py @@ -120,14 +120,17 @@ def test_code_location_reconstruction_metadata_value(): assert CodeLocationReconstructionMetadataValue("foo").value == "foo" with pytest.raises(CheckError, match="not a str"): - CodeLocationReconstructionMetadataValue({"foo": "bar"}) + CodeLocationReconstructionMetadataValue({"foo": "bar"}) # pyright: ignore[reportArgumentType] def test_serdes_json_metadata(): old_bad_event_str = '{"__class__": "JsonMetadataEntryData", "data": {"float": {"__class__": "FloatMetadataEntryData", "value": 1.0}}}' val = deserialize_value(old_bad_event_str, JsonMetadataValue) assert val - assert isinstance(val.data["float"], dict) # and not FloatMetadataValue + assert isinstance( + val.data["float"], # type: ignore + dict, + ) s = serialize_value(val) val_2 = deserialize_value(s, JsonMetadataValue) assert val_2 == val diff --git a/python_modules/dagster/dagster_tests/core_tests/test_multiple_outputs.py b/python_modules/dagster/dagster_tests/core_tests/test_multiple_outputs.py index 
7d484e75c07b6..0756e11dad55a 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_multiple_outputs.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_multiple_outputs.py @@ -110,7 +110,7 @@ def test_multiple_outputs_only_emit_one(): ) assert len(output_events) == 1 - assert output_events[0].event_specific_data.step_output_handle.output_name == "output_one" + assert output_events[0].event_specific_data.step_output_handle.output_name == "output_one" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] with pytest.raises(DagsterInvariantViolationError): result.output_for_node("not_present") @@ -165,7 +165,7 @@ def missing_non_optional_job(): def test_warning_for_conditional_output(capsys): @op( config_schema={"return": bool}, - out=Out(Any, is_required=False), + out=Out(Any, is_required=False), # pyright: ignore[reportArgumentType] ) def maybe(context): if context.op_config["return"]: diff --git a/python_modules/dagster/dagster_tests/core_tests/test_nothing_dependencies.py b/python_modules/dagster/dagster_tests/core_tests/test_nothing_dependencies.py index 38bc1b71a95c6..a5775cb894f4e 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_nothing_dependencies.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_nothing_dependencies.py @@ -363,7 +363,7 @@ def asset2(asset1): def test_asset_nothing_output_non_none_input(): - @asset(dagster_type=Nothing) + @asset(dagster_type=Nothing) # pyright: ignore[reportArgumentType] def asset1(): pass diff --git a/python_modules/dagster/dagster_tests/core_tests/test_op_aliases.py b/python_modules/dagster/dagster_tests/core_tests/test_op_aliases.py index ae48e272d12e0..8da71c2d79f61 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_op_aliases.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_op_aliases.py @@ -15,7 +15,7 @@ def not_first(prev): job_def = GraphDefinition( node_defs=[first, not_first], name="test", - dependencies={ + 
dependencies={ # pyright: ignore[reportArgumentType] "not_first": {"prev": DependencyDefinition("first")}, NodeInvocation("not_first", alias="second"): { "prev": DependencyDefinition("not_first") diff --git a/python_modules/dagster/dagster_tests/core_tests/test_op_invocation.py b/python_modules/dagster/dagster_tests/core_tests/test_op_invocation.py index f090d6c72c425..e9cd27da3d2bb 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_op_invocation.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_op_invocation.py @@ -98,7 +98,7 @@ def basic_op(context): pass # Verify dispose was called on the instance - assert context.instance.run_storage._held_conn.closed # noqa + assert context.instance.run_storage._held_conn.closed # noqa # pyright: ignore[reportAttributeAccessIssue] def test_op_invocation_context_arg(): @@ -1355,8 +1355,8 @@ async def async_asset_two(context): async def main(): return await asyncio.gather( - async_asset_one(ctx), - async_asset_two(ctx), + async_asset_one(ctx), # type: ignore + async_asset_two(ctx), # type: ignore ) with pytest.raises( @@ -1442,15 +1442,15 @@ async def async_asset(context): ctx = build_asset_context() - result = asyncio.run(async_asset(ctx)) + result = asyncio.run(async_asset(ctx)) # pyright: ignore[reportArgumentType] assert result == "one" - assert_context_unbound(ctx) - assert_execution_properties_exist(ctx) + assert_context_unbound(ctx) # pyright: ignore[reportArgumentType] + assert_execution_properties_exist(ctx) # pyright: ignore[reportArgumentType] - result = asyncio.run(async_asset(ctx)) + result = asyncio.run(async_asset(ctx)) # pyright: ignore[reportArgumentType] assert result == "one" - assert_context_unbound(ctx) - assert_execution_properties_exist(ctx) + assert_context_unbound(ctx) # pyright: ignore[reportArgumentType] + assert_execution_properties_exist(ctx) # pyright: ignore[reportArgumentType] def test_context_bound_state_async_generator(): @@ -1495,7 +1495,7 @@ def 
throws_error(context): with pytest.raises(Failure): throws_error(ctx) - assert_context_unbound(ctx) + assert_context_unbound(ctx) # pyright: ignore[reportArgumentType] @asset def no_error(context): @@ -1536,16 +1536,16 @@ def generator(context): def test_context_bound_state_with_error_async(): @asset async def async_asset(context): - assert_context_bound(ctx) + assert_context_bound(ctx) # pyright: ignore[reportArgumentType] await asyncio.sleep(0.01) raise Failure("something bad happened!") ctx = build_asset_context() with pytest.raises(Failure): - asyncio.run(async_asset(ctx)) + asyncio.run(async_asset(ctx)) # pyright: ignore[reportArgumentType] - assert_context_unbound(ctx) + assert_context_unbound(ctx) # pyright: ignore[reportArgumentType] def test_context_bound_state_with_error_async_generator(): diff --git a/python_modules/dagster/dagster_tests/core_tests/test_op_with_config.py b/python_modules/dagster/dagster_tests/core_tests/test_op_with_config.py index b74e8922a60fb..08e802e728195 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_op_with_config.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_op_with_config.py @@ -114,7 +114,7 @@ def my_job(): ) # works if input added, don't need to remove other stuff - run_config["ops"]["op2"] = {"inputs": {"input_table": {"value": "public.table_1"}}} + run_config["ops"]["op2"] = {"inputs": {"input_table": {"value": "public.table_1"}}} # pyright: ignore[reportArgumentType] assert my_job.execute_in_process( run_config=run_config, op_selection=["op2"], diff --git a/python_modules/dagster/dagster_tests/core_tests/test_utils.py b/python_modules/dagster/dagster_tests/core_tests/test_utils.py index 1a0a180c45441..8647bb4404b09 100644 --- a/python_modules/dagster/dagster_tests/core_tests/test_utils.py +++ b/python_modules/dagster/dagster_tests/core_tests/test_utils.py @@ -22,7 +22,7 @@ def library_registry_fixture(): yield - DagsterLibraryRegistry._libraries = previous_libraries # noqa: SLF001 + 
DagsterLibraryRegistry._libraries = previous_libraries # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] def test_parse_env_var_no_equals(): @@ -106,7 +106,7 @@ def test_hash_collection(): assert hash_collection(set(range(10))) == hash_collection(set(range(10))) with pytest.raises(AssertionError): - hash_collection(object()) + hash_collection(object()) # pyright: ignore[reportArgumentType] class Foo(NamedTuple): a: List[int] diff --git a/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_run_status_sensors.py b/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_run_status_sensors.py index 4f6a883f3d67a..fa4879cea88f3 100644 --- a/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_run_status_sensors.py +++ b/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_run_status_sensors.py @@ -711,8 +711,8 @@ def test_run_status_sensor_interleave(storage_config_fn, executor: Optional[Thre freeze_datetime, TickStatus.SUCCESS, ) - assert len(ticks[0].origin_run_ids) == 1 - assert ticks[0].origin_run_ids[0] == run2.run_id + assert len(ticks[0].origin_run_ids) == 1 # pyright: ignore[reportArgumentType] + assert ticks[0].origin_run_ids[0] == run2.run_id # pyright: ignore[reportOptionalSubscript] # fail run 1 with freeze_time(freeze_datetime): @@ -736,8 +736,8 @@ def test_run_status_sensor_interleave(storage_config_fn, executor: Optional[Thre freeze_datetime, TickStatus.SUCCESS, ) - assert len(ticks[0].origin_run_ids) == 1 - assert ticks[0].origin_run_ids[0] == run1.run_id + assert len(ticks[0].origin_run_ids) == 1 # pyright: ignore[reportArgumentType] + assert ticks[0].origin_run_ids[0] == run1.run_id # pyright: ignore[reportOptionalSubscript] @pytest.mark.parametrize("storage_config_fn", [sql_event_log_storage_config_fn]) diff --git a/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_sensor_run.py b/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_sensor_run.py index 1e3a596352dde..6db5285f05648 
100644 --- a/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_sensor_run.py +++ b/python_modules/dagster/dagster_tests/daemon_sensor_tests/test_sensor_run.py @@ -2600,7 +2600,7 @@ def test_status_in_code_sensor(executor, instance): ) as workspace_context: remote_repo = next( iter(workspace_context.create_request_context().get_code_location_entries().values()) - ).code_location.get_repository("the_status_in_code_repo") + ).code_location.get_repository("the_status_in_code_repo") # pyright: ignore[reportOptionalMemberAccess] with freeze_time(freeze_datetime): running_sensor = remote_repo.get_sensor("always_running_sensor") @@ -2858,11 +2858,11 @@ def test_repository_namespacing(executor): full_workspace_context.create_request_context().get_code_location_entries().values() ) ).code_location - repo = full_location.get_repository("the_repo") - other_repo = full_location.get_repository("the_other_repo") + repo = full_location.get_repository("the_repo") # pyright: ignore[reportOptionalMemberAccess] + other_repo = full_location.get_repository("the_other_repo") # pyright: ignore[reportOptionalMemberAccess] # stop always on sensor - status_in_code_repo = full_location.get_repository("the_status_in_code_repo") + status_in_code_repo = full_location.get_repository("the_status_in_code_repo") # pyright: ignore[reportOptionalMemberAccess] running_sensor = status_in_code_repo.get_sensor("always_running_sensor") instance.stop_sensor( running_sensor.get_remote_origin_id(), running_sensor.selector_id, running_sensor diff --git a/python_modules/dagster/dagster_tests/daemon_tests/conftest.py b/python_modules/dagster/dagster_tests/daemon_tests/conftest.py index 712318128e4d0..5d20465ca29e0 100644 --- a/python_modules/dagster/dagster_tests/daemon_tests/conftest.py +++ b/python_modules/dagster/dagster_tests/daemon_tests/conftest.py @@ -170,7 +170,7 @@ def partitions_defs_changes_location_2_fixture( def base_job_name_changes_workspace_1_load_target(attribute=None): return 
InProcessTestWorkspaceLoadTarget( - ManagedGrpcPythonEnvCodeLocationOrigin( + ManagedGrpcPythonEnvCodeLocationOrigin( # pyright: ignore[reportArgumentType] loadable_target_origin=LoadableTargetOrigin( executable_path=sys.executable, module_name="dagster_tests.daemon_tests.test_locations.base_job_name_changes_locations.location_1", @@ -195,7 +195,7 @@ def base_job_name_changes_location_1_fixture( def base_job_name_changes_workspace_2_load_target(attribute=None): return InProcessTestWorkspaceLoadTarget( - ManagedGrpcPythonEnvCodeLocationOrigin( + ManagedGrpcPythonEnvCodeLocationOrigin( # pyright: ignore[reportArgumentType] loadable_target_origin=LoadableTargetOrigin( executable_path=sys.executable, module_name="dagster_tests.daemon_tests.test_locations.base_job_name_changes_locations.location_2", diff --git a/python_modules/dagster/dagster_tests/daemon_tests/test_backfill.py b/python_modules/dagster/dagster_tests/daemon_tests/test_backfill.py index 934a84fe825c8..9f6bad53bb2ed 100644 --- a/python_modules/dagster/dagster_tests/daemon_tests/test_backfill.py +++ b/python_modules/dagster/dagster_tests/daemon_tests/test_backfill.py @@ -1548,7 +1548,7 @@ def test_asset_backfill_forcible_mark_as_canceled_during_canceling_iteration( instance.add_backfill( # Add some partitions in a "requested" state to mock that certain partitions are hanging backfill.with_asset_backfill_data( - backfill.asset_backfill_data._replace( + backfill.asset_backfill_data._replace( # pyright: ignore[reportOptionalMemberAccess] requested_subset=AssetGraphSubset(non_partitioned_asset_keys={AssetKey("daily_1")}) ), dynamic_partitions_store=instance, @@ -2308,7 +2308,7 @@ def test_error_code_location( assert ( "dagster._core.errors.DagsterAssetBackfillDataLoadError: Asset AssetKey(['asset_a']) existed at" " storage-time, but no longer does. 
This could be because it's inside a code location" - " that's failing to load" in errors[0].message + " that's failing to load" in errors[0].message # pyright: ignore[reportOptionalMemberAccess] ) assert "Failure loading location" in caplog.text diff --git a/python_modules/dagster/dagster_tests/daemon_tests/test_queued_run_coordinator_daemon.py b/python_modules/dagster/dagster_tests/daemon_tests/test_queued_run_coordinator_daemon.py index c63ba4c81e318..daf43ae61fd71 100644 --- a/python_modules/dagster/dagster_tests/daemon_tests/test_queued_run_coordinator_daemon.py +++ b/python_modules/dagster/dagster_tests/daemon_tests/test_queued_run_coordinator_daemon.py @@ -613,11 +613,11 @@ def mocked_location_init( return original_method( self, origin, - host, + host, # pyright: ignore[reportArgumentType] port, socket, server_id, - heartbeat, + heartbeat, # pyright: ignore[reportArgumentType] watch_server, grpc_server_registry, ) @@ -875,14 +875,14 @@ def test_op_concurrency_aware_dequeuing( instance.event_log_storage.set_concurrency_slots("foo", 1) list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == set([run_id_1]) - caplog.text.count("is blocked by global concurrency limits") == 0 + caplog.text.count("is blocked by global concurrency limits") == 0 # pyright: ignore[reportUnusedExpression] self.submit_run( instance, remote_job, workspace, run_id=run_id_2, asset_selection=set([foo_key]) ) list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == {run_id_1} - caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 + caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] self.submit_run( instance, remote_job, workspace, run_id=run_id_3, asset_selection=set([foo_key]) @@ -890,15 +890,15 @@ def test_op_concurrency_aware_dequeuing( 
list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == {run_id_1} # the log message only shows up once per run - caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 - caplog.text.count(f"Run {run_id_3} is blocked by global concurrency limits") == 1 + caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] + caplog.text.count(f"Run {run_id_3} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] # bumping up the slot by one means that one more run should get dequeued instance.event_log_storage.set_concurrency_slots("foo", 2) list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == {run_id_1, run_id_2} - caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 - caplog.text.count(f"Run {run_id_3} is blocked by global concurrency limits") == 1 + caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] + caplog.text.count(f"Run {run_id_3} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] @pytest.mark.parametrize( "run_coordinator_config", @@ -1167,7 +1167,7 @@ def test_concurrency_buffer_with_default_slot( list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == set([run_id_1]) - caplog.text.count("is blocked by global concurrency limits") == 0 + caplog.text.count("is blocked by global concurrency limits") == 0 # pyright: ignore[reportUnusedExpression] # the global concurrency counter has initialized the concurrency configuration assert "foo" in instance.event_log_storage.get_concurrency_keys() @@ -1177,7 +1177,7 @@ def test_concurrency_buffer_with_default_slot( ) 
list(daemon.run_iteration(concurrency_limited_workspace_context)) assert set(self.get_run_ids(instance.run_launcher.queue())) == {run_id_1} - caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 + caplog.text.count(f"Run {run_id_2} is blocked by global concurrency limits") == 1 # pyright: ignore[reportUnusedExpression] class TestQueuedRunCoordinatorDaemon(QueuedRunCoordinatorDaemonTests): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/checks_module/checks_submodule/__init__.py b/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/checks_module/checks_submodule/__init__.py index 492f847560ddf..78c03ee99b2df 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/checks_module/checks_submodule/__init__.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/checks_module/checks_submodule/__init__.py @@ -1,6 +1,6 @@ from dagster import asset_check -@asset_check(asset="asset_1") +@asset_check(asset="asset_1") # pyright: ignore[reportArgumentType] def submodule_check(): pass diff --git a/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/test_load_from_modules.py b/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/test_load_from_modules.py index d3176efa6e449..c299c488aae4a 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/test_load_from_modules.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/asset_check_tests/test_load_from_modules.py @@ -71,7 +71,7 @@ def test_load_asset_checks_from_modules_prefix(): assert result.get_asset_check_evaluations()[1].check_name == "asset_check_1" -@asset_check(asset=AssetKey("asset_1")) +@asset_check(asset=AssetKey("asset_1")) # pyright: ignore[reportArgumentType] def check_in_current_module(): pass diff --git 
a/python_modules/dagster/dagster_tests/definitions_tests/asset_policy_sensors_tests/test_default_auto_materialize_sensors.py b/python_modules/dagster/dagster_tests/definitions_tests/asset_policy_sensors_tests/test_default_auto_materialize_sensors.py index f818f1881b1fc..3326851284a5c 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/asset_policy_sensors_tests/test_default_auto_materialize_sensors.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/asset_policy_sensors_tests/test_default_auto_materialize_sensors.py @@ -219,7 +219,7 @@ def test_combine_default_sensors_with_non_default_sensors(instance_with_auto_mat == 'not key:"auto_materialize_asset" or key:"auto_observe_asset"' ) - assert default_sensor.asset_selection.resolve(asset_graph) == { + assert default_sensor.asset_selection.resolve(asset_graph) == { # pyright: ignore[reportOptionalMemberAccess] AssetKey(["other_auto_materialize_asset"]), AssetKey(["other_auto_observe_asset"]), AssetKey(["boring_asset"]), @@ -228,7 +228,7 @@ def test_combine_default_sensors_with_non_default_sensors(instance_with_auto_mat custom_sensor = remote_repo.get_sensor("my_custom_policy_sensor") - assert custom_sensor.asset_selection.resolve(asset_graph) == { + assert custom_sensor.asset_selection.resolve(asset_graph) == { # pyright: ignore[reportOptionalMemberAccess] AssetKey(["auto_materialize_asset"]), AssetKey(["auto_observe_asset"]), } @@ -282,7 +282,7 @@ def test_custom_sensors_cover_all(instance_with_auto_materialize_sensors): # Custom sensor covered all the valid assets custom_sensor = remote_repo.get_sensor("my_custom_policy_sensor") - assert custom_sensor.asset_selection.resolve(asset_graph) == { + assert custom_sensor.asset_selection.resolve(asset_graph) == { # pyright: ignore[reportOptionalMemberAccess] AssetKey(["auto_materialize_asset"]), AssetKey(["auto_observe_asset"]), AssetKey(["other_auto_materialize_asset"]), diff --git 
a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon.py index 1567c4d7cfff2..b61aa40234139 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon.py @@ -407,7 +407,7 @@ def test_auto_materialize_sensor_no_transition(): assert get_has_migrated_to_sensors(instance) - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) @@ -430,7 +430,7 @@ def test_auto_materialize_sensor_no_transition(): result = result.with_current_time_advanced(seconds=30) result = result.evaluate_tick() daemon_sensor_scenario.evaluate_daemon(instance) - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) assert len(sensor_states) == 1 @@ -484,7 +484,7 @@ def test_auto_materialize_sensor_transition(): assert get_has_migrated_to_sensors(instance) - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) @@ -648,7 +648,7 @@ def test_auto_materialize_sensor_ticks(num_threads): instance, threadpool_executor=threadpool_executor ) - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) @@ 
-690,7 +690,7 @@ def test_auto_materialize_sensor_ticks(num_threads): result = result.start_sensor("auto_materialize_sensor_b") result = result.with_current_time_advanced(seconds=15) result = result.evaluate_tick() - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) assert len(sensor_states) == 3 @@ -720,7 +720,7 @@ def test_auto_materialize_sensor_ticks(num_threads): result = result.with_current_time_advanced(seconds=15) result = result.evaluate_tick() - sensor_states = instance.schedule_storage.all_instigator_state( + sensor_states = instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) @@ -834,7 +834,7 @@ def test_auto_materialize_sensor_ticks(num_threads): # than the pre-sensor evaluation ID and they are increasing for each sensor sensor_states = [ sensor_state - for sensor_state in instance.schedule_storage.all_instigator_state( + for sensor_state in instance.schedule_storage.all_instigator_state( # pyright: ignore[reportOptionalMemberAccess] instigator_type=InstigatorType.SENSOR ) ] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon_failure_recovery.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon_failure_recovery.py index 8006b2725946d..69a310e2956a9 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon_failure_recovery.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_asset_daemon_failure_recovery.py @@ -87,7 +87,7 @@ def test_old_tick_not_resumed(daemon_not_paused_instance): debug_crash_flags = {"RUN_CREATED": Exception("OOPS")} - with 
freeze_time(execution_time): + with freeze_time(execution_time): # pyright: ignore[reportArgumentType] error_asset_scenario.do_daemon_scenario( instance, scenario_name="auto_materialize_policy_max_materializations_not_exceeded", @@ -102,10 +102,10 @@ def test_old_tick_not_resumed(daemon_not_paused_instance): assert len(ticks) == 1 assert ticks[0].status == TickStatus.FAILURE assert ticks[0].automation_condition_evaluation_id == 1 - assert ticks[0].timestamp == execution_time.timestamp() + assert ticks[0].timestamp == execution_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] # advancing past MAX_TIME_TO_RESUME_TICK_SECONDS gives up and advances to a new evaluation - execution_time = execution_time + datetime.timedelta( + execution_time = execution_time + datetime.timedelta( # pyright: ignore[reportOptionalOperand] seconds=MAX_TIME_TO_RESUME_TICK_SECONDS + 1 ) @@ -163,7 +163,7 @@ def test_error_loop_before_cursor_written(daemon_not_paused_instance, crash_loca error_asset_scenario = error_asset_scenario._replace(current_time=None) for trial_num in range(3): - test_time = execution_time + datetime.timedelta(seconds=15 * trial_num) + test_time = execution_time + datetime.timedelta(seconds=15 * trial_num) # pyright: ignore[reportOptionalOperand] with freeze_time(test_time): debug_crash_flags = {crash_location: Exception(f"Oops {trial_num}")} @@ -201,7 +201,7 @@ def test_error_loop_before_cursor_written(daemon_not_paused_instance, crash_loca assert not cursor.evaluation_id test_time = test_time + datetime.timedelta(seconds=45) - with freeze_time(test_time): + with freeze_time(test_time): # pyright: ignore[reportArgumentType] # Next successful tick recovers error_asset_scenario.do_daemon_scenario( instance, @@ -215,8 +215,8 @@ def test_error_loop_before_cursor_written(daemon_not_paused_instance, crash_loca assert len(ticks) == 4 assert ticks[0].status == TickStatus.SUCCESS - assert ticks[0].timestamp == test_time.timestamp() - assert 
ticks[0].tick_data.end_timestamp == test_time.timestamp() + assert ticks[0].timestamp == test_time.timestamp() # pyright: ignore[reportAttributeAccessIssue] + assert ticks[0].tick_data.end_timestamp == test_time.timestamp() # pyright: ignore[reportAttributeAccessIssue] assert ticks[0].automation_condition_evaluation_id == 1 # finally finishes runs = instance.get_runs() @@ -245,7 +245,7 @@ def test_error_loop_after_cursor_written(daemon_not_paused_instance, crash_locat last_cursor = None # User code error retries but does not increment the retry count - test_time = execution_time + datetime.timedelta(seconds=15) + test_time = execution_time + datetime.timedelta(seconds=15) # pyright: ignore[reportOptionalOperand] with freeze_time(test_time): debug_crash_flags = {crash_location: DagsterUserCodeUnreachableError("WHERE IS THE CODE")} @@ -435,13 +435,13 @@ def test_asset_daemon_crash_recovery(daemon_not_paused_instance, crash_location) assert len(ticks) == 1 assert ticks[0] assert ticks[0].status == TickStatus.STARTED - assert ticks[0].timestamp == scenario.current_time.timestamp() - assert not ticks[0].tick_data.end_timestamp == scenario.current_time.timestamp() + assert ticks[0].timestamp == scenario.current_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] + assert not ticks[0].tick_data.end_timestamp == scenario.current_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] assert not len(ticks[0].tick_data.run_ids) assert ticks[0].automation_condition_evaluation_id == 1 - freeze_datetime = scenario.current_time + datetime.timedelta(seconds=1) + freeze_datetime = scenario.current_time + datetime.timedelta(seconds=1) # pyright: ignore[reportOptionalOperand] # Run another tick with no crash, daemon continues on and succeeds asset_daemon_process = spawn_ctx.Process( @@ -474,7 +474,7 @@ def test_asset_daemon_crash_recovery(daemon_not_paused_instance, crash_location) assert ticks[0] assert ticks[0].status == TickStatus.SUCCESS assert ( - 
ticks[0].timestamp == scenario.current_time.timestamp() + ticks[0].timestamp == scenario.current_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] if cursor_written else freeze_datetime.timestamp() ) @@ -494,7 +494,7 @@ def test_asset_daemon_crash_recovery(daemon_not_paused_instance, crash_location) def sort_run_key_fn(run): return (min(run.asset_selection), run.tags.get(PARTITION_NAME_TAG)) - sorted_runs = sorted(runs[: len(scenario.expected_run_requests)], key=sort_run_key_fn) + sorted_runs = sorted(runs[: len(scenario.expected_run_requests)], key=sort_run_key_fn) # pyright: ignore[reportArgumentType] evaluations = instance.schedule_storage.get_auto_materialize_asset_evaluations( key=AssetKey("hourly"), limit=100 @@ -545,8 +545,8 @@ def test_asset_daemon_exception_recovery(daemon_not_paused_instance, crash_locat assert len(ticks) == 1 assert ticks[0] assert ticks[0].status == TickStatus.FAILURE - assert ticks[0].timestamp == scenario.current_time.timestamp() - assert ticks[0].tick_data.end_timestamp == scenario.current_time.timestamp() + assert ticks[0].timestamp == scenario.current_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] + assert ticks[0].tick_data.end_timestamp == scenario.current_time.timestamp() # pyright: ignore[reportOptionalMemberAccess] assert ticks[0].automation_condition_evaluation_id == 1 @@ -566,7 +566,7 @@ def test_asset_daemon_exception_recovery(daemon_not_paused_instance, crash_locat cursor = _get_pre_sensor_auto_materialize_cursor(instance, None) assert (cursor.evaluation_id > 0) == cursor_written - freeze_datetime = scenario.current_time + datetime.timedelta(seconds=1) + freeze_datetime = scenario.current_time + datetime.timedelta(seconds=1) # pyright: ignore[reportOptionalOperand] # Run another tick with no failure, daemon continues on and succeeds asset_daemon_process = spawn_ctx.Process( @@ -603,7 +603,7 @@ def test_asset_daemon_exception_recovery(daemon_not_paused_instance, crash_locat def 
sort_run_key_fn(run): return (min(run.asset_selection), run.tags.get(PARTITION_NAME_TAG)) - sorted_runs = sorted(runs[: len(scenario.expected_run_requests)], key=sort_run_key_fn) + sorted_runs = sorted(runs[: len(scenario.expected_run_requests)], key=sort_run_key_fn) # pyright: ignore[reportArgumentType] evaluations = instance.schedule_storage.get_auto_materialize_asset_evaluations( key=AssetKey("hourly"), limit=100 diff --git a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_e2e.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_e2e.py index 675278c567feb..4a664fda6e6c4 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_e2e.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_e2e.py @@ -268,7 +268,7 @@ def test_checks_and_assets_in_same_run() -> None: assert _get_runs_for_latest_ticks(context) == [] with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # nothing happening yet, as parent hasn't updated assert _get_latest_evaluation_ids(context) == {1} @@ -281,7 +281,7 @@ def test_checks_and_assets_in_same_run() -> None: AssetMaterialization(asset_key=AssetKey("processed_files")) ) - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # should just request the check assert _get_latest_evaluation_ids(context) == {2} @@ -300,7 +300,7 @@ def test_checks_and_assets_in_same_run() -> None: AssetMaterialization(asset_key=AssetKey("raw_files")) ) - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # should create a single run request targeting both the downstream asset # and the associated check @@ -326,7 +326,7 @@ def test_cross_location_checks() -> None: 
assert _get_runs_for_latest_ticks(context) == [] with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # nothing happening yet, as parent hasn't updated assert _get_latest_evaluation_ids(context) == {1, 2} @@ -339,7 +339,7 @@ def test_cross_location_checks() -> None: AssetMaterialization(asset_key=AssetKey("processed_files")) ) - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # should request both checks on processed_files, but one of the checks # is in a different code location, so two separate runs should be created @@ -364,7 +364,7 @@ def test_cross_location_checks() -> None: AssetMaterialization(asset_key=AssetKey("raw_files")) ) - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # should create a single run request targeting both the downstream asset # and the associated check -- the check in the other location cannot @@ -380,7 +380,7 @@ def test_cross_location_checks() -> None: time += datetime.timedelta(seconds=30) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # now, after processed_files gets materialized, the no_nulls check # can be executed (and row_count also gets executed again because @@ -407,7 +407,7 @@ def test_default_condition() -> None: get_threadpool_executor() as executor, ): with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # eager asset materializes runs = _get_runs_for_latest_ticks(context) @@ -416,7 +416,7 @@ def test_default_condition() -> None: time += datetime.timedelta(seconds=60) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # passed a cron tick, so cron asset materializes runs = 
_get_runs_for_latest_ticks(context) @@ -432,7 +432,7 @@ def test_non_subsettable_check() -> None: ): time = datetime.datetime(2024, 8, 17, 1, 35) with freeze_time(time): - _execute_ticks(context, executor, submit_executor) + _execute_ticks(context, executor, submit_executor) # pyright: ignore[reportArgumentType] # eager asset materializes runs = _get_runs_for_latest_ticks(context) @@ -490,7 +490,7 @@ def test_backfill_creation_simple(location: str) -> None: # all start off missing, should be requested time = get_current_datetime() with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] backfills = _get_backfills_for_latest_ticks(context) assert len(backfills) == 1 subsets_by_key = _get_subsets_by_key(backfills[0], asset_graph) @@ -515,7 +515,7 @@ def test_backfill_creation_simple(location: str) -> None: time += datetime.timedelta(seconds=30) with freeze_time(time): # second tick, don't kick off again - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] backfills = _get_backfills_for_latest_ticks(context) assert len(backfills) == 0 # still don't create runs @@ -538,7 +538,7 @@ def test_backfill_with_runs_and_checks() -> None: # all start off missing, should be requested time = get_current_datetime() with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] # create a backfill for the part of the graph that has multiple partitions # required backfills = _get_backfills_for_latest_ticks(context) @@ -577,7 +577,7 @@ def test_backfill_with_runs_and_checks() -> None: time += datetime.timedelta(seconds=30) with freeze_time(time): # second tick, don't kick off again - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] backfills = _get_backfills_for_latest_ticks(context) assert len(backfills) == 0 @@ -619,7 
+619,7 @@ def test_toggle_user_code() -> None: time += datetime.timedelta(seconds=35) with freeze_time(time): # first tick, nothing happened - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 @@ -627,14 +627,14 @@ def test_toggle_user_code() -> None: with freeze_time(time): # second tick, root gets updated instance.report_runless_asset_event(AssetMaterialization("root")) - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert runs[0].asset_selection == {AssetKey("downstream")} time += datetime.timedelta(seconds=35) with freeze_time(time): # third tick, don't kick off again - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 @@ -649,19 +649,19 @@ def test_custom_condition() -> None: # custom condition only materializes on the 5th tick for _ in range(4): with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 time += datetime.timedelta(minutes=1) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 1 time += datetime.timedelta(minutes=1) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 @@ -676,7 +676,7 @@ def test_500_eager_assets_user_code(capsys) -> None: for _ in range(2): clock_time = time.time() with freeze_time(freeze_dt): - _execute_ticks(context, executor) + _execute_ticks(context, 
executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 duration = time.time() - clock_time @@ -700,7 +700,7 @@ def test_fail_if_not_use_sensors(capsys) -> None: ) as context, get_threadpool_executor() as executor, ): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] latest_ticks = _get_latest_ticks(context.create_request_context()) assert len(latest_ticks) == 1 # no failure @@ -719,7 +719,7 @@ def test_simple_old_code_server() -> None: time = datetime.datetime(2024, 8, 16, 1, 35) with freeze_time(time): # initial evaluation - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 1 @@ -731,19 +731,19 @@ def test_observable_source_asset() -> None: ): time = datetime.datetime(2024, 8, 16, 1, 35) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 time += datetime.timedelta(hours=1) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 1 assert runs[0].asset_selection == {AssetKey("obs"), AssetKey("mat")} time += datetime.timedelta(minutes=1) with freeze_time(time): - _execute_ticks(context, executor) + _execute_ticks(context, executor) # pyright: ignore[reportArgumentType] runs = _get_runs_for_latest_ticks(context) assert len(runs) == 0 diff --git a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_failure_recovery.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_failure_recovery.py index 275838244e0d8..3566771a9d29d 100644 --- 
a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_failure_recovery.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/daemon_tests/test_failure_recovery.py @@ -39,7 +39,7 @@ def _execute( with freeze_time(evaluation_time): _execute_ticks( context, - executor, + executor, # pyright: ignore[reportArgumentType] submit_executor, { crash_location: get_terminate_signal() if terminate else Exception("Oops!"), diff --git a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/base_scenario.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/base_scenario.py index 301a5030c58a8..e906f04b78396 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/base_scenario.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/base_scenario.py @@ -260,7 +260,7 @@ def do_sensor_scenario( with freeze_time(test_time): - @repository + @repository # pyright: ignore[reportArgumentType] def repo(): return self.assets @@ -268,7 +268,7 @@ def repo(): for dagster_run in self.dagster_runs or []: instance.add_run(dagster_run) # make sure to log the planned events - for asset_key in dagster_run.asset_selection: + for asset_key in dagster_run.asset_selection: # pyright: ignore[reportOptionalIterable] event = DagsterEvent( event_type_value=DagsterEventType.ASSET_MATERIALIZATION_PLANNED.value, job_name=dagster_run.job_name, @@ -292,7 +292,7 @@ def repo(): else: target_subset = AssetGraphSubset( partitions_subsets_by_asset_key={}, - non_partitioned_asset_keys=target, + non_partitioned_asset_keys=target, # pyright: ignore[reportArgumentType] ) empty_subset = AssetGraphSubset( partitions_subsets_by_asset_key={}, @@ -321,9 +321,9 @@ def repo(): if self.cursor_from is not None: - @repository + @repository # 
pyright: ignore[reportArgumentType] def prior_repo(): - return self.cursor_from.assets + return self.cursor_from.assets # pyright: ignore[reportOptionalMemberAccess] ( run_requests, @@ -359,7 +359,7 @@ def test_time_fn(): instance=instance, assets=[ a - for a in self.assets + for a in self.assets # pyright: ignore[reportOptionalIterable] if isinstance(a, SourceAsset) and a.key in run.asset_keys ], ) @@ -367,7 +367,7 @@ def test_time_fn(): do_run( asset_keys=run.asset_keys, partition_key=run.partition_key, - all_assets=self.assets, + all_assets=self.assets, # pyright: ignore[reportArgumentType] instance=instance, failed_asset_keys=run.failed_asset_keys, ) @@ -395,7 +395,7 @@ def test_time_fn(): with mock.patch.object( DagsterInstance, "auto_materialize_respect_materialization_data_versions", - new=lambda: self.respect_materialization_data_versions, + new=lambda: self.respect_materialization_data_versions, # pyright: ignore[reportAttributeAccessIssue] ): run_requests, cursor, evaluations = AutomationTickEvaluationContext( evaluation_id=cursor.evaluation_id + 1, @@ -415,7 +415,7 @@ def test_time_fn(): ).evaluate() for run_request in run_requests: - base_job = repo.get_implicit_job_def_for_assets(run_request.asset_selection) + base_job = repo.get_implicit_job_def_for_assets(run_request.asset_selection) # pyright: ignore[reportArgumentType] assert base_job is not None return run_requests, cursor, evaluations diff --git a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/scenario_state.py b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/scenario_state.py index 8548d1fd55034..04e5e639b491f 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/scenario_state.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/declarative_automation_tests/scenario_utils/scenario_state.py @@ -190,8 +190,10 @@ def noop(): 
... ) for check_spec in self.check_specs: - @asset_check( - asset=check_spec.asset_key, name=check_spec.key.name, blocking=check_spec.blocking + @asset_check( # pyright: ignore[reportArgumentType] + asset=check_spec.asset_key, + name=check_spec.key.name, + blocking=check_spec.blocking, ) def _check(): ... diff --git a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator.py b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator.py index f9340b56f4878..09b0151444fa4 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator.py @@ -82,7 +82,7 @@ def _check() -> AssetCheckResult: def test_asset_check_decorator_docstring_description() -> None: - @asset_check(asset="asset1") + @asset_check(asset="asset1") # pyright: ignore[reportArgumentType] def check1(): """Docstring.""" pass @@ -94,7 +94,7 @@ def check1(): def test_asset_check_decorator_parameter_description() -> None: - @asset_check(asset="asset1", description="parameter") + @asset_check(asset="asset1", description="parameter") # pyright: ignore[reportArgumentType] def check1(): """Docstring.""" diff --git a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator_secondary_assets.py b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator_secondary_assets.py index dc00f1ab21300..d06db18106ede 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator_secondary_assets.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_asset_check_decorator_secondary_assets.py @@ -66,7 +66,7 @@ def test_additional_deps_overlap(): ), ): - @asset_check(asset=asset1, additional_deps=[asset1]) + 
@asset_check(asset=asset1, additional_deps=[asset1]) # pyright: ignore[reportArgumentType] def check1(asset_1): pass @@ -78,7 +78,7 @@ def check1(asset_1): ), ): - @asset_check(asset=asset1, additional_deps=[asset1]) + @asset_check(asset=asset1, additional_deps=[asset1]) # pyright: ignore[reportArgumentType] def check2(): pass @@ -92,7 +92,7 @@ def test_additional_ins_overlap(): ), ): - @asset_check(asset=asset1, additional_ins={"asset_1": AssetIn("asset1")}) + @asset_check(asset=asset1, additional_ins={"asset_1": AssetIn("asset1")}) # pyright: ignore[reportArgumentType] def check1(asset_1): pass @@ -103,8 +103,10 @@ def test_additional_ins_and_deps_overlap(): match=re.escape("deps value AssetKey(['asset2']) also declared as input/AssetIn"), ): - @asset_check( - asset=asset1, additional_ins={"asset_2": AssetIn("asset2")}, additional_deps=[asset2] + @asset_check( # pyright: ignore[reportArgumentType] + asset=asset1, + additional_ins={"asset_2": AssetIn("asset2")}, + additional_deps=[asset2], ) def check1(asset_2): pass diff --git a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_op.py b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_op.py index 7c5e60dfaee26..ef0a196e4c8c1 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_op.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_op.py @@ -225,13 +225,13 @@ def non_solid_func(): DagsterInvalidDefinitionError, match="You have passed a lambda or function non_solid_func", ): - GraphDefinition(node_defs=[non_solid_func], name="test") + GraphDefinition(node_defs=[non_solid_func], name="test") # pyright: ignore[reportArgumentType] with pytest.raises( DagsterInvalidDefinitionError, match="You have passed a lambda or function ", ): - GraphDefinition(node_defs=[lambda x: x], name="test") + GraphDefinition(node_defs=[lambda x: x], name="test") # pyright: ignore[reportArgumentType] def 
test_descriptions(): @@ -325,7 +325,7 @@ def the_graph(): assert comp_graph.__name__ == "comp_graph" assert the_job.__doc__ == "THE_DOCSTRING." assert the_job.description == "THE_DOCSTRING." - assert the_job.__name__ == "the_job" + assert the_job.__name__ == "the_job" # pyright: ignore[reportAttributeAccessIssue] assert the_op.__doc__ == "OP_DOCSTRING." assert the_op.description == "OP_DOCSTRING." assert the_op.__name__ == "the_op" @@ -441,7 +441,7 @@ def my_graph(): def test_ins_dagster_types(): - assert In(dagster_type=None) + assert In(dagster_type=None) # pyright: ignore[reportArgumentType] assert In(dagster_type=int) assert In(dagster_type=List) assert In(dagster_type=List[int]) # typing type @@ -877,13 +877,13 @@ def yielding_op(context): assert log.user_message == "A log" first = relevant_event_logs[0] - assert first.dagster_event.event_specific_data.materialization.label == "first" + assert first.dagster_event.event_specific_data.materialization.label == "first" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] second = relevant_event_logs[1] - assert second.dagster_event.event_specific_data.materialization.label == "second" + assert second.dagster_event.event_specific_data.materialization.label == "second" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] third = relevant_event_logs[2] - assert third.dagster_event.event_specific_data.materialization.label == "third" + assert third.dagster_event.event_specific_data.materialization.label == "third" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert second.timestamp - first.timestamp >= 1 assert log.timestamp - first.timestamp >= 1 @@ -899,8 +899,8 @@ def basic(context): assert result.success assert result.output_for_node("basic") == "baz" events = result.events_for_node("basic") - assert len(events[1].event_specific_data.metadata) == 1 - assert events[1].event_specific_data.metadata["foo"].text == "bar" + assert 
len(events[1].event_specific_data.metadata) == 1 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert events[1].event_specific_data.metadata["foo"].text == "bar" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_metadata_logging_multiple_entries(): @@ -913,9 +913,9 @@ def basic(context): result = execute_op_in_graph(basic) assert result.success events = result.events_for_node("basic") - assert len(events[1].event_specific_data.metadata) == 2 - assert events[1].event_specific_data.metadata["foo"].text == "second_value" - assert events[1].event_specific_data.metadata["boo"].text == "bot" + assert len(events[1].event_specific_data.metadata) == 2 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert events[1].event_specific_data.metadata["foo"].text == "second_value" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert events[1].event_specific_data.metadata["boo"].text == "bot" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_log_event_multi_output(): @@ -946,8 +946,8 @@ def the_op(context): first_output_event = events[1] second_output_event = events[3] - assert "foo" in first_output_event.event_specific_data.metadata - assert "bar" in second_output_event.event_specific_data.metadata + assert "foo" in first_output_event.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert "bar" in second_output_event.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_log_metadata_after_output(): @@ -982,17 +982,17 @@ def the_op(context): assert result.success events = result.all_node_events output_event_one = events[1] - assert output_event_one.event_specific_data.mapping_key == "one" - assert "one" in output_event_one.event_specific_data.metadata + assert output_event_one.event_specific_data.mapping_key == "one" # 
pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert "one" in output_event_one.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] output_event_two = events[3] - assert output_event_two.event_specific_data.mapping_key == "two" - assert "two" in output_event_two.event_specific_data.metadata + assert output_event_two.event_specific_data.mapping_key == "two" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert "two" in output_event_two.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] output_event_three = events[5] - assert output_event_three.event_specific_data.mapping_key == "three" - assert "three" in output_event_three.event_specific_data.metadata + assert output_event_three.event_specific_data.mapping_key == "three" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert "three" in output_event_three.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] output_event_four = events[7] - assert output_event_four.event_specific_data.mapping_key == "four" - assert "four" in output_event_four.event_specific_data.metadata + assert output_event_four.event_specific_data.mapping_key == "four" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert "four" in output_event_four.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_log_metadata_after_dynamic_output(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_schedule.py b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_schedule.py index 4c7913b2216cd..f123e311b51c7 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_schedule.py +++ 
b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_schedule.py @@ -172,11 +172,11 @@ def bad_cron_string_three(context): def test_schedule_with_nested_tags(): nested_tags = {"foo": {"bar": "baz"}} - @schedule(cron_schedule="* * * * *", job_name="foo_job", tags=nested_tags) + @schedule(cron_schedule="* * * * *", job_name="foo_job", tags=nested_tags) # pyright: ignore[reportArgumentType] def my_tag_schedule(): return {} - assert my_tag_schedule.evaluate_tick( + assert my_tag_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript] build_schedule_context(scheduled_execution_time=get_current_datetime()) )[0][0].tags == merge_dicts( {key: json.dumps(val) for key, val in nested_tags.items()}, @@ -202,7 +202,7 @@ def my_tag_schedule(): ) assert warning.filename.endswith("test_schedule.py") - assert my_tag_schedule.evaluate_tick( + assert my_tag_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript] build_schedule_context(scheduled_execution_time=get_current_datetime()) )[0][0].tags == merge_dicts(tags, {"dagster/schedule_name": "my_tag_schedule"}) @@ -262,8 +262,8 @@ def foo_schedule(context): # test direct invocation run_request = foo_schedule(context_without_time) - assert run_request.run_config == FOO_CONFIG - assert run_request.tags.get("foo") == "FOO" + assert run_request.run_config == FOO_CONFIG # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert run_request.tags.get("foo") == "FOO" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_request_based_schedule_no_context(): @@ -298,8 +298,8 @@ def foo_schedule(): # test direct invocation run_request = foo_schedule() - assert run_request.run_config == FOO_CONFIG - assert run_request.tags.get("foo") == "FOO" + assert run_request.run_config == FOO_CONFIG # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert run_request.tags.get("foo") == "FOO" # pyright: 
ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_config_based_schedule(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_source_asset_decorator.py b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_source_asset_decorator.py index 46c82d77958fc..172cb400a7c29 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_source_asset_decorator.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/decorators_tests/test_source_asset_decorator.py @@ -13,7 +13,7 @@ def test_all_fields(): StaticPartitionsDefinition(["a", "b", "c", "d"]) - @io_manager(required_resource_keys={"baz"}) + @io_manager(required_resource_keys={"baz"}) # pyright: ignore[reportArgumentType] def foo_manager(): pass @@ -91,7 +91,7 @@ def key_prefix_and_key_specified(): ... match="Cannot specify a name or key prefix for @observable_source_asset when the key argument is provided", ): - @observable_source_asset(name=["peach"], key=["peach", "nectarine"]) + @observable_source_asset(name=["peach"], key=["peach", "nectarine"]) # pyright: ignore[reportArgumentType] def name_and_key_specified(): ... 
diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_evaluation.py b/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_evaluation.py index 887453d81fa09..0f93bddf3994a 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_evaluation.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_evaluation.py @@ -8,4 +8,4 @@ def test_backcompat(): ' "AssetCheckSeverity.ERROR"}, "success": true, "target_materialization_data": null}' ) v = deserialize_value(old_value) - assert v.passed + assert v.passed # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_spec.py b/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_spec.py index 0740e10c213d3..510bf89e6a114 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_spec.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_asset_check_spec.py @@ -36,4 +36,4 @@ class SomeObject: ... 
obj = SomeObject() - assert AssetCheckSpec(asset="foo", name="check1", metadata={"foo": obj}).metadata["foo"] == obj + assert AssetCheckSpec(asset="foo", name="check1", metadata={"foo": obj}).metadata["foo"] == obj # pyright: ignore[reportOptionalSubscript] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_backfill_policy.py b/python_modules/dagster/dagster_tests/definitions_tests/test_backfill_policy.py index b8c9df26d83dd..d9c1b710a4347 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_backfill_policy.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_backfill_policy.py @@ -7,4 +7,4 @@ def test_type(): assert BackfillPolicy.single_run().policy_type == BackfillPolicyType.SINGLE_RUN assert BackfillPolicy.multi_run().policy_type == BackfillPolicyType.MULTI_RUN with pytest.raises(ParameterCheckError): - BackfillPolicy.multi_run(max_partitions_per_run=None) + BackfillPolicy.multi_run(max_partitions_per_run=None) # pyright: ignore[reportArgumentType] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_composition.py b/python_modules/dagster/dagster_tests/definitions_tests/test_composition.py index a20b0e8814087..7311d7ea7146a 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_composition.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_composition.py @@ -324,7 +324,7 @@ def wrap_mult(): @graph def mult_graph(): - one, two = wrap_mult() + one, two = wrap_mult() # pyright: ignore[reportGeneralTypeIssues] echo.alias("echo_one")(one) echo.alias("echo_two")(two) @@ -341,7 +341,7 @@ def wrap_mult(): @graph def mult_graph(): - x, y = wrap_mult() + x, y = wrap_mult() # pyright: ignore[reportGeneralTypeIssues] echo.alias("echo_x")(x) echo.alias("echo_y")(y) @@ -860,12 +860,12 @@ def a_op(_): @job def a_job(): - a_op.with_hooks(hook_defs={test_hook}).alias("hook_alias_tag").tag({"pos": 3})() - a_op.with_hooks(hook_defs={test_hook}).tag({"pos": 
2}).alias("hook_tag_alias")() - a_op.alias("alias_tag_hook").tag({"pos": 2}).with_hooks(hook_defs={test_hook})() - a_op.alias("alias_hook_tag").with_hooks(hook_defs={test_hook}).tag({"pos": 3})() - a_op.tag({"pos": 1}).with_hooks(hook_defs={test_hook}).alias("tag_hook_alias")() - a_op.tag({"pos": 1}).alias("tag_alias_hook").with_hooks(hook_defs={test_hook})() + a_op.with_hooks(hook_defs={test_hook}).alias("hook_alias_tag").tag({"pos": 3})() # pyright: ignore[reportArgumentType] + a_op.with_hooks(hook_defs={test_hook}).tag({"pos": 2}).alias("hook_tag_alias")() # pyright: ignore[reportArgumentType] + a_op.alias("alias_tag_hook").tag({"pos": 2}).with_hooks(hook_defs={test_hook})() # pyright: ignore[reportArgumentType] + a_op.alias("alias_hook_tag").with_hooks(hook_defs={test_hook}).tag({"pos": 3})() # pyright: ignore[reportArgumentType] + a_op.tag({"pos": 1}).with_hooks(hook_defs={test_hook}).alias("tag_hook_alias")() # pyright: ignore[reportArgumentType] + a_op.tag({"pos": 1}).alias("tag_alias_hook").with_hooks(hook_defs={test_hook})() # pyright: ignore[reportArgumentType] result = a_job.execute_in_process(raise_on_error=False) assert result.success diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_definition_errors.py b/python_modules/dagster/dagster_tests/definitions_tests/test_definition_errors.py index 2c401146d46fa..384f381093cea 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_definition_errors.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_definition_errors.py @@ -36,7 +36,7 @@ def solid_a_b_list(): def test_create_job_with_bad_ops_list(): with pytest.raises(ParameterCheckError, match=r'Param "node_defs" is not a Sequence'): - GraphDefinition(name="a_pipeline", node_defs=create_stub_op("stub", [{"a key": "a value"}])) + GraphDefinition(name="a_pipeline", node_defs=create_stub_op("stub", [{"a key": "a value"}])) # pyright: ignore[reportArgumentType] def test_circular_dep(): @@ -103,7 +103,7 @@ 
def test_invalid_item_in_op_list(): DagsterInvalidDefinitionError, match="Invalid item in node list: 'not_a_op'" ): GraphDefinition( - node_defs=["not_a_op"], + node_defs=["not_a_op"], # pyright: ignore[reportArgumentType] name="test", ) @@ -116,7 +116,7 @@ def test_one_layer_off_dependencies(): GraphDefinition( node_defs=solid_a_b_list(), name="test", - dependencies={"B": DependencyDefinition("A")}, + dependencies={"B": DependencyDefinition("A")}, # pyright: ignore[reportArgumentType] ) @@ -128,7 +128,7 @@ def test_malformed_dependencies(): GraphDefinition( node_defs=solid_a_b_list(), name="test", - dependencies={"B": {"b_input": {"b_input": DependencyDefinition("A")}}}, + dependencies={"B": {"b_input": {"b_input": DependencyDefinition("A")}}}, # pyright: ignore[reportArgumentType] ) @@ -137,7 +137,7 @@ def test_list_dependencies(): DagsterInvalidDefinitionError, match=r'The expected type for "dependencies" is Union\[Mapping\[', ): - GraphDefinition(node_defs=solid_a_b_list(), name="test", dependencies=[]) + GraphDefinition(node_defs=solid_a_b_list(), name="test", dependencies=[]) # pyright: ignore[reportArgumentType] def test_pass_unrelated_type_to_field_error_op_definition(): @@ -193,7 +193,7 @@ def test_bad_out(): "got foo." ), ): - _output = Out("foo") + _output = Out("foo") # pyright: ignore[reportArgumentType] # Test the case where the object is not hashable with pytest.raises( @@ -204,7 +204,7 @@ def test_bad_out(): "Did you pass an instance of a type instead of the type?" ), ): - _output = Out({"foo": "bar"}) + _output = Out({"foo": "bar"}) # pyright: ignore[reportArgumentType] # Test the case where the object throws in __nonzero__, e.g. 
pandas.DataFrame class Exotic: @@ -215,7 +215,7 @@ def __nonzero__(self): DagsterInvalidDefinitionError, match="Invalid type: dagster_type must be an instance of DagsterType or a Python type", ): - _output = Out(Exotic()) + _output = Out(Exotic()) # pyright: ignore[reportArgumentType] def test_op_tags(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions.py b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions.py index b1de489e4a1be..4f86014677828 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions.py @@ -35,7 +35,7 @@ def produce_string(): @op( ins={"input_one": In(String)}, - out=Out(Any), + out=Out(Any), # pyright: ignore[reportArgumentType] config_schema={"another_field": Int}, ) def op_one(_context, input_one): @@ -123,7 +123,7 @@ def produce_string(): @op( ins={"input_one": In(String)}, - out=Out(Any), + out=Out(Any), # pyright: ignore[reportArgumentType] config_schema={"another_field": Int}, ) def op_one(_context, input_one): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_class.py b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_class.py index 33b4a923ffccf..9131b5a353d29 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_class.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_class.py @@ -279,7 +279,7 @@ def one(): def test_bad_executor(): with pytest.raises(CheckError): # ignoring type to catch runtime error - Definitions(executor="not an executor") + Definitions(executor="not an executor") # pyright: ignore[reportArgumentType] def test_custom_executor_in_definitions(): @@ -321,13 +321,13 @@ def a_logger(_): with pytest.raises(CheckError): # ignore type to catch runtime error - Definitions(loggers={1: a_logger}) + Definitions(loggers={1: a_logger}) # pyright: 
ignore[reportArgumentType] def test_bad_logger_value(): with pytest.raises(CheckError): # ignore type to catch runtime error - Definitions(loggers={"not_a_logger": "not_a_logger"}) + Definitions(loggers={"not_a_logger": "not_a_logger"}) # pyright: ignore[reportArgumentType] def test_kitchen_sink_on_create_helper_and_definitions(): @@ -568,7 +568,7 @@ def retries(self): ... assert isinstance(job, JobDefinition) # ignore typecheck because we know our implementation doesn't use the context - assert job.executor_def.executor_creation_fn(None) is executor_inst + assert job.executor_def.executor_creation_fn(None) is executor_inst # pyright: ignore[reportArgumentType,reportOptionalCall] def test_assets_with_io_manager(): @@ -1074,11 +1074,11 @@ def the_schedule(): assert len(list(underlying_repo.schedule_defs)) == 1 # properties on the definitions object do not dedupe - assert len(defs.assets) == 2 - assert len(defs.asset_checks) == 2 - assert len(defs.jobs) == 2 - assert len(defs.sensors) == 2 - assert len(defs.schedules) == 2 + assert len(defs.assets) == 2 # pyright: ignore[reportArgumentType] + assert len(defs.asset_checks) == 2 # pyright: ignore[reportArgumentType] + assert len(defs.jobs) == 2 # pyright: ignore[reportArgumentType] + assert len(defs.sensors) == 2 # pyright: ignore[reportArgumentType] + assert len(defs.schedules) == 2 # pyright: ignore[reportArgumentType] def test_definitions_class_metadata(): @@ -1088,7 +1088,7 @@ def test_definitions_class_metadata(): def test_assets_def_with_only_checks(): - @asset_check(asset="asset1") + @asset_check(asset="asset1") # pyright: ignore[reportArgumentType] def check1(): pass diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_load_context.py b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_load_context.py index c7254af588bbc..4a4d6f32b158d 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_load_context.py +++ 
b/python_modules/dagster/dagster_tests/definitions_tests/test_definitions_load_context.py @@ -119,7 +119,7 @@ def test_invalid_reconstruction_metadata(): with pytest.raises( DagsterInvariantViolationError, match=r"Reconstruction metadata values must be strings" ): - Definitions().with_reconstruction_metadata({"foo": {"not": "a string"}}) + Definitions().with_reconstruction_metadata({"foo": {"not": "a string"}}) # pyright: ignore[reportArgumentType] def test_default_global_context(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_dynamic_partitions.py b/python_modules/dagster/dagster_tests/definitions_tests/test_dynamic_partitions.py index 23496dc5dbb87..4399d5f6f4c4a 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_dynamic_partitions.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_dynamic_partitions.py @@ -75,7 +75,7 @@ def my_asset(): assert materialize([my_asset], instance=instance, partition_key="a").success materialization = instance.get_latest_materialization_event(AssetKey("my_asset")) assert materialization - assert materialization.dagster_event.partition == "a" + assert materialization.dagster_event.partition == "a" # pyright: ignore[reportOptionalMemberAccess] with pytest.raises(CheckError): partitions_def.get_partition_keys() @@ -97,7 +97,7 @@ def asset2(context): assert context.asset_keys_for_input() == ["apple"] with instance_for_test() as instance: - instance.add_dynamic_partitions(partitions_def.name, ["apple"]) + instance.add_dynamic_partitions(partitions_def.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize_to_memory([asset1], instance=instance, partition_key="apple") @@ -127,7 +127,7 @@ def asset2(context, asset1): return asset1 with instance_for_test() as instance: - instance.add_dynamic_partitions(partitions_def.name, ["apple"]) + instance.add_dynamic_partitions(partitions_def.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [asset1, 
asset2], @@ -164,7 +164,7 @@ def unpartitioned(context, dynamic1): return 1 with instance_for_test() as instance: - instance.add_dynamic_partitions(partitions_def.name, ["apple"]) + instance.add_dynamic_partitions(partitions_def.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize([dynamic1, dynamic2, unpartitioned], instance=instance, partition_key="apple") @@ -188,7 +188,7 @@ def unpartitioned(context, dynamic1): return 1 with instance_for_test() as instance: - instance.add_dynamic_partitions(partitions_def.name, partitions) + instance.add_dynamic_partitions(partitions_def.name, partitions) # pyright: ignore[reportArgumentType] for partition in partitions[:-1]: materialize([dynamic1], instance=instance, partition_key=partition) @@ -200,7 +200,7 @@ def test_has_partition_key(): partitions_def = DynamicPartitionsDefinition(name="fruits") with instance_for_test() as instance: - instance.add_dynamic_partitions(partitions_def.name, ["apple", "banana"]) + instance.add_dynamic_partitions(partitions_def.name, ["apple", "banana"]) # pyright: ignore[reportArgumentType] assert partitions_def.has_partition_key("apple", dynamic_partitions_store=instance) assert partitions_def.has_partition_key("banana", dynamic_partitions_store=instance) assert not partitions_def.has_partition_key("peach", dynamic_partitions_store=instance) diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_executor_definition.py b/python_modules/dagster/dagster_tests/definitions_tests/test_executor_definition.py index ca95a8bf066b0..4f37e89e1158b 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_executor_definition.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_executor_definition.py @@ -59,7 +59,7 @@ def test_executor(init_context): return InProcessExecutor( # shouldn't need to .get() here - issue with defaults in config setup - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # 
pyright: ignore[reportArgumentType] marker_to_close=None, ) @@ -88,7 +88,7 @@ def test_executor(init_context): return InProcessExecutor( # shouldn't need to .get() here - issue with defaults in config setup - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # pyright: ignore[reportArgumentType] marker_to_close=None, ) @@ -117,7 +117,7 @@ def test_executor(init_context): return InProcessExecutor( # shouldn't need to .get() here - issue with defaults in config setup - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # pyright: ignore[reportArgumentType] marker_to_close=None, ) @@ -156,7 +156,7 @@ def test_executor(init_context): return InProcessExecutor( # shouldn't need to .get() here - issue with defaults in config setup - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # pyright: ignore[reportArgumentType] marker_to_close=None, ) @@ -164,7 +164,7 @@ def test_executor(init_context): {"value": "secret testing value!!"}, "configured_test_executor" ) - assert test_executor_configured.get_requirements(None) == test_executor.get_requirements(None) + assert test_executor_configured.get_requirements(None) == test_executor.get_requirements(None) # pyright: ignore[reportArgumentType] return get_job_for_executor(test_executor_configured) @@ -182,14 +182,14 @@ def test_executor(init_context): return InProcessExecutor( # shouldn't need to .get() here - issue with defaults in config setup - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # pyright: ignore[reportArgumentType] marker_to_close=None, ) test_executor_configured = test_executor.configured( {"value": "secret testing value!!"}, "configured_test_executor" ) - assert test_executor_configured.get_requirements(None) == test_executor.get_requirements(None) + assert test_executor_configured.get_requirements(None) == 
test_executor.get_requirements(None) # pyright: ignore[reportArgumentType] return get_job_for_executor(test_executor_configured) @@ -230,7 +230,7 @@ def needs_config(_): from dagster._core.executor.in_process import InProcessExecutor return InProcessExecutor( - retries=RetryMode.from_config({"enabled": {}}), + retries=RetryMode.from_config({"enabled": {}}), # pyright: ignore[reportArgumentType] marker_to_close=None, ) @@ -270,7 +270,7 @@ def test_failing_executor_initialization(): assert event_records[0].dagster_event_type == DagsterEventType.RUN_FAILURE run = instance.get_run_by_id(result.run_id) - assert run.tags[RUN_FAILURE_REASON_TAG] == RunFailureReason.JOB_INITIALIZATION_FAILURE.value + assert run.tags[RUN_FAILURE_REASON_TAG] == RunFailureReason.JOB_INITIALIZATION_FAILURE.value # pyright: ignore[reportOptionalMemberAccess] def test_multiprocess_executor_default(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_logger_invocation.py b/python_modules/dagster/dagster_tests/definitions_tests/test_logger_invocation.py index 841e9379c9bf9..47ef9746b4519 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_logger_invocation.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_logger_invocation.py @@ -133,7 +133,7 @@ def sample_graph(): def test_logger_job_def(): - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def job_logger(init_context): assert init_context.job_def.name == "sample_job" diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_materialize_result.py b/python_modules/dagster/dagster_tests/definitions_tests/test_materialize_result.py index 8b62bc7aae50b..bd7bb71b5c6e5 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_materialize_result.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_materialize_result.py @@ -78,7 +78,7 @@ def ret_two(): # direct invocation direct_results = ret_two() - assert 
len(direct_results) == 2 + assert len(direct_results) == 2 # pyright: ignore[reportArgumentType] def test_return_materialization_with_asset_checks(): @@ -117,8 +117,8 @@ def outs_multi_asset(): assert materialize([outs_multi_asset]).success res = outs_multi_asset() - assert res[0].metadata["foo"] == "bar" - assert res[1].metadata["baz"] == "qux" + assert res[0].metadata["foo"] == "bar" # pyright: ignore[reportIndexIssue] + assert res[1].metadata["baz"] == "qux" # pyright: ignore[reportIndexIssue] @multi_asset(specs=[AssetSpec(["prefix", "one"]), AssetSpec(["prefix", "two"])]) def specs_multi_asset(): @@ -129,8 +129,8 @@ def specs_multi_asset(): assert materialize([specs_multi_asset]).success res = specs_multi_asset() - assert res[0].metadata["foo"] == "bar" - assert res[1].metadata["baz"] == "qux" + assert res[0].metadata["foo"] == "bar" # pyright: ignore[reportIndexIssue] + assert res[1].metadata["baz"] == "qux" # pyright: ignore[reportIndexIssue] def test_return_materialization_multi_asset(): @@ -156,7 +156,7 @@ def multi(): assert "two" in mats[1].metadata assert mats[1].tags - direct_results = list(multi()) + direct_results = list(multi()) # pyright: ignore[reportArgumentType] assert len(direct_results) == 2 # @@ -182,7 +182,7 @@ def missing(): DagsterInvariantViolationError, match='Invocation of op "missing" did not return an output for non-optional output "two"', ): - list(missing()) + list(missing()) # pyright: ignore[reportArgumentType] # # missing asset_key @@ -212,7 +212,7 @@ def no_key(): " asset_key, options are:" ), ): - list(no_key()) + list(no_key()) # pyright: ignore[reportArgumentType] # # return tuple success @@ -239,7 +239,7 @@ def ret_multi(): assert mats[1].tags res = ret_multi() - assert len(res) == 2 + assert len(res) == 2 # pyright: ignore[reportArgumentType] # # return list error @@ -390,7 +390,7 @@ def handle_output(self, context, obj): self.handle_output_calls += 1 def load_input(self, context): - self.load_input_calls += 1 + 
self.load_input_calls += 1 # pyright: ignore[reportAttributeAccessIssue] def reset(self): self.handle_output_calls = 0 @@ -448,7 +448,7 @@ def generator_asset() -> Generator[MaterializeResult, None, None]: yield MaterializeResult(metadata={"foo": "bar"}) _exec_asset(generator_asset, resources={"io_manager": io_mgr}) - io_mgr.handle_output_calls == 0 + io_mgr.handle_output_calls == 0 # pyright: ignore[reportUnusedExpression] def test_materialize_result_implicit_output_typing(): @@ -513,7 +513,7 @@ def generator_asset() -> Generator[MaterializeResult, None, None]: assert len(res) == 1 assert res[0].metadata["foo"].value == "bar" - res = list(generator_asset()) + res = list(generator_asset()) # pyright: ignore[reportArgumentType] assert len(res) == 1 assert res[0].metadata["foo"] == "bar" @@ -527,7 +527,7 @@ def generator_specs_multi_asset(): assert res[0].metadata["foo"].value == "bar" assert res[1].metadata["baz"].value == "qux" - res = list(generator_specs_multi_asset()) + res = list(generator_specs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(res) == 2 assert res[0].metadata["foo"] == "bar" assert res[1].metadata["baz"] == "qux" @@ -542,7 +542,7 @@ def generator_outs_multi_asset(): assert res[0].metadata["foo"].value == "bar" assert res[1].metadata["baz"].value == "qux" - res = list(generator_outs_multi_asset()) + res = list(generator_outs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(res) == 2 assert res[0].metadata["foo"] == "bar" assert res[1].metadata["baz"] == "qux" @@ -558,7 +558,7 @@ async def async_specs_multi_asset(): assert res[0].metadata["foo"].value == "bar" assert res[1].metadata["baz"].value == "qux" - res = asyncio.run(async_specs_multi_asset()) + res = asyncio.run(async_specs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(res) == 2 assert res[0].metadata["foo"] == "bar" assert res[1].metadata["baz"] == "qux" @@ -575,7 +575,7 @@ async def async_gen_specs_multi_asset(): async def 
_run_async_gen(): results = [] - async for result in async_gen_specs_multi_asset(): + async for result in async_gen_specs_multi_asset(): # pyright: ignore[reportGeneralTypeIssues] results.append(result) return results @@ -603,4 +603,4 @@ def partitioned_asset(context: AssetExecutionContext) -> MaterializeResult: context = build_asset_context(partition_key="red") res = partitioned_asset(context) - assert res.metadata["key"] == "red" + assert res.metadata["key"] == "red" # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_multi_partitions.py b/python_modules/dagster/dagster_tests/definitions_tests/test_multi_partitions.py index d2abe701e1e70..39e34a14d21b4 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_multi_partitions.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_multi_partitions.py @@ -123,7 +123,7 @@ def my_repo(): asset2_records = instance.fetch_materializations(asset2.key, limit=1000).records materializations = sorted( [*asset1_records, *asset2_records], - key=lambda x: x.event_log_entry.dagster_event.asset_key, + key=lambda x: x.event_log_entry.dagster_event.asset_key, # type: ignore ) assert len(materializations) == 2 @@ -357,7 +357,7 @@ def test_keys_with_dimension_value_with_dynamic(): ) with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_partitions_def.name, ["a", "b", "c", "d"]) + instance.add_dynamic_partitions(dynamic_partitions_def.name, ["a", "b", "c", "d"]) # pyright: ignore[reportArgumentType] assert multipartitions_def.get_multipartition_keys_with_dimension_value( dimension_name="dynamic", diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_observe_result.py b/python_modules/dagster/dagster_tests/definitions_tests/test_observe_result.py index 924f03899ae56..0eea131f1615e 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_observe_result.py +++ 
b/python_modules/dagster/dagster_tests/definitions_tests/test_observe_result.py @@ -31,7 +31,7 @@ def ret_untyped(context: AssetExecutionContext): result = observe([ret_untyped]) assert result.success - observations = result.asset_observations_for_node(ret_untyped.node_def.name) + observations = result.asset_observations_for_node(ret_untyped.node_def.name) # pyright: ignore[reportOptionalMemberAccess] assert len(observations) == 1, observations assert "one" in observations[0].metadata assert observations[0].tags["foo"] == "bar" @@ -91,8 +91,8 @@ def outs_multi_asset(): assert observe([outs_multi_asset]).success res = outs_multi_asset() - assert res[0].metadata["foo"] == "bar" - assert res[1].metadata["baz"] == "qux" + assert res[0].metadata["foo"] == "bar" # pyright: ignore[reportIndexIssue] + assert res[1].metadata["baz"] == "qux" # pyright: ignore[reportIndexIssue] @multi_observable_source_asset( specs=[ @@ -108,8 +108,8 @@ def specs_multi_asset(): assert observe([specs_multi_asset]).success res = specs_multi_asset() - assert res[0].metadata["foo"] == "bar" - assert res[1].metadata["baz"] == "qux" + assert res[0].metadata["foo"] == "bar" # pyright: ignore[reportIndexIssue] + assert res[1].metadata["baz"] == "qux" # pyright: ignore[reportIndexIssue] def test_yield_materialization_multi_asset(): @@ -134,7 +134,7 @@ def multi(): assert "one" in observations[0].metadata assert "two" in observations[1].metadata - direct_results = list(multi()) + direct_results = list(multi()) # pyright: ignore[reportArgumentType] assert len(direct_results) == 2 # @@ -160,7 +160,7 @@ def missing(): DagsterInvariantViolationError, match='Invocation of op "missing" did not return an output for non-optional output "two"', ): - list(missing()) + list(missing()) # pyright: ignore[reportArgumentType] # # missing asset_key @@ -190,7 +190,7 @@ def no_key(): " asset_key, options are:" ), ): - list(no_key()) + list(no_key()) # pyright: ignore[reportArgumentType] # # return tuple success @@ 
-216,7 +216,7 @@ def ret_multi(): assert "two" in observations[1].metadata res = ret_multi() - assert len(res) == 2 + assert len(res) == 2 # pyright: ignore[reportArgumentType] # # return list error @@ -395,7 +395,7 @@ def generator_specs_multi_asset(): assert observations[0].metadata["foo"].value == "bar" assert observations[1].metadata["baz"].value == "qux" - result = list(generator_specs_multi_asset()) + result = list(generator_specs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(result) == 2 assert result[0].metadata["foo"] == "bar" assert result[1].metadata["baz"] == "qux" @@ -412,7 +412,7 @@ def generator_outs_multi_asset(): assert observations[0].metadata["foo"].value == "bar" assert observations[1].metadata["baz"].value == "qux" - result = list(generator_outs_multi_asset()) + result = list(generator_outs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(result) == 2 assert result[0].metadata["foo"] == "bar" assert result[1].metadata["baz"] == "qux" @@ -430,7 +430,7 @@ async def async_specs_multi_asset(): assert observations[0].metadata["foo"].value == "bar" assert observations[1].metadata["baz"].value == "qux" - result = asyncio.run(async_specs_multi_asset()) + result = asyncio.run(async_specs_multi_asset()) # pyright: ignore[reportArgumentType] assert len(result) == 2 assert result[0].metadata["foo"] == "bar" assert result[1].metadata["baz"] == "qux" @@ -449,7 +449,7 @@ async def async_gen_specs_multi_asset(): async def _run_async_gen(): results = [] - async for result in async_gen_specs_multi_asset(): + async for result in async_gen_specs_multi_asset(): # pyright: ignore[reportGeneralTypeIssues] results.append(result) return results @@ -485,4 +485,4 @@ def partitioned_asset(context: AssetExecutionContext) -> ObserveResult: context = build_op_context(partition_key="red") res = partitioned_asset(context) - assert res.metadata["key"] == "red" + assert res.metadata["key"] == "red" # pyright: ignore[reportAttributeAccessIssue] 
diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_op_io.py b/python_modules/dagster/dagster_tests/definitions_tests/test_op_io.py index 510c2d0e0ead6..26d8a44b5a05f 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_op_io.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_op_io.py @@ -86,8 +86,8 @@ def precedence(_context, arg_a: int, arg_b: int, arg_c: int): assert precedence.input_defs[0].default_value == "hi" assert precedence.input_defs[0].metadata["explicit"] assert precedence.input_defs[0].input_manager_key == "rudy" - assert precedence.input_defs[0].get_asset_key(None) is not None - assert precedence.input_defs[0].get_asset_partitions(None) is not None + assert precedence.input_defs[0].get_asset_key(None) is not None # pyright: ignore[reportArgumentType] + assert precedence.input_defs[0].get_asset_partitions(None) is not None # pyright: ignore[reportArgumentType] def test_output_merge(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_partitioned_schedule.py b/python_modules/dagster/dagster_tests/definitions_tests/test_partitioned_schedule.py index 5055ffab0727f..21cd0ef63c184 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_partitioned_schedule.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_partitioned_schedule.py @@ -80,9 +80,9 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, minute_of_hour=30 ) - assert my_schedule.cron_schedule == "30 9 * * *" + assert my_schedule.cron_schedule == "30 9 * * *" # pyright: ignore[reportAttributeAccessIssue] - run_request = my_schedule.evaluate_tick( + run_request = my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-08", DATE_FORMAT) ) @@ -117,14 +117,14 @@ def my_partitioned_config(start, end): 
} my_schedule_default = schedule_for_partitioned_config(my_partitioned_config) - assert my_schedule_default.cron_schedule == "15 2 * * *" + assert my_schedule_default.cron_schedule == "15 2 * * *" # pyright: ignore[reportAttributeAccessIssue] my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, minute_of_hour=30 ) - assert my_schedule.cron_schedule == "30 9 * * *" + assert my_schedule.cron_schedule == "30 9 * * *" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context(scheduled_execution_time=datetime(2021, 5, 8, 9, 30)) ).run_requests[0].run_config == { "start": "2021-05-07T02:15:00+00:00", @@ -155,12 +155,12 @@ def my_partitioned_config(start, end): } my_schedule_default = schedule_for_partitioned_config(my_partitioned_config) - assert my_schedule_default.cron_schedule == "0 * * * *" + assert my_schedule_default.cron_schedule == "0 * * * *" # pyright: ignore[reportAttributeAccessIssue] my_schedule = schedule_for_partitioned_config(my_partitioned_config, minute_of_hour=30) - assert my_schedule.cron_schedule == "30 * * * *" + assert my_schedule.cron_schedule == "30 * * * *" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-08", DATE_FORMAT) ) @@ -192,9 +192,9 @@ def my_partitioned_config(start, end): } my_schedule = schedule_for_partitioned_config(my_partitioned_config, minute_of_hour=30) - assert my_schedule.cron_schedule == "30 * * * *" + assert my_schedule.cron_schedule == "30 * * * *" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: 
ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-08", DATE_FORMAT) ) @@ -228,9 +228,9 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, minute_of_hour=30, day_of_week=2 ) - assert my_schedule.cron_schedule == "30 9 * * 2" + assert my_schedule.cron_schedule == "30 9 * * 2" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-21", DATE_FORMAT) ) @@ -266,9 +266,9 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, minute_of_hour=30, day_of_week=2 ) - assert my_schedule.cron_schedule == "30 9 * * 2" + assert my_schedule.cron_schedule == "30 9 * * 2" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-21", DATE_FORMAT) ) @@ -302,9 +302,9 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, minute_of_hour=30, day_of_month=2 ) - assert my_schedule.cron_schedule == "30 9 2 * *" + assert my_schedule.cron_schedule == "30 9 2 * *" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-07-21", DATE_FORMAT) ) @@ -352,9 +352,9 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config( my_partitioned_config, hour_of_day=9, 
minute_of_hour=30, day_of_month=2 ) - assert my_schedule.cron_schedule == "30 9 2 * *" + assert my_schedule.cron_schedule == "30 9 2 * *" # pyright: ignore[reportAttributeAccessIssue] - assert my_schedule.evaluate_tick( + assert my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-06-21", DATE_FORMAT) ) @@ -379,13 +379,13 @@ def my_partitioned_config(start, end): my_partitioned_config, hour_of_day=9, minute_of_hour=30 ) - result = my_schedule.evaluate_tick( + result = my_schedule.evaluate_tick( # pyright: ignore[reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2021-05-05", DATE_FORMAT) ) ) - assert len(result.run_requests) == 0 + assert len(result.run_requests) == 0 # pyright: ignore[reportArgumentType] assert result.skip_message is not None @@ -398,7 +398,7 @@ def my_partitioned_config(start, end): my_schedule = schedule_for_partitioned_config(my_partitioned_config) - run_request = my_schedule.evaluate_tick( + run_request = my_schedule.evaluate_tick( # pyright: ignore[reportOptionalSubscript,reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2022-03-05", DATE_FORMAT) ) @@ -431,14 +431,14 @@ def my_asset(): def my_repo(): return [my_asset, my_schedule, my_job] - run_requests = my_schedule.evaluate_tick( + run_requests = my_schedule.evaluate_tick( # pyright: ignore[reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2020-01-02", DATE_FORMAT), repository_def=my_repo, ) ).run_requests - assert len(run_requests) == 4 - assert set([req.partition_key for req in run_requests]) == set( + assert len(run_requests) == 4 # pyright: ignore[reportArgumentType] + assert set([req.partition_key for req in run_requests]) == set( # pyright: ignore[reportOptionalIterable] [ "2020-01-01|a", "2020-01-01|b", @@ -490,8 +490,8 @@ def 
my_repo(): ) .run_requests ) - assert len(run_requests) == 1 - assert run_requests[0].partition_key == "2020-01-01" + assert len(run_requests) == 1 # pyright: ignore[reportArgumentType] + assert run_requests[0].partition_key == "2020-01-01" # pyright: ignore[reportOptionalSubscript] def test_unresolved_multi_partitioned_schedule(): @@ -521,8 +521,8 @@ def my_repo(): ) .run_requests ) - assert len(run_requests) == 4 - assert set([req.partition_key for req in run_requests]) == set( + assert len(run_requests) == 4 # pyright: ignore[reportArgumentType] + assert set([req.partition_key for req in run_requests]) == set( # pyright: ignore[reportOptionalIterable] [ "2020-01-01|a", "2020-01-01|b", @@ -554,9 +554,9 @@ def my_repo(): return [my_asset, my_schedule, my_job] with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_partitions.name, ["a", "b", "c", "d"]) + instance.add_dynamic_partitions(dynamic_partitions.name, ["a", "b", "c", "d"]) # pyright: ignore[reportArgumentType] - run_requests = my_schedule.evaluate_tick( + run_requests = my_schedule.evaluate_tick( # pyright: ignore[reportAttributeAccessIssue] build_schedule_context( scheduled_execution_time=datetime.strptime("2020-01-02", DATE_FORMAT), repository_def=my_repo, @@ -564,8 +564,8 @@ def my_repo(): ) ).run_requests - assert len(run_requests) == 4 - assert set([req.partition_key for req in run_requests]) == { + assert len(run_requests) == 4 # pyright: ignore[reportArgumentType] + assert set([req.partition_key for req in run_requests]) == { # pyright: ignore[reportOptionalIterable] "2020-01-01|a", "2020-01-01|b", "2020-01-01|c", diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_reconstructable.py b/python_modules/dagster/dagster_tests/definitions_tests/test_reconstructable.py index 51718347fff62..733e5154863bb 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_reconstructable.py +++ 
b/python_modules/dagster/dagster_tests/definitions_tests/test_reconstructable.py @@ -122,7 +122,7 @@ def test_bad_target(): " or RepositoryDefinition. Got None." ), ): - reconstructable(not_the_pipeline) + reconstructable(not_the_pipeline) # pyright: ignore[reportArgumentType] def test_inner_scope(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_repository_definition.py b/python_modules/dagster/dagster_tests/definitions_tests/test_repository_definition.py index 1a5952bd5c45a..4e4defc3c590c 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_repository_definition.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_repository_definition.py @@ -60,7 +60,7 @@ def create_single_node_job(name, called): def test_repo_lazy_definition(): called = defaultdict(int) - @repository + @repository # pyright: ignore[reportArgumentType] def lazy_repo(): return { "jobs": { @@ -107,7 +107,7 @@ def noop(): def noop2(): pass - @repository + @repository # pyright: ignore[reportArgumentType] def error_repo(): return { "jobs": { @@ -159,7 +159,7 @@ def _some_repo(): def test_key_mismatch(): called = defaultdict(int) - @repository + @repository # pyright: ignore[reportArgumentType] def some_repo(): return {"jobs": {"foo": lambda: create_single_node_job("bar", called)}} @@ -170,7 +170,7 @@ def some_repo(): def test_non_job_in_jobs(): with pytest.raises(DagsterInvalidDefinitionError, match="all elements of list must be of type"): - @repository + @repository # pyright: ignore[reportArgumentType] def _some_repo(): return ["not-a-job"] @@ -195,7 +195,7 @@ def _some_repo(): def test_bad_sensor(): - @sensor( + @sensor( # pyright: ignore[reportArgumentType] job_name="foo", ) def foo_sensor(_): @@ -258,7 +258,7 @@ def wow(): def wonder(): wow() - @sensor(job=wonder) + @sensor(job=wonder) # pyright: ignore[reportArgumentType] def direct_sensor(_): return {} @@ -276,7 +276,7 @@ def test_direct_sensor_unresolved_target(): def foo(): 
return None - @sensor(job=unresolved_job) + @sensor(job=unresolved_job) # pyright: ignore[reportArgumentType] def direct_sensor(_): return {} @@ -298,7 +298,7 @@ def wonder(): w_job = wonder.to_job() - @sensor(job=w_job) + @sensor(job=w_job) # pyright: ignore[reportArgumentType] def direct_sensor(_): return {} @@ -316,7 +316,7 @@ def test_target_dupe_unresolved(): def foo(): return None - @sensor(job=unresolved_job) + @sensor(job=unresolved_job) # pyright: ignore[reportArgumentType] def direct_sensor(_): return {} @@ -519,7 +519,7 @@ def test_job_validation(): match="Object mapped to my_job is not an instance of JobDefinition or GraphDefinition.", ): - @repository + @repository # pyright: ignore[reportArgumentType] def _my_repo(): return {"jobs": {"my_job": "blah"}} @@ -577,7 +577,7 @@ def test_lazy_graph(): def my_graph(): pass - @repository + @repository # pyright: ignore[reportArgumentType] def jobs(): return { "jobs": { @@ -625,7 +625,7 @@ def bar(): match="resource with key 'x' required by op 'foo' was not provided", ): - @repository + @repository # pyright: ignore[reportArgumentType] def _fails(): return { "jobs": {"bar": bar}, @@ -635,7 +635,7 @@ def _fails(): def test_bad_resolve(): with pytest.raises(DagsterInvalidSubsetError, match=r"AssetKey\(s\) \['foo'\] were selected"): - @repository + @repository # pyright: ignore[reportArgumentType] def _fails(): return {"jobs": {"tbd": define_asset_job(name="tbd", selection="foo")}} @@ -656,7 +656,7 @@ def my_repo(): def test_assets_checks(): foo = SourceAsset(key=AssetKey("foo")) - @asset_check(asset=foo) + @asset_check(asset=foo) # pyright: ignore[reportArgumentType] def foo_check(): return True @@ -668,7 +668,7 @@ def my_repo(): def test_direct_assets(): - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass @@ -750,7 +750,7 @@ def my_repo(): def test_source_asset_unsatisfied_resource(): - 
@io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass @@ -769,7 +769,7 @@ def the_repo(): def test_source_asset_unsatisfied_resource_transitive(): - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass @@ -819,7 +819,7 @@ def test_source_asset_resource_conflicts(): def the_asset(): pass - @io_manager(required_resource_keys={"foo"}) + @io_manager(required_resource_keys={"foo"}) # pyright: ignore[reportArgumentType] def the_manager(): pass @@ -1135,15 +1135,15 @@ def the_asset(): unresolved_job = define_asset_job("asset_job", selection="*") - @executor + @executor # pyright: ignore[reportCallIssue,reportArgumentType] def custom_executor(_): pass - @executor + @executor # pyright: ignore[reportCallIssue,reportArgumentType] def other_custom_executor(_): pass - @job(executor_def=custom_executor) + @job(executor_def=custom_executor) # pyright: ignore[reportArgumentType] def op_job_with_executor(): pass @@ -1159,7 +1159,7 @@ def job_explicitly_specifies_default_executor(): def the_job(): pass - @repository(default_executor_def=other_custom_executor) + @repository(default_executor_def=other_custom_executor) # pyright: ignore[reportArgumentType] def the_repo(): return [ the_asset, @@ -1192,9 +1192,9 @@ def asset2(): source = SourceAsset(key=AssetKey("a_source_asset")) - all_assets: Sequence[AssetsDefinition, SourceAsset] = [asset1, asset2, source] + all_assets: Sequence[AssetsDefinition, SourceAsset] = [asset1, asset2, source] # pyright: ignore[reportInvalidTypeArguments,reportAssignmentType] - @repository + @repository # pyright: ignore[reportArgumentType] def assets_repo(): return [all_assets] @@ -1222,7 +1222,7 @@ def job2(): job_list = [job1, job2] - @repository + @repository # pyright: ignore[reportArgumentType] def job_repo(): return [job_list] @@ -1242,7 +1242,7 @@ def 
job3(): combo_list = [asset3, job3] - @repository + @repository # pyright: ignore[reportArgumentType] def combo_repo(): return [combo_list] @@ -1263,12 +1263,12 @@ def asset2(): source = SourceAsset(key=AssetKey("a_source_asset")) - layer_1: Sequence[AssetsDefinition, SourceAsset] = [asset2, source] + layer_1: Sequence[AssetsDefinition, SourceAsset] = [asset2, source] # pyright: ignore[reportInvalidTypeArguments,reportAssignmentType] layer_2 = [layer_1, asset1] with pytest.raises(DagsterInvalidDefinitionError, match="Bad return value from repository"): - @repository + @repository # pyright: ignore[reportArgumentType] def assets_repo(): return [layer_2] @@ -1311,11 +1311,11 @@ def repo(): def test_default_loggers_repo(): - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def basic(): pass - @repository(default_logger_defs={"foo": basic}) + @repository(default_logger_defs={"foo": basic}) # pyright: ignore[reportArgumentType] def the_repo(): return [] @@ -1329,11 +1329,11 @@ def no_logger_provided(): def the_asset(): pass - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def basic(): pass - @repository(default_logger_defs={"foo": basic}) + @repository(default_logger_defs={"foo": basic}) # pyright: ignore[reportArgumentType] def the_repo(): return [no_logger_provided, the_asset] @@ -1349,15 +1349,15 @@ def the_asset(): unresolved_job = define_asset_job("asset_job", selection="*") - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def custom_logger(_): pass - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def other_custom_logger(_): pass - @job(logger_defs={"bar": custom_logger}) + @job(logger_defs={"bar": custom_logger}) # pyright: ignore[reportArgumentType] def job_with_loggers(): pass @@ -1369,7 +1369,7 @@ def job_no_loggers(): def job_explicitly_specifies_default_loggers(): pass - @repository(default_logger_defs={"foo": other_custom_logger}) + 
@repository(default_logger_defs={"foo": other_custom_logger}) # pyright: ignore[reportArgumentType] def the_repo(): return [ the_asset, @@ -1389,19 +1389,19 @@ def the_repo(): def test_default_loggers_keys_conflict(): - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def some_logger(): pass - @logger + @logger # pyright: ignore[reportCallIssue,reportArgumentType] def other_logger(): pass - @job(logger_defs={"foo": some_logger}) + @job(logger_defs={"foo": some_logger}) # pyright: ignore[reportArgumentType] def the_job(): pass - @repository(default_logger_defs={"foo": other_logger}) + @repository(default_logger_defs={"foo": other_logger}) # pyright: ignore[reportArgumentType] def the_repo(): return [the_job] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_schedule.py b/python_modules/dagster/dagster_tests/definitions_tests/test_schedule.py index 04b222f822eb3..6bcd4db25a28a 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_schedule.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_schedule.py @@ -164,13 +164,13 @@ def test_tag_transfer_to_run_request(): # If no defined execution function, tags should be transferred to the run request (backcompat) assert ( - tags_and_no_exec_fn_schedule.evaluate_tick(context_with_time).run_requests[0].tags["foo"] + tags_and_no_exec_fn_schedule.evaluate_tick(context_with_time).run_requests[0].tags["foo"] # pyright: ignore[reportOptionalSubscript] == "bar" ) # If an execution function is defined, tags should not be transferred to the run request assert ( - "foo" not in tags_and_exec_fn_schedule.evaluate_tick(context_with_time).run_requests[0].tags + "foo" not in tags_and_exec_fn_schedule.evaluate_tick(context_with_time).run_requests[0].tags # pyright: ignore[reportOptionalSubscript] ) diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_schedule_invocation.py 
b/python_modules/dagster/dagster_tests/definitions_tests/test_schedule_invocation.py index b661909b6653c..edd009cb45da4 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_schedule_invocation.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_schedule_invocation.py @@ -256,8 +256,8 @@ def my_repo(): repository_def=my_repo, scheduled_execution_time=datetime.datetime(2023, 1, 1) ) as context: run_requests = my_schedule.evaluate_tick(context).run_requests - assert len(run_requests) == 1 - run_request = run_requests[0] + assert len(run_requests) == 1 # pyright: ignore[reportArgumentType] + run_request = run_requests[0] # pyright: ignore[reportOptionalSubscript] assert run_request.tags.get(PARTITION_NAME_TAG) == "a" @@ -279,8 +279,8 @@ def my_repo(): repository_def=my_repo, scheduled_execution_time=datetime.datetime(2023, 1, 1) ) as context: run_requests = my_schedule.evaluate_tick(context).run_requests - assert len(run_requests) == 2 - for request in run_requests: + assert len(run_requests) == 2 # pyright: ignore[reportArgumentType] + for request in run_requests: # pyright: ignore[reportOptionalIterable] assert request.tags.get(PARTITION_NAME_TAG) == "1" diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor.py b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor.py index 5aae5f0eb1f1e..ecf9267b4ab79 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor.py @@ -62,12 +62,12 @@ def evaluation_fn(context): assert SensorDefinition( "a", asset_selection=["asset1", "asset2"], evaluation_fn=evaluation_fn - ).asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} + ).asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} # pyright: ignore[reportOptionalMemberAccess] sensor_def = SensorDefinition( "a", asset_selection=[asset1, asset2], 
evaluation_fn=evaluation_fn ) - assert sensor_def.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} + assert sensor_def.asset_selection.resolve(assets) == {AssetKey("asset1"), AssetKey("asset2")} # pyright: ignore[reportOptionalMemberAccess] def test_coerce_graph_def_to_job(): diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_invocation.py b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_invocation.py index cdb8d9c79d6a2..1a94c2b068729 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_invocation.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_invocation.py @@ -57,15 +57,15 @@ def test_sensor_invocation_args(): def basic_sensor_no_arg(): return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor_no_arg().run_config == {} + assert basic_sensor_no_arg().run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # Test underscore name @sensor(job_name="foo_job") def basic_sensor(_): return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor(build_sensor_context()).run_config == {} - assert basic_sensor(None).run_config == {} + assert basic_sensor(build_sensor_context()).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert basic_sensor(None).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # Test sensor arbitrary arg name @sensor(job_name="foo_job") @@ -75,10 +75,10 @@ def basic_sensor_with_context(_arbitrary_context): context = build_sensor_context() # Pass context as positional arg - assert basic_sensor_with_context(context).run_config == {} + assert basic_sensor_with_context(context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # pass context as kwarg - assert basic_sensor_with_context(_arbitrary_context=context).run_config == {} + assert 
basic_sensor_with_context(_arbitrary_context=context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # pass context as wrong kwarg with pytest.raises( @@ -586,8 +586,8 @@ def my_repo(): with build_sensor_context(repository_def=my_repo) as context: run_requests = valid_req_sensor.evaluate_tick(context).run_requests - assert len(run_requests) == 1 - run_request = run_requests[0] + assert len(run_requests) == 1 # pyright: ignore[reportArgumentType] + run_request = run_requests[0] # pyright: ignore[reportOptionalSubscript] assert run_request.partition_key == "foo" assert run_request.run_config == {} assert run_request.tags.get(PARTITION_NAME_TAG) == "foo" @@ -640,8 +640,8 @@ def my_repo(): with build_sensor_context(repository_def=my_repo) as context: for valid_sensor in [valid_req_sensor, job_str_target_sensor]: run_requests = valid_sensor.evaluate_tick(context).run_requests - assert len(run_requests) == 1 - run_request = run_requests[0] + assert len(run_requests) == 1 # pyright: ignore[reportArgumentType] + run_request = run_requests[0] # pyright: ignore[reportOptionalSubscript] assert run_request.run_config == partition_fn("a") assert run_request.tags.get(PARTITION_NAME_TAG) == "a" assert run_request.tags.get("yay") == "yay!" 
@@ -688,10 +688,10 @@ def my_repo(): with build_sensor_context(repository_def=my_repo) as context: run_requests = valid_req_sensor.evaluate_tick(context).run_requests - assert len(run_requests) == 1 - assert run_requests[0].partition_key == "a" - assert run_requests[0].tags.get(PARTITION_NAME_TAG) == "a" - assert run_requests[0].asset_selection == [a_asset.key] + assert len(run_requests) == 1 # pyright: ignore[reportArgumentType] + assert run_requests[0].partition_key == "a" # pyright: ignore[reportOptionalSubscript] + assert run_requests[0].tags.get(PARTITION_NAME_TAG) == "a" # pyright: ignore[reportOptionalSubscript] + assert run_requests[0].asset_selection == [a_asset.key] # pyright: ignore[reportOptionalSubscript] with pytest.raises(DagsterUnknownPartitionError, match="Could not find a partition"): invalid_req_sensor.evaluate_tick(context) @@ -781,7 +781,7 @@ def my_job_2(): def basic_sensor(_): return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor(context).run_config == {} + assert basic_sensor(context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # test with context @run_status_sensor(run_status=DagsterRunStatus.SUCCESS) @@ -789,7 +789,7 @@ def basic_sensor_w_arg(context): assert context.dagster_event.event_type_value == "PIPELINE_SUCCESS" return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor_w_arg(context).run_config == {} + assert basic_sensor_w_arg(context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_run_failure_w_run_request(): @@ -819,7 +819,7 @@ def my_job(): def basic_sensor(_): return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor(context).run_config == {} + assert basic_sensor(context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # test with context @run_failure_sensor @@ -827,7 +827,7 @@ def basic_sensor_w_arg(context): assert 
context.dagster_event.event_type_value == "PIPELINE_FAILURE" return RunRequest(run_key=None, run_config={}, tags={}) - assert basic_sensor_w_arg(context).run_config == {} + assert basic_sensor_w_arg(context).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_multi_asset_sensor(): @@ -866,7 +866,7 @@ def a_and_b_sensor(context): repository_def=repository_def, definitions=definitions, ) - assert a_and_b_sensor(ctx).run_config == {} + assert a_and_b_sensor(ctx).run_config == {} # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_multi_asset_sensor_selection(): @@ -961,9 +961,9 @@ def two_asset_sensor(context): repository_def=my_repo, ) sensor_data = two_asset_sensor.evaluate_tick(ctx) - assert len(sensor_data.run_requests) == 1 - assert sensor_data.run_requests[0].partition_key == "2022-08-01" - assert sensor_data.run_requests[0].tags["dagster/partition"] == "2022-08-01" + assert len(sensor_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + assert sensor_data.run_requests[0].partition_key == "2022-08-01" # pyright: ignore[reportOptionalSubscript] + assert sensor_data.run_requests[0].tags["dagster/partition"] == "2022-08-01" # pyright: ignore[reportOptionalSubscript] assert ( ctx.cursor == '{"AssetKey([\'daily_partitions_asset\'])": ["2022-08-01", 4, {}],' ' "AssetKey([\'daily_partitions_asset_2\'])": ["2022-08-01", 5, {}]}' @@ -1049,7 +1049,7 @@ def my_asset_sensor(context): @multi_asset_sensor(monitored_assets=[my_asset.key]) def my_multi_asset_sensor(context): - ctx.advance_all_cursors() + ctx.advance_all_cursors() # pyright: ignore[reportAttributeAccessIssue] with instance_for_test() as instance: ctx = build_sensor_context( @@ -1319,7 +1319,7 @@ def test_unconsumed_events_sensor(context): ) test_unconsumed_events_sensor(ctx) july_asset_cursor = ctx._get_cursor(july_asset.key) # noqa: SLF001 - assert first_2022_07_10_mat < july_asset_cursor.latest_consumed_event_id + assert 
first_2022_07_10_mat < july_asset_cursor.latest_consumed_event_id # pyright: ignore[reportOperatorIssue] assert july_asset_cursor.latest_consumed_event_partition == "2022-07-10" # Second materialization for 2022-07-10 is after cursor so should not be unconsumed assert july_asset_cursor.trailing_unconsumed_partitioned_event_ids == { @@ -1394,7 +1394,7 @@ def test_unconsumed_events_sensor(context): july_asset_cursor = ctx._get_cursor(july_asset.key) # noqa: SLF001 assert july_asset_cursor.latest_consumed_event_partition == "2022-07-06" assert july_asset_cursor.trailing_unconsumed_partitioned_event_ids == {} - assert july_asset_cursor.latest_consumed_event_id > first_storage_id + assert july_asset_cursor.latest_consumed_event_id > first_storage_id # pyright: ignore[reportOptionalOperand] def test_error_when_max_num_unconsumed_events(): @@ -1493,7 +1493,7 @@ def test_unconsumed_events_sensor(context): second_july_cursor = ctx._get_cursor(july_asset.key) # noqa: SLF001 assert second_july_cursor.latest_consumed_event_partition == "2022-07-02" assert ( - second_july_cursor.latest_consumed_event_id > first_july_cursor.latest_consumed_event_id + second_july_cursor.latest_consumed_event_id > first_july_cursor.latest_consumed_event_id # pyright: ignore[reportOptionalOperand] ) # We should remove the 2022-07-02 materialization from the unconsumed events list # since we have advanced the cursor for a later materialization with that partition key. 
@@ -1562,7 +1562,7 @@ def test_unconsumed_events_sensor(context): test_unconsumed_events_sensor(ctx) second_july_cursor = ctx._get_cursor(july_asset.key) # noqa: SLF001 assert ( - second_july_cursor.latest_consumed_event_id > first_july_cursor.latest_consumed_event_id + second_july_cursor.latest_consumed_event_id > first_july_cursor.latest_consumed_event_id # pyright: ignore[reportOptionalOperand] ) assert second_july_cursor.latest_consumed_event_partition == "2022-07-05" assert ( @@ -1756,7 +1756,7 @@ def test_sensor(context): instance=instance, ) run_request = test_sensor(ctx) - assert run_request.partition_key == "apple" + assert run_request.partition_key == "apple" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_sensor_invocation_runconfig() -> None: @@ -1797,7 +1797,7 @@ def my_repo(): instance=instance, ) exec_data = my_sensor.evaluate_tick(ctx) - assert exec_data.run_requests[0].asset_selection == [] + assert exec_data.run_requests[0].asset_selection == [] # pyright: ignore[reportOptionalSubscript] def test_reject_invalid_asset_check_keys(): @@ -1833,8 +1833,8 @@ def asset2_sensor(context): instance=instance, ) asset1_sensor_data = asset1_sensor.evaluate_tick(ctx) - assert asset1_sensor_data.run_requests[0].asset_selection == [asset1.key] - assert asset1_sensor_data.run_requests[0].asset_check_keys == [check1.check_key] + assert asset1_sensor_data.run_requests[0].asset_selection == [asset1.key] # pyright: ignore[reportOptionalSubscript] + assert asset1_sensor_data.run_requests[0].asset_check_keys == [check1.check_key] # pyright: ignore[reportOptionalSubscript] with pytest.warns(DeprecationWarning, match="asset check keys"): asset2_sensor.evaluate_tick(ctx) diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_result.py b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_result.py index b6f44231c8ded..9a14591e0265a 100644 --- 
a/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_result.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_sensor_result.py @@ -41,8 +41,8 @@ def test_sensor(_): instance=instance, ) sensor_data = test_sensor.evaluate_tick(ctx) - assert len(sensor_data.run_requests) == 1 - assert sensor_data.run_requests[0].run_key == "foo" + assert len(sensor_data.run_requests) == 1 # pyright: ignore[reportArgumentType] + assert sensor_data.run_requests[0].run_key == "foo" # pyright: ignore[reportOptionalSubscript] assert not sensor_data.skip_message assert not sensor_data.dagster_run_reactions assert not sensor_data.cursor @@ -51,7 +51,7 @@ def test_sensor(_): def test_sensor_result_skip_reason(): skip_reason = SkipReason("I'm skipping") - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def test_sensor(_): return [ SensorResult(skip_reason=skip_reason), @@ -71,7 +71,7 @@ def test_sensor(_): def test_sensor_result_string_skip_reason(): skip_reason = "I'm skipping" - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def test_sensor(_): return [ SensorResult(skip_reason=skip_reason), @@ -89,7 +89,7 @@ def test_sensor(_): def test_invalid_skip_reason_invocations(): - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def multiple_sensor_results(_): return [ SensorResult(skip_reason=SkipReason("I'm skipping")), @@ -103,7 +103,7 @@ def sensor_result_w_other_objects(_): RunRequest(run_key="foo"), ] - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def invalid_sensor_result(_): return [ SensorResult( @@ -142,7 +142,7 @@ def invalid_sensor_result(_): def test_update_cursor(): - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def test_sensor(_): return [ SensorResult([RunRequest("foo")], cursor="foo"), 
@@ -157,7 +157,7 @@ def test_sensor(_): def test_update_cursor_and_sensor_result_cursor(): - @sensor(job=do_something_job) + @sensor(job=do_something_job) # pyright: ignore[reportArgumentType] def test_sensor(context): context.update_cursor("bar") return [ @@ -202,8 +202,8 @@ def asset_sensor_set_cursor(context, asset_event): instance=instance, ) as ctx: result = my_asset_sensor.evaluate_tick(ctx) - assert len(result.run_requests) == 1 - assert result.run_requests[0].run_key == "foo" + assert len(result.run_requests) == 1 # pyright: ignore[reportArgumentType] + assert result.run_requests[0].run_key == "foo" # pyright: ignore[reportOptionalSubscript] assert result.cursor != observed["cursor"] # ensure cursor progresses with build_sensor_context( @@ -237,7 +237,7 @@ def sensor_with_yield_run_request_and_return_skip_reason(context): build_sensor_context(cursor="go") ) assert result_without_skip.skip_message is None - assert len(result_without_skip.run_requests) == 1 + assert len(result_without_skip.run_requests) == 1 # pyright: ignore[reportArgumentType] @sensor(job=job1) def sensor_with_yield_and_return_run_request(context): @@ -247,7 +247,7 @@ def sensor_with_yield_and_return_run_request(context): result_yield_and_return_run_request = sensor_with_yield_and_return_run_request.evaluate_tick( build_sensor_context() ) - assert len(result_yield_and_return_run_request.run_requests) == 2 + assert len(result_yield_and_return_run_request.run_requests) == 2 # pyright: ignore[reportArgumentType] def test_asset_events_experimental_param_on_sensor_result() -> None: @@ -318,4 +318,4 @@ def my_sensor(): with instance_for_test() as instance: result = my_sensor.evaluate_tick(build_sensor_context(instance)) - assert "foo" not in result.run_requests[0].tags + assert "foo" not in result.run_requests[0].tags # pyright: ignore[reportOptionalSubscript] diff --git a/python_modules/dagster/dagster_tests/definitions_tests/test_time_window_partitions.py 
b/python_modules/dagster/dagster_tests/definitions_tests/test_time_window_partitions.py index f7b6b764c62bf..0ba811ecc7da0 100644 --- a/python_modules/dagster/dagster_tests/definitions_tests/test_time_window_partitions.py +++ b/python_modules/dagster/dagster_tests/definitions_tests/test_time_window_partitions.py @@ -1506,7 +1506,7 @@ def test_time_window_partitions_def_serialization(partitions_def): ) deserialized = deserialize_value(serialize_value(time_window_partitions_def)) assert deserialized == time_window_partitions_def - assert deserialized.start.tzinfo == time_window_partitions_def.start.tzinfo + assert deserialized.start.tzinfo == time_window_partitions_def.start.tzinfo # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_pickle_time_window_partitions_def(): diff --git a/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_dsl.py b/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_dsl.py index 0920bb2894e05..277af97a9d6b3 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_dsl.py +++ b/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_dsl.py @@ -93,7 +93,7 @@ def multi_out(): @job def composite_multi(): - one, numbers = multi_out() + one, numbers = multi_out() # pyright: ignore[reportGeneralTypeIssues] echo(one) numbers.map(echo) diff --git a/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_not_allowed.py b/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_not_allowed.py index 782615e1a694c..3d6d5842b2f86 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_not_allowed.py +++ b/python_modules/dagster/dagster_tests/execution_tests/dynamic_tests/test_not_allowed.py @@ -77,7 +77,7 @@ def composed_echo(): @job def _should_fail(): def _complex(item): - composed_echo().map(lambda y: add(y, item)) + composed_echo().map(lambda y: add(y, item)) # pyright: 
ignore[reportAttributeAccessIssue] dynamic_op().map(_complex) diff --git a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_child_process_executor.py b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_child_process_executor.py index e38164c126c36..d814b07ef7e5a 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_child_process_executor.py +++ b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_child_process_executor.py @@ -1,6 +1,6 @@ -import multiprocessing import os import time +from multiprocessing import get_context from multiprocessing.process import BaseProcess import pytest @@ -15,6 +15,8 @@ ) from dagster._utils import segfault +multiprocessing_ctx = get_context() + class DoubleAStringChildProcessCommand(ChildProcessCommand): def __init__(self, a_str): @@ -55,7 +57,9 @@ def test_basic_child_process_command(): events = list( filter( lambda x: x and not isinstance(x, (ChildProcessEvent, BaseProcess)), - execute_child_process_command(multiprocessing, DoubleAStringChildProcessCommand("aa")), + execute_child_process_command( + multiprocessing_ctx, DoubleAStringChildProcessCommand("aa") + ), ) ) assert events == ["aaaa"] @@ -65,7 +69,9 @@ def test_basic_child_process_command_with_process_events(): events = list( filter( lambda x: x, - execute_child_process_command(multiprocessing, DoubleAStringChildProcessCommand("aa")), + execute_child_process_command( + multiprocessing_ctx, DoubleAStringChildProcessCommand("aa") + ), ) ) assert len(events) == 4 @@ -85,27 +91,27 @@ def test_child_process_uncaught_exception(): results = list( filter( lambda x: x and isinstance(x, ChildProcessSystemErrorEvent), - execute_child_process_command(multiprocessing, ThrowAnErrorCommand()), + execute_child_process_command(multiprocessing_ctx, ThrowAnErrorCommand()), ) ) assert len(results) == 1 - assert "AnError" in str(results[0].error_info.message) + assert "AnError" in 
str(results[0].error_info.message) # type: ignore def test_child_process_crashy_process(): with pytest.raises(ChildProcessCrashException) as exc: - list(execute_child_process_command(multiprocessing, CrashyCommand())) + list(execute_child_process_command(multiprocessing_ctx, CrashyCommand())) assert exc.value.exit_code == 1 @pytest.mark.skipif(os.name == "nt", reason="Segfault not being caught on Windows: See issue #2791") def test_child_process_segfault(): with pytest.raises(ChildProcessCrashException) as exc: - list(execute_child_process_command(multiprocessing, SegfaultCommand())) + list(execute_child_process_command(multiprocessing_ctx, SegfaultCommand())) assert exc.value.exit_code == -11 @pytest.mark.skip("too long") def test_long_running_command(): - list(execute_child_process_command(multiprocessing, LongRunningCommand())) + list(execute_child_process_command(multiprocessing_ctx, LongRunningCommand())) diff --git a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_multiprocessing.py b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_multiprocessing.py index 3a24e48b439db..b3f80edfdc9a5 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_multiprocessing.py +++ b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_multiprocessing.py @@ -210,16 +210,16 @@ def test_invalid_instance(): assert len(result.all_events) == 1 assert result.all_events[0].is_failure assert ( - result.all_events[0].job_failure_data.error.cls_name + result.all_events[0].job_failure_data.error.cls_name # pyright: ignore[reportOptionalMemberAccess] == "DagsterUnmetExecutorRequirementsError" ) - assert "non-ephemeral instance" in result.all_events[0].job_failure_data.error.message + assert "non-ephemeral instance" in result.all_events[0].job_failure_data.error.message # pyright: ignore[reportOptionalMemberAccess] def test_no_handle(): with pytest.raises(CheckError, match='Param "job" is not a 
ReconstructableJob.'): execute_job( - define_diamond_job(), + define_diamond_job(), # pyright: ignore[reportArgumentType] instance=DagsterInstance.ephemeral(), raise_on_error=False, ) @@ -380,13 +380,13 @@ def test_failure_multiprocessing(): assert not result.success failure_data = result.failure_data_for_node("throw") assert failure_data - assert failure_data.error.cls_name == "Failure" + assert failure_data.error.cls_name == "Failure" # pyright: ignore[reportOptionalMemberAccess] # hard coded - assert failure_data.user_failure_data.label == "intentional-failure" + assert failure_data.user_failure_data.label == "intentional-failure" # pyright: ignore[reportOptionalMemberAccess] # from Failure - assert failure_data.user_failure_data.description == "it Failure" - assert failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") + assert failure_data.user_failure_data.description == "it Failure" # pyright: ignore[reportOptionalMemberAccess] + assert failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") # pyright: ignore[reportOptionalMemberAccess] @op @@ -413,7 +413,7 @@ def test_crash_multiprocessing(): assert not result.success failure_data = result.failure_data_for_node("sys_exit") assert failure_data - assert failure_data.error.cls_name == "ChildProcessCrashException" + assert failure_data.error.cls_name == "ChildProcessCrashException" # pyright: ignore[reportOptionalMemberAccess] assert failure_data.user_failure_data is None @@ -428,7 +428,7 @@ def test_crash_multiprocessing(): ) log_data = instance.compute_log_manager.get_log_data(log_key) - assert "Crashy output to stdout" in log_data.stdout.decode("utf-8") + assert "Crashy output to stdout" in log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] # The argument to sys.exit won't (reliably) make it to the compute logs for stderr b/c the # LocalComputeLogManger is in-process -- documenting this behavior here though we may want to @@ -463,7 
+463,7 @@ def test_crash_hard_multiprocessing(): assert not result.success failure_data = result.failure_data_for_node("segfault_op") assert failure_data - assert failure_data.error.cls_name == "ChildProcessCrashException" + assert failure_data.error.cls_name == "ChildProcessCrashException" # pyright: ignore[reportOptionalMemberAccess] assert failure_data.user_failure_data is None diff --git a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_op_concurrency.py b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_op_concurrency.py index 5834080e6de46..eddfde007d00a 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_op_concurrency.py +++ b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_op_concurrency.py @@ -401,7 +401,7 @@ def _unblock_concurrency_key(instance, timeout): threading.Thread(target=_unblock_concurrency_key, args=(instance, TIMEOUT), daemon=True).start() for event in execute_run_iterator(recon_simple_job, run, instance=instance): - if "blocked by concurrency limit for key foo" in event.message: + if "blocked by concurrency limit for key foo" in event.message: # pyright: ignore[reportOperatorIssue] has_blocked_message = True break if time.time() - start > TIMEOUT: diff --git a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_delegating_executor.py b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_delegating_executor.py index a0535df9dbe45..95dc5795e949c 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_delegating_executor.py +++ b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_delegating_executor.py @@ -61,7 +61,7 @@ def name(self): def launch_step(self, step_handler_context): if step_handler_context.execute_step_args.should_verify_step: TestStepHandler.verify_step_count += 1 - if 
step_handler_context.execute_step_args.step_keys_to_execute[0] == "baz_op": + if step_handler_context.execute_step_args.step_keys_to_execute[0] == "baz_op": # pyright: ignore[reportOptionalSubscript] TestStepHandler.saw_baz_op = True assert step_handler_context.step_tags["baz_op"] == {"foo": "bar"} @@ -134,7 +134,7 @@ def test_execute(): assert any( [ - "Starting execution with step handler TestStepHandler" in event.message + "Starting execution with step handler TestStepHandler" in event.message # pyright: ignore[reportOperatorIssue] for event in result.all_events ] ) @@ -163,7 +163,7 @@ def test_execute_with_tailer_offset(): assert any( [ - "Starting execution with step handler TestStepHandler" in event.message + "Starting execution with step handler TestStepHandler" in event.message # pyright: ignore[reportOperatorIssue] for event in result.all_events ] ) @@ -362,7 +362,7 @@ def test_execute_verify_step(): assert any( [ - "Starting execution with step handler TestStepHandler" in event.message + "Starting execution with step handler TestStepHandler" in event.message # pyright: ignore[reportOperatorIssue] for event in result.all_events ] ) @@ -543,4 +543,4 @@ def _unblock_concurrency_key(instance, timeout): # the executor loop sleeps every second, so there should be at least a call per # second that the steps are blocked, in addition to the processing of any step # events - assert instance.event_log_storage.get_records_for_run_calls(result.run_id) <= 3 + assert instance.event_log_storage.get_records_for_run_calls(result.run_id) <= 3 # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_handler.py b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_handler.py index d21da472a23f5..d17989f136809 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_handler.py +++ 
b/python_modules/dagster/dagster_tests/execution_tests/engine_tests/test_step_handler.py @@ -20,7 +20,7 @@ def foo_pipline(): def _get_executor(instance, pipeline, executor_config=None): - return test_step_delegating_executor.executor_creation_fn( + return test_step_delegating_executor.executor_creation_fn( # pyright: ignore[reportOptionalCall] InitExecutorContext( job=pipeline, executor_def=test_step_delegating_executor, @@ -64,7 +64,7 @@ def test_step_handler_context(): ctx = StepHandlerContext( instance=instance, plan_context=plan_context, - steps=execution_plan.steps, + steps=execution_plan.steps, # pyright: ignore[reportArgumentType] execute_step_args=args, dagster_run=run, ) diff --git a/python_modules/dagster/dagster_tests/execution_tests/execute_job_tests/test_job.py b/python_modules/dagster/dagster_tests/execution_tests/execute_job_tests/test_job.py index 639674932b30d..e64bb1e1dc9a6 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/execute_job_tests/test_job.py +++ b/python_modules/dagster/dagster_tests/execution_tests/execute_job_tests/test_job.py @@ -127,7 +127,7 @@ def the_op(context): def basic(): the_op() - with environ({"SOME_ENV_VAR": None}): + with environ({"SOME_ENV_VAR": None}): # pyright: ignore[reportArgumentType] # Ensure that the env var not existing will not throw an error, since resolution happens in post-processing. 
the_job = basic.to_job( config={"ops": {"the_op": {"config": {"foo": {"env": "SOME_ENV_VAR"}}}}} diff --git a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_execution_plan_reexecution.py b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_execution_plan_reexecution.py index 422d164448f41..bcac6a391960f 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_execution_plan_reexecution.py +++ b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_execution_plan_reexecution.py @@ -94,7 +94,7 @@ def test_execution_plan_reexecution(): ) known_state = KnownExecutionState.build_for_reexecution( instance, - instance.get_run_by_id(run_id), + instance.get_run_by_id(run_id), # pyright: ignore[reportArgumentType] ) _check_known_state(known_state) @@ -166,7 +166,7 @@ def test_execution_plan_reexecution_with_in_memory(): resolved_run_config = ResolvedRunConfig.build(job_def, run_config=run_config) known_state = KnownExecutionState.build_for_reexecution( instance, - instance.get_run_by_id(run_id), + instance.get_run_by_id(run_id), # pyright: ignore[reportArgumentType] ) _check_known_state(known_state) diff --git a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py index 3df33e63edde7..890e4069a1768 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py +++ b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_external_step.py @@ -109,7 +109,7 @@ def retry_op(context): if is_explicit: raise Failure(description="some failure description", metadata={"foo": 1.23}) else: - _ = "x" + 1 + _ = "x" + 1 # pyright: ignore[reportOperatorIssue] return context.retry_number @job( @@ -367,7 +367,7 @@ def test_step_context_to_step_run_ref(): with 
DagsterInstance.ephemeral() as instance: step_context = initialize_step_context("", instance) step = step_context.step - step_run_ref = step_context_to_step_run_ref(step_context) + step_run_ref = step_context_to_step_run_ref(step_context) # pyright: ignore[reportArgumentType] assert step_run_ref.run_config == step_context.dagster_run.run_config assert step_run_ref.run_id == step_context.dagster_run.run_id @@ -388,7 +388,7 @@ def test_local_external_step_launcher(): step_context = initialize_step_context(tmpdir, instance) step_launcher = LocalExternalStepLauncher(tmpdir) - events = list(step_launcher.launch_step(step_context)) + events = list(step_launcher.launch_step(step_context)) # pyright: ignore[reportArgumentType] event_types = [event.event_type for event in events] assert DagsterEventType.STEP_START in event_types assert DagsterEventType.STEP_SUCCESS in event_types @@ -407,7 +407,7 @@ def test_asset_check_step_launcher(): ) step_launcher = LocalExternalStepLauncher(tmpdir) - events = list(step_launcher.launch_step(step_context)) + events = list(step_launcher.launch_step(step_context)) # pyright: ignore[reportArgumentType] event_types = [event.event_type for event in events] assert DagsterEventType.STEP_START in event_types assert DagsterEventType.STEP_SUCCESS in event_types @@ -557,8 +557,8 @@ def test_explicit_failure(): raise_on_error=False, ) as result: fd = result.failure_data_for_node("retry_op") - assert fd.user_failure_data.description == "some failure description" - assert fd.user_failure_data.metadata == {"foo": MetadataValue.float(1.23)} + assert fd.user_failure_data.description == "some failure description" # pyright: ignore[reportOptionalMemberAccess] + assert fd.user_failure_data.metadata == {"foo": MetadataValue.float(1.23)} # pyright: ignore[reportOptionalMemberAccess] def test_arbitrary_error(): @@ -580,7 +580,7 @@ def test_arbitrary_error(): e for e in result.all_events if e.event_type_value == "STEP_FAILURE" ] assert len(failure_events) == 1 
- assert result.failure_data_for_node("retry_op").error.cause.cls_name == "TypeError" + assert result.failure_data_for_node("retry_op").error.cause.cls_name == "TypeError" # pyright: ignore[reportOptionalMemberAccess] def test_launcher_requests_retry(): diff --git a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_host_run_worker.py b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_host_run_worker.py index d1e345f299cc6..1755adb2022c3 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_host_run_worker.py +++ b/python_modules/dagster/dagster_tests/execution_tests/execution_plan_tests/test_host_run_worker.py @@ -68,7 +68,7 @@ def get_definition(self): def test_host_run_worker(): - _explode_pid["pid"] = os.getpid() + _explode_pid["pid"] = os.getpid() # pyright: ignore[reportArgumentType] with instance_for_test() as instance: run_config = { @@ -95,7 +95,7 @@ def test_host_run_worker(): raise_on_error=True, ) - assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] logs = instance.all_logs(dagster_run.run_id) assert any( @@ -116,7 +116,7 @@ def test_executor(_init_context): def test_custom_executor_fn(): - _explode_pid["pid"] = os.getpid() + _explode_pid["pid"] = os.getpid() # pyright: ignore[reportArgumentType] with instance_for_test() as instance: run_config = { @@ -143,7 +143,7 @@ def test_custom_executor_fn(): raise_on_error=True, ) - assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] logs = instance.all_logs(dagster_run.run_id) assert any( diff --git 
a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_api_iterators.py b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_api_iterators.py index c7e692c0b2d33..36b99e562344f 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_api_iterators.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_api_iterators.py @@ -90,12 +90,12 @@ def event_callback(record: EventLogEntry) -> None: event = next(iterator) event_type = event.event_type_value - iterator.close() + iterator.close() # pyright: ignore[reportAttributeAccessIssue] events = [record.dagster_event for record in records if record.is_dagster_event] messages = [record.user_message for record in records if not record.is_dagster_event] - job_failure_events = [event for event in events if event.is_job_failure] + job_failure_events = [event for event in events if event.is_job_failure] # pyright: ignore[reportOptionalMemberAccess] assert len(job_failure_events) == 1 - assert "GeneratorExit" in job_failure_events[0].job_failure_data.error.message + assert "GeneratorExit" in job_failure_events[0].job_failure_data.error.message # pyright: ignore[reportOptionalMemberAccess] assert len([message for message in messages if message == "CLEANING A"]) > 0 assert len([message for message in messages if message == "CLEANING B"]) > 0 @@ -134,7 +134,7 @@ def event_callback(record: EventLogEntry) -> None: instance=run_monitoring_instance, ) ) - assert ( + assert ( # pyright: ignore[reportOperatorIssue] "Ignoring a duplicate run that was started from somewhere other than the run" " monitor daemon" in event.message ) @@ -186,7 +186,7 @@ def event_callback(_record): assert any( [ - f"{dagster_run.job_name} ({dagster_run.run_id}) started a new run worker" + f"{dagster_run.job_name} ({dagster_run.run_id}) started a new run worker" # pyright: ignore[reportOperatorIssue] " while the run was already in state 
DagsterRunStatus.STARTED. " in event.message for event in events ] @@ -200,7 +200,7 @@ def event_callback(_record): ] ) - assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.FAILURE + assert instance.get_run_by_id(dagster_run.run_id).status == DagsterRunStatus.FAILURE # pyright: ignore[reportOptionalMemberAccess] def test_start_run_worker_after_run_failure(): @@ -221,7 +221,7 @@ def event_callback(_record): ).with_status(DagsterRunStatus.FAILURE) event = next(execute_run_iterator(InMemoryJob(job_def), dagster_run, instance=instance)) - assert ( + assert ( # pyright: ignore[reportOperatorIssue] "Ignoring a run worker that started after the run had already finished." in event.message ) @@ -267,7 +267,7 @@ def event_callback(_record): iter_events = list(execute_run_iterator(InMemoryJob(job_def), iter_run, instance=instance)) assert len(iter_events) == 1 - assert ( + assert ( # pyright: ignore[reportOperatorIssue] "Not starting execution since the run was canceled before execution could start" in iter_events[0].message ) @@ -338,7 +338,7 @@ def event_callback(record): event = next(iterator) event_type = event.event_type_value - iterator.close() + iterator.close() # pyright: ignore[reportAttributeAccessIssue] messages = [record.user_message for record in records if not record.is_dagster_event] assert len([message for message in messages if message == "CLEANING A"]) > 0 assert len([message for message in messages if message == "CLEANING B"]) > 0 @@ -367,4 +367,4 @@ def test_run_fails_while_loading_code(): list(gen_execute_run) # Execution is stopped, stays in failure state - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.FAILURE + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.FAILURE # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_execute_in_process.py 
b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_execute_in_process.py index 92dd604b5c85c..7abe415d186ad 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_execute_in_process.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_execute_in_process.py @@ -385,7 +385,7 @@ def fail(): "Exception: I have failed" in result.filter_events(lambda evt: evt.is_step_failure)[ 0 - ].event_specific_data.error_display_string + ].event_specific_data.error_display_string # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) diff --git a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_expectations.py b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_expectations.py index 4dc3d46723500..50f359e8413be 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_expectations.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_expectations.py @@ -42,8 +42,8 @@ def success_expectation_op(_context): assert len(expt_results) == 1 expt_result = expt_results[0] - assert expt_result.event_specific_data.expectation_result.success - assert expt_result.event_specific_data.expectation_result.description == "This is always true." + assert expt_result.event_specific_data.expectation_result.success # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert expt_result.event_specific_data.expectation_result.description == "This is always true." 
# pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_failed_expectation_in_compute_step(): @@ -64,8 +64,8 @@ def failure_expectation_op(_context): assert len(expt_results) == 1 expt_result = expt_results[0] - assert not expt_result.event_specific_data.expectation_result.success - assert expt_result.event_specific_data.expectation_result.description == "This is always false." + assert not expt_result.event_specific_data.expectation_result.success # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert expt_result.event_specific_data.expectation_result.description == "This is always false." # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_return_expectation_failure(): diff --git a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_failure.py b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_failure.py index 402f2d43917a6..f4ee3c295aaa3 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_failure.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_failure.py @@ -19,10 +19,10 @@ def failure(): assert not result.success failure_data = result.failure_data_for_node("throw") assert failure_data - assert failure_data.error.cls_name == "Failure" + assert failure_data.error.cls_name == "Failure" # pyright: ignore[reportOptionalMemberAccess] # hard coded - assert failure_data.user_failure_data.label == "intentional-failure" + assert failure_data.user_failure_data.label == "intentional-failure" # pyright: ignore[reportOptionalMemberAccess] # from Failure - assert failure_data.user_failure_data.description == "it Failure" - assert failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") + assert failure_data.user_failure_data.description == "it Failure" # pyright: ignore[reportOptionalMemberAccess] + assert 
failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_metadata.py b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_metadata.py index 25d4bfaa9e37d..82915c5234211 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_metadata.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_metadata.py @@ -89,7 +89,7 @@ def the_job(): result, "the_op", DagsterEventType.ASSET_MATERIALIZATION ) assert len(materialization_events) == 1 - materialization = materialization_events[0].event_specific_data.materialization + materialization = materialization_events[0].event_specific_data.materialization # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(materialization.metadata) == 8 entry_map = {k: v.__class__ for k, v in materialization.metadata.items()} assert entry_map["text"] == TextMetadataValue @@ -127,7 +127,7 @@ def the_job(): observation_events = step_events_of_type(result, "the_op", DagsterEventType.ASSET_OBSERVATION) assert len(observation_events) == 1 - observation = observation_events[0].event_specific_data.asset_observation + observation = observation_events[0].event_specific_data.asset_observation # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(observation.metadata) == 5 entry_map = {k: v.__class__ for k, v in observation.metadata.items()} assert entry_map["text"] == TextMetadataValue @@ -187,9 +187,9 @@ def test_parse_invalid_metadata(): metadata = {"foo": object()} with pytest.raises(DagsterInvalidMetadata) as _exc_info: - normalize_metadata(metadata) + normalize_metadata(metadata) # pyright: ignore[reportArgumentType] - normalized = normalize_metadata(metadata, allow_invalid=True) + normalized = normalize_metadata(metadata, 
allow_invalid=True) # pyright: ignore[reportArgumentType] assert normalized["foo"] == TextMetadataValue("[object] (unserializable)") @@ -255,7 +255,7 @@ def test_table_metadata_value_schema_inference(): def test_table_column_keys(): with pytest.raises(TypeError): - TableColumn(bad_key="foo", description="bar", type="string") + TableColumn(bad_key="foo", description="bar", type="string") # pyright: ignore[reportCallIssue] @pytest.mark.parametrize("key,value", list(bad_values["table_column"].items())) @@ -273,7 +273,7 @@ def test_table_column_values(key, value): def test_table_constraints_keys(): with pytest.raises(TypeError): - TableColumn(bad_key="foo") + TableColumn(bad_key="foo") # pyright: ignore[reportCallIssue] @pytest.mark.parametrize("key,value", list(bad_values["table_constraints"].items())) @@ -286,7 +286,7 @@ def test_table_constraints(key, value): def test_table_column_constraints_keys(): with pytest.raises(TypeError): - TableColumnConstraints(bad_key="foo") + TableColumnConstraints(bad_key="foo") # pyright: ignore[reportCallIssue] # minimum and maximum aren't checked because they depend on the type of the column @@ -304,7 +304,7 @@ def test_table_column_constraints_values(key, value): def test_table_schema_keys(): with pytest.raises(TypeError): - TableSchema(bad_key="foo") + TableSchema(bad_key="foo") # pyright: ignore[reportCallIssue] @pytest.mark.parametrize("key,value", list(bad_values["table_schema"].items())) @@ -374,7 +374,7 @@ def the_job(): result, "the_op", DagsterEventType.ASSET_MATERIALIZATION ) assert len(materialization_events) == 1 - materialization = materialization_events[0].event_specific_data.materialization + materialization = materialization_events[0].event_specific_data.materialization # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] entry_map = {k: v.__class__ for k, v in materialization.metadata.items()} assert entry_map["first_bool"] == BoolMetadataValue assert entry_map["second_bool"] == BoolMetadataValue 
diff --git a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_timing.py b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_timing.py index ef1321a15e99c..4dd6f47ab4c31 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_timing.py +++ b/python_modules/dagster/dagster_tests/execution_tests/misc_execution_tests/test_timing.py @@ -18,7 +18,7 @@ def before_yield_op(_context): result = job_def.execute_in_process() success_event = result.get_step_success_events()[0] - assert success_event.event_specific_data.duration_ms >= 10.0 + assert success_event.event_specific_data.duration_ms >= 10.0 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] @pytest.mark.skipif( @@ -33,7 +33,7 @@ def after_yield_op(_context): job_def = GraphDefinition(node_defs=[after_yield_op], name="test").to_job() result = job_def.execute_in_process() success_event = result.get_step_success_events()[0] - assert success_event.event_specific_data.duration_ms >= 10.0 + assert success_event.event_specific_data.duration_ms >= 10.0 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] @pytest.mark.skipif( @@ -48,7 +48,7 @@ def direct_return_op(_context): job_def = GraphDefinition(node_defs=[direct_return_op], name="test").to_job() result = job_def.execute_in_process() success_event = result.get_step_success_events()[0] - assert success_event.event_specific_data.duration_ms >= 10.0 + assert success_event.event_specific_data.duration_ms >= 10.0 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] @pytest.mark.skipif( @@ -69,4 +69,4 @@ def my_input_manager(): ) result = job_def.execute_in_process() success_event = result.get_step_success_events()[0] - assert success_event.event_specific_data.duration_ms >= 10.0 + assert success_event.event_specific_data.duration_ms >= 10.0 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] diff --git 
a/python_modules/dagster/dagster_tests/execution_tests/pipes_tests/test_threaded_message_reader.py b/python_modules/dagster/dagster_tests/execution_tests/pipes_tests/test_threaded_message_reader.py index 64958461e17e2..a15ded168f2a3 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/pipes_tests/test_threaded_message_reader.py +++ b/python_modules/dagster/dagster_tests/execution_tests/pipes_tests/test_threaded_message_reader.py @@ -218,7 +218,7 @@ def log_line_3(message: str): mat = mats[0] assert mat.asset_key == AssetKey(["my_asset"]) assert mat.materialization.metadata["foo"].value == "bar" - assert mat.materialization.tags[DATA_VERSION_TAG] == "alpha" + assert mat.materialization.tags[DATA_VERSION_TAG] == "alpha" # pyright: ignore[reportOptionalSubscript] captured = capsys.readouterr() diff --git a/python_modules/dagster/dagster_tests/execution_tests/versioning_tests/test_data_versions.py b/python_modules/dagster/dagster_tests/execution_tests/versioning_tests/test_data_versions.py index b8d87fa66c7fb..0f0f4f864e39c 100644 --- a/python_modules/dagster/dagster_tests/execution_tests/versioning_tests/test_data_versions.py +++ b/python_modules/dagster/dagster_tests/execution_tests/versioning_tests/test_data_versions.py @@ -928,9 +928,9 @@ def mocked_get_input_data_version_tag( # This will create materializations with the legacy tags with mock.patch.dict("dagster._core.execution.plan.execute_step.__dict__", legacy_tags): mats = materialize_assets([foo, bar], instance) - assert mats["bar"].tags["dagster/logical_version"] - assert mats["bar"].tags["dagster/input_logical_version/foo"] - assert mats["bar"].tags["dagster/input_event_pointer/foo"] + assert mats["bar"].tags["dagster/logical_version"] # pyright: ignore[reportOptionalSubscript] + assert mats["bar"].tags["dagster/input_logical_version/foo"] # pyright: ignore[reportOptionalSubscript] + assert mats["bar"].tags["dagster/input_event_pointer/foo"] # pyright: ignore[reportOptionalSubscript] # We're now 
outside the mock context record = instance.get_latest_data_version_record(bar.key) @@ -995,14 +995,14 @@ def asset0(): ... @asset(deps=["asset0"]) def asset1(): - return MaterializeResult(tags={"dagster/input_event_pointer/asset0": 500}) + return MaterializeResult(tags={"dagster/input_event_pointer/asset0": 500}) # pyright: ignore[reportArgumentType] with instance_for_test() as instance: materialize([asset0], instance=instance) materialize([asset1], instance=instance) record = instance.get_latest_data_version_record(asset1.key) - assert extract_data_provenance_from_entry(record.event_log_entry).input_storage_ids == { + assert extract_data_provenance_from_entry(record.event_log_entry).input_storage_ids == { # pyright: ignore[reportOptionalMemberAccess] AssetKey(["asset0"]): 500 } @@ -1013,14 +1013,14 @@ def asset0(): ... @asset(deps=["asset0"]) def asset1(): - return Output(value=None, tags={"dagster/input_event_pointer/asset0": 500}) + return Output(value=None, tags={"dagster/input_event_pointer/asset0": 500}) # pyright: ignore[reportArgumentType] with instance_for_test() as instance: materialize([asset0], instance=instance) materialize([asset1], instance=instance) record = instance.get_latest_data_version_record(asset1.key) - assert extract_data_provenance_from_entry(record.event_log_entry).input_storage_ids == { + assert extract_data_provenance_from_entry(record.event_log_entry).input_storage_ids == { # pyright: ignore[reportOptionalMemberAccess] AssetKey(["asset0"]): 500 } @@ -1050,7 +1050,7 @@ def downstream_asset(**kwargs): traced_counter.set(counter) materialize_assets(all_assets, instance)[downstream_asset.key] assert ( - traced_counter.get().counts() + traced_counter.get().counts() # pyright: ignore[reportOptionalMemberAccess] == { "DagsterInstance.get_asset_records": 1, "DagsterInstance.get_run_record_by_id": 3, # get_run_record_by_id called when handling events for the run diff --git 
a/python_modules/dagster/dagster_tests/general_tests/check_tests/test_check.py b/python_modules/dagster/dagster_tests/general_tests/check_tests/test_check.py index ec8d096efdf33..3eeb1a8c521b9 100644 --- a/python_modules/dagster/dagster_tests/general_tests/check_tests/test_check.py +++ b/python_modules/dagster/dagster_tests/general_tests/check_tests/test_check.py @@ -116,10 +116,10 @@ def test_callable_param(): assert check.callable_param(lamb, "lamb") == lamb with pytest.raises(ParameterCheckError): - check.callable_param(None, "lamb") + check.callable_param(None, "lamb") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.callable_param(2, "lamb") + check.callable_param(2, "lamb") # pyright: ignore[reportArgumentType] def test_opt_callable_param(): @@ -130,7 +130,7 @@ def test_opt_callable_param(): assert check.opt_callable_param(None, "lamb", default=lamb) == lamb with pytest.raises(ParameterCheckError): - check.opt_callable_param(2, "lamb") + check.opt_callable_param(2, "lamb") # pyright: ignore[reportCallIssue,reportArgumentType] def test_is_callable(): @@ -142,13 +142,13 @@ def fn(): assert check.is_callable(lambda: None, "some desc") with pytest.raises(CheckError): - check.is_callable(None) + check.is_callable(None) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - check.is_callable(1) + check.is_callable(1) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="some other desc"): - check.is_callable(1, "some other desc") + check.is_callable(1, "some other desc") # pyright: ignore[reportArgumentType] # ######################## @@ -187,19 +187,19 @@ class Bar: assert check.class_param(Bar, "foo") with pytest.raises(CheckError): - check.class_param(None, "foo") + check.class_param(None, "foo") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): check.class_param(check, "foo") with pytest.raises(CheckError): - check.class_param(234, "foo") + check.class_param(234, 
"foo") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - check.class_param("bar", "foo") + check.class_param("bar", "foo") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - check.class_param(Bar(), "foo") + check.class_param(Bar(), "foo") # pyright: ignore[reportArgumentType] class Super: pass @@ -216,7 +216,7 @@ class Alone: assert check.class_param(Alone, "foo", superclass=Super) with pytest.raises(CheckError): - assert check.class_param("value", "foo", superclass=Super) + assert check.class_param("value", "foo", superclass=Super) # pyright: ignore[reportArgumentType] assert check.opt_class_param(Sub, "foo", superclass=Super) assert check.opt_class_param(None, "foo", superclass=Super) is None @@ -352,19 +352,19 @@ def test_opt_dict_param(): assert check.opt_dict_param(ddict, "opt_dict_param") == ddict with pytest.raises(ParameterCheckError): - check.opt_dict_param(0, "opt_dict_param") + check.opt_dict_param(0, "opt_dict_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_dict_param(1, "opt_dict_param") + check.opt_dict_param(1, "opt_dict_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_dict_param("foo", "opt_dict_param") + check.opt_dict_param("foo", "opt_dict_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_dict_param(["foo"], "opt_dict_param") + check.opt_dict_param(["foo"], "opt_dict_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_dict_param([], "opt_dict_param") + check.opt_dict_param([], "opt_dict_param") # pyright: ignore[reportArgumentType] def test_opt_nullable_dict_param(): @@ -569,13 +569,13 @@ def _test_gen(): assert list(gen) == [] with pytest.raises(ParameterCheckError): - assert check.generator_param(list(gen), "gen") + assert check.generator_param(list(gen), "gen") # pyright: ignore[reportArgumentType] with 
pytest.raises(ParameterCheckError): - assert check.generator_param(None, "gen") + assert check.generator_param(None, "gen") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - assert check.generator_param(_test_gen, "gen") + assert check.generator_param(_test_gen, "gen") # pyright: ignore[reportArgumentType] def test_opt_generator_param(): @@ -878,13 +878,13 @@ def test_opt_nullable_list_param(): assert check.opt_nullable_list_param(obj_list, "list_param") == obj_list with pytest.raises(ParameterCheckError): - check.opt_nullable_list_param(0, "list_param") + check.opt_nullable_list_param(0, "list_param") # pyright: ignore[reportCallIssue,reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_nullable_list_param("", "list_param") + check.opt_nullable_list_param("", "list_param") # pyright: ignore[reportCallIssue,reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_nullable_list_param("3u4", "list_param") + check.opt_nullable_list_param("3u4", "list_param") # pyright: ignore[reportCallIssue,reportArgumentType] def test_typed_is_list(): @@ -1011,7 +1011,7 @@ def test_opt_mapping_param(): assert check.opt_mapping_param(None, param_name="name") == dict() with pytest.raises(CheckError): - check.opt_mapping_param("foo", param_name="name") + check.opt_mapping_param("foo", param_name="name") # pyright: ignore[reportArgumentType] assert check.opt_nullable_mapping_param(None, "name") is None @@ -1044,10 +1044,10 @@ def test_path_param(): assert check.opt_path_param(Path("/a/b.csv"), "path_param") == "/a/b.csv" with pytest.raises(ParameterCheckError): - check.path_param(None, "path_param") + check.path_param(None, "path_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.path_param(0, "path_param") + check.path_param(0, "path_param") # pyright: ignore[reportArgumentType] def test_opt_path_param(): @@ -1061,7 +1061,7 @@ def test_opt_path_param(): assert 
check.opt_path_param(None, "path_param") is None with pytest.raises(ParameterCheckError): - check.opt_path_param(0, "path_param") + check.opt_path_param(0, "path_param") # pyright: ignore[reportCallIssue,reportArgumentType] # ######################## @@ -1074,10 +1074,10 @@ def test_set_param(): assert check.set_param(frozenset(), "set_param") == set() with pytest.raises(ParameterCheckError): - check.set_param(None, "set_param") + check.set_param(None, "set_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.set_param("3u4", "set_param") + check.set_param("3u4", "set_param") # pyright: ignore[reportArgumentType] obj_set = {1} assert check.set_param(obj_set, "set_param") == obj_set @@ -1101,10 +1101,10 @@ def test_opt_set_param(): assert check.opt_set_param({3}, "set_param") == {3} with pytest.raises(ParameterCheckError): - check.opt_set_param(0, "set_param") + check.opt_set_param(0, "set_param") # pyright: ignore[reportArgumentType] with pytest.raises(ParameterCheckError): - check.opt_set_param("3u4", "set_param") + check.opt_set_param("3u4", "set_param") # pyright: ignore[reportArgumentType] # ######################## @@ -1123,10 +1123,10 @@ def test_sequence_param(): assert check.sequence_param(["foo"], "sequence_param", of_type=str) == ["foo"] with pytest.raises(ParameterCheckError): - check.sequence_param(None, "sequence_param") + check.sequence_param(None, "sequence_param") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - check.sequence_param(1, "sequence_param", of_type=int) + check.sequence_param(1, "sequence_param", of_type=int) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): check.sequence_param(["foo"], "sequence_param", of_type=int) @@ -1138,7 +1138,7 @@ def test_sequence_param(): check.sequence_param("foo", "sequence_param", of_type=str) with pytest.raises(CheckError): - check.sequence_param(SomeRecord(), "sequence_param") + check.sequence_param(SomeRecord(), 
"sequence_param") # pyright: ignore[reportArgumentType] def test_opt_sequence_param(): @@ -1150,7 +1150,7 @@ def test_opt_sequence_param(): assert check.opt_sequence_param(None, "sequence_param") == [] with pytest.raises(CheckError): - check.opt_sequence_param(1, "sequence_param", of_type=int) + check.opt_sequence_param(1, "sequence_param", of_type=int) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): check.opt_sequence_param(["foo"], "sequence_param", of_type=int) @@ -1162,7 +1162,7 @@ def test_opt_sequence_param(): check.opt_sequence_param("foo", "sequence_param", of_type=str) with pytest.raises(CheckError): - check.opt_sequence_param(SomeRecord(), "sequence_param") + check.opt_sequence_param(SomeRecord(), "sequence_param") # pyright: ignore[reportArgumentType] def test_opt_nullable_sequence_param(): @@ -1174,7 +1174,7 @@ def test_opt_nullable_sequence_param(): assert check.opt_nullable_sequence_param(None, "sequence_param") is None with pytest.raises(CheckError): - check.opt_nullable_sequence_param(1, "sequence_param", of_type=int) + check.opt_nullable_sequence_param(1, "sequence_param", of_type=int) # pyright: ignore[reportCallIssue,reportArgumentType] with pytest.raises(CheckError): check.opt_nullable_sequence_param(["foo"], "sequence_param", of_type=int) @@ -1183,7 +1183,7 @@ def test_opt_nullable_sequence_param(): assert check.opt_nullable_sequence_param("foo", "sequence_param", of_type=str) with pytest.raises(CheckError): - check.opt_nullable_sequence_param(SomeRecord(), "sequence_param") + check.opt_nullable_sequence_param(SomeRecord(), "sequence_param") # pyright: ignore[reportCallIssue,reportArgumentType] # ######################## @@ -1269,19 +1269,19 @@ def test_tuple_param(): assert check.tuple_param((1, 2), "something") with pytest.raises(CheckError): - assert check.tuple_param(None, "something") + assert check.tuple_param(None, "something") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - assert 
check.tuple_param(1, "something") + assert check.tuple_param(1, "something") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - assert check.tuple_param([1], "something") + assert check.tuple_param([1], "something") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - assert check.tuple_param({1: 2}, "something") + assert check.tuple_param({1: 2}, "something") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError): - assert check.tuple_param("kdjfkd", "something") + assert check.tuple_param("kdjfkd", "something") # pyright: ignore[reportArgumentType] assert check.tuple_param((3, 4), "something", of_type=int) assert check.tuple_param(("foo", "bar"), "something", of_type=str) @@ -1354,7 +1354,7 @@ def test_opt_nullable_tuple_param(): assert check.opt_nullable_tuple_param(None, "something") is None with pytest.raises(CheckError): - check.opt_nullable_tuple_param([3, 4], "something", of_shape=(int, int), of_type=int) + check.opt_nullable_tuple_param([3, 4], "something", of_shape=(int, int), of_type=int) # pyright: ignore[reportCallIssue,reportArgumentType] def test_is_tuple(): @@ -1502,7 +1502,7 @@ def test_failed(): check.failed("some desc") with pytest.raises(CheckError, match="must be a string"): - check.failed(0) + check.failed(0) # pyright: ignore[reportArgumentType] def test_not_implemented(): @@ -1510,7 +1510,7 @@ def test_not_implemented(): check.not_implemented("some string") with pytest.raises(CheckError, match="desc argument must be a string"): - check.not_implemented(None) + check.not_implemented(None) # pyright: ignore[reportArgumentType] def test_iterable(): @@ -1526,10 +1526,10 @@ def test_iterable(): check.iterable_param("lkjsdkf", "stringisiterable") with pytest.raises(CheckError, match="Iterable.*None"): - check.iterable_param(None, "nonenotallowed") + check.iterable_param(None, "nonenotallowed") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="Iterable.*int"): - 
check.iterable_param(1, "intnotallowed") + check.iterable_param(1, "intnotallowed") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="Member of iterable mismatches type"): check.iterable_param([1], "typemismatch", of_type=str) @@ -1541,7 +1541,7 @@ def test_iterable(): check.iterable_param(["atr", None], "nonedoesntcount", of_type=str) with pytest.raises(CheckError): - check.iterable_param(SomeRecord(), "nonenotallowed") + check.iterable_param(SomeRecord(), "nonenotallowed") # pyright: ignore[reportArgumentType] def test_opt_iterable(): @@ -1563,7 +1563,7 @@ def test_opt_iterable(): check.opt_iterable_param("lkjsdkf", "stringisiterable") with pytest.raises(CheckError, match="Iterable.*int"): - check.opt_iterable_param(1, "intnotallowed") + check.opt_iterable_param(1, "intnotallowed") # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="Member of iterable mismatches type"): check.opt_iterable_param([1], "typemismatch", of_type=str) @@ -1575,7 +1575,7 @@ def test_opt_iterable(): check.opt_iterable_param(["atr", None], "nonedoesntcount", of_type=str) with pytest.raises(CheckError): - check.opt_iterable_param(SomeRecord(), "nonenotallowed") + check.opt_iterable_param(SomeRecord(), "nonenotallowed") # pyright: ignore[reportArgumentType] def test_is_iterable() -> None: diff --git a/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_back_compat.py b/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_back_compat.py index 5fec541f46314..973e863a3180e 100644 --- a/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_back_compat.py +++ b/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_back_compat.py @@ -103,7 +103,7 @@ def test_event_log_step_key_migration(): runs = instance.get_runs() assert len(runs) == 1 - run_ids = instance._event_storage.get_all_run_ids() + run_ids = instance._event_storage.get_all_run_ids() # pyright: 
ignore[reportAttributeAccessIssue] assert run_ids == ["6405c4a0-3ccc-4600-af81-b5ee197f8528"] assert isinstance(instance._event_storage, SqlEventLogStorage) records = instance._event_storage.get_records_for_run( @@ -116,7 +116,7 @@ def test_event_log_step_key_migration(): row_data = instance._event_storage.get_event_log_table_data( "6405c4a0-3ccc-4600-af81-b5ee197f8528", record.storage_id ) - if row_data.step_key is not None: + if row_data.step_key is not None: # pyright: ignore[reportOptionalMemberAccess] step_key_records.append(row_data) assert len(step_key_records) == 0 @@ -128,7 +128,7 @@ def test_event_log_step_key_migration(): row_data = instance._event_storage.get_event_log_table_data( "6405c4a0-3ccc-4600-af81-b5ee197f8528", record.storage_id ) - if row_data.step_key is not None: + if row_data.step_key is not None: # pyright: ignore[reportOptionalMemberAccess] step_key_records.append(row_data) assert len(step_key_records) > 0 @@ -184,7 +184,7 @@ def noop_job(): noop_op() with pytest.raises( - (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) + (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) # pyright: ignore[reportAttributeAccessIssue] ): noop_job.execute_in_process(instance=instance) @@ -202,8 +202,8 @@ def noop_job(): run = instance.get_run_by_id(run_id) - assert run.run_id == run_id - assert run.job_snapshot_id is None + assert run.run_id == run_id # pyright: ignore[reportOptionalMemberAccess] + assert run.job_snapshot_id is None # pyright: ignore[reportOptionalMemberAccess] result = noop_job.execute_in_process(instance=instance) @@ -216,7 +216,7 @@ def noop_job(): new_run = instance.get_run_by_id(new_run_id) - assert new_run.job_snapshot_id + assert new_run.job_snapshot_id # pyright: ignore[reportOptionalMemberAccess] def test_downgrade_and_upgrade(): @@ -244,7 +244,7 @@ def test_downgrade_and_upgrade(): assert len(instance.get_runs()) == 1 - instance._run_storage._alembic_downgrade(rev="9fe9e746268c") + 
instance._run_storage._alembic_downgrade(rev="9fe9e746268c") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "9fe9e746268c" @@ -340,7 +340,7 @@ def _test(): assert instance.get_run_records() assert instance.create_run_for_job(_test) - instance._run_storage._alembic_downgrade(rev="72686963a802") + instance._run_storage._alembic_downgrade(rev="72686963a802") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "72686963a802" assert "mode" not in set(get_sqlite3_columns(db_path, "runs")) @@ -361,7 +361,7 @@ def test_run_partition_migration(): assert "partition" in set(get_sqlite3_columns(db_path, "runs")) assert "partition_set" in set(get_sqlite3_columns(db_path, "runs")) - instance._run_storage._alembic_downgrade(rev="224640159acf") + instance._run_storage._alembic_downgrade(rev="224640159acf") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "224640159acf" assert "partition" not in set(get_sqlite3_columns(db_path, "runs")) @@ -384,7 +384,7 @@ def test_run_partition_data_migration(): with DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) as instance: with upgrading_instance(instance): - instance._run_storage.upgrade() + instance._run_storage.upgrade() # pyright: ignore[reportAttributeAccessIssue] run_storage = instance._run_storage assert isinstance(run_storage, SqlRunStorage) @@ -627,7 +627,7 @@ def _test(): assert instance.get_run_records() assert instance.create_run_for_job(_test) - instance._run_storage._alembic_downgrade(rev="7f2b1a4ca7a5") + instance._run_storage._alembic_downgrade(rev="7f2b1a4ca7a5") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "7f2b1a4ca7a5" assert True @@ -848,27 +848,27 @@ def test_jobs_selector_id_migration(): with DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) as instance: # runs the required data migrations instance.upgrade() - assert 
instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) + assert instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] legacy_count = len(instance.all_instigator_state()) - migrated_instigator_count = instance.schedule_storage.execute( + migrated_instigator_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(InstigatorsTable) )[0][0] assert migrated_instigator_count == legacy_count - migrated_job_count = instance.schedule_storage.execute( + migrated_job_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTable) .where(JobTable.c.selector_id.isnot(None)) )[0][0] assert migrated_job_count == legacy_count - legacy_tick_count = instance.schedule_storage.execute( + legacy_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(JobTickTable) )[0][0] assert legacy_tick_count > 0 # tick migrations are optional - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -878,7 +878,7 @@ def test_jobs_selector_id_migration(): # run the optional migrations instance.reindex() - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -955,7 +955,7 @@ def test_add_bulk_actions_columns(): # check 
data migration backfill_count = len(instance.get_backfills()) - migrated_row_count = instance._run_storage.fetchone( + migrated_row_count = instance._run_storage.fetchone( # pyright: ignore[reportAttributeAccessIssue] db_select([db.func.count().label("count")]) .select_from(BulkActionsTable) .where(BulkActionsTable.c.selector_id.isnot(None)) @@ -983,7 +983,7 @@ def test_add_bulk_actions_columns(): backfill_timestamp=get_current_timestamp(), ) ) - unmigrated_row_count = instance._run_storage.fetchone( + unmigrated_row_count = instance._run_storage.fetchone( # pyright: ignore[reportAttributeAccessIssue] db_select([db.func.count().label("count")]) .select_from(BulkActionsTable) .where(BulkActionsTable.c.selector_id.is_(None)) @@ -991,7 +991,7 @@ def test_add_bulk_actions_columns(): assert unmigrated_row_count == 0 # test downgrade - instance._run_storage._alembic_downgrade(rev="721d858e1dda") + instance._run_storage._alembic_downgrade(rev="721d858e1dda") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "721d858e1dda" assert {"id", "key", "status", "timestamp", "body"} == set( @@ -1019,7 +1019,7 @@ def test_add_kvs_table(): assert "kvs" in get_sqlite3_tables(db_path) assert get_sqlite3_indexes(db_path, "kvs") == ["idx_kvs_keys_unique"] - instance._run_storage._alembic_downgrade(rev="6860f830e40c") + instance._run_storage._alembic_downgrade(rev="6860f830e40c") # pyright: ignore[reportAttributeAccessIssue] assert "kvs" not in get_sqlite3_tables(db_path) assert get_sqlite3_indexes(db_path, "kvs") == [] @@ -1064,7 +1064,7 @@ def asset_job(): assert "idx_asset_event_tags_event_id" in indexes assert "idx_asset_event_tags" in indexes - instance._run_storage._alembic_downgrade(rev="a00dd8d936a1") + instance._run_storage._alembic_downgrade(rev="a00dd8d936a1") # pyright: ignore[reportAttributeAccessIssue] assert "asset_event_tags" not in get_sqlite3_tables(db_path) assert get_sqlite3_indexes(db_path, "asset_event_tags") == [] @@ 
-1154,21 +1154,21 @@ def test_add_primary_keys(): instance.upgrade() assert "id" in set(get_sqlite3_columns(db_path, "kvs")) - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] kvs_id_count = _get_table_row_count( instance.run_storage, KeyValueStoreTable, with_non_null_id=True ) assert kvs_id_count == kvs_row_count assert "id" in set(get_sqlite3_columns(db_path, "instance_info")) - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] instance_info_id_count = _get_table_row_count( instance.run_storage, InstanceInfo, with_non_null_id=True ) assert instance_info_id_count == instance_info_row_count assert "id" in set(get_sqlite3_columns(db_path, "daemon_heartbeats")) - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] daemon_heartbeats_id_count = _get_table_row_count( instance.run_storage, DaemonHeartbeatsTable, with_non_null_id=True ) @@ -1225,7 +1225,7 @@ def test_add_backfill_id_column(): assert len(instance.get_runs(filters=RunsFilter(exclude_subruns=True))) == 2 instance.upgrade() - assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) + assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) # pyright: ignore[reportAttributeAccessIssue] columns = get_sqlite3_columns(db_path, "runs") assert { @@ -1264,7 +1264,7 @@ def test_add_backfill_id_column(): backfill_ids = { row["run_id"]: row["backfill_id"] - for row in instance._run_storage.fetchall( + for row in instance._run_storage.fetchall( # pyright: ignore[reportAttributeAccessIssue] db_select([RunsTable.c.run_id, RunsTable.c.backfill_id]).select_from(RunsTable) ) } @@ -1277,7 +1277,7 @@ def test_add_backfill_id_column(): assert len(instance.get_runs(filters=RunsFilter(exclude_subruns=True))) == 3 # test downgrade - instance._run_storage._alembic_downgrade(rev="284a732df317") + 
instance._run_storage._alembic_downgrade(rev="284a732df317") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "284a732df317" columns = get_sqlite3_columns(db_path, "runs") @@ -1384,7 +1384,7 @@ def test_add_backfill_tags(): ) instance.add_backfill(after_migration) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] rows = conn.execute( db.text("SELECT backfill_id, key, value FROM backfill_tags") ).fetchall() @@ -1395,7 +1395,7 @@ def test_add_backfill_tags(): assert ids_to_tags[after_migration.backfill_id] == after_migration.tags # filtering by tags works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) @@ -1413,7 +1413,7 @@ def test_add_backfill_tags(): ) # test downgrade - instance._run_storage._alembic_downgrade(rev="1aca709bba64") + instance._run_storage._alembic_downgrade(rev="1aca709bba64") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "1aca709bba64" assert "backfill_tags" not in get_sqlite3_tables(db_path) @@ -1456,7 +1456,7 @@ def test_add_bulk_actions_job_name_column(): # filtering pre-migration relies on filtering runs, so add a run with the expected job_name pre_migration_run = instance.run_storage.add_run( DagsterRun( - job_name=before_migration.job_name, + job_name=before_migration.job_name, # pyright: ignore[reportArgumentType] run_id=make_new_run_id(), tags={BACKFILL_ID_TAG: before_migration.backfill_id}, status=DagsterRunStatus.NOT_STARTED, @@ -1495,7 +1495,7 @@ def test_add_bulk_actions_job_name_column(): ) instance.add_backfill(after_migration) - with 
instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] rows = conn.execute(db.text("SELECT key, job_name FROM bulk_actions")).fetchall() assert len(rows) == 3 # a backfill exists in the db snapshot @@ -1504,7 +1504,7 @@ def test_add_bulk_actions_job_name_column(): assert ids_to_job_name[after_migration.backfill_id] == after_migration.job_name # filtering by job_name works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) @@ -1522,7 +1522,7 @@ def test_add_bulk_actions_job_name_column(): ) # test downgrade - instance.run_storage._alembic_downgrade(rev="1aca709bba64") + instance.run_storage._alembic_downgrade(rev="1aca709bba64") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "1aca709bba64" backfill_columns = get_sqlite3_columns(db_path, "bulk_actions") @@ -1551,7 +1551,7 @@ def test_add_run_tags_run_id_idx(): assert "idx_run_tags_run_id" in get_sqlite3_indexes(db_path, "run_tags") # After downgrade (same as before migration) - instance._run_storage._alembic_downgrade(rev="16e3655b4d9b") + instance._run_storage._alembic_downgrade(rev="16e3655b4d9b") # pyright: ignore[reportAttributeAccessIssue] assert get_current_alembic_version(db_path) == "16e3655b4d9b" assert "run_tags" in get_sqlite3_tables(db_path) assert "idx_run_tags" in get_sqlite3_indexes(db_path, "run_tags") diff --git a/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_execution_plan_snapshot.py b/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_execution_plan_snapshot.py index 543503e8cb1ab..ee932c4a3a25a 100644 --- 
a/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_execution_plan_snapshot.py +++ b/python_modules/dagster/dagster_tests/general_tests/compat_tests/test_execution_plan_snapshot.py @@ -193,13 +193,13 @@ def test_execution_plan_snapshot_backcompat(): _validate_execution_plan(new_plan) # Create a snapshot and rebuild it, validate the rebuilt plan - new_plan_snapshot = snapshot_from_execution_plan(new_plan, run.job_snapshot_id) + new_plan_snapshot = snapshot_from_execution_plan(new_plan, run.job_snapshot_id) # pyright: ignore[reportArgumentType] rebuilt_plan = ExecutionPlan.rebuild_from_snapshot("dynamic_job", new_plan_snapshot) _validate_execution_plan(rebuilt_plan) # Then validate the plan built from the historical snapshot on the run stored_snapshot = instance.get_execution_plan_snapshot( - run.execution_plan_snapshot_id + run.execution_plan_snapshot_id # pyright: ignore[reportArgumentType] ) rebuilt_plan = ExecutionPlan.rebuild_from_snapshot("dynamic_job", stored_snapshot) diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/error_repo.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/error_repo.py index 83b4744a28ba4..e68f0cd420378 100644 --- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/error_repo.py +++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/error_repo.py @@ -1,7 +1,7 @@ from dagster import repository -@repository +@repository # pyright: ignore[reportArgumentType] def error_repo(): a = None - a() + a() # pyright: ignore[reportOptionalCall] diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py index 3774886a8b0d0..0d2bf7f5e5bf8 100644 --- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py +++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_persistent.py @@ -107,7 +107,7 @@ def 
test_python_environment_args(): process = open_server_process( instance.get_ref(), port, socket=None, loadable_target_origin=loadable_target_origin ) - assert process.args[:5] == [sys.executable, "-m", "dagster", "api", "grpc"] + assert process.args[:5] == [sys.executable, "-m", "dagster", "api", "grpc"] # pyright: ignore[reportIndexIssue] finally: if process: process.terminate() @@ -169,7 +169,7 @@ def test_empty_executable_args(): process = open_server_process( instance.get_ref(), port, socket=None, loadable_target_origin=loadable_target_origin ) - assert process.args[:5] == [sys.executable, "-m", "dagster", "api", "grpc"] + assert process.args[:5] == [sys.executable, "-m", "dagster", "api", "grpc"] # pyright: ignore[reportIndexIssue] client = DagsterGrpcClient(port=port, host="localhost") list_repositories_response = sync_list_repositories_grpc(client) @@ -533,7 +533,7 @@ def test_load_timeout(): process, DagsterGrpcClient(port=port, host="localhost"), subprocess_args, - timeout=0.01, + timeout=0.01, # pyright: ignore[reportArgumentType] ) assert False, "server should have timed out" except Exception as e: @@ -685,7 +685,7 @@ def test_load_with_secrets_loader_instance_ref(entrypoint): python_file, ] - with environ({"FOO": None, "FOO_INSIDE_OP": None}): + with environ({"FOO": None, "FOO_INSIDE_OP": None}): # pyright: ignore[reportArgumentType] with instance_for_test( set_dagster_home=False, ) as instance: @@ -753,7 +753,7 @@ def test_load_with_secrets_loader_no_instance_ref(entrypoint): python_file, ] - with environ({"FOO": None}): + with environ({"FOO": None}): # pyright: ignore[reportArgumentType] with instance_for_test( set_dagster_home=True, ): @@ -948,8 +948,8 @@ def test_load_with_error_logging(capfd): ) ) - assert result.error - assert 'Could not find a repository called "missing_repo_name"' in str(result.error) + assert result.error # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + assert 'Could not find a repository called 
"missing_repo_name"' in str(result.error) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] finally: process.terminate() diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_ping.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_ping.py index c752208ef88ca..fedee20436148 100644 --- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_ping.py +++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_ping.py @@ -99,8 +99,8 @@ def test_process_killed_after_server_finished(): # verify socket is cleaned up assert not os.path.exists(socket) finally: - raw_process.terminate() - raw_process.wait() + raw_process.terminate() # pyright: ignore[reportOptionalMemberAccess] + raw_process.wait() # pyright: ignore[reportOptionalMemberAccess] def test_server_port(): @@ -148,7 +148,7 @@ def test_client_port(): def test_client_port_bad_host(): port = find_free_port() with pytest.raises(check.CheckError, match="Must provide a hostname"): - DagsterGrpcClient(port=port, host=None) + DagsterGrpcClient(port=port, host=None) # pyright: ignore[reportArgumentType] @pytest.mark.skipif(seven.IS_WINDOWS, reason="Unix-only test") diff --git a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_utils.py b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_utils.py index c50711ef20303..606d85ec4eb1b 100644 --- a/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_utils.py +++ b/python_modules/dagster/dagster_tests/general_tests/grpc_tests/test_utils.py @@ -10,7 +10,7 @@ def test_default_grpc_timeouts(): with environ( - { + { # pyright: ignore[reportArgumentType] "DAGSTER_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": None, @@ -26,7 +26,7 @@ def test_default_grpc_timeouts(): def test_override_grpc_timeouts(): with environ( - { + { # pyright: ignore[reportArgumentType] 
"DAGSTER_GRPC_TIMEOUT_SECONDS": "120", "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": None, @@ -40,7 +40,7 @@ def test_override_grpc_timeouts(): assert default_repository_grpc_timeout() == 180 with environ( - { + { # pyright: ignore[reportArgumentType] "DAGSTER_GRPC_TIMEOUT_SECONDS": "240", "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": None, @@ -54,7 +54,7 @@ def test_override_grpc_timeouts(): assert default_repository_grpc_timeout() == 240 with environ( - { + { # pyright: ignore[reportArgumentType] "DAGSTER_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": "45", "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": None, @@ -68,7 +68,7 @@ def test_override_grpc_timeouts(): assert default_repository_grpc_timeout() == 180 with environ( - { + { # pyright: ignore[reportArgumentType] "DAGSTER_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": "45", @@ -86,7 +86,7 @@ def test_override_grpc_timeouts(): "DAGSTER_GRPC_TIMEOUT_SECONDS": "75", "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": "120", "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": "400", - "DAGSTER_REPOSITORY_GRPC_TIMEOUT_SECONDS": None, + "DAGSTER_REPOSITORY_GRPC_TIMEOUT_SECONDS": None, # pyright: ignore[reportArgumentType] } ): assert default_grpc_timeout() == 75 @@ -96,7 +96,7 @@ def test_override_grpc_timeouts(): assert default_grpc_server_shutdown_grace_period() == 400 with environ( - { + { # pyright: ignore[reportArgumentType] "DAGSTER_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SCHEDULE_GRPC_TIMEOUT_SECONDS": None, "DAGSTER_SENSOR_GRPC_TIMEOUT_SECONDS": None, diff --git a/python_modules/dagster/dagster_tests/general_tests/py3_tests/test_inference.py b/python_modules/dagster/dagster_tests/general_tests/py3_tests/test_inference.py index 6b0c48334b6ec..61a2aaca8ec5d 100644 --- a/python_modules/dagster/dagster_tests/general_tests/py3_tests/test_inference.py +++ 
b/python_modules/dagster/dagster_tests/general_tests/py3_tests/test_inference.py @@ -151,11 +151,11 @@ def add_one(nums: List[int]) -> Optional[List[int]]: assert len(add_one.input_defs) == 1 assert add_one.input_defs[0].name == "nums" assert add_one.input_defs[0].dagster_type.kind == DagsterTypeKind.LIST - assert add_one.input_defs[0].dagster_type.inner_type.unique_name == "Int" + assert add_one.input_defs[0].dagster_type.inner_type.unique_name == "Int" # pyright: ignore[reportAttributeAccessIssue] assert len(add_one.output_defs) == 1 assert add_one.output_defs[0].dagster_type.kind == DagsterTypeKind.NULLABLE - assert add_one.output_defs[0].dagster_type.inner_type.kind == DagsterTypeKind.LIST + assert add_one.output_defs[0].dagster_type.inner_type.kind == DagsterTypeKind.LIST # pyright: ignore[reportAttributeAccessIssue] def test_kitchen_sink(): diff --git a/python_modules/dagster/dagster_tests/general_tests/seven_tests/test_seven.py b/python_modules/dagster/dagster_tests/general_tests/seven_tests/test_seven.py index 2e396c274a449..3c9e107284c13 100644 --- a/python_modules/dagster/dagster_tests/general_tests/seven_tests/test_seven.py +++ b/python_modules/dagster/dagster_tests/general_tests/seven_tests/test_seven.py @@ -36,7 +36,7 @@ def test_json_dumps(): def test_tempdir(): - assert not _seven.temp_dir.get_system_temp_directory().startswith("/var") + assert not _seven.temp_dir.get_system_temp_directory().startswith("/var") # pyright: ignore[reportAttributeAccessIssue] def test_get_arg_names(): @@ -79,7 +79,7 @@ def foo(): def quux_decor(fn): q = Quux() - return update_wrapper(q, fn) + return update_wrapper(q, fn) # pyright: ignore[reportArgumentType] @quux_decor def yoodles(): @@ -111,7 +111,7 @@ def test_is_subclass(): # type that aren't classes can be passed into is_subclass assert not inspect.isclass(2) - assert not is_subclass(2, DagsterType) + assert not is_subclass(2, DagsterType) # pyright: ignore[reportArgumentType] @pytest.mark.skipif( diff --git 
a/python_modules/dagster/dagster_tests/general_tests/test_record.py b/python_modules/dagster/dagster_tests/general_tests/test_record.py index 4dce080210b80..08af55d1fc1dc 100644 --- a/python_modules/dagster/dagster_tests/general_tests/test_record.py +++ b/python_modules/dagster/dagster_tests/general_tests/test_record.py @@ -131,10 +131,10 @@ class MyModel2: assert MyModel2(some_class=SomeClass()) with pytest.raises(check.CheckError): - MyModel2(some_class=OtherClass()) # wrong class + MyModel2(some_class=OtherClass()) # wrong class # pyright: ignore[reportArgumentType] with pytest.raises(check.CheckError): - MyModel2(some_class=SomeClass) # forgot () + MyModel2(some_class=SomeClass) # forgot () # pyright: ignore[reportArgumentType] def test_cached_method() -> None: @@ -194,8 +194,8 @@ class Parent: def __new__(cls, partner=None, child=None): return super().__new__( cls, - partner=partner, - child=child, + partner=partner, # pyright: ignore[reportCallIssue] + child=child, # pyright: ignore[reportCallIssue] ) class Child: ... 
@@ -469,7 +469,7 @@ class AnnotatedModel: with pytest.raises( check.CheckError, match="Expected " ): - AnnotatedModel(foos=[1, 2, 3]) + AnnotatedModel(foos=[1, 2, 3]) # pyright: ignore[reportArgumentType] def _out_of_scope(): from dagster._core.test_utils import TestType @@ -533,7 +533,7 @@ class Yep: stuff: Sequence[Any] def __hash__(self): - return hash_collection(self) + return hash_collection(self) # pyright: ignore[reportArgumentType] y = Yep(stuff=[1, 2, 3]) assert hash(y) diff --git a/python_modules/dagster/dagster_tests/general_tests/test_repository.py b/python_modules/dagster/dagster_tests/general_tests/test_repository.py index 919e23b42f5a9..db367c25ebbf2 100644 --- a/python_modules/dagster/dagster_tests/general_tests/test_repository.py +++ b/python_modules/dagster/dagster_tests/general_tests/test_repository.py @@ -108,7 +108,7 @@ def empty_repository(): def test_invalid_repository(): with pytest.raises(CheckError): - @repository + @repository # pyright: ignore[reportArgumentType] def invalid_repository(_invalid_arg: str): return [] diff --git a/python_modules/dagster/dagster_tests/general_tests/test_serdes.py b/python_modules/dagster/dagster_tests/general_tests/test_serdes.py index 2633117d44e42..e39e158e89961 100644 --- a/python_modules/dagster/dagster_tests/general_tests/test_serdes.py +++ b/python_modules/dagster/dagster_tests/general_tests/test_serdes.py @@ -296,7 +296,7 @@ def test_wrong_first_arg(): @serdes_test_class class NotCls(namedtuple("NotCls", "field_one field_two")): def __new__(not_cls, field_two, field_one): # type: ignore - return super(NotCls, not_cls).__new__(field_one, field_two) + return super(NotCls, not_cls).__new__(field_one, field_two) # pyright: ignore[reportCallIssue] assert str(exc_info.value) == 'For NotCls: First parameter must be _cls or cls. Got "not_cls".' 
@@ -307,7 +307,7 @@ def test_incorrect_order(): @serdes_test_class class WrongOrder(namedtuple("WrongOrder", "field_one field_two")): def __new__(cls, field_two, field_one): - return super(WrongOrder, cls).__new__(field_one, field_two) + return super(WrongOrder, cls).__new__(field_one, field_two) # pyright: ignore[reportCallIssue] assert ( str(exc_info.value) == "For WrongOrder: " @@ -323,7 +323,7 @@ def test_missing_one_parameter(): @serdes_test_class class MissingFieldInNew(namedtuple("MissingFieldInNew", "field_one field_two field_three")): def __new__(cls, field_one, field_two): - return super(MissingFieldInNew, cls).__new__(field_one, field_two, None) + return super(MissingFieldInNew, cls).__new__(field_one, field_two, None) # pyright: ignore[reportCallIssue] assert ( str(exc_info.value) == "For MissingFieldInNew: " @@ -343,7 +343,7 @@ class MissingFieldsInNew( namedtuple("MissingFieldsInNew", "field_one field_two field_three, field_four") ): def __new__(cls, field_one, field_two): - return super(MissingFieldsInNew, cls).__new__(field_one, field_two, None, None) + return super(MissingFieldsInNew, cls).__new__(field_one, field_two, None, None) # pyright: ignore[reportCallIssue] assert ( str(exc_info.value) == "For MissingFieldsInNew: " @@ -371,7 +371,7 @@ def __new__( field_one, field_two, ): - return super(OldFieldsWithoutDefaults, cls).__new__(field_three, field_four) + return super(OldFieldsWithoutDefaults, cls).__new__(field_three, field_four) # pyright: ignore[reportCallIssue] assert ( str(exc_info.value) == "For OldFieldsWithoutDefaults: " @@ -400,7 +400,7 @@ def __new__( another_falsey_field="", value_field="klsjkfjd", ): - return super(OldFieldsWithDefaults, cls).__new__(field_three, field_four) + return super(OldFieldsWithDefaults, cls).__new__(field_three, field_four) # pyright: ignore[reportCallIssue] def test_set(): @@ -789,7 +789,7 @@ class NumHolder(NamedTuple): x = NumHolder(98765432109876543210) ser_x = serialize_value(x, test_map) roundtrip_x = 
deserialize_value(ser_x, whitelist_map=test_map) - assert x.num == roundtrip_x.num + assert x.num == roundtrip_x.num # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_enum_storage_name() -> None: @@ -860,7 +860,7 @@ class Bar(NamedTuple): non_scalar_key_mapping = SerializableNonScalarKeyMapping({Bar("red"): 1}) - serialized = serialize_value(non_scalar_key_mapping, whitelist_map=test_env) + serialized = serialize_value(non_scalar_key_mapping, whitelist_map=test_env) # pyright: ignore[reportArgumentType] assert serialized == """{"__mapping_items__": [[{"__class__": "Bar", "color": "red"}, 1]]}""" assert non_scalar_key_mapping == deserialize_value(serialized, whitelist_map=test_env) @@ -879,7 +879,7 @@ class Bar(NamedTuple): assert list(iter(non_scalar_key_mapping)) == list(iter([Bar("red")])) with pytest.raises(NotImplementedError, match="SerializableNonScalarKeyMapping is immutable"): - non_scalar_key_mapping["foo"] = None + non_scalar_key_mapping["foo"] = None # pyright: ignore[reportArgumentType] def test_serializable_non_scalar_key_mapping_in_named_tuple(): @@ -961,7 +961,7 @@ class MyEnt(NamedTuple): # type: ignore age: int children: List["MyEnt"] - nt_ent = MyEnt("dad", 40, [MyEnt("sis", 4, [])]) + nt_ent = MyEnt("dad", 40, [MyEnt("sis", 4, [])]) # pyright: ignore[reportArgumentType] ser_nt_ent = serialize_value(nt_ent, whitelist_map=nt_env) assert deserialize_value(ser_nt_ent, whitelist_map=nt_env) == nt_ent @@ -1136,19 +1136,19 @@ class MyRecord: def __new__(cls, **kwargs): return super().__new__( cls, - name=kwargs.get("name", ""), - stuff=kwargs.get("stuff", []), + name=kwargs.get("name", ""), # pyright: ignore[reportCallIssue] + stuff=kwargs.get("stuff", []), # pyright: ignore[reportCallIssue] ) r = MyRecord() assert r assert ( - deserialize_value(serialize_value(r, whitelist_map=test_env), whitelist_map=test_env) == r + deserialize_value(serialize_value(r, whitelist_map=test_env), whitelist_map=test_env) == r # pyright: 
ignore[reportArgumentType] ) r = MyRecord(name="CUSTOM", stuff=[1, 2, 3, 4, 5, 6]) assert r assert ( - deserialize_value(serialize_value(r, whitelist_map=test_env), whitelist_map=test_env) == r + deserialize_value(serialize_value(r, whitelist_map=test_env), whitelist_map=test_env) == r # pyright: ignore[reportArgumentType] ) diff --git a/python_modules/dagster/dagster_tests/general_tests/utils_tests/log_tests/test_structured_logging.py b/python_modules/dagster/dagster_tests/general_tests/utils_tests/log_tests/test_structured_logging.py index 02cfd4fc19c6b..6d01ef48290e7 100644 --- a/python_modules/dagster/dagster_tests/general_tests/utils_tests/log_tests/test_structured_logging.py +++ b/python_modules/dagster/dagster_tests/general_tests/utils_tests/log_tests/test_structured_logging.py @@ -38,4 +38,4 @@ def _append_message(logger_message): logger = define_structured_logger("some_name", _append_message, level=logging.DEBUG) context = create_test_pipeline_execution_context(logger_defs={"structured_logger": logger}) with pytest.raises(AttributeError): - context.log.gargle("from_context") + context.log.gargle("from_context") # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_op_isolation.py b/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_op_isolation.py index 3715bc82fc09d..3b731c3a39914 100644 --- a/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_op_isolation.py +++ b/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_op_isolation.py @@ -197,7 +197,7 @@ def test_execute_nested_graphs(): nested_graph_job = nesting_graph(2, 2).to_job() nested_graph = nested_graph_job.nodes[0].definition - res = nested_graph.execute_in_process() + res = nested_graph.execute_in_process() # pyright: ignore[reportAttributeAccessIssue] assert res.success @@ -231,8 +231,8 @@ def add_op(num_one, num_two): assert not result.success failure_data = 
result.failure_data_for_node("add_op") - assert failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" + assert failure_data.error.cls_name == "DagsterTypeCheckDidNotPass" # pyright: ignore[reportOptionalMemberAccess] assert ( 'Type check failed for step input "num_two" - expected type "Int"' - in failure_data.error.message + in failure_data.error.message # pyright: ignore[reportOptionalMemberAccess] ) diff --git a/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_utils.py b/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_utils.py index a2f4d890fd21d..7de357a2d9490 100644 --- a/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_utils.py +++ b/python_modules/dagster/dagster_tests/general_tests/utils_tests/test_utils.py @@ -35,10 +35,10 @@ def basic_generator(): yield "C" with pytest.raises(CheckError, match="Not a generator"): - EventGenerationManager(None, int) + EventGenerationManager(None, int) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="must be a class"): - EventGenerationManager(basic_generator(), None) + EventGenerationManager(basic_generator(), None) # pyright: ignore[reportArgumentType] with pytest.raises(CheckError, match="Called `get_object` before `generate_setup_events`"): basic_manager = EventGenerationManager(basic_generator(), int) diff --git a/python_modules/dagster/dagster_tests/general_tests/utils_tests/utils.py b/python_modules/dagster/dagster_tests/general_tests/utils_tests/utils.py index f025f49691a6c..d2ae730c433ee 100644 --- a/python_modules/dagster/dagster_tests/general_tests/utils_tests/utils.py +++ b/python_modules/dagster/dagster_tests/general_tests/utils_tests/utils.py @@ -6,6 +6,6 @@ @contextmanager def assert_no_warnings(): # https://stackoverflow.com/questions/45671803/how-to-use-pytest-to-assert-no-warning-is-raised - with pytest.warns(None) as record: + with pytest.warns(None) as record: # pyright: ignore[reportArgumentType] yield assert 
len(record) == 0, f"Unexpected warnings: {[str(record[i]) for i in range(len(record))]}" diff --git a/python_modules/dagster/dagster_tests/launcher_tests/test_default_run_launcher.py b/python_modules/dagster/dagster_tests/launcher_tests/test_default_run_launcher.py index 7dea5143d3c9f..b9bb3748ffe01 100644 --- a/python_modules/dagster/dagster_tests/launcher_tests/test_default_run_launcher.py +++ b/python_modules/dagster/dagster_tests/launcher_tests/test_default_run_launcher.py @@ -356,7 +356,7 @@ def test_invalid_instance_run(): instance.launch_run(run_id=run.run_id, workspace=workspace) failed_run = instance.get_run_by_id(run.run_id) - assert failed_run.status == DagsterRunStatus.FAILURE + assert failed_run.status == DagsterRunStatus.FAILURE # pyright: ignore[reportOptionalMemberAccess] @pytest.mark.parametrize( diff --git a/python_modules/dagster/dagster_tests/launcher_tests/test_persistent_grpc_run_launcher.py b/python_modules/dagster/dagster_tests/launcher_tests/test_persistent_grpc_run_launcher.py index 71c10ef2d9cc8..cd90b7b839401 100644 --- a/python_modules/dagster/dagster_tests/launcher_tests/test_persistent_grpc_run_launcher.py +++ b/python_modules/dagster/dagster_tests/launcher_tests/test_persistent_grpc_run_launcher.py @@ -60,13 +60,13 @@ def test_run_always_finishes(): ) run_id = dagster_run.run_id - assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED + assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED # pyright: ignore[reportOptionalMemberAccess] instance.launch_run(run_id=run_id, workspace=workspace) # Server process now receives shutdown event, run has not finished yet dagster_run = instance.get_run_by_id(run_id) - assert not dagster_run.is_finished + assert not dagster_run.is_finished # pyright: ignore[reportOptionalMemberAccess] assert server_process.server_process.poll() is None # Server should wait until run finishes, then shutdown @@ -158,13 +158,13 @@ def test_run_from_pending_repository(): 
run_id = dagster_run.run_id - assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED + assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED # pyright: ignore[reportOptionalMemberAccess] instance.launch_run(run_id=run_id, workspace=workspace) # Server process now receives shutdown event, run has not finished yet dagster_run = instance.get_run_by_id(run_id) - assert not dagster_run.is_finished + assert not dagster_run.is_finished # pyright: ignore[reportOptionalMemberAccess] assert server_process.server_process.poll() is None # Server should wait until run finishes, then shutdown @@ -192,8 +192,8 @@ def test_run_from_pending_repository(): assert call_counts.get("compute_cacheable_data_called_b") == "1" # once at initial load time, once inside the run launch process, once for each (3) subprocess # upper bound of 5 here because race conditions result in lower count sometimes - assert int(call_counts.get("get_definitions_called_a")) < 6 - assert int(call_counts.get("get_definitions_called_b")) < 6 + assert int(call_counts.get("get_definitions_called_a")) < 6 # pyright: ignore[reportArgumentType] + assert int(call_counts.get("get_definitions_called_b")) < 6 # pyright: ignore[reportArgumentType] def test_terminate_after_shutdown(): @@ -228,7 +228,7 @@ def test_terminate_after_shutdown(): code_location = workspace.get_code_location("test") # Tell the server to shut down once executions finish - code_location.grpc_server_registry.get_grpc_endpoint( + code_location.grpc_server_registry.get_grpc_endpoint( # pyright: ignore[reportAttributeAccessIssue] code_location.origin ).create_client().shutdown_server() @@ -276,7 +276,7 @@ def test_server_down(): location_name="test", port=api_client.port, socket=api_client.socket, - host=api_client.host, + host=api_client.host, # pyright: ignore[reportArgumentType] ), ) as workspace_process_context: workspace = workspace_process_context.create_request_context() @@ -300,7 +300,7 @@ def 
test_server_down(): launcher = instance.run_launcher - original_run_tags = instance.get_run_by_id(dagster_run.run_id).tags[GRPC_INFO_TAG] + original_run_tags = instance.get_run_by_id(dagster_run.run_id).tags[GRPC_INFO_TAG] # pyright: ignore[reportOptionalMemberAccess] # Replace run tags with an invalid port instance.add_run_tags( diff --git a/python_modules/dagster/dagster_tests/logging_tests/test_logging.py b/python_modules/dagster/dagster_tests/logging_tests/test_logging.py index 988d48a5ba650..4f03f87f293cf 100644 --- a/python_modules/dagster/dagster_tests/logging_tests/test_logging.py +++ b/python_modules/dagster/dagster_tests/logging_tests/test_logging.py @@ -98,7 +98,7 @@ def test_logging_custom_log_levels(): dagster_run=DagsterRun(job_name="system", run_id="123"), ) with pytest.raises(AttributeError): - dl.foo("test") + dl.foo("test") # pyright: ignore[reportAttributeAccessIssue] def test_logging_integer_log_levels(): diff --git a/python_modules/dagster/dagster_tests/logging_tests/test_stdout.py b/python_modules/dagster/dagster_tests/logging_tests/test_stdout.py index 1437d145cb004..0a4a31cd5e061 100644 --- a/python_modules/dagster/dagster_tests/logging_tests/test_stdout.py +++ b/python_modules/dagster/dagster_tests/logging_tests/test_stdout.py @@ -139,9 +139,9 @@ def test_compute_log_manager(): log_key = manager.build_log_key_for_run(result.run_id, event.logs_captured_data.file_key) assert manager.is_capture_complete(log_key) log_data = manager.get_log_data(log_key) - stdout = normalize_file_content(log_data.stdout.decode("utf-8")) + stdout = normalize_file_content(log_data.stdout.decode("utf-8")) # pyright: ignore[reportOptionalMemberAccess] assert stdout == f"{HELLO_FROM_OP}\n{HELLO_FROM_OP}" - stderr = normalize_file_content(log_data.stderr.decode("utf-8")) + stderr = normalize_file_content(log_data.stderr.decode("utf-8")) # pyright: ignore[reportOptionalMemberAccess] cleaned_logs = stderr.replace("\x1b[34m", "").replace("\x1b[0m", "") assert "dagster - 
DEBUG - spew_job - " in cleaned_logs @@ -239,7 +239,7 @@ def long_job(): assert manager.is_capture_complete(log_key) log_data = manager.get_log_data(log_key) - assert normalize_file_content(log_data.stdout.decode("utf-8")) == HELLO_FROM_OP + assert normalize_file_content(log_data.stdout.decode("utf-8")) == HELLO_FROM_OP # pyright: ignore[reportOptionalMemberAccess] def execute_inner(step_key: str, dagster_run: DagsterRun, instance_ref: InstanceRef) -> None: @@ -287,7 +287,7 @@ def test_single(): for step_key in step_keys: log_key = [dagster_run.run_id, "compute_logs", step_key] log_data = instance.compute_log_manager.get_log_data(log_key) - assert normalize_file_content(log_data.stdout.decode("utf-8")) == expected_inner_output( + assert normalize_file_content(log_data.stdout.decode("utf-8")) == expected_inner_output( # pyright: ignore[reportOptionalMemberAccess] step_key ) @@ -295,7 +295,7 @@ def test_single(): [dagster_run.run_id, "compute_logs", job_name] ) - assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( + assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( # pyright: ignore[reportOptionalMemberAccess] expected_outer_prefix() ) @@ -333,14 +333,14 @@ def test_compute_log_base_with_spaces(): log_key = [dagster_run.run_id, "compute_logs", step_key] log_data = instance.compute_log_manager.get_log_data(log_key) assert normalize_file_content( - log_data.stdout.decode("utf-8") + log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] ) == expected_inner_output(step_key) full_data = instance.compute_log_manager.get_log_data( [dagster_run.run_id, "compute_logs", job_name] ) - assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( + assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( # pyright: ignore[reportOptionalMemberAccess] expected_outer_prefix() ) @@ -374,7 +374,7 @@ def test_multi(): for step_key in step_keys: log_key = [dagster_run.run_id, 
"compute_logs", step_key] log_data = instance.compute_log_manager.get_log_data(log_key) - assert normalize_file_content(log_data.stdout.decode("utf-8")) == expected_inner_output( + assert normalize_file_content(log_data.stdout.decode("utf-8")) == expected_inner_output( # pyright: ignore[reportOptionalMemberAccess] step_key ) @@ -385,6 +385,6 @@ def test_multi(): # The way that the multiprocess compute-logging interacts with pytest (which stubs out the # sys.stdout fileno) makes this difficult to test. The pytest-captured stdout only captures # the stdout from the outer process, not also the inner process - assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( + assert normalize_file_content(full_data.stdout.decode("utf-8")).startswith( # pyright: ignore[reportOptionalMemberAccess] expected_outer_prefix() ) diff --git a/python_modules/dagster/dagster_tests/model_tests/test_dagster_model.py b/python_modules/dagster/dagster_tests/model_tests/test_dagster_model.py index 27d5a3e72bf4c..54dbed3b80da9 100644 --- a/python_modules/dagster/dagster_tests/model_tests/test_dagster_model.py +++ b/python_modules/dagster/dagster_tests/model_tests/test_dagster_model.py @@ -69,10 +69,10 @@ class MyModel(DagsterModel): assert MyModel(some_class=SomeClass()) with pytest.raises(ValidationError): - MyModel(some_class=OtherClass()) # wrong class + MyModel(some_class=OtherClass()) # wrong class # pyright: ignore[reportArgumentType] with pytest.raises(ValidationError): - MyModel(some_class=SomeClass) # forgot () + MyModel(some_class=SomeClass) # forgot () # pyright: ignore[reportArgumentType] def test_cached_method() -> None: diff --git a/python_modules/dagster/dagster_tests/model_tests/test_dagster_model_serdes.py b/python_modules/dagster/dagster_tests/model_tests/test_dagster_model_serdes.py index bd69c7423b527..3ac30aab78491 100644 --- a/python_modules/dagster/dagster_tests/model_tests/test_dagster_model_serdes.py +++ 
b/python_modules/dagster/dagster_tests/model_tests/test_dagster_model_serdes.py @@ -44,7 +44,7 @@ class SomeDagsterModel(DagsterModel): class Config: alias_generator = lambda field_name: f"{field_name}_alias" - o = SomeDagsterModel(id_alias=5, name_alias="fdsk") + o = SomeDagsterModel(id_alias=5, name_alias="fdsk") # pyright: ignore[reportCallIssue] packed_o = pack_value(o, whitelist_map=test_env) assert packed_o == {"__class__": "SomeDagsterModel", "id_alias": 5, "name_alias": "fdsk"} assert unpack_value(packed_o, whitelist_map=test_env, as_type=SomeDagsterModel) == o @@ -83,7 +83,7 @@ class SomeDagsterModel(DagsterModel): unaliased_id: int = Field(..., validation_alias="id_alias") name: str - o = SomeDagsterModel(id_alias=5, name="fdsk") + o = SomeDagsterModel(id_alias=5, name="fdsk") # pyright: ignore[reportCallIssue] with pytest.raises( SerializationError, match="Can't serialize pydantic models with serialization or validation aliases.", diff --git a/python_modules/dagster/dagster_tests/scheduler_tests/test_cron_string_iterator.py b/python_modules/dagster/dagster_tests/scheduler_tests/test_cron_string_iterator.py index 690ee891948f9..522609e893734 100644 --- a/python_modules/dagster/dagster_tests/scheduler_tests/test_cron_string_iterator.py +++ b/python_modules/dagster/dagster_tests/scheduler_tests/test_cron_string_iterator.py @@ -488,7 +488,7 @@ def test_dst_transition_advances(execution_timezone, cron_string, times, force_c assert ( next_time.timestamp() == times[j].timestamp() - ), f"Expected ({datetime.datetime.from_timestamp(orig_start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" + ), f"Expected ({datetime.datetime.from_timestamp(orig_start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" # pyright: 
ignore[reportAttributeAccessIssue] prev_time = next_time start_timestamp = orig_start_timestamp + 1 @@ -517,7 +517,7 @@ def test_dst_transition_advances(execution_timezone, cron_string, times, force_c assert ( next_time.timestamp() == times[j].timestamp() - ), f"Expected ({datetime.datetime.from_timestamp(start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" + ), f"Expected ({datetime.datetime.from_timestamp(start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" # pyright: ignore[reportAttributeAccessIssue] prev_time = next_time @@ -578,7 +578,7 @@ def test_reversed_dst_transition_advances(execution_timezone, cron_string, times assert ( next_time.timestamp() == times[j].timestamp() - ), f"Expected ({datetime.datetime.from_timestamp(start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" + ), f"Expected ({datetime.datetime.from_timestamp(start_timestamp, tz=get_timezone(execution_timezone))}) to advance from {prev_time} to {times[j]}, got {next_time} (Difference: {next_time.timestamp() - times[j].timestamp()})" # pyright: ignore[reportAttributeAccessIssue] prev_time = next_time diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_asset_events.py b/python_modules/dagster/dagster_tests/storage_tests/test_asset_events.py index ceba80fd31f77..982de5a203fa3 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_asset_events.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_asset_events.py @@ -73,7 +73,7 @@ def after(before): event for event in result.all_events if event.event_type_value == "LOADED_INPUT" ) assert loaded_input_event - loaded_input_event_metadata = 
loaded_input_event.event_specific_data.metadata + loaded_input_event_metadata = loaded_input_event.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(loaded_input_event_metadata) == 2 assert "foo" in loaded_input_event_metadata assert "baz" in loaded_input_event_metadata @@ -98,7 +98,7 @@ def my_job(): loaded_input_event = next( event for event in result.all_events if event.event_type_value == "LOADED_INPUT" ) - metadata = loaded_input_event.event_specific_data.metadata + metadata = loaded_input_event.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(metadata) == 2 assert "foo" in metadata assert "baz" in metadata diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_asset_lineage.py b/python_modules/dagster/dagster_tests/storage_tests/test_asset_lineage.py index 598b34e2f4269..b22ccd6730241 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_asset_lineage.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_asset_lineage.py @@ -16,11 +16,11 @@ def check_materialization(materialization, asset_key, parent_assets=None, metada @pytest.mark.skip(reason="no longer supporting dynamic output asset keys") def test_dynamic_output_definition_single_partition_materialization(): - @op(out={"output1": Out(asset_key=AssetKey("table1"))}) + @op(out={"output1": Out(asset_key=AssetKey("table1"))}) # pyright: ignore[reportCallIssue] def op1(_): return Output(None, "output1", metadata={"nrows": 123}) - @op(out={"output2": DynamicOut(asset_key=lambda context: AssetKey(context.mapping_key))}) + @op(out={"output2": DynamicOut(asset_key=lambda context: AssetKey(context.mapping_key))}) # pyright: ignore[reportCallIssue] def op2(_, _input1): for i in range(4): yield DynamicOutput( @@ -46,7 +46,7 @@ def my_job(): check_materialization(materializations[0], AssetKey(["table1"]), metadata={"nrows": 123}) seen_paths = set() for 
i in range(1, 5): - path = materializations[i].asset_key.path + path = materializations[i].asset_key.path # pyright: ignore[reportOptionalMemberAccess] seen_paths.add(tuple(path)) check_materialization( materializations[i], diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_asset_value_loader.py b/python_modules/dagster/dagster_tests/storage_tests/test_asset_value_loader.py index 858518b14c6aa..397434434aadd 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_asset_value_loader.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_asset_value_loader.py @@ -30,16 +30,16 @@ def handle_output(self, context, obj): def load_input(self, context): assert context.asset_key == AssetKey("asset1") - assert context.upstream_output.asset_key == AssetKey("asset1") - assert context.upstream_output.definition_metadata["a"] == "b" - assert context.upstream_output.op_def == asset1.op - assert context.upstream_output.name == "result" + assert context.upstream_output.asset_key == AssetKey("asset1") # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.definition_metadata["a"] == "b" # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.op_def == asset1.op # pyright: ignore[reportOptionalMemberAccess] + assert context.upstream_output.name == "result" # pyright: ignore[reportOptionalMemberAccess] assert context.dagster_type.typing_type == int return 5 happenings = set() - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] @contextmanager def my_io_manager(): try: @@ -74,14 +74,14 @@ def handle_output(self, context, obj): def load_input(self, context): assert context.asset_key == AssetKey("asset1") - assert context.upstream_output.asset_key == AssetKey("asset1") - assert context.upstream_output.definition_metadata["a"] == "b" + assert context.upstream_output.asset_key == AssetKey("asset1") # pyright: ignore[reportOptionalMemberAccess] + assert 
context.upstream_output.definition_metadata["a"] == "b" # pyright: ignore[reportOptionalMemberAccess] assert context.dagster_type.typing_type == int return 5 happenings = set() - @io_manager + @io_manager # pyright: ignore[reportCallIssue,reportArgumentType] @contextmanager def my_io_manager(): try: @@ -114,7 +114,7 @@ def handle_output(self, context, obj): def load_input(self, context): assert context.resources.other_resource == "apple" - assert context.resource_config["config_key"] == "config_val" + assert context.resource_config["config_key"] == "config_val" # pyright: ignore[reportOptionalSubscript] return 5 @io_manager(required_resource_keys={"other_resource"}, config_schema={"config_key": str}) diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_assets.py b/python_modules/dagster/dagster_tests/storage_tests/test_assets.py index 6fb0385e608b8..6ec8ddd903d57 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_assets.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_assets.py @@ -203,17 +203,17 @@ def my_job(): with copy_directory(src_dir) as test_dir: with DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) as instance: storage = instance.event_log_storage - assert not storage.has_asset_key_index_cols() - assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not storage.has_asset_key_index_cols() # pyright: ignore[reportAttributeAccessIssue] + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # run the schema migration without reindexing the asset keys storage.upgrade() - assert storage.has_asset_key_index_cols() - assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert storage.has_asset_key_index_cols() # pyright: ignore[reportAttributeAccessIssue] + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # fetch all asset keys instance.all_asset_keys() - assert not 
storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # wipe a, b in order to populate wipe_timestamp storage.wipe_asset(AssetKey("a")) @@ -223,16 +223,16 @@ def my_job(): my_job.execute_in_process(instance=instance) # still should not be migrated (on write) - assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # fetching partial results should not trigger migration instance.get_asset_keys(prefix=["b"]) instance.get_asset_keys(cursor=str(AssetKey("b"))) instance.get_latest_materialization_events(asset_keys=[AssetKey("b")]) - assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # on read, we should see that all the data has already been migrated and we can now mark # the asset key index as migrated instance.all_asset_keys() - assert storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_captured_log_manager.py b/python_modules/dagster/dagster_tests/storage_tests/test_captured_log_manager.py index 0fe9b98ae0847..62bf91f97cfee 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_captured_log_manager.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_captured_log_manager.py @@ -83,10 +83,10 @@ def my_job(): assert len(captured_log_entries) == 1 entry = captured_log_entries[0] assert ( - entry.dagster_event.logs_captured_data.external_stdout_url == "https://fake.com/stdout" + entry.dagster_event.logs_captured_data.external_stdout_url == "https://fake.com/stdout" # pyright: ignore[reportOptionalMemberAccess] ) assert ( - 
entry.dagster_event.logs_captured_data.external_stderr_url == "https://fake.com/stderr" + entry.dagster_event.logs_captured_data.external_stderr_url == "https://fake.com/stderr" # pyright: ignore[reportOptionalMemberAccess] ) @@ -99,13 +99,13 @@ def test_get_log_keys_for_log_key_prefix(): def write_log_file(file_id: int): full_log_key = [*log_key_prefix, f"{file_id}"] with cm.open_log_stream(full_log_key, ComputeIOType.STDERR) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] for i in range(4): write_log_file(i) log_keys = cm.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -129,9 +129,9 @@ def write_log_file(file_id: int): for j in range(num_lines): msg = f"file: {file_id}, line: {j}" all_logs.append(msg) - f.write(msg) + f.write(msg) # pyright: ignore[reportOptionalMemberAccess] if j < num_lines - 1: - f.write("\n") + f.write("\n") # pyright: ignore[reportOptionalMemberAccess] for i in range(4): write_log_file(i) @@ -144,46 +144,52 @@ def write_log_file(file_id: int): log_key_prefix, cursor=None, io_type=ComputeIOType.STDERR ) assert len(log_lines) == 10 - assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "1"] - assert cursor.line == 0 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "1"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 0 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read half of the next log file os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "5" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: 
ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 5 - assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "1"] - assert cursor.line == 5 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "1"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 5 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read the next ten lines, five will be in the second file, five will be in the third os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "10" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 10 - assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "2"] - assert cursor.line == 5 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "2"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 5 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read the remaining 15 lines, but request 20 os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "20" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 15 - assert not cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "3"] + assert not cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "3"] # pyright: ignore[reportOptionalMemberAccess] # processed up to the end of 
the file, but there is not another file to process so cursor should be -1 - assert cursor.line == -1 + assert cursor.line == -1 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) @@ -193,12 +199,14 @@ def write_log_file(file_id: int): os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "15" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 10 - assert not cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "4"] + assert not cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "4"] # pyright: ignore[reportOptionalMemberAccess] # processed up to the end of the file, but there is not another file to process so cursor should be -1 - assert cursor.line == -1 + assert cursor.line == -1 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_compute_log_manager.py b/python_modules/dagster/dagster_tests/storage_tests/test_compute_log_manager.py index 5e881d704fca5..1a41d920304f9 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_compute_log_manager.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_compute_log_manager.py @@ -243,10 +243,10 @@ def my_job(): assert len(captured_log_entries) == 1 entry = captured_log_entries[0] assert ( - entry.dagster_event.logs_captured_data.external_stdout_url == "https://fake.com/stdout" + entry.dagster_event.logs_captured_data.external_stdout_url == "https://fake.com/stdout" # pyright: ignore[reportOptionalMemberAccess] ) assert ( - entry.dagster_event.logs_captured_data.external_stderr_url == "https://fake.com/stderr" + 
entry.dagster_event.logs_captured_data.external_stderr_url == "https://fake.com/stderr" # pyright: ignore[reportOptionalMemberAccess] ) @@ -259,13 +259,13 @@ def test_get_log_keys_for_log_key_prefix(): def write_log_file(file_id: int): full_log_key = [*log_key_prefix, f"{file_id}"] with cm.open_log_stream(full_log_key, ComputeIOType.STDERR) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] for i in range(4): write_log_file(i) log_keys = cm.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -289,9 +289,9 @@ def write_log_file(file_id: int): for j in range(num_lines): msg = f"file: {file_id}, line: {j}" all_logs.append(msg) - f.write(msg) + f.write(msg) # pyright: ignore[reportOptionalMemberAccess] if j < num_lines - 1: - f.write("\n") + f.write("\n") # pyright: ignore[reportOptionalMemberAccess] for i in range(4): write_log_file(i) @@ -304,46 +304,52 @@ def write_log_file(file_id: int): log_key_prefix, cursor=None, io_type=ComputeIOType.STDERR ) assert len(log_lines) == 10 - assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "1"] - assert cursor.line == 0 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "1"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 0 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read half of the next log file os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "5" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 5 - 
assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "1"] - assert cursor.line == 5 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "1"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 5 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read the next ten lines, five will be in the second file, five will be in the third os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "10" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 10 - assert cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "2"] - assert cursor.line == 5 + assert cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "2"] # pyright: ignore[reportOptionalMemberAccess] + assert cursor.line == 5 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) # read the remaining 15 lines, but request 20 os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "20" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 15 - assert not cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "3"] + assert not cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "3"] # pyright: ignore[reportOptionalMemberAccess] # processed up to the end of the file, but there is not another file to process so cursor should be -1 - assert cursor.line == 
-1 + assert cursor.line == -1 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) @@ -353,12 +359,14 @@ def write_log_file(file_id: int): os.environ["DAGSTER_CAPTURED_LOG_CHUNK_SIZE"] = "15" log_lines, cursor = cm.read_log_lines_for_log_key_prefix( - log_key_prefix, cursor=cursor.to_string(), io_type=ComputeIOType.STDERR + log_key_prefix, + cursor=cursor.to_string(), # pyright: ignore[reportOptionalMemberAccess] + io_type=ComputeIOType.STDERR, ) assert len(log_lines) == 10 - assert not cursor.has_more_now - assert cursor.log_key == [*log_key_prefix, "4"] + assert not cursor.has_more_now # pyright: ignore[reportOptionalMemberAccess] + assert cursor.log_key == [*log_key_prefix, "4"] # pyright: ignore[reportOptionalMemberAccess] # processed up to the end of the file, but there is not another file to process so cursor should be -1 - assert cursor.line == -1 + assert cursor.line == -1 # pyright: ignore[reportOptionalMemberAccess] for ll in log_lines: assert ll == next(all_logs_iter) diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_event_log.py b/python_modules/dagster/dagster_tests/storage_tests/test_event_log.py index 051b24aa31595..b111e213861fb 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_event_log.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_event_log.py @@ -73,7 +73,7 @@ def test_filesystem_event_log_storage_run_corrupted(self, storage): os.path.abspath(storage.conn_string_for_shard("foo")[10:]), "w", encoding="utf8" ) as fd: fd.write("some nonsense") - with pytest.raises(sqlalchemy.exc.DatabaseError): + with pytest.raises(sqlalchemy.exc.DatabaseError): # pyright: ignore[reportAttributeAccessIssue] storage.get_logs_for_run("foo") def test_filesystem_event_log_storage_run_corrupted_bad_data(self, storage): diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_fs_io_manager.py 
b/python_modules/dagster/dagster_tests/storage_tests/test_fs_io_manager.py index 2e03c9d6060b4..4742e2e955271 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_fs_io_manager.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_fs_io_manager.py @@ -71,19 +71,19 @@ def test_fs_io_manager(): assert len(handled_output_events) == 2 filepath_a = os.path.join(tmpdir_path, result.run_id, "op_a", "result") - metadata = handled_output_events[0].event_specific_data.metadata + metadata = handled_output_events[0].event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert metadata["path"] == MetadataValue.path(filepath_a) assert os.path.isfile(filepath_a) with open(filepath_a, "rb") as read_obj: assert pickle.load(read_obj) == [1, 2, 3] loaded_input_events = list(filter(lambda evt: evt.is_loaded_input, result.all_events)) - metadata = loaded_input_events[0].event_specific_data.metadata + metadata = loaded_input_events[0].event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(loaded_input_events) == 1 - assert loaded_input_events[0].event_specific_data.upstream_step_key == "op_a" + assert loaded_input_events[0].event_specific_data.upstream_step_key == "op_a" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] filepath_b = os.path.join(tmpdir_path, result.run_id, "op_b", "result") - metadata = handled_output_events[1].event_specific_data.metadata + metadata = handled_output_events[1].event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert metadata["path"] == MetadataValue.path(filepath_b) assert os.path.isfile(filepath_b) with open(filepath_b, "rb") as read_obj: @@ -203,7 +203,7 @@ def test_fs_io_manager_handles_assets(): io_manager_def = fs_io_manager.configured({"base_dir": tmpdir_path}) job_def = get_assets_job(io_manager_def) - result = job_def.execute_in_process() + 
result = job_def.execute_in_process() # pyright: ignore[reportOptionalMemberAccess] assert result.success handled_output_events = list( @@ -218,7 +218,7 @@ def test_fs_io_manager_handles_assets(): loaded_input_events = list(filter(lambda evt: evt.is_loaded_input, result.all_node_events)) assert len(loaded_input_events) == 1 - assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("asset1") + assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("asset1") # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] filepath_b = os.path.join(tmpdir_path, "four", "five", "asset2") assert os.path.isfile(filepath_b) @@ -234,7 +234,7 @@ def test_fs_io_manager_partitioned(): partitions_def=DailyPartitionsDefinition(start_date="2020-02-01"), ) - result = job_def.execute_in_process(partition_key="2020-05-03") + result = job_def.execute_in_process(partition_key="2020-05-03") # pyright: ignore[reportOptionalMemberAccess] assert result.success handled_output_events = list( @@ -249,7 +249,7 @@ def test_fs_io_manager_partitioned(): loaded_input_events = list(filter(lambda evt: evt.is_loaded_input, result.all_node_events)) assert len(loaded_input_events) == 1 - assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("asset1") + assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("asset1") # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] filepath_b = os.path.join(tmpdir_path, "four", "five", "asset2", "2020-05-03") assert os.path.isfile(filepath_b) @@ -386,7 +386,7 @@ def four(inp): loaded_input_events = list(filter(lambda evt: evt.is_loaded_input, result.all_node_events)) assert len(loaded_input_events) == 3 - assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("one") + assert loaded_input_events[0].event_specific_data.upstream_step_key.endswith("one") # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] 
filepath_b = os.path.join(tmpdir_path, "four", "A") assert os.path.isfile(filepath_b) @@ -485,7 +485,7 @@ def asset1(): filter(lambda evt: evt.is_handled_output, result.all_node_events) ) assert len(handled_output_events) == 1 - metadata = handled_output_events[0].event_specific_data.metadata + metadata = handled_output_events[0].event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert "path" not in metadata diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_input_manager.py b/python_modules/dagster/dagster_tests/storage_tests/test_input_manager.py index 1e8b4f4e8a418..60ce965bbe7bd 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_input_manager.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_input_manager.py @@ -366,7 +366,7 @@ def handle_output(self, context, obj): ... resources={"special_io_manager": IOManagerDefinition.hardcoded_io_manager(MyIOManager())}, ) - assert output._get_output_for_handle("downstream", "result") == 3 # noqa: SLF001 + assert output._get_output_for_handle("downstream", "result") == 3 # noqa: SLF001 # pyright: ignore[reportArgumentType] def test_input_manager_with_observable_source_asset() -> None: @@ -450,7 +450,7 @@ def downstream(upstream) -> int: class MyIOManager(IOManager): def load_input(self, context): - assert context.resource_config["foo"] == "bar" + assert context.resource_config["foo"] == "bar" # pyright: ignore[reportOptionalSubscript] assert context.upstream_output is not None assert context.upstream_output.asset_key == AssetKey(["upstream"]) @@ -520,10 +520,10 @@ def simple(): failure_data = result.filter_events(lambda evt: evt.is_step_failure)[0].step_failure_data - assert failure_data.error.cls_name == "Failure" + assert failure_data.error.cls_name == "Failure" # pyright: ignore[reportOptionalMemberAccess] - assert failure_data.user_failure_data.description == "Foolure" - assert 
failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") + assert failure_data.user_failure_data.description == "Foolure" # pyright: ignore[reportOptionalMemberAccess] + assert failure_data.user_failure_data.metadata["label"] == MetadataValue.text("text") # pyright: ignore[reportOptionalMemberAccess] def test_input_manager_with_retries(): @@ -572,13 +572,13 @@ def simple(): step_stats_1 = instance.get_run_step_stats(result.run_id, step_keys=["take_input_1"]) assert len(step_stats_1) == 1 step_stat_1 = step_stats_1[0] - assert step_stat_1.status.value == "SUCCESS" + assert step_stat_1.status.value == "SUCCESS" # pyright: ignore[reportOptionalMemberAccess] assert step_stat_1.attempts == 3 step_stats_2 = instance.get_run_step_stats(result.run_id, step_keys=["take_input_2"]) assert len(step_stats_2) == 1 step_stat_2 = step_stats_2[0] - assert step_stat_2.status.value == "FAILURE" + assert step_stat_2.status.value == "FAILURE" # pyright: ignore[reportOptionalMemberAccess] assert step_stat_2.attempts == 4 step_stats_3 = instance.get_run_step_stats(result.run_id, step_keys=["take_input_3"]) diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager.py b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager.py index 21c10462a4033..6de157688983a 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager.py @@ -58,7 +58,7 @@ def my_op(): class MyIOManager(IOManager): def load_input(self, context): - assert context.upstream_output.config["some_config"] == "some_value" + assert context.upstream_output.config["some_config"] == "some_value" # pyright: ignore[reportOptionalMemberAccess] return 1 def handle_output(self, context, obj): @@ -193,7 +193,7 @@ def test_fs_io_manager_reexecution(): assert re_result.success loaded_input_events = re_result.filter_events(lambda evt: evt.is_loaded_input) assert len(loaded_input_events) == 1 
- assert loaded_input_events[0].event_specific_data.upstream_step_key == "op_a" + assert loaded_input_events[0].event_specific_data.upstream_step_key == "op_a" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert [ evt.step_key for evt in re_result.filter_events(lambda evt: evt.is_step_success) ] == ["op_b"] @@ -402,7 +402,7 @@ def test_step_subset_with_custom_paths(): ) assert len(step_materialization_events) == 1 assert os.path.join(tmpdir_path, test_metadata_dict["op_b"]["path"]) == ( - step_materialization_events[0].event_specific_data.materialization.metadata["path"].path + step_materialization_events[0].event_specific_data.materialization.metadata["path"].path # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) # test reexecution via backfills (not via re-execution apis) @@ -431,7 +431,7 @@ def handle_output(self, context, obj): yield AssetMaterialization(asset_key="yield_two") def load_input(self, context): - keys = tuple(context.upstream_output.get_identifier()) + keys = tuple(context.upstream_output.get_identifier()) # pyright: ignore[reportOptionalMemberAccess] return self.values[keys] def has_asset(self, context): @@ -480,7 +480,7 @@ def my_job(): assert my_job.execute_in_process().success -@io_manager +@io_manager # pyright: ignore[reportCallIssue,reportArgumentType] def my_io_manager(): pass @@ -533,7 +533,7 @@ def my_job(): def test_configured(): - @io_manager( + @io_manager( # pyright: ignore[reportArgumentType] config_schema={"base_dir": str}, description="abc", output_config_schema={"path": str}, @@ -691,7 +691,7 @@ def single_op_job(): step_failure = next( event for event in result.all_events if event.event_type_value == "STEP_FAILURE" ) - assert step_failure.event_specific_data.error.cls_name == "DagsterExecutionHandleOutputError" + assert step_failure.event_specific_data.error.cls_name == "DagsterExecutionHandleOutputError" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] 
def test_handle_output_exception_raised(): @@ -718,7 +718,7 @@ def single_op_job(): step_failure = next( event for event in result.all_node_events if event.event_type_value == "STEP_FAILURE" ) - assert step_failure.event_specific_data.error.cls_name == "DagsterExecutionHandleOutputError" + assert step_failure.event_specific_data.error.cls_name == "DagsterExecutionHandleOutputError" # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_output_identifier_dynamic_memoization(): @@ -746,7 +746,7 @@ def after(before): class MyIOManager(IOManager): def load_input(self, context): assert context.asset_key == before.key - assert context.upstream_output.asset_key == before.key + assert context.upstream_output.asset_key == before.key # pyright: ignore[reportOptionalMemberAccess] return 1 def handle_output(self, context, obj): @@ -799,7 +799,7 @@ def handle_output(self, context, obj): context.log_event(AssetMaterialization(asset_key="second")) def load_input(self, context): - keys = tuple(context.upstream_output.get_identifier()) + keys = tuple(context.upstream_output.get_identifier()) # pyright: ignore[reportOptionalMemberAccess] return self.values[keys] @op @@ -834,10 +834,10 @@ def the_graph(): assert log.user_message == "foo bar" first = relevant_event_logs[0] - assert first.dagster_event.event_specific_data.materialization.label == "first" + assert first.dagster_event.event_specific_data.materialization.label == "first" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] second = relevant_event_logs[1] - assert second.dagster_event.event_specific_data.materialization.label == "second" + assert second.dagster_event.event_specific_data.materialization.label == "second" # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert second.timestamp - first.timestamp >= 1 assert log.timestamp - first.timestamp >= 1 @@ -859,7 +859,7 @@ def handle_output(self, context, obj): yield materialization def 
load_input(self, context): - keys = tuple(context.upstream_output.get_identifier()) + keys = tuple(context.upstream_output.get_identifier()) # pyright: ignore[reportOptionalMemberAccess] return self.values[keys] @asset @@ -872,18 +872,18 @@ def key_on_out(): assert result.success output_event = result.all_node_events[4] - metadata = output_event.event_specific_data.metadata + metadata = output_event.event_specific_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # Ensure that ordering is preserved among yields and calls to log assert set(metadata.keys()) == {"foo", "baz", "bar"} materialization_event = result.all_node_events[2] - metadata = materialization_event.event_specific_data.materialization.metadata + metadata = materialization_event.event_specific_data.materialization.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(metadata) == 3 assert set(metadata.keys()) == {"foo", "baz", "bar"} implicit_materialization_event = result.all_node_events[3] - metadata = implicit_materialization_event.event_specific_data.materialization.metadata + metadata = implicit_materialization_event.event_specific_data.materialization.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(metadata) == 3 assert set(metadata.keys()) == {"foo", "baz", "bar"} @@ -924,7 +924,7 @@ def asset1(): handled_output_event = next( event for event in result.all_node_events if event.event_type_value == "HANDLED_OUTPUT" ) - assert set(handled_output_event.event_specific_data.metadata.keys()) == { + assert set(handled_output_event.event_specific_data.metadata.keys()) == { # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] "foo", "bar", } @@ -942,7 +942,7 @@ def handle_output(self, context, obj): yield {"handle_output": "I come from handle_output"} def load_input(self, context): - keys = tuple(context.upstream_output.get_identifier()) + keys = 
tuple(context.upstream_output.get_identifier()) # pyright: ignore[reportOptionalMemberAccess] return self.values[keys] @op(out=DynamicOut()) diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_asset_metadata.py b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_asset_metadata.py index efb441ec97897..fb002ab33fa9e 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_asset_metadata.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_asset_metadata.py @@ -23,7 +23,7 @@ def handle_output(self, context, obj): assert context.definition_metadata["fruit"] == "apple" def load_input(self, context): - assert context.upstream_output.definition_metadata["fruit"] == "apple" + assert context.upstream_output.definition_metadata["fruit"] == "apple" # pyright: ignore[reportOptionalMemberAccess] assert materialize( assets=[assets_def, downstream_asset], diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_composites.py b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_composites.py index 625cb4aa08511..8ca960669b459 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_composites.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_io_manager_composites.py @@ -14,7 +14,7 @@ def handle_output(self, context, obj): def load_input(self, context): result = storage_dict[ - tuple(context.upstream_output.get_run_scoped_output_identifier()) + tuple(context.upstream_output.get_run_scoped_output_identifier()) # pyright: ignore[reportOptionalMemberAccess] ] return {**result, "input_manager_name": name} @@ -96,14 +96,14 @@ def inner_manager(_): class MyHardcodedIOManager(IOManager): def handle_output(self, context, obj): keys = tuple( - context.get_run_scoped_output_identifier() + [context.config["output_suffix"]] + context.get_run_scoped_output_identifier() + [context.config["output_suffix"]] # pyright: 
ignore[reportOperatorIssue] ) stored_dict[keys] = obj def load_input(self, context): keys = tuple( - context.upstream_output.get_run_scoped_output_identifier() - + [context.upstream_output.config["output_suffix"]] + context.upstream_output.get_run_scoped_output_identifier() # type: ignore + + [context.upstream_output.config["output_suffix"]] # pyright: ignore[reportOptionalMemberAccess] ) return stored_dict[keys] diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_pythonic_config.py b/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_pythonic_config.py index c9a4cf77dbf1b..6a9d13a82e698 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_pythonic_config.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_pythonic_config.py @@ -227,7 +227,7 @@ def hello_world_asset(): def test_config_schemas() -> None: # Decorator-based IO manager definition - @io_manager( + @io_manager( # pyright: ignore[reportArgumentType] config_schema={"base_dir": StringSource}, output_config_schema={"path": StringSource}, input_config_schema={"format": StringSource}, diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_with_graphs.py b/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_with_graphs.py index 625cb4aa08511..814b14105e33c 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_with_graphs.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_io_managers_with_graphs.py @@ -14,7 +14,7 @@ def handle_output(self, context, obj): def load_input(self, context): result = storage_dict[ - tuple(context.upstream_output.get_run_scoped_output_identifier()) + tuple(context.upstream_output.get_run_scoped_output_identifier()) # pyright: ignore[reportOptionalMemberAccess] ] return {**result, "input_manager_name": name} @@ -96,14 +96,14 @@ def inner_manager(_): class MyHardcodedIOManager(IOManager): def 
handle_output(self, context, obj): keys = tuple( - context.get_run_scoped_output_identifier() + [context.config["output_suffix"]] + context.get_run_scoped_output_identifier() + [context.config["output_suffix"]] # pyright: ignore[reportOperatorIssue] ) stored_dict[keys] = obj def load_input(self, context): keys = tuple( - context.upstream_output.get_run_scoped_output_identifier() - + [context.upstream_output.config["output_suffix"]] + context.upstream_output.get_run_scoped_output_identifier() # pyright: ignore[reportOptionalMemberAccess] + + [context.upstream_output.config["output_suffix"]] # type: ignore ) return stored_dict[keys] diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_local_instance.py b/python_modules/dagster/dagster_tests/storage_tests/test_local_instance.py index e1d85d1a8f37d..abe9b80bb4ff5 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/test_local_instance.py +++ b/python_modules/dagster/dagster_tests/storage_tests/test_local_instance.py @@ -62,9 +62,9 @@ def easy(context): result = simple.execute_in_process(instance=instance) assert run_store.has_run(result.run_id) - assert instance.get_run_by_id(result.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(result.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] assert DagsterEventType.PIPELINE_SUCCESS in [ - event.dagster_event.event_type + event.dagster_event.event_type # pyright: ignore[reportOptionalMemberAccess] for event in event_store.get_logs_for_run(result.run_id) if event.is_dagster_event ] @@ -131,7 +131,7 @@ def test_get_run_by_id(): instance = DagsterInstance.from_ref(InstanceRef.from_dir(tmpdir_path)) run = DagsterRun(job_name="foo_job", run_id="bar_run") - def _has_run(self, run_id): + def _has_run(self, run_id): # pyright: ignore[reportRedeclaration] # This is uglier than we would like because there is no nonlocal keyword in py2 global MOCK_HAS_RUN_CALLED # noqa: PLW0602 @@ -197,7 +197,7 
@@ def should_not_execute(_, x): with tempfile.TemporaryDirectory() as tmpdir_path: instance = DagsterInstance.from_ref(InstanceRef.from_dir(tmpdir_path)) result = simple.execute_in_process(instance=instance, raise_on_error=False) - step_stats = sorted(instance.get_run_step_stats(result.run_id), key=lambda x: x.end_time) + step_stats = sorted(instance.get_run_step_stats(result.run_id), key=lambda x: x.end_time) # pyright: ignore[reportCallIssue,reportArgumentType] assert len(step_stats) == 2 assert step_stats[0].step_key == "should_succeed" assert step_stats[0].status == StepEventStatus.SUCCESS @@ -247,7 +247,7 @@ def should_not_execute(_, x): assert len(step_stats) == 1 assert step_stats[0].step_key == "should_retry" assert step_stats[0].status == StepEventStatus.FAILURE - assert step_stats[0].end_time > step_stats[0].start_time + assert step_stats[0].end_time > step_stats[0].start_time # pyright: ignore[reportOperatorIssue] assert step_stats[0].attempts == 4 assert not _called @@ -293,7 +293,7 @@ def test_threadsafe_local_temp_instance(): shared = DagsterInstance.local_temp() def _run(_): - shared.root_directory # noqa: B018 + shared.root_directory # noqa: B018 # pyright: ignore[reportOptionalMemberAccess] with DagsterInstance.local_temp() as instance: instance.root_directory # noqa: B018 return True diff --git a/python_modules/dagster/dagster_tests/storage_tests/utils/event_log_storage.py b/python_modules/dagster/dagster_tests/storage_tests/utils/event_log_storage.py index 010278fcad892..55b130a2c0bef 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/utils/event_log_storage.py +++ b/python_modules/dagster/dagster_tests/storage_tests/utils/event_log_storage.py @@ -1259,7 +1259,9 @@ def foo(): ) materializations = [ - e for e in events if e.dagster_event.event_type == "ASSET_MATERIALIZATION" + e + for e in events + if e.dagster_event.event_type == "ASSET_MATERIALIZATION" # pyright: ignore[reportOptionalMemberAccess] ] 
storage.store_event_batch(materializations) @@ -1303,7 +1305,7 @@ def _ops(): def _get_counts(result): assert isinstance(result, EventRecordsResult) return [ - record.asset_materialization.metadata.get("count").value + record.asset_materialization.metadata.get("count").value # pyright: ignore[reportOptionalMemberAccess] for record in result.records ] @@ -1458,7 +1460,8 @@ def _ops(): def _get_counts(result): assert isinstance(result, EventRecordsResult) return [ - record.asset_observation.metadata.get("count").value for record in result.records + record.asset_observation.metadata.get("count").value # pyright: ignore[reportOptionalMemberAccess] + for record in result.records ] # results come in descending order, by default @@ -1579,7 +1582,7 @@ def _get_counts(result): def test_asset_materialization_null_key_fails(self): with pytest.raises(check.CheckError): - AssetMaterialization(asset_key=None) + AssetMaterialization(asset_key=None) # pyright: ignore[reportArgumentType] def test_asset_events_error_parsing(self, storage, instance): if not isinstance(storage, SqlEventLogStorage): diff --git a/python_modules/dagster/dagster_tests/storage_tests/utils/partition_status_cache.py b/python_modules/dagster/dagster_tests/storage_tests/utils/partition_status_cache.py index edb3b59946428..9f4e8a9b715f4 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/utils/partition_status_cache.py +++ b/python_modules/dagster/dagster_tests/storage_tests/utils/partition_status_cache.py @@ -121,7 +121,7 @@ def _swap_partitions_def(new_partitions_def, asset, asset_graph, asset_job): ).get_partition_keys() ) assert set(materialized_keys) == {"2022-02-02"} - counts = traced_counter.get().counts() + counts = traced_counter.get().counts() # pyright: ignore[reportOptionalMemberAccess] assert counts.get("DagsterInstance.get_materialized_partitions") == 1 def test_get_cached_partition_status_by_asset(self, instance): @@ -167,7 +167,7 @@ def _swap_partitions_def(new_partitions_def, asset, 
asset_graph, asset_job): ) assert len(materialized_keys) == 1 assert "2022-02-01" in materialized_keys - counts = traced_counter.get().counts() + counts = traced_counter.get().counts() # pyright: ignore[reportOptionalMemberAccess] assert counts.get("DagsterInstance.get_materialized_partitions") == 1 asset_job.execute_in_process(instance=instance, partition_key="2022-02-02") @@ -344,10 +344,10 @@ def asset1(context): ) # failed partition assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail1"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -361,10 +361,10 @@ def asset1(context): ) # cache is updated with new failed partition, successful partition is ignored assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail1", "fail2"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -393,10 +393,10 @@ def asset1(context): ) # cache is updated after successful materialization of fail1 assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail2"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: 
ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -423,9 +423,9 @@ def asset1(context): ) # in progress materialization is ignored assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail2"} - assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"good2"} @@ -448,7 +448,7 @@ def asset1(context): ) # failed partition assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail1"} def test_failure_cache_in_progress_runs(self, instance): @@ -490,11 +490,11 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert cached_status.deserialize_failed_partition_subsets( + assert cached_status.deserialize_failed_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -521,10 +521,10 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert cached_status.deserialize_failed_partition_subsets( + assert cached_status.deserialize_failed_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1"} - 
assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail2"} @@ -533,11 +533,11 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert cached_status.deserialize_failed_partition_subsets( + assert cached_status.deserialize_failed_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1", "fail2"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -594,7 +594,7 @@ def asset1(context): instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"good1"} @@ -603,18 +603,18 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert not cached_status.earliest_in_progress_materialization_event_id + assert not cached_status.earliest_in_progress_materialization_event_id # pyright: ignore[reportOptionalMemberAccess] materialized_keys = list( partitions_def.deserialize_subset( - cached_status.serialized_materialized_partition_subset + cached_status.serialized_materialized_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() ) assert len(materialized_keys) == 1 assert "good1" in materialized_keys assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # 
pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() @@ -653,8 +653,8 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - early_id = cached_status.earliest_in_progress_materialization_event_id - assert cached_status.deserialize_in_progress_partition_subsets( + early_id = cached_status.earliest_in_progress_materialization_event_id # pyright: ignore[reportOptionalMemberAccess] + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1"} @@ -681,14 +681,14 @@ def asset1(context): ) assert ( partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == set() ) - assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1", "fail2"} - assert cached_status.earliest_in_progress_materialization_event_id == early_id + assert cached_status.earliest_in_progress_materialization_event_id == early_id # pyright: ignore[reportOptionalMemberAccess] instance.report_run_failed(run_2) @@ -696,12 +696,12 @@ def asset1(context): instance, asset_key, asset_graph.get(asset_key).partitions_def ) assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail2"} - assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def 
).get_partition_keys() == {"fail1"} - assert cached_status.earliest_in_progress_materialization_event_id == early_id + assert cached_status.earliest_in_progress_materialization_event_id == early_id # pyright: ignore[reportOptionalMemberAccess] instance.report_run_canceled(run_1) @@ -709,15 +709,15 @@ def asset1(context): instance, asset_key, asset_graph.get(asset_key).partitions_def ) assert partitions_def.deserialize_subset( - cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail2"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() ) - assert cached_status.earliest_in_progress_materialization_event_id is None + assert cached_status.earliest_in_progress_materialization_event_id is None # pyright: ignore[reportOptionalMemberAccess] def test_failure_cache_concurrent_materializations(self, instance): partitions_def = StaticPartitionsDefinition(["good1", "good2", "fail1", "fail2"]) @@ -770,10 +770,10 @@ def asset1(context): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - assert cached_status.deserialize_in_progress_partition_subsets( + assert cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == {"fail1"} - assert cached_status.earliest_in_progress_materialization_event_id is not None + assert cached_status.earliest_in_progress_materialization_event_id is not None # pyright: ignore[reportOptionalMemberAccess] instance.report_run_failed(run_2) @@ -781,16 +781,16 @@ def asset1(context): instance, asset_key, asset_graph.get(asset_key).partitions_def ) assert partitions_def.deserialize_subset( - 
cached_status.serialized_failed_partition_subset + cached_status.serialized_failed_partition_subset # pyright: ignore[reportArgumentType,reportOptionalMemberAccess] ).get_partition_keys() == {"fail1"} assert ( - cached_status.deserialize_in_progress_partition_subsets( + cached_status.deserialize_in_progress_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] partitions_def ).get_partition_keys() == set() ) # run_1 is still in progress, but run_2 started after and failed, so we move on - assert cached_status.earliest_in_progress_materialization_event_id is None + assert cached_status.earliest_in_progress_materialization_event_id is None # pyright: ignore[reportOptionalMemberAccess] def test_failed_partitioned_asset_converted_to_multipartitioned(self, instance): daily_def = DailyPartitionsDefinition("2023-01-01") @@ -823,8 +823,8 @@ def my_asset(): cached_status = get_and_update_asset_status_cache_value( instance, asset_key, asset_graph.get(asset_key).partitions_def ) - failed_subset = cached_status.deserialize_failed_partition_subsets( - asset_graph.get(asset_key).partitions_def + failed_subset = cached_status.deserialize_failed_partition_subsets( # pyright: ignore[reportOptionalMemberAccess] + asset_graph.get(asset_key).partitions_def # pyright: ignore[reportArgumentType] ) assert failed_subset.get_partition_keys() == set() diff --git a/python_modules/dagster/dagster_tests/storage_tests/utils/run_storage.py b/python_modules/dagster/dagster_tests/storage_tests/utils/run_storage.py index 1cd9824521160..3ad1b0bbd181e 100644 --- a/python_modules/dagster/dagster_tests/storage_tests/utils/run_storage.py +++ b/python_modules/dagster/dagster_tests/storage_tests/utils/run_storage.py @@ -173,8 +173,8 @@ def test_basic_storage(self, storage): assert run.tags.get("foo") == "bar" assert storage.has_run(run_id) fetched_run = _get_run_by_id(storage, run_id) - assert fetched_run.run_id == run_id - assert fetched_run.job_name == "some_pipeline" + assert 
fetched_run.run_id == run_id # pyright: ignore[reportOptionalMemberAccess] + assert fetched_run.job_name == "some_pipeline" # pyright: ignore[reportOptionalMemberAccess] def test_clear(self, storage): if not self.can_delete_runs(): @@ -767,7 +767,7 @@ def test_failure_event_updates_tags(self, storage, instance): ) run = _get_run_by_id(storage, one) - assert run.tags[RUN_FAILURE_REASON_TAG] == RunFailureReason.RUN_EXCEPTION.value + assert run.tags[RUN_FAILURE_REASON_TAG] == RunFailureReason.RUN_EXCEPTION.value # pyright: ignore[reportOptionalMemberAccess] def _get_run_event_entry(self, dagster_event: DagsterEvent, run_id: str): return EventLogEntry( @@ -1760,7 +1760,7 @@ def test_handle_run_event_job_success_test(self, storage, instance): instance.handle_new_event(self._get_run_event_entry(dagster_job_start_event, run_id)) - assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.STARTED + assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.STARTED # pyright: ignore[reportOptionalMemberAccess] instance.handle_new_event( self._get_run_event_entry( @@ -1777,7 +1777,7 @@ def test_handle_run_event_job_success_test(self, storage, instance): ) ) - assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.STARTED + assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.STARTED # pyright: ignore[reportOptionalMemberAccess] instance.handle_new_event( self._get_run_event_entry( @@ -1794,7 +1794,7 @@ def test_handle_run_event_job_success_test(self, storage, instance): ) ) - assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.SUCCESS + assert _get_run_by_id(storage, run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] def test_debug_snapshot_import(self, storage): from dagster._core.execution.api import create_execution_plan diff --git a/python_modules/dagster/dagster_tests/test_annotations.py b/python_modules/dagster/dagster_tests/test_annotations.py index fd054f23cdccc..5cb29abbec309 
100644 --- a/python_modules/dagster/dagster_tests/test_annotations.py +++ b/python_modules/dagster/dagster_tests/test_annotations.py @@ -207,7 +207,7 @@ def bar(cls): with pytest.warns( DeprecationWarning, match=r"`[^`]+Foo.bar` is deprecated and will be removed in 2.0" ) as warning: - Foo.bar() + Foo.bar() # pyright: ignore[reportCallIssue] assert warning[0].filename.endswith("test_annotations.py") @@ -325,7 +325,7 @@ def bar(baz=None): with pytest.warns( DeprecationWarning, match=r"Parameter `baz` of [^`]+`[^`]+Foo.bar` is deprecated" ) as warning: - Foo.bar(baz="ok") + Foo.bar(baz="ok") # pyright: ignore[reportArgumentType] assert warning[0].filename.endswith("test_annotations.py") @@ -351,7 +351,7 @@ def bar(cls, baz=None): with pytest.warns( DeprecationWarning, match=r"Parameter `baz` of [^`]+`[^`]+Foo.bar` is deprecated" ) as warning: - Foo.bar(baz="ok") + Foo.bar(baz="ok") # pyright: ignore[reportCallIssue] assert warning[0].filename.endswith("test_annotations.py") @@ -497,7 +497,7 @@ def bar(cls): assert is_experimental(Foo.__dict__["bar"]) # __dict__ access to get descriptor with pytest.warns(ExperimentalWarning, match=r"`[^`]+Foo.bar` is experimental") as warning: - Foo.bar() + Foo.bar() # pyright: ignore[reportCallIssue] assert warning[0].filename.endswith("test_annotations.py") @@ -653,7 +653,7 @@ def bar(baz=None): with pytest.warns( ExperimentalWarning, match=r"Parameter `baz` of [^`]+`[^`]+Foo.bar` is experimental" ) as warning: - Foo.bar(baz="ok") + Foo.bar(baz="ok") # pyright: ignore[reportArgumentType] assert warning[0].filename.endswith("test_annotations.py") @@ -679,7 +679,7 @@ def bar(cls, baz=None): with pytest.warns( ExperimentalWarning, match=r"Parameter `baz` of [^`]+`[^`]+Foo.bar` is experimental" ) as warning: - Foo.bar(baz="ok") + Foo.bar(baz="ok") # pyright: ignore[reportCallIssue] assert warning[0].filename.endswith("test_annotations.py") diff --git a/python_modules/dagster/dagster_tests/utils_tests/test_dataloader.py 
b/python_modules/dagster/dagster_tests/utils_tests/test_dataloader.py index 73b3917403173..e1873156115f6 100644 --- a/python_modules/dagster/dagster_tests/utils_tests/test_dataloader.py +++ b/python_modules/dagster/dagster_tests/utils_tests/test_dataloader.py @@ -39,7 +39,7 @@ async def batch_load_fn(keys: List[str]): class ThingLoader(DataLoader[str, Thing]): def __init__(self): - super().__init__(batch_load_fn=batch_load_fn) + super().__init__(batch_load_fn=batch_load_fn) # pyright: ignore[reportArgumentType] def test_basic() -> None: @@ -93,7 +93,7 @@ async def batch_load_fn(keys: List[str]): class Thrower(DataLoader[str, str]): def __init__(self): - super().__init__(batch_load_fn=batch_load_fn) + super().__init__(batch_load_fn=batch_load_fn) # pyright: ignore[reportArgumentType] async def _test(): loader = Thrower() @@ -125,7 +125,7 @@ def test_bad_load_fn(): async def _oops(wrong, args, here): ... async def _test(): - loader = DataLoader(_oops) + loader = DataLoader(_oops) # pyright: ignore[reportArgumentType] done, pending = await asyncio.wait( (loader.load(1),), timeout=0.01, @@ -134,7 +134,7 @@ async def _test(): assert len(done) == 1 with pytest.raises(TypeError): - done[0].result() + done[0].result() # pyright: ignore[reportIndexIssue] asyncio.run(_test()) diff --git a/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_asset_defs.py b/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_asset_defs.py index 089d12568b992..81c056fe0be57 100644 --- a/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_asset_defs.py +++ b/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_asset_defs.py @@ -83,7 +83,7 @@ def test_assets(schema_prefix, auto_materialize_policy, monkeypatch): ) materializations = [ - event.event_specific_data.materialization + event.event_specific_data.materialization # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] for event in 
res.events_for_node(ab_assets_name) if event.event_type_value == "ASSET_MATERIALIZATION" ] @@ -185,7 +185,7 @@ def test_assets_with_normalization( ) materializations = [ - event.event_specific_data.materialization + event.event_specific_data.materialization # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] for event in res.events_for_node(ab_assets_name) if event.event_type_value == "ASSET_MATERIALIZATION" ] @@ -288,9 +288,9 @@ def test_built_airbyte_asset_with_downstream_asset_via_definition(): def downstream_of_ab(): return None - assert len(downstream_of_ab.input_names) == 2 - assert downstream_of_ab.op.ins["some_prefix_foo"].dagster_type.is_nothing - assert downstream_of_ab.op.ins["some_prefix_bar"].dagster_type.is_nothing + assert len(downstream_of_ab.input_names) == 2 # pyright: ignore[reportArgumentType] + assert downstream_of_ab.op.ins["some_prefix_foo"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream_of_ab.op.ins["some_prefix_bar"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] def test_built_airbyte_asset_with_downstream_asset(): @@ -305,9 +305,9 @@ def test_built_airbyte_asset_with_downstream_asset(): def downstream_of_ab(): return None - assert len(downstream_of_ab.input_names) == 2 - assert downstream_of_ab.op.ins["some_prefix_foo"].dagster_type.is_nothing - assert downstream_of_ab.op.ins["some_prefix_bar"].dagster_type.is_nothing + assert len(downstream_of_ab.input_names) == 2 # pyright: ignore[reportArgumentType] + assert downstream_of_ab.op.ins["some_prefix_foo"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] + assert downstream_of_ab.op.ins["some_prefix_bar"].dagster_type.is_nothing # pyright: ignore[reportAttributeAccessIssue] def test_built_airbyte_asset_table_name(): diff --git a/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_load_from_instance.py 
b/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_load_from_instance.py index e64f5edfd8986..bc8d719e95ccb 100644 --- a/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_load_from_instance.py +++ b/python_modules/libraries/dagster-airbyte/dagster_airbyte_tests/test_load_from_instance.py @@ -368,6 +368,6 @@ def test_load_from_instance_with_downstream_asset_errors(): match='Param "asset" is not one of ', ): - @asset(deps=[ab_cacheable_assets]) + @asset(deps=[ab_cacheable_assets]) # pyright: ignore[reportArgumentType] def downstream_of_ab(): return None diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow/operators/dagster_operator.py b/python_modules/libraries/dagster-airflow/dagster_airflow/operators/dagster_operator.py index a4635c2a860f2..1ef57544f72ba 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow/operators/dagster_operator.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow/operators/dagster_operator.py @@ -84,7 +84,7 @@ def pre_execute(self, context): def on_kill(self): self.log.info("Terminating Run") self.hook.terminate_run( - run_id=self.run_id, + run_id=self.run_id, # pyright: ignore[reportArgumentType] ) def execute(self, context): diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow/resources/airflow_db.py b/python_modules/libraries/dagster-airflow/dagster_airflow/resources/airflow_db.py index c76f547ed6407..31ade38d2453d 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow/resources/airflow_db.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow/resources/airflow_db.py @@ -49,7 +49,7 @@ def _parse_execution_date_for_job( raise DagsterInvariantViolationError( f'Date "{execution_date_str}" exceeds the largest valid C integer on the system.' 
) - return execution_date + return execution_date # pyright: ignore[reportReturnType] def _parse_execution_date_for_asset( self, dag: DAG, run_tags: Mapping[str, str] @@ -58,7 +58,7 @@ def _parse_execution_date_for_asset( if not execution_date_str: raise DagsterInvariantViolationError("dagster/partition is not set") execution_date = pendulum.parse(execution_date_str, tz=pendulum.timezone(dag.timezone.name)) - return execution_date + return execution_date # pyright: ignore[reportReturnType] def get_dagrun(self, dag: DAG) -> DagRun: run_tags = self.dagster_run.tags if self.dagster_run else {} diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets.py index bca6dbd64a3a4..dfc01decaf29e 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets.py @@ -54,7 +54,7 @@ def new_upstream_asset(): return 1 assets = load_assets_from_airflow_dag( - dag=asset_dag, + dag=asset_dag, # pyright: ignore[reportArgumentType] task_ids_by_asset_key={ AssetKey("foo_asset"): {"foo"}, AssetKey("biz_asset"): {"biz"}, @@ -68,7 +68,7 @@ def new_upstream_asset(): other_dag = dag_bag.get_dag(dag_id="other_dag") other_assets = load_assets_from_airflow_dag( - dag=other_dag, + dag=other_dag, # pyright: ignore[reportArgumentType] ) result = materialize( @@ -98,7 +98,7 @@ def new_upstream_asset(): with pytest.raises(CheckError, match="Each asset key must have no more than one task ID"): load_assets_from_airflow_dag( - dag=asset_dag, + dag=asset_dag, # pyright: ignore[reportArgumentType] task_ids_by_asset_key={ AssetKey("foo_asset"): {"foo", "biz"}, }, diff --git 
a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets_airflow_2.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets_airflow_2.py index b947e739f9831..a12959a317216 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets_airflow_2.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_assets_airflow_2.py @@ -47,7 +47,7 @@ def test_load_assets_from_airflow_dag(): asset_dag = dag_bag.get_dag(dag_id="asset_dag") assets = load_assets_from_airflow_dag( - dag=asset_dag, + dag=asset_dag, # pyright: ignore[reportArgumentType] task_ids_by_asset_key={ AssetKey("foo_asset"): {"foo"}, AssetKey("biz_asset"): {"biz"}, @@ -60,7 +60,7 @@ def test_load_assets_from_airflow_dag(): ) other_dag = dag_bag.get_dag(dag_id="other_dag") other_assets = load_assets_from_airflow_dag( - dag=other_dag, + dag=other_dag, # pyright: ignore[reportArgumentType] ) resources = None diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag.py index 90e148f19b7af..c097ead182ec3 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag.py @@ -155,7 +155,7 @@ def test_retry_conversion(): dag_bag = DagBag(dag_folder=tmpdir_path) retry_dag = dag_bag.get_dag(dag_id="retry_dag") - job = make_dagster_job_from_airflow_dag(dag=retry_dag) + job = make_dagster_job_from_airflow_dag(dag=retry_dag) # pyright: ignore[reportArgumentType] result = job.execute_in_process() assert result.success for event in result.all_events: diff --git 
a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag_airflow_2.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag_airflow_2.py index f279f4cd910b7..ef630bd985f2d 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag_airflow_2.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_job_factory/test_load_dag_bag_airflow_2.py @@ -141,7 +141,7 @@ def test_retry_conversion(): retry_dag = dag_bag.get_dag(dag_id="retry_dag") job = make_dagster_job_from_airflow_dag( - dag=retry_dag, + dag=retry_dag, # pyright: ignore[reportArgumentType] ) result = job.execute_in_process() assert result.success diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_operator.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_operator.py index c4bdfdb370f71..b2e8f7b133877 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_operator.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_operator.py @@ -47,7 +47,7 @@ def test_operator(self, launch_run, wait_for_run): run_config=run_config, user_token="token", organization_id="test-org", - dagster_conn_id=None, + dagster_conn_id=None, # pyright: ignore[reportArgumentType] ) if airflow_version >= "2.0.0": dagrun = dag.create_dagrun( @@ -58,9 +58,9 @@ def test_operator(self, launch_run, wait_for_run): run_type=DagRunType.MANUAL, ) ti = dagrun.get_task_instance(task_id="anytask") - ti.task = dag.get_task(task_id="anytask") - ti.run(ignore_ti_state=True) - assert ti.state == TaskInstanceState.SUCCESS + ti.task = dag.get_task(task_id="anytask") # pyright: ignore[reportOptionalMemberAccess] + ti.run(ignore_ti_state=True) # pyright: ignore[reportOptionalMemberAccess] + assert ti.state == TaskInstanceState.SUCCESS # 
pyright: ignore[reportOptionalMemberAccess] else: ti = TaskInstance(task=task, execution_date=datetime.now()) ctx = ti.get_template_context() @@ -87,8 +87,8 @@ def test_operator_with_connection(self, launch_run, wait_for_run, _mock_get_conn run_type=DagRunType.MANUAL, ) ti = dagrun.get_task_instance(task_id="anytask") - ti.task = dag.get_task(task_id="anytask") - ti.run(ignore_ti_state=True) - assert ti.state == TaskInstanceState.SUCCESS + ti.task = dag.get_task(task_id="anytask") # pyright: ignore[reportOptionalMemberAccess] + ti.run(ignore_ti_state=True) # pyright: ignore[reportOptionalMemberAccess] + assert ti.state == TaskInstanceState.SUCCESS # pyright: ignore[reportOptionalMemberAccess] launch_run.assert_called_once() wait_for_run.assert_called_once() diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_dependency_structure_translation.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_dependency_structure_translation.py index 4f5ac624445f4..2beeb1cdeb940 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_dependency_structure_translation.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_dependency_structure_translation.py @@ -99,7 +99,7 @@ def test_two_task_dag_with_dep(snapshot): task_id="dummy_operator_2", dag=dag, ) - dummy_operator_1 >> dummy_operator_2 + dummy_operator_1 >> dummy_operator_2 # pyright: ignore[reportUnusedExpression] snapshot.assert_match( serialize_pp( @@ -138,10 +138,10 @@ def test_diamond_task_dag(snapshot): task_id="dummy_operator_4", dag=dag, ) - dummy_operator_1 >> dummy_operator_2 - dummy_operator_1 >> dummy_operator_3 - dummy_operator_2 >> dummy_operator_4 - dummy_operator_3 >> dummy_operator_4 + dummy_operator_1 >> dummy_operator_2 # pyright: ignore[reportUnusedExpression] + dummy_operator_1 >> 
dummy_operator_3 # pyright: ignore[reportUnusedExpression] + dummy_operator_2 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] + dummy_operator_3 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] snapshot.assert_match( serialize_pp( @@ -180,9 +180,9 @@ def test_multi_root_dag(snapshot): task_id="dummy_operator_4", dag=dag, ) - dummy_operator_1 >> dummy_operator_4 - dummy_operator_2 >> dummy_operator_4 - dummy_operator_3 >> dummy_operator_4 + dummy_operator_1 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] + dummy_operator_2 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] + dummy_operator_3 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] dag.tree_view() snapshot.assert_match( @@ -222,9 +222,9 @@ def test_multi_leaf_dag(snapshot): task_id="dummy_operator_4", dag=dag, ) - dummy_operator_1 >> dummy_operator_2 - dummy_operator_1 >> dummy_operator_3 - dummy_operator_1 >> dummy_operator_4 + dummy_operator_1 >> dummy_operator_2 # pyright: ignore[reportUnusedExpression] + dummy_operator_1 >> dummy_operator_3 # pyright: ignore[reportUnusedExpression] + dummy_operator_1 >> dummy_operator_4 # pyright: ignore[reportUnusedExpression] snapshot.assert_match( serialize_pp( @@ -311,17 +311,17 @@ def test_complex_dag(snapshot): task_id="delete_entry", dag=dag, ) - create_entry_gcs >> delete_entry + create_entry_gcs >> delete_entry # pyright: ignore[reportUnusedExpression] delete_entry_group = DummyOperator( task_id="delete_entry_group", dag=dag, ) - create_entry_group >> delete_entry_group + create_entry_group >> delete_entry_group # pyright: ignore[reportUnusedExpression] delete_tag = DummyOperator( task_id="delete_tag", dag=dag, ) - create_tag >> delete_tag + create_tag >> delete_tag # pyright: ignore[reportUnusedExpression] delete_tag_template_field = DummyOperator( task_id="delete_tag_template_field", dag=dag, @@ -421,24 +421,24 @@ def test_complex_dag(snapshot): ] chain(*create_tasks) - create_entry_group >> 
delete_entry_group - create_entry_group >> create_entry_group_result - create_entry_group >> create_entry_group_result2 + create_entry_group >> delete_entry_group # pyright: ignore[reportUnusedExpression] + create_entry_group >> create_entry_group_result # pyright: ignore[reportUnusedExpression] + create_entry_group >> create_entry_group_result2 # pyright: ignore[reportUnusedExpression] - create_entry_gcs >> delete_entry - create_entry_gcs >> create_entry_gcs_result - create_entry_gcs >> create_entry_gcs_result2 + create_entry_gcs >> delete_entry # pyright: ignore[reportUnusedExpression] + create_entry_gcs >> create_entry_gcs_result # pyright: ignore[reportUnusedExpression] + create_entry_gcs >> create_entry_gcs_result2 # pyright: ignore[reportUnusedExpression] - create_tag_template >> delete_tag_template_field - create_tag_template >> create_tag_template_result - create_tag_template >> create_tag_template_result2 + create_tag_template >> delete_tag_template_field # pyright: ignore[reportUnusedExpression] + create_tag_template >> create_tag_template_result # pyright: ignore[reportUnusedExpression] + create_tag_template >> create_tag_template_result2 # pyright: ignore[reportUnusedExpression] - create_tag_template_field >> delete_tag_template_field - create_tag_template_field >> create_tag_template_field_result + create_tag_template_field >> delete_tag_template_field # pyright: ignore[reportUnusedExpression] + create_tag_template_field >> create_tag_template_field_result # pyright: ignore[reportUnusedExpression] - create_tag >> delete_tag - create_tag >> create_tag_result - create_tag >> create_tag_result2 + create_tag >> delete_tag # pyright: ignore[reportUnusedExpression] + create_tag >> create_tag_result # pyright: ignore[reportUnusedExpression] + create_tag >> create_tag_result2 # pyright: ignore[reportUnusedExpression] # Delete delete_tasks = [ @@ -451,35 +451,35 @@ def test_complex_dag(snapshot): chain(*delete_tasks) # Get - create_tag_template >> 
get_tag_template >> delete_tag_template - get_tag_template >> get_tag_template_result + create_tag_template >> get_tag_template >> delete_tag_template # pyright: ignore[reportUnusedExpression] + get_tag_template >> get_tag_template_result # pyright: ignore[reportUnusedExpression] - create_entry_gcs >> get_entry >> delete_entry - get_entry >> get_entry_result + create_entry_gcs >> get_entry >> delete_entry # pyright: ignore[reportUnusedExpression] + get_entry >> get_entry_result # pyright: ignore[reportUnusedExpression] - create_entry_group >> get_entry_group >> delete_entry_group - get_entry_group >> get_entry_group_result + create_entry_group >> get_entry_group >> delete_entry_group # pyright: ignore[reportUnusedExpression] + get_entry_group >> get_entry_group_result # pyright: ignore[reportUnusedExpression] # List - create_tag >> list_tags >> delete_tag - list_tags >> list_tags_result + create_tag >> list_tags >> delete_tag # pyright: ignore[reportUnusedExpression] + list_tags >> list_tags_result # pyright: ignore[reportUnusedExpression] # Lookup - create_entry_gcs >> lookup_entry >> delete_entry - lookup_entry >> lookup_entry_result + create_entry_gcs >> lookup_entry >> delete_entry # pyright: ignore[reportUnusedExpression] + lookup_entry >> lookup_entry_result # pyright: ignore[reportUnusedExpression] # Rename - create_tag_template_field >> rename_tag_template_field >> delete_tag_template_field + create_tag_template_field >> rename_tag_template_field >> delete_tag_template_field # pyright: ignore[reportUnusedExpression] # Search chain(create_tasks, search_catalog, delete_tasks) - search_catalog >> search_catalog_result + search_catalog >> search_catalog_result # pyright: ignore[reportUnusedExpression] # Update - create_entry_gcs >> update_entry >> delete_entry - create_tag >> update_tag >> delete_tag - create_tag_template >> update_tag_template >> delete_tag_template - create_tag_template_field >> update_tag_template_field >> rename_tag_template_field + 
create_entry_gcs >> update_entry >> delete_entry # pyright: ignore[reportUnusedExpression] + create_tag >> update_tag >> delete_tag # pyright: ignore[reportUnusedExpression] + create_tag_template >> update_tag_template >> delete_tag_template # pyright: ignore[reportUnusedExpression] + create_tag_template_field >> update_tag_template_field >> rename_tag_template_field # pyright: ignore[reportUnusedExpression] snapshot.assert_match( serialize_pp( diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_load_connections.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_load_connections.py index b4f9a1e3627eb..24377678a6667 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_load_connections.py +++ b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_load_connections.py @@ -58,7 +58,7 @@ def test_ingest_airflow_dags_with_connections(self, launch_run, wait_for_run): host="prod", password="test_token", description="test-org", - port="test-port", + port="test-port", # pyright: ignore[reportArgumentType] schema="test-port", extra={"foo": "bar"}, ) @@ -120,7 +120,7 @@ def test_ingest_airflow_dags_with_connections(self, launch_run, wait_for_run): conn_type="dagster", host="prod", password="test_token", - port="test-port", + port="test-port", # pyright: ignore[reportArgumentType] schema="test-port", extra=json.dumps({"foo": "bar"}), ) diff --git a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_op_execution.py b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_op_execution.py index 9a991e51a7666..50b2a7c9245d3 100644 --- a/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_op_execution.py +++ 
b/python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_op_execution.py @@ -183,7 +183,7 @@ def test_template_task_dag(tmpdir): dag=dag, ) - t1 >> [t2, t3] + t1 >> [t2, t3] # pyright: ignore[reportUnusedExpression] with instance_for_test() as instance: execution_date = get_current_datetime() diff --git a/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_load_defs.py b/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_load_defs.py index 7223c2e741614..3e7e563b6d92f 100644 --- a/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_load_defs.py +++ b/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_load_defs.py @@ -60,12 +60,12 @@ ) -@executor +@executor # pyright: ignore[reportCallIssue,reportArgumentType] def nonstandard_executor(init_context): pass -@logger +@logger # pyright: ignore[reportCallIssue,reportArgumentType] def nonstandard_logger(init_context): pass @@ -88,7 +88,7 @@ def a(): b_spec = AssetSpec(key="b") -@asset_check(asset=a) +@asset_check(asset=a) # pyright: ignore[reportArgumentType] def a_check(): pass @@ -112,8 +112,8 @@ def test_defs_passthrough() -> None: jobs=[the_job], sensors=[some_sensor], schedules=[some_schedule], - loggers={"the_logger": nonstandard_logger}, - executor=nonstandard_executor, + loggers={"the_logger": nonstandard_logger}, # pyright: ignore[reportArgumentType] + executor=nonstandard_executor, # pyright: ignore[reportArgumentType] ), ) assert defs.executor == nonstandard_executor diff --git a/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_sensor.py b/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_sensor.py index cb50ed721575e..bb07a354b2e27 100644 --- 
a/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_sensor.py +++ b/python_modules/libraries/dagster-airlift/dagster_airlift_tests/unit_tests/core_tests/test_sensor.py @@ -236,15 +236,15 @@ def test_request_asset_checks(init_load_context: None, instance: DagsterInstance dag_asset_key = make_dag_key("dag") - @asset_check(asset="a") + @asset_check(asset="a") # pyright: ignore[reportArgumentType] def check_task_asset(): pass - @asset_check(asset=dag_asset_key) + @asset_check(asset=dag_asset_key) # pyright: ignore[reportArgumentType] def check_dag_asset(): pass - @asset_check(asset="c") + @asset_check(asset="c") # pyright: ignore[reportArgumentType] def check_unrelated_asset(): pass diff --git a/python_modules/libraries/dagster-aws/dagster_aws/ecs/tasks.py b/python_modules/libraries/dagster-aws/dagster_aws/ecs/tasks.py index 1fc23421417cf..65d241c18a3f4 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/ecs/tasks.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/ecs/tasks.py @@ -130,13 +130,13 @@ def task_definition_dict(self): kwargs.update(dict(taskRoleArn=self.task_role_arn)) if self.runtime_platform: - kwargs.update(dict(runtimePlatform=self.runtime_platform)) + kwargs.update(dict(runtimePlatform=self.runtime_platform)) # pyright: ignore[reportCallIssue,reportArgumentType] if self.ephemeral_storage: - kwargs.update(dict(ephemeralStorage={"sizeInGiB": self.ephemeral_storage})) + kwargs.update(dict(ephemeralStorage={"sizeInGiB": self.ephemeral_storage})) # pyright: ignore[reportCallIssue,reportArgumentType] if self.volumes: - kwargs.update(dict(volumes=self.volumes)) + kwargs.update(dict(volumes=self.volumes)) # pyright: ignore[reportCallIssue,reportArgumentType] return kwargs @@ -328,7 +328,7 @@ class CurrentEcsTaskMetadata( def get_current_ecs_task_metadata() -> CurrentEcsTaskMetadata: - task_metadata_uri = _container_metadata_uri() + "/task" + task_metadata_uri = _container_metadata_uri() + "/task" # 
pyright: ignore[reportOptionalOperand] response = requests.get(task_metadata_uri).json() cluster = response.get("Cluster") task_arn = response.get("TaskARN") @@ -349,7 +349,7 @@ def _container_metadata_uri(): def current_ecs_container_name(): - return requests.get(_container_metadata_uri()).json()["Name"] + return requests.get(_container_metadata_uri()).json()["Name"] # pyright: ignore[reportArgumentType] def get_current_ecs_task(ecs, task_arn, cluster): diff --git a/python_modules/libraries/dagster-aws/dagster_aws/emr/emr.py b/python_modules/libraries/dagster-aws/dagster_aws/emr/emr.py index 3bd34dd171e86..b46f572f7374a 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/emr/emr.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/emr/emr.py @@ -107,7 +107,7 @@ def cluster_id_from_name(self, cluster_name): """ check.str_param(cluster_name, "cluster_name") - response = self.make_emr_client().list_clusters().get("Clusters", []) + response = self.make_emr_client().list_clusters().get("Clusters", []) # pyright: ignore[reportOptionalMemberAccess] for cluster in response: if cluster["Name"] == cluster_name: return cluster["Id"] @@ -180,7 +180,7 @@ def run_job_flow(self, log, cluster_config): "Calling run_job_flow(%s)" % (", ".join("%s=%r" % (k, v) for k, v in sorted(cluster_config.items()))) ) - cluster_id = emr_client.run_job_flow(**cluster_config)["JobFlowId"] + cluster_id = emr_client.run_job_flow(**cluster_config)["JobFlowId"] # pyright: ignore[reportOptionalSubscript] log.info("Created new cluster %s" % cluster_id) @@ -244,14 +244,14 @@ def add_job_flow_steps(self, log, cluster_id, step_defs): "Calling add_job_flow_steps(%s)" % ",".join(("%s=%r" % (k, v)) for k, v in steps_kwargs.items()) ) - return emr_client.add_job_flow_steps(**steps_kwargs)["StepIds"] + return emr_client.add_job_flow_steps(**steps_kwargs)["StepIds"] # pyright: ignore[reportOptionalSubscript] def is_emr_step_complete(self, log, cluster_id, emr_step_id): - step = 
self.describe_step(cluster_id, emr_step_id)["Step"] + step = self.describe_step(cluster_id, emr_step_id)["Step"] # pyright: ignore[reportOptionalSubscript] step_state = EmrStepState(step["Status"]["State"]) if step_state == EmrStepState.Pending: - cluster = self.describe_cluster(cluster_id)["Cluster"] + cluster = self.describe_cluster(cluster_id)["Cluster"] # pyright: ignore[reportOptionalSubscript] reason = _get_reason(cluster) reason_desc = (": %s" % reason) if reason else "" @@ -283,7 +283,7 @@ def is_emr_step_complete(self, log, cluster_id, emr_step_id): # print cluster status; this might give more context # why step didn't succeed - cluster = self.describe_cluster(cluster_id)["Cluster"] + cluster = self.describe_cluster(cluster_id)["Cluster"] # pyright: ignore[reportOptionalSubscript] reason = _get_reason(cluster) reason_desc = (": %s" % reason) if reason else "" log.info( @@ -338,7 +338,7 @@ def log_location_for_cluster(self, cluster_id): check.str_param(cluster_id, "cluster_id") # The S3 log URI is specified per job flow (cluster) - log_uri = self.describe_cluster(cluster_id)["Cluster"].get("LogUri", None) + log_uri = self.describe_cluster(cluster_id)["Cluster"].get("LogUri", None) # pyright: ignore[reportOptionalSubscript] # ugh, seriously boto3?! 
This will come back as string "None" if log_uri == "None" or log_uri is None: @@ -398,7 +398,7 @@ def wait_for_log(self, log, log_bucket, log_key, waiter_delay=30, waiter_max_att s3 = _wrap_aws_client(boto3.client("s3"), min_backoff=self.check_cluster_every) waiter = s3.get_waiter("object_exists") try: - waiter.wait( + waiter.wait( # pyright: ignore[reportOptionalMemberAccess] Bucket=log_bucket, Key=log_key, WaiterConfig={"Delay": waiter_delay, "MaxAttempts": waiter_max_attempts}, @@ -406,7 +406,7 @@ def wait_for_log(self, log, log_bucket, log_key, waiter_delay=30, waiter_max_att except WaiterError as err: raise EmrError("EMR log file did not appear on S3 after waiting") from err - obj = BytesIO(s3.get_object(Bucket=log_bucket, Key=log_key)["Body"].read()) + obj = BytesIO(s3.get_object(Bucket=log_bucket, Key=log_key)["Body"].read()) # pyright: ignore[reportOptionalSubscript] gzip_file = gzip.GzipFile(fileobj=obj) return gzip_file.read().decode("utf-8") diff --git a/python_modules/libraries/dagster-aws/dagster_aws/emr/pyspark_step_launcher.py b/python_modules/libraries/dagster-aws/dagster_aws/emr/pyspark_step_launcher.py index 76e325ef973f6..ff29016e2d20e 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/emr/pyspark_step_launcher.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/emr/pyspark_step_launcher.py @@ -364,13 +364,13 @@ def wait_for_completion( all_events_new = self.read_events(s3, run_id, step_key) - if len(all_events_new) > len(all_events): - for i in range(len(all_events), len(all_events_new)): - event = all_events_new[i] + if len(all_events_new) > len(all_events): # pyright: ignore[reportArgumentType] + for i in range(len(all_events), len(all_events_new)): # pyright: ignore[reportArgumentType] + event = all_events_new[i] # pyright: ignore[reportOptionalSubscript,reportArgumentType,reportIndexIssue] # write each event from the EMR instance to the local instance step_context.instance.handle_new_event(event) - if event.is_dagster_event: 
- yield event.dagster_event + if event.is_dagster_event: # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + yield event.dagster_event # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] all_events = all_events_new def read_events(self, s3, run_id, step_key): @@ -383,7 +383,7 @@ def read_events(self, s3, run_id, step_key): return deserialize_value(pickle.loads(events_data)) except ClientError as ex: # The file might not be there yet, which is fine - if ex.response["Error"]["Code"] == "NoSuchKey": + if ex.response["Error"]["Code"] == "NoSuchKey": # pyright: ignore[reportTypedDictNotRequiredAccess] return [] else: raise ex diff --git a/python_modules/libraries/dagster-aws/dagster_aws/s3/s3_fake_resource.py b/python_modules/libraries/dagster-aws/dagster_aws/s3/s3_fake_resource.py index c03855062cde3..ad2c9edcc99b3 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/s3/s3_fake_resource.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/s3/s3_fake_resource.py @@ -55,7 +55,7 @@ def put_object(self, Bucket, Key, Body, *args, **kwargs): def get_object(self, Bucket, Key, *args, **kwargs): if not self.has_object(Bucket, Key): - raise ClientError({}, None) + raise ClientError({}, None) # pyright: ignore[reportArgumentType] self.mock_extras.get_object(*args, **kwargs) return {"Body": self._get_byte_stream(Bucket, Key)} diff --git a/python_modules/libraries/dagster-aws/dagster_aws/utils/__init__.py b/python_modules/libraries/dagster-aws/dagster_aws/utils/__init__.py index 990d0e7e0aba1..947408ebeb8e4 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/utils/__init__.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/utils/__init__.py @@ -20,7 +20,7 @@ def construct_boto_client_retry_config(max_attempts): retry_config = {"max_attempts": max_attempts} if version.parse(botocore_version) >= version.parse("1.15.0"): retry_config["mode"] = "standard" - return Config(retries=retry_config) + return 
Config(retries=retry_config) # pyright: ignore[reportArgumentType] T = TypeVar("T") diff --git a/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/utils.py b/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/utils.py index f1b736c2bcd12..bc2b1ca155243 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/utils.py +++ b/python_modules/libraries/dagster-aws/dagster_aws/utils/mrjob/utils.py @@ -57,7 +57,7 @@ def _client_error_status(ex): def _is_retriable_client_error(ex): """Is the exception from a boto3 client retriable?""" - if isinstance(ex, botocore.exceptions.ClientError): + if isinstance(ex, botocore.exceptions.ClientError): # pyright: ignore[reportAttributeAccessIssue] # these rarely get through in boto3 code = _client_error_code(ex) # "Throttl" catches "Throttled" and "Throttling" diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/repo.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/repo.py index 0a04980cd82db..36f487e82e9cc 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/repo.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/repo.py @@ -11,6 +11,6 @@ def job(): node() -@dagster.repository +@dagster.repository # pyright: ignore[reportArgumentType] def repository(): return {"jobs": {"job": job}} diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/test_launching.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/test_launching.py index d9849c4f36322..002592b8a4bc5 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/test_launching.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/launcher_tests/test_launching.py @@ -881,7 +881,7 @@ def test_eventual_consistency(ecs, instance, workspace, run, monkeypatch): retries = 0 
original_describe_tasks = instance.run_launcher.ecs.describe_tasks - original_backoff_retries = dagster_aws.ecs.tasks.BACKOFF_RETRIES + original_backoff_retries = dagster_aws.ecs.tasks.BACKOFF_RETRIES # pyright: ignore[reportAttributeAccessIssue] def describe_tasks(*_args, **_kwargs): nonlocal retries @@ -894,12 +894,12 @@ def describe_tasks(*_args, **_kwargs): with pytest.raises(EcsEventualConsistencyTimeout): monkeypatch.setattr(instance.run_launcher.ecs, "describe_tasks", describe_tasks) - monkeypatch.setattr(dagster_aws.ecs.tasks, "BACKOFF_RETRIES", 0) + monkeypatch.setattr(dagster_aws.ecs.tasks, "BACKOFF_RETRIES", 0) # pyright: ignore[reportAttributeAccessIssue] instance.launch_run(run.run_id, workspace) # Reset the mock retries = 0 - monkeypatch.setattr(dagster_aws.ecs.tasks, "BACKOFF_RETRIES", original_backoff_retries) + monkeypatch.setattr(dagster_aws.ecs.tasks, "BACKOFF_RETRIES", original_backoff_retries) # pyright: ignore[reportAttributeAccessIssue] instance.launch_run(run.run_id, workspace) tasks = ecs.list_tasks()["taskArns"] @@ -1326,7 +1326,7 @@ def test_overrides_too_long( fn_name="foo", ), container_image="test:latest", - container_context=large_container_context, + container_context=large_container_context, # pyright: ignore[reportArgumentType] ), ) diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/stubbed_ecs.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/stubbed_ecs.py index 93c0a253c29f8..5a1b4f6c9522e 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/stubbed_ecs.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/ecs_tests/stubbed_ecs.py @@ -165,10 +165,10 @@ def describe_tasks(self, **kwargs): cluster = self._cluster(kwargs.get("cluster")) arns = kwargs.get("tasks") - for i, arn in enumerate(arns): + for i, arn in enumerate(arns): # pyright: ignore[reportArgumentType] if ":" not in arn: # We received just a task ID, not a full ARN - arns[i] = 
self._arn("task", f"{cluster}/{arn}") + arns[i] = self._arn("task", f"{cluster}/{arn}") # pyright: ignore[reportOptionalSubscript] tasks = [task for task in self.storage.tasks[cluster] if task["taskArn"] in arns] @@ -297,7 +297,7 @@ def register_task_definition(self, **kwargs): # Sleep for long enough that we hit the lock time.sleep(0.2) # Family must be <= 255 characters. Alphanumeric, dash, and underscore only. - if len(family) > 255 or not re.match(r"^[\w\-]+$", family): + if len(family) > 255 or not re.match(r"^[\w\-]+$", family): # pyright: ignore[reportCallIssue,reportArgumentType] self.stubber.add_client_error( method="register_task_definition", expected_params={**kwargs} ) @@ -505,7 +505,7 @@ def tag_resource(self, **kwargs): service_response={}, expected_params={**kwargs}, ) - self.storage.tags[arn] = tags + self.storage.tags[arn] = tags # pyright: ignore[reportArgumentType] else: self.stubber.add_client_error(method="tag_resource", expected_params={**kwargs}) diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/emr_tests/test_emr.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/emr_tests/test_emr.py index 25e11b0770af4..650ddb8fe92b3 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/emr_tests/test_emr.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/emr_tests/test_emr.py @@ -32,7 +32,7 @@ def test_emr_add_tags_and_describe_cluster(emr_cluster_config): emr.add_tags(context.log, {"foobar": "v1", "baz": "123"}, cluster_id) - tags = emr.describe_cluster(cluster_id)["Cluster"]["Tags"] + tags = emr.describe_cluster(cluster_id)["Cluster"]["Tags"] # pyright: ignore[reportOptionalSubscript] assert {"Key": "baz", "Value": "123"} in tags assert {"Key": "foobar", "Value": "v1"} in tags @@ -43,7 +43,7 @@ def test_emr_describe_cluster(emr_cluster_config): context = create_test_pipeline_execution_context() cluster = EmrJobRunner(region=REGION) cluster_id = cluster.run_job_flow(context.log, emr_cluster_config) - 
cluster_info = cluster.describe_cluster(cluster_id)["Cluster"] + cluster_info = cluster.describe_cluster(cluster_id)["Cluster"] # pyright: ignore[reportOptionalSubscript] assert cluster_info["Name"] == "test-emr" assert EmrClusterState(cluster_info["Status"]["State"]) == EmrClusterState.Waiting diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/fake_ecs.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/fake_ecs.py index 156d3f64ddbf2..900f80b7463d4 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/fake_ecs.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/fake_ecs.py @@ -264,7 +264,7 @@ def wait(self, **kwargs): return if num_attempts >= max_attempts: - raise botocore.exceptions.WaiterError( + raise botocore.exceptions.WaiterError( # pyright: ignore[reportAttributeAccessIssue] name=self.waiter_name, reason="Max attempts exceeded", last_response=response, diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/test_pipes.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/test_pipes.py index f8a25cf85dd18..37808a779d33f 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/test_pipes.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/pipes_tests/test_pipes.py @@ -910,8 +910,8 @@ def materialize_asset(env, return_dict): resources={"pipes_ecs_client": pipes_ecs_client}, ) finally: - assert len(pipes_ecs_client._client._task_runs) > 0 # noqa - task_arn = next(iter(pipes_ecs_client._client._task_runs.keys())) # noqa + assert len(pipes_ecs_client._client._task_runs) > 0 # noqa # pyright: ignore[reportAttributeAccessIssue] + task_arn = next(iter(pipes_ecs_client._client._task_runs.keys())) # noqa # pyright: ignore[reportAttributeAccessIssue] return_dict[0] = pipes_ecs_client._client.describe_tasks( # noqa cluster="test-cluster", tasks=[task_arn] ) diff --git 
a/python_modules/libraries/dagster-aws/dagster_aws_tests/s3_tests/test_compute_log_manager.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/s3_tests/test_compute_log_manager.py index b2514804caa32..d5d4ee55ca5f1 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/s3_tests/test_compute_log_manager.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/s3_tests/test_compute_log_manager.py @@ -70,10 +70,10 @@ def simple(): file_key = event.logs_captured_data.file_key log_key = manager.build_log_key_for_run(result.run_id, file_key) log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -95,10 +95,10 @@ def simple(): os.unlink(os.path.join(local_dir, filename)) log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -121,8 +121,8 @@ def test_compute_log_manager_from_config(mock_s3_bucket): f.write(dagster_yaml.encode("utf-8")) instance = DagsterInstance.from_config(tempdir) - assert instance.compute_log_manager._s3_bucket == mock_s3_bucket.name # noqa: SLF001 - assert instance.compute_log_manager._s3_prefix == s3_prefix # noqa: SLF001 + assert instance.compute_log_manager._s3_bucket == mock_s3_bucket.name # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._s3_prefix == s3_prefix # noqa: SLF001 # 
pyright: ignore[reportAttributeAccessIssue] def test_compute_log_manager_skip_empty_upload(mock_s3_bucket): @@ -197,7 +197,7 @@ def test_prefix_filter(mock_s3_bucket): ) log_key = ["arbitrary", "log", "key"] with manager.open_log_stream(log_key, ComputeIOType.STDERR) as write_stream: - write_stream.write("hello hello") + write_stream.write("hello hello") # pyright: ignore[reportOptionalMemberAccess] s3_object = mock_s3_bucket.Object(key="foo/bar/storage/arbitrary/log/key.err") logs = s3_object.get()["Body"].read().decode("utf-8") @@ -217,7 +217,7 @@ def test_get_log_keys_for_log_key_prefix(mock_s3_bucket): def write_log_file(file_id: int, io_type: ComputeIOType): full_log_key = [*log_key_prefix, f"{file_id}"] with manager.open_log_stream(full_log_key, io_type) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) assert len(log_keys) == 0 @@ -226,7 +226,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): write_log_file(i, ComputeIOType.STDERR) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -237,7 +237,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): write_log_file(4, ComputeIOType.STDOUT) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -306,5 +306,5 @@ def my_job(): ) assert len(captured_log_entries) == 1 entry = captured_log_entries[0] - assert entry.dagster_event.logs_captured_data.external_stdout_url - assert entry.dagster_event.logs_captured_data.external_stderr_url + 
assert entry.dagster_event.logs_captured_data.external_stdout_url # pyright: ignore[reportOptionalMemberAccess] + assert entry.dagster_event.logs_captured_data.external_stderr_url # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/secretsmanager_tests/test_secrets.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/secretsmanager_tests/test_secrets.py index 12790ae99e468..4c2d0c9485561 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/secretsmanager_tests/test_secrets.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/secretsmanager_tests/test_secrets.py @@ -151,7 +151,7 @@ def test_secretmanager_secrets_resource(mock_secretsmanager_resource): ) as secret_map: assert json.loads(secret_map["json_secret"]) == json_secret_obj - assert json.loads(os.getenv("json_secret")) == json_secret_obj + assert json.loads(os.getenv("json_secret")) == json_secret_obj # pyright: ignore[reportArgumentType] # Binary secrets have a None value binary_secret = mock_secretsmanager_resource.create_secret( diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/ssm_tests/test_parameters.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/ssm_tests/test_parameters.py index 37ec6fc4a45d2..0be8f824df37c 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/ssm_tests/test_parameters.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/ssm_tests/test_parameters.py @@ -41,16 +41,25 @@ def test_get_parameters_by_path(mock_ssm_client): mock_ssm_client.put_parameter(Name="path/not/matching/param3", Value="param3", Type="String") result = get_parameters_by_paths( - mock_ssm_client, ["path/based"], with_decryption=True, recursive=True + mock_ssm_client, + ["path/based"], # pyright: ignore[reportArgumentType] + with_decryption=True, + recursive=True, ) assert result == {"path/based/param1": "param1", "path/based/nested/param2": "param2"} result = 
get_parameters_by_paths( - mock_ssm_client, ["path/based"], with_decryption=True, recursive=False + mock_ssm_client, + ["path/based"], # pyright: ignore[reportArgumentType] + with_decryption=True, + recursive=False, ) assert result == {"path/based/param1": "param1"} result = get_parameters_by_paths( - mock_ssm_client, ["path/"], with_decryption=False, recursive=False + mock_ssm_client, + ["path/"], # pyright: ignore[reportArgumentType] + with_decryption=False, + recursive=False, ) assert result == {} diff --git a/python_modules/libraries/dagster-aws/dagster_aws_tests/utils_tests/mrjob_tests/test_utils.py b/python_modules/libraries/dagster-aws/dagster_aws_tests/utils_tests/mrjob_tests/test_utils.py index 9e98766248b49..59402ad3b5437 100644 --- a/python_modules/libraries/dagster-aws/dagster_aws_tests/utils_tests/mrjob_tests/test_utils.py +++ b/python_modules/libraries/dagster-aws/dagster_aws_tests/utils_tests/mrjob_tests/test_utils.py @@ -20,33 +20,33 @@ def test_client_error_code(): code = "Timeout" - ex = botocore.exceptions.ClientError({"Error": {"Code": code}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"Code": code}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _client_error_code(ex) == code - empty_ex = botocore.exceptions.ClientError({}, "foo") + empty_ex = botocore.exceptions.ClientError({}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _client_error_code(empty_ex) == "" def test_client_error_status(): code = 403 - ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": code}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": code}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _client_error_status(ex) == code - empty_ex = botocore.exceptions.ClientError({}, "foo") + empty_ex = botocore.exceptions.ClientError({}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _client_error_status(empty_ex) is None def test_is_retriable_client_error(): - ex = 
botocore.exceptions.ClientError({"Error": {"Code": "Timeout"}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"Code": "Timeout"}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _is_retriable_client_error(ex) - ex = botocore.exceptions.ClientError({"Error": {"Code": "Not retryable"}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"Code": "Not retryable"}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert not _is_retriable_client_error(ex) - ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": 505}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": 505}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert _is_retriable_client_error(ex) - ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": 403}}, "foo") + ex = botocore.exceptions.ClientError({"Error": {"HTTPStatusCode": 403}}, "foo") # pyright: ignore[reportAttributeAccessIssue] assert not _is_retriable_client_error(ex) assert _is_retriable_client_error(ssl.SSLError("The read operation timed out")) @@ -59,8 +59,8 @@ def test_is_retriable_client_error(): def test_wrap_aws_client(mock_s3_resource): client = _wrap_aws_client(mock_s3_resource.meta.client, min_backoff=1000) res = client.list_buckets() - assert res["ResponseMetadata"]["HTTPStatusCode"] == 200 - assert res["Buckets"] == [] + assert res["ResponseMetadata"]["HTTPStatusCode"] == 200 # pyright: ignore[reportOptionalSubscript] + assert res["Buckets"] == [] # pyright: ignore[reportOptionalSubscript] def test_boto3_now(): diff --git a/python_modules/libraries/dagster-azure/dagster_azure_tests/adls2_tests/test_io_manager.py b/python_modules/libraries/dagster-azure/dagster_azure_tests/adls2_tests/test_io_manager.py index 4e00d3d72896d..6567fb6e84b34 100644 --- a/python_modules/libraries/dagster-azure/dagster_azure_tests/adls2_tests/test_io_manager.py +++ b/python_modules/libraries/dagster-azure/dagster_azure_tests/adls2_tests/test_io_manager.py @@ -261,7 
+261,7 @@ def graph_asset(): @asset( name=f"upstream_{_id}", - ins={"asset3": AssetIn(asset_key=AssetKey([f"asset3_{_id}"]))}, + ins={"asset3": AssetIn(asset_key=AssetKey([f"asset3_{_id}"]))}, # pyright: ignore[reportCallIssue] ) def upstream(asset3): return asset3 + 1 @@ -287,7 +287,7 @@ def upstream(asset3): @asset( name=f"downstream_{_id}", - ins={"upstream": AssetIn(asset_key=AssetKey([f"upstream_{_id}"]))}, + ins={"upstream": AssetIn(asset_key=AssetKey([f"upstream_{_id}"]))}, # pyright: ignore[reportCallIssue] ) def downstream(upstream, source): assert upstream == 7 diff --git a/python_modules/libraries/dagster-azure/dagster_azure_tests/blob_tests/test_compute_log_manager.py b/python_modules/libraries/dagster-azure/dagster_azure_tests/blob_tests/test_compute_log_manager.py index bc957c9c9148f..7c76f5846f8bc 100644 --- a/python_modules/libraries/dagster-azure/dagster_azure_tests/blob_tests/test_compute_log_manager.py +++ b/python_modules/libraries/dagster-azure/dagster_azure_tests/blob_tests/test_compute_log_manager.py @@ -83,9 +83,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -105,9 +105,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -135,9 +135,9 @@ def 
test_compute_log_manager_from_config(storage_account, container, credential) f.write(dagster_yaml.encode("utf-8")) instance = DagsterInstance.from_config(tempdir) - assert instance.compute_log_manager._storage_account == storage_account # noqa: SLF001 - assert instance.compute_log_manager._container == container # noqa: SLF001 - assert instance.compute_log_manager._blob_prefix == prefix # noqa: SLF001 + assert instance.compute_log_manager._storage_account == storage_account # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._container == container # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._blob_prefix == prefix # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] @mock.patch("dagster_azure.blob.compute_log_manager.create_blob_client") @@ -156,7 +156,7 @@ def test_prefix_filter(mock_create_blob_client, storage_account, container, cred ) log_key = ["arbitrary", "log", "key"] with manager.open_log_stream(log_key, ComputeIOType.STDERR) as write_stream: - write_stream.write("hello hello") + write_stream.write("hello hello") # pyright: ignore[reportOptionalMemberAccess] adls2_object = fake_client.get_blob_client( container=container, @@ -188,7 +188,7 @@ def test_get_log_keys_for_log_key_prefix( def write_log_file(file_id: int, io_type: ComputeIOType): full_log_key = [*log_key_prefix, f"{file_id}"] with manager.open_log_stream(full_log_key, io_type) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) assert len(log_keys) == 0 @@ -197,7 +197,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): write_log_file(i, ComputeIOType.STDERR) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: 
ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -209,7 +209,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): log_key = [*log_key_prefix, "4"] with manager.local_manager.open_log_stream(log_key, ComputeIOType.STDOUT) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] blob_key = manager._blob_key(log_key, ComputeIOType.STDOUT) # noqa: SLF001 with open( manager.local_manager.get_captured_local_path( @@ -221,7 +221,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): blob.upload_blob(data) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -355,9 +355,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -377,9 +377,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -405,9 +405,9 @@ def test_compute_log_manager_from_config_default_azure_credential(storage_accoun f.write(dagster_yaml.encode("utf-8")) instance = DagsterInstance.from_config(tempdir) - assert 
instance.compute_log_manager._storage_account == storage_account # noqa: SLF001 - assert instance.compute_log_manager._container == container # noqa: SLF001 - assert instance.compute_log_manager._blob_prefix == prefix # noqa: SLF001 - assert instance.compute_log_manager._default_azure_credential == { # noqa: SLF001 + assert instance.compute_log_manager._storage_account == storage_account # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._container == container # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._blob_prefix == prefix # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance.compute_log_manager._default_azure_credential == { # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] "exclude_environment_credentials": True } diff --git a/python_modules/libraries/dagster-celery-docker/dagster_celery_docker/executor.py b/python_modules/libraries/dagster-celery-docker/dagster_celery_docker/executor.py index 8d55007d7c641..ea96f85cf2d61 100644 --- a/python_modules/libraries/dagster-celery-docker/dagster_celery_docker/executor.py +++ b/python_modules/libraries/dagster-celery-docker/dagster_celery_docker/executor.py @@ -229,17 +229,17 @@ def _execute_step_docker( check.dict_param(docker_config, "docker_config") - instance = DagsterInstance.from_ref(execute_step_args.instance_ref) + instance = DagsterInstance.from_ref(execute_step_args.instance_ref) # pyright: ignore[reportArgumentType] dagster_run = check.not_none( instance.get_run_by_id(execute_step_args.run_id), f"Could not load run {execute_step_args.run_id}", ) - step_keys_str = ", ".join(execute_step_args.step_keys_to_execute) + step_keys_str = ", ".join(execute_step_args.step_keys_to_execute) # pyright: ignore[reportCallIssue,reportArgumentType] docker_image = ( docker_config["image"] if docker_config.get("image") - else dagster_run.job_code_origin.repository_origin.container_image + 
else dagster_run.job_code_origin.repository_origin.container_image # pyright: ignore[reportOptionalMemberAccess] ) if not docker_image: @@ -267,7 +267,7 @@ def _execute_step_docker( marker_end=DELEGATE_MARKER, ), CeleryDockerExecutor, - step_key=execute_step_args.step_keys_to_execute[0], + step_key=execute_step_args.step_keys_to_execute[0], # pyright: ignore[reportOptionalSubscript] ) serialized_events = [serialize_value(engine_event)] @@ -290,7 +290,7 @@ def _execute_step_docker( if isinstance(e_vars, dict): docker_env.update(e_vars) else: - for v in e_vars: + for v in e_vars: # pyright: ignore[reportOptionalIterable] key, val = v.split("=") docker_env[key] = val del container_kwargs["environment"] @@ -298,15 +298,14 @@ def _execute_step_docker( try: docker_response = client.containers.run( docker_image, - command=execute_step_args.get_command_args(), - # pass through this worker's environment for things like AWS creds etc. - environment=docker_env, + command=execute_step_args.get_command_args(), # type: ignore # Sequence list mismatch + environment=docker_env, # type: ignore # Mapping dict mismatch network=docker_config.get("network", None), **container_kwargs, ) res = docker_response.decode("utf-8") - except docker.errors.ContainerError as err: + except docker.errors.ContainerError as err: # pyright: ignore[reportAttributeAccessIssue] metadata = {"Job image": docker_image} if err.stderr is not None: metadata["Docker stderr"] = err.stderr @@ -316,7 +315,7 @@ def _execute_step_docker( dagster_run, EngineEventData(metadata), CeleryDockerExecutor, - step_key=execute_step_args.step_keys_to_execute[0], + step_key=execute_step_args.step_keys_to_execute[0], # pyright: ignore[reportOptionalSubscript] ) raise else: diff --git a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/executor.py b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/executor.py index 52298959be014..233e737e35ab8 100644 --- 
a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/executor.py +++ b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/executor.py @@ -287,7 +287,7 @@ def _execute_step_k8s_job( ) check.inst_param(execute_step_args, "execute_step_args", ExecuteStepArgs) check.invariant( - len(execute_step_args.step_keys_to_execute) == 1, + len(execute_step_args.step_keys_to_execute) == 1, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] "Celery K8s task executor can only execute 1 step at a time", ) @@ -315,13 +315,13 @@ def _execute_step_k8s_job( kubernetes.config.load_kube_config(kubeconfig_file) api_client = DagsterKubernetesClient.production_client() - instance = DagsterInstance.from_ref(execute_step_args.instance_ref) + instance = DagsterInstance.from_ref(execute_step_args.instance_ref) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] dagster_run = check.not_none( - instance.get_run_by_id(execute_step_args.run_id), - f"Could not load run {execute_step_args.run_id}", + instance.get_run_by_id(execute_step_args.run_id), # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + f"Could not load run {execute_step_args.run_id}", # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) - step_key = execute_step_args.step_keys_to_execute[0] + step_key = execute_step_args.step_keys_to_execute[0] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] celery_worker_name = self.request.hostname celery_pod_name = os.environ.get("HOSTNAME") @@ -353,9 +353,9 @@ def _execute_step_k8s_job( return [] # Ensure we stay below k8s name length limits - k8s_name_key = get_k8s_job_name(execute_step_args.run_id, step_key) + k8s_name_key = get_k8s_job_name(execute_step_args.run_id, step_key) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] - retry_state = execute_step_args.known_state.get_retry_state() + retry_state = 
execute_step_args.known_state.get_retry_state() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] if retry_state.get_attempt_count(step_key): attempt_number = retry_state.get_attempt_count(step_key) @@ -365,18 +365,18 @@ def _execute_step_k8s_job( job_name = "dagster-step-%s" % (k8s_name_key) pod_name = "dagster-step-%s" % (k8s_name_key) - args = execute_step_args.get_command_args() + args = execute_step_args.get_command_args() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] labels = { "dagster/job": dagster_run.job_name, "dagster/op": step_key, - "dagster/run-id": execute_step_args.run_id, + "dagster/run-id": execute_step_args.run_id, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] } if dagster_run.remote_job_origin: labels["dagster/code-location"] = ( dagster_run.remote_job_origin.repository_origin.code_location_origin.location_name ) - per_op_override = per_step_k8s_config.get(step_key, {}) + per_op_override = per_step_k8s_config.get(step_key, {}) # pyright: ignore[reportOptionalMemberAccess] tag_container_context = K8sContainerContext(run_k8s_config=user_defined_k8s_config) executor_config_container_context = K8sContainerContext( @@ -469,7 +469,7 @@ def _execute_step_k8s_job( job_name=job_name, namespace=job_namespace, instance=instance, - run_id=execute_step_args.run_id, + run_id=execute_step_args.run_id, # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] wait_timeout=job_wait_timeout, ) except (DagsterK8sError, DagsterK8sTimeoutError) as err: diff --git a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py index 1bdcd2cbaf6bc..e11a058298a3d 100644 --- a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py +++ b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s/launcher.py @@ -158,7 +158,7 @@ def launch_run(self, context: 
LaunchRunContext) -> None: pod_name = job_name exc_config = _get_validated_celery_k8s_executor_config(run.run_config) - job_image_from_executor_config = exc_config.get("job_image") + job_image_from_executor_config = exc_config.get("job_image") # pyright: ignore[reportOptionalMemberAccess] job_origin = cast(JobPythonOrigin, context.job_code_origin) repository_origin = job_origin.repository_origin @@ -227,7 +227,7 @@ def launch_run(self, context: LaunchRunContext) -> None: {DOCKER_IMAGE_TAG: job.spec.template.spec.containers[0].image}, ) - job_namespace = exc_config.get("job_namespace", self.job_namespace) + job_namespace = exc_config.get("job_namespace", self.job_namespace) # pyright: ignore[reportOptionalMemberAccess] self._instance.report_engine_event( "Creating Kubernetes run worker job", @@ -320,16 +320,16 @@ def get_namespace_from_run_config(self, run_id): check.str_param(run_id, "run_id") dagster_run = self._instance.get_run_by_id(run_id) - run_config = dagster_run.run_config + run_config = dagster_run.run_config # pyright: ignore[reportOptionalMemberAccess] executor_config = _get_validated_celery_k8s_executor_config(run_config) - return executor_config.get("job_namespace", self.job_namespace) + return executor_config.get("job_namespace", self.job_namespace) # pyright: ignore[reportOptionalMemberAccess] @property def supports_check_run_worker_health(self): return True def check_run_worker_health(self, run: DagsterRun): - job_namespace = _get_validated_celery_k8s_executor_config(run.run_config).get( + job_namespace = _get_validated_celery_k8s_executor_config(run.run_config).get( # pyright: ignore[reportOptionalMemberAccess] "job_namespace", self.job_namespace ) job_name = get_job_name_from_run_id(run.run_id) diff --git a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_executor.py b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_executor.py index dd7cd314be423..fc89666295ee8 100644 --- 
a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_executor.py +++ b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_executor.py @@ -68,7 +68,7 @@ def apply_async(self, **kwargs): "execute_step_args_packed", ) ) - args = execute_step_args.get_command_args() + args = execute_step_args.get_command_args() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] result = subprocess.run(args, check=True, capture_output=True) raw_logs = result.stdout diff --git a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_launcher.py b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_launcher.py index f4bb38b1428c5..4cda3dce399b7 100644 --- a/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_launcher.py +++ b/python_modules/libraries/dagster-celery-k8s/dagster_celery_k8s_tests/test_launcher.py @@ -366,7 +366,7 @@ def test_user_defined_k8s_config_in_run_tags(kubeconfig_file): celery_k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == expected_image + assert updated_run.tags[DOCKER_IMAGE_TAG] == expected_image # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. 
mock_method_calls = mock_k8s_client_batch_api.method_calls @@ -391,7 +391,7 @@ def test_user_defined_k8s_config_in_run_tags(kubeconfig_file): assert ( args == ExecuteRunArgs( - job_origin=run.job_code_origin, + job_origin=run.job_code_origin, # pyright: ignore[reportArgumentType] run_id=run.run_id, instance_ref=instance.get_ref(), set_exit_code_on_failure=None, @@ -455,7 +455,7 @@ def test_raise_on_error(kubeconfig_file): assert ( args == ExecuteRunArgs( - job_origin=run.job_code_origin, + job_origin=run.job_code_origin, # pyright: ignore[reportArgumentType] run_id=run.run_id, instance_ref=instance.get_ref(), set_exit_code_on_failure=True, @@ -496,7 +496,7 @@ def test_k8s_executor_config_override(kubeconfig_file): celery_k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == "my_image:tag" + assert updated_run.tags[DOCKER_IMAGE_TAG] == "my_image:tag" # pyright: ignore[reportOptionalMemberAccess] # Launch with custom job_image run = create_run_for_test( @@ -511,7 +511,7 @@ def test_k8s_executor_config_override(kubeconfig_file): celery_k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake-image-name" + assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake-image-name" # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. 
mock_method_calls = mock_k8s_client_batch_api.method_calls diff --git a/python_modules/libraries/dagster-celery/dagster_celery/cli.py b/python_modules/libraries/dagster-celery/dagster_celery/cli.py index 7934f807548bd..f194e9646b20a 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery/cli.py +++ b/python_modules/libraries/dagster-celery/dagster_celery/cli.py @@ -247,7 +247,7 @@ def status_command( def worker_list_command(config_yaml=None): app = get_app(config_yaml) - print(app.control.inspect(timeout=1).active()) # noqa: T201 + print(app.control.inspect(timeout=1).active()) # noqa: T201 # pyright: ignore[reportAttributeAccessIssue] @click.command( @@ -279,9 +279,9 @@ def worker_terminate_command(name="dagster", config_yaml=None, all_=False): app = get_app(config_yaml) if all_: - app.control.broadcast("shutdown") + app.control.broadcast("shutdown") # pyright: ignore[reportAttributeAccessIssue] else: - app.control.broadcast( + app.control.broadcast( # pyright: ignore[reportAttributeAccessIssue] "shutdown", destination=[host_format(default_nodename(get_worker_name(name)))] ) diff --git a/python_modules/libraries/dagster-celery/dagster_celery/tasks.py b/python_modules/libraries/dagster-celery/dagster_celery/tasks.py index 8f83dc01cea5b..53b9689ab8acb 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery/tasks.py +++ b/python_modules/libraries/dagster-celery/dagster_celery/tasks.py @@ -36,7 +36,7 @@ def _execute_plan(self, execute_step_args_packed, executable_dict): check.dict_param(executable_dict, "executable_dict") - instance = DagsterInstance.from_ref(execute_step_args.instance_ref) + instance = DagsterInstance.from_ref(execute_step_args.instance_ref) # pyright: ignore[reportArgumentType] recon_job = ReconstructableJob.from_dict(executable_dict) retry_mode = execute_step_args.retry_mode @@ -44,11 +44,11 @@ def _execute_plan(self, execute_step_args_packed, executable_dict): dagster_run = instance.get_run_by_id(execute_step_args.run_id) 
check.invariant(dagster_run, f"Could not load run {execute_step_args.run_id}") - step_keys_str = ", ".join(execute_step_args.step_keys_to_execute) + step_keys_str = ", ".join(execute_step_args.step_keys_to_execute) # pyright: ignore[reportCallIssue,reportArgumentType] execution_plan = create_execution_plan( recon_job, - dagster_run.run_config, + dagster_run.run_config, # pyright: ignore[reportOptionalMemberAccess] step_keys_to_execute=execute_step_args.step_keys_to_execute, known_state=execute_step_args.known_state, ) @@ -64,17 +64,17 @@ def _execute_plan(self, execute_step_args_packed, executable_dict): marker_end=DELEGATE_MARKER, ), CeleryExecutor, - step_key=execution_plan.step_handle_for_single_step_plans().to_key(), + step_key=execution_plan.step_handle_for_single_step_plans().to_key(), # pyright: ignore[reportOptionalMemberAccess] ) events = [engine_event] for step_event in execute_plan_iterator( execution_plan=execution_plan, job=recon_job, - dagster_run=dagster_run, + dagster_run=dagster_run, # pyright: ignore[reportArgumentType] instance=instance, retry_mode=retry_mode, - run_config=dagster_run.run_config, + run_config=dagster_run.run_config, # pyright: ignore[reportOptionalMemberAccess] ): events.append(step_event) diff --git a/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py b/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py index 832e6bec2fa35..bd35661db697b 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py +++ b/python_modules/libraries/dagster-celery/dagster_celery_tests/conftest.py @@ -92,7 +92,7 @@ def dagster_docker_image(): f"Found existing image tagged {docker_image}, skipping image build. 
To rebuild, first run: " f"docker rmi {docker_image}" ) - except docker.errors.ImageNotFound: + except docker.errors.ImageNotFound: # pyright: ignore[reportAttributeAccessIssue] build_and_tag_test_image(docker_image) return docker_image diff --git a/python_modules/libraries/dagster-celery/dagster_celery_tests/test_execute.py b/python_modules/libraries/dagster-celery/dagster_celery_tests/test_execute.py index 8cfd461b7c7ae..1b1324d6b2469 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery_tests/test_execute.py +++ b/python_modules/libraries/dagster-celery/dagster_celery_tests/test_execute.py @@ -71,7 +71,7 @@ def test_execute_fails_job_on_celery(dagster_celery_worker): with execute_job_on_celery("test_fails") as result: assert len(result.get_step_failure_events()) == 1 assert result.is_node_failed("fails") - assert "Exception: argjhgjh\n" in result.failure_data_for_node("fails").error.cause.message + assert "Exception: argjhgjh\n" in result.failure_data_for_node("fails").error.cause.message # pyright: ignore[reportOptionalMemberAccess] assert result.is_node_untouched("should_never_execute") @@ -207,7 +207,7 @@ def test_execute_eagerly_fails_job_on_celery(): with execute_eagerly_on_celery("test_fails") as result: assert len(result.get_step_failure_events()) == 1 assert result.is_node_failed("fails") - assert "Exception: argjhgjh\n" in result.failure_data_for_node("fails").error.cause.message + assert "Exception: argjhgjh\n" in result.failure_data_for_node("fails").error.cause.message # pyright: ignore[reportOptionalMemberAccess] assert result.is_node_untouched("should_never_execute") diff --git a/python_modules/libraries/dagster-celery/dagster_celery_tests/test_priority.py b/python_modules/libraries/dagster-celery/dagster_celery_tests/test_priority.py index fa57ce0299ca9..c3c229b052dc7 100644 --- a/python_modules/libraries/dagster-celery/dagster_celery_tests/test_priority.py +++ 
b/python_modules/libraries/dagster-celery/dagster_celery_tests/test_priority.py @@ -82,4 +82,4 @@ def test_run_priority_job(rabbitmq): hi_run = hi_runs[0] histats = instance.get_run_stats(hi_run.run_id) - assert lowstats.start_time < histats.start_time + assert lowstats.start_time < histats.start_time # pyright: ignore[reportOperatorIssue] diff --git a/python_modules/libraries/dagster-census/dagster_census_tests/test_resources.py b/python_modules/libraries/dagster-census/dagster_census_tests/test_resources.py index 278dd2328ac98..9ca077dd3bf08 100644 --- a/python_modules/libraries/dagster-census/dagster_census_tests/test_resources.py +++ b/python_modules/libraries/dagster-census/dagster_census_tests/test_resources.py @@ -21,7 +21,7 @@ def test_get_sync(): "https://app.getcensus.com/api/v1/syncs/52", json=get_sync_data(), ) - assert census.get_sync(sync_id="52") + assert census.get_sync(sync_id="52") # pyright: ignore[reportArgumentType] def test_get_source(): @@ -32,7 +32,7 @@ def test_get_source(): "https://app.getcensus.com/api/v1/sources/15", json=get_source_data(), ) - assert census.get_source(source_id="15") + assert census.get_source(source_id="15") # pyright: ignore[reportArgumentType] def test_get_destination(): @@ -43,7 +43,7 @@ def test_get_destination(): "https://app.getcensus.com/api/v1/destinations/15", json=get_destination_data(), ) - assert census.get_destination(destination_id="15") + assert census.get_destination(destination_id="15") # pyright: ignore[reportArgumentType] def test_get_sync_run(): @@ -54,7 +54,7 @@ def test_get_sync_run(): "https://app.getcensus.com/api/v1/sync_runs/94", json=get_sync_run_data(), ) - assert census.get_sync_run(sync_run_id="94") + assert census.get_sync_run(sync_run_id="94") # pyright: ignore[reportArgumentType] def test_poll_sync_run(): @@ -66,7 +66,7 @@ def test_poll_sync_run(): "https://app.getcensus.com/api/v1/sync_runs/94", json=get_sync_run_data(), ) - assert census.poll_sync_run(sync_run_id="94", 
poll_interval=0) + assert census.poll_sync_run(sync_run_id="94", poll_interval=0) # pyright: ignore[reportArgumentType] mock_logger.info.assert_called_with( "View sync details here: https://app.getcensus.com/syncs_runs/94." ) @@ -80,7 +80,7 @@ def test_trigger_sync(): "https://app.getcensus.com/api/v1/syncs/52/trigger", json=get_sync_trigger_data(), ) - assert census.trigger_sync(sync_id="52") + assert census.trigger_sync(sync_id="52") # pyright: ignore[reportArgumentType] def test_trigger_sync_and_poll(): @@ -111,7 +111,7 @@ def test_trigger_sync_and_poll(): "https://app.getcensus.com/api/v1/sync_runs/94", json=get_sync_run_data(), ) - result = census.trigger_sync_and_poll(sync_id="52", poll_interval=0) + result = census.trigger_sync_and_poll(sync_id="52", poll_interval=0) # pyright: ignore[reportArgumentType] assert result == CensusOutput( sync_run=get_sync_run_data()["data"], source=get_source_data()["data"], diff --git a/python_modules/libraries/dagster-dask/dagster_dask/resources.py b/python_modules/libraries/dagster-dask/dagster_dask/resources.py index 87ef0b22a1c8c..f57acbcbe6515 100644 --- a/python_modules/libraries/dagster-dask/dagster_dask/resources.py +++ b/python_modules/libraries/dagster-dask/dagster_dask/resources.py @@ -59,7 +59,7 @@ def client(self): return self._client def close(self): - self.client.close() + self.client.close() # pyright: ignore[reportOptionalMemberAccess] if self.cluster: self.cluster.close() diff --git a/python_modules/libraries/dagster-dask/dagster_dask_tests/test_execute.py b/python_modules/libraries/dagster-dask/dagster_dask_tests/test_execute.py index 6c4a1552b1746..8b8dc064b3d21 100644 --- a/python_modules/libraries/dagster-dask/dagster_dask_tests/test_execute.py +++ b/python_modules/libraries/dagster-dask/dagster_dask_tests/test_execute.py @@ -160,7 +160,7 @@ def job_def(): """ Fails because 'DagsterExecutionInterruptedError' is not actually raised-- there's a timeout instead. 
It's not clear that the test ever was working-- prior to conversion to op/job/graph - APIs, it appears to have been mistakenly not using the dask executor. + APIs, it appears to have been mistakenly not using the dask executor. """ ) def test_dask_terminate(): @@ -184,8 +184,8 @@ def test_dask_terminate(): run_config=run_config, ) - for event in execute_run_iterator( - i_job=reconstructable(sleepy_dask_job), + for event in execute_run_iterator( # pyright: ignore[reportCallIssue] + i_job=reconstructable(sleepy_dask_job), # pyright: ignore[reportCallIssue] dagster_run=dagster_run, instance=instance, ): diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_databricks.py b/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_databricks.py index 45cf4282de6c8..4b892824dcbf9 100644 --- a/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_databricks.py +++ b/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_databricks.py @@ -326,8 +326,8 @@ def test_given_oauth_instantiates_correctly(self): client_id="test-client-id", client_secret="test-client-secret" ), ) - assert client.oauth_credentials.client_id == "test-client-id" - assert client.oauth_credentials.client_secret == "test-client-secret" + assert client.oauth_credentials.client_id == "test-client-id" # pyright: ignore[reportOptionalMemberAccess] + assert client.oauth_credentials.client_secret == "test-client-secret" # pyright: ignore[reportOptionalMemberAccess] assert client.token is None assert client.azure_credentials is None @@ -340,8 +340,8 @@ def test_given_azure_instantiates_correctly(self): azure_tenant_id="test-tenant-id", ), ) - assert client.azure_credentials.azure_client_id == "test-client-id" - assert client.azure_credentials.azure_client_secret == "test-client-secret" - assert client.azure_credentials.azure_tenant_id == "test-tenant-id" + assert client.azure_credentials.azure_client_id == "test-client-id" # 
pyright: ignore[reportOptionalMemberAccess] + assert client.azure_credentials.azure_client_secret == "test-client-secret" # pyright: ignore[reportOptionalMemberAccess] + assert client.azure_credentials.azure_tenant_id == "test-tenant-id" # pyright: ignore[reportOptionalMemberAccess] assert client.token is None assert client.oauth_credentials is None diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_pyspark.py b/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_pyspark.py index 859ab203708ff..869098b8f3afd 100644 --- a/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_pyspark.py +++ b/python_modules/libraries/dagster-databricks/dagster_databricks_tests/test_pyspark.py @@ -269,7 +269,7 @@ def test_pyspark_databricks( with instance_for_test() as instance: config = BASE_DATABRICKS_PYSPARK_STEP_LAUNCHER_CONFIG.copy() config.pop("local_job_package_path") - config["run_config"]["cluster"] = {"existing": "cluster_id"} + config["run_config"]["cluster"] = {"existing": "cluster_id"} # pyright: ignore[reportIndexIssue] with pytest.raises(ValueError) as excinfo: execute_job( job=reconstructable(define_do_nothing_test_job), @@ -307,7 +307,7 @@ def test_pyspark_databricks( reason="This test is slow and requires a Databricks cluster; run only upon explicit request", ) def test_do_it_live_databricks_s3(): - result = execute_job( + result = execute_job( # pyright: ignore[reportCallIssue] reconstructable(define_pyspark_s3_job), run_config={ "ops": {"blah": {"config": {"foo": "a string", "bar": 123}}}, @@ -339,7 +339,7 @@ def test_do_it_live_databricks_adls2(): } } - result = execute_job( + result = execute_job( # pyright: ignore[reportCallIssue] reconstructable(define_pyspark_adls2_job), run_config={ "ops": {"blah": {"config": {"foo": "a string", "bar": 123}}}, diff --git a/python_modules/libraries/dagster-dbt/dagster_dbt_tests/cloud/test_asset_defs.py 
b/python_modules/libraries/dagster-dbt/dagster_dbt_tests/cloud/test_asset_defs.py index 267d60fc6f516..73535383d3474 100644 --- a/python_modules/libraries/dagster-dbt/dagster_dbt_tests/cloud/test_asset_defs.py +++ b/python_modules/libraries/dagster-dbt/dagster_dbt_tests/cloud/test_asset_defs.py @@ -152,7 +152,7 @@ def test_load_assets_from_dbt_cloud_job( mock_run_job_and_poll = mocker.patch( "dagster_dbt.cloud.resources.DbtCloudClient.run_job_and_poll", - wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 + wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) dbt_assets_definition_cacheable_data = dbt_cloud_cacheable_assets.compute_cacheable_data() @@ -269,7 +269,7 @@ def test_load_assets_from_cached_compile_run( mock_run_job_and_poll = mocker.patch( "dagster_dbt.cloud.resources.DbtCloudClient.run_job_and_poll", - wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 + wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) dbt_assets_definition_cacheable_data = dbt_cloud_cacheable_assets.compute_cacheable_data() @@ -514,7 +514,7 @@ def test_partitions(mocker, dbt_cloud, dbt_cloud_service): mock_run_job_and_poll = mocker.patch( "dagster_dbt.cloud.resources.DbtCloudClient.run_job_and_poll", - wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 + wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) dbt_assets_definition_cacheable_data = dbt_cloud_cacheable_assets.compute_cacheable_data() @@ -618,7 +618,7 @@ def test_subsetting( mock_run_job_and_poll = mocker.patch( "dagster_dbt.cloud.resources.DbtCloudClient.run_job_and_poll", - wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 + wraps=dbt_cloud_cacheable_assets._dbt_cloud.run_job_and_poll, # noqa: SLF001 # 
pyright: ignore[reportAttributeAccessIssue] ) dbt_assets_definition_cacheable_data = dbt_cloud_cacheable_assets.compute_cacheable_data() diff --git a/python_modules/libraries/dagster-deltalake-pandas/dagster_deltalake_pandas_tests/test_type_handler.py b/python_modules/libraries/dagster-deltalake-pandas/dagster_deltalake_pandas_tests/test_type_handler.py index 02b5ce522a44d..f10e907cb758e 100644 --- a/python_modules/libraries/dagster-deltalake-pandas/dagster_deltalake_pandas_tests/test_type_handler.py +++ b/python_modules/libraries/dagster-deltalake-pandas/dagster_deltalake_pandas_tests/test_type_handler.py @@ -411,7 +411,7 @@ def test_dynamic_partition(tmp_path, io_manager): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -425,7 +425,7 @@ def test_dynamic_partition(tmp_path, io_manager): out_df = dt.to_pyarrow_table() assert out_df["a"].to_pylist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-deltalake-polars/dagster_deltalake_polars_tests/test_type_handler.py b/python_modules/libraries/dagster-deltalake-polars/dagster_deltalake_polars_tests/test_type_handler.py index 505421458afc7..a404237e3858e 100644 --- a/python_modules/libraries/dagster-deltalake-polars/dagster_deltalake_polars_tests/test_type_handler.py +++ b/python_modules/libraries/dagster-deltalake-polars/dagster_deltalake_polars_tests/test_type_handler.py @@ -493,7 +493,7 @@ def test_dynamic_partition(tmp_path, io_manager): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - 
instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -507,7 +507,7 @@ def test_dynamic_partition(tmp_path, io_manager): out_df = dt.to_pyarrow_table() assert out_df["a"].to_pylist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-deltalake/dagster_deltalake_tests/test_type_handler.py b/python_modules/libraries/dagster-deltalake/dagster_deltalake_tests/test_type_handler.py index fea13883ed962..963c35365ca35 100644 --- a/python_modules/libraries/dagster-deltalake/dagster_deltalake_tests/test_type_handler.py +++ b/python_modules/libraries/dagster-deltalake/dagster_deltalake_tests/test_type_handler.py @@ -410,7 +410,7 @@ def test_dynamic_partition(tmp_path, io_manager): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -424,7 +424,7 @@ def test_dynamic_partition(tmp_path, io_manager): out_df = dt.to_pyarrow_table() assert out_df["a"].to_pylist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py b/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py index ad2b5ca3f822b..f7665e21ac839 100644 --- a/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py 
+++ b/python_modules/libraries/dagster-docker/dagster_docker/docker_run_launcher.py @@ -128,7 +128,7 @@ def _launch_container_with_command(self, run, docker_image, command): **container_kwargs, ) - except docker.errors.ImageNotFound: + except docker.errors.ImageNotFound: # pyright: ignore[reportAttributeAccessIssue] client.images.pull(docker_image) container = client.containers.create( image=docker_image, @@ -153,7 +153,7 @@ def _launch_container_with_command(self, run, docker_image, command): self._instance.add_run_tags( run.run_id, - {DOCKER_CONTAINER_ID_TAG: container.id, DOCKER_IMAGE_TAG: docker_image}, + {DOCKER_CONTAINER_ID_TAG: container.id, DOCKER_IMAGE_TAG: docker_image}, # pyright: ignore[reportArgumentType] ) container.start() @@ -201,7 +201,7 @@ def _get_container(self, run): try: return self._get_client(container_context).containers.get(container_id) - except docker.errors.NotFound: + except docker.errors.NotFound: # pyright: ignore[reportAttributeAccessIssue] return None def terminate(self, run_id): diff --git a/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launch_docker.py b/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launch_docker.py index efc378bdd18c5..c32bdaaa8b702 100644 --- a/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launch_docker.py +++ b/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launch_docker.py @@ -68,7 +68,7 @@ def test_launch_docker_no_network(aws_env): ) run = instance.create_run_for_job( job_def=recon_job.get_definition(), - run_config=run_config, + run_config=run_config, # pyright: ignore[reportArgumentType] remote_job_origin=remote_job.get_remote_origin(), job_code_origin=remote_job.get_python_origin(), ) @@ -77,14 +77,14 @@ def test_launch_docker_no_network(aws_env): # Container launches, but run is stuck in STARTING state # due to not being able to access the network run = instance.get_run_by_id(run.run_id) - assert run.tags[DOCKER_IMAGE_TAG] == 
docker_image + assert run.tags[DOCKER_IMAGE_TAG] == docker_image # pyright: ignore[reportOptionalMemberAccess] - container_id = run.tags[DOCKER_CONTAINER_ID_TAG] + container_id = run.tags[DOCKER_CONTAINER_ID_TAG] # pyright: ignore[reportOptionalMemberAccess] - run = instance.get_run_by_id(run.run_id) + run = instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] - assert run.status == DagsterRunStatus.STARTING - assert run.tags[DOCKER_IMAGE_TAG] == docker_image + assert run.status == DagsterRunStatus.STARTING # pyright: ignore[reportOptionalMemberAccess] + assert run.tags[DOCKER_IMAGE_TAG] == docker_image # pyright: ignore[reportOptionalMemberAccess] client = docker.client.from_env() container = None @@ -153,7 +153,7 @@ def test_launch_docker_image_on_job_config(aws_env): ) run = instance.create_run_for_job( job_def=recon_job.get_definition(), - run_config=run_config, + run_config=run_config, # pyright: ignore[reportArgumentType] remote_job_origin=remote_job.get_remote_origin(), job_code_origin=remote_job.get_python_origin(), ) @@ -163,15 +163,15 @@ def test_launch_docker_image_on_job_config(aws_env): run = instance.get_run_by_id(run.run_id) - assert run.status == DagsterRunStatus.SUCCESS + assert run.status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] - assert run.tags[DOCKER_IMAGE_TAG] == docker_image + assert run.tags[DOCKER_IMAGE_TAG] == docker_image # pyright: ignore[reportOptionalMemberAccess] - container_obj = instance.run_launcher._get_container(run) # noqa + container_obj = instance.run_launcher._get_container(run) # noqa # pyright: ignore[reportAttributeAccessIssue] assert container_obj.labels["foo"] == "baz" assert container_obj.labels["bar"] == "" - assert container_obj.labels["dagster/run_id"] == run.run_id - assert container_obj.labels["dagster/job_name"] == run.job_name + assert container_obj.labels["dagster/run_id"] == run.run_id # pyright: ignore[reportOptionalMemberAccess] + assert 
container_obj.labels["dagster/job_name"] == run.job_name # pyright: ignore[reportOptionalMemberAccess] def check_event_log_contains(event_log, expected_type_and_message): @@ -229,7 +229,7 @@ def test_terminate_launched_docker_run(aws_env): run = instance.create_run_for_job( job_def=recon_job.get_definition(), - run_config=run_config, + run_config=run_config, # pyright: ignore[reportArgumentType] remote_job_origin=remote_job.get_remote_origin(), job_code_origin=remote_job.get_python_origin(), ) @@ -244,7 +244,7 @@ def test_terminate_launched_docker_run(aws_env): terminated_run = poll_for_finished_run(instance, run_id, timeout=30) terminated_run = instance.get_run_by_id(run_id) - assert terminated_run.status == DagsterRunStatus.CANCELED + assert terminated_run.status == DagsterRunStatus.CANCELED # pyright: ignore[reportOptionalMemberAccess] run_logs = instance.all_logs(run_id) @@ -296,7 +296,7 @@ def test_launch_docker_invalid_image(aws_env): run = instance.create_run_for_job( job_def=recon_job.get_definition(), - run_config=run_config, + run_config=run_config, # pyright: ignore[reportArgumentType] remote_job_origin=remote_job.get_remote_origin(), job_code_origin=remote_job.get_python_origin(), ) @@ -492,7 +492,7 @@ def _test_launch( run = instance.create_run_for_job( job_def=recon_job.get_definition(), - run_config=run_config, + run_config=run_config, # pyright: ignore[reportArgumentType] remote_job_origin=remote_job.get_remote_origin(), job_code_origin=recon_job.get_python_origin(), ) @@ -502,7 +502,7 @@ def _test_launch( if not terminate: poll_for_finished_run(instance, run.run_id, timeout=60) - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] else: start_time = time.time() @@ -526,7 +526,7 @@ def _test_launch( assert launcher.terminate(run.run_id) poll_for_finished_run(instance, run.run_id, timeout=60) - assert 
instance.get_run_by_id(run.run_id).status == DagsterRunStatus.CANCELED + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.CANCELED # pyright: ignore[reportOptionalMemberAccess] # termination is a no-op once run is finished assert not launcher.terminate(run.run_id) diff --git a/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launcher_and_executor.py b/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launcher_and_executor.py index ed15140508d6a..220a5e3a291d6 100644 --- a/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launcher_and_executor.py +++ b/python_modules/libraries/dagster-docker/dagster_docker_tests/test_launcher_and_executor.py @@ -107,7 +107,7 @@ def test_image_on_job(monkeypatch, aws_env, from_pending_repository, asset_selec for log in instance.all_logs(run.run_id): print(log) # noqa: T201 - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] @pytest.mark.integration @@ -180,7 +180,7 @@ def test_container_context_on_job(aws_env): for log in instance.all_logs(run.run_id): print(log) # noqa: T201 - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] @pytest.mark.integration @@ -202,7 +202,7 @@ def test_recovery(aws_env): find_local_test_image(docker_image) run_config = merge_dicts( - load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), + load_yaml_from_path(os.path.join(get_test_project_environments_path(), "env_s3.yaml")), # pyright: ignore[reportArgumentType] { "ops": { "multiply_the_word_slow": { @@ -245,19 +245,19 @@ def test_recovery(aws_env): start_time = time.time() while time.time() - start_time < 60: run = instance.get_run_by_id(run.run_id) - if 
run.status == DagsterRunStatus.STARTED: + if run.status == DagsterRunStatus.STARTED: # pyright: ignore[reportOptionalMemberAccess] break - assert run.status == DagsterRunStatus.STARTING + assert run.status == DagsterRunStatus.STARTING # pyright: ignore[reportOptionalMemberAccess] time.sleep(1) time.sleep(3) - instance.run_launcher._get_container( # noqa: SLF001 - instance.get_run_by_id(run.run_id) + instance.run_launcher._get_container( # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + instance.get_run_by_id(run.run_id) # pyright: ignore[reportOptionalMemberAccess] ).stop() - instance.resume_run(run.run_id, workspace, attempt_number=1) - poll_for_finished_run(instance, run.run_id, timeout=60) + instance.resume_run(run.run_id, workspace, attempt_number=1) # pyright: ignore[reportOptionalMemberAccess] + poll_for_finished_run(instance, run.run_id, timeout=60) # pyright: ignore[reportOptionalMemberAccess] - for log in instance.all_logs(run.run_id): + for log in instance.all_logs(run.run_id): # pyright: ignore[reportOptionalMemberAccess] print(str(log) + "\n") # noqa: T201 - assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS + assert instance.get_run_by_id(run.run_id).status == DagsterRunStatus.SUCCESS # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/libraries/dagster-duckdb-pandas/dagster_duckdb_pandas_tests/test_type_handler.py b/python_modules/libraries/dagster-duckdb-pandas/dagster_duckdb_pandas_tests/test_type_handler.py index 55202d98470bd..c2996ae98deee 100644 --- a/python_modules/libraries/dagster-duckdb-pandas/dagster_duckdb_pandas_tests/test_type_handler.py +++ b/python_modules/libraries/dagster-duckdb-pandas/dagster_duckdb_pandas_tests/test_type_handler.py @@ -456,7 +456,7 @@ def test_dynamic_partition(tmp_path, io_managers): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + 
instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -471,7 +471,7 @@ def test_dynamic_partition(tmp_path, io_managers): assert out_df["a"].tolist() == ["1", "1", "1"] duckdb_conn.close() - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-duckdb-polars/dagster_duckdb_polars_tests/test_type_handler.py b/python_modules/libraries/dagster-duckdb-polars/dagster_duckdb_polars_tests/test_type_handler.py index bc0dee8f66528..f916dc4087933 100644 --- a/python_modules/libraries/dagster-duckdb-polars/dagster_duckdb_polars_tests/test_type_handler.py +++ b/python_modules/libraries/dagster-duckdb-polars/dagster_duckdb_polars_tests/test_type_handler.py @@ -444,7 +444,7 @@ def test_dynamic_partition(tmp_path, io_managers): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -461,7 +461,7 @@ def test_dynamic_partition(tmp_path, io_managers): assert out_df["a"].to_list() == ["1", "1", "1"] duckdb_conn.close() - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-duckdb-pyspark/dagster_duckdb_pyspark_tests/test_type_handler.py b/python_modules/libraries/dagster-duckdb-pyspark/dagster_duckdb_pyspark_tests/test_type_handler.py index c78a852530645..3fbb7f8637ee4 100644 --- a/python_modules/libraries/dagster-duckdb-pyspark/dagster_duckdb_pyspark_tests/test_type_handler.py 
+++ b/python_modules/libraries/dagster-duckdb-pyspark/dagster_duckdb_pyspark_tests/test_type_handler.py @@ -411,7 +411,7 @@ def test_dynamic_partition(tmp_path, io_managers): with instance_for_test() as instance: resource_defs = {"io_manager": io_manager} - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], @@ -426,7 +426,7 @@ def test_dynamic_partition(tmp_path, io_managers): assert out_df["a"].tolist() == ["1", "1", "1"] duckdb_conn.close() - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned], diff --git a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/dlt/resource.py b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/dlt/resource.py index a72ac1714687b..0156fb2071f92 100644 --- a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/dlt/resource.py +++ b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt/dlt/resource.py @@ -50,7 +50,7 @@ def _cast_load_info_metadata(self, mapping: Mapping[Any, Any]) -> Mapping[Any, A """ try: # zoneinfo is python >= 3.9 - from zoneinfo import ZoneInfo # type: ignore + from zoneinfo import ZoneInfo casted_instance_types = (datetime, timezone, ZoneInfo) except: diff --git a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/conftest.py b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/conftest.py index 57ba7386d074f..3c5ffda6eda8c 100644 --- a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/conftest.py +++ b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/conftest.py @@ -26,8 +26,8 @@ def path_to_temp_sqlite_db(tmp_path): 
@pytest.fixture def sling_sqlite_resource(path_to_temp_sqlite_db): return SlingResource( - source_connection=SlingConnectionResource(name="file_source", type="file"), - target_connection=SlingConnectionResource( + source_connection=SlingConnectionResource(name="file_source", type="file"), # pyright: ignore[reportCallIssue] + target_connection=SlingConnectionResource( # pyright: ignore[reportCallIssue] name="sqlite_target", type="sqlite", connection_string=f"sqlite://{path_to_temp_sqlite_db}", diff --git a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/test_resource.py b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/test_resource.py index 0d2f0ba0b165b..46c378b1de890 100644 --- a/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/test_resource.py +++ b/python_modules/libraries/dagster-embedded-elt/dagster_embedded_elt_tests/sling_tests/test_resource.py @@ -10,7 +10,10 @@ def test_sling_resource_env_with_source_target(): name="duckdb_source", type="duckdb", connection_string="duckdb://localhost:5000" ) target = SlingConnectionResource( - name="postgres_target", type="postgres", host="abchost.com", port="420" + name="postgres_target", + type="postgres", + host="abchost.com", # pyright: ignore[reportCallIssue] + port="420", # pyright: ignore[reportCallIssue] ) sling_resource = SlingResource(connections=[source, target]) @@ -34,17 +37,17 @@ def test_sling_resource_env_with_connection_resources(): SlingConnectionResource( name="CLOUD_PRODUCTION", type="postgres", - host="CLOUD_PROD_READ_REPLICA_POSTGRES_HOST", - user="CLOUD_PROD_POSTGRES_USER", - database="dagster", + host="CLOUD_PROD_READ_REPLICA_POSTGRES_HOST", # pyright: ignore[reportCallIssue] + user="CLOUD_PROD_POSTGRES_USER", # pyright: ignore[reportCallIssue] + database="dagster", # pyright: ignore[reportCallIssue] ), SlingConnectionResource( name="SLING_DB", type="snowflake", - host="SNOWFLAKE_ACCOUNT", 
- user="SNOWFLAKE_SLING_USER", - password=EnvVar("SNOWFLAKE_SLING_PASSWORD"), - database="sling", + host="SNOWFLAKE_ACCOUNT", # pyright: ignore[reportCallIssue] + user="SNOWFLAKE_SLING_USER", # pyright: ignore[reportCallIssue] + password=EnvVar("SNOWFLAKE_SLING_PASSWORD"), # pyright: ignore[reportCallIssue] + database="sling", # pyright: ignore[reportCallIssue] ), ] diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py index aa595b58768e4..01936211cc2ed 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py @@ -203,8 +203,8 @@ def get_connector_sync_status(self, connector_id: str) -> Tuple[datetime, bool, failed_at = parser.parse(connector_details["failed_at"] or min_time_str) return ( - max(succeeded_at, failed_at), - succeeded_at > failed_at, + max(succeeded_at, failed_at), # pyright: ignore[reportReturnType] + succeeded_at > failed_at, # pyright: ignore[reportOperatorIssue] connector_details["status"]["sync_state"], ) diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran/translator.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran/translator.py index 9d36533dc67fb..3a35d335703b5 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran/translator.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran/translator.py @@ -83,7 +83,7 @@ def last_sync_completed_at(self) -> datetime: succeeded_at = parser.parse(self.succeeded_at or MIN_TIME_STR) failed_at = parser.parse(self.failed_at or MIN_TIME_STR) - return max(succeeded_at, failed_at) + return max(succeeded_at, failed_at) # pyright: ignore[reportReturnType] @property def is_last_sync_successful(self) -> bool: @@ -96,7 +96,7 @@ def is_last_sync_successful(self) -> bool: succeeded_at = parser.parse(self.succeeded_at or MIN_TIME_STR) failed_at = 
parser.parse(self.failed_at or MIN_TIME_STR) - return succeeded_at > failed_at + return succeeded_at > failed_at # pyright: ignore[reportOperatorIssue] def validate_syncable(self) -> bool: """Confirms that the connector can be sync. Will raise a Failure in the event that diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/experimental/test_resources.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/experimental/test_resources.py index 48da73164c512..477f107da4479 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/experimental/test_resources.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/experimental/test_resources.py @@ -93,7 +93,8 @@ def test_basic_resource_request( # Succeeded poll all_api_mocks.calls.reset() client.poll_sync( - connector_id=connector_id, previous_sync_completed_at=parser.parse(MIN_TIME_STR) + connector_id=connector_id, + previous_sync_completed_at=parser.parse(MIN_TIME_STR), # pyright: ignore[reportArgumentType] ) assert len(all_api_mocks.calls) == 1 @@ -104,7 +105,7 @@ def test_basic_resource_request( connector_id=connector_id, # The poll process will time out because the value of # `FivetranConnector.last_sync_completed_at` does not change in the test - previous_sync_completed_at=parser.parse(TEST_MAX_TIME_STR), + previous_sync_completed_at=parser.parse(TEST_MAX_TIME_STR), # pyright: ignore[reportArgumentType] poll_timeout=2, poll_interval=1, ) @@ -123,7 +124,7 @@ def test_basic_resource_request( with pytest.raises(Failure, match=f"Sync for connector '{connector_id}' failed!"): client.poll_sync( connector_id=connector_id, - previous_sync_completed_at=parser.parse(MIN_TIME_STR), + previous_sync_completed_at=parser.parse(MIN_TIME_STR), # pyright: ignore[reportArgumentType] poll_timeout=2, poll_interval=1, ) diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_asset_defs.py 
b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_asset_defs.py index 05d20c696e66b..375413cfae1ae 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_asset_defs.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_asset_defs.py @@ -104,7 +104,7 @@ def test_fivetran_asset_run(tables, infer_missing_tables, should_error, schema_p final_json = get_sample_connector_response(data=final_data) if schema_prefix: - final_json["data"]["config"]["schema_prefix"] = schema_prefix + final_json["data"]["config"]["schema_prefix"] = schema_prefix # pyright: ignore[reportOptionalSubscript,reportArgumentType,reportIndexIssue] # final state will be updated rsps.add(rsps.GET, api_prefix, json=final_json) @@ -141,7 +141,8 @@ def test_fivetran_asset_run(tables, infer_missing_tables, should_error, schema_p ] assert len(asset_materializations) == 4 if infer_missing_tables else 3 found_asset_keys = set( - mat.event_specific_data.materialization.asset_key for mat in asset_materializations + mat.event_specific_data.materialization.asset_key # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + for mat in asset_materializations ) if schema_prefix: assert found_asset_keys == { diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_managed_elements.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_managed_elements.py index 377818b56339d..88e5e45cde250 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_managed_elements.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_managed_elements.py @@ -162,7 +162,7 @@ def mock_connector(self, _method, url, _contents): "code": "Success", "data": { "id": connector_id, - "group_id": connector.destination.name, + "group_id": connector.destination.name, # pyright: ignore[reportOptionalMemberAccess] "service": connector.source_type, "service_version": 
1, "schema": connector.schema_name, diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_ops.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_ops.py index bd3157660aaab..56ec90e32253b 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_ops.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_ops.py @@ -62,7 +62,7 @@ def fivetran_sync_job(): result = fivetran_sync_job.execute_in_process() assert result.output_for_node("fivetran_sync_op") == FivetranOutput( - connector_details=get_sample_connector_response(data=final_data)["data"], + connector_details=get_sample_connector_response(data=final_data)["data"], # pyright: ignore[reportArgumentType] schema_config=get_complex_sample_connector_schema_config()["data"], ) asset_materializations = [ @@ -145,7 +145,7 @@ def fivetran_resync_job() -> None: with instance_for_test() as instance: result = fivetran_resync_job.execute_in_process(instance=instance) assert result.output_for_node("fivetran_resync_op") == FivetranOutput( - connector_details=get_sample_connector_response(data=final_data)["data"], + connector_details=get_sample_connector_response(data=final_data)["data"], # pyright: ignore[reportArgumentType] schema_config=get_complex_sample_connector_schema_config()["data"], ) asset_materializations = [ diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_resources.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_resources.py index 3330f4cdaad11..18c756a63e2c0 100644 --- a/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_resources.py +++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran_tests/test_resources.py @@ -177,7 +177,7 @@ def _mock_interaction(): if succeed_at_end: assert _mock_interaction() == FivetranOutput( - connector_details=get_sample_connector_response(data=final_data)["data"], + 
connector_details=get_sample_connector_response(data=final_data)["data"], # pyright: ignore[reportArgumentType] schema_config=get_complex_sample_connector_schema_config()["data"], ) else: @@ -309,7 +309,7 @@ def _mock_interaction(): if succeed_at_end: assert _mock_interaction() == FivetranOutput( - connector_details=get_sample_connector_response(data=final_data)["data"], + connector_details=get_sample_connector_response(data=final_data)["data"], # pyright: ignore[reportArgumentType] schema_config=get_complex_sample_connector_schema_config()["data"], ) else: diff --git a/python_modules/libraries/dagster-gcp-pandas/dagster_gcp_pandas_tests/bigquery/test_type_handler.py b/python_modules/libraries/dagster-gcp-pandas/dagster_gcp_pandas_tests/bigquery/test_type_handler.py index 67f007b3f11ed..2f528edf53c9d 100644 --- a/python_modules/libraries/dagster-gcp-pandas/dagster_gcp_pandas_tests/bigquery/test_type_handler.py +++ b/python_modules/libraries/dagster-gcp-pandas/dagster_gcp_pandas_tests/bigquery/test_type_handler.py @@ -457,7 +457,7 @@ def downstream_partitioned(df: pd.DataFrame) -> None: resource_defs = {"io_manager": io_manager, "fs_io": fs_io_manager} with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -472,7 +472,7 @@ def downstream_partitioned(df: pd.DataFrame) -> None: ) assert out_df["A"].tolist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], diff --git a/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/conftest.py b/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/conftest.py index 
d69f8bdba1075..be7ab7a708d4f 100644 --- a/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/conftest.py +++ b/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/conftest.py @@ -25,7 +25,7 @@ def gcs_jar_path(tmp_path_factory): @pytest.fixture(scope="module") def spark(gcs_jar_path): spark = ( - SparkSession.builder.config( + SparkSession.builder.config( # pyright: ignore[reportAttributeAccessIssue] key="spark.jars.packages", value=BIGQUERY_JARS, ) diff --git a/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/test_type_handler.py b/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/test_type_handler.py index ea3d336813048..5597b6ab4e13f 100644 --- a/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/test_type_handler.py +++ b/python_modules/libraries/dagster-gcp-pyspark/dagster_gcp_pyspark_tests/bigquery/test_type_handler.py @@ -505,7 +505,7 @@ def downstream_partitioned(df: DataFrame) -> None: resource_defs = {"io_manager": io_manager, "fs_io": fs_io_manager} with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -520,7 +520,7 @@ def downstream_partitioned(df: DataFrame) -> None: ) assert out_df["A"].tolist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], diff --git a/python_modules/libraries/dagster-gcp/dagster_gcp/bigquery/io_manager.py b/python_modules/libraries/dagster-gcp/dagster_gcp/bigquery/io_manager.py index c4d3f225fc185..674a04a650933 100644 --- 
a/python_modules/libraries/dagster-gcp/dagster_gcp/bigquery/io_manager.py +++ b/python_modules/libraries/dagster-gcp/dagster_gcp/bigquery/io_manager.py @@ -137,7 +137,7 @@ def my_table_a(my_table: pd.DataFrame) -> pd.DataFrame: """ @dagster_maintained_io_manager - @io_manager(config_schema=BigQueryIOManager.to_config_schema()) + @io_manager(config_schema=BigQueryIOManager.to_config_schema()) # pyright: ignore[reportArgumentType] def bigquery_io_manager(init_context): """I/O Manager for storing outputs in a BigQuery database. diff --git a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_ops.py b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_ops.py index 9b7098aac34e1..8ae99f53a4dd6 100644 --- a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_ops.py +++ b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_ops.py @@ -133,7 +133,7 @@ def test_config(): for config_fragment, error_message in configs_and_expected_errors: config = {"ops": {"test": {"config": {"query_job_config": config_fragment}}}} result = validate_config(env_type, config) - assert error_message in result.errors[0].message + assert error_message in result.errors[0].message # pyright: ignore[reportOptionalSubscript] configs_and_expected_validation_errors = [ ( @@ -149,7 +149,7 @@ def test_config(): for config_fragment, error_message in configs_and_expected_validation_errors: config = {"ops": {"test": {"config": {"query_job_config": config_fragment}}}} result = process_config(env_type, config) - assert error_message in result.errors[0].message + assert error_message in result.errors[0].message # pyright: ignore[reportOptionalSubscript] @pytest.mark.integration diff --git a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_resource.py b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_resource.py index 7c9175b408395..79bc04fb8f97c 100644 --- 
a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_resource.py +++ b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/bigquery_tests/test_resource.py @@ -136,7 +136,7 @@ def test_asset(bigquery: BigQueryResource) -> int: def test_fetch_last_updated_timestamps_no_table(): with pytest.raises(CheckError): fetch_last_updated_timestamps( - client={}, + client={}, # pyright: ignore[reportArgumentType] dataset_id="foo", table_ids=[], ) @@ -181,4 +181,4 @@ def retrieve_freshness(bigquery: BigQueryResource) -> ObserveResult: assert observation.tags["dagster/data_version"] == "foo" assert observation.metadata["freshness_timestamp"] is not None assert isinstance(observation.metadata["freshness_timestamp"], FloatMetadataValue) - assert start_timestamp < observation.metadata["freshness_timestamp"].value + assert start_timestamp < observation.metadata["freshness_timestamp"].value # pyright: ignore[reportOperatorIssue] diff --git a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py index 49525c2c97fff..0165aced3da20 100644 --- a/python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py +++ b/python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py @@ -74,9 +74,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -99,9 +99,9 @@ def easy(context): # Capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = 
log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR - stderr = log_data.stderr.decode("utf-8") + stderr = log_data.stderr.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] for expected in EXPECTED_LOGS: assert expected in stderr @@ -118,7 +118,7 @@ def easy(context): easy() - with open(os.environ.get("GOOGLE_APPLICATION_CREDENTIALS"), encoding="utf8") as f: + with open(os.environ.get("GOOGLE_APPLICATION_CREDENTIALS"), encoding="utf8") as f: # pyright: ignore[reportArgumentType] with tempfile.TemporaryDirectory() as temp_dir: with environ({"ENV_VAR": f.read(), "DAGSTER_HOME": temp_dir}): run_store = SqliteRunStorage.from_local(temp_dir) @@ -153,7 +153,7 @@ def easy(context): # capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR # Check GCS directly @@ -175,7 +175,7 @@ def easy(context): # capture API log_data = manager.get_log_data(log_key) - stdout = log_data.stdout.decode("utf-8") + stdout = log_data.stdout.decode("utf-8") # pyright: ignore[reportOptionalMemberAccess] assert stdout == HELLO_WORLD + SEPARATOR @@ -238,8 +238,8 @@ def easy(context): ) assert len(captured_log_entries) == 1 entry = captured_log_entries[0] - assert entry.dagster_event.logs_captured_data.external_stdout_url - assert entry.dagster_event.logs_captured_data.external_stderr_url + assert entry.dagster_event.logs_captured_data.external_stdout_url # pyright: ignore[reportOptionalMemberAccess] + assert entry.dagster_event.logs_captured_data.external_stderr_url # pyright: ignore[reportOptionalMemberAccess] @pytest.mark.integration @@ -251,7 +251,7 @@ def test_prefix_filter(gcs_bucket): time_str = get_current_datetime().strftime("%Y_%m_%d__%H_%M_%S") log_key = ["arbitrary", "log", "key", time_str] with manager.open_log_stream(log_key, 
ComputeIOType.STDERR) as write_stream: - write_stream.write("hello hello") + write_stream.write("hello hello") # pyright: ignore[reportOptionalMemberAccess] logs = ( storage.Client() @@ -275,7 +275,7 @@ def test_get_log_keys_for_log_key_prefix(gcs_bucket): def write_log_file(file_id: int, io_type: ComputeIOType): full_log_key = [*log_key_prefix, f"{file_id}"] with manager.open_log_stream(full_log_key, io_type) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) assert len(log_keys) == 0 @@ -284,7 +284,7 @@ def write_log_file(file_id: int, io_type: ComputeIOType): write_log_file(i, ComputeIOType.STDERR) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -296,11 +296,11 @@ def write_log_file(file_id: int, io_type: ComputeIOType): log_key = [*log_key_prefix, "4"] with manager.local_manager.open_log_stream(log_key, ComputeIOType.STDOUT) as f: - f.write("foo") + f.write("foo") # pyright: ignore[reportOptionalMemberAccess] manager.upload_to_cloud_storage(log_key, ComputeIOType.STDOUT) log_keys = manager.get_log_keys_for_log_key_prefix(log_key_prefix, io_type=ComputeIOType.STDERR) - assert sorted(log_keys) == [ + assert sorted(log_keys) == [ # pyright: ignore[reportArgumentType] [*log_key_prefix, "0"], [*log_key_prefix, "1"], [*log_key_prefix, "2"], @@ -326,13 +326,15 @@ def _return_mocked_blob(*args, **kwargs): blob_fn.side_effect = _return_mocked_blob with manager.open_log_stream(log_key, ComputeIOType.STDERR) as write_stream: - write_stream.write("hello hello") + write_stream.write("hello hello") # pyright: ignore[reportOptionalMemberAccess] # can read bytes log_data, _ = manager.get_log_data_for_type(log_key, 
ComputeIOType.STDERR, 0, None) + assert log_data assert log_data.decode("utf-8") == "hello hello" url = manager.download_url_for_type(log_key, ComputeIOType.STDERR) + assert url assert url.startswith("/logs") # falls back to local storage url diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/executor.py b/python_modules/libraries/dagster-k8s/dagster_k8s/executor.py index c8b0d7289d4dc..daffa21caadb1 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s/executor.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s/executor.py @@ -332,7 +332,7 @@ def check_step_health(self, step_handler_context: StepHandlerContext) -> CheckSt container_context = self._get_container_context(step_handler_context) status = self._api_client.get_job_status( - namespace=container_context.namespace, + namespace=container_context.namespace, # pyright: ignore[reportArgumentType] job_name=job_name, ) if not status: diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py b/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py index 02646e8e58f56..c6dd0b08fac92 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s/launcher.py @@ -400,7 +400,7 @@ def get_run_worker_debug_info( ) else: - job_debug_info = self._api_client.get_job_debug_info(job_name, namespace=namespace) + job_debug_info = self._api_client.get_job_debug_info(job_name, namespace=namespace) # pyright: ignore[reportArgumentType] full_msg = ( full_msg + "\n\n" @@ -419,7 +419,7 @@ def check_run_worker_health(self, run: DagsterRun): ) try: status = self._api_client.get_job_status( - namespace=container_context.namespace, + namespace=container_context.namespace, # pyright: ignore[reportArgumentType] job_name=job_name, ) except Exception: diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/models.py b/python_modules/libraries/dagster-k8s/dagster_k8s/models.py index 11ae720fb9afd..7be818bbef410 100644 --- 
a/python_modules/libraries/dagster-k8s/dagster_k8s/models.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s/models.py @@ -69,7 +69,7 @@ def _k8s_parse_value(data: Any, classname: str, attr_name: str) -> Any: elif klass == object: return data elif klass == datetime.date: - return parse(data).date() + return parse(data).date() # pyright: ignore[reportAttributeAccessIssue] elif klass == datetime.datetime: return parse(data) else: diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s/ops/k8s_job_op.py b/python_modules/libraries/dagster-k8s/dagster_k8s/ops/k8s_job_op.py index 9d92830919c95..b016a8f2789da 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s/ops/k8s_job_op.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s/ops/k8s_job_op.py @@ -371,7 +371,10 @@ def execute_k8s_job( watch = kubernetes.watch.Watch() # consider moving in to api_client api_client.wait_for_pod( - pod_to_watch, namespace, wait_timeout=timeout, start_time=start_time + pod_to_watch, + namespace, # pyright: ignore[reportArgumentType] + wait_timeout=timeout, + start_time=start_time, # pyright: ignore[reportArgumentType] ) log_stream = watch.stream( diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_executor.py b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_executor.py index 91591ab251927..e5dead96d63fe 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_executor.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_executor.py @@ -205,7 +205,7 @@ def _get_executor(instance, job_def, executor_config=None): InitExecutorContext( job=job_def, executor_def=k8s_job_executor, - executor_config=process_result.value, + executor_config=process_result.value, # type: ignore instance=instance, ) ) @@ -239,7 +239,7 @@ def _step_handler_context(job_def, dagster_run, instance, executor): return StepHandlerContext( instance=instance, plan_context=plan_context, - 
steps=execution_plan.steps, + steps=execution_plan.steps, # pyright: ignore[reportArgumentType] execute_step_args=execute_step_args, ) @@ -324,7 +324,7 @@ def test_executor_init( # env vars from both launcher and the executor - assert executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + assert executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.container_config["env"] == [ {"name": "BAR_TEST", "value": "bar"}, @@ -332,14 +332,14 @@ def test_executor_init( ] assert ( - executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.container_config["resources"] == resources ) assert ( - executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.pod_spec_config["scheduler_name"] == "my-scheduler" @@ -370,16 +370,16 @@ def test_executor_init_container_context( # env vars from both launcher and the executor - assert executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + assert executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.container_config["env"] == [ {"name": "BAR_TEST", "value": "bar"}, {"name": "BAZ_TEST", "value": "baz_val"}, {"name": "FOO_TEST", "value": "foo"}, ] - assert executor._max_concurrent == 4 # noqa: SLF001 + assert executor._max_concurrent == 4 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] assert ( - executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + 
executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.container_config["resources"] == python_origin_with_container_context.repository_origin.container_context["k8s"][ @@ -388,7 +388,7 @@ def test_executor_init_container_context( ) assert ( - executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 + executor._step_handler._get_container_context( # noqa: SLF001 # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ).run_k8s_config.pod_spec_config["scheduler_name"] == "my-other-scheduler" @@ -664,7 +664,7 @@ def test_step_raw_k8s_config_inheritance( executor=executor, ) - container_context = executor._step_handler._get_container_context( # noqa: SLF001 + container_context = executor._step_handler._get_container_context( # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ) @@ -725,7 +725,7 @@ def test_per_step_k8s_config(k8s_run_launcher_instance, python_origin_with_conta executor=executor, ) - container_context = executor._step_handler._get_container_context( # noqa: SLF001 + container_context = executor._step_handler._get_container_context( # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] step_handler_context ) diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_job.py b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_job.py index 532c8cdb78687..ea99ba7eed692 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_job.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_job.py @@ -798,7 +798,9 @@ def user_defined_k8s_env_tags_graph(): expected_image = "different_image:tag" user_defined_k8s_config = get_user_defined_k8s_config( user_defined_k8s_env_tags_graph.to_job( - tags={USER_DEFINED_K8S_CONFIG_KEY: {"container_config": {"image": expected_image}}} + tags={ + 
USER_DEFINED_K8S_CONFIG_KEY: {"container_config": {"image": expected_image}}, + } ).tags ) diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_launcher.py b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_launcher.py index e7c016fd70fbb..45785857c799d 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_launcher.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_launcher.py @@ -194,7 +194,7 @@ def test_launcher_with_container_context(kubeconfig_file): k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" + assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. mock_method_calls = mock_k8s_client_batch_api.method_calls @@ -224,7 +224,7 @@ def test_launcher_with_container_context(kubeconfig_file): assert ( args == ExecuteRunArgs( - job_origin=run.job_code_origin, + job_origin=run.job_code_origin, # pyright: ignore[reportArgumentType] run_id=run.run_id, instance_ref=instance.get_ref(), set_exit_code_on_failure=None, @@ -360,7 +360,7 @@ def test_launcher_with_k8s_config(kubeconfig_file): k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" + assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. 
mock_method_calls = mock_k8s_client_batch_api.method_calls @@ -455,7 +455,7 @@ def test_user_defined_k8s_config_in_run_tags(kubeconfig_file): k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == expected_image + assert updated_run.tags[DOCKER_IMAGE_TAG] == expected_image # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. mock_method_calls = mock_k8s_client_batch_api.method_calls @@ -482,7 +482,7 @@ def test_user_defined_k8s_config_in_run_tags(kubeconfig_file): assert ( args == ExecuteRunArgs( - job_origin=run.job_code_origin, + job_origin=run.job_code_origin, # pyright: ignore[reportArgumentType] run_id=run.run_id, instance_ref=instance.get_ref(), set_exit_code_on_failure=None, @@ -544,7 +544,7 @@ def test_raise_on_error(kubeconfig_file): assert ( args == ExecuteRunArgs( - job_origin=run.job_code_origin, + job_origin=run.job_code_origin, # pyright: ignore[reportArgumentType] run_id=run.run_id, instance_ref=instance.get_ref(), set_exit_code_on_failure=True, @@ -596,7 +596,7 @@ def test_no_postgres(kubeconfig_file): k8s_run_launcher.launch_run(LaunchRunContext(run, workspace)) updated_run = instance.get_run_by_id(run.run_id) - assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" + assert updated_run.tags[DOCKER_IMAGE_TAG] == "fake_job_image" # pyright: ignore[reportOptionalMemberAccess] # Check that user defined k8s config was passed down to the k8s job. 
mock_method_calls = mock_k8s_client_batch_api.method_calls @@ -797,6 +797,6 @@ def test_get_run_worker_debug_info(kubeconfig_file): debug_info = k8s_run_launcher.get_run_worker_debug_info(started_run) running_job_name = get_job_name_from_run_id(started_run.run_id) - assert f"Debug information for job {running_job_name}" in debug_info - assert "Job status:" in debug_info - assert "Testing: test message" in debug_info + assert f"Debug information for job {running_job_name}" in debug_info # pyright: ignore[reportOperatorIssue] + assert "Job status:" in debug_info # pyright: ignore[reportOperatorIssue] + assert "Testing: test message" in debug_info # pyright: ignore[reportOperatorIssue] diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_pipes.py b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_pipes.py index 115c6d331b9f6..62b37515d07c2 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_pipes.py +++ b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_pipes.py @@ -450,7 +450,7 @@ def test_namespace_autodetect_from_kubeconfig_active_context(kubeconfig_with_nam def test_pipes_client_namespace_autodetection_from_secret(tmpdir, kubeconfig_dummy): namespace_secret_path = Path(tmpdir) / "namespace_secret" namespace_secret_path.write_text("my-namespace-from-secret") - got = _detect_current_namespace(kubeconfig_with_namespace, namespace_secret_path) + got = _detect_current_namespace(kubeconfig_with_namespace, namespace_secret_path) # pyright: ignore[reportArgumentType] assert got == "my-namespace-from-secret" diff --git a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_resource_tags.py b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_resource_tags.py index 4eaa11d4ec476..65109eb566650 100644 --- a/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_resource_tags.py +++ 
b/python_modules/libraries/dagster-k8s/dagster_k8s_tests/unit_tests/test_resource_tags.py @@ -93,7 +93,7 @@ def blank(_): @job def k8s_ready(): blank.tag( - { + { # pyright: ignore[reportArgumentType] USER_DEFINED_K8S_CONFIG_KEY: { "container_config": { "resources": { @@ -108,7 +108,7 @@ def k8s_ready(): plan = create_execution_plan(k8s_ready) step = next(iter(plan.step_dict.values())) - user_defined_k8s_config = get_user_defined_k8s_config(step.tags) + user_defined_k8s_config = get_user_defined_k8s_config(step.tags) # pyright: ignore[reportArgumentType] assert user_defined_k8s_config.container_config assert user_defined_k8s_config.container_config["resources"] @@ -165,7 +165,7 @@ def k8s_ready(): plan = create_execution_plan(k8s_ready, known_state=known_state) emit_step = plan.get_step_by_key(emit.name) - user_defined_k8s_config = get_user_defined_k8s_config(emit_step.tags) + user_defined_k8s_config = get_user_defined_k8s_config(emit_step.tags) # pyright: ignore[reportArgumentType] assert user_defined_k8s_config.container_config assert user_defined_k8s_config.container_config["resources"] @@ -180,7 +180,7 @@ def k8s_ready(): for mapping_key in range(3): multiply_inputs_step = plan.get_step_by_key(f"{multiply_inputs.name}[{mapping_key}]") dynamic_step_user_defined_k8s_config = get_user_defined_k8s_config( - multiply_inputs_step.tags + multiply_inputs_step.tags # pyright: ignore[reportArgumentType] ) assert dynamic_step_user_defined_k8s_config.container_config diff --git a/python_modules/libraries/dagster-mlflow/dagster_mlflow_tests/test_resources.py b/python_modules/libraries/dagster-mlflow/dagster_mlflow_tests/test_resources.py index a25773c2e78e1..1453aee661090 100644 --- a/python_modules/libraries/dagster-mlflow/dagster_mlflow_tests/test_resources.py +++ b/python_modules/libraries/dagster-mlflow/dagster_mlflow_tests/test_resources.py @@ -195,7 +195,7 @@ def test_cleanup_on_error( # Given: a context passed into the __init__ for MlFlow mlf = MlFlow(context) # 
When: a run is started - mlf.start_run() + mlf.start_run() # pyright: ignore[reportAttributeAccessIssue] with patch("sys.exc_info", return_value=[0, any_error]): # When: cleanup_on_error is called @@ -301,7 +301,7 @@ def test_setup(mock_atexit, context): # - _set_all_tags is called once mock_set_all_tags.assert_called_once() # - atexit.unregister is called with mlf.end_run as an argument - mock_atexit.assert_called_once_with(mlf.end_run) + mock_atexit.assert_called_once_with(mlf.end_run) # pyright: ignore[reportAttributeAccessIssue] @patch("atexit.unregister") @@ -330,7 +330,7 @@ def test_setup_with_passed_run_id(mock_atexit, context): # - _set_all_tags is called once mock_set_all_tags.assert_called_once() # - atexit.unregister is called with mlf.end_run as an argument - mock_atexit.assert_called_once_with(mlf.end_run) + mock_atexit.assert_called_once_with(mlf.end_run) # pyright: ignore[reportAttributeAccessIssue] @pytest.mark.parametrize("run_id", [None, 0, "12"]) diff --git a/python_modules/libraries/dagster-msteams/dagster_msteams_tests/test_sensors.py b/python_modules/libraries/dagster-msteams/dagster_msteams_tests/test_sensors.py index 97086a469462d..643ee2a2d8f0e 100644 --- a/python_modules/libraries/dagster-msteams/dagster_msteams_tests/test_sensors.py +++ b/python_modules/libraries/dagster-msteams/dagster_msteams_tests/test_sensors.py @@ -10,7 +10,7 @@ def test_teams_run_failure_sensor_def(): sensor_name = "my_failure_sensor" my_sensor = make_teams_on_run_failure_sensor( - hook_url=os.getenv("TEAMS_WEBHOOK_URL"), + hook_url=os.getenv("TEAMS_WEBHOOK_URL"), # pyright: ignore[reportArgumentType] name=sensor_name, ) assert my_sensor.name == sensor_name diff --git a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/compat_tests/test_back_compat.py b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/compat_tests/test_back_compat.py index db71c2ce9fbfd..0cf691fb6606d 100644 --- 
a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/compat_tests/test_back_compat.py +++ b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/compat_tests/test_back_compat.py @@ -73,8 +73,8 @@ def test_0_13_17_mysql_convert_float_cols(conn_string): instance = DagsterInstance.from_config(tempdir) record = instance.get_run_records(limit=1)[0] - assert int(record.start_time) == 1643760000 - assert int(record.end_time) == 1643760000 + assert int(record.start_time) == 1643760000 # pyright: ignore[reportArgumentType] + assert int(record.end_time) == 1643760000 # pyright: ignore[reportArgumentType] instance.upgrade() @@ -85,8 +85,8 @@ def test_0_13_17_mysql_convert_float_cols(conn_string): instance.reindex() record = instance.get_run_records(limit=1)[0] - assert int(record.start_time) == 1643788829 - assert int(record.end_time) == 1643788834 + assert int(record.start_time) == 1643788829 # pyright: ignore[reportArgumentType] + assert int(record.end_time) == 1643788834 # pyright: ignore[reportArgumentType] def test_instigators_table_backcompat(conn_string): @@ -105,11 +105,11 @@ def test_instigators_table_backcompat(conn_string): instance = DagsterInstance.from_config(tempdir) - assert not instance.schedule_storage.has_instigators_table() + assert not instance.schedule_storage.has_instigators_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] instance.upgrade() - assert instance.schedule_storage.has_instigators_table() + assert instance.schedule_storage.has_instigators_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_asset_observation_backcompat(conn_string): @@ -138,7 +138,7 @@ def asset_job(): with DagsterInstance.from_config(tempdir) as instance: storage = instance._event_storage - assert not instance.event_log_storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not instance.event_log_storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: 
ignore[reportAttributeAccessIssue] asset_job.execute_in_process(instance=instance) assert storage.has_asset_key(AssetKey(["a"])) @@ -166,27 +166,27 @@ def test_jobs_selector_id_migration(conn_string): # runs the required data migrations instance.upgrade() - assert instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) + assert instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] legacy_count = len(instance.all_instigator_state()) - migrated_instigator_count = instance.schedule_storage.execute( + migrated_instigator_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(InstigatorsTable) )[0][0] assert migrated_instigator_count == legacy_count - migrated_job_count = instance.schedule_storage.execute( + migrated_job_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTable) .where(JobTable.c.selector_id.isnot(None)) )[0][0] assert migrated_job_count == legacy_count - legacy_tick_count = instance.schedule_storage.execute( + legacy_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(JobTickTable) )[0][0] assert legacy_tick_count > 0 # tick migrations are optional - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -196,7 +196,7 @@ def test_jobs_selector_id_migration(conn_string): # run the optional migrations instance.reindex() - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = 
instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -407,21 +407,21 @@ def test_add_primary_keys(conn_string): instance.upgrade() assert "id" in get_columns(instance, "kvs") - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] kvs_id_count = _get_table_row_count( instance.run_storage, KeyValueStoreTable, with_non_null_id=True ) assert kvs_id_count == kvs_row_count assert "id" in get_columns(instance, "instance_info") - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] instance_info_id_count = _get_table_row_count( instance.run_storage, InstanceInfo, with_non_null_id=True ) assert instance_info_id_count == instance_info_row_count assert "id" in get_columns(instance, "daemon_heartbeats") - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] daemon_heartbeats_id_count = _get_table_row_count( instance.run_storage, DaemonHeartbeatsTable, with_non_null_id=True ) @@ -461,25 +461,25 @@ def _assert_autoincrement_id(conn): target_fd.write(template) with DagsterInstance.from_config(tempdir) as instance: - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 _assert_autoincrement_id(conn) - with instance.event_log_storage.index_connection() as conn: + with instance.event_log_storage.index_connection() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 _assert_autoincrement_id(conn) - with instance.schedule_storage.connect() as conn: + with instance.schedule_storage.connect() as conn: # pyright: 
ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 _assert_autoincrement_id(conn) run_bigint_migration(instance) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 _assert_autoincrement_id(conn) - with instance.event_log_storage.index_connection() as conn: + with instance.event_log_storage.index_connection() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 _assert_autoincrement_id(conn) - with instance.schedule_storage.connect() as conn: + with instance.schedule_storage.connect() as conn: # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 _assert_autoincrement_id(conn) @@ -530,7 +530,7 @@ def test_add_backfill_id_column(conn_string): assert len(instance.get_runs(filters=RunsFilter(exclude_subruns=True))) == 2 instance.upgrade() - assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) + assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) # pyright: ignore[reportAttributeAccessIssue] assert new_columns <= get_columns(instance, "runs") run_not_in_backfill_post_migration = instance.run_storage.add_run( @@ -552,7 +552,7 @@ def test_add_backfill_id_column(conn_string): backfill_ids = { row["run_id"]: row["backfill_id"] - for row in instance._run_storage.fetchall( + for row in instance._run_storage.fetchall( # pyright: ignore[reportAttributeAccessIssue] db_select([RunsTable.c.run_id, RunsTable.c.backfill_id]).select_from(RunsTable) ) } @@ -647,7 +647,7 @@ def test_add_backfill_tags(conn_string): ) instance.add_backfill(after_migration) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] rows = conn.execute( db_select( [ @@ -663,7 +663,7 @@ def 
test_add_backfill_tags(conn_string): assert ids_to_tags[after_migration.backfill_id] == after_migration.tags # filtering by tags works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) @@ -727,7 +727,7 @@ def test_add_bulk_actions_job_name_column(conn_string): # filtering pre-migration relies on filtering runs, so add a run with the expected job_name pre_migration_run = instance.run_storage.add_run( DagsterRun( - job_name=before_migration.job_name, + job_name=before_migration.job_name, # pyright: ignore[reportArgumentType] run_id=make_new_run_id(), tags={BACKFILL_ID_TAG: before_migration.backfill_id}, status=DagsterRunStatus.NOT_STARTED, @@ -765,7 +765,7 @@ def test_add_bulk_actions_job_name_column(conn_string): ) instance.add_backfill(after_migration) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] rows = conn.execute( db_select([BulkActionsTable.c.key, BulkActionsTable.c.job_name]) ).fetchall() @@ -775,7 +775,7 @@ def test_add_bulk_actions_job_name_column(conn_string): assert ids_to_job_name[after_migration.backfill_id] == after_migration.job_name # filtering by job_name works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) diff --git a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_event_log.py 
b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_event_log.py index 0caf7f6bd1765..06329a9ad7ea3 100644 --- a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_event_log.py +++ b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_event_log.py @@ -143,4 +143,4 @@ def test_load_from_config(self, conn_string): with instance_for_test(overrides=yaml.safe_load(explicit_cfg)) as explicit_instance: from_explicit = explicit_instance._event_storage # noqa: SLF001 - assert from_url.mysql_url == from_explicit.mysql_url + assert from_url.mysql_url == from_explicit.mysql_url # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_instance.py b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_instance.py index 248925ec3f0f3..c1bbdf1a87d0f 100644 --- a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_instance.py +++ b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_instance.py @@ -91,7 +91,7 @@ def test_connection_leak(conn_string): # This includes a number of internal connections, so just ensure it did not scale # with number of instances - assert row[0] < num_instances + assert row[0] < num_instances # pyright: ignore[reportOperatorIssue,reportOptionalSubscript] for copy in copies: copy.dispose() @@ -113,7 +113,7 @@ def test_load_instance(conn_string): file_relative_path(__file__, "../dagster_mysql/__init__.py") ) with engine.connect() as conn: - stamp_alembic_rev(alembic_config, conn, rev=None) + stamp_alembic_rev(alembic_config, conn, rev=None) # pyright: ignore[reportArgumentType] # Now load from scratch, verify it loads without errors with instance_for_test(overrides=yaml.safe_load(full_mysql_config(hostname, port))): @@ -136,14 +136,14 @@ def test_statement_timeouts(conn_string): # ensure migration error is not raised by being up to date instance.upgrade() - with pytest.raises(db.exc.OperationalError, 
match="QueryCanceled"): - with instance._run_storage.connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._run_storage.connect() as conn: # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] conn.execute(db.text("select sleep(1)")).fetchone() - with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): - with instance._event_storage.connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._event_storage.connect() as conn: # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] conn.execute(db.text("select sleep(1)")).fetchone() - with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): - with instance._schedule_storage.connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._schedule_storage.connect() as conn: # noqa: SLF001 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] conn.execute(db.text("select sleep(1)")).fetchone() diff --git a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_run_storage.py b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_run_storage.py index 7b3d8e5c079c2..bae3182c2bbe3 100644 --- a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_run_storage.py +++ b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_run_storage.py @@ -90,11 +90,11 @@ def test_load_from_config(self, conn_string): overrides=yaml.safe_load(explicit_cfg) ) as from_explicit_instance: assert ( - from_url_instance._run_storage.mysql_url # noqa: SLF001 - == from_explicit_instance._run_storage.mysql_url # noqa: SLF001 + from_url_instance._run_storage.mysql_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + == 
from_explicit_instance._run_storage.mysql_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) with instance_for_test(overrides=yaml.safe_load(env_cfg)) as from_env_instance: assert ( - from_url_instance._run_storage.mysql_url # noqa: SLF001 - == from_env_instance._run_storage.mysql_url # noqa: SLF001 + from_url_instance._run_storage.mysql_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + == from_env_instance._run_storage.mysql_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) diff --git a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_wait_timeout.py b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_wait_timeout.py index cd0c3a633fc20..cdbe26b2c076e 100644 --- a/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_wait_timeout.py +++ b/python_modules/libraries/dagster-mysql/dagster_mysql_tests/test_wait_timeout.py @@ -21,7 +21,7 @@ def retry_connect(conn_string: str, num_retries: int = 5, pool_recycle=-1): def test_pool_recycle_greater_than_wait_timeout(conn_string): - with pytest.raises(db.exc.OperationalError): + with pytest.raises(db.exc.OperationalError): # pyright: ignore[reportAttributeAccessIssue] retry_connect(conn_string) diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas/constraints.py b/python_modules/libraries/dagster-pandas/dagster_pandas/constraints.py index d82ad880f84de..3e333a39d7f3a 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas/constraints.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas/constraints.py @@ -189,7 +189,7 @@ def as_dagster_type(self, *args, **kwargs): return DagsterType( name=self.name, description=f"A Pandas DataFrame with the following validation: {self.description}", - type_check_fn=lambda x: self.validate(x, *args), + type_check_fn=lambda x: self.validate(x, *args), # pyright: ignore[reportArgumentType] **kwargs, ) @@ -524,10 +524,10 @@ def validation_fn(data, *args, **kwargs): result 
= new_validator.validate( DataFrame(data[column]), column, *args, **kwargs ) - result_val = result.success + result_val = result.success # pyright: ignore[reportOptionalMemberAccess] if result_val: continue - result_dict = result.metadata[CONSTRAINT_METADATA_KEY].data + result_dict = result.metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] truthparam = truthparam and result_val for key in result_dict.keys(): if "constraint" not in key: @@ -590,7 +590,7 @@ def __init__( fn_and_columns_dict, resulting_exception, raise_or_typecheck=raise_or_typecheck, - type_for_internal=ColumnAggregateConstraintWithMetadata, + type_for_internal=ColumnAggregateConstraintWithMetadata, # pyright: ignore[reportArgumentType] name=name, ) @@ -664,7 +664,7 @@ def nvalidator(val): nval = non_null_validation(val) return origval[0] and nval[0], {} - nvalidator.__doc__ += " and ensures no values are null" + nvalidator.__doc__ += " and ensures no values are null" # pyright: ignore[reportOperatorIssue] return nvalidator diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas/data_frame.py b/python_modules/libraries/dagster-pandas/dagster_pandas/data_frame.py index 03f5e95200e60..a8bbbc925293f 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas/data_frame.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas/data_frame.py @@ -97,7 +97,7 @@ def _build_column_header(column_name, constraints): for constraint in constraints: if isinstance(constraint, ColumnDTypeInSetConstraint): dtypes_tuple = tuple(constraint.expected_dtype_set) - return header + f": `{dtypes_tuple if len(dtypes_tuple) > 1 else dtypes_tuple[0]}`" + return header + f": `{dtypes_tuple if len(dtypes_tuple) > 1 else dtypes_tuple[0]}`" # pyright: ignore[reportGeneralTypeIssues] elif isinstance(constraint, ColumnDTypeFnConstraint): return header + f": Validator `{constraint.type_fn.__name__}`" return header @@ -189,7 +189,7 @@ def 
_dagster_type_check(_, value): return TypeCheck( success=True, - metadata=_execute_summary_stats(name, value, metadata_fn) if metadata_fn else None, + metadata=_execute_summary_stats(name, value, metadata_fn) if metadata_fn else None, # pyright: ignore[reportArgumentType] ) return DagsterType( diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas/validation.py b/python_modules/libraries/dagster-pandas/dagster_pandas/validation.py index f9f4b9f7c29c9..0be05bc5bc80f 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas/validation.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas/validation.py @@ -69,7 +69,7 @@ def validate(self, dataframe): ) else: for constraint in self.constraints: - constraint.validate(dataframe, self.name) + constraint.validate(dataframe, self.name) # pyright: ignore[reportAttributeAccessIssue] @staticmethod def exists(name, non_nullable=False, unique=False, ignore_missing_vals=False, is_required=None): diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/pandas_hello_world/test_pandas_hello_world.py b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/pandas_hello_world/test_pandas_hello_world.py index 48d6b45945551..d9d2024367200 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/pandas_hello_world/test_pandas_hello_world.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/pandas_hello_world/test_pandas_hello_world.py @@ -89,4 +89,4 @@ def test_cli_execute_failure(): os.chdir(cwd) assert len(failures) == 1 - assert "I am a programmer and I make error" in failures[0].step_failure_data.error.cause.message + assert "I am a programmer and I make error" in failures[0].step_failure_data.error.cause.message # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_data_frame.py b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_data_frame.py index 
784c238fdc64d..0b2f2e1c578bc 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_data_frame.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_data_frame.py @@ -64,7 +64,7 @@ def basic_graph(): assert result.success for event in result.all_node_events: if event.event_type_value == "STEP_OUTPUT": - mock_df_output_metadata = event.event_specific_data.type_check_data.metadata + mock_df_output_metadata = event.event_specific_data.type_check_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(mock_df_output_metadata) == 1 assert "max_pid" in mock_df_output_metadata @@ -142,7 +142,7 @@ def test_execute_summary_stats_null_function(): lambda value: {"qux": MetadataValue.text("baz")}, ) assert len(metadata) == 1 - assert metadata["qux"] == MetadataValue.text("baz") + assert metadata["qux"] == MetadataValue.text("baz") # pyright: ignore[reportCallIssue,reportArgumentType] def test_execute_summary_stats_error(): @@ -277,7 +277,7 @@ def basic_graph(): assert result.success for event in result.all_node_events: if event.event_type_value == "STEP_OUTPUT": - mock_df_output_metadata = event.event_specific_data.type_check_data.metadata + mock_df_output_metadata = event.event_specific_data.type_check_data.metadata # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(mock_df_output_metadata) == 1 assert "max_pid" in mock_df_output_metadata diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_metadata_constraints.py b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_metadata_constraints.py index c88d5adf1d3a0..5751aa3d9eb03 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_metadata_constraints.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_metadata_constraints.py @@ -42,7 +42,7 @@ def basic_validation_function(inframe): def test_failed_basic(): - assert not 
basic_confirmation_function.validate([]).success + assert not basic_confirmation_function.validate([]).success # pyright: ignore[reportOptionalMemberAccess] def test_basic(): @@ -50,25 +50,25 @@ def test_basic(): def test_failed_multi(): - mul_val = basic_multi_constraint.validate([]).metadata[CONSTRAINT_METADATA_KEY].data + mul_val = basic_multi_constraint.validate([]).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert mul_val["expected"] == {"basic_validation_function": "a DataFrame"} assert mul_val["actual"] == {"basic_validation_function": "a list"} def test_success_multi(): mul_val = basic_multi_constraint.validate(DataFrame()) - assert mul_val.success is True - assert mul_val.metadata == {} + assert mul_val.success is True # pyright: ignore[reportOptionalMemberAccess] + assert mul_val.metadata == {} # pyright: ignore[reportOptionalMemberAccess] def test_failed_strict(): strict_column = StrictColumnsWithMetadata(["base_test"], raise_or_typecheck=False) - assert not strict_column.validate(DataFrame()).success + assert not strict_column.validate(DataFrame()).success # pyright: ignore[reportOptionalMemberAccess] def test_successful_strict(): strict_column = StrictColumnsWithMetadata([], raise_or_typecheck=False) - assert strict_column.validate(DataFrame()).success + assert strict_column.validate(DataFrame()).success # pyright: ignore[reportOptionalMemberAccess] def test_column_constraint(): @@ -82,7 +82,7 @@ def column_num_validation_function(value): ColumnWithMetadataException, raise_or_typecheck=False, ) - val = column_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data + val = column_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert {"bar": ["row 0"], "baz": ["row 1"]} == val["offending"] assert {"bar": ["a"], "baz": ["a"]} == val["actual"] @@ -98,7 +98,7 @@ def 
column_num_validation_function(value): ColumnWithMetadataException, raise_or_typecheck=False, ) - val = column_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data + val = column_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert {"foo": ["row 0", "row 1"], "bar": ["row 1"], "baz": ["row 0"]} == val["offending"] assert {"foo": [1, 2], "bar": [2], "baz": [1]} == val["actual"] @@ -119,7 +119,7 @@ def col_val_two(value): ColumnWithMetadataException, raise_or_typecheck=False, ) - val = column_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data + val = column_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert { "bar": { "col_val_two": "values less than 2.", @@ -153,7 +153,7 @@ def column_mean_validation_function(data): ConstraintWithMetadataException, raise_or_typecheck=False, ) - val = aggregate_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data + val = aggregate_val.validate(df, *df.columns).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert ["foo"] == val["offending"] assert [1, 2] == val["actual"]["foo"] @@ -179,7 +179,7 @@ def column_val_2(data): ConstraintWithMetadataException, raise_or_typecheck=False, ) - val = aggregate_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data + val = aggregate_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert val["expected"] == { "bar": {"column_val_2": "Checks column mean equal to 1.5."}, "foo": {"column_val_1": "Checks column mean equal to 1."}, @@ -193,8 +193,8 @@ def column_val_2(data): def test_range_constraint(): df = DataFrame({"foo": [1, 2], "bar": [3, 2], "baz": [1, 4]}) range_val = ColumnRangeConstraintWithMetadata(1, 2.5, raise_or_typecheck=False) - 
val = range_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data + val = range_val.validate(df).metadata[CONSTRAINT_METADATA_KEY].data # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] assert {"bar": ["row 0"], "baz": ["row 1"]} == val["offending"] assert {"bar": [3], "baz": [4]} == val["actual"] range_val = ColumnRangeConstraintWithMetadata(raise_or_typecheck=False) - assert range_val.validate(df).success + assert range_val.validate(df).success # pyright: ignore[reportOptionalMemberAccess] diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_pandas_metadata.py b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_pandas_metadata.py index e483015bfbb2b..7c4eb9b9986e6 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_pandas_metadata.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_pandas_metadata.py @@ -32,4 +32,4 @@ def basic_graph(): metadata = input_event.step_input_data.type_check_data.metadata assert metadata["row_count"] == MetadataValue.text("2") - assert metadata["metadata"].data["columns"] == ["num1", "num2"] + assert metadata["metadata"].data["columns"] == ["num1", "num2"] # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_structured_df_types.py b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_structured_df_types.py index a1b346db6efd8..a7e18364cdd09 100644 --- a/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_structured_df_types.py +++ b/python_modules/libraries/dagster-pandas/dagster_pandas_tests/test_structured_df_types.py @@ -90,14 +90,14 @@ def basic_graph(): result = basic_graph.execute_in_process(raise_on_error=False) output = next(item for item in result.all_node_events if item.is_successful_output) - output_data = output.event_specific_data.type_check_data - output_metadata = output_data.metadata + output_data = 
output.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + output_metadata = output_data.metadata # pyright: ignore[reportOptionalMemberAccess] assert len(output_metadata) == 1 - column_const_data = output_metadata["columns-constraint-metadata"].data + column_const_data = output_metadata["columns-constraint-metadata"].data # pyright: ignore[reportAttributeAccessIssue] assert column_const_data["expected"] == { "foo": { - "in_range_validation_fn": in_range_validator.__doc__.strip(), - "dtype_in_set_validation_fn": dtype_is_num_validator.__doc__.strip(), + "in_range_validation_fn": in_range_validator.__doc__.strip(), # pyright: ignore[reportOptionalMemberAccess] + "dtype_in_set_validation_fn": dtype_is_num_validator.__doc__.strip(), # pyright: ignore[reportOptionalMemberAccess] } } assert column_const_data["offending"] == { @@ -132,13 +132,13 @@ def basic_graph(): result = basic_graph.execute_in_process(raise_on_error=False) output = next(item for item in result.all_node_events if item.is_successful_output) - output_data = output.event_specific_data.type_check_data - output_metadata = output_data.metadata + output_data = output.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + output_metadata = output_data.metadata # pyright: ignore[reportOptionalMemberAccess] assert len(output_metadata) == 1 column_const = output_metadata["column-aggregates-constraint-metadata"] - column_const_data = column_const.data + column_const_data = column_const.data # pyright: ignore[reportAttributeAccessIssue] assert column_const_data["expected"] == { - "bar": {"all_unique_validator": all_unique_validator.__doc__.strip()} + "bar": {"all_unique_validator": all_unique_validator.__doc__.strip()} # pyright: ignore[reportOptionalMemberAccess] } assert column_const_data["offending"] == {"bar": {"all_unique_validator": "a violation"}} assert column_const_data["actual"] == {"bar": 
{"all_unique_validator": [10.0]}} @@ -165,10 +165,10 @@ def basic_graph(): result = basic_graph.execute_in_process(raise_on_error=False) output = next(item for item in result.all_node_events if item.is_successful_output) - output_data = output.event_specific_data.type_check_data - output_metadata = output_data.metadata + output_data = output.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + output_metadata = output_data.metadata # pyright: ignore[reportOptionalMemberAccess] assert len(output_metadata) == 1 - column_const_data = output_metadata["dataframe-constraint-metadata"].data + column_const_data = output_metadata["dataframe-constraint-metadata"].data # pyright: ignore[reportAttributeAccessIssue] assert column_const_data["expected"] == ["foo", "bar"] assert column_const_data["actual"] == {"extra_columns": ["baz"], "missing_columns": ["bar"]} @@ -194,21 +194,21 @@ def basic_graph(): result = basic_graph.execute_in_process(raise_on_error=False) output = next(item for item in result.all_node_events if item.is_successful_output) - output_data = output.event_specific_data.type_check_data - output_metadata = output_data.metadata + output_data = output.event_specific_data.type_check_data # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] + output_metadata = output_data.metadata # pyright: ignore[reportOptionalMemberAccess] assert len(output_metadata) == 3 agg_data = output_metadata["column-aggregates-constraint-metadata"] - agg_metadata = agg_data.data + agg_metadata = agg_data.data # pyright: ignore[reportAttributeAccessIssue] assert agg_metadata["expected"] == { - "bar": {"all_unique_validator": all_unique_validator.__doc__.strip()} + "bar": {"all_unique_validator": all_unique_validator.__doc__.strip()} # pyright: ignore[reportOptionalMemberAccess] } assert agg_metadata["offending"] == {"bar": {"all_unique_validator": "a violation"}} assert agg_metadata["actual"] == {"bar": 
{"all_unique_validator": [10.0]}} - column_const_data = output_metadata["columns-constraint-metadata"].data + column_const_data = output_metadata["columns-constraint-metadata"].data # pyright: ignore[reportAttributeAccessIssue] assert column_const_data["expected"] == { "foo": { - "in_range_validation_fn": in_range_validator.__doc__.strip(), - "dtype_in_set_validation_fn": dtype_is_num_validator.__doc__.strip(), + "in_range_validation_fn": in_range_validator.__doc__.strip(), # pyright: ignore[reportOptionalMemberAccess] + "dtype_in_set_validation_fn": dtype_is_num_validator.__doc__.strip(), # pyright: ignore[reportOptionalMemberAccess] } } assert column_const_data["offending"] == { @@ -221,6 +221,6 @@ def basic_graph(): "foo": {"dtype_in_set_validation_fn": ["a"], "in_range_validation_fn": ["a", 7]} } - df_metadata = output_metadata["dataframe-constraint-metadata"].data + df_metadata = output_metadata["dataframe-constraint-metadata"].data # pyright: ignore[reportAttributeAccessIssue] assert df_metadata["expected"] == ["foo", "bar"] assert df_metadata["actual"] == {"extra_columns": ["baz"], "missing_columns": []} diff --git a/python_modules/libraries/dagster-pandera/dagster_pandera/__init__.py b/python_modules/libraries/dagster-pandera/dagster_pandera/__init__.py index f7414967e4a55..d2fdefe9dbbcf 100644 --- a/python_modules/libraries/dagster-pandera/dagster_pandera/__init__.py +++ b/python_modules/libraries/dagster-pandera/dagster_pandera/__init__.py @@ -233,7 +233,7 @@ def _pandera_errors_to_type_check( def _pandera_schema_to_table_schema(schema: DagsterPanderaSchema) -> TableSchema: - df_constraints = _pandera_schema_wide_checks_to_table_constraints(schema.checks) + df_constraints = _pandera_schema_wide_checks_to_table_constraints(schema.checks) # pyright: ignore[reportArgumentType] columns = [_pandera_column_to_table_column(col) for k, col in schema.columns.items()] return TableSchema(columns=columns, constraints=df_constraints) diff --git 
a/python_modules/libraries/dagster-pandera/dagster_pandera_tests/test_polars.py b/python_modules/libraries/dagster-pandera/dagster_pandera_tests/test_polars.py index c937050f78b16..891286acd84be 100644 --- a/python_modules/libraries/dagster-pandera/dagster_pandera_tests/test_polars.py +++ b/python_modules/libraries/dagster-pandera/dagster_pandera_tests/test_polars.py @@ -88,7 +88,7 @@ def a_gt_b(cls, df): sum_b = df.lazyframe.select(pl.col("b")).sum().collect().item() return sum_a > sum_b - Config = make_schema_model_config(**config_attrs) + Config = make_schema_model_config(**config_attrs) # pyright: ignore[reportAssignmentType] return SampleDataframeModel diff --git a/python_modules/libraries/dagster-papertrail/dagster_papertrail/loggers.py b/python_modules/libraries/dagster-papertrail/dagster_papertrail/loggers.py index da53a8d46267b..badf84895e394 100644 --- a/python_modules/libraries/dagster-papertrail/dagster_papertrail/loggers.py +++ b/python_modules/libraries/dagster-papertrail/dagster_papertrail/loggers.py @@ -67,7 +67,7 @@ def simple_job(): log_format = "%(asctime)s %(hostname)s " + name + ": %(message)s" formatter = logging.Formatter(log_format, datefmt="%b %d %H:%M:%S") - handler = logging.handlers.SysLogHandler(address=(papertrail_address, papertrail_port)) + handler = logging.handlers.SysLogHandler(address=(papertrail_address, papertrail_port)) # pyright: ignore[reportAttributeAccessIssue] handler.addFilter(ContextFilter()) handler.setFormatter(formatter) diff --git a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/compat_tests/test_back_compat.py b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/compat_tests/test_back_compat.py index 501b67cc04bf6..0be54d93cf8af 100644 --- a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/compat_tests/test_back_compat.py +++ b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/compat_tests/test_back_compat.py @@ -87,7 +87,7 @@ def noop_job(): noop_op() 
with pytest.raises( - (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) + (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) # pyright: ignore[reportAttributeAccessIssue] ): noop_job.execute_in_process(instance=instance) @@ -102,8 +102,8 @@ def noop_job(): run = instance.get_run_by_id(run_id) - assert run.run_id == run_id - assert run.job_snapshot_id is None + assert run.run_id == run_id # pyright: ignore[reportOptionalMemberAccess] + assert run.job_snapshot_id is None # pyright: ignore[reportOptionalMemberAccess] result = noop_job.execute_in_process(instance=instance) assert result.success @@ -115,7 +115,7 @@ def noop_job(): new_run = instance.get_run_by_id(new_run_id) - assert new_run.job_snapshot_id + assert new_run.job_snapshot_id # pyright: ignore[reportOptionalMemberAccess] def test_0_9_22_postgres_pre_asset_partition(hostname, conn_string): @@ -147,7 +147,7 @@ def asset_job(): asset_op() with pytest.raises( - (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) + (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) # pyright: ignore[reportAttributeAccessIssue] ): asset_job.execute_in_process(instance=instance) @@ -188,7 +188,7 @@ def simple_job(): } with pytest.raises( - (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) + (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) # pyright: ignore[reportAttributeAccessIssue] ): simple_job.execute_in_process(tags=tags, instance=instance) @@ -235,7 +235,7 @@ def test_0_10_6_add_bulk_actions_table(hostname, conn_string): target_fd.write(template) with pytest.raises( - (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) + (db.exc.OperationalError, db.exc.ProgrammingError, db.exc.StatementError) # pyright: ignore[reportAttributeAccessIssue] ): with DagsterInstance.from_config(tempdir) as instance: instance.get_backfills() @@ -266,9 +266,9 @@ def 
test_0_11_0_add_asset_details(hostname, conn_string): storage = instance._event_storage with pytest.raises( ( - db.exc.OperationalError, - db.exc.ProgrammingError, - db.exc.StatementError, + db.exc.OperationalError, # pyright: ignore[reportAttributeAccessIssue] + db.exc.ProgrammingError, # pyright: ignore[reportAttributeAccessIssue] + db.exc.StatementError, # pyright: ignore[reportAttributeAccessIssue] ) ): storage.all_asset_keys() @@ -397,7 +397,7 @@ def asset_job(): with DagsterInstance.from_config(tempdir) as instance: storage = instance._event_storage - assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) + assert not storage.has_secondary_index(ASSET_KEY_INDEX_COLS) # pyright: ignore[reportAttributeAccessIssue] # make sure that executing the job works asset_job.execute_in_process(instance=instance) @@ -478,11 +478,11 @@ def test_0_13_12_add_start_time_end_time(hostname, conn_string): assert result.success assert len(instance.get_runs()) == 3 latest_run_record = instance.get_run_records()[0] - assert latest_run_record.end_time > latest_run_record.start_time + assert latest_run_record.end_time > latest_run_record.start_time # pyright: ignore[reportOperatorIssue] # Verify that historical records also get updated via data migration earliest_run_record = instance.get_run_records()[-1] - assert earliest_run_record.end_time > earliest_run_record.start_time + assert earliest_run_record.end_time > earliest_run_record.start_time # pyright: ignore[reportOperatorIssue] def test_schedule_secondary_index_table_backcompat(hostname, conn_string): @@ -506,13 +506,13 @@ def test_schedule_secondary_index_table_backcompat(hostname, conn_string): # secondary indexes should exist because it's colocated in this database from the run # storage - assert instance.schedule_storage.has_secondary_index_table() + assert instance.schedule_storage.has_secondary_index_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] # this should succeed without raising 
any issues instance.upgrade() # no-op - assert instance.schedule_storage.has_secondary_index_table() + assert instance.schedule_storage.has_secondary_index_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_instigators_table_backcompat(hostname, conn_string): @@ -532,11 +532,11 @@ def test_instigators_table_backcompat(hostname, conn_string): instance = DagsterInstance.from_config(tempdir) - assert not instance.schedule_storage.has_instigators_table() + assert not instance.schedule_storage.has_instigators_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] instance.upgrade() - assert instance.schedule_storage.has_instigators_table() + assert instance.schedule_storage.has_instigators_table() # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_jobs_selector_id_migration(hostname, conn_string): @@ -564,27 +564,27 @@ def test_jobs_selector_id_migration(hostname, conn_string): # runs the required data migrations instance.upgrade() - assert instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) + assert instance.schedule_storage.has_built_index(SCHEDULE_JOBS_SELECTOR_ID) # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] legacy_count = len(instance.all_instigator_state()) - migrated_instigator_count = instance.schedule_storage.execute( + migrated_instigator_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(InstigatorsTable) )[0][0] assert migrated_instigator_count == legacy_count - migrated_job_count = instance.schedule_storage.execute( + migrated_job_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTable) .where(JobTable.c.selector_id.isnot(None)) )[0][0] assert migrated_job_count == legacy_count - legacy_tick_count = 
instance.schedule_storage.execute( + legacy_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]).select_from(JobTickTable) )[0][0] assert legacy_tick_count > 0 # tick migrations are optional - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -594,7 +594,7 @@ def test_jobs_selector_id_migration(hostname, conn_string): # run the optional migrations instance.reindex() - migrated_tick_count = instance.schedule_storage.execute( + migrated_tick_count = instance.schedule_storage.execute( # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] db_select([db.func.count()]) .select_from(JobTickTable) .where(JobTickTable.c.selector_id.isnot(None)) @@ -828,7 +828,7 @@ def test_add_primary_keys(hostname, conn_string): instance.upgrade() assert "id" in get_columns(instance, "kvs") - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] kvs_id_count = _get_table_row_count( instance.run_storage, KeyValueStoreTable, with_non_null_id=True ) @@ -836,7 +836,7 @@ def test_add_primary_keys(hostname, conn_string): assert get_primary_key(instance, "kvs") assert "id" in get_columns(instance, "instance_info") - with instance.run_storage.connect(): + with instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] instance_info_id_count = _get_table_row_count( instance.run_storage, InstanceInfo, with_non_null_id=True ) @@ -844,7 +844,7 @@ def test_add_primary_keys(hostname, conn_string): assert get_primary_key(instance, "instance_info") assert "id" in get_columns(instance, "daemon_heartbeats") - with instance.run_storage.connect(): + with 
instance.run_storage.connect(): # pyright: ignore[reportAttributeAccessIssue] daemon_heartbeats_id_count = _get_table_row_count( instance.run_storage, DaemonHeartbeatsTable, with_non_null_id=True ) @@ -881,20 +881,20 @@ def _get_integer_id_tables(conn): target_fd.write(template) with DagsterInstance.from_config(tempdir) as instance: - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 - with instance.event_log_storage.index_connection() as conn: + with instance.event_log_storage.index_connection() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 - with instance.schedule_storage.connect() as conn: + with instance.schedule_storage.connect() as conn: # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) > 0 run_bigint_migration(instance) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 - with instance.event_log_storage.index_connection() as conn: + with instance.event_log_storage.index_connection() as conn: # pyright: ignore[reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 - with instance.schedule_storage.connect() as conn: + with instance.schedule_storage.connect() as conn: # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] assert len(_get_integer_id_tables(conn)) == 0 @@ -946,7 +946,7 @@ def test_add_backfill_id_column(hostname, conn_string): assert len(instance.get_runs(filters=RunsFilter(exclude_subruns=True))) == 2 instance.upgrade() - assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) + assert instance.run_storage.has_built_index(RUN_BACKFILL_ID) # pyright: ignore[reportAttributeAccessIssue] assert new_columns <= 
get_columns(instance, "runs") run_not_in_backfill_post_migration = instance.run_storage.add_run( @@ -968,7 +968,7 @@ def test_add_backfill_id_column(hostname, conn_string): backfill_ids = { row["run_id"]: row["backfill_id"] - for row in instance._run_storage.fetchall( + for row in instance._run_storage.fetchall( # pyright: ignore[reportAttributeAccessIssue] db_select([RunsTable.c.run_id, RunsTable.c.backfill_id]).select_from(RunsTable) ) } @@ -1067,7 +1067,7 @@ def test_add_backfill_tags(hostname, conn_string): ) instance.add_backfill(after_migration) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: ignore[reportAttributeAccessIssue] rows = conn.execute( db_select( [ @@ -1083,7 +1083,7 @@ def test_add_backfill_tags(hostname, conn_string): assert ids_to_tags[after_migration.backfill_id] == after_migration.tags # filtering by tags works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) @@ -1150,7 +1150,7 @@ def test_add_bulk_actions_job_name_column(hostname, conn_string): # filtering pre-migration relies on filtering runs, so add a run with the expected job_name pre_migration_run = instance.run_storage.add_run( DagsterRun( - job_name=before_migration.job_name, + job_name=before_migration.job_name, # pyright: ignore[reportArgumentType] run_id=make_new_run_id(), tags={BACKFILL_ID_TAG: before_migration.backfill_id}, status=DagsterRunStatus.NOT_STARTED, @@ -1187,7 +1187,7 @@ def test_add_bulk_actions_job_name_column(hostname, conn_string): ) instance.add_backfill(after_migration) - with instance.run_storage.connect() as conn: + with instance.run_storage.connect() as conn: # pyright: 
ignore[reportAttributeAccessIssue] rows = conn.execute( db_select([BulkActionsTable.c.key, BulkActionsTable.c.job_name]) ).fetchall() @@ -1197,7 +1197,7 @@ def test_add_bulk_actions_job_name_column(hostname, conn_string): assert ids_to_job_name[after_migration.backfill_id] == after_migration.job_name # filtering by job_name works after migration - assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) + assert instance.run_storage.has_built_index(BACKFILL_JOB_NAME_AND_TAGS) # pyright: ignore[reportAttributeAccessIssue] # delete the run that was added pre-migration to prove that tags filtering is happening on the # backfill_tags table instance.delete_run(pre_migration_run.run_id) diff --git a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_event_log.py b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_event_log.py index 77c52fa915037..d6a2bb42e4fc3 100644 --- a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_event_log.py +++ b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_event_log.py @@ -136,4 +136,4 @@ def test_load_from_config(self, hostname): with instance_for_test(overrides=yaml.safe_load(explicit_cfg)) as explicit_instance: from_explicit = explicit_instance._event_storage # noqa: SLF001 - assert from_url.postgres_url == from_explicit.postgres_url + assert from_url.postgres_url == from_explicit.postgres_url # pyright: ignore[reportAttributeAccessIssue] diff --git a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_instance.py b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_instance.py index a9ea1d708b323..e6f8eccd5f43e 100644 --- a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_instance.py +++ b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_instance.py @@ -202,16 +202,16 @@ def test_statement_timeouts(hostname): # ensure migration error is not raised by being up to 
date instance.upgrade() - with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): - with instance._run_storage.connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._run_storage.connect() as conn: # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] conn.execute(db.text("select pg_sleep(1)")).fetchone() - with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): - with instance._event_storage._connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._event_storage._connect() as conn: # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] conn.execute(db.text("select pg_sleep(1)")).fetchone() - with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): - with instance._schedule_storage.connect() as conn: # noqa: SLF001 + with pytest.raises(db.exc.OperationalError, match="QueryCanceled"): # pyright: ignore[reportAttributeAccessIssue] + with instance._schedule_storage.connect() as conn: # noqa: SLF001 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] conn.execute(db.text("select pg_sleep(1)")).fetchone() @@ -223,13 +223,13 @@ def test_skip_autocreate(hostname, conn_string): with instance_for_test( overrides=yaml.safe_load(skip_autocreate_pg_config(hostname)) ) as instance: - with pytest.raises(db.exc.ProgrammingError): + with pytest.raises(db.exc.ProgrammingError): # pyright: ignore[reportAttributeAccessIssue] instance.get_runs() - with pytest.raises(db.exc.ProgrammingError): + with pytest.raises(db.exc.ProgrammingError): # pyright: ignore[reportAttributeAccessIssue] instance.all_asset_keys() - with pytest.raises(db.exc.ProgrammingError): + with pytest.raises(db.exc.ProgrammingError): # pyright: ignore[reportAttributeAccessIssue] instance.all_instigator_state() with 
instance_for_test(overrides=yaml.safe_load(full_pg_config(hostname))) as instance: @@ -248,9 +248,9 @@ def test_specify_pg_params(hostname): ) as instance: postgres_url = f"postgresql://test:test@{hostname}:5432/test?application_name=myapp&connect_timeout=10&options=-c%20synchronous_commit%3Doff" - assert instance._event_storage.postgres_url == postgres_url # noqa: SLF001 - assert instance._run_storage.postgres_url == postgres_url # noqa: SLF001 - assert instance._schedule_storage.postgres_url == postgres_url # noqa: SLF001 + assert instance._event_storage.postgres_url == postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance._run_storage.postgres_url == postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + assert instance._schedule_storage.postgres_url == postgres_url # noqa: SLF001 # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] def test_conn_str(): @@ -269,8 +269,8 @@ def test_conn_str(): ) assert conn_str == f"postgresql://{url_wo_scheme}" parsed = urlparse(conn_str) - assert unquote(parsed.username) == username - assert unquote(parsed.password) == password + assert unquote(parsed.username) == username # pyright: ignore[reportArgumentType] + assert unquote(parsed.password) == password # pyright: ignore[reportArgumentType] assert parsed.hostname == hostname assert parsed.scheme == "postgresql" @@ -285,8 +285,8 @@ def test_conn_str(): assert conn_str == f"postgresql+dialect://{url_wo_scheme}" parsed = urlparse(conn_str) - assert unquote(parsed.username) == username - assert unquote(parsed.password) == password + assert unquote(parsed.username) == username # pyright: ignore[reportArgumentType] + assert unquote(parsed.password) == password # pyright: ignore[reportArgumentType] assert parsed.hostname == hostname assert parsed.scheme == custom_scheme diff --git a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_run_storage.py 
b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_run_storage.py index d32a77397f250..d00a5478e75ee 100644 --- a/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_run_storage.py +++ b/python_modules/libraries/dagster-postgres/dagster_postgres_tests/test_run_storage.py @@ -77,11 +77,11 @@ def test_load_from_config(self, hostname): overrides=yaml.safe_load(explicit_cfg) ) as from_explicit_instance: assert ( - from_url_instance._run_storage.postgres_url # noqa: SLF001 - == from_explicit_instance._run_storage.postgres_url # noqa: SLF001 + from_url_instance._run_storage.postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + == from_explicit_instance._run_storage.postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) with instance_for_test(overrides=yaml.safe_load(env_cfg)) as from_env_instance: assert ( - from_url_instance._run_storage.postgres_url # noqa: SLF001 - == from_env_instance._run_storage.postgres_url # noqa: SLF001 + from_url_instance._run_storage.postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] + == from_env_instance._run_storage.postgres_url # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue] ) diff --git a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py index f5f5cc2f45373..9a3c34df7f0d9 100644 --- a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py +++ b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py @@ -15,9 +15,9 @@ def spark_session_from_config(spark_conf=None): builder = SparkSession.builder flat = flatten_dict(spark_conf) for key, value in flat: - builder = builder.config(key, value) + builder = builder.config(key, value) # pyright: ignore[reportAttributeAccessIssue] - return builder.getOrCreate() + return builder.getOrCreate() # pyright: ignore[reportAttributeAccessIssue] class 
PySparkResource(ConfigurableResource): diff --git a/python_modules/libraries/dagster-shell/dagster_shell_tests/test_terminate.py b/python_modules/libraries/dagster-shell/dagster_shell_tests/test_terminate.py index a0f9e1f8dbfce..9655221dd171a 100644 --- a/python_modules/libraries/dagster-shell/dagster_shell_tests/test_terminate.py +++ b/python_modules/libraries/dagster-shell/dagster_shell_tests/test_terminate.py @@ -76,7 +76,7 @@ def test_terminate_kills_subproc(): run_id = dagster_run.run_id - assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED + assert instance.get_run_by_id(run_id).status == DagsterRunStatus.NOT_STARTED # pyright: ignore[reportOptionalMemberAccess] instance.launch_run(dagster_run.run_id, workspace) @@ -84,7 +84,7 @@ def test_terminate_kills_subproc(): # find pid of subprocess subproc_pid = poll_for_pid(instance, run_id) - assert psutil.pid_exists(subproc_pid) + assert psutil.pid_exists(subproc_pid) # pyright: ignore[reportArgumentType] # simulate waiting a bit to terminate the job time.sleep(0.5) @@ -94,8 +94,8 @@ def test_terminate_kills_subproc(): terminated_dagster_run = poll_for_finished_run(instance, run_id, timeout=30) terminated_dagster_run = instance.get_run_by_id(run_id) - assert terminated_dagster_run.status == DagsterRunStatus.CANCELED + assert terminated_dagster_run.status == DagsterRunStatus.CANCELED # pyright: ignore[reportOptionalMemberAccess] # make sure the subprocess is killed after a short delay time.sleep(0.5) - assert not psutil.pid_exists(subproc_pid) + assert not psutil.pid_exists(subproc_pid) # pyright: ignore[reportArgumentType] diff --git a/python_modules/libraries/dagster-shell/dagster_shell_tests/test_utils.py b/python_modules/libraries/dagster-shell/dagster_shell_tests/test_utils.py index 3d1b564deb89e..f14d410f71f62 100644 --- a/python_modules/libraries/dagster-shell/dagster_shell_tests/test_utils.py +++ b/python_modules/libraries/dagster-shell/dagster_shell_tests/test_utils.py @@ -5,15 
+5,17 @@ from dagster._core.test_utils import environ from dagster_shell.utils import execute, execute_script_file +logger = logging.getLogger() + def test_bad_output_logging(): with pytest.raises(Exception, match="Unrecognized output_logging NOT_A_VALID_LOGGING_VALUE"): - execute("ls", output_logging="NOT_A_VALID_LOGGING_VALUE", log=logging) + execute("ls", output_logging="NOT_A_VALID_LOGGING_VALUE", log=logger) def test_execute_inline(tmp_file): with tmp_file("some file contents") as (tmp_path, tmp_file): - res, retcode = execute("ls", cwd=tmp_path, output_logging="BUFFER", log=logging) + res, retcode = execute("ls", cwd=tmp_path, output_logging="BUFFER", log=logger) assert os.path.basename(tmp_file) in res assert retcode == 0 @@ -21,7 +23,7 @@ def test_execute_inline(tmp_file): def test_execute_file(tmp_file): with tmp_file("ls") as (tmp_path, tmp_file): res, retcode = execute_script_file( - tmp_file, output_logging="BUFFER", log=logging, cwd=tmp_path + tmp_file, output_logging="BUFFER", log=logger, cwd=tmp_path ) assert os.path.basename(tmp_file) in res assert retcode == 0 @@ -31,7 +33,7 @@ def test_execute_file_large_buffered_output(tmp_file): large_string = "0123456789" * (6600) # bigger than 2**16 buffer with tmp_file(f"echo -n {large_string}") as (tmp_path, tmp_file): output, retcode = execute_script_file( - tmp_file, output_logging="BUFFER", log=logging, cwd=tmp_path + tmp_file, output_logging="BUFFER", log=logger, cwd=tmp_path ) assert retcode == 0 assert output == large_string @@ -41,7 +43,7 @@ def test_execute_file_large_output_no_logging(tmp_file): large_string = "0123456789" * (6600) # bigger than 2**16 buffer with tmp_file(f"echo -n {large_string}") as (tmp_path, tmp_file): output, retcode = execute_script_file( - tmp_file, output_logging="NONE", log=logging, cwd=tmp_path + tmp_file, output_logging="NONE", log=logger, cwd=tmp_path ) assert retcode == 0 assert output == "" @@ -51,7 +53,7 @@ def test_execute_file_large_line_stream_output(tmp_file): 
large_string = "0123456789" * (100000) # one giant line > 2**16 buffer with tmp_file(f"echo -n {large_string}") as (tmp_path, tmp_file): output, retcode = execute_script_file( - tmp_file, output_logging="STREAM", log=logging, cwd=tmp_path + tmp_file, output_logging="STREAM", log=logger, cwd=tmp_path ) assert retcode == 0 assert output == large_string @@ -60,14 +62,14 @@ def test_execute_file_large_line_stream_output(tmp_file): def test_env(tmp_file): cmd = "echo $TEST_VAR" res, retcode = execute( - cmd, output_logging="BUFFER", log=logging, env={"TEST_VAR": "some_env_value"} + cmd, output_logging="BUFFER", log=logger, env={"TEST_VAR": "some_env_value"} ) assert res.strip() == "some_env_value" assert retcode == 0 # By default, pulls in env from the calling process with environ({"TEST_VAR": "some_other_env_value"}): - res, retcode = execute(cmd, output_logging="BUFFER", log=logging) + res, retcode = execute(cmd, output_logging="BUFFER", log=logger) assert res.strip() == "some_other_env_value" assert retcode == 0 @@ -76,7 +78,7 @@ def test_env(tmp_file): res, retcode = execute_script_file( tmp_file, output_logging="BUFFER", - log=logging, + log=logger, env={"TEST_VAR": "some_env_value"}, ) assert res.strip() == "some_env_value" @@ -86,7 +88,7 @@ def test_env(tmp_file): def test_output_logging_stream(caplog): caplog.set_level(logging.INFO) - _, retcode = execute("ls", output_logging="STREAM", log=logging) + _, retcode = execute("ls", output_logging="STREAM", log=logger) log_messages = [r.message for r in caplog.records] assert log_messages[0].startswith("Using temporary directory: ") assert log_messages[1].startswith("Temporary script location: ") @@ -97,7 +99,7 @@ def test_output_logging_stream(caplog): caplog.clear() - _, retcode = execute("ls", output_logging="STREAM", log=logging) + _, retcode = execute("ls", output_logging="STREAM", log=logger) log_messages = [r.message for r in caplog.records] assert log_messages[0].startswith("Using temporary directory: ") 
assert log_messages[1].startswith("Temporary script location: ") @@ -108,7 +110,7 @@ def test_output_logging_stream(caplog): caplog.clear() - _, retcode = execute("ls", output_logging="STREAM", log=logging, log_shell_command=False) + _, retcode = execute("ls", output_logging="STREAM", log=logger, log_shell_command=False) log_messages = [r.message for r in caplog.records] assert log_messages[0].startswith("Using temporary directory: ") assert log_messages[1].startswith("Temporary script location: ") @@ -121,7 +123,7 @@ def test_output_logging_stream(caplog): _, retcode = execute( 'for i in 1 2 3; do echo "iter $i"; done;', output_logging="STREAM", - log=logging, + log=logger, ) log_messages = [r.message for r in caplog.records] assert retcode == 0 @@ -132,7 +134,7 @@ def test_output_logging_stream(caplog): _, retcode = execute( 'for i in 1 2 3; do echo "iter $i"; done;', output_logging="BUFFER", - log=logging, + log=logger, ) log_messages = [r.message for r in caplog.records] assert retcode == 0 diff --git a/python_modules/libraries/dagster-slack/dagster_slack_tests/test_sensors.py b/python_modules/libraries/dagster-slack/dagster_slack_tests/test_sensors.py index 712eceffc93ee..c9f30af429364 100644 --- a/python_modules/libraries/dagster-slack/dagster_slack_tests/test_sensors.py +++ b/python_modules/libraries/dagster-slack/dagster_slack_tests/test_sensors.py @@ -10,7 +10,9 @@ def test_slack_run_failure_sensor_def(): sensor_name = "my_failure_sensor" my_sensor = make_slack_on_run_failure_sensor( - channel="#foo", slack_token=os.getenv("SLACK_TOKEN"), name=sensor_name + channel="#foo", + slack_token=os.getenv("SLACK_TOKEN"), # pyright: ignore[reportArgumentType] + name=sensor_name, ) assert my_sensor.name == sensor_name diff --git a/python_modules/libraries/dagster-snowflake-pandas/dagster_snowflake_pandas_tests/test_snowflake_pandas_type_handler.py b/python_modules/libraries/dagster-snowflake-pandas/dagster_snowflake_pandas_tests/test_snowflake_pandas_type_handler.py 
index 27e5a390ece1b..c8dc0858f8512 100644 --- a/python_modules/libraries/dagster-snowflake-pandas/dagster_snowflake_pandas_tests/test_snowflake_pandas_type_handler.py +++ b/python_modules/libraries/dagster-snowflake-pandas/dagster_snowflake_pandas_tests/test_snowflake_pandas_type_handler.py @@ -369,7 +369,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( - conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() + conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] @@ -382,7 +382,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( - conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() + conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -395,7 +395,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( - conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() + conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -456,7 +456,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert out_df["A"].tolist() == ["1", "1", "1"] materialize( @@ -469,7 +469,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - 
).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] materialize( @@ -482,7 +482,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -548,7 +548,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert out_df["A"].tolist() == ["1", "1", "1"] materialize( @@ -561,7 +561,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] materialize( @@ -574,7 +574,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2", "3", "3", "3"] materialize( @@ -587,7 +587,7 @@ def downstream_partitioned(df) -> None: with snowflake_conn.get_connection() as conn: out_df = ( conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}") - ).fetch_pandas_all() + ).fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3", "4", "4", "4"] @@ -638,7 +638,7 @@ def downstream_partitioned(df) -> None: 
resource_defs = {"io_manager": io_manager, "fs_io": fs_io_manager} with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -654,11 +654,11 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -674,7 +674,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -692,7 +692,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -765,7 +765,7 @@ def self_dependent_asset( with snowflake_conn.get_connection() as conn: out_df = ( - conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() + conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1"] @@ -782,7 +782,7 @@ def self_dependent_asset( with snowflake_conn.get_connection() as conn: out_df = ( - conn.cursor().execute(f"SELECT * FROM {snowflake_table_path}").fetch_pandas_all() + conn.cursor().execute(f"SELECT * FROM 
{snowflake_table_path}").fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] diff --git a/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/conftest.py b/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/conftest.py index a09570d57971e..37d38e6cf597e 100644 --- a/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/conftest.py +++ b/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/conftest.py @@ -8,7 +8,7 @@ @pytest.fixture(scope="module") def spark(): - spark = SparkSession.builder.config( + spark = SparkSession.builder.config( # pyright: ignore[reportAttributeAccessIssue] key="spark.jars.packages", value=SNOWFLAKE_JARS, ).getOrCreate() diff --git a/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/test_snowflake_pyspark_type_handler.py b/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/test_snowflake_pyspark_type_handler.py index 3465d68da3b5a..c923c6477a070 100644 --- a/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/test_snowflake_pyspark_type_handler.py +++ b/python_modules/libraries/dagster-snowflake-pyspark/dagster_snowflake_pyspark_tests/test_snowflake_pyspark_type_handler.py @@ -286,7 +286,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] @@ -303,7 +303,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -320,7 +320,7 @@ def downstream_partitioned(df) -> 
None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -388,7 +388,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] @@ -405,7 +405,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -422,7 +422,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -502,7 +502,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] @@ -519,7 +519,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -536,7 +536,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2", "3", "3", "3"] @@ -553,7 +553,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # 
pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3", "4", "4", "4"] @@ -613,7 +613,7 @@ def downstream_partitioned(df) -> None: resource_defs = {"io_manager": io_manager, "fs_io": fs_io_manager} with instance_for_test() as instance: - instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["apple"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -629,11 +629,11 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert out_df["A"].tolist() == ["1", "1", "1"] - instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) + instance.add_dynamic_partitions(dynamic_fruits.name, ["orange"]) # pyright: ignore[reportArgumentType] materialize( [dynamic_partitioned, downstream_partitioned], @@ -649,7 +649,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] @@ -667,7 +667,7 @@ def downstream_partitioned(df) -> None: .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["2", "2", "2", "3", "3", "3"] @@ -750,7 +750,7 @@ def self_dependent_asset( .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) assert sorted(out_df["A"].tolist()) == ["1", "1", "1"] @@ -771,6 +771,6 @@ def self_dependent_asset( .execute( f"SELECT * FROM {snowflake_table_path}", ) - .fetch_pandas_all() + .fetch_pandas_all() # pyright: ignore[reportOptionalMemberAccess] ) 
assert sorted(out_df["A"].tolist()) == ["1", "1", "1", "2", "2", "2"] diff --git a/python_modules/libraries/dagster-snowflake/dagster_snowflake/snowflake_io_manager.py b/python_modules/libraries/dagster-snowflake/dagster_snowflake/snowflake_io_manager.py index f3ed72a4dd1b5..5de8bf21603f9 100644 --- a/python_modules/libraries/dagster-snowflake/dagster_snowflake/snowflake_io_manager.py +++ b/python_modules/libraries/dagster-snowflake/dagster_snowflake/snowflake_io_manager.py @@ -347,7 +347,7 @@ def connect(context, table_slice): if context.resource_config else {} ) - with SnowflakeResource(schema=table_slice.schema, **no_schema_config).get_connection( + with SnowflakeResource(schema=table_slice.schema, **no_schema_config).get_connection( # pyright: ignore[reportArgumentType] raw_conn=False ) as conn: yield conn diff --git a/python_modules/libraries/dagster-spark/dagster_spark_tests/test_error.py b/python_modules/libraries/dagster-spark/dagster_spark_tests/test_error.py index 2ee8622953ed2..a133b4bf2d53a 100644 --- a/python_modules/libraries/dagster-spark/dagster_spark_tests/test_error.py +++ b/python_modules/libraries/dagster-spark/dagster_spark_tests/test_error.py @@ -36,7 +36,7 @@ def test_jar_not_found(): assert result.is_node_failed("spark_op") assert ( "does not exist. A valid jar must be built before running this op." - in result.failure_data_for_node("spark_op").error.cause.message + in result.failure_data_for_node("spark_op").error.cause.message # pyright: ignore[reportOptionalMemberAccess] ) @@ -71,5 +71,5 @@ def test_no_spark_home(): assert ( "No spark home set. You must either pass spark_home in config or set " "$SPARK_HOME in your environment (got None)." 
- in result.failure_data_for_node("spark_op").error.cause.message + in result.failure_data_for_node("spark_op").error.cause.message # pyright: ignore[reportOptionalMemberAccess] ) diff --git a/python_modules/libraries/dagster-ssh/dagster_ssh_tests/test_resources.py b/python_modules/libraries/dagster-ssh/dagster_ssh_tests/test_resources.py index 4aa240bc56c67..3dfb097b83ce2 100644 --- a/python_modules/libraries/dagster-ssh/dagster_ssh_tests/test_resources.py +++ b/python_modules/libraries/dagster-ssh/dagster_ssh_tests/test_resources.py @@ -24,8 +24,8 @@ def generate_ssh_key(): # get private key in PEM container format return key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.TraditionalOpenSSL, + encoding=serialization.Encoding.PEM, # pyright: ignore[reportArgumentType] + format=serialization.PrivateFormat.TraditionalOpenSSL, # pyright: ignore[reportArgumentType] encryption_algorithm=serialization.NoEncryption(), ).decode("utf-8") diff --git a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py index 55507697ee0a1..4ab7c99cf038b 100644 --- a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py +++ b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py @@ -42,7 +42,7 @@ def build_tableau_materializable_assets_definition( def asset_fn(context: AssetExecutionContext): tableau = cast(BaseTableauWorkspace, getattr(context.resources, resource_key)) with tableau.get_client() as client: - yield from client.refresh_and_materialize_workbooks( + yield from client.refresh_and_materialize_workbooks( # pyright: ignore[reportOptionalMemberAccess] specs=specs, refreshable_workbook_ids=refreshable_workbook_ids ) diff --git a/python_modules/libraries/dagster-tableau/dagster_tableau/resources.py b/python_modules/libraries/dagster-tableau/dagster_tableau/resources.py index 3c2e9bd89faa7..adcac20362c0d 100644 --- 
a/python_modules/libraries/dagster-tableau/dagster_tableau/resources.py +++ b/python_modules/libraries/dagster-tableau/dagster_tableau/resources.py @@ -4,7 +4,7 @@ import uuid from abc import abstractmethod from contextlib import contextmanager -from typing import Any, List, Mapping, Optional, Sequence, Set, Type, Union +from typing import Any, Iterator, List, Mapping, Optional, Sequence, Set, Type, Union import jwt import requests @@ -254,7 +254,7 @@ def sign_in(self) -> Auth.contextmgr: headers={"kid": self.connected_app_secret_id, "iss": self.connected_app_client_id}, ) - tableau_auth = TSC.JWTAuth(jwt_token, site_id=self.site_name) # pyright: ignore (reportAttributeAccessIssue) + tableau_auth = TSC.JWTAuth(jwt_token, site_id=self.site_name) return self._server.auth.sign_in(tableau_auth) @property @@ -381,11 +381,13 @@ def build_client(self) -> None: raise NotImplementedError() @contextmanager - def get_client(self): + def get_client(self) -> Iterator[Union[TableauCloudClient, TableauServerClient]]: if not self._client: self.build_client() - with self._client.sign_in(): - yield self._client + + client = check.not_none(self._client, "build_client failed to set _client") + with client.sign_in(): + yield client def fetch_tableau_workspace_data( self, @@ -407,7 +409,7 @@ def fetch_tableau_workspace_data( for workbook_id in workbook_ids: workbook = client.get_workbook(workbook_id=workbook_id) workbook_data_list = check.is_list( - workbook["data"]["workbooks"], + workbook["data"]["workbooks"], # pyright: ignore[reportIndexIssue] additional_message=f"Invalid data for Tableau workbook for id {workbook_id}.", ) if not workbook_data_list: diff --git a/python_modules/libraries/dagster-wandb/dagster_wandb/utils/pickling.py b/python_modules/libraries/dagster-wandb/dagster_wandb/utils/pickling.py index 014d26d4496ed..cba7dfad71c28 100644 --- a/python_modules/libraries/dagster-wandb/dagster_wandb/utils/pickling.py +++ 
b/python_modules/libraries/dagster-wandb/dagster_wandb/utils/pickling.py @@ -55,7 +55,7 @@ def pickle_artifact_content( **artifact.metadata, **{ "source_serialization_module": "dill", - "source_dill_version_used": dill.__version__, + "source_dill_version_used": dill.__version__, # pyright: ignore[reportOptionalMemberAccess] "source_pickle_protocol_used": serialization_module_parameters_with_protocol[ "protocol" ], @@ -63,7 +63,7 @@ def pickle_artifact_content( } with artifact.new_file(DILL_FILENAME, "wb") as file: try: - dill.dump( + dill.dump( # pyright: ignore[reportOptionalMemberAccess] obj, file, **serialization_module_parameters_with_protocol, @@ -88,7 +88,7 @@ def pickle_artifact_content( **artifact.metadata, **{ "source_serialization_module": "cloudpickle", - "source_cloudpickle_version_used": cloudpickle.__version__, + "source_cloudpickle_version_used": cloudpickle.__version__, # pyright: ignore[reportOptionalMemberAccess] "source_pickle_protocol_used": serialization_module_parameters_with_protocol[ "protocol" ], @@ -96,7 +96,7 @@ def pickle_artifact_content( } with artifact.new_file(CLOUDPICKLE_FILENAME, "wb") as file: try: - cloudpickle.dump( + cloudpickle.dump( # pyright: ignore[reportOptionalMemberAccess] obj, file, **serialization_module_parameters_with_protocol, @@ -120,7 +120,7 @@ def pickle_artifact_content( **artifact.metadata, **{ "source_serialization_module": "joblib", - "source_joblib_version_used": joblib.__version__, + "source_joblib_version_used": joblib.__version__, # pyright: ignore[reportOptionalMemberAccess] "source_pickle_protocol_used": serialization_module_parameters_with_protocol[ "protocol" ], @@ -128,7 +128,7 @@ def pickle_artifact_content( } with artifact.new_file(JOBLIB_FILENAME, "wb") as file: try: - joblib.dump( + joblib.dump( # pyright: ignore[reportOptionalMemberAccess] obj, file, **serialization_module_parameters_with_protocol, @@ -182,7 +182,7 @@ def unpickle_artifact_content(artifact_dir): " was not found. 
Please, make sure it's installed." ) with open(f"{artifact_dir}/{DILL_FILENAME}", "rb") as file: - input_value = dill.load(file) + input_value = dill.load(file) # pyright: ignore[reportOptionalMemberAccess] return input_value elif os.path.exists(f"{artifact_dir}/{CLOUDPICKLE_FILENAME}"): if not has_cloudpickle: @@ -191,7 +191,7 @@ def unpickle_artifact_content(artifact_dir): " module was not found. Please, make sure it's installed." ) with open(f"{artifact_dir}/{CLOUDPICKLE_FILENAME}", "rb") as file: - input_value = cloudpickle.load(file) + input_value = cloudpickle.load(file) # pyright: ignore[reportOptionalMemberAccess] return input_value elif os.path.exists(f"{artifact_dir}/{JOBLIB_FILENAME}"): if not has_joblib: @@ -200,7 +200,7 @@ def unpickle_artifact_content(artifact_dir): " was not found. Please, make sure it's installed." ) with open(f"{artifact_dir}/{JOBLIB_FILENAME}", "rb") as file: - input_value = joblib.load(file) + input_value = joblib.load(file) # pyright: ignore[reportOptionalMemberAccess] return input_value elif os.path.exists(f"{artifact_dir}/{PICKLE_FILENAME}"): with open(f"{artifact_dir}/{PICKLE_FILENAME}", "rb") as file: diff --git a/python_modules/libraries/dagstermill/dagstermill/manager.py b/python_modules/libraries/dagstermill/dagstermill/manager.py index c482a2ac081e5..58b0e7664c9ae 100644 --- a/python_modules/libraries/dagstermill/dagstermill/manager.py +++ b/python_modules/libraries/dagstermill/dagstermill/manager.py @@ -311,18 +311,18 @@ def yield_result(self, value, output_name="result"): # deferred import for perf import scrapbook - if not self.op_def.has_output(output_name): + if not self.op_def.has_output(output_name): # pyright: ignore[reportOptionalMemberAccess] raise DagstermillError( - f"Op {self.op_def.name} does not have output named {output_name}.Expected one of" - f" {[str(output_def.name) for output_def in self.op_def.output_defs]}" + f"Op {self.op_def.name} does not have output named {output_name}.Expected one of" # 
pyright: ignore[reportOptionalMemberAccess] + f" {[str(output_def.name) for output_def in self.op_def.output_defs]}" # pyright: ignore[reportOptionalMemberAccess] ) # pass output value cross process boundary using io manager - step_context = self.context._step_context # noqa: SLF001 + step_context = self.context._step_context # noqa: SLF001 # pyright: ignore[reportAttributeAccessIssue,reportOptionalMemberAccess] # Note: yield_result currently does not support DynamicOutput # dagstermill assets do not support yielding additional results within the notebook: - if len(step_context.job_def.asset_layer.executable_asset_keys) > 0: + if len(step_context.job_def.asset_layer.executable_asset_keys) > 0: # pyright: ignore[reportArgumentType] raise DagstermillError( "dagstermill assets do not currently support dagstermill.yield_result" ) @@ -369,7 +369,7 @@ def yield_event(self, dagster_event): import scrapbook event_id = f"event-{uuid.uuid4()}" - out_file_path = os.path.join(self.marshal_dir, event_id) + out_file_path = os.path.join(self.marshal_dir, event_id) # pyright: ignore[reportCallIssue,reportArgumentType] with open(out_file_path, "wb") as fd: fd.write(pickle.dumps(dagster_event, PICKLE_PROTOCOL)) diff --git a/python_modules/libraries/dagstermill/dagstermill_tests/test_cli_commands.py b/python_modules/libraries/dagstermill/dagstermill_tests/test_cli_commands.py index 6e608022329f7..c36ddc0e24a07 100644 --- a/python_modules/libraries/dagstermill/dagstermill_tests/test_cli_commands.py +++ b/python_modules/libraries/dagstermill/dagstermill_tests/test_cli_commands.py @@ -48,13 +48,13 @@ def scaffold(notebook_name=None, kernel=None): raise res.exception assert res.exit_code == 0 - yield os.path.abspath(notebook_name) + yield os.path.abspath(notebook_name) # pyright: ignore[reportArgumentType] - if os.path.exists(notebook_name): - os.unlink(notebook_name) + if os.path.exists(notebook_name): # pyright: ignore[reportArgumentType] + os.unlink(notebook_name) # pyright: 
ignore[reportArgumentType] - if os.path.exists(notebook_name + ".ipynb"): - os.unlink(notebook_name + ".ipynb") + if os.path.exists(notebook_name + ".ipynb"): # pyright: ignore[reportOptionalOperand] + os.unlink(notebook_name + ".ipynb") # pyright: ignore[reportOptionalOperand] def test_scaffold(): diff --git a/python_modules/libraries/dagstermill/dagstermill_tests/test_event_callback.py b/python_modules/libraries/dagstermill/dagstermill_tests/test_event_callback.py index ebe0ddff6598d..9f3395a47429b 100644 --- a/python_modules/libraries/dagstermill/dagstermill_tests/test_event_callback.py +++ b/python_modules/libraries/dagstermill/dagstermill_tests/test_event_callback.py @@ -14,7 +14,7 @@ def test_event_callback_logging(): def _event_callback(record, _cursor): assert isinstance(record, EventLogEntry) if record.is_dagster_event: - events[record.dagster_event.event_type].append(record) + events[record.dagster_event.event_type].append(record) # pyright: ignore[reportOptionalMemberAccess] recon_job = ReconstructableJob.for_module( "dagstermill.examples.repository", diff --git a/python_modules/libraries/dagstermill/dagstermill_tests/test_io.py b/python_modules/libraries/dagstermill/dagstermill_tests/test_io.py index 21848581e65be..520600c8f1931 100644 --- a/python_modules/libraries/dagstermill/dagstermill_tests/test_io.py +++ b/python_modules/libraries/dagstermill/dagstermill_tests/test_io.py @@ -67,10 +67,10 @@ def test_yes_output_notebook_yes_io_manager(): output_path = ( materializations[0] - .event_specific_data.materialization.metadata["Executed notebook"] + .event_specific_data.materialization.metadata["Executed notebook"] # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] .path ) - assert os.path.exists(output_path) + assert os.path.exists(output_path) # pyright: ignore[reportArgumentType] - with open(output_path, "rb") as f: + with open(output_path, "rb") as f: # pyright: ignore[reportCallIssue,reportArgumentType] assert f.read() == 
result.output_for_node("load_notebook") diff --git a/python_modules/libraries/dagstermill/dagstermill_tests/test_manager.py b/python_modules/libraries/dagstermill/dagstermill_tests/test_manager.py index f9b3284554aa8..6abeb746a18ed 100644 --- a/python_modules/libraries/dagstermill/dagstermill_tests/test_manager.py +++ b/python_modules/libraries/dagstermill/dagstermill_tests/test_manager.py @@ -143,13 +143,13 @@ def test_out_of_job_yield_event(): def test_in_job_manager_resources(): with in_job_manager() as manager: - assert "output_notebook_io_manager" in manager.context.resources._asdict() - assert len(manager.context.resources._asdict()) == 1 + assert "output_notebook_io_manager" in manager.context.resources._asdict() # pyright: ignore[reportOptionalMemberAccess] + assert len(manager.context.resources._asdict()) == 1 # pyright: ignore[reportOptionalMemberAccess] def test_in_job_manager_op_config(): with in_job_manager() as manager: - assert manager.context.op_config is None + assert manager.context.op_config is None # pyright: ignore[reportOptionalMemberAccess] with in_job_manager( job_name="hello_world_config_job", @@ -160,7 +160,7 @@ def test_in_job_manager_op_config(): ).to_dict(), step_key="hello_world_config", ) as manager: - assert manager.context.op_config == {"greeting": "hello"} + assert manager.context.op_config == {"greeting": "hello"} # pyright: ignore[reportOptionalMemberAccess] with in_job_manager( job_name="hello_world_config_job", @@ -177,7 +177,7 @@ def test_in_job_manager_op_config(): ).to_dict(), step_key="hello_world_config", ) as manager: - assert manager.context.op_config == {"greeting": "bonjour"} + assert manager.context.op_config == {"greeting": "bonjour"} # pyright: ignore[reportOptionalMemberAccess] with in_job_manager( job_name="hello_world_config_job", @@ -196,7 +196,7 @@ def test_in_job_manager_op_config(): ).to_dict(), step_key="goodbye_config", ) as manager: - assert manager.context.op_config == {"farewell": "goodbye"} + assert 
manager.context.op_config == {"farewell": "goodbye"} # pyright: ignore[reportOptionalMemberAccess] def test_in_job_manager_with_resources(): @@ -214,7 +214,7 @@ def test_in_job_manager_with_resources(): run_config={"resources": {"list": {"config": path}}}, step_key="hello_world_resource", ) as manager: - assert "list" in manager.context.resources._asdict() + assert "list" in manager.context.resources._asdict() # pyright: ignore[reportOptionalMemberAccess] with open(path, "rb") as fd: messages = pickle.load(fd) diff --git a/python_modules/libraries/dagstermill/dagstermill_tests/test_ops.py b/python_modules/libraries/dagstermill/dagstermill_tests/test_ops.py index 616f4d2c9eeba..f55d5118a5c28 100644 --- a/python_modules/libraries/dagstermill/dagstermill_tests/test_ops.py +++ b/python_modules/libraries/dagstermill/dagstermill_tests/test_ops.py @@ -39,8 +39,8 @@ def cleanup_result_notebook(result): ] for materialization_event in materialization_events: result_path = get_path(materialization_event) - if os.path.exists(result_path): - os.unlink(result_path) + if os.path.exists(result_path): # pyright: ignore[reportArgumentType] + os.unlink(result_path) # pyright: ignore[reportArgumentType] @contextmanager @@ -167,12 +167,12 @@ def _strip_execution_metadata(nb): for materialization_event in materialization_events: result_path = get_path(materialization_event) - if result_path.endswith(".ipynb"): - with open(result_path, encoding="utf8") as fd: + if result_path.endswith(".ipynb"): # pyright: ignore[reportOptionalMemberAccess] + with open(result_path, encoding="utf8") as fd: # pyright: ignore[reportArgumentType] nb = nbformat.read(fd, as_version=4) ep = ExecutePreprocessor() ep.preprocess(nb) - with open(result_path, encoding="utf8") as fd: + with open(result_path, encoding="utf8") as fd: # pyright: ignore[reportArgumentType] expected = _strip_execution_metadata(nb) actual = _strip_execution_metadata(nbformat.read(fd, as_version=4)) assert actual == expected @@ -406,7 
+406,7 @@ def test_resources_notebook_with_exception(): assert not result.success assert result.all_events[8].event_type.value == "STEP_FAILURE" assert ( - "raise Exception()" in result.all_events[8].event_specific_data.error.cause.message + "raise Exception()" in result.all_events[8].event_specific_data.error.cause.message # pyright: ignore[reportOptionalMemberAccess,reportAttributeAccessIssue] ) # Expect something like: @@ -519,7 +519,7 @@ def test_reserved_tags_not_overridden(): def test_default_description(): test_op = define_dagstermill_op(BACKING_NB_NAME, BACKING_NB_PATH) - assert test_op.description.startswith("This op is backed by the notebook at ") + assert test_op.description.startswith("This op is backed by the notebook at ") # pyright: ignore[reportOptionalMemberAccess] def test_custom_description(): @@ -551,7 +551,7 @@ def test_failure(capsys): "failure_job", {"execution": {"config": {"in_process": {}}}}, raise_on_error=False ) as result: assert ( - result.failure_data_for_node("yield_failure").user_failure_data.description + result.failure_data_for_node("yield_failure").user_failure_data.description # pyright: ignore[reportOptionalMemberAccess] == "bad bad notebook" ) diff --git a/scripts/auto_ignore_pyright_errors.py b/scripts/auto_ignore_pyright_errors.py index ffd9d72be840c..c21172f15dfac 100755 --- a/scripts/auto_ignore_pyright_errors.py +++ b/scripts/auto_ignore_pyright_errors.py @@ -53,8 +53,8 @@ def main(): rule = match.group("rules").strip() # Only append if it's a valid rule if rule and "pyright:" not in rule: - file_path = error_pattern.match(previous_line).group("file_name") - line_number = int(error_pattern.match(previous_line).group("line_number")) + file_path = error_pattern.match(previous_line).group("file_name") # pyright: ignore[reportOptionalMemberAccess] + line_number = int(error_pattern.match(previous_line).group("line_number")) # pyright: ignore[reportOptionalMemberAccess] errors[file_path][line_number].append(rule) # Reset for 
the next error message diff --git a/scripts/gen_airbyte_classes.py b/scripts/gen_airbyte_classes.py index 6ab2c3a3c2716..5a46ff960da95 100644 --- a/scripts/gen_airbyte_classes.py +++ b/scripts/gen_airbyte_classes.py @@ -568,9 +568,9 @@ def gen_airbyte_classes(airbyte_repo_root, airbyte_tag): spec = importlib.util.spec_from_file_location( "module.name", out_file ) - foo = importlib.util.module_from_spec(spec) + foo = importlib.util.module_from_spec(spec) # pyright: ignore[reportArgumentType] sys.modules["module.name"] = foo - spec.loader.exec_module(foo) + spec.loader.exec_module(foo) # pyright: ignore[reportOptionalMemberAccess] out = new_out successes += 1 diff --git a/scripts/run-pyright.py b/scripts/run-pyright.py index c8d6663584580..59a38b3448e7c 100755 --- a/scripts/run-pyright.py +++ b/scripts/run-pyright.py @@ -40,12 +40,6 @@ ), ) -parser.add_argument( - "--unannotated", - action="store_true", - default=False, - help="Analyze unannotated functions. This is not currently used in CI.", -) parser.add_argument( "--diff", @@ -124,7 +118,6 @@ class Params(TypedDict): - unannotated: bool mode: Literal["env", "path"] targets: Sequence[str] json: bool @@ -240,7 +233,6 @@ def get_params(args: argparse.Namespace) -> Params: update_pins=args.update_pins, json=args.json, rebuild=args.rebuild, - unannotated=args.unannotated, no_cache=args.no_cache, venv_python=venv_python, skip_typecheck=args.skip_typecheck, @@ -413,11 +405,10 @@ def run_pyright( env: str, paths: Optional[Sequence[str]], rebuild: bool, - unannotated: bool, pinned_deps: bool, venv_python: str, ) -> RunResult: - with temp_pyright_config_file(env, unannotated) as config_path: + with temp_pyright_config_file(env) as config_path: base_pyright_cmd = " ".join( [ "pyright", @@ -443,7 +434,7 @@ def run_pyright( @contextmanager -def temp_pyright_config_file(env: str, unannotated: bool) -> Iterator[str]: +def temp_pyright_config_file(env: str) -> Iterator[str]: with open("pyproject.toml", "r", encoding="utf-8") as 
f: toml = tomli.loads(f.read()) config = toml["tool"]["pyright"] @@ -453,7 +444,6 @@ def temp_pyright_config_file(env: str, unannotated: bool) -> Iterator[str]: config["include"] = load_path_file(include_path) if os.path.exists(exclude_path): config["exclude"] += load_path_file(exclude_path) - config["analyzeUnannotatedFunctions"] = unannotated temp_config_path = f"pyrightconfig-{env}.json" print("Creating temporary pyright config file at", temp_config_path) try: @@ -587,7 +577,6 @@ def print_report(result: RunResult) -> None: env, paths=env_path_map[env], rebuild=params["rebuild"], - unannotated=params["unannotated"], pinned_deps=params["update_pins"], venv_python=params["venv_python"], )