From 6b70b14908fc28a53a444c4ed9dc96d3d3c91523 Mon Sep 17 00:00:00 2001
From: Alex Langenfeld
Date: Wed, 11 Dec 2024 18:17:05 -0600
Subject: [PATCH] fix pyright (#26420)

fixes for pyright issues that manifested with updated pins

## How I Tested These Changes

bk
---
 .../guides/migrations/migrating_airflow_to_dagster.py       | 1 +
 .../airlift/operator_migration/kubernetes_pod_operator.py   | 1 +
 examples/experimental/external_assets/airflow_example.py    | 1 +
 python_modules/dagster/dagster/_generate/download.py        | 1 +
 .../pythonic_config_tests/test_basic_pythonic_config.py     | 4 ++--
 .../dagster_tests/storage_tests/test_upath_io_manager.py    | 2 +-
 .../libraries/dagster-airbyte/dagster_airbyte/resources.py  | 2 +-
 .../dagster-databricks/dagster_databricks/databricks.py     | 4 ++--
 .../libraries/dagster-databricks/dagster_databricks/ops.py  | 4 ++--
 .../dagster-deltalake/dagster_deltalake/io_manager.py       | 6 ++++--
 .../dagster-fivetran/dagster_fivetran/resources.py          | 4 +---
 python_modules/libraries/dagster-ge/dagster_ge/factory.py   | 2 +-
 .../dagster-prometheus/dagster_prometheus/resources.py      | 2 +-
 .../libraries/dagster-pyspark/dagster_pyspark/resources.py  | 4 ++--
 python_modules/libraries/dagster-shell/dagster_shell/ops.py | 2 +-
 .../libraries/dagster-tableau/dagster_tableau/assets.py     | 2 +-
 16 files changed, 23 insertions(+), 19 deletions(-)

diff --git a/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py b/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
index c546978f13096..e88e8c01e467a 100644
--- a/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
+++ b/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
@@ -1,5 +1,6 @@
 # ruff: isort: skip_file
 # ruff: noqa: T201,D415
+# type: ignore # problematic imports in example code
 
 
 def scope_simple_airflow_task():
diff --git a/examples/docs_snippets/docs_snippets/integrations/airlift/operator_migration/kubernetes_pod_operator.py b/examples/docs_snippets/docs_snippets/integrations/airlift/operator_migration/kubernetes_pod_operator.py
index bde2a3e5b1a86..aeafe9c74b84f 100644
--- a/examples/docs_snippets/docs_snippets/integrations/airlift/operator_migration/kubernetes_pod_operator.py
+++ b/examples/docs_snippets/docs_snippets/integrations/airlift/operator_migration/kubernetes_pod_operator.py
@@ -1,3 +1,4 @@
+# type: ignore
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 
 k8s_hello_world = KubernetesPodOperator(
diff --git a/examples/experimental/external_assets/airflow_example.py b/examples/experimental/external_assets/airflow_example.py
index 9585d06d701f6..b326ea323bfbc 100644
--- a/examples/experimental/external_assets/airflow_example.py
+++ b/examples/experimental/external_assets/airflow_example.py
@@ -1,3 +1,4 @@
+# type: ignore
 from airflow import DAG
 from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
 from pendulum import datetime
diff --git a/python_modules/dagster/dagster/_generate/download.py b/python_modules/dagster/dagster/_generate/download.py
index c8b7ca7223f81..c0365830c1954 100644
--- a/python_modules/dagster/dagster/_generate/download.py
+++ b/python_modules/dagster/dagster/_generate/download.py
@@ -37,6 +37,7 @@
     "feature_graph_backed_assets",
     "getting_started_etl_tutorial",
     "project_analytics",
+    "project_dagster_modal_pipes",
     "project_dagster_university_start",
     "project_du_dbt_starter",
     "project_fully_featured",
diff --git a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py
index 0098cb7588ac2..9e7941ed83a15 100644
--- a/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py
+++ b/python_modules/dagster/dagster_tests/core_tests/pythonic_config_tests/test_basic_pythonic_config.py
@@ -707,7 +707,7 @@ def test_structured_run_config_optional() -> None:
     class ANewConfigOpConfig(Config):
         a_string: Optional[str]
         an_int: Optional[int] = None
-        a_float: float = PyField(None)
+        a_float: float = PyField(None)  # type: ignore
 
     executed = {}
 
@@ -805,7 +805,7 @@ def my_asset(config: AnAssetConfig):
 
 def test_structured_run_config_assets_optional() -> None:
     class AnAssetConfig(Config):
-        a_string: str = PyField(None)
+        a_string: str = PyField(None)  # type: ignore
         an_int: Optional[int] = None
 
     executed = {}
diff --git a/python_modules/dagster/dagster_tests/storage_tests/test_upath_io_manager.py b/python_modules/dagster/dagster_tests/storage_tests/test_upath_io_manager.py
index da3b9ee155a37..be3858701e26f 100644
--- a/python_modules/dagster/dagster_tests/storage_tests/test_upath_io_manager.py
+++ b/python_modules/dagster/dagster_tests/storage_tests/test_upath_io_manager.py
@@ -464,7 +464,7 @@ def my_asset() -> Any:
 
 
 class AsyncJSONIOManager(ConfigurableIOManager, UPathIOManager):
-    base_dir: str = PydanticField(None, description="Base directory for storing files.")
+    base_dir: str = PydanticField(None, description="Base directory for storing files.")  # type: ignore
 
     _base_path: UPath = PrivateAttr()
 
diff --git a/python_modules/libraries/dagster-airbyte/dagster_airbyte/resources.py b/python_modules/libraries/dagster-airbyte/dagster_airbyte/resources.py
index 0625b7c1073b0..d17f975d3586a 100644
--- a/python_modules/libraries/dagster-airbyte/dagster_airbyte/resources.py
+++ b/python_modules/libraries/dagster-airbyte/dagster_airbyte/resources.py
@@ -1018,7 +1018,7 @@ class AirbyteCloudWorkspace(ConfigurableResource):
         description="Time (in seconds) after which the requests to Airbyte are declared timed out.",
     )
 
-    _client: AirbyteCloudClient = PrivateAttr(default=None)
+    _client: AirbyteCloudClient = PrivateAttr(default=None)  # type: ignore
 
     @cached_method
     def get_client(self) -> AirbyteCloudClient:
diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py b/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
index afbd41a4a8786..9a5c6656c6c44 100644
--- a/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
+++ b/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
@@ -374,7 +374,7 @@ def wait_for_run_to_complete(
         logger: logging.Logger,
         databricks_run_id: int,
         poll_interval_sec: float,
-        max_wait_time_sec: int,
+        max_wait_time_sec: float,
         verbose_logs: bool = True,
     ) -> None:
         logger.info(f"Waiting for Databricks run `{databricks_run_id}` to complete...")
@@ -413,7 +413,7 @@ def __init__(
         azure_client_secret: Optional[str] = None,
         azure_tenant_id: Optional[str] = None,
         poll_interval_sec: float = 5,
-        max_wait_time_sec: int = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
+        max_wait_time_sec: float = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
     ):
         self.host = check.opt_str_param(host, "host")
         self.token = check.opt_str_param(token, "token")
diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py b/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
index 56d70045daae9..bb0afabb36e54 100644
--- a/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
+++ b/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
@@ -93,7 +93,7 @@ class DatabricksRunNowOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
@@ -205,7 +205,7 @@ class DatabricksSubmitRunOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
diff --git a/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py b/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
index 17ce19d31c1af..131e1475a260f 100644
--- a/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
+++ b/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
@@ -125,11 +125,13 @@ def my_table_a(my_table: pd.DataFrame):
 
     root_uri: str = Field(description="Storage location where Delta tables are stored.")
     mode: WriteMode = Field(
-        default=WriteMode.overwrite.value, description="The write mode passed to save the output."
+        default=WriteMode.overwrite.value,  # type: ignore
+        description="The write mode passed to save the output.",
     )
     overwrite_schema: bool = Field(default=False)
     writer_engine: WriterEngine = Field(
-        default=WriterEngine.pyarrow.value, description="Engine passed to write_deltalake."
+        default=WriterEngine.pyarrow.value,  # type: ignore
+        description="Engine passed to write_deltalake.",
     )
 
     storage_options: Union[AzureConfig, S3Config, LocalConfig, GcsConfig] = Field(
diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
index 01936211cc2ed..867c9e6a8a30c 100644
--- a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
+++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
@@ -30,7 +30,7 @@
 from dagster._record import as_dict, record
 from dagster._utils.cached_method import cached_method
 from dagster._vendored.dateutil import parser
-from pydantic import Field, PrivateAttr
+from pydantic import Field
 from requests.auth import HTTPBasicAuth
 from requests.exceptions import RequestException
 
@@ -858,8 +858,6 @@ class FivetranWorkspace(ConfigurableResource):
         ),
     )
 
-    _client: FivetranClient = PrivateAttr(default=None)
-
     @cached_method
     def get_client(self) -> FivetranClient:
         return FivetranClient(
diff --git a/python_modules/libraries/dagster-ge/dagster_ge/factory.py b/python_modules/libraries/dagster-ge/dagster_ge/factory.py
index bf42e6e8a5310..b0313bc4d88df 100644
--- a/python_modules/libraries/dagster-ge/dagster_ge/factory.py
+++ b/python_modules/libraries/dagster-ge/dagster_ge/factory.py
@@ -27,7 +27,7 @@
 
 
 class GEContextResource(ConfigurableResource, IAttachDifferentObjectToOpContext):
-    ge_root_dir: str = Field(
+    ge_root_dir: Optional[str] = Field(
         default=None,
         description="The root directory for your Great Expectations project.",
     )
diff --git a/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py b/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
index da9c1f202d493..993fccede5ddd 100644
--- a/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
+++ b/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
@@ -46,7 +46,7 @@ def my_job():
         default=30,
         description="is how long delete will attempt to connect before giving up. Defaults to 30s.",
     )
-    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)
+    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)  # type: ignore
 
     @classmethod
     def _is_dagster_maintained(cls) -> bool:
diff --git a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
index 9a3c34df7f0d9..71fb888515d6b 100644
--- a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
+++ b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
@@ -132,12 +132,12 @@ def _init_session(self) -> None:
     @property
     def spark_session(self) -> Any:
         self._init_session()
-        return self._spark_session
+        return check.not_none(self._spark_session)
 
     @property
     def spark_context(self) -> Any:
         self._init_session()
-        return self._spark_session.sparkContext
+        return check.not_none(self._spark_session).sparkContext
 
 
 @dagster_maintained_resource
diff --git a/python_modules/libraries/dagster-shell/dagster_shell/ops.py b/python_modules/libraries/dagster-shell/dagster_shell/ops.py
index d22d49b41b1c3..ae0cbf32c6eb3 100644
--- a/python_modules/libraries/dagster-shell/dagster_shell/ops.py
+++ b/python_modules/libraries/dagster-shell/dagster_shell/ops.py
@@ -36,7 +36,7 @@ class ShellOpConfig(Config):
         description="An optional dict of environment variables to pass to the subprocess.",
     )
     output_logging: OutputType = Field(
-        default=OutputType.BUFFER.value,
+        default=OutputType.BUFFER.value,  # type: ignore
     )
     cwd: Optional[str] = Field(
         default=None, description="Working directory in which to execute shell script"
diff --git a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
index 4ab7c99cf038b..55507697ee0a1 100644
--- a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
+++ b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
@@ -42,7 +42,7 @@ def build_tableau_materializable_assets_definition(
     def asset_fn(context: AssetExecutionContext):
         tableau = cast(BaseTableauWorkspace, getattr(context.resources, resource_key))
         with tableau.get_client() as client:
-            yield from client.refresh_and_materialize_workbooks(  # pyright: ignore[reportOptionalMemberAccess]
+            yield from client.refresh_and_materialize_workbooks(
                specs=specs, refreshable_workbook_ids=refreshable_workbook_ids
            )
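
Most of the fixes above follow a few recurring pyright patterns: widening an annotation so its default actually type-checks (`ge_root_dir: Optional[str]`, `max_wait_time_sec: float`), suppressing a pydantic default-vs-annotation mismatch with `# type: ignore` where widening would ripple into the public API, and narrowing an optional at runtime with `check.not_none` (the pyspark resource). Below is a minimal sketch of the first two patterns, using a plain pydantic model and hypothetical field names rather than the Dagster classes touched in this patch:

```python
from typing import Optional

from pydantic import BaseModel, Field


class ExampleConfig(BaseModel):
    # Preferred fix: widen the annotation so the None default type-checks.
    root_dir: Optional[str] = Field(
        default=None, description="Hypothetical optional root directory."
    )

    # Fallback fix: keep the narrower public annotation and silence the
    # default-vs-annotation mismatch, as several fields in the diff above do.
    timeout: float = Field(None, description="Hypothetical timeout in seconds.")  # type: ignore
```

Which fix is appropriate depends on whether callers should ever observe `None`; the patch uses both, and reserves `check.not_none` for values such as `_spark_session` that are guaranteed to be initialized before use.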