diff --git a/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py b/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
index c546978f13096..e88e8c01e467a 100644
--- a/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
+++ b/examples/docs_snippets/docs_snippets/guides/migrations/migrating_airflow_to_dagster.py
@@ -1,5 +1,6 @@
 # ruff: isort: skip_file
 # ruff: noqa: T201,D415
+# type: ignore # problematic imports in example code


 def scope_simple_airflow_task():
diff --git a/python_modules/dagster/dagster/_generate/download.py b/python_modules/dagster/dagster/_generate/download.py
index c8b7ca7223f81..c0365830c1954 100644
--- a/python_modules/dagster/dagster/_generate/download.py
+++ b/python_modules/dagster/dagster/_generate/download.py
@@ -37,6 +37,7 @@
     "feature_graph_backed_assets",
     "getting_started_etl_tutorial",
     "project_analytics",
+    "project_dagster_modal_pipes",
     "project_dagster_university_start",
     "project_du_dbt_starter",
     "project_fully_featured",
diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py b/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
index afbd41a4a8786..9a5c6656c6c44 100644
--- a/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
+++ b/python_modules/libraries/dagster-databricks/dagster_databricks/databricks.py
@@ -374,7 +374,7 @@ def wait_for_run_to_complete(
         logger: logging.Logger,
         databricks_run_id: int,
         poll_interval_sec: float,
-        max_wait_time_sec: int,
+        max_wait_time_sec: float,
         verbose_logs: bool = True,
     ) -> None:
         logger.info(f"Waiting for Databricks run `{databricks_run_id}` to complete...")
@@ -413,7 +413,7 @@ def __init__(
         azure_client_secret: Optional[str] = None,
         azure_tenant_id: Optional[str] = None,
         poll_interval_sec: float = 5,
-        max_wait_time_sec: int = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
+        max_wait_time_sec: float = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
     ):
         self.host = check.opt_str_param(host, "host")
         self.token = check.opt_str_param(token, "token")
diff --git a/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py b/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
index 56d70045daae9..bb0afabb36e54 100644
--- a/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
+++ b/python_modules/libraries/dagster-databricks/dagster_databricks/ops.py
@@ -93,7 +93,7 @@ class DatabricksRunNowOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
@@ -205,7 +205,7 @@ class DatabricksSubmitRunOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
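Reviewer note on the two Databricks files above: `poll_interval_sec` was already typed `float`, so widening `max_wait_time_sec` from `int` to `float` makes the pair consistent, and because an `int` argument still satisfies a `float` annotation, existing callers are unaffected. A minimal sketch of a now-valid call, assuming placeholder credentials (the host and token below are illustrative, not real):

```python
# Sketch only: fractional timeouts now satisfy the widened
# `max_wait_time_sec: float` annotation. Host/token are fake placeholders.
from dagster_databricks.databricks import DatabricksClient

client = DatabricksClient(
    host="https://example.cloud.databricks.com",  # placeholder workspace URL
    token="dapi-example",  # placeholder personal access token
    poll_interval_sec=2.5,
    max_wait_time_sec=1800.5,  # a float; rejected under the old `int` annotation
)
```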
diff --git a/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py b/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
index 17ce19d31c1af..131e1475a260f 100644
--- a/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
+++ b/python_modules/libraries/dagster-deltalake/dagster_deltalake/io_manager.py
@@ -125,11 +125,13 @@ def my_table_a(my_table: pd.DataFrame):
     """

     root_uri: str = Field(description="Storage location where Delta tables are stored.")
     mode: WriteMode = Field(
-        default=WriteMode.overwrite.value, description="The write mode passed to save the output."
+        default=WriteMode.overwrite.value,  # type: ignore
+        description="The write mode passed to save the output.",
     )
     overwrite_schema: bool = Field(default=False)
     writer_engine: WriterEngine = Field(
-        default=WriterEngine.pyarrow.value, description="Engine passed to write_deltalake."
+        default=WriterEngine.pyarrow.value,  # type: ignore
+        description="Engine passed to write_deltalake.",
     )
     storage_options: Union[AzureConfig, S3Config, LocalConfig, GcsConfig] = Field(
diff --git a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
index 01936211cc2ed..867c9e6a8a30c 100644
--- a/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
+++ b/python_modules/libraries/dagster-fivetran/dagster_fivetran/resources.py
@@ -30,7 +30,7 @@
 from dagster._record import as_dict, record
 from dagster._utils.cached_method import cached_method
 from dagster._vendored.dateutil import parser
-from pydantic import Field, PrivateAttr
+from pydantic import Field
 from requests.auth import HTTPBasicAuth
 from requests.exceptions import RequestException

@@ -858,8 +858,6 @@ class FivetranWorkspace(ConfigurableResource):
         ),
     )

-    _client: FivetranClient = PrivateAttr(default=None)
-
     @cached_method
     def get_client(self) -> FivetranClient:
         return FivetranClient(
diff --git a/python_modules/libraries/dagster-ge/dagster_ge/factory.py b/python_modules/libraries/dagster-ge/dagster_ge/factory.py
index bf42e6e8a5310..b0313bc4d88df 100644
--- a/python_modules/libraries/dagster-ge/dagster_ge/factory.py
+++ b/python_modules/libraries/dagster-ge/dagster_ge/factory.py
@@ -27,7 +27,7 @@


 class GEContextResource(ConfigurableResource, IAttachDifferentObjectToOpContext):
-    ge_root_dir: str = Field(
+    ge_root_dir: Optional[str] = Field(
         default=None,
         description="The root directory for your Great Expectations project.",
     )
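Reviewer note on the `dagster-fivetran` hunks above: the `_client` `PrivateAttr` slot is redundant because `@cached_method` already memoizes `get_client` per resource instance, and its `default=None` conflicted with the `FivetranClient` annotation anyway. A minimal sketch of the same pattern, with a hypothetical client standing in for `FivetranClient`:

```python
# Sketch of the caching pattern; ExampleClient/ExampleWorkspace/api_key are
# hypothetical stand-ins, not names from the patch.
from dagster import ConfigurableResource
from dagster._utils.cached_method import cached_method


class ExampleClient:
    """Hypothetical stand-in for an API client such as FivetranClient."""

    def __init__(self, api_key: str) -> None:
        self.api_key = api_key


class ExampleWorkspace(ConfigurableResource):
    api_key: str

    @cached_method
    def get_client(self) -> ExampleClient:
        # Memoized per resource instance, so no PrivateAttr cache is needed.
        return ExampleClient(api_key=self.api_key)
```

The `dagster-ge` hunk is the mirror-image fix: a field whose default is `None` must be annotated `Optional[str]` before pydantic and the type checker will accept it.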
diff --git a/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py b/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
index da9c1f202d493..993fccede5ddd 100644
--- a/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
+++ b/python_modules/libraries/dagster-prometheus/dagster_prometheus/resources.py
@@ -46,7 +46,7 @@ def my_job():
         default=30,
         description="is how long delete will attempt to connect before giving up. Defaults to 30s.",
     )
-    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)
+    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)  # type: ignore

     @classmethod
     def _is_dagster_maintained(cls) -> bool:
diff --git a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
index 9a3c34df7f0d9..39304723fe0a1 100644
--- a/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
+++ b/python_modules/libraries/dagster-pyspark/dagster_pyspark/resources.py
@@ -132,12 +132,12 @@ def _init_session(self) -> None:
     @property
     def spark_session(self) -> Any:
         self._init_session()
-        return self._spark_session
+        return check.not_none(self._spark_session)

     @property
     def spark_context(self) -> Any:
         self._init_session()
-        return self._spark_session.sparkContext
+        return self.spark_session.sparkContext


 @dagster_maintained_resource
diff --git a/python_modules/libraries/dagster-shell/dagster_shell/ops.py b/python_modules/libraries/dagster-shell/dagster_shell/ops.py
index d22d49b41b1c3..29ff50a202978 100644
--- a/python_modules/libraries/dagster-shell/dagster_shell/ops.py
+++ b/python_modules/libraries/dagster-shell/dagster_shell/ops.py
@@ -36,7 +36,7 @@ class ShellOpConfig(Config):
         description="An optional dict of environment variables to pass to the subprocess.",
     )
     output_logging: OutputType = Field(
-        default=OutputType.BUFFER.value,
+        default=OutputType.BUFFER,
     )
     cwd: Optional[str] = Field(
         default=None, description="Working directory in which to execute shell script"
     )
diff --git a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
index 4ab7c99cf038b..55507697ee0a1 100644
--- a/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
+++ b/python_modules/libraries/dagster-tableau/dagster_tableau/assets.py
@@ -42,7 +42,7 @@ def build_tableau_materializable_assets_definition(
     def asset_fn(context: AssetExecutionContext):
         tableau = cast(BaseTableauWorkspace, getattr(context.resources, resource_key))
         with tableau.get_client() as client:
-            yield from client.refresh_and_materialize_workbooks(  # pyright: ignore[reportOptionalMemberAccess]
+            yield from client.refresh_and_materialize_workbooks(
                 specs=specs, refreshable_workbook_ids=refreshable_workbook_ids
             )
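Reviewer note on the last three files: each is a type-correctness fix rather than a behavior change. `dagster-pyspark` narrows the optional `_spark_session` with `check.not_none` before use, `dagster-tableau` drops a pyright suppression that is no longer needed, and `dagster-shell` defaults an enum-typed field to the enum member instead of its `.value` string. A minimal sketch of that last pattern, using an illustrative config class (only `OutputType.BUFFER` and `Field` come from the patch):

```python
from enum import Enum

from dagster import Config
from pydantic import Field


class OutputType(Enum):
    # Mirrors the two output modes in dagster-shell.
    STREAM = "STREAM"
    BUFFER = "BUFFER"


class ExampleShellConfig(Config):
    # The annotation is OutputType, so the default must be the enum member
    # itself; OutputType.BUFFER.value is a plain string and fails type checks.
    output_logging: OutputType = Field(default=OutputType.BUFFER)
```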