fix pyright (#26420)
Fixes for pyright issues that manifested with updated dependency pins.

## How I Tested These Changes

bk
alangenfeld authored Dec 12, 2024
1 parent 3a2bdd8 commit 6b70b14
Showing 16 changed files with 23 additions and 19 deletions.
@@ -1,5 +1,6 @@
 # ruff: isort: skip_file
 # ruff: noqa: T201,D415
+# type: ignore # problematic imports in example code
 
 
 def scope_simple_airflow_task():
@@ -1,3 +1,4 @@
+# type: ignore
 from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator
 
 k8s_hello_world = KubernetesPodOperator(
1 change: 1 addition & 0 deletions examples/experimental/external_assets/airflow_example.py
@@ -1,3 +1,4 @@
+# type: ignore
 from airflow import DAG
 from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
 from pendulum import datetime
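
The three hunks above share one pattern: the Airflow example files import modules that pyright cannot resolve (or that type poorly under the updated pins), so each file gets a single file-level `# type: ignore` rather than a suppression per import. A minimal sketch of the pattern — the imported module here is a hypothetical stand-in:

```python
# type: ignore  # file-level: must sit in the initial comment block, before any code
from some_untyped_lib import UntypedOperator  # hypothetical unresolved import

task = UntypedOperator(name="hello-world")
```

Per PEP 484, a `# type: ignore` in a file's initial comment block silences the checker for the whole module, which is a reasonable scope for example code that is not expected to type-check.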
1 change: 1 addition & 0 deletions python_modules/dagster/dagster/_generate/download.py
@@ -37,6 +37,7 @@
     "feature_graph_backed_assets",
     "getting_started_etl_tutorial",
     "project_analytics",
+    "project_dagster_modal_pipes",
     "project_dagster_university_start",
     "project_du_dbt_starter",
     "project_fully_featured",
@@ -707,7 +707,7 @@ def test_structured_run_config_optional() -> None:
     class ANewConfigOpConfig(Config):
         a_string: Optional[str]
         an_int: Optional[int] = None
-        a_float: float = PyField(None)
+        a_float: float = PyField(None)  # type: ignore
 
     executed = {}
 
@@ -805,7 +805,7 @@ def my_asset(config: AnAssetConfig):
 
 def test_structured_run_config_assets_optional() -> None:
     class AnAssetConfig(Config):
-        a_string: str = PyField(None)
+        a_string: str = PyField(None)  # type: ignore
         an_int: Optional[int] = None
 
     executed = {}
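
The `# type: ignore` comments here paper over a mismatch the updated pins surfaced: `PyField(None)` (pydantic's `Field`) supplies a `None` default for fields annotated with non-Optional types like `float` and `str`. A minimal sketch of the mismatch and the honest alternative, assuming a plain pydantic `BaseModel`:

```python
from typing import Optional

from pydantic import BaseModel, Field

class MyConfig(BaseModel):
    # what the tests keep: a None default behind a non-Optional annotation;
    # pyright flags this, so the commit suppresses it in place
    a_float: float = Field(None)  # type: ignore
    # the alternative: widen the annotation to admit the None default
    b_float: Optional[float] = Field(None)
```

Since these are tests exercising optional-config behavior, suppressing in place preserves exactly what the test checks instead of changing its shape.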
@@ -464,7 +464,7 @@ def my_asset() -> Any:
 
 
 class AsyncJSONIOManager(ConfigurableIOManager, UPathIOManager):
-    base_dir: str = PydanticField(None, description="Base directory for storing files.")
+    base_dir: str = PydanticField(None, description="Base directory for storing files.")  # type: ignore
 
     _base_path: UPath = PrivateAttr()
 
@@ -1018,7 +1018,7 @@ class AirbyteCloudWorkspace(ConfigurableResource):
         description="Time (in seconds) after which the requests to Airbyte are declared timed out.",
     )
 
-    _client: AirbyteCloudClient = PrivateAttr(default=None)
+    _client: AirbyteCloudClient = PrivateAttr(default=None)  # type: ignore
 
     @cached_method
     def get_client(self) -> AirbyteCloudClient:
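
Same story for `PrivateAttr(default=None)`: the private attribute is populated lazily, so its declared type stays non-Optional and the `None` default gets a suppression. A minimal sketch, with a hypothetical client class standing in for `AirbyteCloudClient`:

```python
from pydantic import BaseModel, PrivateAttr

class ApiClient:  # hypothetical stand-in for the real client type
    pass

class Workspace(BaseModel):
    # populated lazily after construction, so the annotation stays non-Optional
    # and the None default needs a suppression
    _client: ApiClient = PrivateAttr(default=None)  # type: ignore
```

Widening to `Optional[ApiClient]` would avoid the ignore but would force a `None` check at every access site.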
@@ -374,7 +374,7 @@ def wait_for_run_to_complete(
         logger: logging.Logger,
         databricks_run_id: int,
         poll_interval_sec: float,
-        max_wait_time_sec: int,
+        max_wait_time_sec: float,
         verbose_logs: bool = True,
     ) -> None:
         logger.info(f"Waiting for Databricks run `{databricks_run_id}` to complete...")
@@ -413,7 +413,7 @@ def __init__(
         azure_client_secret: Optional[str] = None,
         azure_tenant_id: Optional[str] = None,
         poll_interval_sec: float = 5,
-        max_wait_time_sec: int = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
+        max_wait_time_sec: float = DEFAULT_RUN_MAX_WAIT_TIME_SEC,
     ):
         self.host = check.opt_str_param(host, "host")
         self.token = check.opt_str_param(token, "token")
@@ -93,7 +93,7 @@ class DatabricksRunNowOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
@@ -205,7 +205,7 @@ class DatabricksSubmitRunOpConfig(Config):
         default=_poll_interval_seconds,
         description="Check whether the Databricks Job is done at this interval, in seconds.",
     )
-    max_wait_time_seconds: int = Field(
+    max_wait_time_seconds: float = Field(
         default=_max_wait_time_seconds,
         description=(
             "If the Databricks Job is not complete after this length of time, in seconds,"
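
The databricks hunks widen `int` annotations to `float` for the wait-time parameters. PEP 484's numeric special-casing makes this a one-way street: an `int` argument is accepted where `float` is expected, but not vice versa, so if `DEFAULT_RUN_MAX_WAIT_TIME_SEC` (or an incoming config value) is typed as `float`, the old `int` annotations stop checking. A sketch of the asymmetry:

```python
def wait_widened(max_wait_time_sec: float) -> None:
    ...

wait_widened(5)    # OK: PEP 484 lets an int stand in for a float
wait_widened(5.0)  # OK

def wait_narrow(max_wait_time_sec: int) -> None:
    ...

wait_narrow(5.0)   # pyright error: float is not assignable to int
```

Widening the parameter type therefore fixes the error without touching any existing call site.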
@@ -125,11 +125,13 @@ def my_table_a(my_table: pd.DataFrame):
 
     root_uri: str = Field(description="Storage location where Delta tables are stored.")
     mode: WriteMode = Field(
-        default=WriteMode.overwrite.value, description="The write mode passed to save the output."
+        default=WriteMode.overwrite.value,  # type: ignore
+        description="The write mode passed to save the output.",
     )
     overwrite_schema: bool = Field(default=False)
     writer_engine: WriterEngine = Field(
-        default=WriterEngine.pyarrow.value, description="Engine passed to write_deltalake."
+        default=WriterEngine.pyarrow.value,  # type: ignore
+        description="Engine passed to write_deltalake.",
     )
 
     storage_options: Union[AzureConfig, S3Config, LocalConfig, GcsConfig] = Field(
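
Here the default is the enum member's `.value` (a `str`) while the field is annotated with the enum type itself, so pyright reports an incompatible default. The commit suppresses rather than switching to the member, presumably to keep the serialized default unchanged. A minimal sketch of both spellings, with a toy enum:

```python
from enum import Enum

from pydantic import BaseModel, Field

class WriteMode(Enum):
    overwrite = "overwrite"
    append = "append"

class TableConfig(BaseModel):
    # .value is a str, the annotation is WriteMode: pyright flags the default
    mode: WriteMode = Field(default=WriteMode.overwrite.value)  # type: ignore
    # passing the member itself satisfies the checker
    other_mode: WriteMode = Field(default=WriteMode.append)
```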
@@ -30,7 +30,7 @@
 from dagster._record import as_dict, record
 from dagster._utils.cached_method import cached_method
 from dagster._vendored.dateutil import parser
-from pydantic import Field, PrivateAttr
+from pydantic import Field
 from requests.auth import HTTPBasicAuth
 from requests.exceptions import RequestException
 
@@ -858,8 +858,6 @@ class FivetranWorkspace(ConfigurableResource):
         ),
     )
 
-    _client: FivetranClient = PrivateAttr(default=None)
-
     @cached_method
     def get_client(self) -> FivetranClient:
         return FivetranClient(
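
The fivetran change is the one place the offending line could simply be deleted: `get_client` is wrapped in dagster's internal `@cached_method`, so the `None`-defaulted `_client` private attribute was dead weight, and the now-unused `PrivateAttr` import goes with it. A sketch of the same lazy, build-once shape using `functools.cached_property` as a stand-in for `cached_method` (names hypothetical):

```python
from functools import cached_property

class ApiClient:  # hypothetical stand-in for FivetranClient
    def __init__(self, api_key: str) -> None:
        self.api_key = api_key

class Workspace:
    def __init__(self, api_key: str) -> None:
        self.api_key = api_key

    @cached_property
    def client(self) -> ApiClient:
        # constructed once on first access; no None-defaulted attribute,
        # so nothing for pyright to complain about
        return ApiClient(self.api_key)
```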
2 changes: 1 addition & 1 deletion python_modules/libraries/dagster-ge/dagster_ge/factory.py
@@ -27,7 +27,7 @@
 
 
 class GEContextResource(ConfigurableResource, IAttachDifferentObjectToOpContext):
-    ge_root_dir: str = Field(
+    ge_root_dir: Optional[str] = Field(
         default=None,
         description="The root directory for your Great Expectations project.",
     )
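
The dagster-ge fix takes the widening route instead of a suppression: `ge_root_dir` genuinely defaults to `None`, so `Optional[str]` is the accurate type. The trade-off is that callers must now handle the `None` case, which the checker enforces. A minimal sketch (fallback value hypothetical):

```python
from typing import Optional

from pydantic import BaseModel, Field

class GEResource(BaseModel):
    ge_root_dir: Optional[str] = Field(default=None)

res = GEResource()
# the Optional annotation pushes the None handling to the access site
root_dir = res.ge_root_dir if res.ge_root_dir is not None else "."
```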
@@ -46,7 +46,7 @@ def my_job():
         default=30,
         description="is how long delete will attempt to connect before giving up. Defaults to 30s.",
     )
-    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)
+    _registry: prometheus_client.CollectorRegistry = PrivateAttr(default=None)  # type: ignore
 
     @classmethod
     def _is_dagster_maintained(cls) -> bool:
@@ -132,12 +132,12 @@ def _init_session(self) -> None:
     @property
     def spark_session(self) -> Any:
         self._init_session()
-        return self._spark_session
+        return check.not_none(self._spark_session)
 
     @property
     def spark_context(self) -> Any:
         self._init_session()
-        return self._spark_session.sparkContext
+        return check.not_none(self._spark_session).sparkContext
 
 
 @dagster_maintained_resource
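
In the pyspark resource, `_spark_session` is `Optional` until `_init_session` populates it, and pyright cannot see that the preceding call guarantees a value. `check.not_none` (dagster's internal helper) both asserts at runtime and narrows the static type. A self-contained sketch of how such a narrowing helper works, with a string standing in for the real session object:

```python
from typing import Optional, TypeVar

T = TypeVar("T")

def not_none(value: Optional[T]) -> T:
    # runtime assertion that doubles as static narrowing: the return type
    # is T, not Optional[T]
    if value is None:
        raise ValueError("expected a non-None value")
    return value

class SparkResource:
    _session: Optional[str] = None  # stand-in for the real session object

    def _init_session(self) -> None:
        if self._session is None:
            self._session = "session"

    @property
    def session(self) -> str:
        self._init_session()
        # pyright can't prove _init_session assigned _session, so narrow here
        return not_none(self._session)
```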
@@ -36,7 +36,7 @@ class ShellOpConfig(Config):
         description="An optional dict of environment variables to pass to the subprocess.",
     )
     output_logging: OutputType = Field(
-        default=OutputType.BUFFER.value,
+        default=OutputType.BUFFER.value,  # type: ignore
     )
     cwd: Optional[str] = Field(
         default=None, description="Working directory in which to execute shell script"
@@ -42,7 +42,7 @@ def build_tableau_materializable_assets_definition(
     def asset_fn(context: AssetExecutionContext):
         tableau = cast(BaseTableauWorkspace, getattr(context.resources, resource_key))
         with tableau.get_client() as client:
-            yield from client.refresh_and_materialize_workbooks(  # pyright: ignore[reportOptionalMemberAccess]
+            yield from client.refresh_and_materialize_workbooks(
                 specs=specs, refreshable_workbook_ids=refreshable_workbook_ids
             )
 
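
The tableau hunk is the inverse housekeeping: under the updated pins, the client yielded by `get_client` is no longer seen as Optional, so the old `# pyright: ignore[reportOptionalMemberAccess]` suppresses nothing and comes out. A sketch of the before/after shape, with names simplified:

```python
class Client:
    def refresh(self) -> None: ...

def get_client() -> Client:  # under the old pins this was effectively Optional[Client]
    return Client()

get_client().refresh()  # no Optional member access left, so no suppression needed
```

Pyright can also flag stale suppressions automatically via its `reportUnnecessaryTypeIgnoreComment` check (disabled by default), which helps keep ignores like this from lingering.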

1 comment on commit 6b70b14

@github-actions

Deploy preview for dagster-docs ready!

✅ Preview
https://dagster-docs-bp41bcqnq-elementl.vercel.app
https://master.dagster.dagster-docs.io

Built with commit 6b70b14.
This pull request is being automatically deployed with vercel-action
