Enable UP rule in ruff #416

Merged
merged 2 commits on Jan 14, 2025
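The pyproject.toml hunk that actually switches the rule on is not part of the rendered diff below, but in ruff's current configuration format it would presumably look roughly like this (the exact placement in the repo's config is an assumption):

```toml
[tool.ruff.lint]
# "UP" selects the pyupgrade-derived rules (UP006, UP008, UP015, UP028, UP035, ...)
extend-select = ["UP"]
```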
2 changes: 1 addition & 1 deletion cognite/extractorutils/_inner_util.py
@@ -40,7 +40,7 @@ class _DecimalEncoder(json.JSONEncoder):
def default(self, obj: Any) -> dict[str, str]:
if isinstance(obj, Decimal):
return {"type": "decimal_encoded", "value": str(obj)}
return super(_DecimalEncoder, self).default(obj)
return super().default(obj)


class _DecimalDecoder(json.JSONDecoder):
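This hunk is the zero-argument `super()` rewrite (ruff UP008). A minimal, runnable sketch of the same pattern, using an illustrative encoder rather than the real module:

```python
import json
from decimal import Decimal
from typing import Any


class DecimalEncoder(json.JSONEncoder):
    def default(self, obj: Any) -> Any:
        if isinstance(obj, Decimal):
            # Encode decimals losslessly as strings, as _DecimalEncoder does.
            return {"type": "decimal_encoded", "value": str(obj)}
        # On Python 3, super() resolves the class and instance implicitly.
        return super().default(obj)


print(json.dumps({"price": Decimal("9.99")}, cls=DecimalEncoder))
# {"price": {"type": "decimal_encoded", "value": "9.99"}}
```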
7 changes: 4 additions & 3 deletions cognite/extractorutils/base.py
@@ -15,11 +15,12 @@
import logging
import os
import sys
from collections.abc import Callable
from dataclasses import is_dataclass
from enum import Enum
from threading import Thread
from types import TracebackType
from typing import Any, Callable, Generic, Type, TypeVar
from typing import Any, Generic, TypeVar

from dotenv import find_dotenv, load_dotenv

@@ -79,7 +80,7 @@ def __init__(
description: str,
version: str | None = None,
run_handle: RunHandle | None = None,
config_class: Type[CustomConfigClass],
config_class: type[CustomConfigClass],
metrics: BaseMetrics | None = None,
use_default_state_store: bool = True,
cancellation_token: CancellationToken | None = None,
@@ -322,7 +323,7 @@ def heartbeat_loop() -> None:
return self

def __exit__(
self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> bool:
"""
Shuts down the extractor. Makes sure states are preserved, that all uploads of data and metrics are done, etc.
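Two patterns recur in this file and several below: `Callable` now comes from `collections.abc` instead of `typing` (UP035), and `typing.Type[X]` becomes the builtin generic `type[X]` (UP006). A stand-alone sketch with a hypothetical `Managed` class, not the real extractor base class:

```python
from collections.abc import Callable  # was: from typing import Callable
from types import TracebackType


class Managed:
    """Hypothetical context manager showing the builtin type[...] annotation."""

    def __init__(self, on_exit: Callable[[], None]) -> None:
        self._on_exit = on_exit

    def __enter__(self) -> "Managed":
        return self

    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
    ) -> bool:
        self._on_exit()
        return False  # do not swallow exceptions


with Managed(on_exit=lambda: print("shutting down")):
    pass
```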
3 changes: 2 additions & 1 deletion cognite/extractorutils/configtools/_util.py
@@ -13,8 +13,9 @@
# limitations under the License.
import base64
import re
from collections.abc import Callable
from pathlib import Path
from typing import Any, Callable
from typing import Any

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization as serialization
2 changes: 1 addition & 1 deletion cognite/extractorutils/configtools/elements.py
@@ -770,7 +770,7 @@ def __new__(cls, value: Any) -> "CastableInt":
floats and other types supported by standard int.
"""

if not isinstance(value, (int, str, bytes)):
if not isinstance(value, int | str | bytes):
raise ValueError(f"CastableInt cannot be created form value {value!r} of type {type(value)!r}.")

return super().__new__(cls, value)
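`isinstance` accepts a PEP 604 union directly on Python 3.10+, which is what this rewrite relies on (ruff UP038 at the time of this PR). The tuple and union forms are equivalent at runtime; a quick illustrative check:

```python
value: int | str | bytes = "42"

assert isinstance(value, (int, str, bytes))   # old style: tuple of types
assert isinstance(value, int | str | bytes)   # new style: union, Python 3.10+

print(int(value) + 1)  # 43
```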
19 changes: 10 additions & 9 deletions cognite/extractorutils/configtools/loaders.py
@@ -19,10 +19,11 @@
import os
import re
import sys
from collections.abc import Callable, Iterable
from enum import Enum
from hashlib import sha256
from pathlib import Path
from typing import Any, Callable, Generic, Iterable, TextIO, Type, TypeVar, cast
from typing import Any, Generic, TextIO, TypeVar, cast

import dacite
import yaml
@@ -211,7 +212,7 @@ def _load_yaml_dict(

def _load_yaml(
source: TextIO | str,
config_type: Type[CustomConfigClass],
config_type: type[CustomConfigClass],
case_style: str = "hyphen",
expand_envvars: bool = True,
dict_manipulator: Callable[[dict[str, Any]], dict[str, Any]] = lambda x: x,
@@ -243,13 +244,13 @@ def _load_yaml(
else:
path = None

def name(type_: Type) -> str:
def name(type_: type) -> str:
return type_.__name__ if hasattr(type_, "__name__") else str(type_)

def all_types(type_: Type) -> Iterable[Type]:
def all_types(type_: type) -> Iterable[type]:
return type_.__args__ if hasattr(type_, "__args__") else [type_]

if isinstance(e, (dacite.WrongTypeError, dacite.UnionMatchError)) and e.value is not None:
if isinstance(e, dacite.WrongTypeError | dacite.UnionMatchError) and e.value is not None:
got_type = name(type(e.value))
need_type = ", ".join(name(t) for t in all_types(e.field_type))

@@ -268,7 +269,7 @@ def all_types(type_: Type) -> Iterable[Type]:

def load_yaml(
source: TextIO | str,
config_type: Type[CustomConfigClass],
config_type: type[CustomConfigClass],
case_style: str = "hyphen",
expand_envvars: bool = True,
keyvault_loader: KeyVaultLoader | None = None,
@@ -346,7 +347,7 @@ def compile_patterns(ignore_patterns: list[str | IgnorePattern]) -> list[re.Patt


class ConfigResolver(Generic[CustomConfigClass]):
def __init__(self, config_path: str, config_type: Type[CustomConfigClass]):
def __init__(self, config_path: str, config_type: type[CustomConfigClass]):
self.config_path = config_path
self.config_type = config_type

@@ -356,7 +357,7 @@ def __init__(self, config_path: str, config_type: Type[CustomConfigClass]):
self._cognite_client: CogniteClient | None = None

def _reload_file(self) -> None:
with open(self.config_path, "r") as stream:
with open(self.config_path) as stream:
self._config_text = stream.read()

@property
@@ -401,7 +402,7 @@ def accept_new_config(self) -> None:

@classmethod
def from_cli(
cls, name: str, description: str, version: str, config_type: Type[CustomConfigClass]
cls, name: str, description: str, version: str, config_type: type[CustomConfigClass]
) -> "ConfigResolver":
argument_parser = argparse.ArgumentParser(sys.argv[0], description=description)
argument_parser.add_argument(
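Besides the `Type` to `type` and `collections.abc` changes shown above, the notable rewrite in this file drops the redundant `"r"` mode from `open()` (UP015), since reading in text mode is already the default. A small sketch using a throwaway file (the path and contents are made up):

```python
from pathlib import Path

config_path = Path("example_config.yaml")  # hypothetical file, created just for the demo
config_path.write_text("logger:\n  console:\n    level: INFO\n")

# "r" (read, text mode) is open()'s default, so it can be omitted.
with open(config_path) as stream:
    config_text = stream.read()

print(config_text)
config_path.unlink()  # clean up the demo file
```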
2 changes: 1 addition & 1 deletion cognite/extractorutils/exceptions.py
@@ -23,7 +23,7 @@ class InvalidConfigError(Exception):
"""

def __init__(self, message: str, details: list[str] | None = None):
super(InvalidConfigError, self).__init__()
super().__init__()
self.message = message
self.details = details

13 changes: 7 additions & 6 deletions cognite/extractorutils/metrics.py
@@ -41,9 +41,10 @@ def __init__(self):
import os
import threading
from abc import ABC, abstractmethod
from collections.abc import Callable
from time import sleep
from types import TracebackType
from typing import Any, Callable, Type, TypeVar
from typing import Any, TypeVar

import arrow
import psutil
@@ -65,7 +66,7 @@ def __init__(self):
T = TypeVar("T")


def safe_get(cls: Type[T], *args: Any, **kwargs: Any) -> T:
def safe_get(cls: type[T], *args: Any, **kwargs: Any) -> T:
"""
A factory for instances of metrics collections.

@@ -232,7 +233,7 @@ def __enter__(self) -> "AbstractMetricsPusher":
return self

def __exit__(
self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> None:
"""
Wraps around stop method, for use as context manager
@@ -269,7 +270,7 @@ def __init__(
thread_name: str | None = None,
cancellation_token: CancellationToken | None = None,
):
super(PrometheusPusher, self).__init__(push_interval, thread_name, cancellation_token)
super().__init__(push_interval, thread_name, cancellation_token)

self.username = username
self.job_name = job_name
@@ -345,7 +346,7 @@ def __init__(
thread_name: str | None = None,
cancellation_token: CancellationToken | None = None,
):
super(CognitePusher, self).__init__(push_interval, thread_name, cancellation_token)
super().__init__(push_interval, thread_name, cancellation_token)

self.cdf_client = cdf_client
self.asset = asset
@@ -409,7 +410,7 @@ def _push_to_server(self) -> None:
datapoints: list[dict[str, str | int | list[Any] | Datapoints | DatapointsArray]] = []

for metric in REGISTRY.collect():
if type(metric) == Metric and metric.type in ["gauge", "counter"]:
if isinstance(metric, Metric) and metric.type in ["gauge", "counter"]:
if len(metric.samples) == 0:
continue

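One change here goes beyond pure pyupgrade rules: the exact-type comparison `type(metric) == Metric` becomes `isinstance(metric, Metric)`, which also matches subclasses and is the form linters generally prefer. A toy illustration (plain classes standing in for prometheus_client's `Metric`):

```python
class Metric:  # stand-in for prometheus_client's Metric
    pass


class CustomMetric(Metric):
    pass


m = CustomMetric()

print(type(m) == Metric)      # False: exact-type comparison rejects subclasses
print(isinstance(m, Metric))  # True: isinstance accepts subclasses as well
```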
12 changes: 6 additions & 6 deletions cognite/extractorutils/statestore/hashing.py
@@ -1,8 +1,9 @@
import hashlib
import json
from abc import ABC
from collections.abc import Iterable, Iterator
from types import TracebackType
from typing import Any, Iterable, Iterator, Type
from typing import Any

import orjson

@@ -66,8 +67,7 @@ def __len__(self) -> int:

def __iter__(self) -> Iterator[str]:
with self.lock:
for key in self._local_state:
yield key
yield from self._local_state


class RawHashStateStore(AbstractHashStateStore):
@@ -169,7 +169,7 @@ def __enter__(self) -> "RawHashStateStore":

def __exit__(
self,
exc_type: Type[BaseException] | None,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
@@ -214,7 +214,7 @@ def initialize(self, force: bool = False) -> None:

with self.lock:
try:
with open(self._file_path, "r") as f:
with open(self._file_path) as f:
self._local_state = json.load(f, cls=_DecimalDecoder)
except FileNotFoundError:
pass
@@ -243,7 +243,7 @@ def __enter__(self) -> "LocalHashStateStore":

def __exit__(
self,
exc_type: Type[BaseException] | None,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
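The `__iter__` change is the `yield from` upgrade (UP028): delegate to the dict's iterator instead of looping and yielding each key. A sketch with a hypothetical in-memory store:

```python
from collections.abc import Iterator


class InMemoryStateStore:
    """Hypothetical store mirroring the __iter__ pattern in the diff."""

    def __init__(self) -> None:
        self._local_state: dict[str, dict[str, str]] = {
            "id-1": {"hash": "abc"},
            "id-2": {"hash": "def"},
        }

    def __iter__(self) -> Iterator[str]:
        # Equivalent to: for key in self._local_state: yield key
        yield from self._local_state


print(list(InMemoryStateStore()))  # ['id-1', 'id-2']
```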
26 changes: 13 additions & 13 deletions cognite/extractorutils/statestore/watermark.py
@@ -87,8 +87,9 @@

import json
from abc import ABC
from collections.abc import Callable, Iterator
from types import TracebackType
from typing import Any, Callable, Dict, Iterator, List, Tuple, Type, Union
from typing import Any

from cognite.client import CogniteClient
from cognite.client.exceptions import CogniteAPIError
@@ -126,10 +127,10 @@ def __init__(
cancellation_token=cancellation_token,
)

self._local_state: Dict[str, Dict[str, Any]] = {}
self._deleted: List[str] = []
self._local_state: dict[str, dict[str, Any]] = {}
self._deleted: list[str] = []

def get_state(self, external_id: Union[str, List[str]]) -> Union[Tuple[Any, Any], List[Tuple[Any, Any]]]:
def get_state(self, external_id: str | list[str]) -> tuple[Any, Any] | list[tuple[Any, Any]]:
"""
Get state(s) for external ID(s)

@@ -192,7 +193,7 @@ def delete_state(self, external_id: str) -> None:
self._local_state.pop(external_id, None)
self._deleted.append(external_id)

def post_upload_handler(self) -> Callable[[List[Dict[str, Union[str, DataPointList]]]], None]:
def post_upload_handler(self) -> Callable[[list[dict[str, str | DataPointList]]], None]:
"""
Get a callable suitable for passing to a time series upload queue as post_upload_function, that will
automatically update the states in this state store when that upload queue is uploading.
@@ -201,7 +202,7 @@ def post_upload_handler(self) -> Callable[[List[Dict[str, Union[str, DataPointLi
A function that expands the current states with the values given
"""

def callback(uploaded_points: List[Dict[str, Union[str, DataPointList]]]) -> None:
def callback(uploaded_points: list[dict[str, str | DataPointList]]) -> None:
for time_series in uploaded_points:
# Use CDF timestamps
data_points = time_series["datapoints"]
@@ -238,10 +239,10 @@ def outside_state(self, external_id: str, new_state: Any) -> bool:

return False

def __getitem__(self, external_id: str) -> Tuple[Any, Any]:
def __getitem__(self, external_id: str) -> tuple[Any, Any]:
return self.get_state(external_id) # type: ignore # will not be list if input is single str

def __setitem__(self, key: str, value: Tuple[Any, Any]) -> None:
def __setitem__(self, key: str, value: tuple[Any, Any]) -> None:
self.set_state(external_id=key, low=value[0], high=value[1])

def __contains__(self, external_id: str) -> bool:
@@ -251,8 +252,7 @@ def __len__(self) -> int:
return len(self._local_state)

def __iter__(self) -> Iterator[str]:
for key in self._local_state:
yield key
yield from self._local_state


class RawStateStore(AbstractStateStore):
@@ -380,7 +380,7 @@ def __enter__(self) -> "RawStateStore":
return self

def __exit__(
self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
) -> None:
"""
Wraps around stop method, for use as context manager
@@ -430,7 +430,7 @@ def initialize(self, force: bool = False) -> None:

with self.lock:
try:
with open(self._file_path, "r") as f:
with open(self._file_path) as f:
self._local_state = json.load(f, cls=_DecimalDecoder)
except FileNotFoundError:
pass
@@ -460,7 +460,7 @@ def __enter__(self) -> "LocalStateStore":

def __exit__(
self,
exc_type: Type[BaseException] | None,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
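This file collects most of the PEP 585/604 rewrites: `Dict`, `List` and `Tuple` become the builtin generics and `Union[...]` becomes `X | Y` (UP006/UP007). A compact sketch of the resulting annotation style, with hypothetical state data:

```python
def get_state(external_id: str | list[str]) -> tuple[int, int] | list[tuple[int, int]]:
    """Hypothetical lookup mirroring the annotation style used in the diff."""
    states: dict[str, tuple[int, int]] = {"id-1": (0, 100), "id-2": (5, 50)}
    if isinstance(external_id, str):
        return states[external_id]
    return [states[xid] for xid in external_id]


print(get_state("id-1"))            # (0, 100)
print(get_state(["id-1", "id-2"]))  # [(0, 100), (5, 50)]
```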
2 changes: 1 addition & 1 deletion cognite/extractorutils/threading.py
@@ -17,7 +17,7 @@ class CancellationToken:
def __init__(self, condition: Condition | None = None) -> None:
self._cv: Condition = condition or Condition()
self._is_cancelled_int: bool = False
self._parent: "CancellationToken" | None = None
self._parent: CancellationToken | None = None

def __repr__(self) -> str:
cls = self.__class__
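Here the quoted forward reference is dropped: annotations on attributes assigned inside a method body are never evaluated at runtime, so the class can refer to itself without the string quotes. A trimmed, runnable sketch of the pattern (attribute names shortened from the diff):

```python
from threading import Condition


class CancellationToken:
    def __init__(self, condition: Condition | None = None) -> None:
        self._cv: Condition = condition or Condition()
        self._is_cancelled: bool = False
        # Not evaluated at runtime, so no quotes are needed around the class's own name.
        self._parent: CancellationToken | None = None


print(CancellationToken()._parent)  # None
```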
7 changes: 2 additions & 5 deletions cognite/extractorutils/unstable/configuration/exceptions.py
@@ -1,6 +1,3 @@
from typing import List, Optional


class InvalidConfigError(Exception):
"""
Exception thrown from ``load_yaml`` and ``load_yaml_dict`` if config file is invalid. This can be due to
@@ -10,8 +7,8 @@ class InvalidConfigError(Exception):
* Unkown fields
"""

def __init__(self, message: str, details: Optional[List[str]] = None):
super(InvalidConfigError, self).__init__()
def __init__(self, message: str, details: list[str] | None = None):
super().__init__()
self.message = message
self.details = details

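Same modernization in the unstable package: `Optional[List[str]]` becomes `list[str] | None`, which lets the `typing` import disappear entirely. A runnable sketch of the resulting exception (docstring trimmed):

```python
class InvalidConfigError(Exception):
    """Sketch of the exception after the rewrite."""

    def __init__(self, message: str, details: list[str] | None = None):
        super().__init__()
        self.message = message
        self.details = details


try:
    raise InvalidConfigError("missing field", details=["cognite.project is required"])
except InvalidConfigError as err:
    print(err.message, err.details)
```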