Commit 1fa785e

style: pre-commit fixes
1 parent: 8f805c2

17 files changed: +62 / -61 lines
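Every hunk below makes the same mechanical change: the type expression passed as the first argument of typing.cast() is wrapped in quotes, most likely by a pre-commit autofix such as ruff's flake8-type-checking rule TC006 (runtime-cast-value). The quoting matters because a bare type expression is evaluated like any ordinary argument when cast() runs, so every name in it must be importable at runtime, whereas a quoted type is just a string to the interpreter while type checkers still resolve it. A minimal sketch of the pattern (the parse_ints helper is hypothetical, not part of the zarr codebase), mirroring the transpose.py hunk:

from __future__ import annotations

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Needed only by the type checker; never imported at runtime.
    from collections.abc import Iterable


def parse_ints(data: object) -> tuple[int, ...]:
    if not isinstance(data, (list, tuple)):
        raise TypeError(f"Expected a list or tuple. Got {data!r} instead.")
    # cast(Iterable[int], data) would require Iterable to exist at runtime;
    # cast("Iterable[int]", data) keeps the import behind TYPE_CHECKING.
    return tuple(cast("Iterable[int]", data))


print(parse_ints([3, 1, 2]))  # (3, 1, 2)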

src/zarr/api/asynchronous.py

Lines changed: 1 addition & 1 deletion
@@ -319,7 +319,7 @@ async def open(
     try:
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we fix typing for array metadata dicts
-        _metadata_dict = cast(ArrayMetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayMetadataDict", metadata_dict)
         # for v2, the above would already have raised an exception if not an array
         zarr_format = _metadata_dict["zarr_format"]
         is_v3_array = zarr_format == 3 and _metadata_dict.get("node_type") == "array"

src/zarr/codecs/crc32c_.py

Lines changed: 4 additions & 2 deletions
@@ -40,7 +40,9 @@ async def _decode_single(
         inner_bytes = data[:-4]
 
         # Need to do a manual cast until https://github.com/numpy/numpy/issues/26783 is resolved
-        computed_checksum = np.uint32(crc32c(cast(typing_extensions.Buffer, inner_bytes))).tobytes()
+        computed_checksum = np.uint32(
+            crc32c(cast("typing_extensions.Buffer", inner_bytes))
+        ).tobytes()
         stored_checksum = bytes(crc32_bytes)
         if computed_checksum != stored_checksum:
             raise ValueError(
@@ -55,7 +57,7 @@ async def _encode_single(
     ) -> Buffer | None:
         data = chunk_bytes.as_numpy_array()
         # Calculate the checksum and "cast" it to a numpy array
-        checksum = np.array([crc32c(cast(typing_extensions.Buffer, data))], dtype=np.uint32)
+        checksum = np.array([crc32c(cast("typing_extensions.Buffer", data))], dtype=np.uint32)
         # Append the checksum (as bytes) to the data
         return chunk_spec.prototype.buffer.from_array_like(np.append(data, checksum.view("b")))
 
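For orientation, the two hunks above are the two halves of one framing scheme: _encode_single appends the CRC32C of the chunk as a trailing 4-byte uint32, and _decode_single strips those four bytes, recomputes the checksum over the rest, and compares. A standalone sketch of that round trip using plain bytes, numpy, and the crc32c package (the encode/decode helpers are illustrative, not zarr API):

import numpy as np
from crc32c import crc32c


def encode(payload: bytes) -> bytes:
    # Append the CRC32C of the payload as a 4-byte uint32 (native byte order).
    checksum = np.uint32(crc32c(payload)).tobytes()
    return payload + checksum


def decode(framed: bytes) -> bytes:
    # Split off the trailing 4 checksum bytes and verify them against a recomputation.
    payload, stored_checksum = framed[:-4], framed[-4:]
    computed_checksum = np.uint32(crc32c(payload)).tobytes()
    if computed_checksum != stored_checksum:
        raise ValueError("Stored and computed checksum do not match.")
    return payload


data = b"example chunk bytes"
assert decode(encode(data)) == data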

src/zarr/codecs/sharding.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ class _ShardIndex(NamedTuple):
     def chunks_per_shard(self) -> ChunkCoords:
         result = tuple(self.offsets_and_lengths.shape[0:-1])
         # The cast is required until https://github.com/numpy/numpy/pull/27211 is merged
-        return cast(ChunkCoords, result)
+        return cast("ChunkCoords", result)
 
     def _localize_chunk(self, chunk_coords: ChunkCoords) -> ChunkCoords:
         return tuple(

src/zarr/codecs/transpose.py

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@ def parse_transpose_order(data: JSON | Iterable[int]) -> tuple[int, ...]:
         raise TypeError(f"Expected an iterable. Got {data} instead.")
     if not all(isinstance(a, int) for a in data):
         raise TypeError(f"Expected an iterable of integers. Got {data} instead.")
-    return tuple(cast(Iterable[int], data))
+    return tuple(cast("Iterable[int]", data))
 
 
 @dataclass(frozen=True)

src/zarr/core/array.py

Lines changed: 12 additions & 12 deletions
@@ -266,7 +266,7 @@ def __init__(
         if isinstance(metadata, dict):
             zarr_format = metadata["zarr_format"]
             # TODO: remove this when we extensively type the dict representation of metadata
-            _metadata = cast(dict[str, JSON], metadata)
+            _metadata = cast("dict[str, JSON]", metadata)
             if zarr_format == 2:
                 metadata = ArrayV2Metadata.from_dict(_metadata)
             elif zarr_format == 3:
@@ -895,7 +895,7 @@ async def open(
         store_path = await make_store_path(store)
         metadata_dict = await get_array_metadata(store_path, zarr_format=zarr_format)
         # TODO: remove this cast when we have better type hints
-        _metadata_dict = cast(ArrayV3MetadataDict, metadata_dict)
+        _metadata_dict = cast("ArrayV3MetadataDict", metadata_dict)
         return cls(store_path=store_path, metadata=_metadata_dict)
 
     @property
@@ -1386,7 +1386,7 @@ async def _set_selection(
             if isinstance(array_like, np._typing._SupportsArrayFunc):
                 # TODO: need to handle array types that don't support __array_function__
                 # like PyTorch and JAX
-                array_like_ = cast(np._typing._SupportsArrayFunc, array_like)
+                array_like_ = cast("np._typing._SupportsArrayFunc", array_like)
                 value = np.asanyarray(value, dtype=self.metadata.dtype, like=array_like_)
         else:
             if not hasattr(value, "shape"):
@@ -1400,7 +1400,7 @@ async def _set_selection(
                     value = value.astype(dtype=self.metadata.dtype, order="A")
                 else:
                     value = np.array(value, dtype=self.metadata.dtype, order="A")
-        value = cast(NDArrayLike, value)
+        value = cast("NDArrayLike", value)
         # We accept any ndarray like object from the user and convert it
         # to a NDBuffer (or subclass). From this point onwards, we only pass
         # Buffer and NDBuffer between components.
@@ -2420,11 +2420,11 @@ def __getitem__(self, selection: Selection) -> NDArrayLike:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            return self.vindex[cast(CoordinateSelection | MaskSelection, selection)]
+            return self.vindex[cast("CoordinateSelection | MaskSelection", selection)]
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             return self.get_orthogonal_selection(pure_selection, fields=fields)
         else:
-            return self.get_basic_selection(cast(BasicSelection, pure_selection), fields=fields)
+            return self.get_basic_selection(cast("BasicSelection", pure_selection), fields=fields)
 
     def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """Modify data for an item or region of the array.
@@ -2519,11 +2519,11 @@ def __setitem__(self, selection: Selection, value: npt.ArrayLike) -> None:
         """
         fields, pure_selection = pop_fields(selection)
         if is_pure_fancy_indexing(pure_selection, self.ndim):
-            self.vindex[cast(CoordinateSelection | MaskSelection, selection)] = value
+            self.vindex[cast("CoordinateSelection | MaskSelection", selection)] = value
         elif is_pure_orthogonal_indexing(pure_selection, self.ndim):
             self.set_orthogonal_selection(pure_selection, value, fields=fields)
         else:
-            self.set_basic_selection(cast(BasicSelection, pure_selection), value, fields=fields)
+            self.set_basic_selection(cast("BasicSelection", pure_selection), value, fields=fields)
 
     @_deprecate_positional_args
     def get_basic_selection(
@@ -3641,7 +3641,7 @@ def update_attributes(self, new_attributes: dict[str, JSON]) -> Array:
         # TODO: remove this cast when type inference improves
         new_array = sync(self._async_array.update_attributes(new_attributes))
         # TODO: remove this cast when type inference improves
-        _new_array = cast(AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata], new_array)
+        _new_array = cast("AsyncArray[ArrayV2Metadata] | AsyncArray[ArrayV3Metadata]", new_array)
         return type(self)(_new_array)
 
     def __repr__(self) -> str:
@@ -3964,7 +3964,7 @@ async def init_array(
             serializer=serializer,
             dtype=dtype_parsed,
         )
-        sub_codecs = cast(tuple[Codec, ...], (*array_array, array_bytes, *bytes_bytes))
+        sub_codecs = cast("tuple[Codec, ...]", (*array_array, array_bytes, *bytes_bytes))
         codecs_out: tuple[Codec, ...]
         if shard_shape_parsed is not None:
             index_location = None
@@ -4308,7 +4308,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(filters, dict | Codec):
             maybe_array_array = (filters,)
         else:
-            maybe_array_array = cast(Iterable[Codec | dict[str, JSON]], filters)
+            maybe_array_array = cast("Iterable[Codec | dict[str, JSON]]", filters)
         out_array_array = tuple(_parse_array_array_codec(c) for c in maybe_array_array)
 
     if serializer == "auto":
@@ -4325,7 +4325,7 @@ def _parse_chunk_encoding_v3(
         if isinstance(compressors, dict | Codec):
             maybe_bytes_bytes = (compressors,)
         else:
-            maybe_bytes_bytes = cast(Iterable[Codec | dict[str, JSON]], compressors)
+            maybe_bytes_bytes = cast("Iterable[Codec | dict[str, JSON]]", compressors)
 
         out_bytes_bytes = tuple(_parse_bytes_bytes_codec(c) for c in maybe_bytes_bytes)
 

src/zarr/core/array_spec.py

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ def from_dict(cls, data: ArrayConfigParams) -> Self:
         """
         kwargs_out: ArrayConfigParams = {}
         for f in fields(ArrayConfig):
-            field_name = cast(Literal["order", "write_empty_chunks"], f.name)
+            field_name = cast("Literal['order', 'write_empty_chunks']", f.name)
             if field_name not in data:
                 kwargs_out[field_name] = zarr_config.get(f"array.{field_name}")
             else:

src/zarr/core/buffer/core.py

Lines changed: 5 additions & 5 deletions
@@ -155,7 +155,7 @@ def create_zero_length(cls) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -203,7 +203,7 @@ def from_buffer(cls, buffer: Buffer) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -223,7 +223,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
         if cls is Buffer:
             raise NotImplementedError("Cannot call abstract method on the abstract class 'Buffer'")
         return cls(
-            cast(ArrayLike, None)
+            cast("ArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     def as_array_like(self) -> ArrayLike:
@@ -354,7 +354,7 @@ def create(
                 "Cannot call abstract method on the abstract class 'NDBuffer'"
             )
         return cls(
-            cast(NDArrayLike, None)
+            cast("NDArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     @classmethod
@@ -391,7 +391,7 @@ def from_numpy_array(cls, array_like: npt.ArrayLike) -> Self:
                 "Cannot call abstract method on the abstract class 'NDBuffer'"
            )
        return cls(
-            cast(NDArrayLike, None)
+            cast("NDArrayLike", None)
         )  # This line will never be reached, but it satisfies the type checker
 
     def as_ndarray_like(self) -> NDArrayLike:

src/zarr/core/buffer/gpu.py

Lines changed: 2 additions & 2 deletions
@@ -103,7 +103,7 @@ def from_bytes(cls, bytes_like: BytesLike) -> Self:
         return cls.from_array_like(cp.frombuffer(bytes_like, dtype="b"))
 
     def as_numpy_array(self) -> npt.NDArray[Any]:
-        return cast(npt.NDArray[Any], cp.asnumpy(self._data))
+        return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
 
     def __add__(self, other: core.Buffer) -> Self:
         other_array = other.as_array_like()
@@ -204,7 +204,7 @@ def as_numpy_array(self) -> npt.NDArray[Any]:
         -------
         NumPy array of this buffer (might be a data copy)
         """
-        return cast(npt.NDArray[Any], cp.asnumpy(self._data))
+        return cast("npt.NDArray[Any]", cp.asnumpy(self._data))
 
     def __getitem__(self, key: Any) -> Self:
         return self.__class__(self._data.__getitem__(key))

src/zarr/core/chunk_key_encodings.py

Lines changed: 2 additions & 2 deletions
@@ -20,7 +20,7 @@
 def parse_separator(data: JSON) -> SeparatorLiteral:
     if data not in (".", "/"):
         raise ValueError(f"Expected an '.' or '/' separator. Got {data} instead.")
-    return cast(SeparatorLiteral, data)
+    return cast("SeparatorLiteral", data)
 
 
 class ChunkKeyEncodingParams(TypedDict):
@@ -48,7 +48,7 @@ def from_dict(cls, data: dict[str, JSON] | ChunkKeyEncodingLike) -> ChunkKeyEnco
             data = {"name": data["name"], "configuration": {"separator": data["separator"]}}
 
         # TODO: remove this cast when we are statically typing the JSON metadata completely.
-        data = cast(dict[str, JSON], data)
+        data = cast("dict[str, JSON]", data)
 
         # configuration is optional for chunk key encodings
         name_parsed, config_parsed = parse_named_configuration(data, require_configuration=False)

src/zarr/core/common.py

Lines changed: 2 additions & 2 deletions
@@ -157,7 +157,7 @@ def parse_fill_value(data: Any) -> Any:
 
 def parse_order(data: Any) -> Literal["C", "F"]:
     if data in ("C", "F"):
-        return cast(Literal["C", "F"], data)
+        return cast("Literal['C', 'F']", data)
     raise ValueError(f"Expected one of ('C', 'F'), got {data} instead.")
 
 
@@ -201,4 +201,4 @@ def _warn_order_kwarg() -> None:
 
 def _default_zarr_format() -> ZarrFormat:
     """Return the default zarr_version"""
-    return cast(ZarrFormat, int(zarr_config.get("default_zarr_format", 3)))
+    return cast("ZarrFormat", int(zarr_config.get("default_zarr_format", 3)))

src/zarr/core/config.py

Lines changed: 1 addition & 1 deletion
@@ -134,6 +134,6 @@ def enable_gpu(self) -> ConfigSet:
 
 def parse_indexing_order(data: Any) -> Literal["C", "F"]:
     if data in ("C", "F"):
-        return cast(Literal["C", "F"], data)
+        return cast("Literal['C', 'F']", data)
     msg = f"Expected one of ('C', 'F'), got {data} instead."
     raise ValueError(msg)

src/zarr/core/group.py

Lines changed: 3 additions & 4 deletions
@@ -79,15 +79,15 @@
 def parse_zarr_format(data: Any) -> ZarrFormat:
     """Parse the zarr_format field from metadata."""
     if data in (2, 3):
-        return cast(ZarrFormat, data)
+        return cast("ZarrFormat", data)
     msg = f"Invalid zarr_format. Expected one of 2 or 3. Got {data}."
     raise ValueError(msg)
 
 
 def parse_node_type(data: Any) -> NodeType:
     """Parse the node_type field from metadata."""
     if data in ("array", "group"):
-        return cast(Literal["array", "group"], data)
+        return cast("Literal['array', 'group']", data)
     raise MetadataValidationError("node_type", "array or group", data)
 
 
@@ -3234,8 +3234,7 @@ def _ensure_consistent_zarr_format(
         raise ValueError(msg)
 
     return cast(
-        Mapping[str, GroupMetadata | ArrayV2Metadata]
-        | Mapping[str, GroupMetadata | ArrayV3Metadata],
+        "Mapping[str, GroupMetadata | ArrayV2Metadata] | Mapping[str, GroupMetadata | ArrayV3Metadata]",
         data,
     )
 
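The second group.py hunk is the one place where quoting also shortens the code: once the union type becomes a single string literal, the formatter treats it as atomic, so the previously wrapped two-line expression collapses into one argument. A hypothetical sketch of the same before/after (the narrow helper and its Mapping types are illustrative, not zarr API):

from __future__ import annotations

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    from collections.abc import Mapping


def narrow(data: dict[str, object]) -> Mapping[str, int] | Mapping[str, str]:
    # Before: the bare union had to be wrapped to satisfy the line-length limit:
    #     return cast(
    #         Mapping[str, int]
    #         | Mapping[str, str],
    #         data,
    #     )
    # After: one quoted argument, which the formatter will not split.
    return cast("Mapping[str, int] | Mapping[str, str]", data)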
