Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable ruff/pyupgrade rules (UP) and fix issues #1703

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions docs/release.rst
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@ Unreleased
Enhancements
~~~~~~~~~~~~

* Enable ruff/pyupgrade rules (UP) and fix issues.
By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1703`.

Docs
~~~~

Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,8 @@ exclude = [

[tool.ruff.lint]
extend-select = [
"B"
"B",
"UP"
]

[tool.black]
Expand Down
20 changes: 12 additions & 8 deletions zarr/_storage/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
from collections.abc import MutableMapping
from copy import copy
from string import ascii_letters, digits
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union
from typing import Any, Optional, Union
from collections.abc import Mapping, Sequence

from zarr.meta import Metadata2, Metadata3
from zarr.util import normalize_storage_path
Expand Down Expand Up @@ -188,7 +189,7 @@ class Store(BaseStore):

"""

def listdir(self, path: str = "") -> list[str]:
    """Return the names of entries directly under *path*.

    The listing is derived generically by scanning the store's keys
    (via ``_listdir_from_keys``) after normalizing the storage path.
    """
    normalized = normalize_storage_path(path)
    return _listdir_from_keys(self, normalized)

Expand All @@ -201,6 +202,9 @@ def rmdir(self, path: str = "") -> None:
_rmdir_from_keys(self, path)


_builtin_list = list


class StoreV3(BaseStore):
_store_version = 3
_metadata_class = Metadata3
Expand Down Expand Up @@ -312,8 +316,8 @@ def supports_efficient_get_partial_values(self):
return False

def get_partial_values(
self, key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]]
) -> List[Union[bytes, memoryview, bytearray]]:
self, key_ranges: Sequence[tuple[str, tuple[int, Optional[int]]]]
) -> _builtin_list[Union[bytes, memoryview, bytearray]]:
"""Get multiple partial values.
key_ranges can be an iterable of key, range pairs,
where a range specifies two integers range_start and range_length
Expand All @@ -323,9 +327,9 @@ def get_partial_values(
from the end of the file.
A key may occur multiple times with different ranges.
Inserts None for missing keys into the returned list."""
results: List[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501
indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = defaultdict(
list
results: _builtin_list[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501
indexed_ranges_by_key: dict[str, _builtin_list[tuple[int, tuple[int, Optional[int]]]]] = (
defaultdict(_builtin_list)
)
for i, (key, range_) in enumerate(key_ranges):
indexed_ranges_by_key[key].append((i, range_))
Expand Down Expand Up @@ -666,7 +670,7 @@ def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None:
store.erase(group_meta_file)


def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]:
def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> list[str]:
# assume path already normalized
prefix = _path_to_prefix(path)
children = set()
Expand Down
6 changes: 3 additions & 3 deletions zarr/_storage/v3.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from collections import OrderedDict
from collections.abc import MutableMapping
from threading import Lock
from typing import Union, Dict, Any, Optional
from typing import Union, Any, Optional

from zarr.errors import (
MetadataError,
Expand Down Expand Up @@ -512,8 +512,8 @@ def __init__(self, store, max_size: int):
self._current_size = 0
self._keys_cache = None
self._contains_cache = {}
self._listdir_cache: Dict[Path, Any] = dict()
self._values_cache: Dict[Path, Any] = OrderedDict()
self._listdir_cache: dict[Path, Any] = dict()
self._values_cache: dict[Path, Any] = OrderedDict()
self._mutex = Lock()
self.hits = self.misses = 0

Expand Down
13 changes: 7 additions & 6 deletions zarr/_storage/v3_storage_transformers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import functools
import itertools
import os
from typing import NamedTuple, Tuple, Optional, Union, Iterator
from typing import NamedTuple, Optional, Union
from collections.abc import Iterator

from numcodecs.compat import ensure_bytes
import numpy as np
Expand Down Expand Up @@ -30,23 +31,23 @@ class _ShardIndex(NamedTuple):
# dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2)
offsets_and_lengths: np.ndarray

def __localize_chunk__(self, chunk: tuple[int, ...]) -> tuple[int, ...]:
    """Map a global chunk coordinate to its position inside its shard.

    Each coordinate is reduced modulo the per-dimension chunks-per-shard
    count taken from the owning store.
    """
    per_shard = self.store.chunks_per_shard
    return tuple(coord % extent for coord, extent in zip(chunk, per_shard))

def is_all_empty(self) -> bool:
    """Return True when every offset/length entry equals the MAX_UINT_64 sentinel,
    i.e. no chunk in this shard holds any data."""
    table = self.offsets_and_lengths
    return np.array_equiv(table, MAX_UINT_64)

def get_chunk_slice(self, chunk: tuple[int, ...]) -> Optional[slice]:
    """Return the byte range of *chunk* within its shard, or ``None``.

    A ``(MAX_UINT_64, MAX_UINT_64)`` offset/length pair is the sentinel
    for "chunk absent"; otherwise a ``slice(start, start + length)`` over
    the shard's bytes is returned.
    """
    local = self.__localize_chunk__(chunk)
    start, length = self.offsets_and_lengths[local]
    if (start, length) == (MAX_UINT_64, MAX_UINT_64):
        # Sentinel pair: this chunk has no data stored in the shard.
        return None
    return slice(int(start), int(start + length))

def set_chunk_slice(self, chunk: Tuple[int, ...], chunk_slice: Optional[slice]) -> None:
def set_chunk_slice(self, chunk: tuple[int, ...], chunk_slice: Optional[slice]) -> None:
localized_chunk = self.__localize_chunk__(chunk)
if chunk_slice is None:
self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64)
Expand Down Expand Up @@ -131,7 +132,7 @@ def _is_data_key(self, key: str) -> bool:
), "data_key_prefix is not initialized, first get a copy via _copy_for_array."
return key.startswith(self._data_key_prefix)

def _key_to_shard(self, chunk_key: str) -> Tuple[str, Tuple[int, ...]]:
def _key_to_shard(self, chunk_key: str) -> tuple[str, tuple[int, ...]]:
prefix, _, chunk_string = chunk_key.rpartition("c")
chunk_subkeys = (
tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,)
Expand All @@ -158,7 +159,7 @@ def _get_index_from_buffer(self, buffer: Union[bytes, bytearray]) -> _ShardIndex
# At the end of each shard 2*64bit per chunk for offset and length define the index:
return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard :], self)

def _get_chunks_in_shard(self, shard_key: str) -> Iterator[Tuple[int, ...]]:
def _get_chunks_in_shard(self, shard_key: str) -> Iterator[tuple[int, ...]]:
_, _, chunk_string = shard_key.rpartition("c")
shard_key_tuple = (
tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,)
Expand Down
8 changes: 4 additions & 4 deletions zarr/creation.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from collections.abc import MutableMapping
from typing import Optional, Tuple, Union, Sequence
from typing import Optional, Union
from collections.abc import MutableMapping, Sequence
from warnings import warn

import numpy as np
Expand Down Expand Up @@ -29,8 +29,8 @@


def create(
shape: Union[int, Tuple[int, ...]],
chunks: Union[int, Tuple[int, ...], bool] = True,
shape: Union[int, tuple[int, ...]],
chunks: Union[int, tuple[int, ...], bool] = True,
dtype: Optional[npt.DTypeLike] = None,
compressor="default",
fill_value: Optional[int] = 0,
Expand Down
4 changes: 2 additions & 2 deletions zarr/hierarchy.py
Original file line number Diff line number Diff line change
Expand Up @@ -1337,11 +1337,11 @@ def move(self, source, dest):
contains_array(self._store, source)
or contains_group(self._store, source, explicit_only=False)
):
raise ValueError('The source, "%s", does not exist.' % source)
raise ValueError(f'The source, "{source}", does not exist.')
if contains_array(self._store, dest) or contains_group(
self._store, dest, explicit_only=False
):
raise ValueError('The dest, "%s", already exists.' % dest)
raise ValueError(f'The dest, "{dest}", already exists.')

# Ensure groups needed for `dest` exist.
if "/" in dest:
Expand Down
5 changes: 3 additions & 2 deletions zarr/meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@
from zarr.errors import MetadataError
from zarr.util import json_dumps, json_loads

from typing import cast, Union, Any, List, Mapping as MappingType, Optional, TYPE_CHECKING
from typing import cast, Union, Any, Optional, TYPE_CHECKING
from collections.abc import Mapping as MappingType

if TYPE_CHECKING: # pragma: no cover
from zarr._storage.store import StorageTransformer
Expand Down Expand Up @@ -180,7 +181,7 @@ def encode_dtype(cls, d: np.dtype):
return d.descr

@classmethod
def _decode_dtype_descr(cls, d) -> List[Any]:
def _decode_dtype_descr(cls, d) -> list[Any]:
# need to convert list of lists to list of tuples
if isinstance(d, list):
# recurse to handle nested structures
Expand Down
20 changes: 10 additions & 10 deletions zarr/n5.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os
import struct
import sys
from typing import Any, Dict, Optional, cast
from typing import Any, Optional, cast
import warnings

import numpy as np
Expand Down Expand Up @@ -236,7 +236,7 @@ def listdir(self, path: Optional[str] = None):
else:
return children

def _load_n5_attrs(self, path: str) -> Dict[str, Any]:
def _load_n5_attrs(self, path: str) -> dict[str, Any]:
try:
s = super().__getitem__(path)
return json_loads(s)
Expand Down Expand Up @@ -581,23 +581,23 @@ def invert_chunk_coords(key: str):
return key


def group_metadata_to_n5(group_metadata: dict[str, Any]) -> dict[str, Any]:
    """Convert group metadata from zarr to N5 format.

    Mutates *group_metadata* in place — removes the ``zarr_format`` key
    (raising ``KeyError`` if it is missing) and stamps the N5 format
    version — then returns the same mapping.
    """
    group_metadata.pop("zarr_format")
    # TODO: This should only exist at the top-level
    group_metadata["n5"] = N5_FORMAT
    return group_metadata


def group_metadata_to_zarr(group_metadata: dict[str, Any]) -> dict[str, Any]:
    """Convert group metadata from N5 to zarr format.

    Mutates *group_metadata* in place — drops the ``"n5"`` version key if
    present (it only exists at the top level) and stamps the zarr format
    version — then returns the same mapping.
    """
    if "n5" in group_metadata:
        del group_metadata["n5"]
    group_metadata["zarr_format"] = ZARR_FORMAT
    return group_metadata


def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dict[str, Any]:
def array_metadata_to_n5(array_metadata: dict[str, Any], top_level=False) -> dict[str, Any]:
"""Convert array metadata from zarr to N5 format. If the `top_level` keyword argument is True,
then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata."""

Expand Down Expand Up @@ -647,8 +647,8 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic


def array_metadata_to_zarr(
array_metadata: Dict[str, Any], top_level: bool = False
) -> Dict[str, Any]:
array_metadata: dict[str, Any], top_level: bool = False
) -> dict[str, Any]:
"""Convert array metadata from N5 to zarr format.
If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata"""
for t, f in zarr_to_n5_keys:
Expand Down Expand Up @@ -677,7 +677,7 @@ def array_metadata_to_zarr(
return array_metadata


def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]:
def attrs_to_zarr(attrs: dict[str, Any]) -> dict[str, Any]:
"""Get all zarr attributes from an N5 attributes dictionary (i.e.,
all non-keyword attributes)."""

Expand All @@ -689,7 +689,7 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]:
return attrs


def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]:
def compressor_config_to_n5(compressor_config: Optional[dict[str, Any]]) -> dict[str, Any]:
if compressor_config is None:
return {"type": "raw"}
else:
Expand Down Expand Up @@ -750,7 +750,7 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict
return n5_config


def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]:
def compressor_config_to_zarr(compressor_config: dict[str, Any]) -> Optional[dict[str, Any]]:
codec_id = compressor_config["type"]
zarr_config = {"id": codec_id}

Expand Down
21 changes: 11 additions & 10 deletions zarr/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,12 @@
import zipfile
from collections import OrderedDict
from collections.abc import MutableMapping
from functools import lru_cache
from functools import cache
from os import scandir
from pickle import PicklingError
from threading import Lock, RLock
from typing import Sequence, Mapping, Optional, Union, List, Tuple, Dict, Any
from typing import Optional, Union, Any
from collections.abc import Sequence, Mapping
import uuid
import time

Expand Down Expand Up @@ -318,8 +319,8 @@ def _require_parent_group(

def init_array(
store: StoreLike,
shape: Union[int, Tuple[int, ...]],
chunks: Union[bool, int, Tuple[int, ...]] = True,
shape: Union[int, tuple[int, ...]],
chunks: Union[bool, int, tuple[int, ...]] = True,
dtype=None,
compressor="default",
fill_value=None,
Expand Down Expand Up @@ -745,7 +746,7 @@ def _init_group_metadata(
store[key] = encode_group_metadata(meta)


def _dict_store_keys(d: Dict, prefix="", cls=dict):
def _dict_store_keys(d: dict, prefix="", cls=dict):
for k in d.keys():
v = d[k]
if isinstance(v, cls):
Expand Down Expand Up @@ -916,7 +917,7 @@ def __iter__(self):
def __len__(self) -> int:
return sum(1 for _ in self.keys())

def listdir(self, path: Path = None) -> List[str]:
def listdir(self, path: Path = None) -> list[str]:
path = normalize_storage_path(path)
if path:
try:
Expand Down Expand Up @@ -1558,7 +1559,7 @@ def clear(self):
self.map.clear()

@classmethod
@lru_cache(maxsize=None)
@cache
def _fsspec_installed(cls):
"""Returns true if fsspec is installed"""
import importlib.util
Expand Down Expand Up @@ -2459,9 +2460,9 @@ def __init__(self, store: StoreLike, max_size: int):
self._max_size = max_size
self._current_size = 0
self._keys_cache = None
self._contains_cache: Dict[Any, Any] = {}
self._listdir_cache: Dict[Path, Any] = dict()
self._values_cache: Dict[Path, Any] = OrderedDict()
self._contains_cache: dict[Any, Any] = {}
self._listdir_cache: dict[Path, Any] = dict()
self._values_cache: dict[Path, Any] = OrderedDict()
self._mutex = Lock()
self.hits = self.misses = 0

Expand Down
Loading