From c0f94e9825808447bca714c7c577d5b269117121 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 11 Mar 2024 22:23:53 +0100 Subject: [PATCH 1/6] Enable ruff/pyupgrade rules (UP) As suggested by Repo-Review. --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index dacd45ec2..8aa6febb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -105,7 +105,8 @@ exclude = [ [tool.ruff.lint] extend-select = [ - "B" + "B", + "UP" ] [tool.black] From 65d7dbd403e7ebb2f2499c85601ac51c8bb42c7f Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Mon, 11 Mar 2024 22:21:30 +0100 Subject: [PATCH 2/6] Apply ruff/pyupgrade rule UP006 UP006 Use `dict` instead of `Dict` for type annotation UP006 Use `list` instead of `List` for type annotation UP006 Use `tuple` instead of `Tuple` for type annotation https://docs.astral.sh/ruff/rules/non-pep585-annotation/ --- zarr/_storage/store.py | 19 +++++++++++-------- zarr/_storage/v3.py | 6 +++--- zarr/_storage/v3_storage_transformers.py | 12 ++++++------ zarr/creation.py | 6 +++--- zarr/meta.py | 4 ++-- zarr/n5.py | 20 ++++++++++---------- zarr/storage.py | 16 ++++++++-------- zarr/tests/test_core.py | 18 +++++++++--------- zarr/tests/test_n5.py | 3 +-- zarr/util.py | 20 +++++++++----------- 10 files changed, 62 insertions(+), 62 deletions(-) diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py index dba29d13c..df53aaa5e 100644 --- a/zarr/_storage/store.py +++ b/zarr/_storage/store.py @@ -5,7 +5,7 @@ from collections.abc import MutableMapping from copy import copy from string import ascii_letters, digits -from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union +from typing import Any, Mapping, Optional, Sequence, Union from zarr.meta import Metadata2, Metadata3 from zarr.util import normalize_storage_path @@ -188,7 +188,7 @@ class Store(BaseStore): """ - def listdir(self, path: str = "") -> List[str]: + def listdir(self, path: str = "") -> list[str]: path = normalize_storage_path(path) return _listdir_from_keys(self, path) @@ -201,6 +201,9 @@ def rmdir(self, path: str = "") -> None: _rmdir_from_keys(self, path) +_builtin_list = list + + class StoreV3(BaseStore): _store_version = 3 _metadata_class = Metadata3 @@ -312,8 +315,8 @@ def supports_efficient_get_partial_values(self): return False def get_partial_values( - self, key_ranges: Sequence[Tuple[str, Tuple[int, Optional[int]]]] - ) -> List[Union[bytes, memoryview, bytearray]]: + self, key_ranges: Sequence[tuple[str, tuple[int, Optional[int]]]] + ) -> _builtin_list[Union[bytes, memoryview, bytearray]]: """Get multiple partial values. key_ranges can be an iterable of key, range pairs, where a range specifies two integers range_start and range_length @@ -323,9 +326,9 @@ def get_partial_values( from the end of the file. A key may occur multiple times with different ranges. 
Inserts None for missing keys into the returned list.""" - results: List[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501 - indexed_ranges_by_key: Dict[str, List[Tuple[int, Tuple[int, Optional[int]]]]] = defaultdict( - list + results: _builtin_list[Union[bytes, memoryview, bytearray]] = [None] * len(key_ranges) # type: ignore[list-item] # noqa: E501 + indexed_ranges_by_key: dict[str, _builtin_list[tuple[int, tuple[int, Optional[int]]]]] = ( + defaultdict(_builtin_list) ) for i, (key, range_) in enumerate(key_ranges): indexed_ranges_by_key[key].append((i, range_)) @@ -666,7 +669,7 @@ def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None: store.erase(group_meta_file) -def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]: +def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> list[str]: # assume path already normalized prefix = _path_to_prefix(path) children = set() diff --git a/zarr/_storage/v3.py b/zarr/_storage/v3.py index 4987f820c..41241687a 100644 --- a/zarr/_storage/v3.py +++ b/zarr/_storage/v3.py @@ -3,7 +3,7 @@ from collections import OrderedDict from collections.abc import MutableMapping from threading import Lock -from typing import Union, Dict, Any, Optional +from typing import Union, Any, Optional from zarr.errors import ( MetadataError, @@ -512,8 +512,8 @@ def __init__(self, store, max_size: int): self._current_size = 0 self._keys_cache = None self._contains_cache = {} - self._listdir_cache: Dict[Path, Any] = dict() - self._values_cache: Dict[Path, Any] = OrderedDict() + self._listdir_cache: dict[Path, Any] = dict() + self._values_cache: dict[Path, Any] = OrderedDict() self._mutex = Lock() self.hits = self.misses = 0 diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py index 00467d44f..b529601c1 100644 --- a/zarr/_storage/v3_storage_transformers.py +++ b/zarr/_storage/v3_storage_transformers.py @@ -1,7 +1,7 @@ import functools import itertools import os -from typing import NamedTuple, Tuple, Optional, Union, Iterator +from typing import NamedTuple, Optional, Union, Iterator from numcodecs.compat import ensure_bytes import numpy as np @@ -30,7 +30,7 @@ class _ShardIndex(NamedTuple): # dtype uint64, shape (chunks_per_shard_0, chunks_per_shard_1, ..., 2) offsets_and_lengths: np.ndarray - def __localize_chunk__(self, chunk: Tuple[int, ...]) -> Tuple[int, ...]: + def __localize_chunk__(self, chunk: tuple[int, ...]) -> tuple[int, ...]: return tuple( chunk_i % shard_i for chunk_i, shard_i in zip(chunk, self.store.chunks_per_shard) ) @@ -38,7 +38,7 @@ def __localize_chunk__(self, chunk: Tuple[int, ...]) -> Tuple[int, ...]: def is_all_empty(self) -> bool: return np.array_equiv(self.offsets_and_lengths, MAX_UINT_64) - def get_chunk_slice(self, chunk: Tuple[int, ...]) -> Optional[slice]: + def get_chunk_slice(self, chunk: tuple[int, ...]) -> Optional[slice]: localized_chunk = self.__localize_chunk__(chunk) chunk_start, chunk_len = self.offsets_and_lengths[localized_chunk] if (chunk_start, chunk_len) == (MAX_UINT_64, MAX_UINT_64): @@ -46,7 +46,7 @@ def get_chunk_slice(self, chunk: Tuple[int, ...]) -> Optional[slice]: else: return slice(int(chunk_start), int(chunk_start + chunk_len)) - def set_chunk_slice(self, chunk: Tuple[int, ...], chunk_slice: Optional[slice]) -> None: + def set_chunk_slice(self, chunk: tuple[int, ...], chunk_slice: Optional[slice]) -> None: localized_chunk = self.__localize_chunk__(chunk) if chunk_slice is None: 
self.offsets_and_lengths[localized_chunk] = (MAX_UINT_64, MAX_UINT_64) @@ -131,7 +131,7 @@ def _is_data_key(self, key: str) -> bool: ), "data_key_prefix is not initialized, first get a copy via _copy_for_array." return key.startswith(self._data_key_prefix) - def _key_to_shard(self, chunk_key: str) -> Tuple[str, Tuple[int, ...]]: + def _key_to_shard(self, chunk_key: str) -> tuple[str, tuple[int, ...]]: prefix, _, chunk_string = chunk_key.rpartition("c") chunk_subkeys = ( tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) @@ -158,7 +158,7 @@ def _get_index_from_buffer(self, buffer: Union[bytes, bytearray]) -> _ShardIndex # At the end of each shard 2*64bit per chunk for offset and length define the index: return _ShardIndex.from_bytes(buffer[-16 * self._num_chunks_per_shard :], self) - def _get_chunks_in_shard(self, shard_key: str) -> Iterator[Tuple[int, ...]]: + def _get_chunks_in_shard(self, shard_key: str) -> Iterator[tuple[int, ...]]: _, _, chunk_string = shard_key.rpartition("c") shard_key_tuple = ( tuple(map(int, chunk_string.split(self.dimension_separator))) if chunk_string else (0,) diff --git a/zarr/creation.py b/zarr/creation.py index 9b2b1d6d4..84e4666e7 100644 --- a/zarr/creation.py +++ b/zarr/creation.py @@ -1,5 +1,5 @@ from collections.abc import MutableMapping -from typing import Optional, Tuple, Union, Sequence +from typing import Optional, Union, Sequence from warnings import warn import numpy as np @@ -29,8 +29,8 @@ def create( - shape: Union[int, Tuple[int, ...]], - chunks: Union[int, Tuple[int, ...], bool] = True, + shape: Union[int, tuple[int, ...]], + chunks: Union[int, tuple[int, ...], bool] = True, dtype: Optional[npt.DTypeLike] = None, compressor="default", fill_value: Optional[int] = 0, diff --git a/zarr/meta.py b/zarr/meta.py index 5430ab305..4c5790801 100644 --- a/zarr/meta.py +++ b/zarr/meta.py @@ -9,7 +9,7 @@ from zarr.errors import MetadataError from zarr.util import json_dumps, json_loads -from typing import cast, Union, Any, List, Mapping as MappingType, Optional, TYPE_CHECKING +from typing import cast, Union, Any, Mapping as MappingType, Optional, TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover from zarr._storage.store import StorageTransformer @@ -180,7 +180,7 @@ def encode_dtype(cls, d: np.dtype): return d.descr @classmethod - def _decode_dtype_descr(cls, d) -> List[Any]: + def _decode_dtype_descr(cls, d) -> list[Any]: # need to convert list of lists to list of tuples if isinstance(d, list): # recurse to handle nested structures diff --git a/zarr/n5.py b/zarr/n5.py index 3d3e9afa2..65621116d 100644 --- a/zarr/n5.py +++ b/zarr/n5.py @@ -4,7 +4,7 @@ import os import struct import sys -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast import warnings import numpy as np @@ -236,7 +236,7 @@ def listdir(self, path: Optional[str] = None): else: return children - def _load_n5_attrs(self, path: str) -> Dict[str, Any]: + def _load_n5_attrs(self, path: str) -> dict[str, Any]: try: s = super().__getitem__(path) return json_loads(s) @@ -581,7 +581,7 @@ def invert_chunk_coords(key: str): return key -def group_metadata_to_n5(group_metadata: Dict[str, Any]) -> Dict[str, Any]: +def group_metadata_to_n5(group_metadata: dict[str, Any]) -> dict[str, Any]: """Convert group metadata from zarr to N5 format.""" del group_metadata["zarr_format"] # TODO: This should only exist at the top-level @@ -589,7 +589,7 @@ def group_metadata_to_n5(group_metadata: Dict[str, Any]) -> Dict[str, Any]: return group_metadata 
-def group_metadata_to_zarr(group_metadata: Dict[str, Any]) -> Dict[str, Any]: +def group_metadata_to_zarr(group_metadata: dict[str, Any]) -> dict[str, Any]: """Convert group metadata from N5 to zarr format.""" # This only exists at the top level group_metadata.pop("n5", None) @@ -597,7 +597,7 @@ def group_metadata_to_zarr(group_metadata: Dict[str, Any]) -> Dict[str, Any]: return group_metadata -def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dict[str, Any]: +def array_metadata_to_n5(array_metadata: dict[str, Any], top_level=False) -> dict[str, Any]: """Convert array metadata from zarr to N5 format. If the `top_level` keyword argument is True, then the `N5` : N5_FORMAT key : value pair will be inserted into the metadata.""" @@ -647,8 +647,8 @@ def array_metadata_to_n5(array_metadata: Dict[str, Any], top_level=False) -> Dic def array_metadata_to_zarr( - array_metadata: Dict[str, Any], top_level: bool = False -) -> Dict[str, Any]: + array_metadata: dict[str, Any], top_level: bool = False +) -> dict[str, Any]: """Convert array metadata from N5 to zarr format. If the `top_level` keyword argument is True, then the `N5` key will be removed from metadata""" for t, f in zarr_to_n5_keys: @@ -677,7 +677,7 @@ def array_metadata_to_zarr( return array_metadata -def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: +def attrs_to_zarr(attrs: dict[str, Any]) -> dict[str, Any]: """Get all zarr attributes from an N5 attributes dictionary (i.e., all non-keyword attributes).""" @@ -689,7 +689,7 @@ def attrs_to_zarr(attrs: Dict[str, Any]) -> Dict[str, Any]: return attrs -def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict[str, Any]: +def compressor_config_to_n5(compressor_config: Optional[dict[str, Any]]) -> dict[str, Any]: if compressor_config is None: return {"type": "raw"} else: @@ -750,7 +750,7 @@ def compressor_config_to_n5(compressor_config: Optional[Dict[str, Any]]) -> Dict return n5_config -def compressor_config_to_zarr(compressor_config: Dict[str, Any]) -> Optional[Dict[str, Any]]: +def compressor_config_to_zarr(compressor_config: dict[str, Any]) -> Optional[dict[str, Any]]: codec_id = compressor_config["type"] zarr_config = {"id": codec_id} diff --git a/zarr/storage.py b/zarr/storage.py index f412870f7..30a62d709 100644 --- a/zarr/storage.py +++ b/zarr/storage.py @@ -33,7 +33,7 @@ from os import scandir from pickle import PicklingError from threading import Lock, RLock -from typing import Sequence, Mapping, Optional, Union, List, Tuple, Dict, Any +from typing import Sequence, Mapping, Optional, Union, Any import uuid import time @@ -318,8 +318,8 @@ def _require_parent_group( def init_array( store: StoreLike, - shape: Union[int, Tuple[int, ...]], - chunks: Union[bool, int, Tuple[int, ...]] = True, + shape: Union[int, tuple[int, ...]], + chunks: Union[bool, int, tuple[int, ...]] = True, dtype=None, compressor="default", fill_value=None, @@ -745,7 +745,7 @@ def _init_group_metadata( store[key] = encode_group_metadata(meta) -def _dict_store_keys(d: Dict, prefix="", cls=dict): +def _dict_store_keys(d: dict, prefix="", cls=dict): for k in d.keys(): v = d[k] if isinstance(v, cls): @@ -916,7 +916,7 @@ def __iter__(self): def __len__(self) -> int: return sum(1 for _ in self.keys()) - def listdir(self, path: Path = None) -> List[str]: + def listdir(self, path: Path = None) -> list[str]: path = normalize_storage_path(path) if path: try: @@ -2459,9 +2459,9 @@ def __init__(self, store: StoreLike, max_size: int): self._max_size = max_size 
self._current_size = 0 self._keys_cache = None - self._contains_cache: Dict[Any, Any] = {} - self._listdir_cache: Dict[Path, Any] = dict() - self._values_cache: Dict[Path, Any] = OrderedDict() + self._contains_cache: dict[Any, Any] = {} + self._listdir_cache: dict[Path, Any] = dict() + self._values_cache: dict[Path, Any] = OrderedDict() self._mutex = Lock() self.hits = self.misses = 0 diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index 730f72431..73c2e158b 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -3,7 +3,7 @@ import sys import pickle import shutil -from typing import Any, Literal, Optional, Tuple, Union, Sequence +from typing import Any, Literal, Optional, Union, Sequence import unittest from itertools import zip_longest from tempfile import mkdtemp @@ -99,7 +99,7 @@ class TestArray: partial_decompress: bool = False write_empty_chunks = True read_only = False - storage_transformers: Tuple[Any, ...] = () + storage_transformers: tuple[Any, ...] = () def create_store(self) -> BaseStore: return KVStore(dict()) @@ -108,13 +108,13 @@ def create_store(self) -> BaseStore: def create_chunk_store(self) -> Optional[BaseStore]: return None - def create_storage_transformers(self, shape: Union[int, Tuple[int, ...]]) -> Tuple[Any, ...]: + def create_storage_transformers(self, shape: Union[int, tuple[int, ...]]) -> tuple[Any, ...]: return () - def create_filters(self, dtype: Optional[str]) -> Tuple[Any, ...]: + def create_filters(self, dtype: Optional[str]) -> tuple[Any, ...]: return () - def create_array(self, shape: Union[int, Tuple[int, ...]], **kwargs): + def create_array(self, shape: Union[int, tuple[int, ...]], **kwargs): store = self.create_store() chunk_store = self.create_chunk_store() # keyword arguments for array initialization @@ -2161,7 +2161,7 @@ def expected(self): class TestArrayWithFilters(TestArray): compressor = Zlib(1) - def create_filters(self, dtype: Optional[str]) -> Tuple[Any, ...]: + def create_filters(self, dtype: Optional[str]) -> tuple[Any, ...]: return ( Delta(dtype=dtype), FixedScaleOffset(dtype=dtype, scale=1, offset=0), @@ -2994,7 +2994,7 @@ class TestArrayWithFSStoreV3PartialReadUncompressedSharded(TestArrayWithFSStoreV partial_decompress = True compressor = None - def create_storage_transformers(self, shape) -> Tuple[Any]: + def create_storage_transformers(self, shape) -> tuple[Any]: num_dims = 1 if isinstance(shape, int) else len(shape) sharding_transformer = ShardingStorageTransformer( "indexed", chunks_per_shard=(2,) * num_dims @@ -3056,7 +3056,7 @@ def expected(self): @pytest.mark.skipif(not v3_api_available, reason="V3 is disabled") class TestArrayWithStorageTransformersV3(TestArrayWithChunkStoreV3): - def create_storage_transformers(self, shape) -> Tuple[Any]: + def create_storage_transformers(self, shape) -> tuple[Any]: return ( DummyStorageTransfomer("dummy_type", test_value=DummyStorageTransfomer.TEST_CONSTANT), ) @@ -3076,7 +3076,7 @@ def expected(self): class TestArrayWithShardingStorageTransformerV3(TestArrayV3): compressor = None - def create_storage_transformers(self, shape) -> Tuple[Any]: + def create_storage_transformers(self, shape) -> tuple[Any]: num_dims = 1 if isinstance(shape, int) else len(shape) return (ShardingStorageTransformer("indexed", chunks_per_shard=(2,) * num_dims),) diff --git a/zarr/tests/test_n5.py b/zarr/tests/test_n5.py index 2602aa06c..04f37b69c 100644 --- a/zarr/tests/test_n5.py +++ b/zarr/tests/test_n5.py @@ -5,7 +5,6 @@ from zarr.storage import atexit_rmtree from numcodecs import GZip 
 import numpy as np
-from typing import Tuple
 import json
 import atexit
@@ -28,7 +27,7 @@ def test_make_n5_chunk_wrapper():

 @pytest.mark.parametrize("chunk_shape", ((2,), (4, 4), (8, 8, 8)))
-def test_partial_chunk_decode(chunk_shape: Tuple[int, ...]):
+def test_partial_chunk_decode(chunk_shape: tuple[int, ...]):
     # Test that the N5Chunk wrapper can handle fractional chunks that
     # may be generated by other N5 implementations
     dtype = "uint8"

diff --git a/zarr/util.py b/zarr/util.py
index 8a96f92c2..a82e75fd3 100644
--- a/zarr/util.py
+++ b/zarr/util.py
@@ -8,11 +8,9 @@
 from typing import (
     Any,
     Callable,
-    Dict,
     Iterator,
     Mapping,
     Optional,
-    Tuple,
     TypeVar,
     Union,
     Iterable,
@@ -71,12 +69,12 @@ def json_dumps(o: Any) -> bytes:
     ).encode("ascii")

-def json_loads(s: Union[bytes, str]) -> Dict[str, Any]:
+def json_loads(s: Union[bytes, str]) -> dict[str, Any]:
     """Read JSON in a consistent way."""
     return json.loads(ensure_text(s, "utf-8"))

-def normalize_shape(shape: Union[int, Tuple[int, ...], None]) -> Tuple[int, ...]:
+def normalize_shape(shape: Union[int, tuple[int, ...], None]) -> tuple[int, ...]:
     """Convenience function to normalize the `shape` argument."""

     if shape is None:
@@ -87,7 +85,7 @@ def normalize_shape(shape: Union[int, Tuple[int, ...], None]) -> Tuple[int, ...]
         shape = (int(shape),)

     # normalize
-    shape = cast(Tuple[int, ...], shape)
+    shape = cast(tuple[int, ...], shape)
     shape = tuple(int(s) for s in shape)
     return shape
@@ -99,7 +97,7 @@ def normalize_shape(shape: Union[int, Tuple[int, ...], None]) -> Tuple[int, ...]
 CHUNK_MAX = 64 * 1024 * 1024  # Hard upper limit

-def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]:
+def guess_chunks(shape: tuple[int, ...], typesize: int) -> tuple[int, ...]:
     """
     Guess an appropriate chunk layout for an array, given its shape and
     the size of each element in bytes.  Will allocate chunks only as large
@@ -145,7 +143,7 @@ def guess_chunks(shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]:
     return tuple(int(x) for x in chunks)

-def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tuple[int, ...]:
+def normalize_chunks(chunks: Any, shape: tuple[int, ...], typesize: int) -> tuple[int, ...]:
     """Convenience function to normalize the `chunks` argument for an array
     with the given `shape`."""
@@ -180,7 +178,7 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl
     return chunks

-def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]:
+def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> tuple[np.dtype, Any]:
     # convenience API for object arrays
     if inspect.isclass(dtype):
         dtype = dtype.__name__
@@ -218,7 +216,7 @@ def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype
 # noinspection PyTypeChecker
-def is_total_slice(item, shape: Tuple[int]) -> bool:
+def is_total_slice(item, shape: tuple[int]) -> bool:
     """Determine whether `item` specifies a complete slice of array with the
     given `shape`. Used to optimize __setitem__ operations on the Chunk
     class."""
@@ -384,7 +382,7 @@ def buffer_size(v) -> int:
     return ensure_ndarray_like(v).nbytes

-def info_text_report(items: Dict[Any, Any]) -> str:
+def info_text_report(items: dict[Any, Any]) -> str:
     keys = [k for k, v in items]
     max_key_len = max(len(k) for k in keys)
     report = ""
@@ -673,7 +671,7 @@ def retry_call(
     callabl: Callable,
     args=None,
     kwargs=None,
-    exceptions: Tuple[Any, ...] = (),
+    exceptions: tuple[Any, ...] = (),
     retries: int = 10,
     wait: float = 0.1,
 ) -> Any:

From cf478cd99427290d1ac46b6cef063aa9df77c47f Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Tue, 7 May 2024 17:58:31 +0200
Subject: [PATCH 3/6] Apply ruff/pyupgrade rule UP031

UP031 Use format specifiers instead of percent format

https://docs.astral.sh/ruff/rules/printf-string-formatting/
---
 zarr/hierarchy.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/zarr/hierarchy.py b/zarr/hierarchy.py
index 8894a5ed5..804166e2b 100644
--- a/zarr/hierarchy.py
+++ b/zarr/hierarchy.py
@@ -1337,11 +1337,11 @@ def move(self, source, dest):
             contains_array(self._store, source)
             or contains_group(self._store, source, explicit_only=False)
         ):
-            raise ValueError('The source, "%s", does not exist.' % source)
+            raise ValueError(f'The source, "{source}", does not exist.')
         if contains_array(self._store, dest) or contains_group(
             self._store, dest, explicit_only=False
         ):
-            raise ValueError('The dest, "%s", already exists.' % dest)
+            raise ValueError(f'The dest, "{dest}", already exists.')

         # Ensure groups needed for `dest` exist.
         if "/" in dest:

From ec04f4c264774efc5b014c50ade6967a647684eb Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 12 May 2024 13:18:35 +0200
Subject: [PATCH 4/6] Apply ruff/pyupgrade rule UP033

UP033 Use `@functools.cache` instead of `@functools.lru_cache(maxsize=None)`

https://docs.astral.sh/ruff/rules/lru-cache-with-maxsize-none/
---
 zarr/storage.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/zarr/storage.py b/zarr/storage.py
index 30a62d709..488cf6644 100644
--- a/zarr/storage.py
+++ b/zarr/storage.py
@@ -29,7 +29,7 @@
 import zipfile
 from collections import OrderedDict
 from collections.abc import MutableMapping
-from functools import lru_cache
+from functools import cache
 from os import scandir
 from pickle import PicklingError
 from threading import Lock, RLock
@@ -1558,7 +1558,7 @@ def clear(self):
         self.map.clear()

     @classmethod
-    @lru_cache(maxsize=None)
+    @cache
     def _fsspec_installed(cls):
         """Returns true if fsspec is installed"""
         import importlib.util

From be98763de702e624a3e6cd707f5879d98b5a55ae Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Mon, 11 Mar 2024 22:27:50 +0100
Subject: [PATCH 5/6] Apply ruff/pyupgrade rule UP035

UP035 Import from `collections.abc` instead: `Iterable`, `Iterator`, `Mapping`, `Sequence`

https://docs.astral.sh/ruff/rules/deprecated-import/
---
 zarr/_storage/store.py                   | 3 ++-
 zarr/_storage/v3_storage_transformers.py | 3 ++-
 zarr/creation.py                         | 4 ++--
 zarr/meta.py                             | 3 ++-
 zarr/storage.py                          | 3 ++-
 zarr/tests/test_core.py                  | 3 ++-
 zarr/tests/util.py                       | 3 ++-
 zarr/util.py                             | 4 +---
 8 files changed, 15 insertions(+), 11 deletions(-)

diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py
index df53aaa5e..4c545ae75 100644
--- a/zarr/_storage/store.py
+++ b/zarr/_storage/store.py
@@ -5,7 +5,8 @@
 from collections.abc import MutableMapping
 from copy import copy
 from string import ascii_letters, digits
-from typing import Any, Mapping, Optional, Sequence, Union
+from typing import Any, Optional, Union
+from collections.abc import Mapping, Sequence

 from zarr.meta import Metadata2, Metadata3
 from zarr.util import normalize_storage_path

diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py
index b529601c1..bae02ba49 100644
--- a/zarr/_storage/v3_storage_transformers.py
+++ b/zarr/_storage/v3_storage_transformers.py
@@ -1,7 +1,8 @@
 import functools
 import itertools
 import os
-from typing import NamedTuple, Optional, Union, Iterator
+from typing import NamedTuple, Optional, Union
+from collections.abc import Iterator

 from numcodecs.compat import ensure_bytes
 import numpy as np

diff --git a/zarr/creation.py b/zarr/creation.py
index 84e4666e7..e98378f37 100644
--- a/zarr/creation.py
+++ b/zarr/creation.py
@@ -1,5 +1,5 @@
-from collections.abc import MutableMapping
-from typing import Optional, Union, Sequence
+from typing import Optional, Union
+from collections.abc import MutableMapping, Sequence
 from warnings import warn

 import numpy as np

diff --git a/zarr/meta.py b/zarr/meta.py
index 4c5790801..b9f5a0d00 100644
--- a/zarr/meta.py
+++ b/zarr/meta.py
@@ -9,7 +9,8 @@
 from zarr.errors import MetadataError
 from zarr.util import json_dumps, json_loads

-from typing import cast, Union, Any, Mapping as MappingType, Optional, TYPE_CHECKING
+from typing import cast, Union, Any, Optional, TYPE_CHECKING
+from collections.abc import Mapping as MappingType

 if TYPE_CHECKING:  # pragma: no cover
     from zarr._storage.store import StorageTransformer

diff --git a/zarr/storage.py b/zarr/storage.py
index 488cf6644..202493fbb 100644
--- a/zarr/storage.py
+++ b/zarr/storage.py
@@ -33,7 +33,8 @@
 from os import scandir
 from pickle import PicklingError
 from threading import Lock, RLock
-from typing import Sequence, Mapping, Optional, Union, Any
+from typing import Optional, Union, Any
+from collections.abc import Sequence, Mapping
 import uuid
 import time

diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py
index 73c2e158b..8397eecba 100644
--- a/zarr/tests/test_core.py
+++ b/zarr/tests/test_core.py
@@ -3,7 +3,8 @@
 import sys
 import pickle
 import shutil
-from typing import Any, Literal, Optional, Union, Sequence
+from typing import Any, Literal, Optional, Union
+from collections.abc import Sequence
 import unittest
 from itertools import zip_longest
 from tempfile import mkdtemp

diff --git a/zarr/tests/util.py b/zarr/tests/util.py
index b3c3249ca..e696ca4b7 100644
--- a/zarr/tests/util.py
+++ b/zarr/tests/util.py
@@ -1,7 +1,8 @@
 import collections
 import os
 import tempfile
-from typing import Any, Mapping, Sequence
+from typing import Any
+from collections.abc import Mapping, Sequence

 from zarr.context import Context
 from zarr.storage import Store

diff --git a/zarr/util.py b/zarr/util.py
index a82e75fd3..b1c56e383 100644
--- a/zarr/util.py
+++ b/zarr/util.py
@@ -8,14 +8,12 @@
 from typing import (
     Any,
     Callable,
-    Iterator,
-    Mapping,
     Optional,
     TypeVar,
     Union,
-    Iterable,
     cast,
 )
+from collections.abc import Iterator, Mapping, Iterable

 import numpy as np
 from asciitree import BoxStyle, LeftAligned

From 289ff29ddcf7ee493b568c1695c14253aacff54a Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Mon, 11 Mar 2024 22:38:05 +0100
Subject: [PATCH 6/6] Document changes in docs/release.rst

---
 docs/release.rst | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/docs/release.rst b/docs/release.rst
index e2f9f3de8..44bb4493d 100644
--- a/docs/release.rst
+++ b/docs/release.rst
@@ -26,6 +26,9 @@ Unreleased
 Enhancements
 ~~~~~~~~~~~~

+* Enable ruff/pyupgrade rules (UP) and fix issues.
+  By :user:`Dimitri Papadopoulos Orfanos <DimitriPapadopoulos>` :issue:`1703`.
+
 Docs
 ~~~~
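
Taken together, the four pyupgrade rules applied in this series replace pre-3.9 idioms with their modern spellings. The following before/after sketch is illustrative only (the function and variable names are hypothetical, not taken from the zarr codebase) and assumes a Python 3.9+ baseline:

    # Before: idioms flagged by the UP rules enabled in PATCH 1/6
    from functools import lru_cache
    from typing import Dict, Mapping, Tuple   # UP006/UP035: deprecated aliases and imports

    @lru_cache(maxsize=None)                  # UP033: spelled more simply as @cache
    def default_limits() -> Dict[str, Tuple[int, int]]:   # UP006
        return {"chunks": (1, 64)}

    def describe(limits: Mapping[str, Tuple[int, int]]) -> str:
        return "%d limits" % len(limits)      # UP031: percent formatting

    # After: the equivalent Python 3.9+ spellings
    from collections.abc import Mapping      # UP035: ABCs come from collections.abc
    from functools import cache

    @cache                                    # UP033: @cache == @lru_cache(maxsize=None)
    def default_limits() -> dict[str, tuple[int, int]]:   # UP006: PEP 585 builtin generics
        return {"chunks": (1, 64)}

    def describe(limits: Mapping[str, tuple[int, int]]) -> str:
        return f"{len(limits)} limits"        # UP031: f-string replaces percent formatting

One detail from PATCH 2/6 worth noting: zarr/_storage/store.py aliases the builtin with `_builtin_list = list` and uses `_builtin_list[...]` in the `get_partial_values` annotations, presumably because a bare `list` inside the `StoreV3` class body would resolve to the store's own `list` method rather than the builtin type.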