diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 7013f1784f..bb3d433629 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -42,7 +42,7 @@ jobs:
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v3
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -56,7 +56,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+      uses: github/codeql-action/autobuild@v3
 
     # ℹī¸ Command-line programs to run using the OS shell.
     # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -69,4 +69,4 @@ jobs:
     #   ./location_of_script_within_repo/buildscript.sh
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml
index bb8138e57b..daf74c52bf 100644
--- a/.github/workflows/releases.yml
+++ b/.github/workflows/releases.yml
@@ -16,7 +16,7 @@ jobs:
           submodules: true
           fetch-depth: 0
 
-      - uses: actions/setup-python@v4.7.1
+      - uses: actions/setup-python@v5.0.0
        name: Install Python
        with:
          python-version: '3.8'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 029dcda58f..b4e7ab3ccf 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,27 +6,25 @@ default_stages: [commit, push]
 default_language_version:
   python: python3
 repos:
-  - repo: https://github.com/charliermarsh/ruff-pre-commit
+  - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: 'v0.0.224'
+    rev: 'v0.1.8'
     hooks:
       - id: ruff
-        # Respect `exclude` and `extend-exclude` settings.
-        args: ["--force-exclude"]
   - repo: https://github.com/psf/black
-    rev: 23.10.1
+    rev: 23.12.0
     hooks:
       - id: black
   - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.5
+    rev: v2.2.6
     hooks:
       - id: codespell
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-yaml
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.3.0
+    rev: v1.7.1
     hooks:
       - id: mypy
         files: zarr
diff --git a/pyproject.toml b/pyproject.toml
index 4b7fef6003..33e8573830 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -120,9 +120,10 @@ exclude = '''
 '''
 
 [tool.mypy]
-python_version = "3.8"
 ignore_missing_imports = true
-follow_imports = "silent"
+warn_unused_configs = true
+warn_redundant_casts = true
+warn_unused_ignores = true
 
 [tool.pytest.ini_options]
 doctest_optionflags = [
diff --git a/zarr/_storage/store.py b/zarr/_storage/store.py
index 09f0b68602..36b596769a 100644
--- a/zarr/_storage/store.py
+++ b/zarr/_storage/store.py
@@ -642,10 +642,10 @@ def _rmdir_from_keys_v3(store: StoreV3, path: str = "") -> None:
     sfx = _get_metadata_suffix(store)
     array_meta_file = meta_dir + ".array" + sfx
     if array_meta_file in store:
-        store.erase(array_meta_file)  # type: ignore
+        store.erase(array_meta_file)
     group_meta_file = meta_dir + ".group" + sfx
     if group_meta_file in store:
-        store.erase(group_meta_file)  # type: ignore
+        store.erase(group_meta_file)
 
 
 def _listdir_from_keys(store: BaseStore, path: Optional[str] = None) -> List[str]:
diff --git a/zarr/_storage/v3_storage_transformers.py b/zarr/_storage/v3_storage_transformers.py
index ff31a7281c..3afc3823a3 100644
--- a/zarr/_storage/v3_storage_transformers.py
+++ b/zarr/_storage/v3_storage_transformers.py
@@ -351,7 +351,7 @@ def erase_prefix(self, prefix):
 
     def rmdir(self, path=None):
         path = normalize_storage_path(path)
-        _rmdir_from_keys_v3(self, path)  # type: ignore
+        _rmdir_from_keys_v3(self, path)
 
     def __contains__(self, key):
         if self._is_data_key(key):
diff --git a/zarr/core.py b/zarr/core.py
index c07a31e95f..d22a9d79c3 100644
--- a/zarr/core.py
+++ b/zarr/core.py
@@ -2536,7 +2536,7 @@ def hexdigest(self, hashname="sha1"):
         checksum = binascii.hexlify(self.digest(hashname=hashname))
 
         # This is a bytes object on Python 3 and we want a str.
-        if type(checksum) is not str:
+        if not isinstance(checksum, str):
             checksum = checksum.decode("utf8")
 
         return checksum
diff --git a/zarr/meta.py b/zarr/meta.py
index f23889f3ea..d9797e4754 100644
--- a/zarr/meta.py
+++ b/zarr/meta.py
@@ -234,8 +234,8 @@ def decode_fill_value(cls, v: Any, dtype: np.dtype, object_codec: Any = None) ->
             return np.array(v, dtype=dtype)[()]
         elif dtype.kind in "c":
             v = (
-                cls.decode_fill_value(v[0], dtype.type().real.dtype),  # type: ignore
-                cls.decode_fill_value(v[1], dtype.type().imag.dtype),  # type: ignore
+                cls.decode_fill_value(v[0], dtype.type().real.dtype),
+                cls.decode_fill_value(v[1], dtype.type().imag.dtype),
             )
             v = v[0] + 1j * v[1]
             return np.array(v, dtype=dtype)[()]
diff --git a/zarr/storage.py b/zarr/storage.py
index 1c3b39862a..aa27e98e6f 100644
--- a/zarr/storage.py
+++ b/zarr/storage.py
@@ -206,7 +206,7 @@ def rmdir(store: StoreLike, path: Path = None):
     store_version = getattr(store, "_store_version", 2)
     if hasattr(store, "rmdir") and store.is_erasable():  # type: ignore
         # pass through
-        store.rmdir(path)  # type: ignore
+        store.rmdir(path)
     else:
         # slow version, delete one key at a time
         if store_version == 2:
@@ -236,7 +236,7 @@ def listdir(store: BaseStore, path: Path = None):
     path = normalize_storage_path(path)
     if hasattr(store, "listdir"):
         # pass through
-        return store.listdir(path)  # type: ignore
+        return store.listdir(path)
     else:
         # slow version, iterate through all keys
         warnings.warn(
@@ -289,7 +289,7 @@ def getsize(store: BaseStore, path: Path = None) -> int:
     if hasattr(store, "getsize"):
         # pass through
         path = normalize_storage_path(path)
-        return store.getsize(path)  # type: ignore
+        return store.getsize(path)
     elif isinstance(store, MutableMapping):
         return _getsize(store, path)
     else:
@@ -627,7 +627,7 @@ def _init_array_metadata(
 
     key = _prefix_to_array_key(store, _path_to_prefix(path))
     if hasattr(store, "_metadata_class"):
-        store[key] = store._metadata_class.encode_array_metadata(meta)  # type: ignore
+        store[key] = store._metadata_class.encode_array_metadata(meta)
     else:
         store[key] = encode_array_metadata(meta)
 
@@ -730,10 +730,10 @@ def _init_group_metadata(
     if store_version == 3:
         meta = {"attributes": {}}  # type: ignore
     else:
-        meta = {}  # type: ignore
+        meta = {}
 
     key = _prefix_to_group_key(store, _path_to_prefix(path))
     if hasattr(store, "_metadata_class"):
-        store[key] = store._metadata_class.encode_group_metadata(meta)  # type: ignore
+        store[key] = store._metadata_class.encode_group_metadata(meta)
     else:
         store[key] = encode_group_metadata(meta)
diff --git a/zarr/util.py b/zarr/util.py
index f97094b93a..54c389db69 100644
--- a/zarr/util.py
+++ b/zarr/util.py
@@ -183,7 +183,7 @@ def normalize_chunks(chunks: Any, shape: Tuple[int, ...], typesize: int) -> Tupl
 def normalize_dtype(dtype: Union[str, np.dtype], object_codec) -> Tuple[np.dtype, Any]:
     # convenience API for object arrays
     if inspect.isclass(dtype):
-        dtype = dtype.__name__  # type: ignore
+        dtype = dtype.__name__
     if isinstance(dtype, str):
         # allow ':' to delimit class from codec arguments
         tokens = dtype.split(":")
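
Note (illustration only, not part of the patch): the `# type: ignore` removals above follow from the new mypy settings in pyproject.toml. With `warn_unused_ignores = true`, mypy reports ignore comments that no longer suppress any error, so the stale ones were dropped. A minimal sketch of the two idioms the diff moves toward; `demo_hexdigest` is a hypothetical name, not a zarr API:

# demo.py -- sketch, assuming it is type-checked with the pyproject.toml
# settings added in this diff (warn_unused_ignores, warn_redundant_casts).
from typing import Union


def demo_hexdigest(checksum: Union[bytes, str]) -> str:
    # isinstance() is preferred over `type(x) is str`: it accepts subclasses
    # and lets mypy narrow the type of `checksum` in each branch.
    if not isinstance(checksum, str):
        checksum = checksum.decode("utf8")
    return checksum


# With warn_unused_ignores enabled, a suppression that no longer hides an
# error, e.g.
#     value = demo_hexdigest(b"abc")  # type: ignore
# is flagged by mypy as an unused "type: ignore" comment.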