From f691d02b8a4576f4626ac50d68a875a0762a0ca4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 06:35:32 +0000 Subject: [PATCH 1/3] ci: Bump crate-ci/typos from 1.24.2 to 1.25.0 Bumps [crate-ci/typos](https://github.com/crate-ci/typos) from 1.24.2 to 1.25.0. - [Release notes](https://github.com/crate-ci/typos/releases) - [Changelog](https://github.com/crate-ci/typos/blob/master/CHANGELOG.md) - [Commits](https://github.com/crate-ci/typos/compare/v1.24.2...v1.25.0) --- updated-dependencies: - dependency-name: crate-ci/typos dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/lint-global.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint-global.yml b/.github/workflows/lint-global.yml index 8a599fe382a9..2df859cf324c 100644 --- a/.github/workflows/lint-global.yml +++ b/.github/workflows/lint-global.yml @@ -15,4 +15,4 @@ jobs: - name: Lint Markdown and TOML uses: dprint/check@v2.2 - name: Spell Check with Typos - uses: crate-ci/typos@v1.24.2 + uses: crate-ci/typos@v1.25.0 From 0a1109256766623c6caf81a4efc48b7b05c6b159 Mon Sep 17 00:00:00 2001 From: Stijn de Gooijer Date: Tue, 1 Oct 2024 09:43:10 +0200 Subject: [PATCH 2/3] Bump typos in venv --- py-polars/requirements-lint.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/py-polars/requirements-lint.txt b/py-polars/requirements-lint.txt index 1490335f9266..26762ec671b0 100644 --- a/py-polars/requirements-lint.txt +++ b/py-polars/requirements-lint.txt @@ -1,3 +1,3 @@ mypy==1.11.1 ruff==0.6.4 -typos==1.24.2 +typos==1.25.0 From cea803f59b95d243072c0d1161ea51f732280bde Mon Sep 17 00:00:00 2001 From: Stijn de Gooijer Date: Tue, 1 Oct 2024 09:45:26 +0200 Subject: [PATCH 3/3] Fix typos --- crates/polars-arrow/src/array/mod.rs | 2 +- crates/polars-ops/src/chunked_array/list/sets.rs | 2 +- 
.../src/parquet/encoding/delta_length_byte_array/encoder.rs | 2 +- .../polars-pipe/src/executors/sinks/group_by/primitive/mod.rs | 2 +- crates/polars-pipe/src/executors/sinks/group_by/string.rs | 2 +- py-polars/tests/unit/io/test_scan.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/polars-arrow/src/array/mod.rs b/crates/polars-arrow/src/array/mod.rs index 36f285dfd3d3..1004e36f2514 100644 --- a/crates/polars-arrow/src/array/mod.rs +++ b/crates/polars-arrow/src/array/mod.rs @@ -15,7 +15,7 @@ //! to a concrete struct based on [`PhysicalType`](crate::datatypes::PhysicalType) available from [`Array::dtype`]. //! All immutable arrays are backed by [`Buffer`](crate::buffer::Buffer) and thus cloning and slicing them is `O(1)`. //! -//! Most arrays contain a [`MutableArray`] counterpart that is neither clonable nor sliceable, but +//! Most arrays contain a [`MutableArray`] counterpart that is neither cloneable nor sliceable, but //! can be operated in-place. use std::any::Any; use std::sync::Arc; diff --git a/crates/polars-ops/src/chunked_array/list/sets.rs b/crates/polars-ops/src/chunked_array/list/sets.rs index 4a3187631575..a26d76b5a768 100644 --- a/crates/polars-ops/src/chunked_array/list/sets.rs +++ b/crates/polars-ops/src/chunked_array/list/sets.rs @@ -94,7 +94,7 @@ where set2.clear(); set2.extend(b); } - // We could speed this up, but implementing ourselves, but we need to have a clonable + // We could speed this up by implementing it ourselves, but we need to have a cloneable // iterator as we need 2 passes set.extend(a); out.extend_buf(set.symmetric_difference(set2).copied()) diff --git a/crates/polars-parquet/src/parquet/encoding/delta_length_byte_array/encoder.rs b/crates/polars-parquet/src/parquet/encoding/delta_length_byte_array/encoder.rs index d768b10c24f3..4e57c699504f 100644 --- a/crates/polars-parquet/src/parquet/encoding/delta_length_byte_array/encoder.rs +++ 
b/crates/polars-parquet/src/parquet/encoding/delta_length_byte_array/encoder.rs @@ -1,6 +1,6 @@ use crate::parquet::encoding::delta_bitpacked; -/// Encodes a clonable iterator of `&[u8]` into `buffer`. This does not allocated on the heap. +/// Encodes a cloneable iterator of `&[u8]` into `buffer`. This does not allocate on the heap. /// # Implementation /// This encoding is equivalent to call [`delta_bitpacked::encode`] on the lengths of the items /// of the iterator followed by extending the buffer from each item of the iterator. diff --git a/crates/polars-pipe/src/executors/sinks/group_by/primitive/mod.rs b/crates/polars-pipe/src/executors/sinks/group_by/primitive/mod.rs index 9bfddb7d3f1d..8715dd6f3fa9 100644 --- a/crates/polars-pipe/src/executors/sinks/group_by/primitive/mod.rs +++ b/crates/polars-pipe/src/executors/sinks/group_by/primitive/mod.rs @@ -277,7 +277,7 @@ where let s = s.to_physical_repr(); let s = prepare_key(&s, chunk); - // todo! ammortize allocation + // TODO: Amortize allocation. for phys_e in self.aggregation_columns.iter() { let s = phys_e.evaluate(chunk, &context.execution_state)?; let s = s.to_physical_repr(); diff --git a/crates/polars-pipe/src/executors/sinks/group_by/string.rs b/crates/polars-pipe/src/executors/sinks/group_by/string.rs index d2fec9c16173..0855e4cbf42d 100644 --- a/crates/polars-pipe/src/executors/sinks/group_by/string.rs +++ b/crates/polars-pipe/src/executors/sinks/group_by/string.rs @@ -232,7 +232,7 @@ impl StringGroupbySink { let s = s.to_physical_repr(); let s = prepare_key(&s, chunk); - // todo! ammortize allocation + // TODO: Amortize allocation. 
for phys_e in self.aggregation_columns.iter() { let s = phys_e.evaluate(chunk, &context.execution_state)?; let s = s.to_physical_repr(); diff --git a/py-polars/tests/unit/io/test_scan.py b/py-polars/tests/unit/io/test_scan.py index 3da1ade6b1e3..799c4953cbf6 100644 --- a/py-polars/tests/unit/io/test_scan.py +++ b/py-polars/tests/unit/io/test_scan.py @@ -149,7 +149,7 @@ def data_file_glob(session_tmp_dir: Path, data_file_extension: str) -> _DataFile assert sum(row_counts) == 10000 # Make sure we pad file names with enough zeros to ensure correct - # lexographical ordering. + # lexicographical ordering. assert len(row_counts) < 100 # Make sure that some of our data frames consist of multiple chunks which