Compare commits

...

20 Commits

Author SHA1 Message Date
Charlie Marsh
74a8a218f3 Bump version to 0.0.224 2023-01-16 23:43:14 -05:00
Colin Delahunty
1730f2a603 [pyupgrade] Automatically rewrite format-strings to f-strings (#1905) 2023-01-16 23:06:39 -05:00
Charlie Marsh
a4862857de Update PIE796 fixture 2023-01-16 19:29:14 -05:00
Leonardo Esparis
6e88c60c46 Add flake8-pie PIE796: prefer-unique-enum (#1923)
I accept any suggestion. By the way, I have a doubt: I have checked, and all flake8-pie plugins can be fixed by ruff — but is it necessary that this one is also fixed automatically?

rel #1543
2023-01-16 19:27:34 -05:00
Charlie Marsh
2ed1f78873 Add benchmark scripts for no-IO (#1925) 2023-01-16 17:38:40 -05:00
Charlie Marsh
f3bf008aed Avoid removing statements that contain side-effects (#1920)
Closes #1917.
2023-01-16 14:45:02 -05:00
Charlie Marsh
3b4aaa53c1 Add some new testimonials (#1921) 2023-01-16 14:44:52 -05:00
Charlie Marsh
6abf71639f Avoid syntax errors when fixing parenthesized unused variables (#1919)
Closes #1917.
2023-01-16 14:27:41 -05:00
Charlie Marsh
c0845a8c28 Rewrite lru_cache to cache on Python 3.9+ (#1918)
Closes #1913.
2023-01-16 13:14:27 -05:00
Paul Barrett
019ecc4add Trigger update to pre-commit mirror after pypi publish (#1910) 2023-01-16 13:14:18 -05:00
Martin Fischer
f4cf48d885 refactor: Move rule-specific details out of mod.rs via type aliases 2023-01-16 11:27:24 -05:00
Martin Fischer
005f5d7911 refactor: Make flake8_tidy_imports::Settings derive Default 2023-01-16 11:27:24 -05:00
Martin Fischer
2fce580693 refactor: Move flake8_tidy_imports Settings to mod.rs 2023-01-16 11:27:24 -05:00
Martin Fischer
8862565a0f refactor: Split ruff::rules::flake8_tidy_imports::rules 2023-01-16 11:27:24 -05:00
Martin Fischer
5bf6da0db7 refactor: Rename BannedRelativeImport to RelativeImports
The idea is to follow the Rust naming convention for lints[1]:

> the lint name should make sense when read as
> "allow lint-name" or "allow lint-name items"

Following that convention prefixing "Banned" is
redundant as it could be prefixed to any lint name.

[1]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
2023-01-16 11:27:24 -05:00
Martin Fischer
ee655c1a88 refactor: Rename BannedApi to ApiBan to distinguish it from the violation struct 2023-01-16 11:27:24 -05:00
Harutaka Kawamura
2236b4bd59 Add backticks to B904's message (#1914)
This PR adds backticks to B904's message to improve readability.


Without backticks:

<img width="1480" alt="image" src="https://user-images.githubusercontent.com/17039389/212682457-71f13de9-e3dd-4ead-a82b-98e5b60653c2.png">

With backticks:

<img width="1480" alt="image" src="https://user-images.githubusercontent.com/17039389/212682775-36868401-b63e-47d1-ae25-b43b61866b6c.png">
2023-01-16 11:12:43 -05:00
Charlie Marsh
fbf311f7d5 Add instructions for Pyupgrade benchmark 2023-01-16 03:21:31 -05:00
Martin Fischer
8c18b28bc4 Derive Hash instead of implementing it by hand
The caching mechanism of the CLI (ruff_cli::cache) relies on
ruff::settings::Settings implementing the Hash trait.

The ruff::settings::Settings struct previously couldn't automatically
derive the Hash implementation via the #[derive(Hash)] macro attribute
since some of its field types intentionally[1][2] don't implement Hash
(namely regex::Regex, globset::GlobMatcher and globset::GlobSet and
HashMap and HashSet from the standard library).

The code therefore previously implemented the Hash trait by hand for the
whole struct. Implementing Hash by hand for structs that are subject to
change is a bad idea since it's very easy to forget to update the Hash
implementation when adding a new field to the struct. And the Hash
implementation indeed was already incorrect by omitting several fields
from the hash.

This commit introduces wrapper types for Regex, GlobMatcher, GlobSet,
HashSet & HashMap that implement Hash so that we can still add
#[derive(Hash)] to the Settings struct, guaranteeing a correct hash
implementation.

[1]: https://github.com/rust-lang/regex/issues/364#issuecomment-301082076
[2]: The standard library doesn't impl<T: Hash + Ord> Hash for HashSet<T>
     presumably since sorted() requires an allocation and Hash
     implementations are generally expected to work without allocations.
2023-01-16 01:42:55 -05:00
Charlie Marsh
42031b8574 Re-run benchmark and update documentation (#1907)
Closes #269.
2023-01-16 01:38:58 -05:00
71 changed files with 3329 additions and 1019 deletions

View File

@@ -293,3 +293,6 @@ jobs:
run: |
pip install --upgrade twine
twine upload --skip-existing *
- name: Update pre-commit mirror
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.RUFF_PRE_COMMIT_PAT }}" -H "X-GitHub-Api-Version: 2022-11-28" https://api.github.com/repos/charliermarsh/ruff-pre-commit/dispatches --data '{"event_type": "build_pypi_release"}'

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.223
rev: v0.0.224
hooks:
- id: ruff

10
Cargo.lock generated
View File

@@ -735,7 +735,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1906,7 +1906,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"anyhow",
"bitflags",
@@ -1958,7 +1958,7 @@ dependencies = [
[[package]]
name = "ruff_cli"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1995,7 +1995,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -2016,7 +2016,7 @@ dependencies = [
[[package]]
name = "ruff_macros"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"once_cell",
"proc-macro2",

View File

@@ -8,7 +8,7 @@ default-members = [".", "ruff_cli"]
[package]
name = "ruff"
version = "0.0.223"
version = "0.0.224"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"
@@ -46,7 +46,7 @@ once_cell = { version = "1.16.0" }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
regex = { version = "1.6.0" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
ruff_macros = { version = "0.0.223", path = "ruff_macros" }
ruff_macros = { version = "0.0.224", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "acbc517b55406c76da83d7b2711941d8d3f65b87" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "acbc517b55406c76da83d7b2711941d8d3f65b87" }

192
README.md
View File

@@ -10,9 +10,9 @@ An extremely fast Python linter, written in Rust.
<p align="center">
<picture align="center">
<source media="(prefers-color-scheme: dark)" srcset="https://user-images.githubusercontent.com/1309177/210156880-a97c2a0d-2c03-4393-8695-36547935a94e.svg">
<source media="(prefers-color-scheme: light)" srcset="https://user-images.githubusercontent.com/1309177/210156881-a88fd142-5008-4695-9407-d028cec3eff7.svg">
<img alt="Shows a bar chart with benchmark results." src="https://user-images.githubusercontent.com/1309177/210156881-a88fd142-5008-4695-9407-d028cec3eff7.svg">
<source media="(prefers-color-scheme: dark)" srcset="https://user-images.githubusercontent.com/1309177/212613422-7faaf278-706b-4294-ad92-236ffcab3430.svg">
<source media="(prefers-color-scheme: light)" srcset="https://user-images.githubusercontent.com/1309177/212613257-5f4bca12-6d6b-4c79-9bac-51a4c6d08928.svg">
<img alt="Shows a bar chart with benchmark results." src="https://user-images.githubusercontent.com/1309177/212613257-5f4bca12-6d6b-4c79-9bac-51a4c6d08928.svg">
</picture>
</p>
@@ -74,6 +74,13 @@ of [FastAPI](https://github.com/tiangolo/fastapi):
> Ruff is so fast that sometimes I add an intentional bug in the code just to confirm it's actually
> running and checking the code.
[**Nick Schrock**](https://twitter.com/schrockn/status/1612615862904827904), founder of [Elementl](https://www.elementl.com/),
co-creator of [GraphQL](https://graphql.org/):
> Why is Ruff a gamechanger? Primarily because it is nearly 1000x faster. Literally. Not a typo. On
> our largest module (dagster itself, 250k LOC) pylint takes about 2.5 minutes, parallelized across 4
> cores on my M1. Running ruff against our *entire* codebase takes .4 seconds.
[**Bryan Van de Ven**](https://github.com/bokeh/bokeh/pull/12605), co-creator
of [Bokeh](https://github.com/bokeh/bokeh/), original author
of [Conda](https://docs.conda.io/en/latest/):
@@ -82,7 +89,13 @@ of [Conda](https://docs.conda.io/en/latest/):
> ~20s. This is an enormous quality of life improvement for local dev. It's fast enough that I added
> it as an actual commit hook, which is terrific.
[**Tim Abbott**](https://github.com/charliermarsh/ruff/issues/465#issuecomment-1317400028), lead developer of [Zulip](https://github.com/zulip/zulip):
[**Timothy Crosley**](https://twitter.com/timothycrosley/status/1606420868514877440),
creator of [isort](https://github.com/PyCQA/isort):
> Just switched my first project to Ruff. Only one downside so far: it's so fast I couldn't believe it was working till I intentionally introduced some errors.
[**Tim Abbott**](https://github.com/charliermarsh/ruff/issues/465#issuecomment-1317400028), lead
developer of [Zulip](https://github.com/zulip/zulip):
> This is just ridiculously fast... `ruff` is amazing.
@@ -185,7 +198,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.223'
rev: 'v0.0.224'
hooks:
- id: ruff
# Respect `exclude` and `extend-exclude` settings.
@@ -712,6 +725,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| UP028 | RewriteYieldFrom | Replace `yield` over `for` loop with `yield from` | 🛠 |
| UP029 | UnnecessaryBuiltinImport | Unnecessary builtin import: `...` | 🛠 |
| UP030 | FormatLiterals | Use implicit references for positional format fields | 🛠 |
| UP032 | FString | Use f-string instead of `format` call | 🛠 |
### pep8-naming (N)
@@ -842,7 +856,7 @@ For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/
| B025 | DuplicateTryBlockException | try-except block with duplicate exception `Exception` | |
| B026 | StarArgUnpackingAfterKeywordArg | Star-arg unpacking after a keyword argument is strongly discouraged | |
| B027 | EmptyMethodWithoutAbstractDecorator | `...` is an empty method in an abstract base class, but has no abstract decorator | |
| B904 | RaiseWithoutFromInsideExcept | Within an except clause, raise exceptions with raise ... from err or raise ... from None to distinguish them from errors in exception handling | |
| B904 | RaiseWithoutFromInsideExcept | Within an except clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling | |
| B905 | ZipWithoutExplicitStrict | `zip()` without an explicit `strict=` parameter | |
### flake8-builtins (A)
@@ -1018,7 +1032,7 @@ For more, see [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| TID251 | BannedApi | `...` is banned: ... | |
| TID252 | BannedRelativeImport | Relative imports are banned | |
| TID252 | RelativeImports | Relative imports are banned | |
### flake8-unused-arguments (ARG)
@@ -1128,6 +1142,7 @@ For more, see [flake8-pie](https://pypi.org/project/flake8-pie/0.16.0/) on PyPI.
| ---- | ---- | ------- | --- |
| PIE790 | NoUnnecessaryPass | Unnecessary `pass` statement | 🛠 |
| PIE794 | DupeClassFieldDefinitions | Class field `...` is defined multiple times | 🛠 |
| PIE796 | PreferUniqueEnums | Enum contains duplicate value: `...` | |
| PIE807 | PreferListBuiltin | Prefer `list()` over useless lambda | 🛠 |
### flake8-commas (COM)
@@ -1442,6 +1457,7 @@ natively, including:
- [`flake8-tidy-imports`](https://pypi.org/project/flake8-tidy-imports/)
- [`isort`](https://pypi.org/project/isort/)
- [`mccabe`](https://pypi.org/project/mccabe/)
- [`pandas-vet`](https://pypi.org/project/pandas-vet/)
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`pygrep-hooks`](https://github.com/pre-commit/pygrep-hooks) ([#980](https://github.com/charliermarsh/ruff/issues/980))
@@ -1507,6 +1523,7 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-tidy-imports`](https://pypi.org/project/flake8-tidy-imports/)
- [`mccabe`](https://pypi.org/project/mccabe/)
- [`pandas-vet`](https://pypi.org/project/pandas-vet/)
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
@@ -1642,57 +1659,28 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git resources/test/cpython
```
Add this `pyproject.toml` to the CPython directory:
```toml
[tool.ruff]
line-length = 88
extend-exclude = [
"Lib/lib2to3/tests/data/bom.py",
"Lib/lib2to3/tests/data/crlf.py",
"Lib/lib2to3/tests/data/different_encoding.py",
"Lib/lib2to3/tests/data/false_encoding.py",
"Lib/lib2to3/tests/data/py2_test_grammar.py",
"Lib/test/bad_coding2.py",
"Lib/test/badsyntax_3131.py",
"Lib/test/badsyntax_pep3120.py",
"Lib/test/encoded_modules/module_iso_8859_1.py",
"Lib/test/encoded_modules/module_koi8_r.py",
"Lib/test/test_fstring.py",
"Lib/test/test_grammar.py",
"Lib/test/test_importlib/test_util.py",
"Lib/test/test_named_expressions.py",
"Lib/test/test_patma.py",
"Lib/test/test_source_encoding.py",
"Tools/c-analyzer/c_parser/parser/_delim.py",
"Tools/i18n/pygettext.py",
"Tools/test2to3/maintest.py",
"Tools/test2to3/setup.py",
"Tools/test2to3/test/test_foo.py",
"Tools/test2to3/test2to3/hello.py",
]
```
Next, to benchmark the release build:
To benchmark the release build:
```shell
cargo build --release
hyperfine --ignore-failure --warmup 10 --runs 100 \
cargo build --release && hyperfine --ignore-failure --warmup 10 \
"./target/release/ruff ./resources/test/cpython/ --no-cache" \
"./target/release/ruff ./resources/test/cpython/"
Benchmark 1: ./target/release/ruff ./resources/test/cpython/ --no-cache
Time (mean ± σ): 297.4 ms ± 4.9 ms [User: 2460.0 ms, System: 67.2 ms]
Range (min … max): 287.7 ms … 312.1 ms 100 runs
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
Range (min … max): 289.9 ms … 301.6 ms 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 2: ./target/release/ruff ./resources/test/cpython/
Time (mean ± σ): 79.6 ms ± 7.3 ms [User: 59.7 ms, System: 356.1 ms]
Range (min … max): 62.4 ms … 111.2 ms 100 runs
Time (mean ± σ): 48.0 ms ± 3.1 ms [User: 65.2 ms, System: 124.7 ms]
Range (min … max): 45.0 ms … 66.7 ms 62 runs
Warning: Ignoring non-zero exit code.
Summary
'./target/release/ruff ./resources/test/cpython/' ran
6.12 ± 0.41 times faster than './target/release/ruff ./resources/test/cpython/ --no-cache'
```
To benchmark against the ecosystem's existing tools:
@@ -1700,73 +1688,89 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./resources/test/cpython/ --no-cache" \
"pylint --recursive=y resources/test/cpython/" \
"pyflakes resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle resources/test/cpython" \
"flake8 resources/test/cpython" \
"python -m scripts.run_flake8 resources/test/cpython"
```
"flake8 resources/test/cpython"
In order, these evaluate:
- Ruff
- Pylint
- Pyflakes
- autoflake
- pycodestyle
- Flake8
- Flake8, with a hack to enable multiprocessing on macOS
(You can `poetry install` from `./scripts` to create a working environment for the above.)
```shell
Benchmark 1: ./target/release/ruff ./resources/test/cpython/ --no-cache
Time (mean ± σ): 297.9 ms ± 7.0 ms [User: 2436.6 ms, System: 65.9 ms]
Range (min … max): 289.9 ms … 314.6 ms 10 runs
Time (mean ± σ): 294.3 ms ± 3.3 ms [User: 2467.5 ms, System: 89.6 ms]
Range (min … max): 291.1 ms … 302.8 ms 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 2: pylint --recursive=y resources/test/cpython/
Time (mean ± σ): 37.634 s ± 0.225 s [User: 36.728 s, System: 0.853 s]
Range (min … max): 37.201 s … 38.106 s 10 runs
Benchmark 2: pyflakes resources/test/cpython
Time (mean ± σ): 15.786 s ± 0.143 s [User: 15.560 s, System: 0.214 s]
Range (min … max): 15.640 s … 16.157 s 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 3: pyflakes resources/test/cpython
Time (mean ± σ): 40.950 s ± 0.449 s [User: 40.688 s, System: 0.229 s]
Range (min … max): 40.348 s … 41.671 s 10 runs
Benchmark 3: autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython
Time (mean ± σ): 6.175 s ± 0.169 s [User: 54.102 s, System: 1.057 s]
Range (min … max): 5.950 s … 6.391 s 10 runs
Benchmark 4: pycodestyle resources/test/cpython
Time (mean ± σ): 46.921 s ± 0.508 s [User: 46.699 s, System: 0.202 s]
Range (min … max): 46.171 s … 47.863 s 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 4: autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython
Time (mean ± σ): 11.562 s ± 0.160 s [User: 107.022 s, System: 1.143 s]
Range (min … max): 11.417 s … 11.917 s 10 runs
Benchmark 5: pycodestyle resources/test/cpython
Time (mean ± σ): 67.428 s ± 0.985 s [User: 67.199 s, System: 0.203 s]
Range (min … max): 65.313 s … 68.496 s 10 runs
Benchmark 5: flake8 resources/test/cpython
Time (mean ± σ): 12.260 s ± 0.321 s [User: 102.934 s, System: 1.230 s]
Range (min … max): 11.848 s … 12.933 s 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 6: flake8 resources/test/cpython
Time (mean ± σ): 116.099 s ± 1.178 s [User: 115.217 s, System: 0.845 s]
Range (min … max): 114.180 s … 117.724 s 10 runs
Warning: Ignoring non-zero exit code.
Benchmark 7: python -m scripts.run_flake8 resources/test/cpython
Time (mean ± σ): 20.477 s ± 0.349 s [User: 142.372 s, System: 1.504 s]
Range (min … max): 20.107 s … 21.183 s 10 runs
Summary
'./target/release/ruff ./resources/test/cpython/ --no-cache' ran
38.81 ± 1.05 times faster than 'autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython'
68.74 ± 1.99 times faster than 'python -m scripts.run_flake8 resources/test/cpython'
126.33 ± 3.05 times faster than 'pylint --recursive=y resources/test/cpython/'
137.46 ± 3.55 times faster than 'pyflakes resources/test/cpython'
226.35 ± 6.23 times faster than 'pycodestyle resources/test/cpython'
389.73 ± 9.92 times faster than 'flake8 resources/test/cpython'
20.98 ± 0.62 times faster than 'autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython'
41.66 ± 1.18 times faster than 'flake8 resources/test/cpython'
53.64 ± 0.77 times faster than 'pyflakes resources/test/cpython'
159.43 ± 2.48 times faster than 'pycodestyle resources/test/cpython'
```
You can run `poetry install` from `./scripts` to create a working environment for the above. All
reported benchmarks were computed using the versions specified by `./scripts/pyproject.toml`
on Python 3.11.
To benchmark Pylint, remove the following files from the CPython repository:
```shell
rm Lib/test/bad_coding.py \
Lib/test/bad_coding2.py \
Lib/test/bad_getattr.py \
Lib/test/bad_getattr2.py \
Lib/test/bad_getattr3.py \
Lib/test/badcert.pem \
Lib/test/badkey.pem \
Lib/test/badsyntax_3131.py \
Lib/test/badsyntax_future10.py \
Lib/test/badsyntax_future3.py \
Lib/test/badsyntax_future4.py \
Lib/test/badsyntax_future5.py \
Lib/test/badsyntax_future6.py \
Lib/test/badsyntax_future7.py \
Lib/test/badsyntax_future8.py \
Lib/test/badsyntax_future9.py \
Lib/test/badsyntax_pep3120.py \
Lib/test/test_asyncio/test_runners.py \
Lib/test/test_copy.py \
Lib/test/test_inspect.py \
Lib/test/test_typing.py
```
Then, from `resources/test/cpython`, run: `time pylint -j 0 -E $(git ls-files '*.py')`. This
will execute Pylint with maximum parallelism and only report errors.
To benchmark Pyupgrade, run the following from `resources/test/cpython`:
```shell
hyperfine --ignore-failure --warmup 5 --prepare "git reset --hard HEAD" \
"find . -type f -name \"*.py\" | xargs -P 0 pyupgrade --py311-plus"
Benchmark 1: find . -type f -name "*.py" | xargs -P 0 pyupgrade --py311-plus
Time (mean ± σ): 30.119 s ± 0.195 s [User: 28.638 s, System: 0.390 s]
Range (min … max): 29.813 s … 30.356 s 10 runs
```
## Reference

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.223"
version = "0.0.224"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.223"
version = "0.0.224"
edition = "2021"
[dependencies]

View File

@@ -7,7 +7,7 @@ build-backend = "maturin"
[project]
name = "ruff"
version = "0.0.223"
version = "0.0.224"
description = "An extremely fast Python linter, written in Rust."
authors = [
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },

View File

@@ -0,0 +1,60 @@
import enum
from enum import Enum, unique
class FakeEnum(enum.Enum):
A = "A"
B = "B"
C = "B" # PIE796
class FakeEnum2(Enum):
A = 1
B = 2
C = 2 # PIE796
class FakeEnum3(str, Enum):
A = "1"
B = "2"
C = "2" # PIE796
class FakeEnum4(Enum):
A = 1.0
B = 2.5
C = 2.5 # PIE796
class FakeEnum5(Enum):
A = 1.0
B = True
C = False
D = False # PIE796
class FakeEnum6(Enum):
A = 1
B = 2
C = None
D = None # PIE796
@enum.unique
class FakeEnum7(enum.Enum):
A = "A"
B = "B"
C = "C"
@unique
class FakeEnum8(Enum):
A = 1
B = 2
C = 2 # PIE796
class FakeEnum9(enum.Enum):
A = "A"
B = "B"
C = "C"

View File

@@ -44,3 +44,25 @@ def f():
1 / 0
except (ValueError, ZeroDivisionError) as x2:
pass
def f(a, b):
x = (
a()
if a is not None
else b
)
y = \
a() if a is not None else b
def f(a, b):
x = (
a
if a is not None
else b
)
y = \
a if a is not None else b

View File

@@ -0,0 +1,86 @@
###
# Errors
###
"{} {}".format(a, b)
"{1} {0}".format(a, b)
"{x.y}".format(x=z)
"{.x} {.y}".format(a, b)
"{} {}".format(a.b, c.d)
"{}".format(a())
"{}".format(a.b())
"{}".format(a.b().c())
"hello {}!".format(name)
"{}{b}{}".format(a, c, b=b)
"{}".format(0x0)
"{} {}".format(a, b)
"""{} {}""".format(a, b)
"foo{}".format(1)
r"foo{}".format(1)
x = "{a}".format(a=1)
print("foo {} ".format(x))
"{a[b]}".format(a=a)
"{a.a[b]}".format(a=a)
"{}{{}}{}".format(escaped, y)
"{}".format(a)
###
# Non-errors
###
# False-negative: RustPython doesn't parse the `\N{snowman}`.
"\N{snowman} {}".format(a)
"{".format(a)
"}".format(a)
"{} {}".format(*a)
"{0} {0}".format(arg)
"{x} {x}".format(arg)
"{x.y} {x.z}".format(arg)
b"{} {}".format(a, b)
"{:{}}".format(x, y)
"{}{}".format(a)
"" "{}".format(a["\\"])
"{}".format(a["b"])
r'"\N{snowman} {}".format(a)'
"{a}" "{b}".format(a=1, b=1)
async def c():
return "{}".format(await 3)
async def c():
return "{}".format(1 + await 3)

View File

@@ -448,7 +448,7 @@
},
"additionalProperties": false,
"definitions": {
"BannedApi": {
"ApiBan": {
"type": "object",
"required": [
"msg"
@@ -743,7 +743,7 @@
"null"
],
"additionalProperties": {
"$ref": "#/definitions/BannedApi"
"$ref": "#/definitions/ApiBan"
}
}
},
@@ -1431,6 +1431,7 @@
"PIE79",
"PIE790",
"PIE794",
"PIE796",
"PIE8",
"PIE80",
"PIE807",
@@ -1686,6 +1687,7 @@
"UP029",
"UP03",
"UP030",
"UP032",
"W",
"W2",
"W29",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.0.223"
version = "0.0.224"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.223"
version = "0.0.224"
edition = "2021"
[dependencies]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_macros"
version = "0.0.223"
version = "0.0.224"
edition = "2021"
[lib]

View File

@@ -0,0 +1,25 @@
# benchmarks
Utilities for benchmarking Ruff.
## Getting Started
Run `./scripts/benchmarks/run.sh` to clone the benchmarking target (CPython).
If you're looking to benchmark Ruff against other tools, you'll also need to run `poetry
install` to create a virtual environment with the required dependencies.
## Running Benchmarks
Run `./scripts/benchmarks/run.sh` to run Ruff over the target repo (CPython). The
`./scripts/benchmarks` folder contains a few other benchmarks (e.g., `scripts/benchmarks/run_comparisons.sh`
compares Ruff to a variety of other tools).
## Generating Plots
The Vega specification for the benchmark plot depicted in the root README can be found at
`scripts/benchmarks/graph-spec.json`. You can render this JSON spec in the [Vega Editor](https://vega.github.io/editor/#/edited).
The images seen in the README are generated by exporting the rendered Vega spec as SVG (at around
688px wide) and manually bolding the Ruff title and benchmark time. The dark mode variant is
generated by changing the fill from `fill="#333333"` to `fill="#C9D1D9"`.

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 9.0 KiB

View File

@@ -0,0 +1,209 @@
{
"$schema": "https://vega.github.io/schema/vega-lite/v5.json",
"data": {
"values": [
{
"tool": "Ruff",
"time": 0.2943,
"timeFormat": "0.29s"
},
{
"tool": "Autoflake",
"time": 6.175,
"timeFormat": "6.18s"
},
{
"tool": "Flake8",
"time": 12.26,
"timeFormat": "12.26s"
},
{
"tool": "Pyflakes",
"time": 15.786,
"timeFormat": "15.79s"
},
{
"tool": "Pycodestyle",
"time": 46.921,
"timeFormat": "46.92s"
},
{
"tool": "Pylint",
"time": 62.0,
"timeFormat": "> 60s"
}
]
},
"config": {
"params": [
{
"name": "defaultFont",
"value": "-apple-system,BlinkMacSystemFont,\"Segoe UI\",Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\""
},
{
"name": "titleColor",
"value": "#333333"
},
{
"name": "labelColor",
"value": "#333333"
}
],
"header": {
"labelFont": {
"expr": "defaultFont"
},
"titleFont": {
"expr": "defaultFont"
},
"titleFontWeight": 500
},
"text": {
"font": {
"expr": "defaultFont"
},
"color": {
"expr": "labelColor"
}
},
"mark": {
"font": {
"expr": "defaultFont"
},
"color": {
"expr": "labelColor"
}
},
"title": {
"font": {
"expr": "defaultFont"
},
"subtitleFont": {
"expr": "defaultFont"
},
"fontWeight": 500
},
"axis": {
"labelColor": {
"expr": "labelColor"
},
"labelFont": {
"expr": "defaultFont"
},
"titleFont": {
"expr": "defaultFont"
},
"titleFontWeight": 500,
"titleColor": {
"expr": "titleColor"
},
"titleFontSize": 12
},
"legend": {
"titleFontWeight": 500,
"titleColor": {
"expr": "titleColor"
},
"titleFontSize": 12,
"labelColor": {
"expr": "labelColor"
},
"labelFont": {
"expr": "defaultFont"
},
"titleFont": {
"expr": "defaultFont"
}
},
"view": {
"stroke": null
},
"background": "transparent"
},
"background": "transparent",
"encoding": {
"y": {
"field": "tool",
"type": "nominal",
"axis": {
"grid": false,
"title": null,
"labelFontSize": 12,
"ticks": false,
"labelPadding": 10,
"domain": false
},
"sort": null
},
"x": {
"field": "time",
"type": "quantitative",
"axis": {
"title": null,
"labelExpr": "datum.value + 's'",
"tickCount": 3,
"tickSize": 0,
"labelPadding": 6,
"labelAlign": "center",
"labelFontSize": 12,
"tickColor": "rgba(127,127,127,0.25)",
"gridColor": "rgba(127,127,127,0.25)",
"domain": false
}
}
},
"height": 140,
"width": "container",
"layer": [
{
"mark": "bar",
"encoding": {
"size": {
"value": 13
},
"color": {
"value": "#E15759"
}
}
},
{
"transform": [
{
"filter": "datum.tool !== 'ruff'"
}
],
"mark": {
"type": "text",
"align": "left",
"baseline": "middle",
"dx": 6,
"fontSize": 12
},
"encoding": {
"text": {
"field": "timeFormat"
}
}
},
{
"transform": [
{
"filter": "datum.tool === 'ruff'"
}
],
"mark": {
"type": "text",
"align": "left",
"baseline": "middle",
"dx": 6,
"fontSize": 12,
"fontWeight": "bold"
},
"encoding": {
"text": {
"field": "timeFormat"
}
}
}
]
}

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 9.0 KiB

1005
scripts/benchmarks/poetry.lock generated Normal file

File diff suppressed because it is too large. [Load Diff]

View File

@@ -0,0 +1,77 @@
[tool.poetry]
name = "scripts"
version = "0.1.0"
description = ""
authors = ["Charles Marsh <charlie.r.marsh@gmail.com>"]
[tool.poetry.dependencies]
python = ">=3.10,<3.12"
autoflake = "^2.0.0"
flake8 = "^6.0.0"
pycodestyle = "^2.10.0"
pyflakes = "^3.0.1"
pylint = "^2.15.10"
black = "^22.12.0"
isort = "^5.11.4"
flake8-2020 = { version = "*", optional = true }
flake8-annotations = { version = "*", optional = true }
flake8-bandit = { version = "*", optional = true }
flake8-blind-except = { version = "*", optional = true }
# flake8-boolean-trap = { version = "*", optional = true }
flake8-bugbear = { version = "*", optional = true }
flake8-builtins = { version = "*", optional = true }
flake8-commas = { version = "*", optional = true }
flake8-comprehensions = { version = "*", optional = true }
flake8-datetimez = { version = "*", optional = true }
flake8-debugger = { version = "*", optional = true }
flake8-docstrings = { version = "*", optional = true }
# flake8-eradicate = { version = "*", optional = true }
flake8-errmsg = { version = "*", optional = true }
flake8-implicit-str-concat = { version = "*", optional = true }
# flake8-import-conventions = { version = "*", optional = true }
flake8-isort = { version = "*", optional = true }
flake8-pie = { version = "*", optional = true }
flake8-print = { version = "*", optional = true }
flake8-quotes = { version = "*", optional = true }
flake8-return = { version = "*", optional = true }
flake8-simplify = { version = "*", optional = true }
flake8-super = { version = "*", optional = true }
flake8-tidy-imports = { version = "*", optional = true }
pandas-vet = { version = "*", optional = true }
pep8-naming = { version = "*", optional = true }
[tool.poetry.dev-dependencies]
[tool.poetry.extras]
plugins = [
"flake8-2020",
"flake8-annotations",
"flake8-bandit",
"flake8-blind-except",
# "flake8-boolean-trap",
"flake8-bugbear",
"flake8-builtins",
"flake8-commas",
"flake8-comprehensions",
"flake8-datetimez",
"flake8-debugger",
"flake8-docstrings",
# "flake8-eradicate",
"flake8-errmsg",
"flake8-implicit-str-concat",
# "flake8-import-conventions",
"flake8-isort",
"flake8-pie",
"flake8-print",
"flake8-quotes",
"flake8-return",
"flake8-simplify",
"flake8-super",
"flake8-tidy-imports",
"pandas-vet",
"pep8-naming",
]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

8
scripts/benchmarks/run.sh Executable file
View File

@@ -0,0 +1,8 @@
#!/usr/bin/env sh
###
# Benchmark Ruff on the CPython codebase.
###
cargo build --release && hyperfine --ignore-failure --warmup 10 \
"./target/release/ruff ./resources/test/cpython/ --no-cache"

26
scripts/benchmarks/run_all.sh Executable file
View File

@@ -0,0 +1,26 @@
#!/usr/bin/env sh
###
# Benchmark Ruff's performance against a variety of similar tools, suppressing output as much as
# possible (so as to reduce I/O overhead).
###
# Note: Flake8's `checker.py` requires the following variant of `mp_run`:
# def _mp_run(filename: str) -> tuple[str, Results, dict[str, int]]:
# try:
# return FileChecker(
# filename=filename, plugins=_mp_plugins, options=_mp_options
# ).run_checks()
# except:
# return (filename, [], {
# "files": 0,
# "logical lines": 0,
# "physical lines": 0,
# "tokens": 0,
# })
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --select ALL" \
"flake8 resources/test/cpython -qq --docstring-convention=all" \
"pycodestyle resources/test/cpython -qq" \
"pylint resources/test/cpython -j 0 --recursive=y --disable=E,W,C,R"

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env sh
###
# Benchmark Ruff's performance against a variety of similar tools.
###
# NOTE(review): unlike run.sh, this script does not build Ruff itself --
# presumably `./target/release/ruff` must already exist; confirm before use.
# `--ignore-failure` is required because the linters exit non-zero when they
# report violations; `--warmup 5` primes the OS file cache first.
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./resources/test/cpython/ --no-cache" \
"pyflakes resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle resources/test/cpython" \
"flake8 resources/test/cpython"

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env sh
###
# Benchmark the incremental performance of each subsequent plugin.
###
# Each timed command enables exactly one additional rule-code prefix via
# `--extend-select`, so comparing a line against the baseline (first command)
# isolates the cost of that plugin alone. `--silent` suppresses diagnostic
# output so only linting time is measured; `--ignore-failure` is needed
# because Ruff exits non-zero when violations are found.
cargo build --release && hyperfine --ignore-failure --warmup 10 \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select C90" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select I" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select D" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select UP" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select N" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select YTT" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select ANN" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select S" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select BLE" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select FBT" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select B" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select A" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select C4" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select T10" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select EM" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select ISC" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select ICN" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select T20" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PT" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select Q" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select RET" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select SIM" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select TID" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select ARG" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select DTZ" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select ERA" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PD" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PGH" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PLC" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PLE" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PLR" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PLW" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select PIE" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select COM" \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent --extend-select RUF"

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env sh
###
# Benchmark Ruff's performance against a variety of similar tools, suppressing output as much as
# possible (so as to reduce I/O overhead).
###
# NOTE(review): assumes `./target/release/ruff` has already been built --
# confirm. `--silent` / `-qq` / `--disable=...` quiet each tool so terminal
# I/O does not dominate the measurement; `--ignore-failure` tolerates the
# non-zero exit codes linters use to signal violations.
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff ./resources/test/cpython/ --no-cache --silent" \
"pycodestyle resources/test/cpython -qq" \
"flake8 resources/test/cpython -qq" \
"pylint resources/test/cpython -j 0 --recursive=y --disable=E,W,C,R"

7
scripts/benchmarks/setup.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/usr/bin/env sh
###
# Set up the CPython repository to enable benchmarking.
###
# Clone only if the checkout is absent: `git clone` fails on an existing
# non-empty directory, so this guard makes the script safe to re-run.
if [ ! -d resources/test/cpython ]; then
    git clone --branch 3.10 https://github.com/python/cpython.git resources/test/cpython
fi

305
scripts/poetry.lock generated
View File

@@ -1,305 +0,0 @@
# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "astroid"
version = "2.12.13"
description = "An abstract syntax tree for Python with inference support."
category = "main"
optional = false
python-versions = ">=3.7.2"
files = [
{file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"},
{file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"},
]
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""}
[[package]]
name = "autoflake"
version = "1.7.8"
description = "Removes unused imports and unused variables"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "autoflake-1.7.8-py3-none-any.whl", hash = "sha256:46373ef69b6714f5064c923bb28bd797c4f8a9497f557d87fc36665c6d956b39"},
{file = "autoflake-1.7.8.tar.gz", hash = "sha256:e7e46372dee46fa1c97acf310d99d922b63d369718a270809d7c278d34a194cf"},
]
[package.dependencies]
pyflakes = ">=1.1.0,<3"
tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "dill"
version = "0.3.6"
description = "serialize all of python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
{file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
]
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
name = "flake8"
version = "5.0.4"
description = "the modular source code checker: pep8 pyflakes and co"
category = "main"
optional = false
python-versions = ">=3.6.1"
files = [
{file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"},
{file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.9.0,<2.10.0"
pyflakes = ">=2.5.0,<2.6.0"
[[package]]
name = "isort"
version = "5.11.4"
description = "A Python utility / library to sort Python imports."
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"},
{file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"},
]
[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
plugins = ["setuptools"]
requirements-deprecated-finder = ["pip-api", "pipreqs"]
[[package]]
name = "lazy-object-proxy"
version = "1.8.0"
description = "A fast and thorough lazy object proxy."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "lazy-object-proxy-1.8.0.tar.gz", hash = "sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"},
{file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"},
{file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"},
{file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"},
{file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"},
{file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"},
{file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"},
{file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"},
{file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"},
{file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"},
{file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"},
{file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"},
{file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"},
{file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"},
{file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"},
{file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"},
{file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"},
{file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"},
{file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
]
[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "platformdirs"
version = "2.6.2"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
{file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
]
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pycodestyle"
version = "2.9.1"
description = "Python style guide checker"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"},
{file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"},
]
[[package]]
name = "pyflakes"
version = "2.5.0"
description = "passive checker of Python programs"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"},
{file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"},
]
[[package]]
name = "pylint"
version = "2.15.9"
description = "python code static checker"
category = "main"
optional = false
python-versions = ">=3.7.2"
files = [
{file = "pylint-2.15.9-py3-none-any.whl", hash = "sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb"},
{file = "pylint-2.15.9.tar.gz", hash = "sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4"},
]
[package.dependencies]
astroid = ">=2.12.13,<=2.14.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = {version = ">=0.2", markers = "python_version < \"3.11\""}
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomlkit = ">=0.10.1"
[package.extras]
spelling = ["pyenchant (>=3.2,<4.0)"]
testutils = ["gitpython (>3)"]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "tomlkit"
version = "0.11.6"
description = "Style preserving TOML library"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
{file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
]
[[package]]
name = "wrapt"
version = "1.14.1"
description = "Module for decorators, wrappers and monkey patching."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
{file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"},
{file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"},
{file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"},
{file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"},
{file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"},
{file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"},
{file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"},
{file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"},
{file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"},
{file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"},
{file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"},
{file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"},
{file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"},
{file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"},
{file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"},
{file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"},
{file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"},
{file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"},
{file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"},
{file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"},
{file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"},
{file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"},
{file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"},
{file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"},
{file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"},
{file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"},
{file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"},
{file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"},
{file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"},
{file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"},
{file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"},
{file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"},
{file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"},
{file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"},
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.11"
content-hash = "959633dfe6335ab3f943a95c5fdd12ff1bb66cd03c5917704f10ae38d3a5009c"

View File

@@ -1,27 +0,0 @@
[tool.black]
line-length = 120
[tool.ruff]
line-length = 120
select = ["E", "F", "W", "I", "C", "RET", "ANN", "UP"]
target-version = "py310"
[tool.poetry]
name = "scripts"
version = "0.1.0"
description = ""
authors = ["Charles Marsh <charlie.r.marsh@gmail.com>"]
[tool.poetry.dependencies]
python = ">=3.10,<3.11"
autoflake = "^1.4"
flake8 = "^5.0.4"
pycodestyle = "^2.9.1"
pyflakes = "^2.5.0"
pylint = "^2.15.0"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@@ -1,29 +0,0 @@
#!/usr/bin/env python3
"""Wrapper around Flake8 to enable multiprocessing on all operating systems.
As of Python 3.8, macOS's default "start method" for multiprocessing is `spawn`. Flake8
requires a "start method" of `fork`, and disables multiprocessing if it detects `spawn`
or some other "start method". This script enables the `fork` start method before passing
along any command-line arguments to `flake8`.
This has never caused me any problems, but note that they disabled this for a reason:
Flake8's plugin interface doesn't work with `spawn`, and the maintainer says that `fork`
is "pretty broken" on macOS.
See:
- https://github.com/pycqa/flake8/issues/955
- https://github.com/PyCQA/flake8/issues/1337
- https://github.com/PyCQA/flake8/issues/342
- https://github.com/PyCQA/flake8/pull/1621
Example usage: python -m run_flake8 --select=E501 .
"""
import multiprocessing
import sys

from flake8.main import cli


def _main() -> None:
    # Force `fork` (overriding any method already set), then hand the
    # remaining command-line arguments straight to Flake8's CLI.
    multiprocessing.set_start_method("fork", force=True)
    cli.main(sys.argv[1:])


if __name__ == "__main__":
    _main()

View File

@@ -94,6 +94,45 @@ pub fn contains_call_path(checker: &Checker, expr: &Expr, target: &[&str]) -> bo
})
}
/// Return `true` if the `Expr` contains an expression that appears to include a
/// side-effect (like a function call).
///
/// Walks every sub-expression of `expr` via `any_over_expr`; the predicate
/// flags calls, awaits, comprehensions, and yields as potential effects, with
/// one carve-out for argument-less builtin constructors.
pub fn contains_effect(checker: &Checker, expr: &Expr) -> bool {
any_over_expr(expr, &|expr| {
// Accept empty initializers.
// `set()`, `list()`, `tuple()`, `dict()`, and `frozenset()` called with no
// positional or keyword arguments are treated as effect-free -- but only when
// the name still resolves to the builtin (`checker.is_builtin`), i.e. it has
// not been shadowed by a user-defined callable.
if let ExprKind::Call {
func,
args,
keywords,
} = &expr.node
{
if args.is_empty() && keywords.is_empty() {
if let ExprKind::Name { id, .. } = &func.node {
let is_empty_initializer = (id == "set"
|| id == "list"
|| id == "tuple"
|| id == "dict"
|| id == "frozenset")
&& checker.is_builtin(id);
return !is_empty_initializer;
}
}
}
// Otherwise, avoid all complex expressions.
// Any of these node kinds can execute arbitrary user code when evaluated,
// so their mere presence counts as a potential side-effect.
matches!(
expr.node,
ExprKind::Call { .. }
| ExprKind::Await { .. }
| ExprKind::GeneratorExp { .. }
| ExprKind::ListComp { .. }
| ExprKind::SetComp { .. }
| ExprKind::DictComp { .. }
| ExprKind::Yield { .. }
| ExprKind::YieldFrom { .. }
)
})
}
/// Call `func` over every `Expr` in `expr`, returning `true` if any expression
/// returns `true`.
pub fn any_over_expr<F>(expr: &Expr, func: &F) -> bool

View File

@@ -701,6 +701,10 @@ where
flake8_pie::rules::dupe_class_field_definitions(self, stmt, body);
}
if self.settings.enabled.contains(&RuleCode::PIE796) {
flake8_pie::rules::prefer_unique_enums(self, stmt, body);
}
self.check_builtin_shadowing(name, stmt, false);
for expr in bases {
@@ -804,7 +808,7 @@ where
// flake8_tidy_imports
if self.settings.enabled.contains(&RuleCode::TID251) {
if let Some(diagnostic) =
flake8_tidy_imports::rules::name_or_parent_is_banned(
flake8_tidy_imports::banned_api::name_or_parent_is_banned(
alias,
&alias.node.name,
&self.settings.flake8_tidy_imports.banned_api,
@@ -948,16 +952,18 @@ where
if self.settings.enabled.contains(&RuleCode::TID251) {
if let Some(module) = module {
for name in names {
if let Some(diagnostic) = flake8_tidy_imports::rules::name_is_banned(
module,
name,
&self.settings.flake8_tidy_imports.banned_api,
) {
if let Some(diagnostic) =
flake8_tidy_imports::banned_api::name_is_banned(
module,
name,
&self.settings.flake8_tidy_imports.banned_api,
)
{
self.diagnostics.push(diagnostic);
}
}
if let Some(diagnostic) =
flake8_tidy_imports::rules::name_or_parent_is_banned(
flake8_tidy_imports::banned_api::name_or_parent_is_banned(
stmt,
module,
&self.settings.flake8_tidy_imports.banned_api,
@@ -1106,11 +1112,13 @@ where
}
if self.settings.enabled.contains(&RuleCode::TID252) {
if let Some(diagnostic) = flake8_tidy_imports::rules::banned_relative_import(
stmt,
level.as_ref(),
&self.settings.flake8_tidy_imports.ban_relative_imports,
) {
if let Some(diagnostic) =
flake8_tidy_imports::relative_imports::banned_relative_import(
stmt,
level.as_ref(),
&self.settings.flake8_tidy_imports.ban_relative_imports,
)
{
self.diagnostics.push(diagnostic);
}
}
@@ -1843,7 +1851,7 @@ where
}
if self.settings.enabled.contains(&RuleCode::TID251) {
flake8_tidy_imports::rules::banned_attribute_access(self, expr);
flake8_tidy_imports::banned_api::banned_attribute_access(self, expr);
}
}
ExprKind::Call {
@@ -1859,6 +1867,7 @@ where
|| self.settings.enabled.contains(&RuleCode::F525)
// pyupgrade
|| self.settings.enabled.contains(&RuleCode::UP030)
|| self.settings.enabled.contains(&RuleCode::UP032)
{
if let ExprKind::Attribute { value, attr, .. } = &func.node {
if let ExprKind::Constant {
@@ -1882,8 +1891,8 @@ where
}
Ok(summary) => {
if self.settings.enabled.contains(&RuleCode::F522) {
pyflakes::rules::string_dot_format_extra_named_arguments(self,
&summary, keywords, location,
pyflakes::rules::string_dot_format_extra_named_arguments(
self, &summary, keywords, location,
);
}
@@ -1909,6 +1918,10 @@ where
if self.settings.enabled.contains(&RuleCode::UP030) {
pyupgrade::rules::format_literals(self, &summary, expr);
}
if self.settings.enabled.contains(&RuleCode::UP032) {
pyupgrade::rules::f_strings(self, &summary, expr);
}
}
}
}

View File

@@ -7,3 +7,7 @@ pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
/// String-literal prefixes that may precede a single (non-triple) quote,
/// with two-character prefixed forms listed before the bare quote characters.
/// The original list repeated `"u\""`, `"u'"`, `"r\""`, and `"r'"`; the
/// duplicates were dead entries for prefix matching and have been removed.
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &[
    "u\"", "u'", "r\"", "r'", "U\"", "U'", "R\"", "R'", "\"", "'",
];
pub const TRIPLE_QUOTE_SUFFIXES: &[&str] = &["\"\"\"", "'''"];
pub const SINGLE_QUOTE_SUFFIXES: &[&str] = &["\"", "'"];

View File

@@ -11,7 +11,7 @@ use crate::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::settings::Strictness;
use crate::rules::flake8_tidy_imports::relative_imports::Strictness;
use crate::rules::pydocstyle::settings::Convention;
use crate::rules::{
flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_pytest_style, flake8_quotes,
@@ -93,7 +93,7 @@ pub fn convert(
let mut flake8_errmsg = flake8_errmsg::settings::Options::default();
let mut flake8_pytest_style = flake8_pytest_style::settings::Options::default();
let mut flake8_quotes = flake8_quotes::settings::Options::default();
let mut flake8_tidy_imports = flake8_tidy_imports::settings::Options::default();
let mut flake8_tidy_imports = flake8_tidy_imports::options::Options::default();
let mut mccabe = mccabe::settings::Options::default();
let mut pep8_naming = pep8_naming::settings::Options::default();
let mut pydocstyle = pydocstyle::settings::Options::default();
@@ -354,7 +354,7 @@ pub fn convert(
if flake8_quotes != flake8_quotes::settings::Options::default() {
options.flake8_quotes = Some(flake8_quotes);
}
if flake8_tidy_imports != flake8_tidy_imports::settings::Options::default() {
if flake8_tidy_imports != flake8_tidy_imports::options::Options::default() {
options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != mccabe::settings::Options::default() {

View File

@@ -4,11 +4,11 @@ use std::io::{BufReader, Read};
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
use globset::GlobMatcher;
use path_absolutize::{path_dedot, Absolutize};
use rustc_hash::FxHashSet;
use crate::registry::RuleCode;
use crate::settings::hashable::{HashableGlobMatcher, HashableHashSet};
/// Extract the absolute path and basename (as strings) from a Path.
pub fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
@@ -26,7 +26,11 @@ pub fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
/// Create a set with codes matching the pattern/code pairs.
pub(crate) fn ignores_from_path<'a>(
path: &Path,
pattern_code_pairs: &'a [(GlobMatcher, GlobMatcher, FxHashSet<RuleCode>)],
pattern_code_pairs: &'a [(
HashableGlobMatcher,
HashableGlobMatcher,
HashableHashSet<RuleCode>,
)],
) -> Result<FxHashSet<&'a RuleCode>> {
let (file_path, file_basename) = extract_path_names(path)?;
Ok(pattern_code_pairs
@@ -34,7 +38,7 @@ pub(crate) fn ignores_from_path<'a>(
.filter(|(absolute, basename, _)| {
basename.is_match(file_basename) || absolute.is_match(file_path)
})
.flat_map(|(_, _, codes)| codes)
.flat_map(|(_, _, codes)| codes.iter())
.collect())
}

View File

@@ -123,7 +123,7 @@ pub fn defaultSettings() -> Result<JsValue, JsValue> {
flake8_errmsg: Some(flake8_errmsg::settings::Settings::default().into()),
flake8_pytest_style: Some(flake8_pytest_style::settings::Settings::default().into()),
flake8_quotes: Some(flake8_quotes::settings::Settings::default().into()),
flake8_tidy_imports: Some(flake8_tidy_imports::settings::Settings::default().into()),
flake8_tidy_imports: Some(flake8_tidy_imports::Settings::default().into()),
flake8_import_conventions: Some(
flake8_import_conventions::settings::Settings::default().into(),
),

View File

@@ -9,6 +9,7 @@ use regex::Regex;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::registry::{Diagnostic, RuleCode, CODE_REDIRECTS};
use crate::settings::hashable::HashableHashSet;
use crate::source_code::LineEnding;
static NOQA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
@@ -84,7 +85,7 @@ pub fn add_noqa(
diagnostics: &[Diagnostic],
contents: &str,
noqa_line_for: &IntMap<usize, usize>,
external: &FxHashSet<String>,
external: &HashableHashSet<String>,
line_ending: &LineEnding,
) -> Result<usize> {
let (count, output) =
@@ -97,7 +98,7 @@ fn add_noqa_inner(
diagnostics: &[Diagnostic],
contents: &str,
noqa_line_for: &IntMap<usize, usize>,
external: &FxHashSet<String>,
external: &HashableHashSet<String>,
line_ending: &LineEnding,
) -> (usize, String) {
let mut matches_by_line: FxHashMap<usize, FxHashSet<&RuleCode>> = FxHashMap::default();
@@ -208,12 +209,12 @@ fn add_noqa_inner(
#[cfg(test)]
mod tests {
use nohash_hasher::IntMap;
use rustc_hash::FxHashSet;
use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::noqa::{add_noqa_inner, NOQA_LINE_REGEX};
use crate::registry::Diagnostic;
use crate::settings::hashable::HashableHashSet;
use crate::source_code::LineEnding;
use crate::violations;
@@ -236,7 +237,7 @@ mod tests {
let diagnostics = vec![];
let contents = "x = 1";
let noqa_line_for = IntMap::default();
let external = FxHashSet::default();
let external = HashableHashSet::default();
let (count, output) = add_noqa_inner(
&diagnostics,
contents,
@@ -253,7 +254,7 @@ mod tests {
)];
let contents = "x = 1";
let noqa_line_for = IntMap::default();
let external = FxHashSet::default();
let external = HashableHashSet::default();
let (count, output) = add_noqa_inner(
&diagnostics,
contents,
@@ -276,7 +277,7 @@ mod tests {
];
let contents = "x = 1 # noqa: E741\n";
let noqa_line_for = IntMap::default();
let external = FxHashSet::default();
let external = HashableHashSet::default();
let (count, output) = add_noqa_inner(
&diagnostics,
contents,
@@ -299,7 +300,7 @@ mod tests {
];
let contents = "x = 1 # noqa";
let noqa_line_for = IntMap::default();
let external = FxHashSet::default();
let external = HashableHashSet::default();
let (count, output) = add_noqa_inner(
&diagnostics,
contents,

View File

@@ -13,7 +13,7 @@ use strum_macros::{AsRefStr, Display, EnumIter, EnumString};
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::violation::Violation;
use crate::violations;
use crate::{rules, violations};
ruff_macros::define_rule_mapping!(
// pycodestyle errors
@@ -152,8 +152,8 @@ ruff_macros::define_rule_mapping!(
// mccabe
C901 => violations::FunctionIsTooComplex,
// flake8-tidy-imports
TID251 => violations::BannedApi,
TID252 => violations::BannedRelativeImport,
TID251 => rules::flake8_tidy_imports::banned_api::BannedApi,
TID252 => rules::flake8_tidy_imports::relative_imports::RelativeImports,
// flake8-return
RET501 => violations::UnnecessaryReturnNone,
RET502 => violations::ImplicitReturnValue,
@@ -254,6 +254,7 @@ ruff_macros::define_rule_mapping!(
UP028 => violations::RewriteYieldFrom,
UP029 => violations::UnnecessaryBuiltinImport,
UP030 => violations::FormatLiterals,
UP032 => violations::FString,
// pydocstyle
D100 => violations::PublicModule,
D101 => violations::PublicClass,
@@ -410,6 +411,7 @@ ruff_macros::define_rule_mapping!(
// flake8-pie
PIE790 => violations::NoUnnecessaryPass,
PIE794 => violations::DupeClassFieldDefinitions,
PIE796 => violations::PreferUniqueEnums,
PIE807 => violations::PreferListBuiltin,
// flake8-commas
COM812 => violations::TrailingCommaMissing,

View File

@@ -1,13 +1,14 @@
//! Settings for import conventions.
use std::hash::{Hash, Hasher};
use std::hash::Hash;
use itertools::Itertools;
use ruff_macros::ConfigurationOptions;
use rustc_hash::FxHashMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::settings::hashable::HashableHashMap;
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
("altair", "alt"),
("matplotlib.pyplot", "plt"),
@@ -55,17 +56,9 @@ pub struct Options {
pub extend_aliases: Option<FxHashMap<String, String>>,
}
#[derive(Debug)]
#[derive(Debug, Hash)]
pub struct Settings {
pub aliases: FxHashMap<String, String>,
}
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
for value in self.aliases.iter().sorted() {
value.hash(state);
}
}
pub aliases: HashableHashMap<String, String>,
}
fn default_aliases() -> FxHashMap<String, String> {
@@ -89,7 +82,7 @@ fn resolve_aliases(options: Options) -> FxHashMap<String, String> {
impl Default for Settings {
fn default() -> Self {
Self {
aliases: default_aliases(),
aliases: default_aliases().into(),
}
}
}
@@ -97,7 +90,7 @@ impl Default for Settings {
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
aliases: resolve_aliases(options),
aliases: resolve_aliases(options).into(),
}
}
}
@@ -105,7 +98,7 @@ impl From<Options> for Settings {
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
aliases: Some(settings.aliases),
aliases: Some(settings.aliases.into()),
extend_aliases: None,
}
}

View File

@@ -14,6 +14,7 @@ mod tests {
#[test_case(RuleCode::PIE790, Path::new("PIE790.py"); "PIE790")]
#[test_case(RuleCode::PIE794, Path::new("PIE794.py"); "PIE794")]
#[test_case(RuleCode::PIE796, Path::new("PIE796.py"); "PIE796")]
#[test_case(RuleCode::PIE807, Path::new("PIE807.py"); "PIE807")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());

View File

@@ -2,6 +2,8 @@ use log::error;
use rustc_hash::FxHashSet;
use rustpython_ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
use crate::ast::comparable::ComparableExpr;
use crate::ast::helpers::unparse_expr;
use crate::ast::types::{Range, RefEquality};
use crate::autofix::helpers::delete_stmt;
use crate::checkers::ast::Checker;
@@ -106,6 +108,41 @@ pub fn dupe_class_field_definitions<'a, 'b>(
}
}
/// PIE796
/// Flag members of an `enum.Enum` subclass whose assigned values repeat an
/// earlier member's value. In Python, duplicate enum values silently alias
/// the earlier member, which is usually unintended.
pub fn prefer_unique_enums<'a, 'b>(checker: &mut Checker<'a>, parent: &'b Stmt, body: &'b [Stmt])
where
    'b: 'a,
{
    // Only class definitions can be enums.
    let StmtKind::ClassDef { bases, .. } = &parent.node else {
        return;
    };
    // Bail unless the class inherits (directly) from `enum.Enum`.
    if !bases.iter().any(|expr| {
        checker
            .resolve_call_path(expr)
            .map_or(false, |call_path| call_path == ["enum", "Enum"])
    }) {
        return;
    }

    // Track values assigned so far. `ComparableExpr` provides structural
    // (value-based) equality between AST expressions, so `1` == `1` even when
    // they are distinct nodes.
    let mut seen_targets: FxHashSet<ComparableExpr> = FxHashSet::default();
    for stmt in body {
        // Only plain assignments (`NAME = value`) define enum members here.
        let StmtKind::Assign { value, .. } = &stmt.node else {
            continue;
        };
        if !seen_targets.insert(ComparableExpr::from(value)) {
            // Duplicate value: report the repeated member's assignment,
            // including the offending value rendered back to source.
            let diagnostic = Diagnostic::new(
                violations::PreferUniqueEnums {
                    value: unparse_expr(value, checker.stylist),
                },
                Range::from_located(stmt),
            );
            checker.diagnostics.push(diagnostic);
        }
    }
}
/// PIE807
pub fn prefer_list_builtin(checker: &mut Checker, expr: &Expr) {
let ExprKind::Lambda { args, body } = &expr.node else {

View File

@@ -0,0 +1,82 @@
---
source: src/rules/flake8_pie/mod.rs
expression: diagnostics
---
- kind:
PreferUniqueEnums:
value: "\"B\""
location:
row: 8
column: 4
end_location:
row: 8
column: 11
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: "2"
location:
row: 14
column: 4
end_location:
row: 14
column: 9
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: "\"2\""
location:
row: 20
column: 4
end_location:
row: 20
column: 11
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: "2.5"
location:
row: 26
column: 4
end_location:
row: 26
column: 11
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: "False"
location:
row: 33
column: 4
end_location:
row: 33
column: 13
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: None
location:
row: 40
column: 4
end_location:
row: 40
column: 12
fix: ~
parent: ~
- kind:
PreferUniqueEnums:
value: "2"
location:
row: 54
column: 4
end_location:
row: 54
column: 9
fix: ~
parent: ~

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Cmpop, Constant, Expr, ExprContext, ExprKind, Stmt, StmtKin
use crate::ast::comparable::ComparableExpr;
use crate::ast::helpers::{
any_over_expr, contains_call_path, create_expr, create_stmt, has_comments, unparse_expr,
contains_call_path, contains_effect, create_expr, create_stmt, has_comments, unparse_expr,
unparse_stmt,
};
use crate::ast::types::Range;
@@ -272,19 +272,7 @@ pub fn use_dict_get_with_default(
}
// Check that the default value is not "complex".
if any_over_expr(default_val, &|expr| {
matches!(
expr.node,
ExprKind::Call { .. }
| ExprKind::Await { .. }
| ExprKind::GeneratorExp { .. }
| ExprKind::ListComp { .. }
| ExprKind::SetComp { .. }
| ExprKind::DictComp { .. }
| ExprKind::Yield { .. }
| ExprKind::YieldFrom { .. }
)
}) {
if contains_effect(checker, default_val) {
return;
}

View File

@@ -0,0 +1,146 @@
use rustc_hash::FxHashMap;
use rustpython_ast::{Alias, Expr, Located};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::define_violation;
use crate::registry::Diagnostic;
use crate::settings::hashable::HashableHashMap;
use crate::violation::Violation;
// TID251 settings: a map from a banned dotted path (e.g. a module like `cgi`,
// or a member like `typing.TypedDict`) to the metadata describing the ban.
pub type Settings = HashableHashMap<String, ApiBan>;

/// Metadata for a single banned API entry, as configured by the user.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct ApiBan {
    /// The message to display when the API is used.
    pub msg: String,
}
// Violation for TID251: use of an API that the configuration has banned.
// `name` is the banned dotted path that matched; `message` is the
// user-configured explanation from `ApiBan::msg`.
define_violation!(
    pub struct BannedApi {
        pub name: String,
        pub message: String,
    }
);

impl Violation for BannedApi {
    // The user-facing diagnostic text.
    fn message(&self) -> String {
        let BannedApi { name, message } = self;
        format!("`{name}` is banned: {message}")
    }

    // NOTE(review): presumably a stand-in instance used for registry/doc
    // listings where no concrete values exist — confirm against the
    // `Violation` trait's contract.
    fn placeholder() -> Self {
        BannedApi {
            name: "...".to_string(),
            message: "...".to_string(),
        }
    }
}
/// TID251
/// TID251
/// Check whether an imported member (`from {module} import {name}`) resolves
/// to a banned API, returning a diagnostic located at the alias if so.
pub fn name_is_banned(
    module: &str,
    name: &Alias,
    api_bans: &FxHashMap<String, ApiBan>,
) -> Option<Diagnostic> {
    // The fully-qualified name is the module joined with the imported member.
    let full_name = format!("{module}.{}", &name.node.name);
    let ban = api_bans.get(&full_name)?;
    Some(Diagnostic::new(
        BannedApi {
            name: full_name,
            message: ban.msg.to_string(),
        },
        Range::from_located(name),
    ))
}
/// TID251
/// TID251
/// Check whether `name`, or any of its parent modules, is banned. For
/// `a.b.c` this tests `a.b.c`, then `a.b`, then `a`, and reports the first
/// match (located at the importing statement).
pub fn name_or_parent_is_banned<T>(
    located: &Located<T>,
    name: &str,
    api_bans: &FxHashMap<String, ApiBan>,
) -> Option<Diagnostic> {
    // Walk the dotted path from most- to least-specific prefix.
    std::iter::successors(Some(name), |prefix| {
        prefix.rfind('.').map(|idx| &prefix[..idx])
    })
    .find_map(|prefix| {
        api_bans.get(prefix).map(|ban| {
            Diagnostic::new(
                BannedApi {
                    name: prefix.to_string(),
                    message: ban.msg.to_string(),
                },
                Range::from_located(located),
            )
        })
    })
}
/// TID251
/// TID251
/// Flag attribute accesses (e.g. `typing.TypedDict` used as an expression)
/// that resolve to a banned API path. Pushes at most one diagnostic per
/// expression.
pub fn banned_attribute_access(checker: &mut Checker, expr: &Expr) {
    if let Some(call_path) = checker.resolve_call_path(expr) {
        for (banned_path, ban) in checker.settings.flake8_tidy_imports.banned_api.iter() {
            // Compare the resolved call path against the banned path split on
            // dots (the settings store dotted strings; resolution yields
            // path segments).
            if call_path == banned_path.split('.').collect::<Vec<_>>() {
                checker.diagnostics.push(Diagnostic::new(
                    BannedApi {
                        name: banned_path.to_string(),
                        message: ban.msg.to_string(),
                    },
                    Range::from_located(expr),
                ));
                // Stop after the first matching ban.
                return;
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use std::path::Path;

    use anyhow::Result;
    use rustc_hash::FxHashMap;

    use super::ApiBan;
    use crate::linter::test_path;
    use crate::registry::RuleCode;
    use crate::settings::Settings;

    /// TID251 should flag uses of the configured banned APIs in the fixture;
    /// the resulting diagnostics are snapshot-tested.
    #[test]
    fn banned_api_true_positives() -> Result<()> {
        let diagnostics = test_path(
            Path::new("./resources/test/fixtures/flake8_tidy_imports/TID251.py"),
            &Settings {
                // Ban an entire module (`cgi`) and a single member
                // (`typing.TypedDict`).
                flake8_tidy_imports: super::super::Settings {
                    banned_api: FxHashMap::from_iter([
                        (
                            "cgi".to_string(),
                            ApiBan {
                                msg: "The cgi module is deprecated.".to_string(),
                            },
                        ),
                        (
                            "typing.TypedDict".to_string(),
                            ApiBan {
                                msg: "Use typing_extensions.TypedDict instead.".to_string(),
                            },
                        ),
                    ])
                    .into(),
                    ..Default::default()
                },
                ..Settings::for_rules(vec![RuleCode::TID251])
            },
        )?;
        insta::assert_yaml_snapshot!(diagnostics);
        Ok(())
    }
}

View File

@@ -1,76 +1,10 @@
pub(crate) mod rules;
pub mod settings;
pub mod options;
#[cfg(test)]
mod tests {
use std::path::Path;
pub mod banned_api;
pub mod relative_imports;
use anyhow::Result;
use rustc_hash::FxHashMap;
use super::settings::{BannedApi, Strictness};
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::settings::Settings;
#[test]
fn ban_parent_imports() -> Result<()> {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_tidy_imports/TID252.py"),
&Settings {
flake8_tidy_imports: super::settings::Settings {
ban_relative_imports: Strictness::Parents,
..Default::default()
},
..Settings::for_rules(vec![RuleCode::TID252])
},
)?;
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn ban_all_imports() -> Result<()> {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_tidy_imports/TID252.py"),
&Settings {
flake8_tidy_imports: super::settings::Settings {
ban_relative_imports: Strictness::All,
..Default::default()
},
..Settings::for_rules(vec![RuleCode::TID252])
},
)?;
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn banned_api_true_positives() -> Result<()> {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_tidy_imports/TID251.py"),
&Settings {
flake8_tidy_imports: super::settings::Settings {
banned_api: FxHashMap::from_iter([
(
"cgi".to_string(),
BannedApi {
msg: "The cgi module is deprecated.".to_string(),
},
),
(
"typing.TypedDict".to_string(),
BannedApi {
msg: "Use typing_extensions.TypedDict instead.".to_string(),
},
),
]),
..Default::default()
},
..Settings::for_rules(vec![RuleCode::TID251])
},
)?;
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[derive(Debug, Hash, Default)]
pub struct Settings {
pub ban_relative_imports: relative_imports::Settings,
pub banned_api: banned_api::Settings,
}

View File

@@ -1,28 +1,13 @@
//! Settings for the `flake8-tidy-imports` plugin.
use std::hash::{Hash, Hasher};
use itertools::Itertools;
use ruff_macros::ConfigurationOptions;
use rustc_hash::FxHashMap;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub enum Strictness {
/// Ban imports that extend into the parent module or beyond.
Parents,
/// Ban all relative imports.
All,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct BannedApi {
/// The message to display when the API is used.
pub msg: String,
}
use super::banned_api::ApiBan;
use super::relative_imports::Strictness;
use super::Settings;
#[derive(
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, JsonSchema,
@@ -56,29 +41,14 @@ pub struct Options {
/// Specific modules or module members that may not be imported or accessed.
/// Note that this rule is only meant to flag accidental uses,
/// and can be circumvented via `eval` or `importlib`.
pub banned_api: Option<FxHashMap<String, BannedApi>>,
}
#[derive(Debug)]
pub struct Settings {
pub ban_relative_imports: Strictness,
pub banned_api: FxHashMap<String, BannedApi>,
}
impl Default for Settings {
fn default() -> Self {
Self {
ban_relative_imports: Strictness::Parents,
banned_api: FxHashMap::default(),
}
}
pub banned_api: Option<FxHashMap<String, ApiBan>>,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
ban_relative_imports: options.ban_relative_imports.unwrap_or(Strictness::Parents),
banned_api: options.banned_api.unwrap_or_default(),
banned_api: options.banned_api.unwrap_or_default().into(),
}
}
}
@@ -87,17 +57,7 @@ impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
ban_relative_imports: Some(settings.ban_relative_imports),
banned_api: Some(settings.banned_api),
}
}
}
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
self.ban_relative_imports.hash(state);
for key in self.banned_api.keys().sorted() {
key.hash(state);
self.banned_api[key].hash(state);
banned_api: Some(settings.banned_api.into()),
}
}
}

View File

@@ -0,0 +1,101 @@
use rustpython_ast::Stmt;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::ast::types::Range;
use crate::define_violation;
use crate::registry::Diagnostic;
use crate::violation::Violation;
// TID252 settings: the strictness level controlling which relative imports
// are banned.
pub type Settings = Strictness;

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub enum Strictness {
    /// Ban imports that extend into the parent module or beyond.
    #[default]
    Parents,
    /// Ban all relative imports.
    All,
}
// Violation for TID252: a relative import banned under the configured
// strictness (stored so the message can reflect the active setting).
define_violation!(
    pub struct RelativeImports(pub Strictness);
);

impl Violation for RelativeImports {
    // The user-facing diagnostic text, varying with the strictness level.
    fn message(&self) -> String {
        let RelativeImports(strictness) = self;
        match strictness {
            Strictness::Parents => "Relative imports from parent modules are banned".to_string(),
            Strictness::All => "Relative imports are banned".to_string(),
        }
    }

    // NOTE(review): presumably a stand-in instance for registry/doc listings —
    // confirm against the `Violation` trait's contract.
    fn placeholder() -> Self {
        RelativeImports(Strictness::All)
    }
}
/// TID252
/// TID252
/// Check whether a relative import's level exceeds what the configured
/// strictness allows: under `All` any relative import (level >= 1) is
/// flagged; under `Parents` only imports reaching the parent or beyond
/// (level >= 2) are flagged. Absolute imports (`level` of `None`) pass.
pub fn banned_relative_import(
    stmt: &Stmt,
    level: Option<&usize>,
    strictness: &Strictness,
) -> Option<Diagnostic> {
    // The highest import level that is still permitted.
    let allowed = match strictness {
        Strictness::All => 0,
        Strictness::Parents => 1,
    };
    let level = *level?;
    (level > allowed).then(|| {
        Diagnostic::new(
            RelativeImports(strictness.clone()),
            Range::from_located(stmt),
        )
    })
}
#[cfg(test)]
mod tests {
    use std::path::Path;

    use anyhow::Result;

    use super::Strictness;
    use crate::linter::test_path;
    use crate::registry::RuleCode;
    use crate::settings::Settings;

    /// With `Parents` strictness, only imports reaching the parent module or
    /// beyond should be flagged; snapshot-tested against the fixture.
    #[test]
    fn ban_parent_imports() -> Result<()> {
        let diagnostics = test_path(
            Path::new("./resources/test/fixtures/flake8_tidy_imports/TID252.py"),
            &Settings {
                flake8_tidy_imports: super::super::Settings {
                    ban_relative_imports: Strictness::Parents,
                    ..Default::default()
                },
                ..Settings::for_rules(vec![RuleCode::TID252])
            },
        )?;
        insta::assert_yaml_snapshot!(diagnostics);
        Ok(())
    }

    /// With `All` strictness, every relative import in the fixture should be
    /// flagged; snapshot-tested.
    #[test]
    fn ban_all_imports() -> Result<()> {
        let diagnostics = test_path(
            Path::new("./resources/test/fixtures/flake8_tidy_imports/TID252.py"),
            &Settings {
                flake8_tidy_imports: super::super::Settings {
                    ban_relative_imports: Strictness::All,
                    ..Default::default()
                },
                ..Settings::for_rules(vec![RuleCode::TID252])
            },
        )?;
        insta::assert_yaml_snapshot!(diagnostics);
        Ok(())
    }
}

View File

@@ -1,91 +0,0 @@
use rustc_hash::FxHashMap;
use rustpython_ast::{Alias, Expr, Located, Stmt};
use super::settings::{BannedApi, Strictness};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Diagnostic;
use crate::violations;
/// TID252
pub fn banned_relative_import(
stmt: &Stmt,
level: Option<&usize>,
strictness: &Strictness,
) -> Option<Diagnostic> {
let strictness_level = match strictness {
Strictness::All => 0,
Strictness::Parents => 1,
};
if level? > &strictness_level {
Some(Diagnostic::new(
violations::BannedRelativeImport(strictness.clone()),
Range::from_located(stmt),
))
} else {
None
}
}
/// TID251
pub fn name_is_banned(
module: &str,
name: &Alias,
banned_apis: &FxHashMap<String, BannedApi>,
) -> Option<Diagnostic> {
let full_name = format!("{module}.{}", &name.node.name);
if let Some(ban) = banned_apis.get(&full_name) {
return Some(Diagnostic::new(
violations::BannedApi {
name: full_name,
message: ban.msg.to_string(),
},
Range::from_located(name),
));
}
None
}
/// TID251
pub fn name_or_parent_is_banned<T>(
located: &Located<T>,
name: &str,
banned_apis: &FxHashMap<String, BannedApi>,
) -> Option<Diagnostic> {
let mut name = name;
loop {
if let Some(ban) = banned_apis.get(name) {
return Some(Diagnostic::new(
violations::BannedApi {
name: name.to_string(),
message: ban.msg.to_string(),
},
Range::from_located(located),
));
}
match name.rfind('.') {
Some(idx) => {
name = &name[..idx];
}
None => return None,
}
}
}
/// TID251
pub fn banned_attribute_access(checker: &mut Checker, expr: &Expr) {
if let Some(call_path) = checker.resolve_call_path(expr) {
for (banned_path, ban) in &checker.settings.flake8_tidy_imports.banned_api {
if call_path == banned_path.split('.').collect::<Vec<_>>() {
checker.diagnostics.push(Diagnostic::new(
violations::BannedApi {
name: banned_path.to_string(),
message: ban.msg.to_string(),
},
Range::from_located(expr),
));
return;
}
}
}
}

View File

@@ -1,5 +1,5 @@
---
source: src/rules/flake8_tidy_imports/mod.rs
source: src/rules/flake8_tidy_imports/banned_api.rs
expression: diagnostics
---
- kind:

View File

@@ -1,9 +1,9 @@
---
source: src/rules/flake8_tidy_imports/mod.rs
source: src/rules/flake8_tidy_imports/relative_imports.rs
expression: diagnostics
---
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 1
column: 0
@@ -13,7 +13,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 2
column: 0
@@ -23,7 +23,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 4
column: 0
@@ -33,7 +33,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 5
column: 0
@@ -43,7 +43,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 7
column: 0
@@ -53,7 +53,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: all
RelativeImports: all
location:
row: 8
column: 0

View File

@@ -1,9 +1,9 @@
---
source: src/rules/flake8_tidy_imports/mod.rs
source: src/rules/flake8_tidy_imports/relative_imports.rs
expression: diagnostics
---
- kind:
BannedRelativeImport: parents
RelativeImports: parents
location:
row: 4
column: 0
@@ -13,7 +13,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: parents
RelativeImports: parents
location:
row: 5
column: 0
@@ -23,7 +23,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: parents
RelativeImports: parents
location:
row: 7
column: 0
@@ -33,7 +33,7 @@ expression: diagnostics
fix: ~
parent: ~
- kind:
BannedRelativeImport: parents
RelativeImports: parents
location:
row: 8
column: 0

View File

@@ -30,6 +30,14 @@ pub fn leading_quote(content: &str) -> Option<&str> {
None
}
/// Return the trailing quote string for a docstring (e.g., `"""`).
///
/// Triple-quote suffixes are checked before single-quote suffixes so that
/// `"""` is matched in preference to a bare `"`.
pub fn trailing_quote(content: &str) -> Option<&&str> {
    for pattern in constants::TRIPLE_QUOTE_SUFFIXES
        .iter()
        .chain(constants::SINGLE_QUOTE_SUFFIXES)
    {
        if content.ends_with(pattern) {
            return Some(pattern);
        }
    }
    None
}
/// Return the index of the first logical line in a string.
pub fn logical_line(content: &str) -> Option<usize> {
// Find the first logical line.

View File

@@ -121,7 +121,7 @@ mod tests {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/pyflakes/F841_0.py"),
&settings::Settings {
dummy_variable_rgx: Regex::new(r"^z$").unwrap(),
dummy_variable_rgx: Regex::new(r"^z$").unwrap().into(),
..settings::Settings::for_rule(RuleCode::F841)
},
)?;

View File

@@ -1,46 +1,32 @@
use itertools::Itertools;
use log::error;
use rustpython_ast::{Expr, ExprKind, Stmt, StmtKind};
use rustpython_ast::{ExprKind, Location, Stmt, StmtKind};
use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;
use crate::ast::helpers::contains_effect;
use crate::ast::types::{BindingKind, Range, RefEquality, ScopeKind};
use crate::autofix::helpers::delete_stmt;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::source_code::Locator;
use crate::violations;
fn is_literal_or_name(expr: &Expr, checker: &Checker) -> bool {
// Accept any obvious literals or names.
if matches!(
expr.node,
ExprKind::Constant { .. }
| ExprKind::Name { .. }
| ExprKind::List { .. }
| ExprKind::Tuple { .. }
| ExprKind::Set { .. }
) {
return true;
}
// Accept empty initializers.
if let ExprKind::Call {
func,
args,
keywords,
} = &expr.node
fn match_token_after<F>(stmt: &Stmt, locator: &Locator, f: F) -> Location
where
F: Fn(Tok) -> bool,
{
let contents = locator.slice_source_code_range(&Range::from_located(stmt));
for ((_, tok, _), (start, ..)) in lexer::make_tokenizer_located(&contents, stmt.location)
.flatten()
.tuple_windows()
{
if args.is_empty() && keywords.is_empty() {
if let ExprKind::Name { id, .. } = &func.node {
return (id == "set"
|| id == "list"
|| id == "tuple"
|| id == "dict"
|| id == "frozenset")
&& checker.is_builtin(id);
}
if f(tok) {
return start;
}
}
false
unreachable!("No token after matched");
}
enum DeletionKind {
@@ -58,8 +44,18 @@ fn remove_unused_variable(
// First case: simple assignment (`x = 1`)
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
if targets.len() == 1 && matches!(targets[0].node, ExprKind::Name { .. }) {
return if is_literal_or_name(value, checker) {
// If assigning to a constant (`x = 1`), delete the entire statement.
return if contains_effect(checker, value) {
// If the expression is complex (`x = foo()`), remove the assignment,
// but preserve the right-hand side.
Some((
DeletionKind::Partial,
Fix::deletion(
stmt.location,
match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal),
),
))
} else {
// If (e.g.) assigning to a constant (`x = 1`), delete the entire statement.
let parent = checker
.child_to_parent
.get(&RefEquality(stmt))
@@ -78,13 +74,6 @@ fn remove_unused_variable(
None
}
}
} else {
// If the expression is more complex (`x = foo()`), remove the assignment,
// but preserve the right-hand side.
Some((
DeletionKind::Partial,
Fix::deletion(stmt.location, value.location),
))
};
}
}
@@ -97,7 +86,17 @@ fn remove_unused_variable(
} = &stmt.node
{
if matches!(target.node, ExprKind::Name { .. }) {
return if is_literal_or_name(value, checker) {
return if contains_effect(checker, value) {
// If the expression is complex (`x = foo()`), remove the assignment,
// but preserve the right-hand side.
Some((
DeletionKind::Partial,
Fix::deletion(
stmt.location,
match_token_after(stmt, checker.locator, |tok| tok == Tok::Equal),
),
))
} else {
// If assigning to a constant (`x = 1`), delete the entire statement.
let parent = checker
.child_to_parent
@@ -117,13 +116,6 @@ fn remove_unused_variable(
None
}
}
} else {
// If the expression is more complex (`x = foo()`), remove the assignment,
// but preserve the right-hand side.
Some((
DeletionKind::Partial,
Fix::deletion(stmt.location, value.location),
))
};
}
}

View File

@@ -31,10 +31,10 @@ expression: diagnostics
content: ""
location:
row: 16
column: 4
column: 0
end_location:
row: 16
column: 8
row: 17
column: 0
parent: ~
- kind:
UnusedVariable: foo

View File

@@ -212,4 +212,72 @@ expression: diagnostics
row: 45
column: 48
parent: ~
- kind:
UnusedVariable: x
location:
row: 50
column: 4
end_location:
row: 50
column: 5
fix:
content: ""
location:
row: 50
column: 4
end_location:
row: 50
column: 8
parent: ~
- kind:
UnusedVariable: y
location:
row: 56
column: 4
end_location:
row: 56
column: 5
fix:
content: ""
location:
row: 56
column: 4
end_location:
row: 57
column: 8
parent: ~
- kind:
UnusedVariable: x
location:
row: 61
column: 4
end_location:
row: 61
column: 5
fix:
content: pass
location:
row: 61
column: 4
end_location:
row: 65
column: 5
parent: ~
- kind:
UnusedVariable: y
location:
row: 67
column: 4
end_location:
row: 67
column: 5
fix:
content: ""
location:
row: 67
column: 0
end_location:
row: 69
column: 0
parent: ~

View File

@@ -53,6 +53,7 @@ mod tests {
#[test_case(RuleCode::UP029, Path::new("UP029.py"); "UP029")]
#[test_case(RuleCode::UP030, Path::new("UP030_0.py"); "UP030_0")]
#[test_case(RuleCode::UP030, Path::new("UP030_1.py"); "UP030_1")]
#[test_case(RuleCode::UP032, Path::new("UP032.py"); "UP032")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -0,0 +1,274 @@
use rustc_hash::FxHashMap;
use rustpython_ast::{Constant, Expr, ExprKind, KeywordData};
use rustpython_common::format::{
FieldName, FieldNamePart, FieldType, FormatPart, FormatString, FromTemplate,
};
use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::rules::pydocstyle::helpers::{leading_quote, trailing_quote};
use crate::rules::pyflakes::format::FormatSummary;
use crate::violations;
/// Like [`FormatSummary`], but maps positional and keyword arguments to their
/// values. For example, given `"{a} {b}".format(a=1, b=2)`, `FormatFunction`
/// would include `"a"` and `"b"` in `kwargs`, mapped to `1` and `2`
/// respectively.
#[derive(Debug)]
struct FormatSummaryValues<'a> {
    args: Vec<String>,
    kwargs: FxHashMap<&'a str, String>,
}

impl<'a> FormatSummaryValues<'a> {
    /// Extract the positional and keyword argument source snippets from a
    /// `.format(...)` call. Returns `None` if the expression is not a call,
    /// carries no arguments, or any argument contains characters we refuse to
    /// inline into an f-string.
    fn try_from_expr(checker: &'a Checker, expr: &'a Expr) -> Option<Self> {
        let ExprKind::Call { args, keywords, .. } = &expr.node else {
            // Not a call: nothing to extract.
            return None;
        };

        let mut positional: Vec<String> = Vec::with_capacity(args.len());
        for arg in args {
            let snippet = checker
                .locator
                .slice_source_code_range(&Range::from_located(arg));
            if contains_invalids(&snippet) {
                return None;
            }
            positional.push(snippet.to_string());
        }

        let mut named: FxHashMap<&str, String> = FxHashMap::default();
        for keyword in keywords {
            let KeywordData { arg, value } = &keyword.node;
            // Skip `**kwargs`-style entries, which have no key.
            let Some(key) = arg else {
                continue;
            };
            let snippet = checker
                .locator
                .slice_source_code_range(&Range::from_located(value));
            if contains_invalids(&snippet) {
                return None;
            }
            named.insert(key, snippet.to_string());
        }

        if positional.is_empty() && named.is_empty() {
            None
        } else {
            Some(Self {
                args: positional,
                kwargs: named,
            })
        }
    }

    /// Pop the next positional argument, if any remain (auto-numbered fields).
    fn consume_next(&mut self) -> Option<String> {
        self.consume_arg(0)
    }

    /// Pop the positional argument at `index`, if it exists.
    fn consume_arg(&mut self, index: usize) -> Option<String> {
        (index < self.args.len()).then(|| self.args.remove(index))
    }

    /// Pop the keyword argument named `key`, if it exists.
    fn consume_kwarg(&mut self, key: &str) -> Option<String> {
        self.kwargs.remove(key)
    }
}
/// Return `true` if the string contains characters that are forbidden in an
/// argument identifier: `*`, `'`, `"`, or the `await` keyword.
fn contains_invalids(string: &str) -> bool {
    const INVALID: [&str; 4] = ["*", "'", "\"", "await"];
    INVALID.iter().any(|invalid| string.contains(invalid))
}
/// Generate an f-string from an [`Expr`].
///
/// Returns `None` whenever the expression cannot be safely rewritten: it isn't
/// a `"...".format(...)` call on a string literal, the literal is an implicit
/// concatenation, an argument contains forbidden characters, or a placeholder
/// can't be matched to an argument.
fn try_convert_to_f_string(checker: &Checker, expr: &Expr) -> Option<String> {
    let ExprKind::Call { func, .. } = &expr.node else {
        return None;
    };
    let ExprKind::Attribute { value, .. } = &func.node else {
        return None;
    };
    // Only rewrite `.format(...)` calls whose receiver is a string literal.
    if !matches!(
        &value.node,
        ExprKind::Constant {
            value: Constant::Str(..),
            ..
        },
    ) {
        return None;
    };
    let Some(mut summary) = FormatSummaryValues::try_from_expr(checker, expr) else {
        return None;
    };
    let contents = checker
        .locator
        .slice_source_code_range(&Range::from_located(value));
    // Tokenize: we need to avoid trying to fix implicit string concatenations
    // (more than one string token in the receiver).
    if lexer::make_tokenizer(&contents)
        .flatten()
        .filter(|(_, tok, _)| matches!(tok, Tok::String { .. }))
        .count()
        > 1
    {
        return None;
    }
    // Strip the unicode prefix. It's redundant in Python 3, and invalid when used
    // with f-strings.
    let contents = if contents.starts_with('U') || contents.starts_with('u') {
        &contents[1..]
    } else {
        &contents
    };
    if contents.is_empty() {
        return None;
    }
    // Remove the leading and trailing quotes.
    let Some(leading_quote) = leading_quote(contents) else {
        return None;
    };
    let Some(trailing_quote) = trailing_quote(contents) else {
        return None;
    };
    let contents = &contents[leading_quote.len()..contents.len() - trailing_quote.len()];
    // Parse the format string.
    let Ok(format_string) = FormatString::from_str(contents) else {
        return None;
    };
    let mut converted = String::with_capacity(contents.len());
    for part in format_string.format_parts {
        match part {
            FormatPart::Field {
                field_name,
                preconversion_spec,
                format_spec,
            } => {
                converted.push('{');
                let field = FieldName::parse(&field_name).ok()?;
                // Resolve the placeholder against the call's arguments; each
                // argument can only be consumed once.
                match field.field_type {
                    FieldType::Auto => {
                        let Some(arg) = summary.consume_next() else {
                            return None;
                        };
                        converted.push_str(&arg);
                    }
                    FieldType::Index(index) => {
                        let Some(arg) = summary.consume_arg(index) else {
                            return None;
                        };
                        converted.push_str(&arg);
                    }
                    FieldType::Keyword(name) => {
                        let Some(arg) = summary.consume_kwarg(&name) else {
                            return None;
                        };
                        converted.push_str(&arg);
                    }
                }
                // Re-emit attribute and subscript accesses on the field (e.g.
                // `{0.attr}` or `{0[key]}`).
                for part in field.parts {
                    match part {
                        FieldNamePart::Attribute(name) => {
                            converted.push('.');
                            converted.push_str(&name);
                        }
                        FieldNamePart::Index(index) => {
                            converted.push('[');
                            converted.push_str(index.to_string().as_str());
                            converted.push(']');
                        }
                        FieldNamePart::StringIndex(index) => {
                            converted.push('[');
                            converted.push_str(&index);
                            converted.push(']');
                        }
                    }
                }
                if let Some(preconversion_spec) = preconversion_spec {
                    converted.push('!');
                    converted.push(preconversion_spec);
                }
                if !format_spec.is_empty() {
                    converted.push(':');
                    converted.push_str(&format_spec);
                }
                converted.push('}');
            }
            FormatPart::Literal(value) => {
                // The parser un-escapes `{{` and `}}` in literal parts, so any
                // brace in `value` must be re-escaped to remain valid inside an
                // f-string. (Previously, only a leading `{` or trailing `}` was
                // re-escaped, which corrupted literals containing braces
                // elsewhere — e.g. the literal `a{b` produced by `"a{{b"`.)
                for ch in value.chars() {
                    match ch {
                        '{' => converted.push_str("{{"),
                        '}' => converted.push_str("}}"),
                        _ => converted.push(ch),
                    }
                }
            }
        }
    }
    // Construct the f-string: `f` prefix, original quotes, converted body.
    let mut contents = String::with_capacity(1 + converted.len());
    contents.push('f');
    contents.push_str(leading_quote);
    contents.push_str(&converted);
    contents.push_str(trailing_quote);
    Some(contents)
}
/// UP032: flag (and, when autofix is enabled, rewrite) `"...".format(...)`
/// calls that can be expressed as f-strings.
pub(crate) fn f_strings(checker: &mut Checker, summary: &FormatSummary, expr: &Expr) {
    // Placeholders with nested parts can't be converted.
    if summary.has_nested_parts {
        return;
    }
    let start = expr.location;
    let end = expr.end_location.unwrap();
    // Avoid refactoring multi-line strings.
    // Currently, the only issue we know of is in LibCST:
    // https://github.com/Instagram/LibCST/issues/846
    if start.row() != end.row() {
        return;
    }
    let Some(contents) = try_convert_to_f_string(checker, expr) else {
        return;
    };
    // Avoid refactors that increase the resulting string length.
    let existing = checker
        .locator
        .slice_source_code_range(&Range::from_located(expr));
    if contents.len() > existing.len() {
        return;
    }
    let mut diagnostic = Diagnostic::new(violations::FString, Range::from_located(expr));
    if checker.patch(&RuleCode::UP032) {
        diagnostic.amend(Fix::replacement(contents, start, end));
    }
    checker.diagnostics.push(diagnostic);
}

View File

@@ -2,6 +2,7 @@ pub(crate) use convert_named_tuple_functional_to_class::convert_named_tuple_func
pub(crate) use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
pub(crate) use datetime_utc_alias::datetime_utc_alias;
pub(crate) use deprecated_unittest_alias::deprecated_unittest_alias;
pub(crate) use f_strings::f_strings;
pub(crate) use format_literals::format_literals;
pub(crate) use native_literals::native_literals;
use once_cell::sync::Lazy;
@@ -31,7 +32,7 @@ pub(crate) use use_pep604_annotation::use_pep604_annotation;
pub(crate) use useless_metaclass_type::useless_metaclass_type;
pub(crate) use useless_object_inheritance::useless_object_inheritance;
use crate::ast::helpers::{self};
use crate::ast::helpers;
use crate::ast::types::{Range, Scope, ScopeKind};
use crate::fix::Fix;
use crate::registry::Diagnostic;
@@ -41,6 +42,7 @@ mod convert_named_tuple_functional_to_class;
mod convert_typed_dict_functional_to_class;
mod datetime_utc_alias;
mod deprecated_unittest_alias;
mod f_strings;
mod format_literals;
mod native_literals;
mod open_alias;

View File

@@ -1,28 +1,26 @@
use rustpython_ast::{Constant, ExprKind, KeywordData};
use rustpython_parser::ast::Expr;
use crate::ast::helpers::{create_expr, unparse_expr};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::types::PythonVersion;
use crate::violations;
fn rule(
checker: &Checker,
decorator_list: &[Expr],
target_version: PythonVersion,
) -> Option<Diagnostic> {
/// UP011
pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Expr]) {
for expr in decorator_list.iter() {
let ExprKind::Call {
func,
args,
keywords,
} = &expr.node
else {
} = &expr.node else {
continue;
};
// Look for, e.g., `import functools; @functools.lru_cache`.
if !(args.is_empty()
&& checker
.resolve_call_path(func)
@@ -31,21 +29,29 @@ fn rule(
continue;
}
let range = Range::new(func.end_location.unwrap(), expr.end_location.unwrap());
// Ex) `functools.lru_cache()`
if keywords.is_empty() {
return Some(Diagnostic::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLRUCacheParams,
range,
));
Range::new(func.end_location.unwrap(), expr.end_location.unwrap()),
);
if checker.patch(&RuleCode::UP011) {
diagnostic.amend(Fix::replacement(
unparse_expr(func, checker.stylist),
expr.location,
expr.end_location.unwrap(),
));
}
checker.diagnostics.push(diagnostic);
}
// Ex) `functools.lru_cache(maxsize=None)`
if !(target_version >= PythonVersion::Py39 && keywords.len() == 1) {
if !(checker.settings.target_version >= PythonVersion::Py39 && keywords.len() == 1) {
continue;
}
let KeywordData { arg, value } = &keywords[0].node;
if !(arg.as_ref().map(|arg| arg == "maxsize").unwrap_or_default()
if !(arg.as_ref().map_or(false, |arg| arg == "maxsize")
&& matches!(
value.node,
ExprKind::Constant {
@@ -56,25 +62,27 @@ fn rule(
{
continue;
}
return Some(Diagnostic::new(
violations::UnnecessaryLRUCacheParams,
range,
));
}
None
}
/// UP011
pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Expr]) {
let Some(mut diagnostic) = rule(
checker,
decorator_list,
checker.settings.target_version,
) else {
return;
};
if checker.patch(diagnostic.kind.code()) {
diagnostic.amend(Fix::deletion(diagnostic.location, diagnostic.end_location));
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLRUCacheParams,
Range::new(func.end_location.unwrap(), expr.end_location.unwrap()),
);
if checker.patch(&RuleCode::UP011) {
if let ExprKind::Attribute { value, ctx, .. } = &func.node {
diagnostic.amend(Fix::replacement(
unparse_expr(
&create_expr(ExprKind::Attribute {
value: value.clone(),
attr: "cache".to_string(),
ctx: ctx.clone(),
}),
checker.stylist,
),
expr.location,
expr.end_location.unwrap(),
));
}
}
checker.diagnostics.push(diagnostic);
}
checker.diagnostics.push(diagnostic);
}

View File

@@ -11,10 +11,10 @@ expression: diagnostics
row: 5
column: 12
fix:
content: ""
content: lru_cache
location:
row: 5
column: 10
column: 1
end_location:
row: 5
column: 12
@@ -28,10 +28,10 @@ expression: diagnostics
row: 11
column: 22
fix:
content: ""
content: functools.lru_cache
location:
row: 11
column: 20
column: 1
end_location:
row: 11
column: 22
@@ -44,14 +44,7 @@ expression: diagnostics
end_location:
row: 16
column: 24
fix:
content: ""
location:
row: 16
column: 10
end_location:
row: 16
column: 24
fix: ~
parent: ~
- kind:
UnnecessaryLRUCacheParams: ~
@@ -62,10 +55,10 @@ expression: diagnostics
row: 21
column: 34
fix:
content: ""
content: functools.cache
location:
row: 21
column: 20
column: 1
end_location:
row: 21
column: 34
@@ -79,10 +72,10 @@ expression: diagnostics
row: 28
column: 1
fix:
content: ""
content: lru_cache
location:
row: 27
column: 10
column: 1
end_location:
row: 28
column: 1
@@ -96,10 +89,10 @@ expression: diagnostics
row: 35
column: 1
fix:
content: ""
content: lru_cache
location:
row: 33
column: 10
column: 1
end_location:
row: 35
column: 1
@@ -113,10 +106,10 @@ expression: diagnostics
row: 42
column: 19
fix:
content: ""
content: functools.cache
location:
row: 40
column: 20
column: 1
end_location:
row: 42
column: 19
@@ -130,10 +123,10 @@ expression: diagnostics
row: 51
column: 1
fix:
content: ""
content: functools.cache
location:
row: 47
column: 20
column: 1
end_location:
row: 51
column: 1
@@ -147,10 +140,10 @@ expression: diagnostics
row: 62
column: 1
fix:
content: ""
content: functools.cache
location:
row: 56
column: 20
column: 1
end_location:
row: 62
column: 1
@@ -164,10 +157,10 @@ expression: diagnostics
row: 72
column: 1
fix:
content: ""
content: functools.cache
location:
row: 67
column: 20
column: 1
end_location:
row: 72
column: 1

View File

@@ -0,0 +1,362 @@
---
source: src/rules/pyupgrade/mod.rs
expression: diagnostics
---
- kind:
FString: ~
location:
row: 5
column: 0
end_location:
row: 5
column: 20
fix:
content: "f\"{a} {b}\""
location:
row: 5
column: 0
end_location:
row: 5
column: 20
parent: ~
- kind:
FString: ~
location:
row: 7
column: 0
end_location:
row: 7
column: 22
fix:
content: "f\"{b} {a}\""
location:
row: 7
column: 0
end_location:
row: 7
column: 22
parent: ~
- kind:
FString: ~
location:
row: 9
column: 0
end_location:
row: 9
column: 19
fix:
content: "f\"{z.y}\""
location:
row: 9
column: 0
end_location:
row: 9
column: 19
parent: ~
- kind:
FString: ~
location:
row: 11
column: 0
end_location:
row: 11
column: 24
fix:
content: "f\"{a.x} {b.y}\""
location:
row: 11
column: 0
end_location:
row: 11
column: 24
parent: ~
- kind:
FString: ~
location:
row: 13
column: 0
end_location:
row: 13
column: 24
fix:
content: "f\"{a.b} {c.d}\""
location:
row: 13
column: 0
end_location:
row: 13
column: 24
parent: ~
- kind:
FString: ~
location:
row: 15
column: 0
end_location:
row: 15
column: 16
fix:
content: "f\"{a()}\""
location:
row: 15
column: 0
end_location:
row: 15
column: 16
parent: ~
- kind:
FString: ~
location:
row: 17
column: 0
end_location:
row: 17
column: 18
fix:
content: "f\"{a.b()}\""
location:
row: 17
column: 0
end_location:
row: 17
column: 18
parent: ~
- kind:
FString: ~
location:
row: 19
column: 0
end_location:
row: 19
column: 22
fix:
content: "f\"{a.b().c()}\""
location:
row: 19
column: 0
end_location:
row: 19
column: 22
parent: ~
- kind:
FString: ~
location:
row: 21
column: 0
end_location:
row: 21
column: 24
fix:
content: "f\"hello {name}!\""
location:
row: 21
column: 0
end_location:
row: 21
column: 24
parent: ~
- kind:
FString: ~
location:
row: 23
column: 0
end_location:
row: 23
column: 27
fix:
content: "f\"{a}{b}{c}\""
location:
row: 23
column: 0
end_location:
row: 23
column: 27
parent: ~
- kind:
FString: ~
location:
row: 25
column: 0
end_location:
row: 25
column: 16
fix:
content: "f\"{0x0}\""
location:
row: 25
column: 0
end_location:
row: 25
column: 16
parent: ~
- kind:
FString: ~
location:
row: 27
column: 0
end_location:
row: 27
column: 20
fix:
content: "f\"{a} {b}\""
location:
row: 27
column: 0
end_location:
row: 27
column: 20
parent: ~
- kind:
FString: ~
location:
row: 29
column: 0
end_location:
row: 29
column: 24
fix:
content: "f\"\"\"{a} {b}\"\"\""
location:
row: 29
column: 0
end_location:
row: 29
column: 24
parent: ~
- kind:
FString: ~
location:
row: 31
column: 0
end_location:
row: 31
column: 17
fix:
content: "f\"foo{1}\""
location:
row: 31
column: 0
end_location:
row: 31
column: 17
parent: ~
- kind:
FString: ~
location:
row: 33
column: 0
end_location:
row: 33
column: 18
fix:
content: "fr\"foo{1}\""
location:
row: 33
column: 0
end_location:
row: 33
column: 18
parent: ~
- kind:
FString: ~
location:
row: 35
column: 4
end_location:
row: 35
column: 21
fix:
content: "f\"{1}\""
location:
row: 35
column: 4
end_location:
row: 35
column: 21
parent: ~
- kind:
FString: ~
location:
row: 37
column: 6
end_location:
row: 37
column: 25
fix:
content: "f\"foo {x} \""
location:
row: 37
column: 6
end_location:
row: 37
column: 25
parent: ~
- kind:
FString: ~
location:
row: 39
column: 0
end_location:
row: 39
column: 20
fix:
content: "f\"{a[b]}\""
location:
row: 39
column: 0
end_location:
row: 39
column: 20
parent: ~
- kind:
FString: ~
location:
row: 41
column: 0
end_location:
row: 41
column: 22
fix:
content: "f\"{a.a[b]}\""
location:
row: 41
column: 0
end_location:
row: 41
column: 22
parent: ~
- kind:
FString: ~
location:
row: 43
column: 0
end_location:
row: 43
column: 29
fix:
content: "f\"{escaped}{{}}{y}\""
location:
row: 43
column: 0
end_location:
row: 43
column: 29
parent: ~
- kind:
FString: ~
location:
row: 45
column: 0
end_location:
row: 45
column: 14
fix:
content: "f\"{a}\""
location:
row: 45
column: 0
end_location:
row: 45
column: 14
parent: ~

View File

@@ -31,7 +31,7 @@ mod tests {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/ruff/confusables.py"),
&settings::Settings {
allowed_confusables: FxHashSet::from_iter(['', 'ρ', '']),
allowed_confusables: FxHashSet::from_iter(['', 'ρ', '']).into(),
..settings::Settings::for_rules(vec![
RuleCode::RUF001,
RuleCode::RUF002,

View File

@@ -65,7 +65,7 @@ pub struct Configuration {
pub flake8_import_conventions: Option<flake8_import_conventions::settings::Options>,
pub flake8_pytest_style: Option<flake8_pytest_style::settings::Options>,
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
pub flake8_tidy_imports: Option<flake8_tidy_imports::settings::Options>,
pub flake8_tidy_imports: Option<flake8_tidy_imports::options::Options>,
pub flake8_unused_arguments: Option<flake8_unused_arguments::settings::Options>,
pub isort: Option<isort::settings::Options>,
pub mccabe: Option<mccabe::settings::Options>,

176
src/settings/hashable.rs Normal file
View File

@@ -0,0 +1,176 @@
use std::hash::{Hash, Hasher};
use std::ops::{Deref, DerefMut};
use globset::{GlobMatcher, GlobSet};
use itertools::Itertools;
use regex::Regex;
use rustc_hash::{FxHashMap, FxHashSet};
use super::types::FilePattern;
/// Newtype wrapper that makes a [`Regex`] usable in `Hash`-requiring contexts
/// (`Regex` itself does not implement `Hash`).
#[derive(Debug)]
pub struct HashableRegex(Regex);
impl Hash for HashableRegex {
    // Hash the pattern text: two regexes built from the same pattern hash equal.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.as_str().hash(state);
    }
}
impl From<Regex> for HashableRegex {
    fn from(regex: Regex) -> Self {
        Self(regex)
    }
}
impl Deref for HashableRegex {
    type Target = Regex;
    // Expose the full `Regex` API transparently.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Newtype wrapper that makes a [`GlobMatcher`] hashable by hashing its
/// underlying glob pattern.
#[derive(Debug)]
pub struct HashableGlobMatcher(GlobMatcher);
impl From<GlobMatcher> for HashableGlobMatcher {
    fn from(matcher: GlobMatcher) -> Self {
        Self(matcher)
    }
}
impl Deref for HashableGlobMatcher {
    type Target = GlobMatcher;
    // Expose the full `GlobMatcher` API transparently.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Hash for HashableGlobMatcher {
    // Hash the source glob, since the compiled matcher itself isn't hashable.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.0.glob().hash(state);
    }
}
/// A [`GlobSet`] that remembers the [`FilePattern`]s it was built from, so it
/// can be hashed (the compiled `GlobSet` itself isn't hashable).
#[derive(Debug)]
pub struct HashableGlobSet {
    // Source patterns, retained only for hashing.
    patterns: Vec<FilePattern>,
    // Compiled matcher, exposed via `Deref`.
    globset: GlobSet,
}
impl HashableGlobSet {
    /// Compile `patterns` into a `GlobSet`, keeping the originals for hashing.
    ///
    /// # Errors
    /// Returns an error if any pattern fails to compile.
    pub fn new(patterns: Vec<FilePattern>) -> anyhow::Result<Self> {
        let mut builder = globset::GlobSetBuilder::new();
        for pattern in &patterns {
            pattern.clone().add_to(&mut builder)?;
        }
        let globset = builder.build()?;
        Ok(HashableGlobSet { patterns, globset })
    }
    /// An empty set that matches nothing.
    pub fn empty() -> Self {
        Self {
            patterns: Vec::new(),
            globset: GlobSet::empty(),
        }
    }
}
impl Deref for HashableGlobSet {
    type Target = GlobSet;
    // Expose the full `GlobSet` API transparently.
    fn deref(&self) -> &Self::Target {
        &self.globset
    }
}
impl Hash for HashableGlobSet {
    // Hash the source patterns in sorted order, so the hash is independent of
    // the order in which the patterns were supplied.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        for pattern in self.patterns.iter().sorted() {
            pattern.hash(state);
        }
    }
}
/// Newtype wrapper that makes an [`FxHashSet`] hashable by hashing its
/// elements in sorted order (a hash set's iteration order is nondeterministic,
/// so sorting is required for a stable hash).
#[derive(Debug, Clone)]
pub struct HashableHashSet<T>(FxHashSet<T>);
impl<T: Hash + Ord> Hash for HashableHashSet<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Sort to make the hash independent of the set's internal ordering.
        for v in self.0.iter().sorted() {
            v.hash(state);
        }
    }
}
impl<T> Default for HashableHashSet<T> {
    fn default() -> Self {
        Self(FxHashSet::default())
    }
}
impl<T> From<FxHashSet<T>> for HashableHashSet<T> {
    fn from(set: FxHashSet<T>) -> Self {
        Self(set)
    }
}
impl<T> From<HashableHashSet<T>> for FxHashSet<T> {
    // Unwrap back into the plain set.
    fn from(set: HashableHashSet<T>) -> Self {
        set.0
    }
}
impl<T> Deref for HashableHashSet<T> {
    type Target = FxHashSet<T>;
    // Expose the full `FxHashSet` API transparently.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Newtype wrapper that makes an [`FxHashMap`] hashable by hashing its
/// key/value pairs with the keys visited in sorted order (a hash map's
/// iteration order is nondeterministic, so sorting is required for a stable
/// hash).
#[derive(Debug, Clone)]
pub struct HashableHashMap<K, V>(FxHashMap<K, V>);
impl<K: Hash + Ord, V: Hash> Hash for HashableHashMap<K, V> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Sort keys to make the hash independent of the map's internal ordering.
        for key in self.0.keys().sorted() {
            key.hash(state);
            self.0[key].hash(state);
        }
    }
}
impl<K, V> Default for HashableHashMap<K, V> {
    fn default() -> Self {
        Self(FxHashMap::default())
    }
}
impl<K, V> From<FxHashMap<K, V>> for HashableHashMap<K, V> {
    fn from(map: FxHashMap<K, V>) -> Self {
        Self(map)
    }
}
impl<K, V> From<HashableHashMap<K, V>> for FxHashMap<K, V> {
    // Unwrap back into the plain map.
    fn from(map: HashableHashMap<K, V>) -> Self {
        map.0
    }
}
impl<K, V> Deref for HashableHashMap<K, V> {
    type Target = FxHashMap<K, V>;
    // Expose the full `FxHashMap` API transparently.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<K, V> DerefMut for HashableHashMap<K, V> {
    fn deref_mut(&mut self) -> &mut <Self as Deref>::Target {
        &mut self.0
    }
}

View File

@@ -2,21 +2,20 @@
//! command-line options. Structure is optimized for internal usage, as opposed
//! to external visibility or parsing.
use std::hash::{Hash, Hasher};
use std::iter;
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
use colored::Colorize;
use globset::{Glob, GlobMatcher, GlobSet};
use globset::Glob;
use itertools::Either::{Left, Right};
use itertools::Itertools;
use once_cell::sync::Lazy;
#[cfg(test)]
use path_absolutize::path_dedot;
use regex::Regex;
use rustc_hash::FxHashSet;
use self::hashable::{HashableGlobMatcher, HashableGlobSet, HashableHashSet, HashableRegex};
use crate::cache::cache_dir;
use crate::registry::{RuleCode, RuleCodePrefix, SuffixLength, CATEGORIES, INCOMPATIBLE_CODES};
use crate::rules::{
@@ -32,6 +31,7 @@ use crate::warn_user_once;
pub mod configuration;
pub mod flags;
pub mod hashable;
pub mod options;
pub mod options_base;
pub mod pyproject;
@@ -74,22 +74,26 @@ pub struct CliSettings {
pub update_check: bool,
}
#[derive(Debug)]
#[derive(Debug, Hash)]
#[allow(clippy::struct_excessive_bools)]
pub struct Settings {
pub allowed_confusables: FxHashSet<char>,
pub allowed_confusables: HashableHashSet<char>,
pub builtins: Vec<String>,
pub dummy_variable_rgx: Regex,
pub enabled: FxHashSet<RuleCode>,
pub exclude: GlobSet,
pub extend_exclude: GlobSet,
pub external: FxHashSet<String>,
pub fixable: FxHashSet<RuleCode>,
pub dummy_variable_rgx: HashableRegex,
pub enabled: HashableHashSet<RuleCode>,
pub exclude: HashableGlobSet,
pub extend_exclude: HashableGlobSet,
pub external: HashableHashSet<String>,
pub fixable: HashableHashSet<RuleCode>,
pub force_exclude: bool,
pub ignore_init_module_imports: bool,
pub line_length: usize,
pub namespace_packages: Vec<PathBuf>,
pub per_file_ignores: Vec<(GlobMatcher, GlobMatcher, FxHashSet<RuleCode>)>,
pub per_file_ignores: Vec<(
HashableGlobMatcher,
HashableGlobMatcher,
HashableHashSet<RuleCode>,
)>,
pub required_version: Option<Version>,
pub respect_gitignore: bool,
pub show_source: bool,
@@ -105,7 +109,7 @@ pub struct Settings {
pub flake8_import_conventions: flake8_import_conventions::settings::Settings,
pub flake8_pytest_style: flake8_pytest_style::settings::Settings,
pub flake8_quotes: flake8_quotes::settings::Settings,
pub flake8_tidy_imports: flake8_tidy_imports::settings::Settings,
pub flake8_tidy_imports: flake8_tidy_imports::Settings,
pub flake8_unused_arguments: flake8_unused_arguments::settings::Settings,
pub isort: isort::settings::Settings,
pub mccabe: mccabe::settings::Settings,
@@ -148,11 +152,13 @@ impl Settings {
allowed_confusables: config
.allowed_confusables
.map(FxHashSet::from_iter)
.unwrap_or_default(),
.unwrap_or_default()
.into(),
builtins: config.builtins.unwrap_or_default(),
dummy_variable_rgx: config
.dummy_variable_rgx
.unwrap_or_else(|| DEFAULT_DUMMY_VARIABLE_RGX.clone()),
.unwrap_or_else(|| DEFAULT_DUMMY_VARIABLE_RGX.clone())
.into(),
enabled: validate_enabled(resolve_codes(
[RuleCodeSpec {
select: &config
@@ -184,17 +190,21 @@ impl Settings {
Right(iter::empty())
},
),
)),
exclude: resolve_globset(config.exclude.unwrap_or_else(|| DEFAULT_EXCLUDE.clone()))?,
extend_exclude: resolve_globset(config.extend_exclude)?,
external: FxHashSet::from_iter(config.external.unwrap_or_default()),
))
.into(),
exclude: HashableGlobSet::new(
config.exclude.unwrap_or_else(|| DEFAULT_EXCLUDE.clone()),
)?,
extend_exclude: HashableGlobSet::new(config.extend_exclude)?,
external: FxHashSet::from_iter(config.external.unwrap_or_default()).into(),
fixable: resolve_codes(
[RuleCodeSpec {
select: &config.fixable.unwrap_or_else(|| CATEGORIES.to_vec()),
ignore: &config.unfixable.unwrap_or_default(),
}]
.into_iter(),
),
)
.into(),
force_exclude: config.force_exclude.unwrap_or(false),
ignore_init_module_imports: config.ignore_init_module_imports.unwrap_or_default(),
line_length: config.line_length.unwrap_or(88),
@@ -250,14 +260,16 @@ impl Settings {
#[cfg(test)]
pub fn for_rule(rule_code: RuleCode) -> Self {
Self {
allowed_confusables: FxHashSet::from_iter([]),
allowed_confusables: FxHashSet::from_iter([]).into(),
builtins: vec![],
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter([rule_code.clone()]),
exclude: GlobSet::empty(),
extend_exclude: GlobSet::empty(),
external: FxHashSet::default(),
fixable: FxHashSet::from_iter([rule_code]),
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$")
.unwrap()
.into(),
enabled: FxHashSet::from_iter([rule_code.clone()]).into(),
exclude: HashableGlobSet::empty(),
extend_exclude: HashableGlobSet::empty(),
external: HashableHashSet::default(),
fixable: FxHashSet::from_iter([rule_code]).into(),
force_exclude: false,
ignore_init_module_imports: false,
line_length: 88,
@@ -277,7 +289,7 @@ impl Settings {
flake8_import_conventions: flake8_import_conventions::settings::Settings::default(),
flake8_pytest_style: flake8_pytest_style::settings::Settings::default(),
flake8_quotes: flake8_quotes::settings::Settings::default(),
flake8_tidy_imports: flake8_tidy_imports::settings::Settings::default(),
flake8_tidy_imports: flake8_tidy_imports::Settings::default(),
flake8_unused_arguments: flake8_unused_arguments::settings::Settings::default(),
isort: isort::settings::Settings::default(),
mccabe: mccabe::settings::Settings::default(),
@@ -291,14 +303,16 @@ impl Settings {
#[cfg(test)]
pub fn for_rules(rule_codes: Vec<RuleCode>) -> Self {
Self {
allowed_confusables: FxHashSet::from_iter([]),
allowed_confusables: HashableHashSet::default(),
builtins: vec![],
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter(rule_codes.clone()),
exclude: GlobSet::empty(),
extend_exclude: GlobSet::empty(),
external: FxHashSet::default(),
fixable: FxHashSet::from_iter(rule_codes),
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$")
.unwrap()
.into(),
enabled: FxHashSet::from_iter(rule_codes.clone()).into(),
exclude: HashableGlobSet::empty(),
extend_exclude: HashableGlobSet::empty(),
external: HashableHashSet::default(),
fixable: FxHashSet::from_iter(rule_codes).into(),
force_exclude: false,
ignore_init_module_imports: false,
line_length: 88,
@@ -318,7 +332,7 @@ impl Settings {
flake8_import_conventions: flake8_import_conventions::settings::Settings::default(),
flake8_pytest_style: flake8_pytest_style::settings::Settings::default(),
flake8_quotes: flake8_quotes::settings::Settings::default(),
flake8_tidy_imports: flake8_tidy_imports::settings::Settings::default(),
flake8_tidy_imports: flake8_tidy_imports::Settings::default(),
flake8_unused_arguments: flake8_unused_arguments::settings::Settings::default(),
isort: isort::settings::Settings::default(),
mccabe: mccabe::settings::Settings::default(),
@@ -343,68 +357,16 @@ impl Settings {
}
}
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
// Add base properties in alphabetical order.
for confusable in &self.allowed_confusables {
confusable.hash(state);
}
self.builtins.hash(state);
self.dummy_variable_rgx.as_str().hash(state);
for value in self.enabled.iter().sorted() {
value.hash(state);
}
for value in self.external.iter().sorted() {
value.hash(state);
}
for value in self.fixable.iter().sorted() {
value.hash(state);
}
self.ignore_init_module_imports.hash(state);
self.line_length.hash(state);
for (absolute, basename, codes) in &self.per_file_ignores {
absolute.glob().hash(state);
basename.glob().hash(state);
for value in codes.iter().sorted() {
value.hash(state);
}
}
self.show_source.hash(state);
self.src.hash(state);
self.target_version.hash(state);
self.task_tags.hash(state);
self.typing_modules.hash(state);
// Add plugin properties in alphabetical order.
self.flake8_annotations.hash(state);
self.flake8_bandit.hash(state);
self.flake8_bugbear.hash(state);
self.flake8_errmsg.hash(state);
self.flake8_import_conventions.hash(state);
self.flake8_pytest_style.hash(state);
self.flake8_quotes.hash(state);
self.flake8_tidy_imports.hash(state);
self.flake8_unused_arguments.hash(state);
self.isort.hash(state);
self.mccabe.hash(state);
self.pep8_naming.hash(state);
self.pydocstyle.hash(state);
self.pyupgrade.hash(state);
}
}
/// Given a list of patterns, create a `GlobSet`.
pub fn resolve_globset(patterns: Vec<FilePattern>) -> Result<GlobSet> {
let mut builder = globset::GlobSetBuilder::new();
for pattern in patterns {
pattern.add_to(&mut builder)?;
}
builder.build().map_err(Into::into)
}
/// Given a list of patterns, create a `GlobSet`.
pub fn resolve_per_file_ignores(
per_file_ignores: Vec<PerFileIgnore>,
) -> Result<Vec<(GlobMatcher, GlobMatcher, FxHashSet<RuleCode>)>> {
) -> Result<
Vec<(
HashableGlobMatcher,
HashableGlobMatcher,
HashableHashSet<RuleCode>,
)>,
> {
per_file_ignores
.into_iter()
.map(|per_file_ignore| {
@@ -415,7 +377,7 @@ pub fn resolve_per_file_ignores(
// Construct basename matcher.
let basename = Glob::new(&per_file_ignore.basename)?.compile_matcher();
Ok((absolute, basename, per_file_ignore.codes))
Ok((absolute.into(), basename.into(), per_file_ignore.codes))
})
.collect()
}

View File

@@ -436,7 +436,7 @@ pub struct Options {
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
#[option_group]
/// Options for the `flake8-tidy-imports` plugin.
pub flake8_tidy_imports: Option<flake8_tidy_imports::settings::Options>,
pub flake8_tidy_imports: Option<flake8_tidy_imports::options::Options>,
#[option_group]
/// Options for the `flake8-import-conventions` plugin.
pub flake8_import_conventions: Option<flake8_import_conventions::settings::Options>,

View File

@@ -131,7 +131,8 @@ mod tests {
use crate::registry::RuleCodePrefix;
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::settings::{BannedApi, Strictness};
use crate::rules::flake8_tidy_imports::banned_api::ApiBan;
use crate::rules::flake8_tidy_imports::relative_imports::Strictness;
use crate::rules::{
flake8_bugbear, flake8_errmsg, flake8_import_conventions, flake8_pytest_style,
flake8_quotes, flake8_tidy_imports, mccabe, pep8_naming,
@@ -574,18 +575,18 @@ other-attribute = 1
docstring_quotes: Some(Quote::Double),
avoid_escape: Some(true),
}),
flake8_tidy_imports: Some(flake8_tidy_imports::settings::Options {
flake8_tidy_imports: Some(flake8_tidy_imports::options::Options {
ban_relative_imports: Some(Strictness::Parents),
banned_api: Some(FxHashMap::from_iter([
(
"cgi".to_string(),
BannedApi {
ApiBan {
msg: "The cgi module is deprecated.".to_string()
}
),
(
"typing.TypedDict".to_string(),
BannedApi {
ApiBan {
msg: "Use typing_extensions.TypedDict instead.".to_string()
}
)

View File

@@ -10,6 +10,7 @@ use rustc_hash::FxHashSet;
use schemars::JsonSchema;
use serde::{de, Deserialize, Deserializer, Serialize};
use super::hashable::HashableHashSet;
use crate::fs;
use crate::registry::{RuleCode, RuleCodePrefix};
@@ -54,7 +55,7 @@ impl FromStr for PythonVersion {
}
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Hash, PartialEq, PartialOrd, Eq, Ord)]
pub enum FilePattern {
Builtin(&'static str),
User(String, PathBuf),
@@ -94,16 +95,16 @@ impl FromStr for FilePattern {
pub struct PerFileIgnore {
pub basename: String,
pub absolute: PathBuf,
pub codes: FxHashSet<RuleCode>,
pub codes: HashableHashSet<RuleCode>,
}
impl PerFileIgnore {
pub fn new(basename: String, absolute: PathBuf, prefixes: &[RuleCodePrefix]) -> Self {
let codes = prefixes.iter().flat_map(RuleCodePrefix::codes).collect();
let codes: FxHashSet<_> = prefixes.iter().flat_map(RuleCodePrefix::codes).collect();
Self {
basename,
absolute,
codes,
codes: codes.into(),
}
}
}
@@ -150,7 +151,9 @@ impl FromStr for PatternPrefixPair {
}
}
#[derive(Clone, Copy, ValueEnum, PartialEq, Eq, Serialize, Deserialize, Debug, JsonSchema)]
#[derive(
Clone, Copy, ValueEnum, PartialEq, Eq, Serialize, Deserialize, Debug, JsonSchema, Hash,
)]
#[serde(rename_all = "kebab-case")]
pub enum SerializationFormat {
Text,
@@ -167,7 +170,7 @@ impl Default for SerializationFormat {
}
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Hash)]
#[serde(try_from = "String")]
pub struct Version(String);

View File

@@ -61,6 +61,15 @@ impl Default for Quote {
}
}
impl From<Quote> for char {
fn from(val: Quote) -> Self {
match val {
Quote::Single => '\'',
Quote::Double => '"',
}
}
}
impl From<&Quote> for vendor::str::Quote {
fn from(val: &Quote) -> Self {
match val {

View File

@@ -10,7 +10,6 @@ use crate::rules::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use crate::rules::flake8_quotes::settings::Quote;
use crate::rules::flake8_tidy_imports::settings::Strictness;
use crate::rules::pyupgrade::types::Primitive;
use crate::violation::{AlwaysAutofixableViolation, Violation};
@@ -1781,8 +1780,8 @@ define_violation!(
);
impl Violation for RaiseWithoutFromInsideExcept {
fn message(&self) -> String {
"Within an except clause, raise exceptions with raise ... from err or raise ... from None \
to distinguish them from errors in exception handling"
"Within an except clause, raise exceptions with `raise ... from err` or `raise ... from \
None` to distinguish them from errors in exception handling"
.to_string()
}
@@ -2169,45 +2168,6 @@ impl Violation for FunctionIsTooComplex {
}
}
// flake8-tidy-imports
define_violation!(
pub struct BannedApi {
pub name: String,
pub message: String,
}
);
impl Violation for BannedApi {
fn message(&self) -> String {
let BannedApi { name, message } = self;
format!("`{name}` is banned: {message}")
}
fn placeholder() -> Self {
BannedApi {
name: "...".to_string(),
message: "...".to_string(),
}
}
}
define_violation!(
pub struct BannedRelativeImport(pub Strictness);
);
impl Violation for BannedRelativeImport {
fn message(&self) -> String {
let BannedRelativeImport(strictness) = self;
match strictness {
Strictness::Parents => "Relative imports from parent modules are banned".to_string(),
Strictness::All => "Relative imports are banned".to_string(),
}
}
fn placeholder() -> Self {
BannedRelativeImport(Strictness::All)
}
}
// flake8-return
define_violation!(
@@ -3837,6 +3797,23 @@ impl AlwaysAutofixableViolation for FormatLiterals {
}
}
define_violation!(
pub struct FString;
);
impl AlwaysAutofixableViolation for FString {
fn message(&self) -> String {
"Use f-string instead of `format` call".to_string()
}
fn autofix_title(&self) -> String {
"Convert to f-string".to_string()
}
fn placeholder() -> Self {
FString
}
}
// pydocstyle
define_violation!(
@@ -6010,6 +5987,24 @@ impl AlwaysAutofixableViolation for DupeClassFieldDefinitions {
}
}
define_violation!(
pub struct PreferUniqueEnums {
pub value: String,
}
);
impl Violation for PreferUniqueEnums {
fn message(&self) -> String {
let PreferUniqueEnums { value } = self;
format!("Enum contains duplicate value: `{value}`")
}
fn placeholder() -> Self {
PreferUniqueEnums {
value: "...".to_string(),
}
}
}
define_violation!(
pub struct PreferListBuiltin;
);