Compare commits
142 Commits
david/allo...dylan/stab
| SHA1 |
|---|
| 8bda1837d9 |
| e667d53828 |
| a071114638 |
| 86ae9fbc08 |
| 11966beeec |
| 1274521f9f |
| 8d24760643 |
| db8db536f8 |
| cb8246bc5f |
| 5faf72a4d9 |
| 28dbc5c51e |
| ce216c79cc |
| 33468cc8cc |
| 8531f4b3ca |
| 55100209c7 |
| c0bb83b882 |
| 74a4e9af3d |
| 8485dbb324 |
| 0858896bc4 |
| ce8b744f17 |
| 5a8cdab771 |
| 3a8191529c |
| e658778ced |
| f1883d71a4 |
| 11db567b0b |
| 9f8c3de462 |
| 293d4ac388 |
| 9e8a7e9353 |
| 453e5f5934 |
| 7ea773daf2 |
| 0079cc6817 |
| e8ea40012a |
| 71d8a5da2a |
| 2c3b3d3230 |
| 8d98c601d8 |
| 0986edf427 |
| 03f1f8e218 |
| 628bb2cd1d |
| f23d2c9b9e |
| 67d94d9ec8 |
| d1cb8e2142 |
| 57202c1c77 |
| 2289187b74 |
| 14c42a8ddf |
| e677863787 |
| f379eb6e62 |
| 47698883ae |
| e2d96df501 |
| 384e80ec80 |
| b9f3b0e0a6 |
| 1e6d76c878 |
| 844c8626c3 |
| 1c8d9d707e |
| 4856377478 |
| 643c845a47 |
| 9e952cf0e0 |
| c4015edf48 |
| 97b824db3e |
| 220ab88779 |
| 7a63ac145a |
| 54f597658c |
| aa1fad61e0 |
| b390b3cb8e |
| 88866f0048 |
| 9bbf4987e8 |
| ad024f9a09 |
| fc549bda94 |
| 77c8ddf101 |
| e730f27f80 |
| d65bd69963 |
| c713e76e4d |
| 8005ebb405 |
| 0c29e258c6 |
| b5b6b657cc |
| ad2f667ee4 |
| 363f061f09 |
| 9b0dfc505f |
| 695de4f27f |
| 3445d1322d |
| 2c3f091e0e |
| 9d3cad95bc |
| 7df79cfb70 |
| 33ed502edb |
| a827b16ebd |
| 47a2ec002e |
| aee3af0f7a |
| 04dc48e17c |
| 27743efa1b |
| c60b4d7f30 |
| 16621fa19d |
| e23d4ea027 |
| 452f992fbc |
| a5ebb3f3a2 |
| 9925910a29 |
| a3ee6bb3b5 |
| b60ba75d09 |
| 66ba1d8775 |
| bbcd7e0196 |
| 48c425c15b |
| 6d210dd0c7 |
| 9ce83c215d |
| 602dd5c039 |
| 3eada01153 |
| 3e811fc369 |
| 743764d384 |
| e03e05d2b3 |
| 9ec4a178a4 |
| 8d5655a7ba |
| 6453ac9ea1 |
| 0a11baf29c |
| 1d20cf9570 |
| 62ef96f51e |
| 4e68dd96a6 |
| b25b642371 |
| 175402aa75 |
| d8216fa328 |
| d51f6940fe |
| 66b082ff71 |
| 5d93d619f3 |
| e1b662bf5d |
| f885cb8a2f |
| 4ef2c223c9 |
| d078ecff37 |
| 7eca6f96e3 |
| fbaf826a9d |
| d8a5b9de17 |
| c3feb8ce27 |
| 97ff015c88 |
| 1f7134f727 |
| 6a0b93170e |
| cc59ff8aad |
| 2b90e7fcd7 |
| a43f5b2129 |
| f3fb7429ca |
| 83498b95fb |
| 03d7be3747 |
| d95b029862 |
| 14c3755445 |
| 83a036960b |
| be76fadb05 |
| e293411679 |
| 53d19f8368 |
.github/workflows/build-docker.yml (vendored): 4 changes

```diff
@@ -79,7 +79,7 @@ jobs:
       # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
       - name: Build and push by digest
         id: build
-        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: .
           platforms: ${{ matrix.platform }}
@@ -231,7 +231,7 @@ jobs:
             ${{ env.TAG_PATTERNS }}

       - name: Build and push
-        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
+        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
           context: .
           platforms: linux/amd64,linux/arm64
```
.github/workflows/ci.yaml (vendored): 36 changes

```diff
@@ -237,13 +237,13 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
+        uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-insta
       - name: ty mdtests (GitHub annotations)
@@ -295,13 +295,13 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
+        uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-insta
       - name: "Run tests"
@@ -324,7 +324,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-nextest
       - name: "Run tests"
@@ -380,7 +380,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
+        uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
       - name: "Build"
         run: cargo build --release --locked

@@ -405,13 +405,13 @@ jobs:
           MSRV: ${{ steps.msrv.outputs.value }}
         run: rustup default "${MSRV}"
       - name: "Install mold"
-        uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
+        uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-insta
       - name: "Run tests"
@@ -437,7 +437,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
+        uses: cargo-bins/cargo-binstall@e8c9cc3599f6c4063d143083205f98ca25d91677 # v1.12.6
         with:
           tool: cargo-fuzz@0.11.2
       - name: "Install cargo-fuzz"
@@ -459,7 +459,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         name: Download Ruff binary to test
         id: download-cached-binary
@@ -660,7 +660,7 @@ jobs:
           branch: ${{ github.event.pull_request.base.ref }}
           workflow: "ci.yaml"
           check_artifacts: true
-      - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - name: Fuzz
         env:
           FORCE_COLOR: 1
@@ -690,7 +690,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
+      - uses: cargo-bins/cargo-binstall@e8c9cc3599f6c4063d143083205f98ca25d91677 # v1.12.6
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear

@@ -730,7 +730,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
@@ -773,7 +773,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - name: "Install Insiders dependencies"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         run: uv pip install -r docs/requirements-insiders.txt --system
@@ -910,7 +910,7 @@ jobs:
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
+        uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
         with:
           tool: cargo-codspeed

```
.github/workflows/daily_fuzz.yaml (vendored): 4 changes

```diff
@@ -34,11 +34,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
-        uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
+        uses: rui314/setup-mold@67424c1b3680e35255d95971cbd5de0047bf31c3 # v1
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: Build ruff
         # A debug build means the script runs slower once it gets started,
```
.github/workflows/mypy_primer.yaml (vendored): 2 changes

```diff
@@ -37,7 +37,7 @@ jobs:
           persist-credentials: false

       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0

       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
         with:
```
.github/workflows/publish-pypi.yml (vendored): 2 changes

```diff
@@ -22,7 +22,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
+        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels-*
```
.pre-commit-config.yaml

```diff
@@ -80,7 +80,7 @@ repos:
       pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.10
+    rev: v0.11.12
     hooks:
       - id: ruff-format
       - id: ruff
@@ -98,7 +98,7 @@ repos:
   # zizmor detects security vulnerabilities in GitHub Actions workflows.
   # Additional configuration for the tool is found in `.github/zizmor.yml`
   - repo: https://github.com/woodruffw/zizmor-pre-commit
-    rev: v1.7.0
+    rev: v1.9.0
     hooks:
       - id: zizmor
```
CHANGELOG.md: 54 changes

```diff
@@ -1,5 +1,59 @@
 # Changelog

+## 0.11.13
+
+### Preview features
+
+- \[`airflow`\] Add unsafe fix for module moved cases (`AIR301`,`AIR311`,`AIR312`,`AIR302`) ([#18367](https://github.com/astral-sh/ruff/pull/18367),[#18366](https://github.com/astral-sh/ruff/pull/18366),[#18363](https://github.com/astral-sh/ruff/pull/18363),[#18093](https://github.com/astral-sh/ruff/pull/18093))
+- \[`refurb`\] Add coverage of `set` and `frozenset` calls (`FURB171`) ([#18035](https://github.com/astral-sh/ruff/pull/18035))
+- \[`refurb`\] Mark `FURB180` fix unsafe when class has bases ([#18149](https://github.com/astral-sh/ruff/pull/18149))
```
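For context on the `FURB171` entry above: the rule flags membership tests against a container holding a single item, and the broadened coverage extends this to `set` and `frozenset` calls. A hypothetical sketch of the pattern (not taken from this changeset):

```python
x = 1

# Now also flagged by FURB171: a membership test against a one-item set() call.
if x in set([1]):
    print("found")

# The direct comparison the rule steers you toward.
if x == 1:
    print("found")
```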
```diff
+
+### Bug fixes
+
+- \[`perflint`\] Fix missing parentheses for lambda and ternary conditions (`PERF401`, `PERF403`) ([#18412](https://github.com/astral-sh/ruff/pull/18412))
+- \[`pyupgrade`\] Apply `UP035` only on py313+ for `get_type_hints()` ([#18476](https://github.com/astral-sh/ruff/pull/18476))
+- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP004`,`UP050`) ([#18393](https://github.com/astral-sh/ruff/pull/18393), [#18390](https://github.com/astral-sh/ruff/pull/18390))
+
+### Rule changes
+
+- \[`fastapi`\] Avoid false positive for class dependencies (`FAST003`) ([#18271](https://github.com/astral-sh/ruff/pull/18271))
+
+### Documentation
+
+- Update editor setup docs for Neovim and Vim ([#18324](https://github.com/astral-sh/ruff/pull/18324))
+
+### Other changes
+
+- Support Python 3.14 template strings (t-strings) in formatter and parser ([#17851](https://github.com/astral-sh/ruff/pull/17851))
```
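For readers unfamiliar with the t-strings mentioned above: PEP 750 (Python 3.14) adds template string literals that evaluate to a `Template` object rather than a plain `str`, keeping the static text and interpolated values separate. A minimal illustration, assuming a Python 3.14 interpreter:

```python
# Python 3.14+ only (PEP 750).
name = "world"

greeting = f"Hello {name}"   # f-string: eagerly joined into a str
template = t"Hello {name}"   # t-string: a string.templatelib.Template

assert greeting == "Hello world"
# The template preserves structure instead of producing text directly;
# consumers (HTML escapers, loggers, ...) decide how to render it.
assert type(template).__name__ == "Template"
```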
```diff
+
+## 0.11.12
+
+### Preview features
+
+- \[`airflow`\] Revise fix titles (`AIR3`) ([#18215](https://github.com/astral-sh/ruff/pull/18215))
+- \[`pylint`\] Implement `missing-maxsplit-arg` (`PLC0207`) ([#17454](https://github.com/astral-sh/ruff/pull/17454))
```
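`missing-maxsplit-arg` targets `str.split` calls where only the first or last element of the result is used; passing `maxsplit` avoids splitting the whole string. A hypothetical example of the pattern (not from this changeset):

```python
url = "https://example.com/a/b/c"

# Flagged: the full split is computed, but only the first element is used.
scheme = url.split(":")[0]

# Preferred: stop after the first split.
scheme = url.split(":", maxsplit=1)[0]

# For the last element, rsplit with maxsplit=1 plays the same role.
last = url.rsplit("/", maxsplit=1)[-1]
```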
```diff
+- \[`pyupgrade`\] New rule `UP050` (`useless-class-metaclass-type`) ([#18334](https://github.com/astral-sh/ruff/pull/18334))
+- \[`flake8-use-pathlib`\] Replace `os.symlink` with `Path.symlink_to` (`PTH211`) ([#18337](https://github.com/astral-sh/ruff/pull/18337))
```
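The `PTH211` entry above follows the general flake8-use-pathlib pattern of steering `os` calls toward `pathlib` equivalents. A sketch of the replacement it suggests:

```python
import os
from pathlib import Path

# Flagged by PTH211:
os.symlink("target.txt", "link.txt")

# Suggested pathlib equivalent (note the reversed perspective:
# the Path is the link, and the argument is its target).
Path("link.txt").symlink_to("target.txt")
```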
```diff
+
+### Bug fixes
+
+- \[`flake8-bugbear`\] Ignore `__debug__` attribute in `B010` ([#18357](https://github.com/astral-sh/ruff/pull/18357))
+- \[`flake8-async`\] Fix `anyio.sleep` argument name (`ASYNC115`, `ASYNC116`) ([#18262](https://github.com/astral-sh/ruff/pull/18262))
+- \[`refurb`\] Fix `FURB129` autofix generating invalid syntax ([#18235](https://github.com/astral-sh/ruff/pull/18235))
+
+### Rule changes
+
+- \[`flake8-implicit-str-concat`\] Add autofix for `ISC003` ([#18256](https://github.com/astral-sh/ruff/pull/18256))
```
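`ISC003` (explicit string concatenation) concerns adjacent string literals joined with an explicit `+` where implicit concatenation would do; the new autofix rewrites the former into the latter. A hypothetical before/after:

```python
# Flagged by ISC003: explicit "+" between two string literals.
message = (
    "the quick brown fox "
    + "jumps over the lazy dog"
)

# After the autofix: implicit concatenation of adjacent literals.
message = (
    "the quick brown fox "
    "jumps over the lazy dog"
)
```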
```diff
+- \[`pycodestyle`\] Improve the diagnostic message for `E712` ([#18328](https://github.com/astral-sh/ruff/pull/18328))
+- \[`flake8-2020`\] Fix diagnostic message for `!=` comparisons (`YTT201`) ([#18293](https://github.com/astral-sh/ruff/pull/18293))
+- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP010`) ([#18291](https://github.com/astral-sh/ruff/pull/18291))
+
+### Documentation
+
+- Simplify rules table to improve readability ([#18297](https://github.com/astral-sh/ruff/pull/18297))
+- Update editor integrations link in README ([#17977](https://github.com/astral-sh/ruff/pull/17977))
+- \[`flake8-bugbear`\] Add fix safety section (`B006`) ([#17652](https://github.com/astral-sh/ruff/pull/17652))
```
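For context on the `B006` fix-safety note above: the rule flags mutable default argument values, and the usual fix moves the default into the function body. A minimal sketch of the hazard and the conventional rewrite:

```python
# Flagged by B006: the list is created once and shared across calls.
def append_bad(item, items=[]):
    items.append(item)
    return items

append_bad(1)  # [1]
append_bad(2)  # [1, 2] -- state leaks between calls

# The conventional fix; applying it automatically can be unsafe because
# it changes behavior for callers that deliberately pass None.
def append_good(item, items=None):
    if items is None:
        items = []
    items.append(item)
    return items
```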
```diff
+
 ## 0.11.11

 ### Preview features
```
Cargo.lock (generated): 79 changes

```diff
@@ -8,6 +8,18 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"

+[[package]]
+name = "ahash"
+version = "0.8.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+ "zerocopy",
+]
+
 [[package]]
 name = "aho-corasick"
 version = "1.1.3"
@@ -342,9 +354,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.38"
+version = "4.5.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000"
+checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -352,9 +364,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.38"
+version = "4.5.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120"
+checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51"
 dependencies = [
  "anstream",
  "anstyle",
@@ -1106,6 +1118,10 @@ name = "hashbrown"
 version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+dependencies = [
+ "ahash",
+ "allocator-api2",
+]

 [[package]]
 name = "hashbrown"
@@ -1498,9 +1514,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"

 [[package]]
 name = "jiff"
-version = "0.2.13"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806"
+checksum = "a194df1107f33c79f4f93d02c80798520551949d59dfad22b6157048a88cca93"
 dependencies = [
  "jiff-static",
  "jiff-tzdb-platform",
@@ -1513,9 +1529,9 @@ dependencies = [

 [[package]]
 name = "jiff-static"
-version = "0.2.13"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48"
+checksum = "6c6e1db7ed32c6c71b759497fae34bf7933636f75a251b9e736555da426f6442"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1597,9 +1613,9 @@ checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"

 [[package]]
 name = "libcst"
-version = "1.7.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad9e315e3f679e61b9095ffd5e509de78b8a4ea3bba9d772f6fb243209f808d4"
+checksum = "3ac076e37f8fe6bcddbb6c3282897e6e9498b254907ccbfc806dc8f9f1491f02"
 dependencies = [
  "annotate-snippets",
  "libcst_derive",
@@ -1607,14 +1623,14 @@ dependencies = [
  "paste",
  "peg",
  "regex",
- "thiserror 1.0.69",
+ "thiserror 2.0.12",
 ]

 [[package]]
 name = "libcst_derive"
-version = "1.7.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa96ed35d0dccc67cf7ba49350cb86de3dcb1d072a7ab28f99117f19d874953"
+checksum = "9cf4a12c744a301b216c4f0cb73542709ab15e6dadbb06966ac05864109d05da"
 dependencies = [
  "quote",
  "syn",
@@ -2485,7 +2501,7 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.11.11"
+version = "0.11.13"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2642,7 +2658,6 @@ dependencies = [
  "rayon",
  "regex",
  "ruff",
- "ruff_diagnostics",
  "ruff_formatter",
  "ruff_linter",
  "ruff_notebook",
@@ -2672,9 +2687,7 @@ dependencies = [
 name = "ruff_diagnostics"
 version = "0.0.0"
 dependencies = [
- "anyhow",
  "is-macro",
- "log",
  "ruff_text_size",
  "serde",
 ]
@@ -2725,7 +2738,7 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.11.11"
+version = "0.11.13"
 dependencies = [
  "aho-corasick",
  "anyhow",
@@ -3061,7 +3074,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.11.11"
+version = "0.11.13"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -3081,6 +3094,7 @@ dependencies = [
  "ruff_workspace",
  "serde",
  "serde-wasm-bindgen",
+ "uuid",
  "wasm-bindgen",
  "wasm-bindgen-test",
 ]
@@ -3179,13 +3193,14 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"

 [[package]]
 name = "salsa"
-version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
+version = "0.22.0"
+source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
 dependencies = [
  "boxcar",
  "compact_str",
  "crossbeam-queue",
  "dashmap 6.1.0",
+ "hashbrown 0.14.5",
  "hashbrown 0.15.3",
  "hashlink",
  "indexmap",
@@ -3202,15 +3217,14 @@ dependencies = [

 [[package]]
 name = "salsa-macro-rules"
-version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
+version = "0.22.0"
+source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"

 [[package]]
 name = "salsa-macros"
-version = "0.21.1"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
+version = "0.22.0"
+source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
  "syn",
@@ -3874,6 +3888,7 @@ dependencies = [
  "countme",
  "crossbeam",
  "ctrlc",
+ "dunce",
  "filetime",
  "indicatif",
  "insta",
@@ -3950,6 +3965,7 @@ dependencies = [
  "anyhow",
  "bitflags 2.9.1",
  "camino",
  "colored 3.0.0",
  "compact_str",
  "countme",
  "dir-test",
@@ -3962,6 +3978,7 @@ dependencies = [
  "ordermap",
  "quickcheck",
  "quickcheck_macros",
  "ruff_annotate_snippets",
  "ruff_db",
  "ruff_index",
  "ruff_macros",
@@ -4003,6 +4020,7 @@ dependencies = [
  "ruff_source_file",
  "ruff_text_size",
  "rustc-hash 2.1.1",
  "salsa",
  "serde",
  "serde_json",
  "shellexpand",
@@ -4071,6 +4089,7 @@ dependencies = [
  "ty_ide",
  "ty_project",
  "ty_python_semantic",
+ "uuid",
  "wasm-bindgen",
  "wasm-bindgen-test",
 ]
@@ -4240,9 +4259,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

 [[package]]
 name = "uuid"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
+checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
 dependencies = [
  "getrandom 0.3.3",
  "js-sys",
@@ -4253,9 +4272,9 @@ dependencies = [

 [[package]]
 name = "uuid-macro-internal"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72dcd78c4f979627a754f5522cea6e6a25e55139056535fe6e69c506cd64a862"
+checksum = "26b682e8c381995ea03130e381928e0e005b7c9eb483c6c8682f50e07b33c2b7"
 dependencies = [
  "proc-macro2",
  "quote",
```
@@ -129,7 +129,7 @@ regex = { version = "1.10.2" }
|
||||
rustc-hash = { version = "2.0.0" }
|
||||
rustc-stable-hash = { version = "0.1.2" }
|
||||
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
|
||||
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "4818b15f3b7516555d39f5a41cb75970448bee4c" }
|
||||
salsa = { git = "https://github.com/carljm/salsa.git", rev = "0f6d406f6c309964279baef71588746b8c67b4a3" }
|
||||
schemars = { version = "0.8.16" }
|
||||
seahash = { version = "4.1.0" }
|
||||
serde = { version = "1.0.197", features = ["derive"] }
|
||||
@@ -179,7 +179,6 @@ uuid = { version = "1.6.1", features = [
|
||||
"v4",
|
||||
"fast-rng",
|
||||
"macro-diagnostics",
|
||||
"js",
|
||||
] }
|
||||
walkdir = { version = "2.3.2" }
|
||||
wasm-bindgen = { version = "0.2.92" }
|
||||
@@ -188,7 +187,7 @@ wild = { version = "2" }
|
||||
zip = { version = "0.6.6", default-features = false }
|
||||
|
||||
[workspace.metadata.cargo-shear]
|
||||
ignored = ["getrandom", "ruff_options_metadata"]
|
||||
ignored = ["getrandom", "ruff_options_metadata", "uuid"]
|
||||
|
||||
|
||||
[workspace.lints.rust]
|
||||
|
||||
README.md

````diff
@@ -34,8 +34,7 @@ An extremely fast Python linter and code formatter, written in Rust.
 - 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
 - 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
   of popular Flake8 plugins, like flake8-bugbear
-- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
-  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
+- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
 - 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

 Ruff aims to be orders of magnitude faster than alternative tools while integrating more
@@ -149,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.11.11/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.11.11/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.11.13/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.11.13/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff-pre-commit`](https://github.com/astral-sh/ruff-pre-commit):
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.11.11
+  rev: v0.11.13
   hooks:
     # Run the linter.
     - id: ruff
````
```diff
@@ -4,6 +4,10 @@ extend-exclude = [
   "crates/ty_vendored/vendor/**/*",
   "**/resources/**/*",
   "**/snapshots/**/*",
+  # Completion tests tend to have a lot of incomplete
+  # words naturally. It's annoying to have to make all
+  # of them actually words. So just ignore typos here.
+  "crates/ty_ide/src/completion.rs",
 ]

 [default.extend-words]
```
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.11.11"
+version = "0.11.13"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
```
```diff
@@ -349,7 +349,6 @@ impl FileCache {
             .iter()
             .map(|msg| {
                 Message::diagnostic(
-                    msg.rule.into(),
                     msg.body.clone(),
                     msg.suggestion.clone(),
                     msg.range,
@@ -357,6 +356,7 @@ impl FileCache {
                     msg.parent,
                     file.clone(),
                     msg.noqa_offset,
+                    msg.rule,
                 )
             })
             .collect()
@@ -439,7 +439,10 @@ impl LintCacheData {

         let messages = messages
             .iter()
-            .filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
+            // Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
+            // this also serves to filter them out, but we shouldn't be caching files with syntax
+            // errors anyway.
+            .filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
             .map(|(rule, msg)| {
                 // Make sure that all message use the same source file.
                 assert_eq!(
```
```diff
@@ -12,7 +12,7 @@ use rayon::prelude::*;
 use rustc_hash::FxHashMap;

 use ruff_db::panic::catch_unwind;
-use ruff_diagnostics::Diagnostic;
+use ruff_linter::OldDiagnostic;
 use ruff_linter::message::Message;
 use ruff_linter::package::PackageRoot;
 use ruff_linter::registry::Rule;
@@ -131,8 +131,7 @@ pub(crate) fn check(

     Diagnostics::new(
         vec![Message::from_diagnostic(
-            Diagnostic::new(IOError { message }, TextRange::default()),
-            dummy,
+            OldDiagnostic::new(IOError { message }, TextRange::default(), &dummy),
             None,
         )],
         FxHashMap::default(),
```
```diff
@@ -6,7 +6,7 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};

 use crate::args::HelpFormat;
@@ -30,7 +30,7 @@ impl<'a> Explanation<'a> {
         let (linter, _) = Linter::parse_code(&code).unwrap();
         let fix = rule.fixable().to_string();
         Self {
-            name: rule.as_ref(),
+            name: rule.name().as_str(),
             code,
             linter: linter.name(),
             summary: rule.message_formats()[0],
@@ -44,7 +44,7 @@ impl<'a> Explanation<'a> {

 fn format_rule_text(rule: Rule) -> String {
     let mut output = String::new();
-    let _ = write!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
+    let _ = write!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
     output.push('\n');
     output.push('\n');
```
```diff
@@ -12,7 +12,7 @@ use colored::Colorize;
 use log::{debug, warn};
 use rustc_hash::FxHashMap;

-use ruff_diagnostics::Diagnostic;
+use ruff_linter::OldDiagnostic;
 use ruff_linter::codes::Rule;
 use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
 use ruff_linter::message::Message;
@@ -64,13 +64,13 @@ impl Diagnostics {
         let source_file = SourceFileBuilder::new(name, "").finish();
         Self::new(
             vec![Message::from_diagnostic(
-                Diagnostic::new(
+                OldDiagnostic::new(
                     IOError {
                         message: err.to_string(),
                     },
                     TextRange::default(),
+                    &source_file,
                 ),
-                source_file,
                 None,
             )],
             FxHashMap::default(),
@@ -165,9 +165,9 @@ impl AddAssign for FixMap {
                 continue;
             }
             let fixed_in_file = self.0.entry(filename).or_default();
-            for (rule, count) in fixed {
+            for (rule, name, count) in fixed.iter() {
                 if count > 0 {
-                    *fixed_in_file.entry(rule).or_default() += count;
+                    *fixed_in_file.entry(rule).or_default(name) += count;
                 }
             }
         }
@@ -235,7 +235,7 @@ pub(crate) fn lint_path(
             };
             let source_file =
                 SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
-            lint_pyproject_toml(source_file, settings)
+            lint_pyproject_toml(&source_file, settings)
         } else {
             vec![]
         };
@@ -305,7 +305,7 @@ pub(crate) fn lint_path(
                 ParseSource::None,
             );
             let transformed = source_kind;
-            let fixed = FxHashMap::default();
+            let fixed = FixTable::default();
             (result, transformed, fixed)
         }
     } else {
@@ -319,7 +319,7 @@ pub(crate) fn lint_path(
             ParseSource::None,
         );
         let transformed = source_kind;
-        let fixed = FxHashMap::default();
+        let fixed = FixTable::default();
         (result, transformed, fixed)
     };

@@ -396,7 +396,7 @@ pub(crate) fn lint_stdin(
     }

     return Ok(Diagnostics {
-        messages: lint_pyproject_toml(source_file, &settings.linter),
+        messages: lint_pyproject_toml(&source_file, &settings.linter),
         fixed: FixMap::from_iter([(fs::relativize_path(path), FixTable::default())]),
         notebook_indexes: FxHashMap::default(),
     });
@@ -473,7 +473,7 @@ pub(crate) fn lint_stdin(
             }

             let transformed = source_kind;
-            let fixed = FxHashMap::default();
+            let fixed = FixTable::default();
             (result, transformed, fixed)
         }
     } else {
@@ -487,7 +487,7 @@ pub(crate) fn lint_stdin(
             ParseSource::None,
         );
         let transformed = source_kind;
-        let fixed = FxHashMap::default();
+        let fixed = FixTable::default();
         (result, transformed, fixed)
     };
```
```diff
@@ -7,6 +7,7 @@ use bitflags::bitflags;
 use colored::Colorize;
 use itertools::{Itertools, iterate};
 use ruff_linter::codes::NoqaCode;
+use ruff_linter::linter::FixTable;
 use serde::Serialize;

 use ruff_linter::fs::relativize_path;
@@ -80,7 +81,7 @@ impl Printer {
         let fixed = diagnostics
             .fixed
             .values()
-            .flat_map(std::collections::HashMap::values)
+            .flat_map(FixTable::counts)
             .sum::<usize>();

         if self.flags.intersects(Flags::SHOW_VIOLATIONS) {
@@ -302,7 +303,7 @@ impl Printer {
         let statistics: Vec<ExpandedStatistics> = diagnostics
             .messages
             .iter()
-            .map(|message| (message.to_noqa_code(), message))
+            .map(|message| (message.noqa_code(), message))
             .sorted_by_key(|(code, message)| (*code, message.fixable()))
             .fold(
                 vec![],
@@ -472,13 +473,13 @@ fn show_fix_status(fix_mode: flags::FixMode, fixables: Option<&FixableStatistics>)
 fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
     let total = fixed
         .values()
-        .map(|table| table.values().sum::<usize>())
+        .map(|table| table.counts().sum::<usize>())
         .sum::<usize>();
     assert!(total > 0);
     let num_digits = num_digits(
-        *fixed
+        fixed
             .values()
-            .filter_map(|table| table.values().max())
+            .filter_map(|table| table.counts().max())
             .max()
             .unwrap(),
     );
@@ -498,12 +499,11 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
             relativize_path(filename).bold(),
             ":".cyan()
         )?;
-        for (rule, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
+        for (code, name, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
             writeln!(
                 writer,
-                " {count:>num_digits$} × {} ({})",
-                rule.noqa_code().to_string().red().bold(),
-                rule.as_ref(),
+                " {count:>num_digits$} × {code} ({name})",
+                code = code.to_string().red().bold(),
             )?;
         }
     }
```
@@ -566,7 +566,7 @@ fn venv() -> Result<()> {
|
||||
----- stderr -----
|
||||
ruff failed
|
||||
Cause: Invalid search path settings
|
||||
Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` could not be canonicalized
|
||||
Cause: Failed to discover the site-packages directory: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
|
||||
");
|
||||
});
|
||||
|
||||
|
||||
@@ -78,7 +78,7 @@ fn setup_tomllib_case() -> Case {
|
||||
|
||||
let src_root = SystemPath::new("/src");
|
||||
let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
|
||||
metadata.apply_cli_options(Options {
|
||||
metadata.apply_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
|
||||
..EnvironmentOptions::default()
|
||||
@@ -131,7 +131,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
|
||||
fn setup() -> Case {
|
||||
let case = setup_tomllib_case();
|
||||
|
||||
let result: Vec<_> = case.db.check().unwrap();
|
||||
let result: Vec<_> = case.db.check();
|
||||
|
||||
assert_diagnostics(&case.db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
|
||||
|
||||
@@ -159,7 +159,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
|
||||
None,
|
||||
);
|
||||
|
||||
let result = db.check().unwrap();
|
||||
let result = db.check();
|
||||
|
||||
assert_eq!(result.len(), EXPECTED_TOMLLIB_DIAGNOSTICS.len());
|
||||
}
|
||||
@@ -179,7 +179,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
|
||||
setup_tomllib_case,
|
||||
|case| {
|
||||
let Case { db, .. } = case;
|
||||
let result: Vec<_> = db.check().unwrap();
|
||||
let result: Vec<_> = db.check();
|
||||
|
||||
assert_diagnostics(db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
|
||||
},
|
||||
@@ -224,7 +224,7 @@ fn setup_micro_case(code: &str) -> Case {
|
||||
|
||||
let src_root = SystemPath::new("/src");
|
||||
let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
|
||||
metadata.apply_cli_options(Options {
|
||||
metadata.apply_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
|
||||
..EnvironmentOptions::default()
|
||||
@@ -293,7 +293,7 @@ fn benchmark_many_string_assignments(criterion: &mut Criterion) {
|
||||
},
|
||||
|case| {
|
||||
let Case { db, .. } = case;
|
||||
let result = db.check().unwrap();
|
||||
let result = db.check();
|
||||
assert_eq!(result.len(), 0);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
@@ -339,7 +339,7 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
|
||||
},
|
||||
|case| {
|
||||
let Case { db, .. } = case;
|
||||
let result = db.check().unwrap();
|
||||
let result = db.check();
|
||||
assert_eq!(result.len(), 0);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
|
||||
```diff
@@ -1,3 +1,4 @@
+use std::any::Any;
 use std::backtrace::BacktraceStatus;
 use std::cell::Cell;
 use std::panic::Location;
@@ -24,17 +25,25 @@ impl Payload {
             None
         }
     }
+
+    pub fn downcast_ref<R: Any>(&self) -> Option<&R> {
+        self.0.downcast_ref::<R>()
+    }
 }

 impl std::fmt::Display for PanicError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        writeln!(f, "panicked at")?;
+        write!(f, "panicked at")?;
         if let Some(location) = &self.location {
             write!(f, " {location}")?;
         }
         if let Some(payload) = self.payload.as_str() {
             write!(f, ":\n{payload}")?;
         }
+        if let Some(query_trace) = self.salsa_backtrace.as_ref() {
+            let _ = writeln!(f, "{query_trace}");
+        }

         if let Some(backtrace) = &self.backtrace {
             match backtrace.status() {
@@ -49,6 +58,7 @@ impl std::fmt::Display for PanicError {
                 _ => {}
             }
         }
+
         Ok(())
     }
 }
```
```diff
@@ -1,5 +1,4 @@
 use std::fmt::Formatter;
-use std::ops::Deref;
 use std::sync::Arc;

 use ruff_python_ast::ModModule;
@@ -18,7 +17,7 @@ use crate::source::source_text;
 /// The query is only cached when the [`source_text()`] hasn't changed. This is because
 /// comparing two ASTs is a non-trivial operation and every offset change is directly
 /// reflected in the changed AST offsets.
-/// The other reason is that Ruff's AST doesn't implement `Eq` which Sala requires
+/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
 /// for determining if a query result is unchanged.
 #[salsa::tracked(returns(ref), no_eq)]
 pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
@@ -36,7 +35,10 @@ pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
     ParsedModule::new(parsed)
 }

-/// Cheap cloneable wrapper around the parsed module.
+/// A wrapper around a parsed module.
+///
+/// This type manages instances of the module AST. A particular instance of the AST
+/// is represented with the [`ParsedModuleRef`] type.
 #[derive(Clone)]
 pub struct ParsedModule {
     inner: Arc<Parsed<ModModule>>,
@@ -49,17 +51,11 @@ impl ParsedModule {
         }
     }

-    /// Consumes `self` and returns the Arc storing the parsed module.
-    pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
-        self.inner
-    }
-}
-
-impl Deref for ParsedModule {
-    type Target = Parsed<ModModule>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.inner
+    /// Loads a reference to the parsed module.
+    pub fn load(&self, _db: &dyn Db) -> ParsedModuleRef {
+        ParsedModuleRef {
+            module_ref: self.inner.clone(),
+        }
     }
 }

@@ -77,6 +73,30 @@ impl PartialEq for ParsedModule {

 impl Eq for ParsedModule {}

+/// Cheap cloneable wrapper around an instance of a module AST.
+#[derive(Clone)]
+pub struct ParsedModuleRef {
+    module_ref: Arc<Parsed<ModModule>>,
+}
+
+impl ParsedModuleRef {
+    pub fn as_arc(&self) -> &Arc<Parsed<ModModule>> {
+        &self.module_ref
+    }
+
+    pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
+        self.module_ref
+    }
+}
+
+impl std::ops::Deref for ParsedModuleRef {
+    type Target = Parsed<ModModule>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.module_ref
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::Db;
@@ -98,7 +118,7 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        let parsed = parsed_module(&db, file);
+        let parsed = parsed_module(&db, file).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -114,7 +134,7 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        let parsed = parsed_module(&db, file);
+        let parsed = parsed_module(&db, file).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -130,7 +150,7 @@ mod tests {

         let virtual_file = db.files().virtual_file(&db, path);

-        let parsed = parsed_module(&db, virtual_file.file());
+        let parsed = parsed_module(&db, virtual_file.file()).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -146,7 +166,7 @@ mod tests {

         let virtual_file = db.files().virtual_file(&db, path);

-        let parsed = parsed_module(&db, virtual_file.file());
+        let parsed = parsed_module(&db, virtual_file.file()).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -177,7 +197,7 @@ else:

     let file = vendored_path_to_file(&db, VendoredPath::new("path.pyi")).unwrap();

-    let parsed = parsed_module(&db, file);
+    let parsed = parsed_module(&db, file).load(&db);

     assert!(parsed.has_valid_syntax());
 }
```
@@ -596,6 +596,13 @@ impl AsRef<SystemPath> for Utf8PathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<SystemPath> for camino::Utf8Component<'_> {
|
||||
#[inline]
|
||||
fn as_ref(&self) -> &SystemPath {
|
||||
SystemPath::new(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<SystemPath> for str {
|
||||
#[inline]
|
||||
fn as_ref(&self) -> &SystemPath {
|
||||
@@ -626,6 +633,22 @@ impl Deref for SystemPathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
|
||||
fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
|
||||
let mut buf = SystemPathBuf::new();
|
||||
buf.extend(iter);
|
||||
buf
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: AsRef<SystemPath>> Extend<P> for SystemPathBuf {
|
||||
fn extend<I: IntoIterator<Item = P>>(&mut self, iter: I) {
|
||||
for path in iter {
|
||||
self.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for SystemPath {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
|
||||
```diff
@@ -13,12 +13,12 @@ pub fn assert_function_query_was_not_run<Db, Q, QDb, I, R>(
     Q: Fn(QDb, I) -> R,
     I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
 {
-    let id = input.as_id().as_u32();
+    let id = input.as_id();
     let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);

     db.attach(|_| {
         if let Some(will_execute_event) = will_execute_event {
-            panic!("Expected query {query_name}({id}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
+            panic!("Expected query {query_name}({id:?}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
         }
     });
 }
@@ -65,7 +65,7 @@ pub fn assert_function_query_was_run<Db, Q, QDb, I, R>(
     Q: Fn(QDb, I) -> R,
     I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
 {
-    let id = input.as_id().as_u32();
+    let id = input.as_id();
     let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);

     db.attach(|_| {
@@ -224,7 +224,7 @@ fn query_was_not_run() {
 }

 #[test]
-#[should_panic(expected = "Expected query len(0) not to have run but it did:")]
+#[should_panic(expected = "Expected query len(Id(0)) not to have run but it did:")]
 fn query_was_not_run_fails_if_query_was_run() {
     use crate::tests::TestDb;
     use salsa::prelude::*;
@@ -287,7 +287,7 @@ fn const_query_was_not_run_fails_if_query_was_run() {
 }

 #[test]
-#[should_panic(expected = "Expected query len(0) to have run but it did not:")]
+#[should_panic(expected = "Expected query len(Id(0)) to have run but it did not:")]
 fn query_was_run_fails_if_query_was_not_run() {
     use crate::tests::TestDb;
     use salsa::prelude::*;
```
```diff
@@ -14,7 +14,6 @@ license = { workspace = true }
 ty = { workspace = true }
 ty_project = { workspace = true, features = ["schemars"] }
 ruff = { workspace = true }
-ruff_diagnostics = { workspace = true }
 ruff_formatter = { workspace = true }
 ruff_linter = { workspace = true, features = ["schemars"] }
 ruff_notebook = { workspace = true }
```
```diff
@@ -10,7 +10,7 @@ use itertools::Itertools;
 use regex::{Captures, Regex};
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};
 use ruff_options_metadata::{OptionEntry, OptionsMetadata};
 use ruff_workspace::options::Options;
@@ -29,7 +29,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
         if let Some(explanation) = rule.explanation() {
             let mut output = String::new();

-            let _ = writeln!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
+            let _ = writeln!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());

             let (linter, _) = Linter::parse_code(&rule.noqa_code().to_string()).unwrap();
             if linter.url().is_some() {
@@ -101,7 +101,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
             let filename = PathBuf::from(ROOT_DIR)
                 .join("docs")
                 .join("rules")
-                .join(rule.as_ref())
+                .join(&*rule.name())
                 .with_extension("md");

             if args.dry_run {
```
```diff
@@ -8,7 +8,7 @@ use std::borrow::Cow;
 use std::fmt::Write;
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};
 use ruff_linter::upstream_categories::UpstreamCategoryAndPrefix;
 use ruff_options_metadata::OptionsMetadata;
@@ -18,45 +18,44 @@ const FIX_SYMBOL: &str = "🛠️";
 const PREVIEW_SYMBOL: &str = "🧪";
 const REMOVED_SYMBOL: &str = "❌";
 const WARNING_SYMBOL: &str = "⚠️";
-const STABLE_SYMBOL: &str = "✔️";
 const SPACER: &str = " ";

+/// Style for the rule's fixability and status icons.
+const SYMBOL_STYLE: &str = "style='width: 1em; display: inline-block;'";
+/// Style for the container wrapping the fixability and status icons.
+const SYMBOLS_CONTAINER: &str = "style='display: flex; gap: 0.5rem; justify-content: end;'";
+
 fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>, linter: &Linter) {
     table_out.push_str("| Code | Name | Message | |");
     table_out.push('\n');
-    table_out.push_str("| ---- | ---- | ------- | ------: |");
+    table_out.push_str("| ---- | ---- | ------- | -: |");
     table_out.push('\n');
     for rule in rules {
         let status_token = match rule.group() {
             RuleGroup::Removed => {
-                format!("<span title='Rule has been removed'>{REMOVED_SYMBOL}</span>")
+                format!(
+                    "<span {SYMBOL_STYLE} title='Rule has been removed'>{REMOVED_SYMBOL}</span>"
+                )
             }
             RuleGroup::Deprecated => {
-                format!("<span title='Rule has been deprecated'>{WARNING_SYMBOL}</span>")
+                format!(
+                    "<span {SYMBOL_STYLE} title='Rule has been deprecated'>{WARNING_SYMBOL}</span>"
+                )
             }
             RuleGroup::Preview => {
-                format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
-            }
-            RuleGroup::Stable => {
-                // A full opacity checkmark is a bit aggressive for indicating stable
-                format!("<span title='Rule is stable' style='opacity: 0.6'>{STABLE_SYMBOL}</span>")
+                format!("<span {SYMBOL_STYLE} title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
             }
+            RuleGroup::Stable => format!("<span {SYMBOL_STYLE}></span>"),
         };

         let fix_token = match rule.fixable() {
             FixAvailability::Always | FixAvailability::Sometimes => {
-                format!("<span title='Automatic fix available'>{FIX_SYMBOL}</span>")
-            }
-            FixAvailability::None => {
-                format!(
-                    "<span title='Automatic fix not available' style='opacity: 0.1' aria-hidden='true'>{FIX_SYMBOL}</span>"
-                )
+                format!("<span {SYMBOL_STYLE} title='Automatic fix available'>{FIX_SYMBOL}</span>")
             }
+            FixAvailability::None => format!("<span {SYMBOL_STYLE}></span>"),
         };

-        let tokens = format!("{status_token} {fix_token}");
-
-        let rule_name = rule.as_ref();
+        let rule_name = rule.name();

         // If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
         // brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`
@@ -82,15 +81,14 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
         #[expect(clippy::or_fun_call)]
         let _ = write!(
             table_out,
-            "| {ss}{0}{1}{se} {{ #{0}{1} }} | {ss}{2}{se} | {ss}{3}{se} | {ss}{4}{se} |",
-            linter.common_prefix(),
-            linter.code_for_rule(rule).unwrap(),
-            rule.explanation()
+            "| {ss}{prefix}{code}{se} {{ #{prefix}{code} }} | {ss}{explanation}{se} | {ss}{message}{se} | <div {SYMBOLS_CONTAINER}>{status_token}{fix_token}</div>|",
+            prefix = linter.common_prefix(),
+            code = linter.code_for_rule(rule).unwrap(),
+            explanation = rule
+                .explanation()
                 .is_some()
                 .then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
                 .unwrap_or(format_args!("{rule_name}")),
             message,
-            tokens,
         );
         table_out.push('\n');
     }
@@ -104,12 +102,6 @@ pub(crate) fn generate() -> String {
     table_out.push_str("### Legend");
     table_out.push('\n');

-    let _ = write!(
-        &mut table_out,
-        "{SPACER}{STABLE_SYMBOL}{SPACER} The rule is stable."
-    );
-    table_out.push_str("<br />");
-
     let _ = write!(
         &mut table_out,
         "{SPACER}{PREVIEW_SYMBOL}{SPACER} The rule is unstable and is in [\"preview\"](faq.md#what-is-preview)."
@@ -132,7 +124,8 @@ pub(crate) fn generate() -> String {
         &mut table_out,
         "{SPACER}{FIX_SYMBOL}{SPACER} The rule is automatically fixable by the `--fix` command-line option."
     );
-    table_out.push_str("<br />");
+    table_out.push_str("\n\n");
+    table_out.push_str("All rules not marked as preview, deprecated or removed are stable.");
     table_out.push('\n');

     for linter in Linter::iter() {
```
```diff
@@ -16,7 +16,5 @@ doctest = false
 [dependencies]
 ruff_text_size = { workspace = true }

-anyhow = { workspace = true }
-log = { workspace = true }
 is-macro = { workspace = true }
 serde = { workspace = true, optional = true, features = [] }
```
```diff
@@ -1,11 +1,7 @@
-pub use diagnostic::Diagnostic;
 pub use edit::Edit;
 pub use fix::{Applicability, Fix, IsolationLevel};
 pub use source_map::{SourceMap, SourceMarker};
-pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};

-mod diagnostic;
 mod edit;
 mod fix;
 mod source_map;
-mod violation;
```
```diff
@@ -10,7 +10,7 @@ use ruff_python_ast::PythonVersion;
 use ty_python_semantic::lint::{LintRegistry, RuleSelection};
 use ty_python_semantic::{
     Db, Program, ProgramSettings, PythonPath, PythonPlatform, PythonVersionSource,
-    PythonVersionWithSource, SearchPathSettings, default_lint_registry,
+    PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
 };

 static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
@@ -37,17 +37,18 @@ impl ModuleDb {
     ) -> Result<Self> {
         let mut search_paths = SearchPathSettings::new(src_roots);
         if let Some(venv_path) = venv_path {
-            search_paths.python_path = PythonPath::from_cli_flag(venv_path);
+            search_paths.python_path =
+                PythonPath::sys_prefix(venv_path, SysPrefixPathOrigin::PythonCliFlag);
         }

         let db = Self::default();
         Program::from_settings(
             &db,
             ProgramSettings {
-                python_version: PythonVersionWithSource {
+                python_version: Some(PythonVersionWithSource {
                     version: python_version,
                     source: PythonVersionSource::default(),
-                },
+                }),
                 python_platform: PythonPlatform::default(),
                 search_paths,
             },
```
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "ruff_linter"
-version = "0.11.11"
+version = "0.11.13"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
```
@@ -10,22 +10,10 @@ from airflow import (
    PY312,
)
from airflow.api_connexion.security import requires_access
from airflow.configuration import (
    as_dict,
    get,
    getboolean,
    getfloat,
    getint,
    has_option,
    remove_option,
    set,
)
from airflow.contrib.aws_athena_hook import AWSAthenaHook
from airflow.datasets import DatasetAliasEvent
from airflow.hooks.base_hook import BaseHook
from airflow.operators.subdag import SubDagOperator
from airflow.secrets.local_filesystem import LocalFilesystemBackend
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.triggers.external_task import TaskStateTrigger
from airflow.utils import dates
from airflow.utils.dag_cycle_tester import test_cycle
@@ -40,13 +28,10 @@ from airflow.utils.dates import (
)
from airflow.utils.db import create_session
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import TemporaryDirectory, mkdirs
from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream
from airflow.utils.log import secrets_masker
from airflow.utils.file import mkdirs
from airflow.utils.state import SHUTDOWN, terminating_states
from airflow.utils.trigger_rule import TriggerRule
from airflow.www.auth import has_access
from airflow.www.auth import has_access, has_access_dataset
from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key

# airflow root
@@ -55,11 +40,6 @@ PY36, PY37, PY38, PY39, PY310, PY311, PY312
# airflow.api_connexion.security
requires_access


# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set


# airflow.contrib.*
AWSAthenaHook()

@@ -68,10 +48,6 @@ AWSAthenaHook()
DatasetAliasEvent()


# airflow.hooks
BaseHook()


# airflow.operators.subdag.*
SubDagOperator()

@@ -81,10 +57,6 @@ SubDagOperator()
LocalFilesystemBackend()


# airflow.sensors.base_sensor_operator
BaseSensorOperator()


# airflow.triggers.external_task
TaskStateTrigger()

@@ -114,15 +86,8 @@ create_session
apply_defaults

# airflow.utils.file
TemporaryDirectory()
mkdirs

# airflow.utils.helpers
helper_chain
helper_cross_downstream

# airflow.utils.log
secrets_masker

# airflow.utils.state
SHUTDOWN
@@ -135,37 +100,8 @@ TriggerRule.NONE_FAILED_OR_SKIPPED

# airflow.www.auth
has_access
has_access_dataset

# airflow.www.utils
get_sensitive_variables_fields
should_hide_value_for_key

# airflow.operators.python
from airflow.operators.python import get_current_context

get_current_context()

# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri

sanitize_uri

# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri

sanitize_uri

# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri

sanitize_uri

# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier

BaseNotifier()

# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager

BaseAuthManager()

@@ -3,7 +3,6 @@ from __future__ import annotations
from airflow.api_connexion.security import requires_access_dataset
from airflow.auth.managers.models.resource_details import (
    DatasetDetails,

)
from airflow.datasets.manager import (
    DatasetManager,
@@ -12,15 +11,13 @@ from airflow.datasets.manager import (
)
from airflow.lineage.hook import DatasetLineageInfo
from airflow.metrics.validators import AllowListValidator, BlockListValidator
from airflow.secrets.local_filesystm import load_connections
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET
from airflow.www.auth import has_access_dataset

requires_access_dataset()

DatasetDetails()


DatasetManager()
dataset_manager()
resolve_dataset_manager()
@@ -34,7 +31,6 @@ load_connections()

RESOURCE_DATASET

has_access_dataset()

from airflow.listeners.spec.dataset import (
    on_dataset_changed,
@@ -43,3 +39,76 @@ from airflow.listeners.spec.dataset import (

on_dataset_created()
on_dataset_changed()


# airflow.operators.python
from airflow.operators.python import get_current_context

get_current_context()

# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri

sanitize_uri

# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri

sanitize_uri

# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri

sanitize_uri

# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier

BaseNotifier()

# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager

BaseAuthManager()


from airflow.configuration import (
    as_dict,
    get,
    getboolean,
    getfloat,
    getint,
    has_option,
    remove_option,
    set,
)

# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
from airflow.hooks.base_hook import BaseHook

# airflow.hooks
BaseHook()

from airflow.sensors.base_sensor_operator import BaseSensorOperator

# airflow.sensors.base_sensor_operator
BaseSensorOperator()
BaseHook()

from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream

# airflow.utils.helpers
helper_chain
helper_cross_downstream

# airflow.utils.file
from airflow.utils.file import TemporaryDirectory

TemporaryDirectory()

from airflow.utils.log import secrets_masker

# airflow.utils.log
secrets_masker

@@ -1,54 +1,54 @@
from __future__ import annotations

from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.openlineage.utils.utils import (
    DatasetInfo,
    translate_airflow_dataset,
)
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET

AvpEntities.DATASET

# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()

# airflow.secrets.local_filesystem
load_connections()

# airflow.security.permissions
RESOURCE_DATASET

from airflow.providers.amazon.aws.datasets.s3 import (
    convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
)
from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset

s3_create_dataset()
s3_convert_dataset_to_openlineage()

from airflow.providers.common.io.dataset.file import (
    convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
)
from airflow.providers.common.io.dataset.file import create_dataset as io_create_dataset

from airflow.providers.google.datasets.bigquery import (
    create_dataset as bigquery_create_dataset,
)
from airflow.providers.google.datasets.gcs import (
    convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
from airflow.providers.openlineage.utils.utils import (
    DatasetInfo,
    translate_airflow_dataset,
)

AvpEntities.DATASET

s3_create_dataset()
s3_convert_dataset_to_openlineage()

io_create_dataset()
io_convert_dataset_to_openlineage()


# # airflow.providers.google.datasets.bigquery
from airflow.providers.google.datasets.bigquery import (
    create_dataset as bigquery_create_dataset,
)

# airflow.providers.google.datasets.bigquery
bigquery_create_dataset()

# airflow.providers.google.datasets.gcs
from airflow.providers.google.datasets.gcs import (
    convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset

gcs_create_dataset()
gcs_convert_dataset_to_openlineage()
# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()
#
# airflow.secrets.local_filesystem
load_connections()
#
# airflow.security.permissions
RESOURCE_DATASET

# airflow.timetables
DatasetTriggeredTimetable()
#
# airflow.www.auth
has_access_dataset

@@ -5,35 +5,30 @@ from airflow.hooks.S3_hook import (
    provide_bucket_name,
)
from airflow.operators.gcs_to_s3 import GCSToS3Operator
from airflow.operators.google_api_to_s3_transfer import (
    GoogleApiToS3Operator,
    GoogleApiToS3Transfer,
)
from airflow.operators.redshift_to_s3_operator import (
    RedshiftToS3Operator,
    RedshiftToS3Transfer,
)
from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.operators.s3_to_redshift_operator import (
    S3ToRedshiftOperator,
    S3ToRedshiftTransfer,
)
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
from airflow.sensors.s3_key_sensor import S3KeySensor

S3Hook()
provide_bucket_name()

GCSToS3Operator()

GoogleApiToS3Operator()
RedshiftToS3Operator()
S3FileTransformOperator()
S3ToRedshiftOperator()
S3KeySensor()

from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer

GoogleApiToS3Transfer()

RedshiftToS3Operator()
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer

RedshiftToS3Transfer()

S3FileTransformOperator()
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer

S3ToRedshiftOperator()
S3ToRedshiftTransfer()

S3KeySensor()

@@ -4,10 +4,13 @@ from airflow.hooks.dbapi import (
    ConnectorProtocol,
    DbApiHook,
)

ConnectorProtocol()
DbApiHook()

from airflow.hooks.dbapi_hook import DbApiHook
from airflow.operators.check_operator import SQLCheckOperator

ConnectorProtocol()
DbApiHook()
SQLCheckOperator()

@@ -114,16 +117,11 @@ from airflow.sensors.sql_sensor import SqlSensor
SqlSensor()


from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.sqlite_operator import SqliteOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

JdbcOperator()
MsSqlOperator()
MySqlOperator()
OracleOperator()
PostgresOperator()
SqliteOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()

@@ -12,55 +12,59 @@ from airflow.macros.hive import (
)
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
from airflow.operators.hive_to_mysql import (
    HiveToMySqlOperator,
    HiveToMySqlTransfer,
)
from airflow.operators.hive_to_mysql import HiveToMySqlOperator
from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
from airflow.operators.mssql_to_hive import (
    MsSqlToHiveOperator,
    MsSqlToHiveTransfer,
)
from airflow.operators.mysql_to_hive import (
    MySqlToHiveOperator,
    MySqlToHiveTransfer,
)
from airflow.operators.s3_to_hive_operator import (
    S3ToHiveOperator,
    S3ToHiveTransfer,
)
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor

HIVE_QUEUE_PRIORITIES
HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()

closest_ds_partition()
max_partition()

HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()
HIVE_QUEUE_PRIORITIES

HiveOperator()

HiveStatsCollectionOperator()

HiveToMySqlOperator()
HiveToMySqlTransfer()

HiveToSambaOperator()

MsSqlToHiveOperator()
MsSqlToHiveTransfer()

from airflow.operators.hive_to_mysql import HiveToMySqlTransfer

HiveToMySqlTransfer()

from airflow.operators.mysql_to_hive import MySqlToHiveOperator

MySqlToHiveOperator()

from airflow.operators.mysql_to_hive import MySqlToHiveTransfer

MySqlToHiveTransfer()

from airflow.operators.mssql_to_hive import MsSqlToHiveOperator

MsSqlToHiveOperator()

from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer

MsSqlToHiveTransfer()

from airflow.operators.s3_to_hive_operator import S3ToHiveOperator

S3ToHiveOperator()

from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer

S3ToHiveTransfer()

from airflow.sensors.hive_partition_sensor import HivePartitionSensor

HivePartitionSensor()

from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor

MetastorePartitionSensor()

from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor

NamedHivePartitionSensor()

@@ -16,14 +16,7 @@ from airflow.kubernetes.kube_client import (
from airflow.kubernetes.kubernetes_helper_functions import (
    add_pod_suffix,
    annotations_for_logging_task_metadata,
    annotations_to_key,
    create_pod_id,
    get_logs_task_metadata,
    rand_str,
)
from airflow.kubernetes.pod import (
    Port,
    Resources,
)

ALL_NAMESPACES
@@ -37,21 +30,13 @@ _enable_tcp_keepalive()
get_kube_client()

add_pod_suffix()
create_pod_id()

annotations_for_logging_task_metadata()
annotations_to_key()
get_logs_task_metadata()
rand_str()

Port()
Resources()
create_pod_id()


from airflow.kubernetes.pod_generator import (
    PodDefaults,
    PodGenerator,
    PodGeneratorDeprecated,
    add_pod_suffix,
    datetime_to_label_safe_datestring,
    extend_object_field,
@@ -61,18 +46,16 @@ from airflow.kubernetes.pod_generator import (
    rand_str,
)

PodDefaults()
PodGenerator()
add_pod_suffix()
datetime_to_label_safe_datestring()
extend_object_field()
label_safe_datestring_to_datetime()
make_safe_label_value()
merge_objects()
PodGenerator()
PodDefaults()
PodGeneratorDeprecated()
add_pod_suffix()
rand_str()


from airflow.kubernetes.pod_generator_deprecated import (
    PodDefaults,
    PodGenerator,
@@ -90,7 +73,6 @@ make_safe_label_value()
PodLauncher()
PodStatus()


from airflow.kubernetes.pod_launcher_deprecated import (
    PodDefaults,
    PodLauncher,
@@ -115,3 +97,17 @@ K8SModel()
Secret()
Volume()
VolumeMount()

from airflow.kubernetes.kubernetes_helper_functions import (
    annotations_to_key,
    get_logs_task_metadata,
    rand_str,
)

annotations_to_key()
get_logs_task_metadata()
rand_str()

from airflow.kubernetes.pod_generator import PodGeneratorDeprecated

PodGeneratorDeprecated()

@@ -5,10 +5,6 @@ from airflow.operators.dagrun_operator import (
    TriggerDagRunLink,
    TriggerDagRunOperator,
)
from airflow.operators.dummy import (
    DummyOperator,
    EmptyOperator,
)
from airflow.operators.latest_only_operator import LatestOnlyOperator
from airflow.operators.python_operator import (
    BranchPythonOperator,
@@ -19,15 +15,12 @@ from airflow.operators.python_operator import (
from airflow.sensors.external_task_sensor import (
    ExternalTaskMarker,
    ExternalTaskSensor,
    ExternalTaskSensorLink,
)

BashOperator()

TriggerDagRunLink()
TriggerDagRunOperator()
DummyOperator()
EmptyOperator()

LatestOnlyOperator()

@@ -38,25 +31,48 @@ ShortCircuitOperator()

ExternalTaskMarker()
ExternalTaskSensor()
ExternalTaskSensorLink()

from airflow.operators.dummy_operator import (
    DummyOperator,
    EmptyOperator,
)

DummyOperator()
EmptyOperator()

from airflow.hooks.subprocess import SubprocessResult

SubprocessResult()

from airflow.hooks.subprocess import working_directory

working_directory()

from airflow.operators.datetime import target_times_as_dates

target_times_as_dates()

from airflow.operators.trigger_dagrun import TriggerDagRunLink

TriggerDagRunLink()

from airflow.sensors.external_task import ExternalTaskSensorLink

ExternalTaskSensorLink()

from airflow.sensors.time_delta import WaitSensor
WaitSensor()

WaitSensor()

from airflow.operators.dummy import DummyOperator

DummyOperator()

from airflow.operators.dummy import EmptyOperator

EmptyOperator()

from airflow.operators.dummy_operator import DummyOperator

DummyOperator()

from airflow.operators.dummy_operator import EmptyOperator

EmptyOperator()

from airflow.sensors.external_task_sensor import ExternalTaskSensorLink

ExternalTaskSensorLink()

@@ -9,19 +9,12 @@ from airflow.datasets import (
    expand_alias_to_datasets,
)
from airflow.datasets.metadata import Metadata
from airflow.decorators import dag, setup, task, task_group, teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
    Connection,
    Variable,
from airflow.decorators import (
    dag,
    setup,
    task,
    task_group,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context

# airflow
DatasetFromRoot()
@@ -39,9 +32,22 @@ dag()
task()
task_group()
setup()
from airflow.decorators import teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
    Connection,
    Variable,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag

# airflow.decorators
teardown()

# airflow.io
# # airflow.io
ObjectStoragePath()
attach()

@@ -60,6 +66,9 @@ BaseOperatorLink()

# airflow.models.dag
DAGFromDag()
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context

# airflow.timetables.datasets
DatasetOrTimeSchedule()


@@ -7,49 +7,71 @@ from airflow.operators.bash import BashOperator
from airflow.operators.datetime import BranchDateTimeOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.latest_only import LatestOnlyOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor

FSHook()
PackageIndexHook()
SubprocessHook()

BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()

LatestOnlyOperator()
BranchDayOfWeekOperator()
DateTimeSensor()

from airflow.operators.python import (
    BranchPythonOperator,
    PythonOperator,
    PythonVirtualenvOperator,
    ShortCircuitOperator,
)
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor, DateTimeSensorAsync
from airflow.sensors.date_time import DateTimeSensorAsync
from airflow.sensors.external_task import (
    ExternalTaskMarker,
    ExternalTaskSensor,

)
from airflow.sensors.time_sensor import (
    TimeSensor,
    TimeSensorAsync,
)
from airflow.sensors.filesystem import FileSensor
from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync
from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import DagStateTrigger, WorkflowTrigger
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger

FSHook()
PackageIndexHook()
SubprocessHook()
BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()
LatestOnlyOperator()
(
    BranchPythonOperator(),
    PythonOperator(),
    PythonVirtualenvOperator(),
    ShortCircuitOperator(),
)
BranchDayOfWeekOperator()
DateTimeSensor(), DateTimeSensorAsync()
ExternalTaskMarker(), ExternalTaskSensor()
BranchPythonOperator()
PythonOperator()
PythonVirtualenvOperator()
ShortCircuitOperator()
DateTimeSensorAsync()
ExternalTaskMarker()
ExternalTaskSensor()
FileSensor()
TimeSensor(), TimeSensorAsync()
TimeDeltaSensor(), TimeDeltaSensorAsync()
TimeSensor()
TimeSensorAsync()

from airflow.sensors.time_delta import (
    TimeDeltaSensor,
    TimeDeltaSensorAsync,
)
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import (
    DagStateTrigger,
    WorkflowTrigger,
)
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import (
    DateTimeTrigger,
    TimeDeltaTrigger,
)

TimeDeltaSensor()
TimeDeltaSensorAsync()
DayOfWeekSensor()
DagStateTrigger(), WorkflowTrigger()
DagStateTrigger()
WorkflowTrigger()
FileTrigger()
DateTimeTrigger(), TimeDeltaTrigger()
DateTimeTrigger()
TimeDeltaTrigger()

@@ -178,3 +178,38 @@ async def unknown_1(other: str = Depends(unknown_unresolved)): ...
async def unknown_2(other: str = Depends(unknown_not_function)): ...
@app.get("/things/{thing_id}")
async def unknown_3(other: str = Depends(unknown_imported)): ...


# Class dependencies
from pydantic import BaseModel
from dataclasses import dataclass

class PydanticParams(BaseModel):
    my_id: int


class InitParams:
    def __init__(self, my_id: int):
        self.my_id = my_id


# Errors
@app.get("/{id}")
async def get_id_pydantic_full(
    params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...


# No errors
@app.get("/{my_id}")
async def get_id_pydantic_full(
    params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{my_id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{my_id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...

@@ -145,3 +145,23 @@ def func():
    sleep = 10

anyio.run(main)


async def test_anyio_async115_helpers():
    import anyio

    await anyio.sleep(delay=1)  # OK
    await anyio.sleep(seconds=1)  # OK

    await anyio.sleep(delay=0)  # ASYNC115
    await anyio.sleep(seconds=0)  # OK


async def test_trio_async115_helpers():
    import trio

    await trio.sleep(seconds=1)  # OK
    await trio.sleep(delay=1)  # OK

    await trio.sleep(seconds=0)  # ASYNC115
    await trio.sleep(delay=0)  # OK

@@ -108,3 +108,23 @@ async def import_from_anyio():

    # catch from import
    await sleep(86401)  # error: 116, "async"


async def test_anyio_async116_helpers():
    import anyio

    await anyio.sleep(delay=1)  # OK
    await anyio.sleep(seconds=1)  # OK

    await anyio.sleep(delay=86401)  # ASYNC116
    await anyio.sleep(seconds=86401)  # OK


async def test_trio_async116_helpers():
    import trio

    await trio.sleep(seconds=1)  # OK
    await trio.sleep(delay=1)  # OK

    await trio.sleep(seconds=86401)  # ASYNC116
    await trio.sleep(delay=86401)  # OK

@@ -22,3 +22,8 @@ def my_func():

# Implicit string concatenation
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"

# t-strings - all ok
t"0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"

@@ -40,3 +40,7 @@ with tempfile.TemporaryDirectory(dir="/dev/shm") as d:

with TemporaryDirectory(dir="/tmp") as d:
    pass

# ok (runtime error from t-string)
with open(t"/foo/bar", "w") as f:
    f.write("def")

@@ -169,3 +169,13 @@ query60 = f"""

# https://github.com/astral-sh/ruff/issues/17967
query61 = f"SELECT * FROM table"  # skip expressionless f-strings

# t-strings
query62 = t"SELECT * FROM table"
query63 = t"""
SELECT *,
foo
FROM ({user_input}) raw
"""
query64 = f"update {t"{table}"} set var = {t"{var}"}"
query65 = t"update {f"{table}"} set var = {f"{var}"}"

@@ -67,3 +67,6 @@ getattr(self.

import builtins
builtins.getattr(foo, "bar")

# Regression test for: https://github.com/astral-sh/ruff/issues/18353
setattr(foo, "__debug__", 0)

@@ -91,3 +91,99 @@ _ = "\8""0" # fix should be "\80"
_ = "\12""8"  # fix should be "\128"
_ = "\12""foo"  # fix should be "\12foo"
_ = "\12" ""  # fix should be "\12"


# Mixed literal + non-literal scenarios
_ = (
    "start" +
    variable +
    "end"
)

_ = (
    f"format" +
    func_call() +
    "literal"
)

_ = (
    rf"raw_f{x}" +
    r"raw_normal"
)


# Different prefix combinations
_ = (
    u"unicode" +
    r"raw"
)

_ = (
    rb"raw_bytes" +
    b"normal_bytes"
)

_ = (
    b"bytes" +
    b"with_bytes"
)

# Repeated concatenation

_ = ("a" +
    "b" +
    "c" +
    "d" + "e"
)

_ = ("a"
    + "b"
    + "c"
    + "d"
    + "e"
)

_ = (
    "start" +
    variable +  # comment
    "end"
)

_ = (
    "start" +
    variable
    # leading comment
    + "end"
)

_ = (
    "first"
    + "second"  # extra spaces around +
)

_ = (
    "first" +  # trailing spaces before +
    "second"
)

_ = ((
    "deep" +
    "nesting"
))

_ = (
    "contains + plus" +
    "another string"
)

_ = (
    "start"
    # leading comment
    + "end"
)

_ = (
    "start" +
    # leading comment
    "end"
)

@@ -72,3 +72,5 @@ def not_warnings_dot_deprecated(

@not_warnings_dot_deprecated("Not warnings.deprecated, so this one *should* lead to PYI053 in a stub!")
def not_a_deprecated_function() -> None: ...

baz: str = t"51 character stringgggggggggggggggggggggggggggggggg"

@@ -80,3 +80,7 @@ x: TypeAlias = Literal["fooooooooooooooooooooooooooooooooooooooooooooooooooooooo

# Ok
y: TypeAlias = Annotated[int, "metadataaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]

ttoo: str = t"50 character stringggggggggggggggggggggggggggggggg"  # OK

tbar: str = t"51 character stringgggggggggggggggggggggggggggggggg"  # Error: PYI053

@@ -39,3 +39,27 @@ f'\'normal\' {f'nested'} normal' # Q003
f'\'normal\' {f'nested'} "double quotes"'
f'\'normal\' {f'\'nested\' {'other'} normal'} "double quotes"' # Q003
f'\'normal\' {f'\'nested\' {'other'} "double quotes"'} normal' # Q00l


# Same as above, but with t-strings
t'This is a \'string\'' # Q003
t'This is \\ a \\\'string\'' # Q003
t'"This" is a \'string\''
f"This is a 'string'"
f"\"This\" is a 'string'"
fr'This is a \'string\''
fR'This is a \'string\''
foo = (
    t'This is a'
    t'\'string\'' # Q003
)
t'\'foo\' {'nested'}' # Q003
t'\'foo\' {t'nested'}' # Q003
t'\'foo\' {t'\'nested\''} \'\'' # Q003

t'normal {t'nested'} normal'
t'\'normal\' {t'nested'} normal' # Q003
t'\'normal\' {t'nested'} "double quotes"'
t'\'normal\' {t'\'nested\' {'other'} normal'} "double quotes"' # Q003
t'\'normal\' {t'\'nested\' {'other'} "double quotes"'} normal' # Q00l

@@ -37,3 +37,25 @@ f"\"normal\" {f"nested"} normal" # Q003
f"\"normal\" {f"nested"} 'single quotes'"
f"\"normal\" {f"\"nested\" {"other"} normal"} 'single quotes'" # Q003
f"\"normal\" {f"\"nested\" {"other"} 'single quotes'"} normal" # Q003


# Same as above, but with t-strings
t"This is a \"string\""
t"'This' is a \"string\""
f'This is a "string"'
f'\'This\' is a "string"'
fr"This is a \"string\""
fR"This is a \"string\""
foo = (
    t"This is a"
    t"\"string\""
)
t"\"foo\" {"foo"}" # Q003
t"\"foo\" {t"foo"}" # Q003
t"\"foo\" {t"\"foo\""} \"\"" # Q003

t"normal {t"nested"} normal"
t"\"normal\" {t"nested"} normal" # Q003
t"\"normal\" {t"nested"} 'single quotes'"
t"\"normal\" {t"\"nested\" {"other"} normal"} 'single quotes'" # Q003
t"\"normal\" {t"\"nested\" {"other"} 'single quotes'"} normal" # Q003

15 crates/ruff_linter/resources/test/fixtures/flake8_use_pathlib/PTH211.py vendored Normal file
@@ -0,0 +1,15 @@
import os
from pathlib import Path


os.symlink("usr/bin/python", "tmp/python")
os.symlink(b"usr/bin/python", b"tmp/python")
Path("tmp/python").symlink_to("usr/bin/python")  # Ok

os.symlink("usr/bin/python", "tmp/python", target_is_directory=True)
os.symlink(b"usr/bin/python", b"tmp/python", target_is_directory=True)
Path("tmp/python").symlink_to("usr/bin/python", target_is_directory=True)  # Ok

fd = os.open(".", os.O_RDONLY)
os.symlink("source.txt", "link.txt", dir_fd=fd)  # Ok: dir_fd is not supported by pathlib
os.close(fd)
@@ -266,3 +266,15 @@ def f():
    result = list()  # this should be replaced with a comprehension
    for i in values:
        result.append(i + 1)  # PERF401

def f():
    src = [1]
    dst = []

    for i in src:
        if True if True else False:
            dst.append(i)

    for i in src:
        if lambda: 0:
            dst.append(i)

@@ -151,3 +151,16 @@ def foo():
    result = {}
    for idx, name in indices, fruit:
        result[name] = idx  # PERF403


def foo():
    src = (("x", 1),)
    dst = {}

    for k, v in src:
        if True if True else False:
            dst[k] = v

    for k, v in src:
        if lambda: 0:
            dst[k] = v
@@ -1,4 +1,4 @@
# Same as `W605_0.py` but using f-strings instead.
# Same as `W605_0.py` but using f-strings and t-strings instead.

#: W605:1:10
regex = f'\.png$'
@@ -66,3 +66,72 @@ s = f"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"

# Debug text (should trigger)
t = f"{'\InHere'=}"


#: W605:1:10
regex = t'\.png$'

#: W605:2:1
regex = t'''
\.png$
'''

#: W605:2:6
f(
    t'\_'
)

#: W605:4:6
t"""
multi-line
literal
with \_ somewhere
in the middle
"""

#: W605:1:38
value = t'new line\nand invalid escape \_ here'


#: Okay
regex = fr'\.png$'
regex = t'\\.png$'
regex = fr'''
\.png$
'''
regex = fr'''
\\.png$
'''
s = t'\\'
regex = t'\w'  # noqa
regex = t'''
\w
'''  # noqa

regex = t'\\\_'
value = t'\{{1}}'
value = t'\{1}'
value = t'{1:\}'
value = t"{t"\{1}"}"
value = rt"{t"\{1}"}"

# Okay
value = rt'\{{1}}'
value = rt'\{1}'
value = rt'{1:\}'
value = t"{rt"\{1}"}"

# Regression tests for https://github.com/astral-sh/ruff/issues/10434
t"{{}}+-\d"
t"\n{{}}+-\d+"
t"\n{{}}<7D>+-\d+"

# See https://github.com/astral-sh/ruff/issues/11491
total = 10
ok = 7
incomplete = 3
s = t"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"

# Debug text (should trigger)
t = t"{'\InHere'=}"

184 crates/ruff_linter/resources/test/fixtures/pylint/missing_maxsplit_arg.py vendored Normal file
@@ -0,0 +1,184 @@
SEQ = "1,2,3"

class Foo(str):
    class_str = "1,2,3"

    def split(self, sep=None, maxsplit=-1) -> list[str]:
        return super().split(sep, maxsplit)

class Bar():
    split = "1,2,3"

# Errors
## Test split called directly on string literal
"1,2,3".split(",")[0]  # [missing-maxsplit-arg]
"1,2,3".split(",")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[0]  # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on string variable
SEQ.split(",")[0]  # [missing-maxsplit-arg]
SEQ.split(",")[-1]  # [missing-maxsplit-arg]
SEQ.rsplit(",")[0]  # [missing-maxsplit-arg]
SEQ.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on class attribute
Foo.class_str.split(",")[0]  # [missing-maxsplit-arg]
Foo.class_str.split(",")[-1]  # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[0]  # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on sliced string
"1,2,3"[::-1].split(",")[0]  # [missing-maxsplit-arg]
"1,2,3"[::-1][::-1].split(",")[0]  # [missing-maxsplit-arg]
SEQ[:3].split(",")[0]  # [missing-maxsplit-arg]
Foo.class_str[1:3].split(",")[-1]  # [missing-maxsplit-arg]
"1,2,3"[::-1].rsplit(",")[0]  # [missing-maxsplit-arg]
SEQ[:3].rsplit(",")[0]  # [missing-maxsplit-arg]
Foo.class_str[1:3].rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test sep given as named argument
"1,2,3".split(sep=",")[0]  # [missing-maxsplit-arg]
"1,2,3".split(sep=",")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[0]  # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[-1]  # [missing-maxsplit-arg]

## Special cases
"1,2,3".split("\n")[0]  # [missing-maxsplit-arg]
"1,2,3".split("split")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit("rsplit")[0]  # [missing-maxsplit-arg]

## Test class attribute named split
Bar.split.split(",")[0]  # [missing-maxsplit-arg]
Bar.split.split(",")[-1]  # [missing-maxsplit-arg]
Bar.split.rsplit(",")[0]  # [missing-maxsplit-arg]
Bar.split.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test unpacked dict literal kwargs
"1,2,3".split(**{"sep": ","})[0]  # [missing-maxsplit-arg]


# OK
## Test not accessing the first or last element
### Test split called directly on string literal
"1,2,3".split(",")[1]
"1,2,3".split(",")[-2]
"1,2,3".rsplit(",")[1]
"1,2,3".rsplit(",")[-2]

### Test split called on string variable
SEQ.split(",")[1]
SEQ.split(",")[-2]
SEQ.rsplit(",")[1]
SEQ.rsplit(",")[-2]

### Test split called on class attribute
Foo.class_str.split(",")[1]
Foo.class_str.split(",")[-2]
Foo.class_str.rsplit(",")[1]
Foo.class_str.rsplit(",")[-2]

### Test split called on sliced string
"1,2,3"[::-1].split(",")[1]
SEQ[:3].split(",")[1]
Foo.class_str[1:3].split(",")[-2]
"1,2,3"[::-1].rsplit(",")[1]
SEQ[:3].rsplit(",")[1]
Foo.class_str[1:3].rsplit(",")[-2]

### Test sep given as named argument
"1,2,3".split(sep=",")[1]
"1,2,3".split(sep=",")[-2]
"1,2,3".rsplit(sep=",")[1]
"1,2,3".rsplit(sep=",")[-2]

## Test varying maxsplit argument
### str.split() tests
"1,2,3".split(sep=",", maxsplit=1)[-1]
"1,2,3".split(sep=",", maxsplit=1)[0]
"1,2,3".split(sep=",", maxsplit=2)[-1]
"1,2,3".split(sep=",", maxsplit=2)[0]
"1,2,3".split(sep=",", maxsplit=2)[1]

### str.rsplit() tests
"1,2,3".rsplit(sep=",", maxsplit=1)[-1]
"1,2,3".rsplit(sep=",", maxsplit=1)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[-1]
"1,2,3".rsplit(sep=",", maxsplit=2)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[1]

## Test user-defined split
Foo("1,2,3").split(",")[0]
Foo("1,2,3").split(",")[-1]
Foo("1,2,3").rsplit(",")[0]
Foo("1,2,3").rsplit(",")[-1]

## Test split called on sliced list
["1", "2", "3"][::-1].split(",")[0]

## Test class attribute named split
Bar.split[0]
Bar.split[-1]
Bar.split[0]
Bar.split[-1]

## Test unpacked dict literal kwargs
"1,2,3".split(",", **{"maxsplit": 1})[0]
"1,2,3".split(**{"sep": ",", "maxsplit": 1})[0]


# TODO

## Test variable split result index
## TODO: These require the ability to resolve a variable name to a value
# Errors
result_index = 0
"1,2,3".split(",")[result_index]  # TODO: [missing-maxsplit-arg]
result_index = -1
"1,2,3".split(",")[result_index]  # TODO: [missing-maxsplit-arg]
# OK
result_index = 1
"1,2,3".split(",")[result_index]
result_index = -2
"1,2,3".split(",")[result_index]


## Test split result index modified in loop
## TODO: These require the ability to recognize being in a loop where:
## - the result of split called on a string is indexed by a variable
## - the variable index above is modified
# OK
result_index = 0
for j in range(3):
    print(SEQ.split(",")[result_index])
    result_index = result_index + 1


## Test accessor
## TODO: These require the ability to get the return type of a method
## (possibly via `typing::is_string`)
class Baz():
    def __init__(self):
        self.my_str = "1,2,3"

    def get_string(self) -> str:
        return self.my_str

# Errors
Baz().get_string().split(",")[0]  # TODO: [missing-maxsplit-arg]
Baz().get_string().split(",")[-1]  # TODO: [missing-maxsplit-arg]
# OK
Baz().get_string().split(",")[1]
Baz().get_string().split(",")[-2]


## Test unpacked dict instance kwargs
## TODO: These require the ability to resolve a dict variable name to a value
# Errors
kwargs_without_maxsplit = {"seq": ","}
"1,2,3".split(**kwargs_without_maxsplit)[0]  # TODO: [missing-maxsplit-arg]
# OK
kwargs_with_maxsplit = {"maxsplit": 1}
"1,2,3".split(",", **kwargs_with_maxsplit)[0]  # TODO: false positive
kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
"1,2,3".split(**kwargs_with_maxsplit)[0]  # TODO: false positive
@@ -12,3 +12,4 @@ if True:
if True:
    from __future__ import generator_stop
    from __future__ import invalid_module, generators
    from __future__ import generators  # comment

@@ -110,6 +110,8 @@ from typing_extensions import CapsuleType
# UP035 on py313+ only
from typing_extensions import deprecated

# UP035 on py313+ only
from typing_extensions import get_type_hints

# https://github.com/astral-sh/ruff/issues/15780
from typing_extensions import is_typeddict

84 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP050.py vendored Normal file
@@ -0,0 +1,84 @@
class A:
    ...


class A(metaclass=type):
    ...


class A(
    metaclass=type
):
    ...


class A(
    metaclass=type
    #
):
    ...


class A(
    #
    metaclass=type
):
    ...


class A(
    metaclass=type,
    #
):
    ...


class A(
    #
    metaclass=type,
    #
):
    ...


class B(A, metaclass=type):
    ...


class B(
    A,
    metaclass=type,
):
    ...


class B(
    A,
    # comment
    metaclass=type,
):
    ...


def foo():
    class A(metaclass=type):
        ...


class A(
    metaclass=type  # comment
    ,
):
    ...


type = str

class Foo(metaclass=type):
    ...


import builtins

class A(metaclass=builtins.type):
    ...
@@ -43,7 +43,6 @@ def func():

import builtins


with builtins.open("FURB129.py") as f:
    for line in f.readlines():
        pass
@@ -51,7 +50,6 @@ with builtins.open("FURB129.py") as f:

from builtins import open as o


with o("FURB129.py") as f:
    for line in f.readlines():
        pass
@@ -89,3 +87,18 @@ with open("FURB129.py") as f:
        pass
    for _not_line in f.readline():
        pass

# https://github.com/astral-sh/ruff/issues/18231
with open("furb129.py") as f:
    for line in (f).readlines():
        pass

with open("furb129.py") as f:
    [line for line in (f).readlines()]


with open("furb129.py") as f:
    for line in (((f))).readlines():
        pass
    for line in(f).readlines():
        pass

53 crates/ruff_linter/resources/test/fixtures/refurb/FURB171_1.py vendored Normal file
@@ -0,0 +1,53 @@
# Errors.

if 1 in set([1]):
    print("Single-element set")

if 1 in set((1,)):
    print("Single-element set")

if 1 in set({1}):
    print("Single-element set")

if 1 in frozenset([1]):
    print("Single-element set")

if 1 in frozenset((1,)):
    print("Single-element set")

if 1 in frozenset({1}):
    print("Single-element set")

if 1 in set(set([1])):
    print('Recursive solution')


# Non-errors.

if 1 in set((1, 2)):
    pass

if 1 in set([1, 2]):
    pass

if 1 in set({1, 2}):
    pass

if 1 in frozenset((1, 2)):
    pass

if 1 in frozenset([1, 2]):
    pass

if 1 in frozenset({1, 2}):
    pass

if 1 in set(1,):
    pass

if 1 in set(1,2):
    pass

if 1 in set((x for x in range(2))):
    pass
@@ -1,6 +1,6 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_text_size::Ranged;

use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{
@@ -38,92 +38,64 @@ pub(crate) fn bindings(checker: &Checker) {
                .dummy_variable_rgx
                .is_match(binding.name(checker.source()))
            {
                let mut diagnostic = Diagnostic::new(
                    pyflakes::rules::UnusedVariable {
                        name: binding.name(checker.source()).to_string(),
                    },
                    binding.range(),
                );
                diagnostic.try_set_fix(|| {
                    pyflakes::fixes::remove_exception_handler_assignment(binding, checker.locator)
                checker
                    .report_diagnostic(
                        pyflakes::rules::UnusedVariable {
                            name: binding.name(checker.source()).to_string(),
                        },
                        binding.range(),
                    )
                    .try_set_fix(|| {
                        pyflakes::fixes::remove_exception_handler_assignment(
                            binding,
                            checker.locator,
                        )
                        .map(Fix::safe_edit)
                });
                checker.report_diagnostic(diagnostic);
                    });
            }
        }
        if checker.enabled(Rule::InvalidAllFormat) {
            if let Some(diagnostic) = pylint::rules::invalid_all_format(binding) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::invalid_all_format(checker, binding);
        }
        if checker.enabled(Rule::InvalidAllObject) {
            if let Some(diagnostic) = pylint::rules::invalid_all_object(binding) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::invalid_all_object(checker, binding);
        }
        if checker.enabled(Rule::NonAsciiName) {
            if let Some(diagnostic) = pylint::rules::non_ascii_name(binding, checker.locator) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::non_ascii_name(checker, binding);
        }
        if checker.enabled(Rule::UnconventionalImportAlias) {
            if let Some(diagnostic) = flake8_import_conventions::rules::unconventional_import_alias(
            flake8_import_conventions::rules::unconventional_import_alias(
                checker,
                binding,
                &checker.settings.flake8_import_conventions.aliases,
            ) {
                checker.report_diagnostic(diagnostic);
            }
            );
        }
        if checker.enabled(Rule::UnaliasedCollectionsAbcSetImport) {
            if let Some(diagnostic) =
                flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding);
        }
        if !checker.source_type.is_stub() && checker.enabled(Rule::UnquotedTypeAlias) {
            flake8_type_checking::rules::unquoted_type_alias(checker, binding);
        }
        if checker.enabled(Rule::UnsortedDunderSlots) {
            if let Some(diagnostic) = ruff::rules::sort_dunder_slots(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::sort_dunder_slots(checker, binding);
        }
        if checker.enabled(Rule::UsedDummyVariable) {
            if let Some(diagnostic) = ruff::rules::used_dummy_variable(checker, binding, binding_id)
            {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::used_dummy_variable(checker, binding, binding_id);
        }
        if checker.enabled(Rule::AssignmentInAssert) {
            if let Some(diagnostic) = ruff::rules::assignment_in_assert(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::assignment_in_assert(checker, binding);
        }
        if checker.enabled(Rule::PytestUnittestRaisesAssertion) {
            if let Some(diagnostic) =
                flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding);
        }
        if checker.enabled(Rule::ForLoopWrites) {
            if let Some(diagnostic) = refurb::rules::for_loop_writes_binding(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            refurb::rules::for_loop_writes_binding(checker, binding);
        }
        if checker.enabled(Rule::CustomTypeVarForSelf) {
            if let Some(diagnostic) =
                flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding);
        }
        if checker.enabled(Rule::PrivateTypeParameter) {
            if let Some(diagnostic) = pyupgrade::rules::private_type_parameter(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            pyupgrade::rules::private_type_parameter(checker, binding);
        }
    }
}

@@ -1,9 +1,9 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::{Binding, BindingKind, Imported, ResolvedReference, ScopeKind};
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;

use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::fix;
@@ -112,12 +112,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                    .map(|id| checker.semantic.reference(*id))
                    .all(ResolvedReference::is_load)
                {
                    checker.report_diagnostic(Diagnostic::new(
                    checker.report_diagnostic(
                        pylint::rules::GlobalVariableNotAssigned {
                            name: (*name).to_string(),
                        },
                        binding.range(),
                    ));
                    );
                }
            }
        }
@@ -146,12 +146,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                if scope.kind.is_generator() {
                    continue;
                }
                checker.report_diagnostic(Diagnostic::new(
                checker.report_diagnostic(
                    pylint::rules::RedefinedArgumentFromLocal {
                        name: name.to_string(),
                    },
                    binding.range(),
                ));
                );
            }
        }
    }
@@ -186,13 +186,13 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                    continue;
                }

                checker.report_diagnostic(Diagnostic::new(
                checker.report_diagnostic(
                    pyflakes::rules::ImportShadowedByLoopVar {
                        name: name.to_string(),
                        row: checker.compute_source_row(shadowed.start()),
                    },
                    binding.range(),
                ));
                );
            }
        }
    }
@@ -331,7 +331,7 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
        // Create diagnostics for each statement.
        for (source, entries) in &redefinitions {
            for (shadowed, binding) in entries {
                let mut diagnostic = Diagnostic::new(
                let mut diagnostic = checker.report_diagnostic(
                    pyflakes::rules::RedefinedWhileUnused {
                        name: binding.name(checker.source()).to_string(),
                        row: checker.compute_source_row(shadowed.start()),
@@ -346,8 +346,6 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                if let Some(fix) = source.as_ref().and_then(|source| fixes.get(source)) {
                    diagnostic.set_fix(fix.clone());
                }

                checker.report_diagnostic(diagnostic);
            }
        }
    }

@@ -17,14 +17,7 @@ pub(crate) fn except_handler(except_handler: &ExceptHandler, checker: &Checker)
            range: _,
        }) => {
            if checker.enabled(Rule::BareExcept) {
                if let Some(diagnostic) = pycodestyle::rules::bare_except(
                    type_.as_deref(),
                    body,
                    except_handler,
                    checker.locator,
                ) {
                    checker.report_diagnostic(diagnostic);
                }
                pycodestyle::rules::bare_except(checker, type_.as_deref(), body, except_handler);
            }
            if checker.enabled(Rule::RaiseWithoutFromInsideExcept) {
                flake8_bugbear::rules::raise_without_from_inside_except(

@@ -1,8 +1,6 @@
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Operator};
|
||||
use ruff_python_literal::cformat::{CFormatError, CFormatErrorType};
|
||||
|
||||
use ruff_diagnostics::Diagnostic;
|
||||
|
||||
use ruff_python_ast::types::Node;
|
||||
use ruff_python_semantic::ScopeKind;
|
||||
use ruff_python_semantic::analyze::typing;
|
||||
@@ -178,6 +176,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
if checker.enabled(Rule::Airflow3Removal) {
|
||||
airflow::rules::airflow_3_removal_expr(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::MissingMaxsplitArg) {
|
||||
pylint::rules::missing_maxsplit_arg(checker, value, slice, expr);
|
||||
}
|
||||
pandas_vet::rules::subscript(checker, value, expr);
|
||||
}
|
||||
Expr::Tuple(ast::ExprTuple {
|
||||
@@ -195,14 +196,13 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
let check_too_many_expressions = checker.enabled(Rule::ExpressionsInStarAssignment);
|
||||
let check_two_starred_expressions =
|
||||
checker.enabled(Rule::MultipleStarredExpressions);
|
||||
if let Some(diagnostic) = pyflakes::rules::starred_expressions(
|
||||
pyflakes::rules::starred_expressions(
|
||||
checker,
|
||||
elts,
|
||||
check_too_many_expressions,
|
||||
check_two_starred_expressions,
|
||||
expr.range(),
|
||||
) {
|
||||
checker.report_diagnostic(diagnostic);
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, ctx, range }) => {
|
||||
@@ -527,12 +527,12 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
match pyflakes::format::FormatSummary::try_from(string_value.to_str()) {
|
||||
Err(e) => {
|
||||
if checker.enabled(Rule::StringDotFormatInvalidFormat) {
|
||||
checker.report_diagnostic(Diagnostic::new(
|
||||
checker.report_diagnostic(
|
||||
pyflakes::rules::StringDotFormatInvalidFormat {
|
||||
message: pyflakes::format::error_to_string(&e),
|
||||
},
|
||||
location,
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(summary) => {
|
||||
@@ -936,9 +936,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
pylint::rules::repeated_keyword_argument(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::PytestPatchWithLambda) {
|
||||
if let Some(diagnostic) = flake8_pytest_style::rules::patch_with_lambda(call) {
|
||||
checker.report_diagnostic(diagnostic);
|
||||
}
|
||||
flake8_pytest_style::rules::patch_with_lambda(checker, call);
|
||||
}
|
||||
if checker.any_enabled(&[
|
||||
Rule::PytestParametrizeNamesWrongType,
|
||||
@@ -1041,6 +1039,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
Rule::OsPathGetctime,
|
||||
Rule::Glob,
|
||||
Rule::OsListdir,
|
||||
Rule::OsSymlink,
|
||||
]) {
|
||||
flake8_use_pathlib::rules::replaceable_by_pathlib(checker, call);
|
||||
}
|
||||
@@ -1285,22 +1284,22 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
..
|
||||
}) => {
|
||||
if checker.enabled(Rule::PercentFormatUnsupportedFormatCharacter) {
|
||||
checker.report_diagnostic(Diagnostic::new(
|
||||
checker.report_diagnostic(
|
||||
pyflakes::rules::PercentFormatUnsupportedFormatCharacter {
|
||||
char: c,
|
||||
},
|
||||
location,
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
if checker.enabled(Rule::PercentFormatInvalidFormat) {
|
||||
checker.report_diagnostic(Diagnostic::new(
|
||||
checker.report_diagnostic(
|
||||
pyflakes::rules::PercentFormatInvalidFormat {
|
||||
message: e.to_string(),
|
||||
},
|
||||
location,
|
||||
));
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(summary) => {
|
||||
@@ -1364,13 +1363,7 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) {
|
||||
op: Operator::Add, ..
|
||||
}) => {
|
||||
if checker.enabled(Rule::ExplicitStringConcatenation) {
|
||||
if let Some(diagnostic) = flake8_implicit_str_concat::rules::explicit(
|
||||
expr,
|
||||
checker.locator,
|
||||
checker.settings,
|
||||
) {
|
||||
checker.report_diagnostic(diagnostic);
|
||||
}
|
||||
flake8_implicit_str_concat::rules::explicit(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::CollectionLiteralConcatenation) {
|
||||
ruff::rules::collection_literal_concatenation(checker, expr);
|
||||

@@ -1,4 +1,3 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::helpers;
use ruff_python_ast::types::Node;
use ruff_python_ast::{self as ast, Expr, Stmt};
@@ -39,12 +38,12 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if !checker.semantic.scope_id.is_global() {
for name in names {
if checker.semantic.nonlocal(name).is_none() {
checker.report_diagnostic(Diagnostic::new(
checker.report_diagnostic(
pylint::rules::NonlocalWithoutBinding {
name: name.to_string(),
},
name.range(),
));
);
}
}
}
@@ -55,22 +54,20 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
Stmt::Break(_) => {
if checker.enabled(Rule::BreakOutsideLoop) {
if let Some(diagnostic) = pyflakes::rules::break_outside_loop(
pyflakes::rules::break_outside_loop(
checker,
stmt,
&mut checker.semantic.current_statements().skip(1),
) {
checker.report_diagnostic(diagnostic);
}
);
}
}
Stmt::Continue(_) => {
if checker.enabled(Rule::ContinueOutsideLoop) {
if let Some(diagnostic) = pyflakes::rules::continue_outside_loop(
pyflakes::rules::continue_outside_loop(
checker,
stmt,
&mut checker.semantic.current_statements().skip(1),
) {
checker.report_diagnostic(diagnostic);
}
);
}
}
Stmt::FunctionDef(
@@ -98,9 +95,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
fastapi::rules::fastapi_unused_path_parameter(checker, function_def);
}
if checker.enabled(Rule::AmbiguousFunctionName) {
if let Some(diagnostic) = pycodestyle::rules::ambiguous_function_name(name) {
checker.report_diagnostic(diagnostic);
}
pycodestyle::rules::ambiguous_function_name(checker, name);
}
if checker.enabled(Rule::InvalidBoolReturnType) {
pylint::rules::invalid_bool_return(checker, function_def);
@@ -121,15 +116,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::invalid_str_return(checker, function_def);
}
if checker.enabled(Rule::InvalidFunctionName) {
if let Some(diagnostic) = pep8_naming::rules::invalid_function_name(
pep8_naming::rules::invalid_function_name(
checker,
stmt,
name,
decorator_list,
&checker.settings.pep8_naming.ignore_names,
&checker.semantic,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.source_type.is_stub() {
if checker.enabled(Rule::PassStatementStubBody) {
@@ -179,14 +173,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_pyi::rules::pep_484_positional_parameter(checker, function_def);
}
if checker.enabled(Rule::DunderFunctionName) {
if let Some(diagnostic) = pep8_naming::rules::dunder_function_name(
pep8_naming::rules::dunder_function_name(
checker,
checker.semantic.current_scope(),
stmt,
name,
&checker.settings.pep8_naming.ignore_names,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::GlobalStatement) {
pylint::rules::global_statement(checker, name);
@@ -231,14 +224,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
);
}
if checker.enabled(Rule::ComplexStructure) {
if let Some(diagnostic) = mccabe::rules::function_is_too_complex(
mccabe::rules::function_is_too_complex(
checker,
stmt,
name,
body,
checker.settings.mccabe.max_complexity,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::HardcodedPasswordDefault) {
flake8_bandit::rules::hardcoded_password_default(checker, parameters);
@@ -258,31 +250,28 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::too_many_positional_arguments(checker, function_def);
}
if checker.enabled(Rule::TooManyReturnStatements) {
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
pylint::rules::too_many_return_statements(
checker,
stmt,
body,
checker.settings.pylint.max_returns,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::TooManyBranches) {
if let Some(diagnostic) = pylint::rules::too_many_branches(
pylint::rules::too_many_branches(
checker,
stmt,
body,
checker.settings.pylint.max_branches,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::TooManyStatements) {
if let Some(diagnostic) = pylint::rules::too_many_statements(
pylint::rules::too_many_statements(
checker,
stmt,
body,
checker.settings.pylint.max_statements,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.any_enabled(&[
Rule::PytestFixtureIncorrectParenthesesStyle,
@@ -439,6 +428,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if checker.enabled(Rule::UselessObjectInheritance) {
pyupgrade::rules::useless_object_inheritance(checker, class_def);
}
if checker.enabled(Rule::UselessClassMetaclassType) {
pyupgrade::rules::useless_class_metaclass_type(checker, class_def);
}
if checker.enabled(Rule::ReplaceStrEnum) {
if checker.target_version() >= PythonVersion::PY311 {
pyupgrade::rules::replace_str_enum(checker, class_def);
@@ -448,28 +440,24 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pyupgrade::rules::unnecessary_class_parentheses(checker, class_def);
}
if checker.enabled(Rule::AmbiguousClassName) {
if let Some(diagnostic) = pycodestyle::rules::ambiguous_class_name(name) {
checker.report_diagnostic(diagnostic);
}
pycodestyle::rules::ambiguous_class_name(checker, name);
}
if checker.enabled(Rule::InvalidClassName) {
if let Some(diagnostic) = pep8_naming::rules::invalid_class_name(
pep8_naming::rules::invalid_class_name(
checker,
stmt,
name,
&checker.settings.pep8_naming.ignore_names,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::ErrorSuffixOnExceptionName) {
if let Some(diagnostic) = pep8_naming::rules::error_suffix_on_exception_name(
pep8_naming::rules::error_suffix_on_exception_name(
checker,
stmt,
arguments.as_deref(),
name,
&checker.settings.pep8_naming.ignore_names,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if !checker.source_type.is_stub() {
if checker.any_enabled(&[
@@ -608,11 +596,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}

if checker.enabled(Rule::Debugger) {
if let Some(diagnostic) =
flake8_debugger::rules::debugger_import(stmt, None, &alias.name)
{
checker.report_diagnostic(diagnostic);
}
flake8_debugger::rules::debugger_import(checker, stmt, None, &alias.name);
}
if checker.enabled(Rule::BannedApi) {
flake8_tidy_imports::rules::banned_api(
@@ -635,94 +619,74 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::manual_from_import(checker, stmt, alias, names);
}
if checker.enabled(Rule::ImportSelf) {
if let Some(diagnostic) =
pylint::rules::import_self(alias, checker.module.qualified_name())
{
checker.report_diagnostic(diagnostic);
}
pylint::rules::import_self(checker, alias, checker.module.qualified_name());
}
if let Some(asname) = &alias.asname {
let name = alias.name.split('.').next_back().unwrap();
if checker.enabled(Rule::ConstantImportedAsNonConstant) {
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_constant(
pep8_naming::rules::constant_imported_as_non_constant(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
pep8_naming::rules::lowercase_imported_as_non_lowercase(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
pep8_naming::rules::camelcase_imported_as_lowercase(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
pep8_naming::rules::camelcase_imported_as_constant(
checker,
name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
pep8_naming::rules::camelcase_imported_as_acronym(
name, asname, alias, stmt, checker,
) {
checker.report_diagnostic(diagnostic);
}
);
}
}
if checker.enabled(Rule::BannedImportAlias) {
if let Some(asname) = &alias.asname {
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
&alias.name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
)
{
checker.report_diagnostic(diagnostic);
}
flake8_import_conventions::rules::banned_import_alias(
checker,
stmt,
&alias.name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
);
}
}
if checker.enabled(Rule::PytestIncorrectPytestImport) {
if let Some(diagnostic) = flake8_pytest_style::rules::import(
flake8_pytest_style::rules::import(
checker,
stmt,
&alias.name,
alias.asname.as_deref(),
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::BuiltinImportShadowing) {
flake8_builtins::rules::builtin_import_shadowing(checker, alias);
@@ -834,11 +798,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}

if checker.enabled(Rule::PytestIncorrectPytestImport) {
if let Some(diagnostic) =
flake8_pytest_style::rules::import_from(stmt, module, level)
{
checker.report_diagnostic(diagnostic);
}
flake8_pytest_style::rules::import_from(checker, stmt, module, level);
}
if checker.source_type.is_stub() {
if checker.enabled(Rule::FutureAnnotationsInStub) {
@@ -853,119 +813,98 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
} else if &alias.name == "*" {
if checker.enabled(Rule::UndefinedLocalWithNestedImportStarUsage) {
if !matches!(checker.semantic.current_scope().kind, ScopeKind::Module) {
checker.report_diagnostic(Diagnostic::new(
checker.report_diagnostic(
pyflakes::rules::UndefinedLocalWithNestedImportStarUsage {
name: helpers::format_import_from(level, module).to_string(),
},
stmt.range(),
));
);
}
}
if checker.enabled(Rule::UndefinedLocalWithImportStar) {
checker.report_diagnostic(Diagnostic::new(
checker.report_diagnostic(
pyflakes::rules::UndefinedLocalWithImportStar {
name: helpers::format_import_from(level, module).to_string(),
},
stmt.range(),
));
);
}
}
if checker.enabled(Rule::RelativeImports) {
if let Some(diagnostic) = flake8_tidy_imports::rules::banned_relative_import(
flake8_tidy_imports::rules::banned_relative_import(
checker,
stmt,
level,
module,
checker.module.qualified_name(),
checker.settings.flake8_tidy_imports.ban_relative_imports,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::Debugger) {
if let Some(diagnostic) =
flake8_debugger::rules::debugger_import(stmt, module, &alias.name)
{
checker.report_diagnostic(diagnostic);
}
flake8_debugger::rules::debugger_import(checker, stmt, module, &alias.name);
}
if checker.enabled(Rule::BannedImportAlias) {
if let Some(asname) = &alias.asname {
let qualified_name =
helpers::format_import_from_member(level, module, &alias.name);
if let Some(diagnostic) =
flake8_import_conventions::rules::banned_import_alias(
stmt,
&qualified_name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
)
{
checker.report_diagnostic(diagnostic);
}
flake8_import_conventions::rules::banned_import_alias(
checker,
stmt,
&qualified_name,
asname,
&checker.settings.flake8_import_conventions.banned_aliases,
);
}
}
if let Some(asname) = &alias.asname {
if checker.enabled(Rule::ConstantImportedAsNonConstant) {
if let Some(diagnostic) =
pep8_naming::rules::constant_imported_as_non_constant(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::lowercase_imported_as_non_lowercase(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
if let Some(diagnostic) =
pep8_naming::rules::camelcase_imported_as_lowercase(
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
)
{
checker.report_diagnostic(diagnostic);
}
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_constant(
pep8_naming::rules::constant_imported_as_non_constant(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::LowercaseImportedAsNonLowercase) {
pep8_naming::rules::lowercase_imported_as_non_lowercase(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsLowercase) {
pep8_naming::rules::camelcase_imported_as_lowercase(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsConstant) {
pep8_naming::rules::camelcase_imported_as_constant(
checker,
&alias.name,
asname,
alias,
stmt,
&checker.settings.pep8_naming.ignore_names,
);
}
if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
pep8_naming::rules::camelcase_imported_as_acronym(
&alias.name,
asname,
alias,
stmt,
checker,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if !checker.source_type.is_stub() {
if checker.enabled(Rule::UselessImportAlias) {
@@ -978,23 +917,21 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.enabled(Rule::ImportSelf) {
if let Some(diagnostic) = pylint::rules::import_from_self(
pylint::rules::import_from_self(
checker,
level,
module,
names,
checker.module.qualified_name(),
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::BannedImportFrom) {
if let Some(diagnostic) = flake8_import_conventions::rules::banned_import_from(
flake8_import_conventions::rules::banned_import_from(
checker,
stmt,
&helpers::format_import_from(level, module),
&checker.settings.flake8_import_conventions.banned_from,
) {
checker.report_diagnostic(diagnostic);
}
);
}
if checker.enabled(Rule::ByteStringUsage) {
flake8_pyi::rules::bytestring_import(checker, import_from);
@@ -1209,7 +1146,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
) => {
if !checker.semantic.in_type_checking_block() {
if checker.enabled(Rule::Assert) {
checker.report_diagnostic(flake8_bandit::rules::assert_used(stmt));
flake8_bandit::rules::assert_used(checker, stmt);
}
}
if checker.enabled(Rule::AssertTuple) {
@@ -1406,11 +1343,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.enabled(Rule::DefaultExceptNotLast) {
if let Some(diagnostic) =
pyflakes::rules::default_except_not_last(handlers, checker.locator)
{
checker.report_diagnostic(diagnostic);
}
pyflakes::rules::default_except_not_last(checker, handlers, checker.locator);
}
if checker.any_enabled(&[
Rule::DuplicateHandlerException,
@@ -1505,9 +1438,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
);
}
if checker.enabled(Rule::PandasDfVariableName) {
if let Some(diagnostic) = pandas_vet::rules::assignment_to_df(targets) {
checker.report_diagnostic(diagnostic);
}
pandas_vet::rules::assignment_to_df(checker, targets);
}
if checker
.settings
@@ -1701,11 +1632,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::named_expr_without_context(checker, value);
}
if checker.enabled(Rule::AsyncioDanglingTask) {
if let Some(diagnostic) =
ruff::rules::asyncio_dangling_task(value, checker.semantic())
{
checker.report_diagnostic(diagnostic);
}
ruff::rules::asyncio_dangling_task(checker, value, checker.semantic());
}
if checker.enabled(Rule::RepeatedAppend) {
refurb::rules::repeated_append(checker, stmt);

@@ -1,4 +1,3 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_semantic::Exceptions;
use ruff_python_stdlib::builtins::version_builtin_was_added;

@@ -15,12 +14,12 @@ pub(crate) fn unresolved_references(checker: &Checker) {
for reference in checker.semantic.unresolved_references() {
if reference.is_wildcard_import() {
if checker.enabled(Rule::UndefinedLocalWithImportStarUsage) {
checker.report_diagnostic(Diagnostic::new(
checker.report_diagnostic(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: reference.name(checker.source()).to_string(),
},
reference.range(),
));
);
}
} else {
if checker.enabled(Rule::UndefinedName) {
@@ -42,13 +41,13 @@ pub(crate) fn unresolved_references(checker: &Checker) {

let symbol_name = reference.name(checker.source());

checker.report_diagnostic(Diagnostic::new(
checker.report_diagnostic(
pyflakes::rules::UndefinedName {
name: symbol_name.to_string(),
minor_version_builtin_added: version_builtin_was_added(symbol_name),
},
reference.range(),
));
);
}
}
}

@@ -26,12 +26,9 @@ use std::path::Path;

use itertools::Itertools;
use log::debug;
use ruff_python_parser::semantic_errors::{
SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind,
};
use rustc_hash::{FxHashMap, FxHashSet};

use ruff_diagnostics::{Diagnostic, Edit, IsolationLevel};
use ruff_diagnostics::IsolationLevel;
use ruff_notebook::{CellOffsets, NotebookIndex};
use ruff_python_ast::helpers::{collect_import_from_member, is_docstring_stmt, to_module_path};
use ruff_python_ast::identifier::Identifier;
@@ -40,12 +37,15 @@ use ruff_python_ast::str::Quote;
use ruff_python_ast::visitor::{Visitor, walk_except_handler, walk_pattern};
use ruff_python_ast::{
self as ast, AnyParameterRef, ArgOrKeyword, Comprehension, ElifElseClause, ExceptHandler, Expr,
ExprContext, FStringElement, Keyword, MatchCase, ModModule, Parameter, Parameters, Pattern,
PythonVersion, Stmt, Suite, UnaryOp,
ExprContext, InterpolatedStringElement, Keyword, MatchCase, ModModule, Parameter, Parameters,
Pattern, PythonVersion, Stmt, Suite, UnaryOp,
};
use ruff_python_ast::{PySourceType, helpers, str, visitor};
use ruff_python_codegen::{Generator, Stylist};
use ruff_python_index::Indexer;
use ruff_python_parser::semantic_errors::{
SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, SemanticSyntaxErrorKind,
};
use ruff_python_parser::typing::{AnnotationKind, ParsedAnnotation, parse_type_annotation};
use ruff_python_parser::{ParseError, Parsed, Tokens};
use ruff_python_semantic::all::{DunderAllDefinition, DunderAllFlags};
@@ -57,7 +57,7 @@ use ruff_python_semantic::{
};
use ruff_python_stdlib::builtins::{MAGIC_GLOBALS, python_builtins};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::{OneIndexed, SourceRow};
use ruff_source_file::{OneIndexed, SourceFile, SourceRow};
use ruff_text_size::{Ranged, TextRange, TextSize};

use crate::checkers::ast::annotation::AnnotationContext;
@@ -65,14 +65,15 @@ use crate::docstrings::extraction::ExtractionTarget;
use crate::importer::{ImportRequest, Importer, ResolutionError};
use crate::noqa::NoqaMapping;
use crate::package::PackageRoot;
use crate::preview::{is_semantic_errors_enabled, is_undefined_export_in_dunder_init_enabled};
use crate::registry::Rule;
use crate::preview::is_semantic_errors_enabled;
use crate::registry::{AsRule, Rule};
use crate::rules::pyflakes::rules::{
LateFutureImport, ReturnOutsideFunction, YieldOutsideFunction,
};
use crate::rules::pylint::rules::{AwaitOutsideAsync, LoadBeforeGlobalDeclaration};
use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::{Edit, OldDiagnostic, Violation};
use crate::{Locator, docstrings, noqa};

mod analyze;
@@ -223,8 +224,6 @@ pub(crate) struct Checker<'a> {
visit: deferred::Visit<'a>,
/// A set of deferred nodes to be analyzed after the AST traversal (e.g., `for` loops).
analyze: deferred::Analyze,
/// The cumulative set of diagnostics computed across all lint rules.
diagnostics: RefCell<Vec<Diagnostic>>,
/// The list of names already seen by flake8-bugbear diagnostics, to avoid duplicate violations.
flake8_bugbear_seen: RefCell<FxHashSet<TextRange>>,
/// The end offset of the last visited statement.
@@ -238,6 +237,7 @@ pub(crate) struct Checker<'a> {
semantic_checker: SemanticSyntaxChecker,
/// Errors collected by the `semantic_checker`.
semantic_errors: RefCell<Vec<SemanticSyntaxError>>,
context: &'a LintContext<'a>,
}

impl<'a> Checker<'a> {
@@ -258,6 +258,7 @@ impl<'a> Checker<'a> {
cell_offsets: Option<&'a CellOffsets>,
notebook_index: Option<&'a NotebookIndex>,
target_version: TargetVersion,
context: &'a LintContext<'a>,
) -> Checker<'a> {
let semantic = SemanticModel::new(&settings.typing_modules, path, module);
Self {
@@ -278,7 +279,6 @@ impl<'a> Checker<'a> {
semantic,
visit: deferred::Visit::default(),
analyze: deferred::Analyze::default(),
diagnostics: RefCell::default(),
flake8_bugbear_seen: RefCell::default(),
cell_offsets,
notebook_index,
@@ -287,6 +287,7 @@ impl<'a> Checker<'a> {
target_version,
semantic_checker: SemanticSyntaxChecker::new(),
semantic_errors: RefCell::default(),
context,
}
}
}
@@ -337,6 +338,7 @@ impl<'a> Checker<'a> {
ast::BytesLiteralFlags::empty().with_quote_style(self.preferred_quote())
}

// TODO(dylan) add similar method for t-strings
/// Return the default f-string flags a generated `FString` node should use, given where we are
/// in the AST.
pub(crate) fn default_fstring_flags(&self) -> ast::FStringFlags {
@@ -379,10 +381,30 @@ impl<'a> Checker<'a> {
self.indexer.comment_ranges()
}

/// Push a new [`Diagnostic`] to the collection in the [`Checker`]
pub(crate) fn report_diagnostic(&self, diagnostic: Diagnostic) {
let mut diagnostics = self.diagnostics.borrow_mut();
diagnostics.push(diagnostic);
/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
///
/// The guard derefs to a [`Diagnostic`], so it can be used to further modify the diagnostic
/// before it is added to the collection in the checker on `Drop`.
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> DiagnosticGuard<'chk, 'a> {
self.context.report_diagnostic(kind, range)
}

/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
/// enabled.
///
/// Prefer [`Checker::report_diagnostic`] in general because the conversion from a `Diagnostic`
/// to a `Rule` is somewhat expensive.
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> Option<DiagnosticGuard<'chk, 'a>> {
self.context
.report_diagnostic_if_enabled(kind, range, self.settings)
}
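// Illustrative usage of the guard-based API above (not part of this diff).
// `SomeViolation`, `range`, and `edit` are hypothetical placeholders; `set_fix` is
// available because the guard derefs to the underlying diagnostic.
//
//     let mut diagnostic = checker.report_diagnostic(SomeViolation, range);
//     diagnostic.set_fix(Fix::safe_edit(edit));
//     // On drop, the guard pushes the finished diagnostic into the `LintContext`.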

/// Adds a [`TextRange`] to the set of ranges of variable names
@@ -508,9 +530,9 @@ impl<'a> Checker<'a> {
}

/// Push `diagnostic` if the checker is not in a `@no_type_check` context.
pub(crate) fn report_type_diagnostic(&self, diagnostic: Diagnostic) {
pub(crate) fn report_type_diagnostic<T: Violation>(&self, kind: T, range: TextRange) {
if !self.semantic.in_no_type_check() {
self.report_diagnostic(diagnostic);
self.report_diagnostic(kind, range);
}
}
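// Note on the signature change above (an editorial reading, not part of this diff):
// `report_type_diagnostic` no longer receives a pre-built `Diagnostic`; the diagnostic
// is only created, and submitted on drop, when the checker is outside a
// `@no_type_check` context.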

@@ -595,7 +617,7 @@ impl SemanticSyntaxContext for Checker<'_> {
match error.kind {
SemanticSyntaxErrorKind::LateFutureImport => {
if self.settings.rules.enabled(Rule::LateFutureImport) {
self.report_diagnostic(Diagnostic::new(LateFutureImport, error.range));
self.report_diagnostic(LateFutureImport, error.range);
}
}
SemanticSyntaxErrorKind::LoadBeforeGlobalDeclaration { name, start } => {
@@ -604,31 +626,28 @@ impl SemanticSyntaxContext for Checker<'_> {
.rules
.enabled(Rule::LoadBeforeGlobalDeclaration)
{
self.report_diagnostic(Diagnostic::new(
self.report_diagnostic(
LoadBeforeGlobalDeclaration {
name,
row: self.compute_source_row(start),
},
error.range,
));
);
}
}
SemanticSyntaxErrorKind::YieldOutsideFunction(kind) => {
if self.settings.rules.enabled(Rule::YieldOutsideFunction) {
self.report_diagnostic(Diagnostic::new(
YieldOutsideFunction::new(kind),
error.range,
));
self.report_diagnostic(YieldOutsideFunction::new(kind), error.range);
}
}
SemanticSyntaxErrorKind::ReturnOutsideFunction => {
if self.settings.rules.enabled(Rule::ReturnOutsideFunction) {
self.report_diagnostic(Diagnostic::new(ReturnOutsideFunction, error.range));
self.report_diagnostic(ReturnOutsideFunction, error.range);
}
}
SemanticSyntaxErrorKind::AwaitOutsideAsyncFunction(_) => {
if self.settings.rules.enabled(Rule::AwaitOutsideAsync) {
self.report_diagnostic(Diagnostic::new(AwaitOutsideAsync, error.range));
self.report_diagnostic(AwaitOutsideAsync, error.range);
}
}
SemanticSyntaxErrorKind::ReboundComprehensionVariable
@@ -1879,6 +1898,10 @@ impl<'a> Visitor<'a> for Checker<'a> {
self.semantic.flags |= SemanticModelFlags::F_STRING;
visitor::walk_expr(self, expr);
}
Expr::TString(_) => {
self.semantic.flags |= SemanticModelFlags::T_STRING;
visitor::walk_expr(self, expr);
}
Expr::Named(ast::ExprNamed {
target,
value,
@@ -1912,6 +1935,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
Expr::FString(f_string) => analyze::string_like(f_string.into(), self),
Expr::TString(t_string) => analyze::string_like(t_string.into(), self),
_ => {}
}

@@ -2101,12 +2125,15 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}

fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement) {
fn visit_interpolated_string_element(
&mut self,
interpolated_string_element: &'a InterpolatedStringElement,
) {
let snapshot = self.semantic.flags;
if f_string_element.is_expression() {
self.semantic.flags |= SemanticModelFlags::F_STRING_REPLACEMENT_FIELD;
if interpolated_string_element.is_interpolation() {
self.semantic.flags |= SemanticModelFlags::INTERPOLATED_STRING_REPLACEMENT_FIELD;
}
visitor::walk_f_string_element(self, f_string_element);
visitor::walk_interpolated_string_element(self, interpolated_string_element);
self.semantic.flags = snapshot;
}
}
@@ -2717,12 +2744,12 @@ impl<'a> Checker<'a> {
self.semantic.restore(snapshot);

if self.enabled(Rule::ForwardAnnotationSyntaxError) {
self.report_type_diagnostic(Diagnostic::new(
self.report_type_diagnostic(
pyflakes::rules::ForwardAnnotationSyntaxError {
parse_error: parse_error.error.to_string(),
},
string_expr.range(),
));
);
}
}
}
@@ -2863,31 +2890,23 @@ impl<'a> Checker<'a> {
} else {
if self.semantic.global_scope().uses_star_imports() {
if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
self.diagnostics.get_mut().push(
Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
},
range,
)
.with_parent(definition.start()),
);
self.report_diagnostic(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
},
range,
)
.set_parent(definition.start());
}
} else {
if self.enabled(Rule::UndefinedExport) {
if is_undefined_export_in_dunder_init_enabled(self.settings)
|| !self.path.ends_with("__init__.py")
{
self.diagnostics.get_mut().push(
Diagnostic::new(
pyflakes::rules::UndefinedExport {
name: name.to_string(),
},
range,
)
.with_parent(definition.start()),
);
}
self.report_diagnostic(
pyflakes::rules::UndefinedExport {
name: name.to_string(),
},
range,
)
.set_parent(definition.start());
}
}
}
@@ -2947,7 +2966,8 @@ pub(crate) fn check_ast(
cell_offsets: Option<&CellOffsets>,
notebook_index: Option<&NotebookIndex>,
target_version: TargetVersion,
) -> (Vec<Diagnostic>, Vec<SemanticSyntaxError>) {
context: &LintContext,
) -> Vec<SemanticSyntaxError> {
let module_path = package
.map(PackageRoot::path)
.and_then(|package| to_module_path(package, path));
@@ -2987,6 +3007,7 @@ pub(crate) fn check_ast(
cell_offsets,
notebook_index,
target_version,
context,
);
checker.bind_builtins();

@@ -3013,10 +3034,137 @@ pub(crate) fn check_ast(
analyze::deferred_scopes(&checker);

let Checker {
diagnostics,
semantic_errors,
..
semantic_errors, ..
} = checker;

(diagnostics.into_inner(), semantic_errors.into_inner())
semantic_errors.into_inner()
}

/// A type for collecting diagnostics in a given file.
///
/// [`LintContext::report_diagnostic`] can be used to obtain a [`DiagnosticGuard`], which will push
/// a [`Violation`] to the contained [`OldDiagnostic`] collection on `Drop`.
pub(crate) struct LintContext<'a> {
diagnostics: RefCell<Vec<OldDiagnostic>>,
source_file: &'a SourceFile,
}

impl<'a> LintContext<'a> {
/// Create a new collector with the given `source_file` and an empty collection of
/// `OldDiagnostic`s.
pub(crate) fn new(source_file: &'a SourceFile) -> Self {
Self {
diagnostics: RefCell::default(),
source_file,
}
}

/// Return a [`DiagnosticGuard`] for reporting a diagnostic.
///
/// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic
/// before it is added to the collection in the collector on `Drop`.
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> DiagnosticGuard<'chk, 'a> {
DiagnosticGuard {
context: self,
diagnostic: Some(OldDiagnostic::new(kind, range, self.source_file)),
}
}

/// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is
/// enabled.
///
/// Prefer [`DiagnosticsCollector::report_diagnostic`] in general because the conversion from an
/// `OldDiagnostic` to a `Rule` is somewhat expensive.
pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
settings: &LinterSettings,
) -> Option<DiagnosticGuard<'chk, 'a>> {
let diagnostic = OldDiagnostic::new(kind, range, self.source_file);
if settings.rules.enabled(diagnostic.rule()) {
Some(DiagnosticGuard {
context: self,
diagnostic: Some(diagnostic),
})
} else {
None
}
}

pub(crate) fn into_diagnostics(self) -> Vec<OldDiagnostic> {
self.diagnostics.into_inner()
}

pub(crate) fn is_empty(&self) -> bool {
self.diagnostics.borrow().is_empty()
}

pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<OldDiagnostic> {
self.diagnostics.get_mut()
}

pub(crate) fn iter(&mut self) -> impl Iterator<Item = &OldDiagnostic> {
self.diagnostics.get_mut().iter()
}
}
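// Illustrative lifecycle of the collector above (not part of this diff);
// `source_file`, `SomeViolation`, and `range` are hypothetical placeholders.
//
//     let context = LintContext::new(&source_file);
//     context.report_diagnostic(SomeViolation, range); // guard drops here and pushes
//     let collected: Vec<OldDiagnostic> = context.into_diagnostics();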

/// An abstraction for mutating a diagnostic.
///
/// Callers can build this guard by starting with `Checker::report_diagnostic`.
///
/// The primary function of this guard is to add the underlying diagnostic to the `Checker`'s list
/// of diagnostics on `Drop`, while dereferencing to the underlying diagnostic for mutations like
/// adding fixes or parent ranges.
pub(crate) struct DiagnosticGuard<'a, 'b> {
/// The parent checker that will receive the diagnostic on `Drop`.
context: &'a LintContext<'b>,
/// The diagnostic that we want to report.
///
/// This is always `Some` until the `Drop` (or `defuse`) call.
diagnostic: Option<OldDiagnostic>,
}

impl DiagnosticGuard<'_, '_> {
/// Consume the underlying `Diagnostic` without emitting it.
///
/// In general you should avoid constructing diagnostics that may not be emitted, but this
/// method can be used where this is unavoidable.
pub(crate) fn defuse(mut self) {
self.diagnostic = None;
}
}
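// Illustrative use of `defuse` (not part of this diff): it consumes the guard so that
// nothing is pushed on drop. `SomeViolation`, `range`, and `should_suppress` are
// hypothetical placeholders.
//
//     let guard = context.report_diagnostic(SomeViolation, range);
//     if should_suppress {
//         guard.defuse(); // no diagnostic is emitted
//     }
//     // otherwise the guard drops at end of scope and the diagnostic is reported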

impl std::ops::Deref for DiagnosticGuard<'_, '_> {
type Target = OldDiagnostic;

fn deref(&self) -> &OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_ref().unwrap()
}
}

/// Return a mutable borrow of the diagnostic in this guard.
impl std::ops::DerefMut for DiagnosticGuard<'_, '_> {
fn deref_mut(&mut self) -> &mut OldDiagnostic {
// OK because `self.diagnostic` is only `None` within `Drop`.
self.diagnostic.as_mut().unwrap()
}
}

impl Drop for DiagnosticGuard<'_, '_> {
fn drop(&mut self) {
if std::thread::panicking() {
// Don't submit diagnostics when panicking because they might be incomplete.
return;
}

if let Some(diagnostic) = self.diagnostic.take() {
self.context.diagnostics.borrow_mut().push(diagnostic);
}
}
}

@@ -1,10 +1,10 @@
use std::path::Path;

use ruff_diagnostics::Diagnostic;
use ruff_python_ast::PythonVersion;
use ruff_python_trivia::CommentRanges;

use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::package::PackageRoot;
use crate::preview::is_allow_nested_roots_enabled;
use crate::registry::Rule;
@@ -20,13 +20,12 @@ pub(crate) fn check_file_path(
comment_ranges: &CommentRanges,
settings: &LinterSettings,
target_version: PythonVersion,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];

context: &LintContext,
) {
// flake8-no-pep420
if settings.rules.enabled(Rule::ImplicitNamespacePackage) {
let allow_nested_roots = is_allow_nested_roots_enabled(settings);
if let Some(diagnostic) = implicit_namespace_package(
implicit_namespace_package(
path,
package,
locator,
@@ -34,26 +33,17 @@ pub(crate) fn check_file_path(
&settings.project_root,
&settings.src,
allow_nested_roots,
) {
diagnostics.push(diagnostic);
}
context,
);
}

// pep8-naming
if settings.rules.enabled(Rule::InvalidModuleName) {
if let Some(diagnostic) =
invalid_module_name(path, package, &settings.pep8_naming.ignore_names)
{
diagnostics.push(diagnostic);
}
invalid_module_name(path, package, &settings.pep8_naming.ignore_names, context);
}

// flake8-builtins
if settings.rules.enabled(Rule::StdlibModuleShadowing) {
if let Some(diagnostic) = stdlib_module_shadowing(path, settings, target_version) {
diagnostics.push(diagnostic);
}
stdlib_module_shadowing(path, settings, target_version, context);
}

diagnostics
}

@@ -1,6 +1,5 @@
//! Lint rules based on import analysis.

use ruff_diagnostics::Diagnostic;
use ruff_notebook::CellOffsets;
use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
@@ -16,6 +15,8 @@ use crate::rules::isort;
use crate::rules::isort::block::{Block, BlockBuilder};
use crate::settings::LinterSettings;

use super::ast::LintContext;

#[expect(clippy::too_many_arguments)]
pub(crate) fn check_imports(
parsed: &Parsed<ModModule>,
@@ -28,7 +29,8 @@ pub(crate) fn check_imports(
source_type: PySourceType,
cell_offsets: Option<&CellOffsets>,
target_version: PythonVersion,
) -> Vec<Diagnostic> {
context: &LintContext,
) {
// Extract all import blocks from the AST.
let tracker = {
let mut tracker =
@@ -40,11 +42,10 @@ pub(crate) fn check_imports(
let blocks: Vec<&Block> = tracker.iter().collect();

// Enforce import rules.
let mut diagnostics = vec![];
if settings.rules.enabled(Rule::UnsortedImports) {
for block in &blocks {
if !block.imports.is_empty() {
if let Some(diagnostic) = isort::rules::organize_imports(
isort::rules::organize_imports(
block,
locator,
stylist,
@@ -54,21 +55,19 @@ pub(crate) fn check_imports(
source_type,
parsed.tokens(),
target_version,
) {
diagnostics.push(diagnostic);
}
context,
);
}
}
}
if settings.rules.enabled(Rule::MissingRequiredImport) {
diagnostics.extend(isort::rules::add_required_imports(
isort::rules::add_required_imports(
parsed,
locator,
stylist,
settings,
source_type,
));
context,
);
}

diagnostics
}

@@ -1,13 +1,11 @@
use ruff_diagnostics::Diagnostic;
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_parser::{TokenKind, Tokens};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};

use crate::Locator;
use crate::line_width::IndentWidth;
use crate::registry::{AsRule, Rule};
use crate::registry::Rule;
use crate::rules::pycodestyle::rules::logical_lines::{
LogicalLines, TokenFlags, extraneous_whitespace, indentation, missing_whitespace,
missing_whitespace_after_keyword, missing_whitespace_around_operator, redundant_backslash,
@@ -16,6 +14,9 @@ use crate::rules::pycodestyle::rules::logical_lines::{
whitespace_before_parameters,
};
use crate::settings::LinterSettings;
use crate::{Locator, Violation};

use super::ast::{DiagnosticGuard, LintContext};

/// Return the amount of indentation, expanding tabs to the next multiple of the settings' tab size.
pub(crate) fn expand_indent(line: &str, indent_width: IndentWidth) -> usize {
@@ -40,8 +41,9 @@ pub(crate) fn check_logical_lines(
indexer: &Indexer,
stylist: &Stylist,
settings: &LinterSettings,
) -> Vec<Diagnostic> {
let mut context = LogicalLinesContext::new(settings);
lint_context: &LintContext,
) {
let mut context = LogicalLinesContext::new(settings, lint_context);

let mut prev_line = None;
let mut prev_indent_level = None;
@@ -170,7 +172,7 @@ pub(crate) fn check_logical_lines(
let indent_size = 4;

if enforce_indentation {
for diagnostic in indentation(
indentation(
&line,
prev_line.as_ref(),
indent_char,
@@ -178,11 +180,9 @@ pub(crate) fn check_logical_lines(
prev_indent_level,
indent_size,
range,
) {
if settings.rules.enabled(diagnostic.rule()) {
context.push_diagnostic(diagnostic);
}
}
lint_context,
settings,
);
}

if !line.is_comment_only() {
@@ -190,26 +190,24 @@ pub(crate) fn check_logical_lines(
prev_indent_level = Some(indent_level);
}
}
context.diagnostics
}

#[derive(Debug, Clone)]
pub(crate) struct LogicalLinesContext<'a> {
pub(crate) struct LogicalLinesContext<'a, 'b> {
settings: &'a LinterSettings,
diagnostics: Vec<Diagnostic>,
context: &'a LintContext<'b>,
}

impl<'a> LogicalLinesContext<'a> {
fn new(settings: &'a LinterSettings) -> Self {
Self {
settings,
diagnostics: Vec::new(),
}
impl<'a, 'b> LogicalLinesContext<'a, 'b> {
fn new(settings: &'a LinterSettings, context: &'a LintContext<'b>) -> Self {
Self { settings, context }
}

pub(crate) fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
if self.settings.rules.enabled(diagnostic.rule()) {
self.diagnostics.push(diagnostic);
}
pub(crate) fn report_diagnostic<'chk, T: Violation>(
&'chk self,
kind: T,
range: TextRange,
) -> Option<DiagnosticGuard<'chk, 'a>> {
self.context
.report_diagnostic_if_enabled(kind, range, self.settings)
}
}
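// Illustrative call site for the rewritten context above (not part of this diff);
// because the rule check now happens inside `report_diagnostic_if_enabled`, callers
// match on the returned `Option`. `SomeViolation`, `range`, and `fix` are hypothetical.
//
//     if let Some(mut diagnostic) = context.report_diagnostic(SomeViolation, range) {
//         diagnostic.set_fix(fix);
//     }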

@@ -5,11 +5,9 @@ use std::path::Path;
use itertools::Itertools;
use rustc_hash::FxHashSet;

use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_trivia::CommentRanges;
use ruff_text_size::Ranged;

use crate::Locator;
use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
@@ -21,10 +19,13 @@ use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::{Edit, Fix, Locator};

use super::ast::LintContext;

#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
diagnostics: &mut Vec<Diagnostic>,
context: &mut LintContext,
path: &Path,
locator: &Locator,
comment_ranges: &CommentRanges,
@@ -46,7 +47,7 @@ pub(crate) fn check_noqa(
let mut ignored_diagnostics = vec![];

// Remove any ignored diagnostics.
'outer: for (index, diagnostic) in diagnostics.iter().enumerate() {
'outer: for (index, diagnostic) in context.iter().enumerate() {
let rule = diagnostic.rule();

if matches!(rule, Rule::BlanketNOQA) {
@@ -135,11 +136,9 @@ pub(crate) fn check_noqa(
Directive::All(directive) => {
if matches.is_empty() {
let edit = delete_comment(directive.range(), locator);
let mut diagnostic =
Diagnostic::new(UnusedNOQA { codes: None }, directive.range());
let mut diagnostic = context
.report_diagnostic(UnusedNOQA { codes: None }, directive.range());
diagnostic.set_fix(Fix::safe_edit(edit));

diagnostics.push(diagnostic);
}
}
Directive::Codes(directive) => {
@@ -159,9 +158,7 @@ pub(crate) fn check_noqa(

if seen_codes.insert(original_code) {
let is_code_used = if is_file_level {
diagnostics
.iter()
.any(|diag| diag.rule().noqa_code() == code)
context.iter().any(|diag| diag.rule().noqa_code() == code)
} else {
matches.iter().any(|match_| *match_ == code)
} || settings
@@ -212,7 +209,7 @@ pub(crate) fn check_noqa(
directive.range(),
)
};
let mut diagnostic = Diagnostic::new(
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: Some(UnusedCodes {
disabled: disabled_codes
@@ -236,7 +233,6 @@ pub(crate) fn check_noqa(
directive.range(),
);
diagnostic.set_fix(Fix::safe_edit(edit));
diagnostics.push(diagnostic);
}
}
}
@@ -247,8 +243,8 @@ pub(crate) fn check_noqa(
&& !per_file_ignores.contains(Rule::RedirectedNOQA)
&& !exemption.includes(Rule::RedirectedNOQA)
{
ruff::rules::redirected_noqa(diagnostics, &noqa_directives);
ruff::rules::redirected_file_noqa(diagnostics, &file_noqa_directives);
ruff::rules::redirected_noqa(context, &noqa_directives);
ruff::rules::redirected_file_noqa(context, &file_noqa_directives);
}

if settings.rules.enabled(Rule::BlanketNOQA)
@@ -256,7 +252,7 @@ pub(crate) fn check_noqa(
&& !exemption.enumerates(Rule::BlanketNOQA)
{
pygrep_hooks::rules::blanket_noqa(
diagnostics,
context,
&noqa_directives,
locator,
&file_noqa_directives,
@@ -267,7 +263,7 @@ pub(crate) fn check_noqa(
&& !per_file_ignores.contains(Rule::InvalidRuleCode)
&& !exemption.enumerates(Rule::InvalidRuleCode)
{
ruff::rules::invalid_noqa_code(diagnostics, &noqa_directives, locator, &settings.external);
ruff::rules::invalid_noqa_code(context, &noqa_directives, locator, &settings.external);
}

ignored_diagnostics.sort_unstable();
|
||||
@@ -1,6 +1,5 @@
 //! Lint rules based on checking physical lines.

-use ruff_diagnostics::Diagnostic;
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_source_file::UniversalNewlines;
@@ -17,15 +16,16 @@ use crate::rules::pylint;
 use crate::rules::ruff::rules::indented_form_feed;
 use crate::settings::LinterSettings;

+use super::ast::LintContext;
+
 pub(crate) fn check_physical_lines(
     locator: &Locator,
     stylist: &Stylist,
     indexer: &Indexer,
     doc_lines: &[TextSize],
     settings: &LinterSettings,
-) -> Vec<Diagnostic> {
-    let mut diagnostics: Vec<Diagnostic> = vec![];
-
+    context: &LintContext,
+) {
     let enforce_doc_line_too_long = settings.rules.enabled(Rule::DocLineTooLong);
     let enforce_line_too_long = settings.rules.enabled(Rule::LineTooLong);
     let enforce_no_newline_at_end_of_file = settings.rules.enabled(Rule::MissingNewlineAtEndOfFile);
@@ -45,54 +45,38 @@ pub(crate) fn check_physical_lines(
             .is_some()
         {
             if enforce_doc_line_too_long {
-                if let Some(diagnostic) = doc_line_too_long(&line, comment_ranges, settings) {
-                    diagnostics.push(diagnostic);
-                }
+                doc_line_too_long(&line, comment_ranges, settings, context);
             }
         }

         if enforce_mixed_spaces_and_tabs {
-            if let Some(diagnostic) = mixed_spaces_and_tabs(&line) {
-                diagnostics.push(diagnostic);
-            }
+            mixed_spaces_and_tabs(&line, context);
         }

         if enforce_line_too_long {
-            if let Some(diagnostic) = line_too_long(&line, comment_ranges, settings) {
-                diagnostics.push(diagnostic);
-            }
+            line_too_long(&line, comment_ranges, settings, context);
         }

         if enforce_bidirectional_unicode {
-            diagnostics.extend(pylint::rules::bidirectional_unicode(&line));
+            pylint::rules::bidirectional_unicode(&line, context);
         }

         if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
-            if let Some(diagnostic) = trailing_whitespace(&line, locator, indexer, settings) {
-                diagnostics.push(diagnostic);
-            }
+            trailing_whitespace(&line, locator, indexer, settings, context);
         }

         if settings.rules.enabled(Rule::IndentedFormFeed) {
-            if let Some(diagnostic) = indented_form_feed(&line) {
-                diagnostics.push(diagnostic);
-            }
+            indented_form_feed(&line, context);
         }
     }

     if enforce_no_newline_at_end_of_file {
-        if let Some(diagnostic) = no_newline_at_end_of_file(locator, stylist) {
-            diagnostics.push(diagnostic);
-        }
+        no_newline_at_end_of_file(locator, stylist, context);
     }

     if enforce_copyright_notice {
-        if let Some(diagnostic) = missing_copyright_notice(locator, settings) {
-            diagnostics.push(diagnostic);
-        }
+        missing_copyright_notice(locator, settings, context);
     }
-
-    diagnostics
 }

 #[cfg(test)]
@@ -100,8 +84,10 @@ mod tests {
     use ruff_python_codegen::Stylist;
     use ruff_python_index::Indexer;
     use ruff_python_parser::parse_module;
+    use ruff_source_file::SourceFileBuilder;

     use crate::Locator;
+    use crate::checkers::ast::LintContext;
     use crate::line_width::LineLength;
     use crate::registry::Rule;
     use crate::rules::pycodestyle;
@@ -118,6 +104,8 @@ mod tests {
         let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents());

         let check_with_max_line_length = |line_length: LineLength| {
+            let source_file = SourceFileBuilder::new("<filename>", line).finish();
+            let diagnostics = LintContext::new(&source_file);
             check_physical_lines(
                 &locator,
                 &stylist,
@@ -130,7 +118,9 @@ mod tests {
                     },
                     ..LinterSettings::for_rule(Rule::LineTooLong)
                 },
-            )
+                &diagnostics,
+            );
+            diagnostics.into_diagnostics()
         };
         let line_length = LineLength::try_from(8).unwrap();
         assert_eq!(check_with_max_line_length(line_length), vec![]);

@@ -2,7 +2,6 @@

 use std::path::Path;

-use ruff_diagnostics::Diagnostic;
 use ruff_notebook::CellOffsets;
 use ruff_python_ast::PySourceType;
 use ruff_python_codegen::Stylist;
@@ -19,6 +18,8 @@ use crate::rules::{
 };
 use crate::settings::LinterSettings;

+use super::ast::LintContext;
+
 #[expect(clippy::too_many_arguments)]
 pub(crate) fn check_tokens(
     tokens: &Tokens,
@@ -29,8 +30,8 @@ pub(crate) fn check_tokens(
     settings: &LinterSettings,
     source_type: PySourceType,
     cell_offsets: Option<&CellOffsets>,
-) -> Vec<Diagnostic> {
-    let mut diagnostics: Vec<Diagnostic> = vec![];
+    context: &mut LintContext,
+) {
     let comment_ranges = indexer.comment_ranges();

     if settings.rules.any_enabled(&[
@@ -41,16 +42,23 @@ pub(crate) fn check_tokens(
         Rule::BlankLinesAfterFunctionOrClass,
         Rule::BlankLinesBeforeNestedDefinition,
     ]) {
-        BlankLinesChecker::new(locator, stylist, settings, source_type, cell_offsets)
-            .check_lines(tokens, &mut diagnostics);
+        BlankLinesChecker::new(
+            locator,
+            stylist,
+            settings,
+            source_type,
+            cell_offsets,
+            context,
+        )
+        .check_lines(tokens);
     }

     if settings.rules.enabled(Rule::BlanketTypeIgnore) {
-        pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, comment_ranges, locator);
+        pygrep_hooks::rules::blanket_type_ignore(context, comment_ranges, locator);
     }

     if settings.rules.enabled(Rule::EmptyComment) {
-        pylint::rules::empty_comments(&mut diagnostics, comment_ranges, locator);
+        pylint::rules::empty_comments(context, comment_ranges, locator);
     }

     if settings
@@ -58,25 +66,20 @@ pub(crate) fn check_tokens(
         .enabled(Rule::AmbiguousUnicodeCharacterComment)
     {
         for range in comment_ranges {
-            ruff::rules::ambiguous_unicode_character_comment(
-                &mut diagnostics,
-                locator,
-                range,
-                settings,
-            );
+            ruff::rules::ambiguous_unicode_character_comment(context, locator, range, settings);
         }
     }

     if settings.rules.enabled(Rule::CommentedOutCode) {
-        eradicate::rules::commented_out_code(&mut diagnostics, locator, comment_ranges, settings);
+        eradicate::rules::commented_out_code(context, locator, comment_ranges, settings);
     }

     if settings.rules.enabled(Rule::UTF8EncodingDeclaration) {
-        pyupgrade::rules::unnecessary_coding_comment(&mut diagnostics, locator, comment_ranges);
+        pyupgrade::rules::unnecessary_coding_comment(context, locator, comment_ranges);
     }

     if settings.rules.enabled(Rule::TabIndentation) {
-        pycodestyle::rules::tab_indentation(&mut diagnostics, locator, indexer);
+        pycodestyle::rules::tab_indentation(context, locator, indexer);
     }

     if settings.rules.any_enabled(&[
@@ -87,7 +90,7 @@ pub(crate) fn check_tokens(
         Rule::InvalidCharacterZeroWidthSpace,
     ]) {
         for token in tokens {
-            pylint::rules::invalid_string_characters(&mut diagnostics, token, locator);
+            pylint::rules::invalid_string_characters(context, token, locator);
         }
     }

@@ -97,7 +100,7 @@ pub(crate) fn check_tokens(
         Rule::UselessSemicolon,
     ]) {
         pycodestyle::rules::compound_statements(
-            &mut diagnostics,
+            context,
             tokens,
             locator,
             indexer,
@@ -110,13 +113,7 @@ pub(crate) fn check_tokens(
         Rule::SingleLineImplicitStringConcatenation,
         Rule::MultiLineImplicitStringConcatenation,
     ]) {
-        flake8_implicit_str_concat::rules::implicit(
-            &mut diagnostics,
-            tokens,
-            locator,
-            indexer,
-            settings,
-        );
+        flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer, settings);
     }

     if settings.rules.any_enabled(&[
@@ -124,15 +121,15 @@ pub(crate) fn check_tokens(
         Rule::TrailingCommaOnBareTuple,
         Rule::ProhibitedTrailingComma,
     ]) {
-        flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, indexer);
+        flake8_commas::rules::trailing_commas(context, tokens, locator, indexer);
     }

     if settings.rules.enabled(Rule::ExtraneousParentheses) {
-        pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator);
+        pyupgrade::rules::extraneous_parentheses(context, tokens, locator);
     }

     if source_type.is_stub() && settings.rules.enabled(Rule::TypeCommentInStub) {
-        flake8_pyi::rules::type_comment_in_stub(&mut diagnostics, locator, comment_ranges);
+        flake8_pyi::rules::type_comment_in_stub(context, locator, comment_ranges);
     }

     if settings.rules.any_enabled(&[
@@ -142,13 +139,7 @@ pub(crate) fn check_tokens(
         Rule::ShebangNotFirstLine,
         Rule::ShebangMissingPython,
     ]) {
-        flake8_executable::rules::from_tokens(
-            &mut diagnostics,
-            path,
-            locator,
-            comment_ranges,
-            settings,
-        );
+        flake8_executable::rules::from_tokens(context, path, locator, comment_ranges, settings);
     }

     if settings.rules.any_enabled(&[
@@ -172,19 +163,15 @@ pub(crate) fn check_tokens(
                 TodoComment::from_comment(comment, *comment_range, i)
             })
             .collect();
-        flake8_todos::rules::todos(&mut diagnostics, &todo_comments, locator, comment_ranges);
-        flake8_fixme::rules::todos(&mut diagnostics, &todo_comments);
+        flake8_todos::rules::todos(context, &todo_comments, locator, comment_ranges);
+        flake8_fixme::rules::todos(context, &todo_comments);
     }

     if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) {
-        pycodestyle::rules::too_many_newlines_at_end_of_file(
-            &mut diagnostics,
-            tokens,
-            cell_offsets,
-        );
+        pycodestyle::rules::too_many_newlines_at_end_of_file(context, tokens, cell_offsets);
     }

-    diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.rule()));
-
-    diagnostics
+    context
+        .as_mut_vec()
+        .retain(|diagnostic| settings.rules.enabled(diagnostic.rule()));
 }

@@ -4,13 +4,13 @@
 /// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
 use std::fmt::Formatter;

-use strum_macros::{AsRefStr, EnumIter};
+use strum_macros::EnumIter;

-use crate::registry::{AsRule, Linter};
+use crate::registry::Linter;
 use crate::rule_selector::is_single_rule_selector;
 use crate::rules;

-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct NoqaCode(&'static str, &'static str);

 impl NoqaCode {
@@ -198,6 +198,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Pylint, "C0132") => (RuleGroup::Stable, rules::pylint::rules::TypeParamNameMismatch),
         (Pylint, "C0205") => (RuleGroup::Stable, rules::pylint::rules::SingleStringSlots),
         (Pylint, "C0206") => (RuleGroup::Stable, rules::pylint::rules::DictIndexMissingItems),
+        (Pylint, "C0207") => (RuleGroup::Preview, rules::pylint::rules::MissingMaxsplitArg),
         (Pylint, "C0208") => (RuleGroup::Stable, rules::pylint::rules::IterationOverSet),
         (Pylint, "C0414") => (RuleGroup::Stable, rules::pylint::rules::UselessImportAlias),
         (Pylint, "C0415") => (RuleGroup::Preview, rules::pylint::rules::ImportOutsideTopLevel),
@@ -552,6 +553,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Pyupgrade, "046") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericClass),
         (Pyupgrade, "047") => (RuleGroup::Preview, rules::pyupgrade::rules::NonPEP695GenericFunction),
         (Pyupgrade, "049") => (RuleGroup::Preview, rules::pyupgrade::rules::PrivateTypeParameter),
+        (Pyupgrade, "050") => (RuleGroup::Preview, rules::pyupgrade::rules::UselessClassMetaclassType),

         // pydocstyle
         (Pydocstyle, "100") => (RuleGroup::Stable, rules::pydocstyle::rules::UndocumentedPublicModule),
@@ -933,6 +935,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Flake8UsePathlib, "207") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::Glob),
         (Flake8UsePathlib, "208") => (RuleGroup::Stable, rules::flake8_use_pathlib::violations::OsListdir),
         (Flake8UsePathlib, "210") => (RuleGroup::Stable, rules::flake8_use_pathlib::rules::InvalidPathlibWithSuffix),
+        (Flake8UsePathlib, "211") => (RuleGroup::Preview, rules::flake8_use_pathlib::violations::OsSymlink),

         // flake8-logging-format
         (Flake8LoggingFormat, "001") => (RuleGroup::Stable, rules::flake8_logging_format::violations::LoggingStringFormat),
@@ -1154,3 +1157,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         _ => return None,
     })
 }
+
+impl std::fmt::Display for Rule {
+    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
+        f.write_str(self.into())
+    }
+}

@@ -1,14 +1,15 @@
 use anyhow::Result;
 use log::debug;

+use ruff_source_file::SourceFile;
 use ruff_text_size::{Ranged, TextRange, TextSize};

-use crate::{Fix, Violation};
+use crate::registry::AsRule;
+use crate::violation::Violation;
+use crate::{Fix, codes::Rule};

 #[derive(Debug, PartialEq, Eq, Clone)]
-pub struct Diagnostic {
-    /// The identifier of the diagnostic, used to align the diagnostic with a rule.
-    pub name: &'static str,
+pub struct OldDiagnostic {
     /// The message body to display to the user, to explain the diagnostic.
     pub body: String,
     /// The message to display to the user, to explain the suggested fix.
@@ -16,17 +17,27 @@ pub struct Diagnostic {
     pub range: TextRange,
     pub fix: Option<Fix>,
     pub parent: Option<TextSize>,
+
+    pub(crate) rule: Rule,
+
+    pub(crate) file: SourceFile,
 }

-impl Diagnostic {
-    pub fn new<T: Violation>(kind: T, range: TextRange) -> Self {
+impl OldDiagnostic {
+    // TODO(brent) We temporarily allow this to avoid updating all of the call sites to add
+    // references. I expect this method to go away or change significantly with the rest of the
+    // diagnostic refactor, but if it still exists in this form at the end of the refactor, we
+    // should just update the call sites.
+    #[expect(clippy::needless_pass_by_value)]
+    pub fn new<T: Violation>(kind: T, range: TextRange, file: &SourceFile) -> Self {
         Self {
-            name: T::rule_name(),
             body: Violation::message(&kind),
             suggestion: Violation::fix_title(&kind),
             range,
             fix: None,
             parent: None,
+            rule: T::rule(),
+            file: file.clone(),
         }
     }

@@ -50,7 +61,7 @@ impl Diagnostic {
     pub fn try_set_fix(&mut self, func: impl FnOnce() -> Result<Fix>) {
         match func() {
             Ok(fix) => self.fix = Some(fix),
-            Err(err) => debug!("Failed to create fix for {}: {}", self.name, err),
+            Err(err) => debug!("Failed to create fix for {}: {}", self.rule, err),
         }
     }

@@ -61,7 +72,7 @@ impl Diagnostic {
         match func() {
             Ok(None) => {}
             Ok(Some(fix)) => self.fix = Some(fix),
-            Err(err) => debug!("Failed to create fix for {}: {}", self.name, err),
+            Err(err) => debug!("Failed to create fix for {}: {}", self.rule, err),
         }
     }

@@ -80,7 +91,13 @@ impl Diagnostic {
     }
 }

-impl Ranged for Diagnostic {
+impl AsRule for OldDiagnostic {
+    fn rule(&self) -> Rule {
+        self.rule
+    }
+}
+
+impl Ranged for OldDiagnostic {
     fn range(&self) -> TextRange {
         self.range
     }

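The renamed struct also changes what a diagnostic carries: instead of a `name: &'static str` that downstream code re-parsed into a `Rule`, an `OldDiagnostic` owns its `Rule` and a handle to the `SourceFile` it was raised in. A rough stand-alone model (field and type names simplified; `SourceFile` here is a plain struct, whereas ruff's is a cheaply clonable handle):

```rust
#[derive(Clone, Debug)]
struct SourceFile {
    name: String, // ruff's version also carries the source text and a line index
}

#[derive(Clone, Copy, Debug)]
enum Rule {
    MissingNewlineAtEndOfFile,
}

#[derive(Debug)]
struct OldDiagnostic {
    body: String,
    rule: Rule,       // previously only the kebab-case name was stored
    file: SourceFile, // previously attached later, when the Message was built
}

impl OldDiagnostic {
    fn new(body: impl Into<String>, rule: Rule, file: &SourceFile) -> Self {
        Self {
            body: body.into(),
            rule,
            file: file.clone(),
        }
    }
}

fn main() {
    let file = SourceFile {
        name: "<filename>".into(),
    };
    let diag = OldDiagnostic::new(
        "no newline at end of file",
        Rule::MissingNewlineAtEndOfFile,
        &file,
    );
    println!("{diag:?}");
}
```

Storing the file on the diagnostic is what lets `Message::from_diagnostic` in the later hunks drop its separate `SourceFile` parameter.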
@@ -2,7 +2,6 @@

 use anyhow::{Context, Result};

-use ruff_diagnostics::Edit;
 use ruff_python_ast::parenthesize::parenthesized_range;
 use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt};
 use ruff_python_ast::{AnyNodeRef, ArgOrKeyword};
@@ -16,6 +15,7 @@ use ruff_python_trivia::{
 use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

+use crate::Edit;
 use crate::Locator;
 use crate::cst::matchers::{match_function_def, match_indented_block, match_statement};
 use crate::fix::codemods;
@@ -595,18 +595,17 @@ mod tests {
     use ruff_source_file::SourceFileBuilder;
     use test_case::test_case;

-    use ruff_diagnostics::{Diagnostic, Edit, Fix};
     use ruff_python_ast::Stmt;
     use ruff_python_codegen::Stylist;
     use ruff_python_parser::{parse_expression, parse_module};
     use ruff_text_size::{Ranged, TextRange, TextSize};

-    use crate::Locator;
     use crate::fix::apply_fixes;
     use crate::fix::edits::{
         add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon,
     };
     use crate::message::Message;
+    use crate::{Edit, Fix, Locator, OldDiagnostic};

     /// Parse the given source using [`Mode::Module`] and return the first statement.
     fn parse_first_stmt(source: &str) -> Result<Stmt> {
@@ -737,24 +736,16 @@ x = 1 \
         let diag = {
             use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
             let mut iter = edits.into_iter();
-            let diag = Diagnostic::new(
+            let diag = OldDiagnostic::new(
                 MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary.
                 TextRange::default(),
+                &SourceFileBuilder::new("<filename>", "<code>").finish(),
             )
             .with_fix(Fix::safe_edits(
                 iter.next().ok_or(anyhow!("expected edits nonempty"))?,
                 iter,
             ));
-            Message::diagnostic(
-                diag.name,
-                diag.body,
-                diag.suggestion,
-                diag.range,
-                diag.fix,
-                diag.parent,
-                SourceFileBuilder::new("<filename>", "<code>").finish(),
-                None,
-            )
+            Message::from_diagnostic(diag, None)
         };
         assert_eq!(apply_fixes([diag].iter(), &locator).code, expect);
         Ok(())

@@ -1,9 +1,9 @@
 use std::collections::BTreeSet;

 use itertools::Itertools;
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashSet;

-use ruff_diagnostics::{Edit, Fix, IsolationLevel, SourceMap};
+use ruff_diagnostics::{IsolationLevel, SourceMap};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

 use crate::Locator;
@@ -11,6 +11,7 @@ use crate::linter::FixTable;
 use crate::message::Message;
 use crate::registry::Rule;
 use crate::settings::types::UnsafeFixes;
+use crate::{Edit, Fix};

 pub(crate) mod codemods;
 pub(crate) mod edits;
@@ -58,13 +59,13 @@ fn apply_fixes<'a>(
     let mut last_pos: Option<TextSize> = None;
     let mut applied: BTreeSet<&Edit> = BTreeSet::default();
     let mut isolated: FxHashSet<u32> = FxHashSet::default();
-    let mut fixed = FxHashMap::default();
+    let mut fixed = FixTable::default();
     let mut source_map = SourceMap::default();

-    for (rule, fix) in diagnostics
-        .filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
-        .filter_map(|(rule, diagnostic)| diagnostic.fix().map(|fix| (rule, fix)))
-        .sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
+    for (code, name, fix) in diagnostics
+        .filter_map(|msg| msg.noqa_code().map(|code| (code, msg.name(), msg)))
+        .filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix)))
+        .sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2))
     {
         let mut edits = fix
             .edits()
@@ -109,7 +110,7 @@ fn apply_fixes<'a>(
         }

         applied.extend(applied_edits.drain(..));
-        *fixed.entry(rule).or_default() += 1;
+        *fixed.entry(code).or_default(name) += 1;
     }

     // Add the remaining content.
@@ -124,47 +125,59 @@ fn apply_fixes<'a>(
 }

 /// Compare two fixes.
-fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
+fn cmp_fix(name1: &str, name2: &str, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
     // Always apply `RedefinedWhileUnused` before `UnusedImport`, as the latter can end up fixing
     // the former. But we can't apply this just for `RedefinedWhileUnused` and `UnusedImport` because it violates
     // `< is transitive: a < b and b < c implies a < c. The same must hold for both == and >.`
     // See https://github.com/astral-sh/ruff/issues/12469#issuecomment-2244392085
-    match (rule1, rule2) {
-        (Rule::RedefinedWhileUnused, Rule::RedefinedWhileUnused) => std::cmp::Ordering::Equal,
-        (Rule::RedefinedWhileUnused, _) => std::cmp::Ordering::Less,
-        (_, Rule::RedefinedWhileUnused) => std::cmp::Ordering::Greater,
-        _ => std::cmp::Ordering::Equal,
+    let redefined_while_unused = Rule::RedefinedWhileUnused.name().as_str();
+    if (name1, name2) == (redefined_while_unused, redefined_while_unused) {
+        std::cmp::Ordering::Equal
+    } else if name1 == redefined_while_unused {
+        std::cmp::Ordering::Less
+    } else if name2 == redefined_while_unused {
+        std::cmp::Ordering::Greater
+    } else {
+        std::cmp::Ordering::Equal
     }
     // Apply fixes in order of their start position.
     .then_with(|| fix1.min_start().cmp(&fix2.min_start()))
     // Break ties in the event of overlapping rules, for some specific combinations.
-    .then_with(|| match (&rule1, &rule2) {
+    .then_with(|| {
+        let rules = (name1, name2);
         // Apply `MissingTrailingPeriod` fixes before `NewLineAfterLastParagraph` fixes.
-        (Rule::MissingTrailingPeriod, Rule::NewLineAfterLastParagraph) => std::cmp::Ordering::Less,
-        (Rule::NewLineAfterLastParagraph, Rule::MissingTrailingPeriod) => {
+        let missing_trailing_period = Rule::MissingTrailingPeriod.name().as_str();
+        let newline_after_last_paragraph = Rule::NewLineAfterLastParagraph.name().as_str();
+        let if_else_instead_of_dict_get = Rule::IfElseBlockInsteadOfDictGet.name().as_str();
+        let if_else_instead_of_if_exp = Rule::IfElseBlockInsteadOfIfExp.name().as_str();
+        if rules == (missing_trailing_period, newline_after_last_paragraph) {
+            std::cmp::Ordering::Less
+        } else if rules == (newline_after_last_paragraph, missing_trailing_period) {
             std::cmp::Ordering::Greater
         }
         // Apply `IfElseBlockInsteadOfDictGet` fixes before `IfElseBlockInsteadOfIfExp` fixes.
-        (Rule::IfElseBlockInsteadOfDictGet, Rule::IfElseBlockInsteadOfIfExp) => {
+        else if rules == (if_else_instead_of_dict_get, if_else_instead_of_if_exp) {
             std::cmp::Ordering::Less
         }
-        (Rule::IfElseBlockInsteadOfIfExp, Rule::IfElseBlockInsteadOfDictGet) => {
+        else if rules == (if_else_instead_of_if_exp, if_else_instead_of_dict_get) {
             std::cmp::Ordering::Greater
+        } else {
             std::cmp::Ordering::Equal
         }
-        _ => std::cmp::Ordering::Equal,
     })
 }

 #[cfg(test)]
 mod tests {
-    use ruff_diagnostics::{Diagnostic, Edit, Fix, SourceMarker};
+    use ruff_diagnostics::SourceMarker;
     use ruff_source_file::SourceFileBuilder;
     use ruff_text_size::{Ranged, TextSize};

     use crate::Locator;
+    use crate::diagnostic::OldDiagnostic;
     use crate::fix::{FixResult, apply_fixes};
     use crate::message::Message;
     use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
+    use crate::{Edit, Fix};

     fn create_diagnostics(
         filename: &str,
@@ -174,12 +187,12 @@ mod tests {
         edit.into_iter()
             .map(|edit| {
                 // The choice of rule here is arbitrary.
-                let diagnostic = Diagnostic::new(MissingNewlineAtEndOfFile, edit.range());
-                Message::from_diagnostic(
-                    diagnostic.with_fix(Fix::safe_edit(edit)),
-                    SourceFileBuilder::new(filename, source).finish(),
-                    None,
-                )
+                let diagnostic = OldDiagnostic::new(
+                    MissingNewlineAtEndOfFile,
+                    edit.range(),
+                    &SourceFileBuilder::new(filename, source).finish(),
+                );
+                Message::from_diagnostic(diagnostic.with_fix(Fix::safe_edit(edit)), None)
             })
             .collect()
     }
@@ -194,7 +207,7 @@ mod tests {
             source_map,
         } = apply_fixes(diagnostics.iter(), &locator);
         assert_eq!(code, "");
-        assert_eq!(fixes.values().sum::<usize>(), 0);
+        assert_eq!(fixes.counts().sum::<usize>(), 0);
         assert!(source_map.markers().is_empty());
     }

@@ -231,7 +244,7 @@ print("hello world")
 "#
             .trim()
         );
-        assert_eq!(fixes.values().sum::<usize>(), 1);
+        assert_eq!(fixes.counts().sum::<usize>(), 1);
         assert_eq!(
             source_map.markers(),
             &[
@@ -272,7 +285,7 @@ class A(Bar):
 "
             .trim(),
         );
-        assert_eq!(fixes.values().sum::<usize>(), 1);
+        assert_eq!(fixes.counts().sum::<usize>(), 1);
         assert_eq!(
             source_map.markers(),
             &[
@@ -309,7 +322,7 @@ class A:
 "
             .trim()
         );
-        assert_eq!(fixes.values().sum::<usize>(), 1);
+        assert_eq!(fixes.counts().sum::<usize>(), 1);
         assert_eq!(
             source_map.markers(),
             &[
@@ -350,7 +363,7 @@ class A(object):
 "
             .trim()
         );
-        assert_eq!(fixes.values().sum::<usize>(), 2);
+        assert_eq!(fixes.counts().sum::<usize>(), 2);
         assert_eq!(
             source_map.markers(),
             &[
@@ -392,7 +405,7 @@ class A:
 "
             .trim(),
         );
-        assert_eq!(fixes.values().sum::<usize>(), 1);
+        assert_eq!(fixes.counts().sum::<usize>(), 1);
         assert_eq!(
             source_map.markers(),
             &[

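The `cmp_fix` rewrite trades `Rule` values for rule names but keeps the same ordering contract, and the transitivity warning in its comment still applies: `sort_by` requires a total order, so "this rule always sorts first" has to be expressed as a single comparison key. A condensed model of that constraint, with names and positions invented for illustration:

```rust
use std::cmp::Ordering;

// "redefined-while-unused" must sort before every other fix, and remaining
// ties fall back to start position. Encoding the priority as a key keeps the
// comparator transitive, which `sort_by` requires.
fn cmp_fix(name1: &str, name2: &str, start1: u32, start2: u32) -> Ordering {
    const FIRST: &str = "redefined-while-unused";
    let key = |name: &str| u8::from(name != FIRST); // 0 sorts first
    key(name1).cmp(&key(name2)).then_with(|| start1.cmp(&start2))
}

fn main() {
    let mut fixes = vec![
        ("unused-import", 10),
        ("redefined-while-unused", 40),
        ("unused-variable", 5),
    ];
    fixes.sort_by(|a, b| cmp_fix(a.0, b.0, a.1, b.1));
    assert_eq!(fixes[0].0, "redefined-while-unused");
    println!("{fixes:?}");
}
```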
@@ -1,7 +1,6 @@
 //! Insert statements into Python code.
 use std::ops::Add;

-use ruff_diagnostics::Edit;
 use ruff_python_ast::Stmt;
 use ruff_python_ast::helpers::is_docstring_stmt;
 use ruff_python_codegen::Stylist;
@@ -10,6 +9,7 @@ use ruff_python_trivia::{PythonWhitespace, textwrap::indent};
 use ruff_source_file::{LineRanges, UniversalNewlineIterator};
 use ruff_text_size::{Ranged, TextSize};

+use crate::Edit;
 use crate::Locator;

 #[derive(Debug, Clone, PartialEq, Eq)]

@@ -8,7 +8,6 @@ use std::error::Error;
 use anyhow::Result;
 use libcst_native::{ImportAlias, Name as cstName, NameOrAttribute};

-use ruff_diagnostics::Edit;
 use ruff_python_ast::{self as ast, Expr, ModModule, Stmt};
 use ruff_python_codegen::Stylist;
 use ruff_python_parser::{Parsed, Tokens};
@@ -18,6 +17,7 @@ use ruff_python_semantic::{
 use ruff_python_trivia::textwrap::indent;
 use ruff_text_size::{Ranged, TextSize};

+use crate::Edit;
 use crate::Locator;
 use crate::cst::matchers::{match_aliases, match_import_from, match_statement};
 use crate::fix;

@@ -14,12 +14,17 @@ pub use rule_selector::RuleSelector;
 pub use rule_selector::clap_completion::RuleSelectorParser;
 pub use rules::pycodestyle::rules::IOError;

+pub use diagnostic::OldDiagnostic;
+pub(crate) use ruff_diagnostics::{Applicability, Edit, Fix};
+pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};
+
 pub const VERSION: &str = env!("CARGO_PKG_VERSION");

 mod checkers;
 pub mod codes;
 mod comments;
 mod cst;
+mod diagnostic;
 pub mod directives;
 mod doc_lines;
 mod docstrings;
@@ -45,6 +50,7 @@ pub mod settings;
 pub mod source_kind;
 mod text_helpers;
 pub mod upstream_categories;
+mod violation;

 #[cfg(any(test, fuzzing))]
 pub mod test;

@@ -1,6 +1,5 @@
 use std::borrow::Cow;
-use std::cell::LazyCell;
-use std::ops::Deref;
+use std::collections::hash_map::Entry;
 use std::path::Path;

 use anyhow::{Result, anyhow};
@@ -9,21 +8,22 @@ use itertools::Itertools;
 use ruff_python_parser::semantic_errors::SemanticSyntaxError;
 use rustc_hash::FxHashMap;

-use ruff_diagnostics::Diagnostic;
 use ruff_notebook::Notebook;
 use ruff_python_ast::{ModModule, PySourceType, PythonVersion};
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError};
-use ruff_source_file::SourceFileBuilder;
+use ruff_source_file::{SourceFile, SourceFileBuilder};
 use ruff_text_size::Ranged;

-use crate::checkers::ast::check_ast;
+use crate::OldDiagnostic;
+use crate::checkers::ast::{LintContext, check_ast};
 use crate::checkers::filesystem::check_file_path;
 use crate::checkers::imports::check_imports;
 use crate::checkers::noqa::check_noqa;
 use crate::checkers::physical_lines::check_physical_lines;
 use crate::checkers::tokens::check_tokens;
+use crate::codes::NoqaCode;
 use crate::directives::Directives;
 use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
 use crate::fix::{FixResult, fix_file};
@@ -86,7 +86,53 @@ impl LinterResult {
     }
 }

-pub type FixTable = FxHashMap<Rule, usize>;
+#[derive(Debug, Default, PartialEq)]
+struct FixCount {
+    rule_name: &'static str,
+    count: usize,
+}
+
+/// A mapping from a noqa code to the corresponding lint name and a count of applied fixes.
+#[derive(Debug, Default, PartialEq)]
+pub struct FixTable(FxHashMap<NoqaCode, FixCount>);
+
+impl FixTable {
+    pub fn counts(&self) -> impl Iterator<Item = usize> {
+        self.0.values().map(|fc| fc.count)
+    }
+
+    pub fn entry(&mut self, code: NoqaCode) -> FixTableEntry {
+        FixTableEntry(self.0.entry(code))
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = (NoqaCode, &'static str, usize)> {
+        self.0
+            .iter()
+            .map(|(code, FixCount { rule_name, count })| (*code, *rule_name, *count))
+    }
+
+    pub fn keys(&self) -> impl Iterator<Item = NoqaCode> {
+        self.0.keys().copied()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+}
+
+pub struct FixTableEntry<'a>(Entry<'a, NoqaCode, FixCount>);
+
+impl<'a> FixTableEntry<'a> {
+    pub fn or_default(self, rule_name: &'static str) -> &'a mut usize {
+        &mut (self
+            .0
+            .or_insert(FixCount {
+                rule_name,
+                count: 0,
+            })
+            .count)
+    }
+}

 pub struct FixerResult<'a> {
     /// The result returned by the linter, after applying any fixes.

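The new `FixTable` wraps what used to be a bare `FxHashMap<Rule, usize>`: it is keyed by `NoqaCode` and also remembers each lint's name, so callers must supply the name when they first touch an entry. A reduced, standard-library version of the same API shape (ruff uses `FxHashMap`, and its `NoqaCode` is a struct rather than the bare `&str` used here):

```rust
use std::collections::HashMap;
use std::collections::hash_map::Entry;

#[derive(Debug, Default)]
struct FixCount {
    rule_name: &'static str,
    count: usize,
}

// Keyed by noqa code, e.g. "W292"; each entry keeps the lint name beside the count.
#[derive(Debug, Default)]
struct FixTable(HashMap<&'static str, FixCount>);

impl FixTable {
    fn entry(&mut self, code: &'static str) -> FixTableEntry<'_> {
        FixTableEntry(self.0.entry(code))
    }

    fn counts(&self) -> impl Iterator<Item = usize> + '_ {
        self.0.values().map(|fc| fc.count)
    }
}

struct FixTableEntry<'a>(Entry<'a, &'static str, FixCount>);

impl<'a> FixTableEntry<'a> {
    // Mirrors the diff's `or_default(name)`: the name has to be passed on first
    // insertion because a bare count could not recover it later.
    fn or_default(self, rule_name: &'static str) -> &'a mut usize {
        &mut self.0.or_insert(FixCount { rule_name, count: 0 }).count
    }
}

fn main() {
    let mut fixed = FixTable::default();
    *fixed.entry("W292").or_default("missing-newline-at-end-of-file") += 1;
    *fixed.entry("W292").or_default("missing-newline-at-end-of-file") += 1;
    assert_eq!(fixed.counts().sum::<usize>(), 2);
}
```

This is why call sites in `apply_fixes` and `lint_fix` change from `*fixed.entry(rule).or_default() += 1` to `*fixed.entry(code).or_default(name) += 1`, and why the tests switch from `fixes.values().sum()` to `fixes.counts().sum()`.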
@@ -113,8 +159,11 @@ pub fn check_path(
     parsed: &Parsed<ModModule>,
     target_version: TargetVersion,
 ) -> Vec<Message> {
+    let source_file =
+        SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents()).finish();
+
     // Aggregate all diagnostics.
-    let mut diagnostics = vec![];
+    let mut diagnostics = LintContext::new(&source_file);

     // Aggregate all semantic syntax errors.
     let mut semantic_syntax_errors = vec![];
@@ -136,7 +185,7 @@ pub fn check_path(
         .iter_enabled()
         .any(|rule_code| rule_code.lint_source().is_tokens())
     {
-        diagnostics.extend(check_tokens(
+        check_tokens(
             tokens,
             path,
             locator,
@@ -145,7 +194,8 @@ pub fn check_path(
             settings,
             source_type,
             source_kind.as_ipy_notebook().map(Notebook::cell_offsets),
-        ));
+            &mut diagnostics,
+        );
     }

     // Run the filesystem-based rules.
@@ -154,14 +204,15 @@ pub fn check_path(
         .iter_enabled()
         .any(|rule_code| rule_code.lint_source().is_filesystem())
     {
-        diagnostics.extend(check_file_path(
+        check_file_path(
             path,
             package,
             locator,
             comment_ranges,
             settings,
             target_version.linter_version(),
-        ));
+            &diagnostics,
+        );
     }

     // Run the logical line-based rules.
@@ -170,9 +221,14 @@ pub fn check_path(
         .iter_enabled()
         .any(|rule_code| rule_code.lint_source().is_logical_lines())
     {
-        diagnostics.extend(crate::checkers::logical_lines::check_logical_lines(
-            tokens, locator, indexer, stylist, settings,
-        ));
+        crate::checkers::logical_lines::check_logical_lines(
+            tokens,
+            locator,
+            indexer,
+            stylist,
+            settings,
+            &diagnostics,
+        );
     }

     // Run the AST-based rules only if there are no syntax errors.
@@ -180,7 +236,7 @@ pub fn check_path(
         let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets);
         let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index);

-        let (new_diagnostics, new_semantic_syntax_errors) = check_ast(
+        semantic_syntax_errors.extend(check_ast(
             parsed,
             locator,
             stylist,
@@ -194,9 +250,8 @@ pub fn check_path(
             cell_offsets,
             notebook_index,
             target_version,
-        );
-        diagnostics.extend(new_diagnostics);
-        semantic_syntax_errors.extend(new_semantic_syntax_errors);
+            &diagnostics,
+        ));

         let use_imports = !directives.isort.skip_file
             && settings
@@ -205,7 +260,7 @@ pub fn check_path(
             .any(|rule_code| rule_code.lint_source().is_imports());
         if use_imports || use_doc_lines {
             if use_imports {
-                let import_diagnostics = check_imports(
+                check_imports(
                     parsed,
                     locator,
                     indexer,
@@ -216,9 +271,8 @@ pub fn check_path(
                     source_type,
                     cell_offsets,
                     target_version.linter_version(),
+                    &diagnostics,
                 );
-
-                diagnostics.extend(import_diagnostics);
             }
             if use_doc_lines {
                 doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator));
@@ -238,9 +292,14 @@ pub fn check_path(
         .iter_enabled()
         .any(|rule_code| rule_code.lint_source().is_physical_lines())
     {
-        diagnostics.extend(check_physical_lines(
-            locator, stylist, indexer, &doc_lines, settings,
-        ));
+        check_physical_lines(
+            locator,
+            stylist,
+            indexer,
+            &doc_lines,
+            settings,
+            &diagnostics,
+        );
     }

     // Raise violations for internal test rules

@@ -250,47 +309,70 @@ pub fn check_path(
             if !settings.rules.enabled(*test_rule) {
                 continue;
             }
-            let diagnostic = match test_rule {
+            match test_rule {
                 Rule::StableTestRule => {
-                    test_rules::StableTestRule::diagnostic(locator, comment_ranges)
-                }
-                Rule::StableTestRuleSafeFix => {
-                    test_rules::StableTestRuleSafeFix::diagnostic(locator, comment_ranges)
-                }
-                Rule::StableTestRuleUnsafeFix => {
-                    test_rules::StableTestRuleUnsafeFix::diagnostic(locator, comment_ranges)
+                    test_rules::StableTestRule::diagnostic(locator, comment_ranges, &diagnostics);
                 }
+                Rule::StableTestRuleSafeFix => test_rules::StableTestRuleSafeFix::diagnostic(
+                    locator,
+                    comment_ranges,
+                    &diagnostics,
+                ),
+                Rule::StableTestRuleUnsafeFix => test_rules::StableTestRuleUnsafeFix::diagnostic(
+                    locator,
+                    comment_ranges,
+                    &diagnostics,
+                ),
                 Rule::StableTestRuleDisplayOnlyFix => {
-                    test_rules::StableTestRuleDisplayOnlyFix::diagnostic(locator, comment_ranges)
+                    test_rules::StableTestRuleDisplayOnlyFix::diagnostic(
+                        locator,
+                        comment_ranges,
+                        &diagnostics,
+                    );
                 }
                 Rule::PreviewTestRule => {
-                    test_rules::PreviewTestRule::diagnostic(locator, comment_ranges)
+                    test_rules::PreviewTestRule::diagnostic(locator, comment_ranges, &diagnostics);
                 }
                 Rule::DeprecatedTestRule => {
-                    test_rules::DeprecatedTestRule::diagnostic(locator, comment_ranges)
+                    test_rules::DeprecatedTestRule::diagnostic(
+                        locator,
+                        comment_ranges,
+                        &diagnostics,
+                    );
                 }
                 Rule::AnotherDeprecatedTestRule => {
-                    test_rules::AnotherDeprecatedTestRule::diagnostic(locator, comment_ranges)
+                    test_rules::AnotherDeprecatedTestRule::diagnostic(
+                        locator,
+                        comment_ranges,
+                        &diagnostics,
+                    );
                 }
                 Rule::RemovedTestRule => {
-                    test_rules::RemovedTestRule::diagnostic(locator, comment_ranges)
-                }
-                Rule::AnotherRemovedTestRule => {
-                    test_rules::AnotherRemovedTestRule::diagnostic(locator, comment_ranges)
-                }
-                Rule::RedirectedToTestRule => {
-                    test_rules::RedirectedToTestRule::diagnostic(locator, comment_ranges)
-                }
-                Rule::RedirectedFromTestRule => {
-                    test_rules::RedirectedFromTestRule::diagnostic(locator, comment_ranges)
+                    test_rules::RemovedTestRule::diagnostic(locator, comment_ranges, &diagnostics);
                 }
+                Rule::AnotherRemovedTestRule => test_rules::AnotherRemovedTestRule::diagnostic(
+                    locator,
+                    comment_ranges,
+                    &diagnostics,
+                ),
+                Rule::RedirectedToTestRule => test_rules::RedirectedToTestRule::diagnostic(
+                    locator,
+                    comment_ranges,
+                    &diagnostics,
+                ),
+                Rule::RedirectedFromTestRule => test_rules::RedirectedFromTestRule::diagnostic(
+                    locator,
+                    comment_ranges,
+                    &diagnostics,
+                ),
                 Rule::RedirectedFromPrefixTestRule => {
-                    test_rules::RedirectedFromPrefixTestRule::diagnostic(locator, comment_ranges)
+                    test_rules::RedirectedFromPrefixTestRule::diagnostic(
+                        locator,
+                        comment_ranges,
+                        &diagnostics,
+                    );
                 }
                 _ => unreachable!("All test rules must have an implementation"),
-            };
-            if let Some(diagnostic) = diagnostic {
-                diagnostics.push(diagnostic);
             }
         }
     }
@@ -308,7 +390,9 @@ pub fn check_path(
         RuleSet::empty()
     };
     if !per_file_ignores.is_empty() {
-        diagnostics.retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule()));
+        diagnostics
+            .as_mut_vec()
+            .retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule()));
     }

     // Enforce `noqa` directives.

@@ -330,11 +414,13 @@ pub fn check_path(
     );
     if noqa.is_enabled() {
         for index in ignored.iter().rev() {
-            diagnostics.swap_remove(*index);
+            diagnostics.as_mut_vec().swap_remove(*index);
         }
     }
 }

+    let mut diagnostics = diagnostics.into_diagnostics();
+
     if parsed.has_valid_syntax() {
         // Remove fixes for any rules marked as unfixable.
         for diagnostic in &mut diagnostics {
@@ -372,9 +458,9 @@ pub fn check_path(
         parsed.errors(),
         syntax_errors,
         &semantic_syntax_errors,
-        path,
         locator,
         directives,
+        &source_file,
     )
 }

@@ -438,7 +524,7 @@ pub fn add_noqa_to_path(
     )
 }

-/// Generate a [`Message`] for each [`Diagnostic`] triggered by the given source
+/// Generate a [`Message`] for each [`OldDiagnostic`] triggered by the given source
 /// code.
 pub fn lint_only(
     path: &Path,
@@ -503,39 +589,28 @@ pub fn lint_only(

 /// Convert from diagnostics to messages.
 fn diagnostics_to_messages(
-    diagnostics: Vec<Diagnostic>,
+    diagnostics: Vec<OldDiagnostic>,
     parse_errors: &[ParseError],
     unsupported_syntax_errors: &[UnsupportedSyntaxError],
     semantic_syntax_errors: &[SemanticSyntaxError],
-    path: &Path,
     locator: &Locator,
     directives: &Directives,
+    source_file: &SourceFile,
 ) -> Vec<Message> {
-    let file = LazyCell::new(|| {
-        let mut builder =
-            SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents());
-
-        if let Some(line_index) = locator.line_index() {
-            builder.set_line_index(line_index.clone());
-        }
-
-        builder.finish()
-    });
-
     parse_errors
         .iter()
-        .map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone()))
+        .map(|parse_error| Message::from_parse_error(parse_error, locator, source_file.clone()))
         .chain(unsupported_syntax_errors.iter().map(|syntax_error| {
-            Message::from_unsupported_syntax_error(syntax_error, file.deref().clone())
+            Message::from_unsupported_syntax_error(syntax_error, source_file.clone())
         }))
         .chain(
             semantic_syntax_errors
                 .iter()
-                .map(|error| Message::from_semantic_syntax_error(error, file.deref().clone())),
+                .map(|error| Message::from_semantic_syntax_error(error, source_file.clone())),
         )
         .chain(diagnostics.into_iter().map(|diagnostic| {
             let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start());
-            Message::from_diagnostic(diagnostic, file.deref().clone(), Some(noqa_offset))
+            Message::from_diagnostic(diagnostic, Some(noqa_offset))
         }))
         .collect()
 }
@@ -554,7 +629,7 @@ pub fn lint_fix<'a>(
     let mut transformed = Cow::Borrowed(source_kind);

     // Track the number of fixed errors across iterations.
-    let mut fixed = FxHashMap::default();
+    let mut fixed = FixTable::default();

     // As an escape hatch, bail after 100 iterations.
     let mut iterations = 0;
@@ -623,12 +698,7 @@ pub fn lint_fix<'a>(
         // syntax error. Return the original code.
         if has_valid_syntax && has_no_syntax_errors {
             if let Some(error) = parsed.errors().first() {
-                report_fix_syntax_error(
-                    path,
-                    transformed.source_code(),
-                    error,
-                    fixed.keys().copied(),
-                );
+                report_fix_syntax_error(path, transformed.source_code(), error, fixed.keys());
                 return Err(anyhow!("Fix introduced a syntax error"));
             }
         }
@@ -643,8 +713,8 @@ pub fn lint_fix<'a>(
         {
             if iterations < MAX_ITERATIONS {
                 // Count the number of fixed errors.
-                for (rule, count) in applied {
-                    *fixed.entry(rule).or_default() += count;
+                for (rule, name, count) in applied.iter() {
+                    *fixed.entry(rule).or_default(name) += count;
                 }

                 transformed = Cow::Owned(transformed.updated(fixed_contents, &source_map));
@@ -671,10 +741,10 @@ pub fn lint_fix<'a>(
     }
 }

-fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
+fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String {
     rules
         .into_iter()
-        .map(|rule| rule.noqa_code().to_string())
+        .map(|rule| rule.to_string())
         .sorted_unstable()
         .dedup()
         .join(", ")
@@ -682,7 +752,7 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {

 #[expect(clippy::print_stderr)]
 fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
-    let codes = collect_rule_codes(messages.iter().filter_map(Message::to_rule));
+    let codes = collect_rule_codes(messages.iter().filter_map(Message::noqa_code));
     if cfg!(debug_assertions) {
         eprintln!(
             "{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
@@ -718,7 +788,7 @@ fn report_fix_syntax_error(
     path: &Path,
     transformed: &str,
     error: &ParseError,
-    rules: impl IntoIterator<Item = Rule>,
+    rules: impl IntoIterator<Item = NoqaCode>,
 ) {
     let codes = collect_rule_codes(rules);
     if cfg!(debug_assertions) {

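In `check_path` and `diagnostics_to_messages`, the `LazyCell`-built `SourceFile` disappears: the file is now constructed once at the top of `check_path` and handed down by reference, with each message cloning the handle. A small sketch of that shape, using `Rc<String>` to stand in for the real, internally shared `SourceFile`:

```rust
use std::rc::Rc;

// Stand-in for a cheaply clonable source-file handle.
#[derive(Clone, Debug)]
struct SourceFile(Rc<String>);

fn diagnostics_to_messages(
    bodies: Vec<String>,
    source_file: &SourceFile,
) -> Vec<(String, SourceFile)> {
    // Every message clones the same handle; no lazy construction is needed.
    bodies
        .into_iter()
        .map(|body| (body, source_file.clone()))
        .collect()
}

fn main() {
    // Built once, up front, then shared by every diagnostic and message.
    let source_file = SourceFile(Rc::new("import os\n".to_string()));
    let messages = diagnostics_to_messages(
        vec!["`os` imported but unused".into()],
        &source_file,
    );
    assert_eq!(messages.len(), 1);
    println!("{messages:?}");
}
```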
@@ -33,7 +33,7 @@ impl Emitter for AzureEmitter {
                 line = location.line,
                 col = location.column,
                 code = message
-                    .to_noqa_code()
+                    .noqa_code()
                     .map_or_else(String::new, |code| format!("code={code};")),
                 body = message.body(),
             )?;

@@ -6,11 +6,11 @@ use colored::{Color, ColoredString, Colorize, Styles};
 use ruff_text_size::{Ranged, TextRange, TextSize};
 use similar::{ChangeTag, TextDiff};

-use ruff_diagnostics::{Applicability, Fix};
 use ruff_source_file::{OneIndexed, SourceFile};

 use crate::message::Message;
 use crate::text_helpers::ShowNonprinting;
+use crate::{Applicability, Fix};

 /// Renders a diff that shows the code fixes.
 ///

@@ -33,7 +33,7 @@ impl Emitter for GithubEmitter {
                 writer,
                 "::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
                 code = message
-                    .to_noqa_code()
+                    .noqa_code()
                     .map_or_else(String::new, |code| format!(" ({code})")),
                 file = message.filename(),
                 row = source_location.line,
@@ -50,7 +50,7 @@ impl Emitter for GithubEmitter {
                 column = location.column,
             )?;

-            if let Some(code) = message.to_noqa_code() {
+            if let Some(code) = message.noqa_code() {
                 write!(writer, " {code}")?;
             }

@@ -90,7 +90,7 @@ impl Serialize for SerializedMessages<'_> {
             }
             fingerprints.insert(message_fingerprint);

-            let (description, check_name) = if let Some(code) = message.to_noqa_code() {
+            let (description, check_name) = if let Some(code) = message.noqa_code() {
                 (message.body().to_string(), code.to_string())
             } else {
                 let description = message.body();

@@ -4,11 +4,11 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use serde_json::{Value, json};

-use ruff_diagnostics::Edit;
 use ruff_notebook::NotebookIndex;
 use ruff_source_file::{LineColumn, OneIndexed, SourceCode};
 use ruff_text_size::Ranged;

+use crate::Edit;
 use crate::message::{Emitter, EmitterContext, Message};

 #[derive(Default)]
@@ -81,8 +81,8 @@ pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext)
     }

     json!({
-        "code": message.to_noqa_code().map(|code| code.to_string()),
-        "url": message.to_rule().and_then(|rule| rule.url()),
+        "code": message.noqa_code().map(|code| code.to_string()),
+        "url": message.to_url(),
         "message": message.body(),
         "fix": fix,
         "cell": notebook_cell_index,

@@ -59,7 +59,7 @@ impl Emitter for JunitEmitter {
                 body = message.body()
             ));
             let mut case = TestCase::new(
-                if let Some(code) = message.to_noqa_code() {
+                if let Some(code) = message.noqa_code() {
                     format!("org.ruff.{code}")
                 } else {
                     "org.ruff".to_string()

@@ -16,7 +16,6 @@ pub use json_lines::JsonLinesEmitter;
 pub use junit::JunitEmitter;
 pub use pylint::PylintEmitter;
 pub use rdjson::RdjsonEmitter;
-use ruff_diagnostics::{Diagnostic, Fix};
 use ruff_notebook::NotebookIndex;
 use ruff_python_parser::{ParseError, UnsupportedSyntaxError};
 use ruff_source_file::{LineColumn, SourceFile};
@@ -28,6 +27,7 @@ use crate::Locator;
 use crate::codes::NoqaCode;
 use crate::logging::DisplayParseErrorType;
 use crate::registry::Rule;
+use crate::{Fix, OldDiagnostic};

 mod azure;
 mod diff;
@@ -50,7 +50,7 @@ mod text;
 /// `noqa` offsets.
 ///
 /// For diagnostic messages, the [`db::Diagnostic`]'s primary message contains the
-/// [`Diagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
+/// [`OldDiagnostic::body`], and the primary annotation optionally contains the suggestion accompanying
 /// a fix. The `db::Diagnostic::id` field contains the kebab-case lint name derived from the `Rule`.
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Message {
@@ -60,6 +60,7 @@ pub struct Message {
     pub fix: Option<Fix>,
     pub parent: Option<TextSize>,
     pub(crate) noqa_offset: Option<TextSize>,
+    noqa_code: Option<NoqaCode>,
 }

 impl Message {
@@ -76,12 +77,12 @@ impl Message {
             fix: None,
             parent: None,
             noqa_offset: None,
+            noqa_code: None,
         }
     }

     #[expect(clippy::too_many_arguments)]
     pub fn diagnostic(
-        name: &'static str,
         body: String,
         suggestion: Option<String>,
         range: TextRange,
@@ -89,9 +90,10 @@ impl Message {
         parent: Option<TextSize>,
         file: SourceFile,
         noqa_offset: Option<TextSize>,
+        rule: Rule,
     ) -> Message {
         let mut diagnostic = db::Diagnostic::new(
-            DiagnosticId::Lint(LintName::of(name)),
+            DiagnosticId::Lint(LintName::of(rule.into())),
             Severity::Error,
             body,
         );
@@ -107,25 +109,22 @@ impl Message {
             fix,
             parent,
             noqa_offset,
+            noqa_code: Some(rule.noqa_code()),
         }
     }

-    /// Create a [`Message`] from the given [`Diagnostic`] corresponding to a rule violation.
-    pub fn from_diagnostic(
-        diagnostic: Diagnostic,
-        file: SourceFile,
-        noqa_offset: Option<TextSize>,
-    ) -> Message {
-        let Diagnostic {
-            name,
+    /// Create a [`Message`] from the given [`OldDiagnostic`] corresponding to a rule violation.
+    pub fn from_diagnostic(diagnostic: OldDiagnostic, noqa_offset: Option<TextSize>) -> Message {
+        let OldDiagnostic {
             body,
             suggestion,
             range,
             fix,
             parent,
+            rule,
+            file,
         } = diagnostic;
         Self::diagnostic(
-            name,
             body,
             suggestion,
             range,
@@ -133,6 +132,7 @@ impl Message {
             parent,
             file,
             noqa_offset,
+            rule,
         )
     }

@@ -224,30 +224,22 @@ impl Message {
         self.fix().is_some()
     }

-    /// Returns the [`Rule`] corresponding to the diagnostic message.
-    pub fn to_rule(&self) -> Option<Rule> {
-        if self.is_syntax_error() {
-            None
-        } else {
-            Some(self.name().parse().expect("Expected a valid rule name"))
-        }
-    }
-
     /// Returns the [`NoqaCode`] corresponding to the diagnostic message.
-    pub fn to_noqa_code(&self) -> Option<NoqaCode> {
-        self.to_rule().map(|rule| rule.noqa_code())
+    pub fn noqa_code(&self) -> Option<NoqaCode> {
+        self.noqa_code
     }

     /// Returns the URL for the rule documentation, if it exists.
     pub fn to_url(&self) -> Option<String> {
-        // TODO(brent) Rule::url calls Rule::explanation, which calls ViolationMetadata::explain,
-        // which when derived (seems always to be the case?) is always `Some`, so I think it's
-        // pretty safe to inline the Rule::url implementation here, using `self.name()`:
-        //
-        // format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.name())
-        //
-        // at least in the case of diagnostics, I guess syntax errors will return None
-        self.to_rule().and_then(|rule| rule.url())
+        if self.is_syntax_error() {
+            None
+        } else {
+            Some(format!(
+                "{}/rules/{}",
+                env!("CARGO_PKG_HOMEPAGE"),
+                self.name()
+            ))
+        }
     }

     /// Returns the filename for the message.

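`Message` now resolves its `NoqaCode` once, at construction, and `to_url` stops round-tripping through `Rule`; both accessors become cheap reads. In miniature (types heavily simplified relative to ruff's):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct NoqaCode(&'static str, &'static str); // linter prefix + numeric suffix

#[derive(Debug)]
struct Message {
    name: &'static str,
    noqa_code: Option<NoqaCode>, // None for syntax errors
}

impl Message {
    fn diagnostic(name: &'static str, noqa_code: NoqaCode) -> Self {
        // The code is computed once from the rule at construction time...
        Self {
            name,
            noqa_code: Some(noqa_code),
        }
    }

    fn syntax_error(name: &'static str) -> Self {
        Self {
            name,
            noqa_code: None,
        }
    }

    // ...so the accessor is a field read, not a parse of `self.name`.
    fn noqa_code(&self) -> Option<NoqaCode> {
        self.noqa_code
    }

    fn to_url(&self, homepage: &str) -> Option<String> {
        self.noqa_code
            .map(|_| format!("{homepage}/rules/{}", self.name))
    }
}

fn main() {
    let msg = Message::diagnostic("unused-import", NoqaCode("F", "401"));
    assert_eq!(msg.noqa_code(), Some(NoqaCode("F", "401")));
    assert_eq!(Message::syntax_error("invalid-syntax").noqa_code(), None);
    println!("{:?}", msg.to_url("https://docs.astral.sh/ruff"));
}
```

The emitter hunks that follow (azure, github, gitlab, json, junit, pylint, rdjson, sarif, text) are then a mechanical rename from `to_noqa_code()`/`to_rule()` to the cached `noqa_code()`.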
@@ -371,7 +363,8 @@ impl<'a> EmitterContext<'a> {
 mod tests {
     use rustc_hash::FxHashMap;

-    use ruff_diagnostics::{Edit, Fix};
+    use crate::codes::Rule;
+    use crate::{Edit, Fix};
     use ruff_notebook::NotebookIndex;
     use ruff_python_parser::{Mode, ParseOptions, parse_unchecked};
     use ruff_source_file::{OneIndexed, SourceFileBuilder};
@@ -417,7 +410,6 @@ def fibonacci(n):

         let unused_import_start = TextSize::from(7);
         let unused_import = Message::diagnostic(
-            "unused-import",
             "`os` imported but unused".to_string(),
             Some("Remove unused import: `os`".to_string()),
             TextRange::new(unused_import_start, TextSize::from(9)),
@@ -428,11 +420,11 @@ def fibonacci(n):
             None,
             fib_source.clone(),
             Some(unused_import_start),
+            Rule::UnusedImport,
         );

         let unused_variable_start = TextSize::from(94);
         let unused_variable = Message::diagnostic(
-            "unused-variable",
             "Local variable `x` is assigned to but never used".to_string(),
             Some("Remove assignment to unused variable `x`".to_string()),
             TextRange::new(unused_variable_start, TextSize::from(95)),
@@ -443,13 +435,13 @@ def fibonacci(n):
             None,
             fib_source,
             Some(unused_variable_start),
+            Rule::UnusedVariable,
         );

         let file_2 = r"if a == 1: pass";

         let undefined_name_start = TextSize::from(3);
         let undefined_name = Message::diagnostic(
-            "undefined-name",
             "Undefined name `a`".to_string(),
             None,
             TextRange::new(undefined_name_start, TextSize::from(4)),
@@ -457,6 +449,7 @@ def fibonacci(n):
             None,
             SourceFileBuilder::new("undef.py", file_2).finish(),
             Some(undefined_name_start),
+            Rule::UndefinedName,
         );

         vec![unused_import, unused_variable, undefined_name]
@@ -479,7 +472,6 @@ def foo():

         let unused_import_os_start = TextSize::from(16);
         let unused_import_os = Message::diagnostic(
-            "unused-import",
             "`os` imported but unused".to_string(),
             Some("Remove unused import: `os`".to_string()),
             TextRange::new(unused_import_os_start, TextSize::from(18)),
@@ -490,11 +482,11 @@ def foo():
             None,
             notebook_source.clone(),
             Some(unused_import_os_start),
+            Rule::UnusedImport,
         );

         let unused_import_math_start = TextSize::from(35);
         let unused_import_math = Message::diagnostic(
-            "unused-import",
             "`math` imported but unused".to_string(),
             Some("Remove unused import: `math`".to_string()),
             TextRange::new(unused_import_math_start, TextSize::from(39)),
@@ -505,11 +497,11 @@ def foo():
             None,
             notebook_source.clone(),
             Some(unused_import_math_start),
+            Rule::UnusedImport,
         );

         let unused_variable_start = TextSize::from(98);
         let unused_variable = Message::diagnostic(
-            "unused-variable",
             "Local variable `x` is assigned to but never used".to_string(),
             Some("Remove assignment to unused variable `x`".to_string()),
             TextRange::new(unused_variable_start, TextSize::from(99)),
@@ -520,6 +512,7 @@ def foo():
             None,
             notebook_source,
             Some(unused_variable_start),
+            Rule::UnusedVariable,
         );

         let mut notebook_indexes = FxHashMap::default();

@@ -26,7 +26,7 @@ impl Emitter for PylintEmitter {
             message.compute_start_location().line
         };

-        let body = if let Some(code) = message.to_noqa_code() {
+        let body = if let Some(code) = message.noqa_code() {
             format!("[{code}] {body}", body = message.body())
         } else {
             message.body().to_string()

@@ -4,10 +4,10 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use serde_json::{Value, json};

-use ruff_diagnostics::Edit;
 use ruff_source_file::SourceCode;
 use ruff_text_size::Ranged;

+use crate::Edit;
 use crate::message::{Emitter, EmitterContext, LineColumn, Message};

 #[derive(Default)]
@@ -71,7 +71,7 @@ fn message_to_rdjson_value(message: &Message) -> Value {
                 "range": rdjson_range(start_location, end_location),
             },
             "code": {
-                "value": message.to_noqa_code().map(|code| code.to_string()),
+                "value": message.noqa_code().map(|code| code.to_string()),
                 "url": message.to_url(),
             },
             "suggestions": rdjson_suggestions(fix.edits(), &source_code),
@@ -84,7 +84,7 @@ fn message_to_rdjson_value(message: &Message) -> Value {
                 "range": rdjson_range(start_location, end_location),
             },
             "code": {
-                "value": message.to_noqa_code().map(|code| code.to_string()),
+                "value": message.noqa_code().map(|code| code.to_string()),
                 "url": message.to_url(),
             },
         })

@@ -8,7 +8,7 @@ use serde_json::json;
use ruff_source_file::OneIndexed;

use crate::VERSION;
-use crate::codes::Rule;
+use crate::codes::NoqaCode;
use crate::fs::normalize_path;
use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::{Linter, RuleNamespace};
@@ -27,7 +27,7 @@ impl Emitter for SarifEmitter {
.map(SarifResult::from_message)
.collect::<Result<Vec<_>>>()?;

-let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.rule).collect();
+let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect();
let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect();
rules.sort_by(|a, b| a.code.cmp(&b.code));

@@ -61,13 +61,19 @@ struct SarifRule<'a> {
url: Option<String>,
}

-impl From<Rule> for SarifRule<'_> {
-fn from(rule: Rule) -> Self {
-let code = rule.noqa_code().to_string();
-let (linter, _) = Linter::parse_code(&code).unwrap();
+impl From<NoqaCode> for SarifRule<'_> {
+fn from(code: NoqaCode) -> Self {
+let code_str = code.to_string();
+// This is a manual re-implementation of Rule::from_code, but we also want the Linter. This
+// avoids calling Linter::parse_code twice.
+let (linter, suffix) = Linter::parse_code(&code_str).unwrap();
+let rule = linter
+.all_rules()
+.find(|rule| rule.noqa_code().suffix() == suffix)
+.expect("Expected a valid noqa code corresponding to a rule");
Self {
name: rule.into(),
-code,
+code: code_str,
linter: linter.name(),
summary: rule.message_formats()[0],
explanation: rule.explanation(),
@@ -106,7 +112,7 @@ impl Serialize for SarifRule<'_> {

#[derive(Debug)]
struct SarifResult {
-rule: Option<Rule>,
+code: Option<NoqaCode>,
level: String,
message: String,
uri: String,
@@ -123,7 +129,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
-rule: message.to_rule(),
+code: message.noqa_code(),
level: "error".to_string(),
message: message.body().to_string(),
uri: url::Url::from_file_path(&path)
@@ -143,7 +149,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
-rule: message.to_rule(),
+code: message.noqa_code(),
level: "error".to_string(),
message: message.body().to_string(),
uri: path.display().to_string(),
@@ -178,7 +184,7 @@ impl Serialize for SarifResult {
}
}
}],
-"ruleId": self.rule.map(|rule| rule.noqa_code().to_string()),
+"ruleId": self.code.map(|code| code.to_string()),
})
.serialize(serializer)
}
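Aside: the `From<NoqaCode>` impl above re-derives the rule by splitting the code string into a linter prefix and a numeric suffix, then searching that linter's rules. A toy sketch of the parse step, with a hypothetical two-variant `Linter` standing in for Ruff's real registry:

```rust
// Hypothetical stand-in for Ruff's `Linter::parse_code`: split a code like
// "F401" into (linter, suffix) by longest-prefix match.
#[derive(Debug, PartialEq)]
enum Linter {
    Pyflakes,
    Pycodestyle,
}

fn parse_code(code: &str) -> Option<(Linter, &str)> {
    if let Some(suffix) = code.strip_prefix('F') {
        Some((Linter::Pyflakes, suffix))
    } else if let Some(suffix) = code.strip_prefix('E') {
        Some((Linter::Pycodestyle, suffix))
    } else {
        None
    }
}

fn main() {
    assert_eq!(parse_code("F401"), Some((Linter::Pyflakes, "401")));
    assert_eq!(parse_code("E741"), Some((Linter::Pycodestyle, "741")));
}
```

Deduplicating through a `HashSet` before `SarifRule::from`, as the emitter hunk does, keeps the per-code lookup from running once per result.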
@@ -151,7 +151,7 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) {
-if let Some(code) = self.message.to_noqa_code() {
+if let Some(code) = self.message.noqa_code() {
write!(f, "{} ", code.to_string().red().bold())?;
}
return write!(
@@ -164,7 +164,7 @@ impl Display for RuleCodeAndBody<'_> {
}
}

-if let Some(code) = self.message.to_noqa_code() {
+if let Some(code) = self.message.noqa_code() {
write!(
f,
"{code} {body}",
@@ -254,7 +254,7 @@ impl Display for MessageCodeFrame<'_> {

let label = self
.message
-.to_noqa_code()
+.noqa_code()
.map_or_else(String::new, |code| code.to_string());

let line_start = self.notebook_index.map_or_else(
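Aside: the text emitter prints the code in bold red before the body. A two-line reproduction, assuming the `colored` crate, whose `.red().bold()` chain matches the call above:

```rust
// Assumes `colored = "2"` in Cargo.toml.
use colored::Colorize;

fn main() {
    let code = "F841";
    let body = "Local variable `x` is assigned to but never used";
    println!("{} {}", code.red().bold(), body);
}
```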
@@ -9,16 +9,17 @@ use anyhow::Result;
use itertools::Itertools;
use log::warn;

-use ruff_diagnostics::Edit;
use ruff_python_trivia::{CommentRanges, Cursor, indentation_at_offset};
use ruff_source_file::{LineEnding, LineRanges};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::FxHashSet;

+use crate::Edit;
use crate::Locator;
+use crate::codes::NoqaCode;
use crate::fs::relativize_path;
use crate::message::Message;
-use crate::registry::{Rule, RuleSet};
+use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;

/// Generates an array of edits that matches the length of `messages`.
@@ -780,7 +781,7 @@ fn build_noqa_edits_by_diagnostic(
if let Some(noqa_edit) = generate_noqa_edit(
comment.directive,
comment.line,
-RuleSet::from_rule(comment.rule),
+FxHashSet::from_iter([comment.code]),
locator,
line_ending,
) {
@@ -816,7 +817,7 @@ fn build_noqa_edits_by_line<'a>(
offset,
matches
.into_iter()
-.map(|NoqaComment { rule, .. }| rule)
+.map(|NoqaComment { code, .. }| code)
.collect(),
locator,
line_ending,
@@ -829,7 +830,7 @@

struct NoqaComment<'a> {
line: TextSize,
-rule: Rule,
+code: NoqaCode,
directive: Option<&'a Directive<'a>>,
}
@@ -845,13 +846,11 @@ fn find_noqa_comments<'a>(

// Mark any non-ignored diagnostics.
for message in messages {
-let Some(rule) = message.to_rule() else {
+let Some(code) = message.noqa_code() else {
comments_by_line.push(None);
continue;
};

-let code = rule.noqa_code();
-
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives.
@@ -900,7 +899,7 @@ fn find_noqa_comments<'a>(
if !codes.includes(code) {
comments_by_line.push(Some(NoqaComment {
line: directive_line.start(),
-rule,
+code,
directive: Some(directive),
}));
}
@@ -912,7 +911,7 @@
// There's no existing noqa directive that suppresses the diagnostic.
comments_by_line.push(Some(NoqaComment {
line: locator.line_start(noqa_offset),
-rule,
+code,
directive: None,
}));
}
@@ -922,7 +921,7 @@

struct NoqaEdit<'a> {
edit_range: TextRange,
-rules: RuleSet,
+noqa_codes: FxHashSet<NoqaCode>,
codes: Option<&'a Codes<'a>>,
line_ending: LineEnding,
}
@@ -941,18 +940,15 @@ impl NoqaEdit<'_> {
Some(codes) => {
push_codes(
writer,
-self.rules
+self.noqa_codes
.iter()
-.map(|rule| rule.noqa_code().to_string())
+.map(ToString::to_string)
.chain(codes.iter().map(ToString::to_string))
.sorted_unstable(),
);
}
None => {
-push_codes(
-writer,
-self.rules.iter().map(|rule| rule.noqa_code().to_string()),
-);
+push_codes(writer, self.noqa_codes.iter().map(ToString::to_string));
}
}
write!(writer, "{}", self.line_ending.as_str()).unwrap();
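Aside: the `Some(codes)` arm above merges the newly required codes with the ones already on the directive, then sorts, which is what produces the `# noqa: E741, F841` expectations in the tests further down. A stand-in sketch (`noqa_line` is a hypothetical simplification of `push_codes` plus the surrounding writer):

```rust
// Hypothetical simplification: merge existing directive codes with newly
// required ones, sort, dedupe, and render the `# noqa` comment.
use std::collections::HashSet;

fn noqa_line(existing: &[&str], new: HashSet<&str>) -> String {
    let mut codes: Vec<&str> = existing.iter().copied().chain(new).collect();
    codes.sort_unstable();
    codes.dedup();
    format!("# noqa: {}", codes.join(", "))
}

fn main() {
    let new: HashSet<&str> = ["F841"].into_iter().collect();
    assert_eq!(noqa_line(&["E741"], new), "# noqa: E741, F841");
}
```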
@@ -968,7 +964,7 @@ impl Ranged for NoqaEdit<'_> {
fn generate_noqa_edit<'a>(
directive: Option<&'a Directive>,
offset: TextSize,
-rules: RuleSet,
+noqa_codes: FxHashSet<NoqaCode>,
locator: &Locator,
line_ending: LineEnding,
) -> Option<NoqaEdit<'a>> {
@@ -997,7 +993,7 @@ fn generate_noqa_edit<'a>(

Some(NoqaEdit {
edit_range,
-rules,
+noqa_codes,
codes,
line_ending,
})
@@ -1221,7 +1217,6 @@ mod tests {

use insta::assert_debug_snapshot;

-use ruff_diagnostics::{Diagnostic, Edit};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::{LineEnding, SourceFileBuilder};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -1234,6 +1229,7 @@ mod tests {
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
use crate::rules::pyflakes::rules::UnusedVariable;
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
+use crate::{Edit, OldDiagnostic};
use crate::{Locator, generate_noqa_edits};

fn assert_lexed_ranges_match_slices(
@@ -1252,14 +1248,9 @@ mod tests {
}

/// Create a [`Message`] with a placeholder filename and rule code from `diagnostic`.
-fn message_from_diagnostic(
-diagnostic: Diagnostic,
-path: impl AsRef<Path>,
-source: &str,
-) -> Message {
+fn message_from_diagnostic(diagnostic: OldDiagnostic) -> Message {
let noqa_offset = diagnostic.start();
-let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish();
-Message::from_diagnostic(diagnostic, file, Some(noqa_offset))
+Message::from_diagnostic(diagnostic, Some(noqa_offset))
}

#[test]
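Aside: the test helper shrinks to a single parameter because `OldDiagnostic` now carries its source file. A sketch with hypothetical stand-in types (the real ones live in `ruff_linter`) showing why `path` and `source` could be dropped:

```rust
// Hypothetical stand-ins: the point is only that the diagnostic owns its
// file, so the helper just forwards it along with the noqa offset.
#[allow(dead_code)]
struct SourceFile { name: String, source: String }

struct OldDiagnostic { #[allow(dead_code)] file: SourceFile, start: usize }

impl OldDiagnostic {
    fn start(&self) -> usize { self.start }
}

struct Message { #[allow(dead_code)] diagnostic: OldDiagnostic, noqa_offset: Option<usize> }

impl Message {
    fn from_diagnostic(diagnostic: OldDiagnostic, noqa_offset: Option<usize>) -> Self {
        Message { diagnostic, noqa_offset }
    }
}

fn message_from_diagnostic(diagnostic: OldDiagnostic) -> Message {
    let noqa_offset = diagnostic.start();
    Message::from_diagnostic(diagnostic, Some(noqa_offset))
}

fn main() {
    let file = SourceFile { name: "test.py".into(), source: "x = 1".into() };
    let message = message_from_diagnostic(OldDiagnostic { file, start: 0 });
    assert_eq!(message.noqa_offset, Some(0));
}
```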
@@ -2842,13 +2833,15 @@ mod tests {
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));

-let messages = [Diagnostic::new(
+let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
+let messages = [OldDiagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
+&source_file,
)]
-.map(|d| message_from_diagnostic(d, path, contents));
+.map(message_from_diagnostic);

let contents = "x = 1";
let noqa_line_for = NoqaMapping::default();
@@ -2864,19 +2857,22 @@
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");

+let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
let messages = [
-Diagnostic::new(
+OldDiagnostic::new(
AmbiguousVariableName("x".to_string()),
TextRange::new(TextSize::from(0), TextSize::from(0)),
+&source_file,
),
-Diagnostic::new(
+OldDiagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
+&source_file,
),
]
-.map(|d| message_from_diagnostic(d, path, contents));
+.map(message_from_diagnostic);
let contents = "x = 1 # noqa: E741\n";
let noqa_line_for = NoqaMapping::default();
let comment_ranges =
@@ -2893,19 +2889,22 @@
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");

+let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
let messages = [
-Diagnostic::new(
+OldDiagnostic::new(
AmbiguousVariableName("x".to_string()),
TextRange::new(TextSize::from(0), TextSize::from(0)),
+&source_file,
),
-Diagnostic::new(
+OldDiagnostic::new(
UnusedVariable {
name: "x".to_string(),
},
TextRange::new(TextSize::from(0), TextSize::from(0)),
+&source_file,
),
]
-.map(|d| message_from_diagnostic(d, path, contents));
+.map(message_from_diagnostic);
let contents = "x = 1 # noqa";
let noqa_line_for = NoqaMapping::default();
let comment_ranges =
@@ -2936,11 +2935,13 @@ print(
)
"#;
let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect();
-let messages = [Diagnostic::new(
+let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
+let messages = [OldDiagnostic::new(
PrintfStringFormatting,
TextRange::new(12.into(), 79.into()),
+&source_file,
)]
-.map(|d| message_from_diagnostic(d, path, source));
+.map(message_from_diagnostic);
let comment_ranges = CommentRanges::default();
let edits = generate_noqa_edits(
path,
@@ -2968,11 +2969,13 @@ print(
foo;
bar =
";
-let messages = [Diagnostic::new(
+let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish();
+let messages = [OldDiagnostic::new(
UselessSemicolon,
TextRange::new(4.into(), 5.into()),
+&source_file,
)]
-.map(|d| message_from_diagnostic(d, path, source));
+.map(message_from_diagnostic);
let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default();
let edits = generate_noqa_edits(
@@ -111,11 +111,6 @@ pub(crate) const fn is_support_slices_in_literal_concatenation_enabled(
settings.preview.is_enabled()
}

-// https://github.com/astral-sh/ruff/pull/11370
-pub(crate) const fn is_undefined_export_in_dunder_init_enabled(settings: &LinterSettings) -> bool {
-settings.preview.is_enabled()
-}
-
// https://github.com/astral-sh/ruff/pull/14236
pub(crate) const fn is_allow_nested_roots_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
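Aside: each preview gate above is a `const fn` that forwards to `settings.preview.is_enabled()`, and deleting a gate (as this hunk does) is how a preview behavior gets stabilized. A self-contained sketch of the pattern, with `LinterSettings` reduced to the single field used here:

```rust
// Stand-in types: Ruff's real `LinterSettings` carries much more state.
struct Preview { enabled: bool }

impl Preview {
    const fn is_enabled(&self) -> bool { self.enabled }
}

struct LinterSettings { preview: Preview }

// One gate per preview feature; callers check it at the rule site.
const fn is_allow_nested_roots_enabled(settings: &LinterSettings) -> bool {
    settings.preview.is_enabled()
}

fn main() {
    let settings = LinterSettings { preview: Preview { enabled: true } };
    assert!(is_allow_nested_roots_enabled(&settings));
}
```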
@@ -3,16 +3,16 @@ use log::warn;
use pyproject_toml::PyProjectToml;
use ruff_text_size::{TextRange, TextSize};

-use ruff_diagnostics::Diagnostic;
use ruff_source_file::SourceFile;

use crate::IOError;
+use crate::OldDiagnostic;
use crate::message::Message;
use crate::registry::Rule;
use crate::rules::ruff::rules::InvalidPyprojectToml;
use crate::settings::LinterSettings;

-pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -> Vec<Message> {
+pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings) -> Vec<Message> {
let Some(err) = toml::from_str::<PyProjectToml>(source_file.source_text()).err() else {
return Vec::default();
};
@@ -29,8 +29,9 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -
source_file.name(),
);
if settings.rules.enabled(Rule::IOError) {
-let diagnostic = Diagnostic::new(IOError { message }, TextRange::default());
-messages.push(Message::from_diagnostic(diagnostic, source_file, None));
+let diagnostic =
+OldDiagnostic::new(IOError { message }, TextRange::default(), source_file);
+messages.push(Message::from_diagnostic(diagnostic, None));
} else {
warn!(
"{}{}{} {message}",
@@ -51,8 +52,12 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -

if settings.rules.enabled(Rule::InvalidPyprojectToml) {
let toml_err = err.message().to_string();
-let diagnostic = Diagnostic::new(InvalidPyprojectToml { message: toml_err }, range);
-messages.push(Message::from_diagnostic(diagnostic, source_file, None));
+let diagnostic = OldDiagnostic::new(
+InvalidPyprojectToml { message: toml_err },
+range,
+source_file,
+);
+messages.push(Message::from_diagnostic(diagnostic, None));
}

messages
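Aside: `lint_pyproject_toml` treats any TOML deserialization error as a diagnostic. A compact sketch of that control flow, assuming the `toml` and `serde` crates and a stand-in `PyProjectToml` struct (the real one comes from the `pyproject_toml` crate):

```rust
// Assumes `toml = "0.8"` and `serde = { version = "1", features = ["derive"] }`.
use serde::Deserialize;

// Stand-in: accepts any `[project]` table so the sketch stays self-contained.
#[derive(Deserialize)]
struct PyProjectToml {
    #[allow(dead_code)]
    project: Option<toml::Value>,
}

// Mirrors the hunk above: a parse failure becomes a diagnostic message,
// a successful parse produces nothing.
fn lint(source: &str) -> Option<String> {
    match toml::from_str::<PyProjectToml>(source) {
        Ok(_) => None,
        Err(err) => Some(format!("invalid-pyproject-toml: {}", err.message())),
    }
}

fn main() {
    assert!(lint("[project]\nname = \"demo\"\n").is_none());
    assert!(lint("project = [").is_some());
}
```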
@@ -1,6 +1,7 @@
//! Remnant of the registry of all [`Rule`] implementations, now it's reexporting from codes.rs
//! with some helper symbols

+use ruff_db::diagnostic::LintName;
use strum_macros::EnumIter;

pub use codes::Rule;
@@ -348,9 +349,18 @@ impl Rule {

/// Return the URL for the rule documentation, if it exists.
pub fn url(&self) -> Option<String> {
-self.explanation()
-.is_some()
-.then(|| format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.as_ref()))
+self.explanation().is_some().then(|| {
+format!(
+"{}/rules/{name}",
+env!("CARGO_PKG_HOMEPAGE"),
+name = self.name()
+)
+})
}

+pub fn name(&self) -> LintName {
+let name: &'static str = self.into();
+LintName::of(name)
+}
}
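Aside: `Rule::url` now builds the docs URL from the rule's name rather than its `as_ref()` string. A sketch under stated assumptions (a plain constant stands in for the `env!("CARGO_PKG_HOMEPAGE")` lookup, and `has_explanation` for `explanation().is_some()`):

```rust
// Stand-in for Cargo's package metadata; the real value is compiled in
// via env!("CARGO_PKG_HOMEPAGE"). The URL here is illustrative.
const HOMEPAGE: &str = "https://docs.astral.sh/ruff";

fn rule_url(name: &str, has_explanation: bool) -> Option<String> {
    // Only documented rules get a URL, matching `explanation().is_some()`.
    has_explanation.then(|| format!("{HOMEPAGE}/rules/{name}"))
}

fn main() {
    assert_eq!(
        rule_url("unused-variable", true).as_deref(),
        Some("https://docs.astral.sh/ruff/rules/unused-variable")
    );
    assert_eq!(rule_url("unused-variable", false), None);
}
```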
@@ -421,7 +431,7 @@ pub mod clap_completion {
fn possible_values(&self) -> Option<Box<dyn Iterator<Item = PossibleValue> + '_>> {
Some(Box::new(Rule::iter().map(|rule| {
let name = rule.noqa_code().to_string();
-let help = rule.as_ref().to_string();
+let help = rule.name().as_str();
PossibleValue::new(name).help(help)
})))
}
@@ -443,7 +453,7 @@ mod tests {
assert!(
rule.explanation().is_some(),
"Rule {} is missing documentation",
-rule.as_ref()
+rule.name()
);
}
}
@@ -460,10 +470,10 @@ mod tests {
.collect();

for rule in Rule::iter() {
-let rule_name = rule.as_ref();
+let rule_name = rule.name();
for pattern in &patterns {
assert!(
-!pattern.matches(rule_name),
+!pattern.matches(&rule_name),
"{rule_name} does not match naming convention, see CONTRIBUTING.md"
);
}
@@ -302,9 +302,8 @@ impl Display for RuleSet {
} else {
writeln!(f, "[")?;
for rule in self {
-let name = rule.as_ref();
let code = rule.noqa_code();
-writeln!(f, "\t{name} ({code}),")?;
+writeln!(f, "\t{name} ({code}),", name = rule.name())?;
}
write!(f, "]")?;
}
Some files were not shown because too many files have changed in this diff