Compare commits
273 Commits
david/allo...0.12.0
| SHA1 |
|---|
| 87f0feb21a |
| 685eac10e5 |
| a93992fa30 |
| 50f84808bc |
| 6754e94abc |
| 33c8c7569d |
| 34dc8e0531 |
| b01195b166 |
| ce176b1acf |
| 7072cf69b4 |
| 72c8dc006f |
| ad9ae4e2b6 |
| de4fc5b171 |
| 56f2aaaebc |
| ebd2a27559 |
| 1278e3442a |
| 7efbf469dd |
| 2a1fed9327 |
| 7de8a0b429 |
| 0a1c6cb70b |
| 2dafc5a8bd |
| 72a4c3ed83 |
| e559e21e93 |
| c948be495a |
| cd245d292e |
| 620b84443b |
| 1f70ceba0c |
| 00e9de8db9 |
| 7211660f8b |
| c1610e2eaf |
| bf53bc4256 |
| 9f2ae1f568 |
| 5cf2c40d13 |
| 02b5376a3c |
| 18a134ae1f |
| c063940d52 |
| 8aea383f29 |
| 913f136d33 |
| c7e020df6b |
| 1d458d4314 |
| 342b2665db |
| 390918e790 |
| a1c69ca460 |
| 3a77768f79 |
| c22f809049 |
| 2b731d19b9 |
| cff5adf324 |
| 7880a20794 |
| 83b0cde2fc |
| 373a3bfcd6 |
| 5e57e4680f |
| 2b15f1d240 |
| c3aa965546 |
| c5b58187da |
| a842899862 |
| ee3152dace |
| 869d7bf9a8 |
| 2f3bd24900 |
| d715c1fef8 |
| cb2ae8d9ac |
| 5383bcc497 |
| 9b927265f9 |
| b38115ba95 |
| 32a0d4bb21 |
| ccae65630a |
| 4cdf128748 |
| 0c18a5a737 |
| 37b2de90f8 |
| 3a430fa6da |
| 782363b736 |
| 8237d4670c |
| 5e02d839d5 |
| e4423044f8 |
| 6d56ee803e |
| 89d915a1e3 |
| 1889a5e6eb |
| 793ff9bdbc |
| c9dff5c7d5 |
| 76d9009a6e |
| 015222900f |
| 1f27d53fd5 |
| 3c6c017950 |
| ef564094a9 |
| 96171f41c2 |
| 8123dab05a |
| 324e5cbc19 |
| e6fe2af292 |
| dbb0d60caa |
| ef4108af2a |
| f74527f4e9 |
| 65a2c6d4eb |
| 1a3befe8d6 |
| 7893cf9fe1 |
| 8fdf3fc47f |
| 65f32edbc7 |
| e84406d8be |
| a863000cbc |
| 3aae1cd59b |
| 5dcfc9f074 |
| 0724bee59c |
| 2213698a5d |
| dc322d23dd |
| a2de81cb27 |
| eb60bd64fd |
| b21ac567e1 |
| 6cd0669475 |
| 6051a118d1 |
| 161446a47a |
| caf885c20a |
| 79006dfb52 |
| b44062b9ae |
| ae2150bfa3 |
| 07cb84426d |
| b01c95d460 |
| aa3c312f5f |
| 475a02b725 |
| b4b53183b7 |
| 5fe6fa74a0 |
| ea64c01524 |
| 3fa5a9ff3b |
| b5a77df46f |
| 8d1d0be648 |
| 1cf7b67e85 |
| c18dc41f1a |
| 6cefbb6b38 |
| 0232e422b2 |
| 331821244b |
| 1dc8f8f903 |
| 301b9f4135 |
| 86e5a311f0 |
| 0c20010bb9 |
| 72552f31e4 |
| 95497ffaab |
| b3b900dc1e |
| 503427855d |
| 6e785867c3 |
| 1274521f9f |
| 8d24760643 |
| db8db536f8 |
| cb8246bc5f |
| 5faf72a4d9 |
| 28dbc5c51e |
| ce216c79cc |
| 33468cc8cc |
| 8531f4b3ca |
| 55100209c7 |
| c0bb83b882 |
| 74a4e9af3d |
| 8485dbb324 |
| 0858896bc4 |
| ce8b744f17 |
| 5a8cdab771 |
| 3a8191529c |
| e658778ced |
| f1883d71a4 |
| 11db567b0b |
| 9f8c3de462 |
| 293d4ac388 |
| 9e8a7e9353 |
| 453e5f5934 |
| 7ea773daf2 |
| 0079cc6817 |
| e8ea40012a |
| 71d8a5da2a |
| 2c3b3d3230 |
| 8d98c601d8 |
| 0986edf427 |
| 03f1f8e218 |
| 628bb2cd1d |
| f23d2c9b9e |
| 67d94d9ec8 |
| d1cb8e2142 |
| 57202c1c77 |
| 2289187b74 |
| 14c42a8ddf |
| e677863787 |
| f379eb6e62 |
| 47698883ae |
| e2d96df501 |
| 384e80ec80 |
| b9f3b0e0a6 |
| 1e6d76c878 |
| 844c8626c3 |
| 1c8d9d707e |
| 4856377478 |
| 643c845a47 |
| 9e952cf0e0 |
| c4015edf48 |
| 97b824db3e |
| 220ab88779 |
| 7a63ac145a |
| 54f597658c |
| aa1fad61e0 |
| b390b3cb8e |
| 88866f0048 |
| 9bbf4987e8 |
| ad024f9a09 |
| fc549bda94 |
| 77c8ddf101 |
| e730f27f80 |
| d65bd69963 |
| c713e76e4d |
| 8005ebb405 |
| 0c29e258c6 |
| b5b6b657cc |
| ad2f667ee4 |
| 363f061f09 |
| 9b0dfc505f |
| 695de4f27f |
| 3445d1322d |
| 2c3f091e0e |
| 9d3cad95bc |
| 7df79cfb70 |
| 33ed502edb |
| a827b16ebd |
| 47a2ec002e |
| aee3af0f7a |
| 04dc48e17c |
| 27743efa1b |
| c60b4d7f30 |
| 16621fa19d |
| e23d4ea027 |
| 452f992fbc |
| a5ebb3f3a2 |
| 9925910a29 |
| a3ee6bb3b5 |
| b60ba75d09 |
| 66ba1d8775 |
| bbcd7e0196 |
| 48c425c15b |
| 6d210dd0c7 |
| 9ce83c215d |
| 602dd5c039 |
| 3eada01153 |
| 3e811fc369 |
| 743764d384 |
| e03e05d2b3 |
| 9ec4a178a4 |
| 8d5655a7ba |
| 6453ac9ea1 |
| 0a11baf29c |
| 1d20cf9570 |
| 62ef96f51e |
| 4e68dd96a6 |
| b25b642371 |
| 175402aa75 |
| d8216fa328 |
| d51f6940fe |
| 66b082ff71 |
| 5d93d619f3 |
| e1b662bf5d |
| f885cb8a2f |
| 4ef2c223c9 |
| d078ecff37 |
| 7eca6f96e3 |
| fbaf826a9d |
| d8a5b9de17 |
| c3feb8ce27 |
| 97ff015c88 |
| 1f7134f727 |
| 6a0b93170e |
| cc59ff8aad |
| 2b90e7fcd7 |
| a43f5b2129 |
| f3fb7429ca |
| 83498b95fb |
| 03d7be3747 |
| d95b029862 |
| 14c3755445 |
| 83a036960b |
| be76fadb05 |
| e293411679 |
| 53d19f8368 |

.gitattributes (3 changes, vendored)
@@ -5,6 +5,9 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_2.py text eol=crlf
crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/f-string-carriage-return-newline.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@f-string-carriage-return-newline.py.snap text eol=crlf

crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf

.github/workflows/build-docker.yml (4 changes, vendored)
@@ -79,7 +79,7 @@ jobs:
# Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
- name: Build and push by digest
id: build
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
platforms: ${{ matrix.platform }}

@@ -231,7 +231,7 @@ jobs:
${{ env.TAG_PATTERNS }}

- name: Build and push
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
platforms: linux/amd64,linux/arm64

.github/workflows/ci.yaml (36 changes, vendored)
@@ -237,13 +237,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-insta
- name: ty mdtests (GitHub annotations)

@@ -295,13 +295,13 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-insta
- name: "Run tests"

@@ -324,7 +324,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-nextest
- name: "Run tests"

@@ -380,7 +380,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
- name: "Build"
run: cargo build --release --locked

@@ -405,13 +405,13 @@ jobs:
MSRV: ${{ steps.msrv.outputs.value }}
run: rustup default "${MSRV}"
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-insta
- name: "Run tests"

@@ -437,7 +437,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: "Install cargo-binstall"
uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
uses: cargo-bins/cargo-binstall@ea65a39d2dcca142c53bddd3a097a674e903f475 # v1.12.7
with:
tool: cargo-fuzz@0.11.2
- name: "Install cargo-fuzz"

@@ -459,7 +459,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: Download Ruff binary to test
id: download-cached-binary

@@ -660,7 +660,7 @@ jobs:
branch: ${{ github.event.pull_request.base.ref }}
workflow: "ci.yaml"
check_artifacts: true
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- name: Fuzz
env:
FORCE_COLOR: 1

@@ -690,7 +690,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: cargo-bins/cargo-binstall@5cbf019d8cb9b9d5b086218c41458ea35d817691 # v1.12.5
- uses: cargo-bins/cargo-binstall@ea65a39d2dcca142c53bddd3a097a674e903f475 # v1.12.7
- run: cargo binstall --no-confirm cargo-shear
- run: cargo shear

@@ -730,7 +730,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:

@@ -773,7 +773,7 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt --system

@@ -910,7 +910,7 @@ jobs:
run: rustup show

- name: "Install codspeed"
uses: taiki-e/install-action@941e8a4d9d7cdb696bd4f017cf54aca281f8ffff # v2.51.2
uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4
with:
tool: cargo-codspeed

.github/workflows/daily_fuzz.yaml (4 changes, vendored)
@@ -34,11 +34,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
- uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
uses: rui314/setup-mold@b3958095189f34b95d402a680b6e96b7f194f7b9 # v1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Build ruff
# A debug build means the script runs slower once it gets started,

.github/workflows/mypy_primer.yaml (2 changes, vendored)
@@ -37,7 +37,7 @@ jobs:
persist-credentials: false

- name: Install the latest version of uv
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
with:

.github/workflows/publish-pypi.yml (2 changes, vendored)
@@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1
uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels-*

.github/workflows/release.yml (8 changes, vendored)
@@ -61,7 +61,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive

@@ -124,7 +124,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive

@@ -175,7 +175,7 @@ jobs:
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive

@@ -251,7 +251,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
- uses: actions/checkout@09d2acae674a48949e3602304ab46fd20ae0c42f
with:
persist-credentials: false
submodules: recursive

.github/workflows/sync_typeshed.yaml (2 changes, vendored)
@@ -60,7 +60,7 @@ jobs:
cd ruff
git push --force origin typeshedbot/sync-typeshed
gh pr list --repo "$GITHUB_REPOSITORY" --head typeshedbot/sync-typeshed --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
gh pr create --title "Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "internal"
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"

create-issue-on-failure:
name: Create an issue if the typeshed sync failed

.pre-commit-config.yaml

@@ -5,6 +5,7 @@ exclude: |
.github/workflows/release.yml|
crates/ty_vendored/vendor/.*|
crates/ty_project/resources/.*|
crates/ty_python_semantic/resources/corpus/.*|
crates/ty/docs/(configuration|rules|cli).md|
crates/ruff_benchmark/resources/.*|
crates/ruff_linter/resources/.*|

@@ -66,7 +67,7 @@ repos:
- black==25.1.0

- repo: https://github.com/crate-ci/typos
rev: v1.32.0
rev: v1.33.1
hooks:
- id: typos

@@ -80,7 +81,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.10
rev: v0.11.13
hooks:
- id: ruff-format
- id: ruff

@@ -98,7 +99,7 @@ repos:
# zizmor detects security vulnerabilities in GitHub Actions workflows.
# Additional configuration for the tool is found in `.github/zizmor.yml`
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.7.0
rev: v1.9.0
hooks:
- id: zizmor

BREAKING_CHANGES.md

@@ -1,5 +1,39 @@
# Breaking Changes

## 0.12.0

- **Detection of more syntax errors**

  Ruff now detects version-related syntax errors, such as the use of the `match`
  statement on Python versions before 3.10, and syntax errors emitted by
  CPython's compiler, such as irrefutable `match` patterns before the final
  `case` arm.

- **New default Python version handling for syntax errors**

  Ruff will default to the _latest_ supported Python version (3.13) when
  checking for the version-related syntax errors mentioned above to prevent
  false positives in projects without a Python version configured. The default
  in all other cases, like applying lint rules, is unchanged and remains at the
  minimum supported Python version (3.9).

- **Updated f-string formatting**

  Ruff now formats multi-line f-strings with format specifiers to avoid adding a
  line break after the format specifier. This addresses a change to the Python
  grammar in version 3.13.4 that made such a line break a syntax error.

- **`rust-toolchain.toml` is no longer included in source distributions**

  The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
  minimum supported Rust version (MSRV) for development and building release
  artifacts. However, when present in source distributions, it would also cause
  downstream package maintainers to pull in the same Rust toolchain, even if
  their available toolchain was MSRV-compatible.

- **[`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
  (`S320`) has been removed**

## 0.11.0

This is a follow-up to release 0.10.0. Because of a mistake in the release process, the `requires-python` inference changes were not included in that release. Ruff 0.11.0 now includes this change as well as the stabilization of the preview behavior for `PGH004`.

CHANGELOG.md (215 changes)

@@ -1,5 +1,199 @@
# Changelog

## 0.12.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.12.0) for a migration
guide and overview of the changes!

### Breaking changes

- **Detection of more syntax errors**

  Ruff now detects version-related syntax errors, such as the use of the `match`
  statement on Python versions before 3.10, and syntax errors emitted by
  CPython's compiler, such as irrefutable `match` patterns before the final
  `case` arm.
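  For illustration, here is the kind of code these checks flag; a minimal
  sketch, where the first case assumes a configured Python version of 3.9:

  ```python
  point = (0, 0)

  # Version-related: `match` requires Python 3.10+, so this statement is
  # flagged when the configured Python version is 3.9 or earlier.
  match point:
      case (0, 0):
          print("origin")
      case _:
          print("elsewhere")

  # Compiler-emitted: an irrefutable pattern such as `case _` is only legal
  # in the final arm. CPython rejects the following, and Ruff now does too.
  match point:
      case _:        # irrefutable pattern before the final `case` arm
          print("anything")
      case (0, 0):   # unreachable
          print("origin")
  ```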
- **New default Python version handling for syntax errors**

  Ruff will default to the _latest_ supported Python version (3.13) when
  checking for the version-related syntax errors mentioned above to prevent
  false positives in projects without a Python version configured. The default
  in all other cases, like applying lint rules, is unchanged and remains at the
  minimum supported Python version (3.9).

- **Updated f-string formatting**

  Ruff now formats multi-line f-strings with format specifiers to avoid adding a
  line break after the format specifier. This addresses a change to the Python
  grammar in version 3.13.4 that made such a line break a syntax error.
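  A rough sketch of the difference, using hypothetical `value` and `width`
  variables:

  ```python
  value = 3.14159
  width = 10

  # Under the 3.13.4 grammar, a line break between a format specifier and the
  # closing brace is a syntax error, so the formatter avoids output like:
  #
  #   f"{value:>{width}.2f
  #   }"
  #
  # and keeps the closing brace on the same line as the specifier instead:
  text = f"{value:>{width}.2f}"
  print(text)
  ```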
- **`rust-toolchain.toml` is no longer included in source distributions**

  The `rust-toolchain.toml` is used to specify a higher Rust version than Ruff's
  minimum supported Rust version (MSRV) for development and building release
  artifacts. However, when present in source distributions, it would also cause
  downstream package maintainers to pull in the same Rust toolchain, even if
  their available toolchain was MSRV-compatible.

### Removed Rules

The following rules have been removed:

- [`suspicious-xmle-tree-usage`](https://docs.astral.sh/ruff/rules/suspicious-xmle-tree-usage/)
  (`S320`)

### Deprecated Rules

The following rules have been deprecated:

- [`pandas-df-variable-name`](https://docs.astral.sh/ruff/rules/pandas-df-variable-name/)

### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`for-loop-writes`](https://docs.astral.sh/ruff/rules/for-loop-writes) (`FURB122`)
- [`check-and-remove-from-set`](https://docs.astral.sh/ruff/rules/check-and-remove-from-set) (`FURB132`)
- [`verbose-decimal-constructor`](https://docs.astral.sh/ruff/rules/verbose-decimal-constructor) (`FURB157`)
- [`fromisoformat-replace-z`](https://docs.astral.sh/ruff/rules/fromisoformat-replace-z) (`FURB162`)
- [`int-on-sliced-str`](https://docs.astral.sh/ruff/rules/int-on-sliced-str) (`FURB166`)
- [`exc-info-outside-except-handler`](https://docs.astral.sh/ruff/rules/exc-info-outside-except-handler) (`LOG014`)
- [`import-outside-top-level`](https://docs.astral.sh/ruff/rules/import-outside-top-level) (`PLC0415`)
- [`unnecessary-dict-index-lookup`](https://docs.astral.sh/ruff/rules/unnecessary-dict-index-lookup) (`PLR1733`)
- [`nan-comparison`](https://docs.astral.sh/ruff/rules/nan-comparison) (`PLW0177`)
- [`eq-without-hash`](https://docs.astral.sh/ruff/rules/eq-without-hash) (`PLW1641`)
- [`pytest-parameter-with-default-argument`](https://docs.astral.sh/ruff/rules/pytest-parameter-with-default-argument) (`PT028`)
- [`pytest-warns-too-broad`](https://docs.astral.sh/ruff/rules/pytest-warns-too-broad) (`PT030`)
- [`pytest-warns-with-multiple-statements`](https://docs.astral.sh/ruff/rules/pytest-warns-with-multiple-statements) (`PT031`)
- [`invalid-formatter-suppression-comment`](https://docs.astral.sh/ruff/rules/invalid-formatter-suppression-comment) (`RUF028`)
- [`dataclass-enum`](https://docs.astral.sh/ruff/rules/dataclass-enum) (`RUF049`)
- [`class-with-mixed-type-vars`](https://docs.astral.sh/ruff/rules/class-with-mixed-type-vars) (`RUF053`)
- [`unnecessary-round`](https://docs.astral.sh/ruff/rules/unnecessary-round) (`RUF057`)
- [`starmap-zip`](https://docs.astral.sh/ruff/rules/starmap-zip) (`RUF058`)
- [`non-pep604-annotation-optional`](https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional) (`UP045`)
- [`non-pep695-generic-class`](https://docs.astral.sh/ruff/rules/non-pep695-generic-class) (`UP046`)
- [`non-pep695-generic-function`](https://docs.astral.sh/ruff/rules/non-pep695-generic-function) (`UP047`)
- [`private-type-parameter`](https://docs.astral.sh/ruff/rules/private-type-parameter) (`UP049`)

The following behaviors have been stabilized:

- [`collection-literal-concatenation`] (`RUF005`) now recognizes slices, in
  addition to list literals and variables.
- The fix for [`readlines-in-for`] (`FURB129`) is now marked as always safe.
- [`if-else-block-instead-of-if-exp`] (`SIM108`) will now further simplify
  expressions to use `or` instead of an `if` expression, where possible (see
  the sketch after this list).
- [`unused-noqa`] (`RUF100`) now checks for file-level `noqa` comments as well
  as inline comments.
- [`subprocess-without-shell-equals-true`] (`S603`) now accepts literal strings,
  as well as lists and tuples of literal strings, as trusted input.
- [`boolean-type-hint-positional-argument`] (`FBT001`) now applies to types that
  include `bool`, like `bool | int` or `typing.Optional[bool]`, in addition to
  plain `bool` annotations.
- [`non-pep604-annotation-union`] (`UP007`) has now been split into two rules.
  `UP007` now applies only to `typing.Union`, while
  [`non-pep604-annotation-optional`] (`UP045`) checks for use of
  `typing.Optional`. `UP045` has also been stabilized in this release, but you
  may need to update existing `include`, `ignore`, or `noqa` settings to
  accommodate this change.
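A minimal sketch of two of these stabilized behaviors, SIM108's `or`
simplification and the UP007/UP045 split (the names below are illustrative):

```python
from typing import Optional

override = ""

# SIM108: this if/else block is flagged, and the stabilized behavior can now
# simplify it all the way to `or` rather than to a ternary expression.
if override:
    name = override
else:
    name = "default"

name = override or "default"  # the simplified form


# UP045 (split out of UP007): Optional[X] annotations become X | None.
def find(key: str) -> Optional[int]:  # flagged by UP045
    return None


def find_pep604(key: str) -> int | None:  # the preferred PEP 604 spelling
    return None
```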

### Preview features

- \[`ruff`\] Check for non-context-manager use of `pytest.raises`, `pytest.warns`, and `pytest.deprecated_call` (`RUF061`, illustrated below) ([#17368](https://github.com/astral-sh/ruff/pull/17368))
- [syntax-errors] Raise unsupported syntax error for template strings prior to Python 3.14 ([#18664](https://github.com/astral-sh/ruff/pull/18664))
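For `RUF061`, the flagged and preferred forms look roughly like this (a
sketch, assuming pytest is available):

```python
import pytest


def test_division():
    # Flagged by preview rule RUF061: the callable form of pytest.raises.
    pytest.raises(ZeroDivisionError, lambda: 1 / 0)

    # Preferred: the context-manager form.
    with pytest.raises(ZeroDivisionError):
        1 / 0
```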

### Bug fixes

- Add syntax error when conversion flag does not immediately follow exclamation mark (illustrated below) ([#18706](https://github.com/astral-sh/ruff/pull/18706))
- Add trailing space around `readlines` ([#18542](https://github.com/astral-sh/ruff/pull/18542))
- Fix `\r` and `\r\n` handling in t- and f-string debug texts ([#18673](https://github.com/astral-sh/ruff/pull/18673))
- Hug closing `}` when f-string expression has a format specifier ([#18704](https://github.com/astral-sh/ruff/pull/18704))
- \[`flake8-pyi`\] Avoid syntax error in the case of starred and keyword arguments (`PYI059`) ([#18611](https://github.com/astral-sh/ruff/pull/18611))
- \[`flake8-return`\] Fix `RET504` autofix generating a syntax error ([#18428](https://github.com/astral-sh/ruff/pull/18428))
- \[`pep8-naming`\] Suppress fix for `N804` and `N805` if the recommended name is already used ([#18472](https://github.com/astral-sh/ruff/pull/18472))
- \[`pycodestyle`\] Avoid causing a syntax error in expressions spanning multiple lines (`E731`) ([#18479](https://github.com/astral-sh/ruff/pull/18479))
- \[`pyupgrade`\] Suppress `UP008` if `super` is shadowed ([#18688](https://github.com/astral-sh/ruff/pull/18688))
- \[`refurb`\] Parenthesize lambda and ternary expressions (`FURB122`, `FURB142`) ([#18592](https://github.com/astral-sh/ruff/pull/18592))
- \[`ruff`\] Handle extra arguments to `deque` (`RUF037`) ([#18614](https://github.com/astral-sh/ruff/pull/18614))
- \[`ruff`\] Preserve parentheses around `deque` in fix for `unnecessary-empty-iterable-within-deque-call` (`RUF037`) ([#18598](https://github.com/astral-sh/ruff/pull/18598))
- \[`ruff`\] Validate arguments before offering a fix (`RUF056`) ([#18631](https://github.com/astral-sh/ruff/pull/18631))
- \[`ruff`\] Skip fix for `RUF059` if dummy name is already bound ([#18509](https://github.com/astral-sh/ruff/pull/18509))
- \[`pylint`\] Fix `PLW0128` to check assignment targets in square brackets and after asterisks ([#18665](https://github.com/astral-sh/ruff/pull/18665))
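As an illustration of the first fix, a conversion flag in an f-string must
come right after the `!`:

```python
x = object()

print(f"{x!r}")  # valid: the flag immediately follows the exclamation mark
# print(f"{x! r}")  # now reported as a syntax error, matching CPython
```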

### Rule changes

- Fix false positive on mutations in `return` statements (`B909`) ([#18408](https://github.com/astral-sh/ruff/pull/18408))
- Treat `ty:` comments as pragma comments ([#18532](https://github.com/astral-sh/ruff/pull/18532))
- \[`flake8-pyi`\] Apply `custom-typevar-for-self` to string annotations (`PYI019`) ([#18311](https://github.com/astral-sh/ruff/pull/18311))
- \[`pyupgrade`\] Don't offer a fix for `Optional[None]` (`UP007`, `UP045`) ([#18545](https://github.com/astral-sh/ruff/pull/18545))
- \[`pyupgrade`\] Fix `super(__class__, self)` detection (`UP008`) ([#18478](https://github.com/astral-sh/ruff/pull/18478))
- \[`refurb`\] Make the fix for `FURB163` unsafe for `log2`, `log10`, `*args`, and deleted comments ([#18645](https://github.com/astral-sh/ruff/pull/18645))

### Server

- Support cancellation requests ([#18627](https://github.com/astral-sh/ruff/pull/18627))

### Documentation

- Drop confusing second `*` from glob pattern example for `per-file-target-version` ([#18709](https://github.com/astral-sh/ruff/pull/18709))
- Update Neovim configuration examples ([#18491](https://github.com/astral-sh/ruff/pull/18491))
- \[`pylint`\] De-emphasize `__hash__ = Parent.__hash__` (`PLW1641`) ([#18613](https://github.com/astral-sh/ruff/pull/18613))
- \[`refurb`\] Add a note about float literal handling (`FURB157`) ([#18615](https://github.com/astral-sh/ruff/pull/18615))

## 0.11.13

### Preview features

- \[`airflow`\] Add unsafe fix for module moved cases (`AIR301`,`AIR311`,`AIR312`,`AIR302`) ([#18367](https://github.com/astral-sh/ruff/pull/18367),[#18366](https://github.com/astral-sh/ruff/pull/18366),[#18363](https://github.com/astral-sh/ruff/pull/18363),[#18093](https://github.com/astral-sh/ruff/pull/18093))
- \[`refurb`\] Add coverage of `set` and `frozenset` calls (`FURB171`) ([#18035](https://github.com/astral-sh/ruff/pull/18035))
- \[`refurb`\] Mark `FURB180` fix unsafe when class has bases ([#18149](https://github.com/astral-sh/ruff/pull/18149))

### Bug fixes

- \[`perflint`\] Fix missing parentheses for lambda and ternary conditions (`PERF401`, `PERF403`) ([#18412](https://github.com/astral-sh/ruff/pull/18412))
- \[`pyupgrade`\] Apply `UP035` only on py313+ for `get_type_hints()` ([#18476](https://github.com/astral-sh/ruff/pull/18476))
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP004`,`UP050`) ([#18393](https://github.com/astral-sh/ruff/pull/18393), [#18390](https://github.com/astral-sh/ruff/pull/18390))

### Rule changes

- \[`fastapi`\] Avoid false positive for class dependencies (`FAST003`) ([#18271](https://github.com/astral-sh/ruff/pull/18271))

### Documentation

- Update editor setup docs for Neovim and Vim ([#18324](https://github.com/astral-sh/ruff/pull/18324))

### Other changes

- Support Python 3.14 template strings (t-strings) in formatter and parser ([#17851](https://github.com/astral-sh/ruff/pull/17851))

## 0.11.12

### Preview features

- \[`airflow`\] Revise fix titles (`AIR3`) ([#18215](https://github.com/astral-sh/ruff/pull/18215))
- \[`pylint`\] Implement `missing-maxsplit-arg` (`PLC0207`) ([#17454](https://github.com/astral-sh/ruff/pull/17454))
- \[`pyupgrade`\] New rule `UP050` (`useless-class-metaclass-type`) ([#18334](https://github.com/astral-sh/ruff/pull/18334))
- \[`flake8-use-pathlib`\] Replace `os.symlink` with `Path.symlink_to` (`PTH211`) ([#18337](https://github.com/astral-sh/ruff/pull/18337))

### Bug fixes

- \[`flake8-bugbear`\] Ignore `__debug__` attribute in `B010` ([#18357](https://github.com/astral-sh/ruff/pull/18357))
- \[`flake8-async`\] Fix `anyio.sleep` argument name (`ASYNC115`, `ASYNC116`) ([#18262](https://github.com/astral-sh/ruff/pull/18262))
- \[`refurb`\] Fix `FURB129` autofix generating invalid syntax ([#18235](https://github.com/astral-sh/ruff/pull/18235))

### Rule changes

- \[`flake8-implicit-str-concat`\] Add autofix for `ISC003` ([#18256](https://github.com/astral-sh/ruff/pull/18256))
- \[`pycodestyle`\] Improve the diagnostic message for `E712` ([#18328](https://github.com/astral-sh/ruff/pull/18328))
- \[`flake8-2020`\] Fix diagnostic message for `!=` comparisons (`YTT201`) ([#18293](https://github.com/astral-sh/ruff/pull/18293))
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP010`) ([#18291](https://github.com/astral-sh/ruff/pull/18291))

### Documentation

- Simplify rules table to improve readability ([#18297](https://github.com/astral-sh/ruff/pull/18297))
- Update editor integrations link in README ([#17977](https://github.com/astral-sh/ruff/pull/17977))
- \[`flake8-bugbear`\] Add fix safety section (`B006`) ([#17652](https://github.com/astral-sh/ruff/pull/17652))

## 0.11.11

### Preview features

@@ -176,7 +370,7 @@
- \[`airflow`\] Add missing `AIR302` attribute check ([#17115](https://github.com/astral-sh/ruff/pull/17115))
- \[`airflow`\] Expand module path check to individual symbols (`AIR302`) ([#17278](https://github.com/astral-sh/ruff/pull/17278))
- \[`airflow`\] Extract `AIR312` from `AIR302` rules (`AIR302`, `AIR312`) ([#17152](https://github.com/astral-sh/ruff/pull/17152))
- \[`airflow`\] Update oudated `AIR301`, `AIR302` rules ([#17123](https://github.com/astral-sh/ruff/pull/17123))
- \[`airflow`\] Update outdated `AIR301`, `AIR302` rules ([#17123](https://github.com/astral-sh/ruff/pull/17123))
- [syntax-errors] Async comprehension in sync comprehension ([#17177](https://github.com/astral-sh/ruff/pull/17177))
- [syntax-errors] Check annotations in annotated assignments ([#17283](https://github.com/astral-sh/ruff/pull/17283))
- [syntax-errors] Extend annotation checks to `await` ([#17282](https://github.com/astral-sh/ruff/pull/17282))

@@ -337,7 +531,7 @@ See also, the "Remapped rules" section which may result in disabled rules.

- **More robust noqa parsing** ([#16483](https://github.com/astral-sh/ruff/pull/16483))

The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [*Error suppression*](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.
The syntax for both file-level and in-line suppression comments has been unified and made more robust to certain errors. In most cases, this will result in more suppression comments being read by Ruff, but there are a few instances where previously read comments will now log an error to the user instead. Please refer to the documentation on [_Error suppression_](https://docs.astral.sh/ruff/linter/#error-suppression) for the full specification.

- **Avoid unnecessary parentheses around with statements with a single context manager and a trailing comment** ([#14005](https://github.com/astral-sh/ruff/pull/14005))

@@ -1259,7 +1453,7 @@ The following fixes have been stabilized:
- Detect items that hash to same value in duplicate sets (`B033`, `PLC0208`) ([#14064](https://github.com/astral-sh/ruff/pull/14064))
- \[`eradicate`\] Better detection of IntelliJ language injection comments (`ERA001`) ([#14094](https://github.com/astral-sh/ruff/pull/14094))
- \[`flake8-pyi`\] Add autofix for `docstring-in-stub` (`PYI021`) ([#14150](https://github.com/astral-sh/ruff/pull/14150))
- \[`flake8-pyi`\] Update `duplicate-literal-member` (`PYI062`) to alawys provide an autofix ([#14188](https://github.com/astral-sh/ruff/pull/14188))
- \[`flake8-pyi`\] Update `duplicate-literal-member` (`PYI062`) to always provide an autofix ([#14188](https://github.com/astral-sh/ruff/pull/14188))
- \[`pyflakes`\] Detect items that hash to same value in duplicate dictionaries (`F601`) ([#14065](https://github.com/astral-sh/ruff/pull/14065))
- \[`ruff`\] Fix false positive for decorators (`RUF028`) ([#14061](https://github.com/astral-sh/ruff/pull/14061))

@@ -1749,7 +1943,7 @@ The following fixes have been stabilized:

## 0.5.6

Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
Ruff 0.5.6 automatically enables linting and formatting of notebooks in _preview mode_.
You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.

```toml

@@ -2502,7 +2696,7 @@ To setup `ruff server` with your editor, refer to the [README.md](https://github

### Server

*This section is devoted to updates for our new language server, written in Rust.*
_This section is devoted to updates for our new language server, written in Rust._

- Enable ruff-specific source actions ([#10916](https://github.com/astral-sh/ruff/pull/10916))
- Refreshes diagnostics for open files when file configuration is changed ([#10988](https://github.com/astral-sh/ruff/pull/10988))

@@ -3909,7 +4103,7 @@ Read Ruff's new [versioning policy](https://docs.astral.sh/ruff/versioning/).
- \[`refurb`\] Add `single-item-membership-test` (`FURB171`) ([#7815](https://github.com/astral-sh/ruff/pull/7815))
- \[`pylint`\] Add `and-or-ternary` (`R1706`) ([#7811](https://github.com/astral-sh/ruff/pull/7811))

*New rules are added in [preview](https://docs.astral.sh/ruff/preview/).*
_New rules are added in [preview](https://docs.astral.sh/ruff/preview/)._

### Configuration

@@ -3984,3 +4178,12 @@ Read Ruff's new [versioning policy](https://docs.astral.sh/ruff/versioning/).
### Playground

- Fix playground `Quick Fix` action ([#7824](https://github.com/astral-sh/ruff/pull/7824))

[`boolean-type-hint-positional-argument`]: https://docs.astral.sh/ruff/rules/boolean-type-hint-positional-argument
[`collection-literal-concatenation`]: https://docs.astral.sh/ruff/rules/collection-literal-concatenation
[`if-else-block-instead-of-if-exp`]: https://docs.astral.sh/ruff/rules/if-else-block-instead-of-if-exp
[`non-pep604-annotation-optional`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-optional
[`non-pep604-annotation-union`]: https://docs.astral.sh/ruff/rules/non-pep604-annotation-union
[`readlines-in-for`]: https://docs.astral.sh/ruff/rules/readlines-in-for
[`subprocess-without-shell-equals-true`]: https://docs.astral.sh/ruff/rules/subprocess-without-shell-equals-true
[`unused-noqa`]: https://docs.astral.sh/ruff/rules/unused-noqa

Cargo.lock (200 changes, generated)
@@ -51,14 +51,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.18"
|
||||
version = "0.6.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b"
|
||||
checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
@@ -71,9 +71,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.10"
|
||||
version = "1.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9"
|
||||
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-lossy"
|
||||
@@ -112,7 +112,7 @@ dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-lossy",
|
||||
"html-escape",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -132,6 +132,12 @@ version = "1.0.98"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
|
||||
|
||||
[[package]]
|
||||
name = "argfile"
|
||||
version = "0.2.1"
|
||||
@@ -218,9 +224,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "boxcar"
|
||||
version = "0.2.12"
|
||||
version = "0.2.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf"
|
||||
checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa"
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
@@ -256,9 +262,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "camino"
|
||||
version = "1.1.9"
|
||||
version = "1.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
|
||||
checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -342,9 +348,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.38"
|
||||
version = "4.5.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed93b9805f8ba930df42c2590f05453d5ec36cbb85d018868a5b24d31f6ac000"
|
||||
checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -352,9 +358,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.38"
|
||||
version = "4.5.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "379026ff283facf611b0ea629334361c4211d1b12ee01024eec1591133b04120"
|
||||
checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -395,9 +401,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.32"
|
||||
version = "4.5.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7"
|
||||
checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
@@ -522,7 +528,7 @@ dependencies = [
|
||||
"encode_unicode",
|
||||
"libc",
|
||||
"once_cell",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
@@ -1109,9 +1115,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.3"
|
||||
version = "0.15.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
|
||||
checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
|
||||
dependencies = [
|
||||
"allocator-api2",
|
||||
"equivalent",
|
||||
@@ -1124,7 +1130,7 @@ version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
|
||||
dependencies = [
|
||||
"hashbrown 0.15.3",
|
||||
"hashbrown 0.15.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1322,7 +1328,7 @@ version = "0.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "17d34b7d42178945f775e84bc4c36dde7c1c6cdfea656d3354d009056f2bb3d2"
|
||||
dependencies = [
|
||||
"hashbrown 0.15.3",
|
||||
"hashbrown 0.15.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1342,7 +1348,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.3",
|
||||
"hashbrown 0.15.4",
|
||||
"serde",
|
||||
]
|
||||
|
||||
@@ -1355,7 +1361,7 @@ dependencies = [
|
||||
"console",
|
||||
"number_prefix",
|
||||
"portable-atomic",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
"vt100",
|
||||
"web-time",
|
||||
]
|
||||
@@ -1415,6 +1421,15 @@ dependencies = [
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "intrusive-collections"
|
||||
version = "0.9.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86"
|
||||
dependencies = [
|
||||
"memoffset",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-docker"
|
||||
version = "0.2.0"
|
||||
@@ -1498,9 +1513,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
|
||||
|
||||
[[package]]
|
||||
name = "jiff"
|
||||
version = "0.2.13"
|
||||
version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f02000660d30638906021176af16b17498bd0d12813dbfe7b276d8bc7f3c0806"
|
||||
checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49"
|
||||
dependencies = [
|
||||
"jiff-static",
|
||||
"jiff-tzdb-platform",
|
||||
@@ -1513,9 +1528,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "jiff-static"
|
||||
version = "0.2.13"
|
||||
version = "0.2.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f3c30758ddd7188629c6713fc45d1188af4f44c90582311d0c8d8c9907f60c48"
|
||||
checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -1591,15 +1606,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.172"
|
||||
version = "0.2.173"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
|
||||
checksum = "d8cfeafaffdbc32176b64fb251369d52ea9f0a8fbc6f8759edffef7b525d64bb"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "1.7.0"
|
||||
version = "1.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad9e315e3f679e61b9095ffd5e509de78b8a4ea3bba9d772f6fb243209f808d4"
|
||||
checksum = "ae28ddc5b90c3e3146a21d051ca095cbc8d932ad8714cf65ddf71a9abb35684c"
|
||||
dependencies = [
|
||||
"annotate-snippets",
|
||||
"libcst_derive",
|
||||
@@ -1607,14 +1622,14 @@ dependencies = [
|
||||
"paste",
|
||||
"peg",
|
||||
"regex",
|
||||
"thiserror 1.0.69",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "1.7.0"
|
||||
version = "1.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bfa96ed35d0dccc67cf7ba49350cb86de3dcb1d072a7ab28f99117f19d874953"
|
||||
checksum = "dc2de5c2f62bcf8a4f7290b1854388b262c4b68f1db1a3ee3ef6d4c1319b00a3"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -1738,9 +1753,18 @@ checksum = "2f926ade0c4e170215ae43342bf13b9310a437609c81f29f86c5df6657582ef9"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
version = "2.7.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mimalloc"
|
||||
@@ -1928,6 +1952,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d31b8b7a99f71bdff4235faf9ce9eada0ad3562c8fbeb7d607d9f41a6ec569d"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2049,7 +2074,7 @@ checksum = "31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"serde",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
"unscanny",
|
||||
"version-ranges",
|
||||
]
|
||||
@@ -2070,7 +2095,7 @@ dependencies = [
|
||||
"serde",
|
||||
"smallvec",
|
||||
"thiserror 1.0.69",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"version-ranges",
|
||||
@@ -2258,15 +2283,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyproject-toml"
|
||||
version = "0.13.4"
|
||||
version = "0.13.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "643af57c3f36ba90a8b53e972727d8092f7408a9ebfbaf4c3d2c17b07c58d835"
|
||||
checksum = "7b0f6160dc48298b9260d9b958ad1d7f96f6cd0b9df200b22329204e09334663"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"pep440_rs",
|
||||
"pep508_rs",
|
||||
"serde",
|
||||
"thiserror 1.0.69",
|
||||
"thiserror 2.0.12",
|
||||
"toml",
|
||||
]
|
||||
|
||||
@@ -2485,7 +2510,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.11.11"
|
||||
version = "0.12.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2554,7 +2579,7 @@ dependencies = [
|
||||
"snapbox",
|
||||
"toml",
|
||||
"tryfn",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2594,6 +2619,7 @@ name = "ruff_db"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"arc-swap",
|
||||
"camino",
|
||||
"countme",
|
||||
"dashmap 6.1.0",
|
||||
@@ -2642,7 +2668,6 @@ dependencies = [
|
||||
"rayon",
|
||||
"regex",
|
||||
"ruff",
|
||||
"ruff_diagnostics",
|
||||
"ruff_formatter",
|
||||
"ruff_linter",
|
||||
"ruff_notebook",
|
||||
@@ -2672,9 +2697,7 @@ dependencies = [
|
||||
name = "ruff_diagnostics"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"is-macro",
|
||||
"log",
|
||||
"ruff_text_size",
|
||||
"serde",
|
||||
]
|
||||
@@ -2692,7 +2715,7 @@ dependencies = [
|
||||
"serde",
|
||||
"static_assertions",
|
||||
"tracing",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2725,7 +2748,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.11.11"
|
||||
version = "0.12.0"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
@@ -2781,7 +2804,7 @@ dependencies = [
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-normalization",
|
||||
"unicode-width 0.2.0",
|
||||
"unicode-width 0.2.1",
|
||||
"unicode_names2",
|
||||
"url",
|
||||
]
|
||||
@@ -2842,6 +2865,7 @@ dependencies = [
|
||||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
"thiserror 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3061,7 +3085,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.11.11"
|
||||
version = "0.12.0"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3081,6 +3105,7 @@ dependencies = [
|
||||
"ruff_workspace",
|
||||
"serde",
|
||||
"serde-wasm-bindgen",
|
||||
"uuid",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-test",
|
||||
]
|
||||
@@ -3179,16 +3204,17 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
||||
|
||||
[[package]]
|
||||
name = "salsa"
|
||||
version = "0.21.1"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
|
||||
version = "0.22.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=09627e450566f894956710a3fd923dc80462ae6d#09627e450566f894956710a3fd923dc80462ae6d"
|
||||
dependencies = [
|
||||
"boxcar",
|
||||
"compact_str",
|
||||
"crossbeam-queue",
|
||||
"dashmap 6.1.0",
|
||||
"hashbrown 0.15.3",
|
||||
"crossbeam-utils",
|
||||
"hashbrown 0.15.4",
|
||||
"hashlink",
|
||||
"indexmap",
|
||||
"intrusive-collections",
|
||||
"parking_lot",
|
||||
"portable-atomic",
|
||||
"rayon",
|
||||
@@ -3202,15 +3228,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macro-rules"
|
||||
version = "0.21.1"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
|
||||
version = "0.22.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=09627e450566f894956710a3fd923dc80462ae6d#09627e450566f894956710a3fd923dc80462ae6d"
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macros"
|
||||
version = "0.21.1"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=4818b15f3b7516555d39f5a41cb75970448bee4c#4818b15f3b7516555d39f5a41cb75970448bee4c"
|
||||
version = "0.22.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=09627e450566f894956710a3fd923dc80462ae6d#09627e450566f894956710a3fd923dc80462ae6d"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -3329,9 +3354,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.8"
|
||||
version = "0.6.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
|
||||
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -3417,9 +3442,9 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.15.0"
|
||||
version = "1.15.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
|
||||
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
||||
|
||||
[[package]]
|
||||
name = "snapbox"
|
||||
@@ -3501,9 +3526,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.101"
+version = "2.0.103"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
+checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3726,9 +3751,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "toml"
-version = "0.8.22"
+version = "0.8.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae"
+checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -3738,18 +3763,18 @@ dependencies = [

 [[package]]
 name = "toml_datetime"
-version = "0.6.9"
+version = "0.6.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3"
+checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
 dependencies = [
  "serde",
 ]

 [[package]]
 name = "toml_edit"
-version = "0.22.26"
+version = "0.22.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
+checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
 dependencies = [
  "indexmap",
  "serde",
@@ -3761,9 +3786,9 @@ dependencies = [

 [[package]]
 name = "toml_write"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076"
+checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"

 [[package]]
 name = "tracing"
@@ -3874,6 +3899,7 @@ dependencies = [
  "countme",
  "crossbeam",
+ "ctrlc",
  "dunce",
  "filetime",
  "indicatif",
  "insta",
@@ -3918,12 +3944,17 @@ name = "ty_project"
 version = "0.0.0"
 dependencies = [
  "anyhow",
+ "camino",
  "colored 3.0.0",
  "crossbeam",
+ "glob",
+ "globset",
  "insta",
  "notify",
  "ordermap",
  "pep440_rs",
  "rayon",
+ "regex",
+ "regex-automata 0.4.9",
  "ruff_cache",
  "ruff_db",
  "ruff_macros",
@@ -3950,11 +3981,13 @@ dependencies = [
  "anyhow",
  "bitflags 2.9.1",
+ "camino",
  "colored 3.0.0",
  "compact_str",
  "countme",
  "dir-test",
  "drop_bomb",
-"hashbrown 0.15.3",
+"glob",
+"hashbrown 0.15.4",
  "indexmap",
  "insta",
  "itertools 0.14.0",
@@ -3962,6 +3995,7 @@ dependencies = [
  "ordermap",
  "quickcheck",
  "quickcheck_macros",
+ "ruff_annotate_snippets",
  "ruff_db",
  "ruff_index",
  "ruff_macros",
@@ -3984,6 +4018,7 @@ dependencies = [
 "test-case",
 "thiserror 2.0.12",
 "tracing",
 "ty_python_semantic",
 "ty_test",
 "ty_vendored",
 ]
@@ -4003,6 +4038,7 @@ dependencies = [
 "ruff_source_file",
 "ruff_text_size",
 "rustc-hash 2.1.1",
 "salsa",
 "serde",
 "serde_json",
 "shellexpand",
@@ -4018,6 +4054,7 @@ name = "ty_test"
 version = "0.0.0"
 dependencies = [
 "anyhow",
 "bitflags 2.9.1",
 "camino",
 "colored 3.0.0",
 "insta",
@@ -4071,6 +4108,7 @@ dependencies = [
  "ty_ide",
  "ty_project",
  "ty_python_semantic",
+ "uuid",
  "wasm-bindgen",
  "wasm-bindgen-test",
 ]
@@ -4164,9 +4202,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"

 [[package]]
 name = "unicode-width"
-version = "0.2.0"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
+checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"

 [[package]]
 name = "unicode_names2"
@@ -4240,9 +4278,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

 [[package]]
 name = "uuid"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
+checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d"
 dependencies = [
  "getrandom 0.3.3",
  "js-sys",
@@ -4253,9 +4291,9 @@ dependencies = [

 [[package]]
 name = "uuid-macro-internal"
-version = "1.16.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72dcd78c4f979627a754f5522cea6e6a25e55139056535fe6e69c506cd64a862"
+checksum = "26b682e8c381995ea03130e381928e0e005b7c9eb483c6c8682f50e07b33c2b7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -51,6 +51,7 @@ aho-corasick = { version = "1.1.3" }
 anstream = { version = "0.6.18" }
 anstyle = { version = "1.0.10" }
 anyhow = { version = "1.0.80" }
+arc-swap = { version = "1.7.1" }
 assert_fs = { version = "1.1.0" }
 argfile = { version = "0.2.0" }
 bincode = { version = "2.0.0" }
@@ -126,10 +127,11 @@ quote = { version = "1.0.23" }
 rand = { version = "0.9.0" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
+regex-automata = { version = "0.4.9" }
 rustc-hash = { version = "2.0.0" }
 rustc-stable-hash = { version = "0.1.2" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "4818b15f3b7516555d39f5a41cb75970448bee4c" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "09627e450566f894956710a3fd923dc80462ae6d" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
@@ -179,7 +181,6 @@ uuid = { version = "1.6.1", features = [
 "v4",
 "fast-rng",
 "macro-diagnostics",
-"js",
 ] }
 walkdir = { version = "2.3.2" }
 wasm-bindgen = { version = "0.2.92" }
@@ -188,7 +189,7 @@ wild = { version = "2" }
 zip = { version = "0.6.6", default-features = false }

 [workspace.metadata.cargo-shear]
-ignored = ["getrandom", "ruff_options_metadata"]
+ignored = ["getrandom", "ruff_options_metadata", "uuid"]


 [workspace.lints.rust]
@@ -34,8 +34,7 @@ An extremely fast Python linter and code formatter, written in Rust.
 - 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
 - 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
   of popular Flake8 plugins, like flake8-bugbear
-- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
-  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
+- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/editors) for [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
 - 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

 Ruff aims to be orders of magnitude faster than alternative tools while integrating more
@@ -149,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.11.11/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.11.11/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.12.0/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.12.0/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.11.11
+  rev: v0.12.0
   hooks:
     # Run the linter.
     - id: ruff
@@ -4,6 +4,10 @@ extend-exclude = [
 "crates/ty_vendored/vendor/**/*",
 "**/resources/**/*",
 "**/snapshots/**/*",
+# Completion tests tend to have a lot of incomplete
+# words naturally. It's annoying to have to make all
+# of them actually words. So just ignore typos here.
+"crates/ty_ide/src/completion.rs",
 ]

 [default.extend-words]
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.11.11"
+version = "0.12.0"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
@@ -349,7 +349,6 @@ impl FileCache {
             .iter()
             .map(|msg| {
                 Message::diagnostic(
-                    msg.rule.into(),
                     msg.body.clone(),
                     msg.suggestion.clone(),
                     msg.range,
@@ -357,6 +356,7 @@ impl FileCache {
                     msg.parent,
                     file.clone(),
                     msg.noqa_offset,
+                    msg.rule,
                 )
             })
             .collect()
@@ -439,7 +439,10 @@ impl LintCacheData {

         let messages = messages
             .iter()
-            .filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
+            // Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
+            // this also serves to filter them out, but we shouldn't be caching files with syntax
+            // errors anyway.
+            .filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
             .map(|(rule, msg)| {
                 // Make sure that all message use the same source file.
                 assert_eq!(
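A note on the pattern in the hunk above: the cache no longer stores a `Rule` directly but re-derives it by parsing the message's kebab-case name, relying on the name round-tripping through `FromStr`. A self-contained sketch of that round trip — the `Rule` variants and names here are illustrative placeholders, not ruff's real registry:

```rust
use std::str::FromStr;

// Hypothetical stand-in for ruff's `Rule`: a tiny enum whose kebab-case
// name round-trips through `FromStr`, mirroring `msg.name().parse()`.
#[derive(Debug, PartialEq)]
enum Rule {
    UnusedImport,
    IoError,
}

impl FromStr for Rule {
    type Err = ();

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "unused-import" => Ok(Rule::UnusedImport),
            "io-error" => Ok(Rule::IoError),
            // Syntax errors carry no rule name, so they fail to parse here.
            _ => Err(()),
        }
    }
}

fn main() {
    let names = ["unused-import", "invalid-syntax"];
    // Messages whose name doesn't parse are filtered out, just like the
    // `filter_map` in the diff above.
    let rules: Vec<Rule> = names.iter().filter_map(|n| n.parse().ok()).collect();
    assert_eq!(rules, vec![Rule::UnusedImport]);
}
```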
@@ -12,7 +12,7 @@ use rayon::prelude::*;
 use rustc_hash::FxHashMap;

 use ruff_db::panic::catch_unwind;
-use ruff_diagnostics::Diagnostic;
+use ruff_linter::OldDiagnostic;
 use ruff_linter::message::Message;
 use ruff_linter::package::PackageRoot;
 use ruff_linter::registry::Rule;
@@ -131,8 +131,7 @@ pub(crate) fn check(

     Diagnostics::new(
         vec![Message::from_diagnostic(
-            Diagnostic::new(IOError { message }, TextRange::default()),
-            dummy,
+            OldDiagnostic::new(IOError { message }, TextRange::default(), &dummy),
             None,
         )],
         FxHashMap::default(),
@@ -6,7 +6,7 @@ use serde::ser::SerializeSeq;
 use serde::{Serialize, Serializer};
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};

 use crate::args::HelpFormat;
@@ -30,7 +30,7 @@ impl<'a> Explanation<'a> {
         let (linter, _) = Linter::parse_code(&code).unwrap();
         let fix = rule.fixable().to_string();
         Self {
-            name: rule.as_ref(),
+            name: rule.name().as_str(),
             code,
             linter: linter.name(),
             summary: rule.message_formats()[0],
@@ -44,7 +44,7 @@ impl<'a> Explanation<'a> {

 fn format_rule_text(rule: Rule) -> String {
     let mut output = String::new();
-    let _ = write!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
+    let _ = write!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
     output.push('\n');
     output.push('\n');
@@ -1,14 +1,7 @@
-use std::num::NonZeroUsize;
-
 use crate::ExitStatus;
 use anyhow::Result;
-use ruff_server::Server;

-pub(crate) fn run_server(
-    worker_threads: NonZeroUsize,
-    preview: Option<bool>,
-) -> Result<ExitStatus> {
-    let server = Server::new(worker_threads, preview)?;
-
-    server.run().map(|()| ExitStatus::Success)
+pub(crate) fn run_server(preview: Option<bool>) -> Result<ExitStatus> {
+    ruff_server::run(preview)?;
+    Ok(ExitStatus::Success)
 }
@@ -12,7 +12,7 @@ use colored::Colorize;
 use log::{debug, warn};
 use rustc_hash::FxHashMap;

-use ruff_diagnostics::Diagnostic;
+use ruff_linter::OldDiagnostic;
 use ruff_linter::codes::Rule;
 use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
 use ruff_linter::message::Message;
@@ -64,13 +64,13 @@ impl Diagnostics {
         let source_file = SourceFileBuilder::new(name, "").finish();
         Self::new(
             vec![Message::from_diagnostic(
-                Diagnostic::new(
+                OldDiagnostic::new(
                     IOError {
                         message: err.to_string(),
                     },
                     TextRange::default(),
+                    &source_file,
                 ),
-                source_file,
                 None,
             )],
             FxHashMap::default(),
@@ -165,9 +165,9 @@ impl AddAssign for FixMap {
                 continue;
             }
             let fixed_in_file = self.0.entry(filename).or_default();
-            for (rule, count) in fixed {
+            for (rule, name, count) in fixed.iter() {
                 if count > 0 {
-                    *fixed_in_file.entry(rule).or_default() += count;
+                    *fixed_in_file.entry(rule).or_default(name) += count;
                 }
             }
         }
@@ -235,7 +235,7 @@ pub(crate) fn lint_path(
             };
             let source_file =
                 SourceFileBuilder::new(path.to_string_lossy(), contents).finish();
-            lint_pyproject_toml(source_file, settings)
+            lint_pyproject_toml(&source_file, settings)
         } else {
             vec![]
         };
@@ -305,7 +305,7 @@ pub(crate) fn lint_path(
                 ParseSource::None,
             );
             let transformed = source_kind;
-            let fixed = FxHashMap::default();
+            let fixed = FixTable::default();
             (result, transformed, fixed)
         }
     } else {
@@ -319,7 +319,7 @@ pub(crate) fn lint_path(
             ParseSource::None,
         );
         let transformed = source_kind;
-        let fixed = FxHashMap::default();
+        let fixed = FixTable::default();
        (result, transformed, fixed)
    };

@@ -396,7 +396,7 @@ pub(crate) fn lint_stdin(
     }

     return Ok(Diagnostics {
-        messages: lint_pyproject_toml(source_file, &settings.linter),
+        messages: lint_pyproject_toml(&source_file, &settings.linter),
         fixed: FixMap::from_iter([(fs::relativize_path(path), FixTable::default())]),
         notebook_indexes: FxHashMap::default(),
     });
@@ -473,7 +473,7 @@ pub(crate) fn lint_stdin(
         }

         let transformed = source_kind;
-        let fixed = FxHashMap::default();
+        let fixed = FixTable::default();
         (result, transformed, fixed)
     }
 } else {
@@ -487,7 +487,7 @@ pub(crate) fn lint_stdin(
         ParseSource::None,
     );
     let transformed = source_kind;
-    let fixed = FxHashMap::default();
+    let fixed = FixTable::default();
     (result, transformed, fixed)
 };
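The `(rule, name, count)` triples above imply a `FixTable` that now tracks both a noqa code and a rule name per fix count, replacing the bare `FxHashMap`. A toy stand-in, only to illustrate the shape of the `iter()` and `counts()` accessors used here and in the printer below — the real type lives in `ruff_linter::linter`:

```rust
use std::collections::HashMap;

// Simplified model: one entry per code, carrying a name and a count.
#[derive(Default)]
struct FixTable(HashMap<&'static str, (&'static str, usize)>);

impl FixTable {
    fn add(&mut self, code: &'static str, name: &'static str) {
        self.0.entry(code).or_insert((name, 0)).1 += 1;
    }

    // Mirrors `table.iter()` yielding `(code, name, count)` triples.
    fn iter(&self) -> impl Iterator<Item = (&'static str, &'static str, usize)> + '_ {
        self.0.iter().map(|(code, (name, count))| (*code, *name, *count))
    }

    // Mirrors `table.counts()` used to sum totals.
    fn counts(&self) -> impl Iterator<Item = usize> + '_ {
        self.0.values().map(|(_, count)| *count)
    }
}

fn main() {
    let mut fixed = FixTable::default();
    fixed.add("F401", "unused-import");
    fixed.add("F401", "unused-import");
    let total: usize = fixed.counts().sum();
    assert_eq!(total, 2);
    for (code, name, count) in fixed.iter() {
        println!("{count} × {code} ({name})");
    }
}
```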
@@ -2,7 +2,6 @@

 use std::fs::File;
 use std::io::{self, BufWriter, Write, stdout};
-use std::num::NonZeroUsize;
 use std::path::{Path, PathBuf};
 use std::process::ExitCode;
 use std::sync::mpsc::channel;
@@ -223,13 +222,7 @@ fn analyze_graph(
 }

 fn server(args: ServerCommand) -> Result<ExitStatus> {
-    let four = NonZeroUsize::new(4).unwrap();
-
-    // by default, we set the number of worker threads to `num_cpus`, with a maximum of 4.
-    let worker_threads = std::thread::available_parallelism()
-        .unwrap_or(four)
-        .min(four);
-    commands::server::run_server(worker_threads, args.resolve_preview())
+    commands::server::run_server(args.resolve_preview())
 }

 pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
@@ -7,6 +7,7 @@ use bitflags::bitflags;
 use colored::Colorize;
 use itertools::{Itertools, iterate};
+use ruff_linter::codes::NoqaCode;
 use ruff_linter::linter::FixTable;
 use serde::Serialize;

 use ruff_linter::fs::relativize_path;
@@ -80,7 +81,7 @@ impl Printer {
         let fixed = diagnostics
             .fixed
             .values()
-            .flat_map(std::collections::HashMap::values)
+            .flat_map(FixTable::counts)
             .sum::<usize>();

         if self.flags.intersects(Flags::SHOW_VIOLATIONS) {
@@ -302,7 +303,7 @@ impl Printer {
         let statistics: Vec<ExpandedStatistics> = diagnostics
             .messages
             .iter()
-            .map(|message| (message.to_noqa_code(), message))
+            .map(|message| (message.noqa_code(), message))
             .sorted_by_key(|(code, message)| (*code, message.fixable()))
             .fold(
                 vec![],
@@ -472,13 +473,13 @@ fn show_fix_status(fix_mode: flags::FixMode, fixables: Option<&FixableStatistics
 fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
     let total = fixed
         .values()
-        .map(|table| table.values().sum::<usize>())
+        .map(|table| table.counts().sum::<usize>())
         .sum::<usize>();
     assert!(total > 0);
     let num_digits = num_digits(
-        *fixed
+        fixed
             .values()
-            .filter_map(|table| table.values().max())
+            .filter_map(|table| table.counts().max())
             .max()
             .unwrap(),
     );
@@ -498,12 +499,11 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
             relativize_path(filename).bold(),
             ":".cyan()
         )?;
-        for (rule, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
+        for (code, name, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
             writeln!(
                 writer,
-                " {count:>num_digits$} × {} ({})",
-                rule.noqa_code().to_string().red().bold(),
-                rule.as_ref(),
+                " {count:>num_digits$} × {code} ({name})",
+                code = code.to_string().red().bold(),
             )?;
         }
     }
@@ -566,7 +566,7 @@ fn venv() -> Result<()> {
 ----- stderr -----
 ruff failed
   Cause: Invalid search path settings
-  Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` could not be canonicalized
+  Cause: Failed to discover the site-packages directory: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
 ");
 });
@@ -5436,14 +5436,15 @@ match 2:
         print("it's one")
 "#
     ),
-    @r"
-    success: true
-    exit_code: 0
+    @r###"
+    success: false
+    exit_code: 1
     ----- stdout -----
-    All checks passed!
+    test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
+    Found 1 error.

     ----- stderr -----
-    "
+    "###
     );

     // syntax error on 3.9 with preview
@@ -78,7 +78,7 @@ fn setup_tomllib_case() -> Case {

     let src_root = SystemPath::new("/src");
     let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
-    metadata.apply_cli_options(Options {
+    metadata.apply_options(Options {
         environment: Some(EnvironmentOptions {
             python_version: Some(RangedValue::cli(PythonVersion::PY312)),
             ..EnvironmentOptions::default()
@@ -131,7 +131,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
     fn setup() -> Case {
         let case = setup_tomllib_case();

-        let result: Vec<_> = case.db.check().unwrap();
+        let result: Vec<_> = case.db.check();

         assert_diagnostics(&case.db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);

@@ -159,7 +159,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
             None,
         );

-        let result = db.check().unwrap();
+        let result = db.check();

         assert_eq!(result.len(), EXPECTED_TOMLLIB_DIAGNOSTICS.len());
     }
@@ -179,7 +179,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
         setup_tomllib_case,
         |case| {
             let Case { db, .. } = case;
-            let result: Vec<_> = db.check().unwrap();
+            let result: Vec<_> = db.check();

             assert_diagnostics(db, &result, EXPECTED_TOMLLIB_DIAGNOSTICS);
         },
@@ -224,7 +224,7 @@ fn setup_micro_case(code: &str) -> Case {

     let src_root = SystemPath::new("/src");
     let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
-    metadata.apply_cli_options(Options {
+    metadata.apply_options(Options {
         environment: Some(EnvironmentOptions {
             python_version: Some(RangedValue::cli(PythonVersion::PY312)),
             ..EnvironmentOptions::default()
@@ -293,7 +293,7 @@ fn benchmark_many_string_assignments(criterion: &mut Criterion) {
         },
         |case| {
             let Case { db, .. } = case;
-            let result = db.check().unwrap();
+            let result = db.check();
             assert_eq!(result.len(), 0);
         },
         BatchSize::SmallInput,
@@ -339,7 +339,7 @@ fn benchmark_many_tuple_assignments(criterion: &mut Criterion) {
         },
         |case| {
             let Case { db, .. } = case;
-            let result = db.check().unwrap();
+            let result = db.check();
             assert_eq!(result.len(), 0);
         },
         BatchSize::SmallInput,
@@ -21,6 +21,7 @@ ruff_source_file = { workspace = true }
 ruff_text_size = { workspace = true }

 anstyle = { workspace = true }
+arc-swap = { workspace = true }
 camino = { workspace = true }
 countme = { workspace = true }
 dashmap = { workspace = true }
@@ -665,6 +665,76 @@ pub enum DiagnosticId {

     /// No rule with the given name exists.
     UnknownRule,
+
+    /// A glob pattern doesn't follow the expected syntax.
+    InvalidGlob,
+
+    /// An `include` glob without any patterns.
+    ///
+    /// ## Why is this bad?
+    /// An `include` glob without any patterns won't match any files. This is probably a mistake and
+    /// either the `include` should be removed or a pattern should be added.
+    ///
+    /// ## Example
+    /// ```toml
+    /// [src]
+    /// include = []
+    /// ```
+    ///
+    /// Use instead:
+    ///
+    /// ```toml
+    /// [src]
+    /// include = ["src"]
+    /// ```
+    ///
+    /// or remove the `include` option.
+    EmptyInclude,
+
+    /// An override configuration is unnecessary because it applies to all files.
+    ///
+    /// ## Why is this bad?
+    /// An overrides section that applies to all files is probably a mistake and can be rolled-up into the root configuration.
+    ///
+    /// ## Example
+    /// ```toml
+    /// [[overrides]]
+    /// [overrides.rules]
+    /// unused-reference = "ignore"
+    /// ```
+    ///
+    /// Use instead:
+    ///
+    /// ```toml
+    /// [rules]
+    /// unused-reference = "ignore"
+    /// ```
+    ///
+    /// or
+    ///
+    /// ```toml
+    /// [[overrides]]
+    /// include = ["test"]
+    ///
+    /// [overrides.rules]
+    /// unused-reference = "ignore"
+    /// ```
+    UnnecessaryOverridesSection,
+
+    /// An `overrides` section in the configuration that doesn't contain any overrides.
+    ///
+    /// ## Why is this bad?
+    /// An `overrides` section without any configuration overrides is probably a mistake.
+    /// It is either a leftover after removing overrides, or a user forgot to add any overrides,
+    /// or used an incorrect syntax to do so (e.g. used `rules` instead of `overrides.rules`).
+    ///
+    /// ## Example
+    /// ```toml
+    /// [[overrides]]
+    /// include = ["test"]
+    /// # no `[overrides.rules]`
+    /// ```
+    UselessOverridesSection,
 }

 impl DiagnosticId {
@@ -699,6 +769,10 @@ impl DiagnosticId {
             DiagnosticId::Lint(name) => name.as_str(),
             DiagnosticId::RevealedType => "revealed-type",
             DiagnosticId::UnknownRule => "unknown-rule",
+            DiagnosticId::InvalidGlob => "invalid-glob",
+            DiagnosticId::EmptyInclude => "empty-include",
+            DiagnosticId::UnnecessaryOverridesSection => "unnecessary-overrides-section",
+            DiagnosticId::UselessOverridesSection => "useless-overrides-section",
         }
     }
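For readers skimming the new variants: the `as_str` arms added above follow the usual enum-to-kebab-case convention, which is what users see in diagnostic output. A reduced sketch of that mapping, with the surrounding enum simplified to just the new variants:

```rust
// Simplified stand-in for the enum in the hunk above; only the new
// variants and the string mapping are reproduced.
#[derive(Debug, Clone, Copy, PartialEq)]
enum DiagnosticId {
    InvalidGlob,
    EmptyInclude,
    UnnecessaryOverridesSection,
    UselessOverridesSection,
}

impl DiagnosticId {
    fn as_str(self) -> &'static str {
        match self {
            DiagnosticId::InvalidGlob => "invalid-glob",
            DiagnosticId::EmptyInclude => "empty-include",
            DiagnosticId::UnnecessaryOverridesSection => "unnecessary-overrides-section",
            DiagnosticId::UselessOverridesSection => "useless-overrides-section",
        }
    }
}

fn main() {
    // The kebab-case string is what a misconfigured `include = []` reports.
    assert_eq!(DiagnosticId::EmptyInclude.as_str(), "empty-include");
}
```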
@@ -265,7 +265,7 @@ impl<'a> ResolvedDiagnostic<'a> {
             .get();
         // The boundary case here is when `prev_context_ends`
         // is exactly one less than `this_context_begins`. In
-        // that case, the context windows are adajcent and we
+        // that case, the context windows are adjacent and we
         // should fall through below to add this annotation to
         // the existing snippet.
         if this_context_begins.saturating_sub(prev_context_ends) > 1 {
@@ -754,7 +754,7 @@ kangaroo
 static FRUITS: &str = "\
 apple
 banana
-cantelope
+cantaloupe
 lime
 orange
 pear
@@ -1376,8 +1376,8 @@ watermelon
   |
 1 | apple
 2 | banana
-3 | cantelope
-  | ^^^^^^^^^
+3 | cantaloupe
+  | ^^^^^^^^^^
 4 | lime
 5 | orange
   |
@@ -1479,8 +1479,8 @@ watermelon
   |
 1 | apple
 2 | banana
-3 | cantelope
-  | ^^^^^^^^^
+3 | cantaloupe
+  | ^^^^^^^^^^
 4 | lime
 5 | orange
   |
@@ -1515,8 +1515,8 @@ watermelon
   |
 1 | apple
 2 | banana
-3 | cantelope
-  | ^^^^^^^^^
+3 | cantaloupe
+  | ^^^^^^^^^^
 4 | lime
 5 | orange
   |
@@ -1562,8 +1562,8 @@ watermelon
   |
 1 | apple
 2 | banana
-3 | cantelope
-  | ^^^^^^^^^
+3 | cantaloupe
+  | ^^^^^^^^^^
 4 | lime
 5 | orange
   |
@@ -2040,7 +2040,7 @@ watermelon
 1 | apple
   | ^^^^^ primary
 2 | banana
-3 | cantelope
+3 | cantaloupe
   |
 ::: animals:1:1
   |
@@ -59,6 +59,13 @@ pub fn max_parallelism() -> NonZeroUsize {
     })
 }

+/// Trait for types that can provide Rust documentation.
+///
+/// Use `derive(RustDoc)` to automatically implement this trait for types that have a static string documentation.
+pub trait RustDoc {
+    fn rust_doc() -> &'static str;
+}
+
 #[cfg(test)]
 mod tests {
     use std::sync::{Arc, Mutex};
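A minimal sketch of what implementing the new trait looks like by hand; the `derive(RustDoc)` mentioned in the doc comment would presumably generate an equivalent impl. The `UnusedImport` type here is a made-up example:

```rust
// Re-declared locally so the sketch is self-contained.
trait RustDoc {
    fn rust_doc() -> &'static str;
}

/// Lints Python source for unused imports.
struct UnusedImport;

impl RustDoc for UnusedImport {
    // Returns the type's documentation as a static string.
    fn rust_doc() -> &'static str {
        "Lints Python source for unused imports."
    }
}

fn main() {
    assert_eq!(UnusedImport::rust_doc(), "Lints Python source for unused imports.");
}
```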
@@ -1,3 +1,4 @@
+use std::any::Any;
 use std::backtrace::BacktraceStatus;
 use std::cell::Cell;
 use std::panic::Location;
@@ -24,17 +25,25 @@ impl Payload {
             None
         }
     }
+
+    pub fn downcast_ref<R: Any>(&self) -> Option<&R> {
+        self.0.downcast_ref::<R>()
+    }
 }

 impl std::fmt::Display for PanicError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        writeln!(f, "panicked at")?;
+        write!(f, "panicked at")?;
         if let Some(location) = &self.location {
             write!(f, " {location}")?;
         }
         if let Some(payload) = self.payload.as_str() {
             write!(f, ":\n{payload}")?;
         }
+        if let Some(query_trace) = self.salsa_backtrace.as_ref() {
+            let _ = writeln!(f, "{query_trace}");
+        }

         if let Some(backtrace) = &self.backtrace {
             match backtrace.status() {
@@ -49,6 +58,7 @@ impl std::fmt::Display for PanicError {
                 _ => {}
             }
         }

         Ok(())
     }
 }
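`downcast_ref` mirrors `dyn Any` downcasting, which is what makes a captured panic payload recoverable as a typed value. A hedged, self-contained sketch with a simplified `Payload` — the real type in this crate also tracks location, backtraces, and the new salsa query trace:

```rust
use std::any::Any;
use std::panic;

// Simplified stand-in wrapping the `Box<dyn Any + Send>` that
// `std::panic::catch_unwind` returns on the error path.
struct Payload(Box<dyn Any + Send>);

impl Payload {
    fn downcast_ref<R: Any>(&self) -> Option<&R> {
        self.0.downcast_ref::<R>()
    }
}

fn main() {
    let result = panic::catch_unwind(|| panic!("boom"));
    let payload = Payload(result.unwrap_err());
    // `panic!` with a string literal stores a `&'static str` payload.
    if let Some(message) = payload.downcast_ref::<&str>() {
        println!("caught: {message}");
    }
}
```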
@@ -1,8 +1,8 @@
 use std::fmt::Formatter;
-use std::ops::Deref;
 use std::sync::Arc;

-use ruff_python_ast::ModModule;
+use arc_swap::ArcSwapOption;
+use ruff_python_ast::{AnyRootNodeRef, ModModule, NodeIndex};
 use ruff_python_parser::{ParseOptions, Parsed, parse_unchecked};

 use crate::Db;
@@ -18,48 +18,86 @@ use crate::source::source_text;
 /// The query is only cached when the [`source_text()`] hasn't changed. This is because
 /// comparing two ASTs is a non-trivial operation and every offset change is directly
 /// reflected in the changed AST offsets.
-/// The other reason is that Ruff's AST doesn't implement `Eq` which Sala requires
+/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
 /// for determining if a query result is unchanged.
 #[salsa::tracked(returns(ref), no_eq)]
 pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
     let _span = tracing::trace_span!("parsed_module", ?file).entered();

+    let parsed = parsed_module_impl(db, file);
+
+    ParsedModule::new(file, parsed)
+}
+
+pub fn parsed_module_impl(db: &dyn Db, file: File) -> Parsed<ModModule> {
     let source = source_text(db, file);
     let ty = file.source_type(db);

     let target_version = db.python_version();
     let options = ParseOptions::from(ty).with_target_version(target_version);
-    let parsed = parse_unchecked(&source, options)
+    parse_unchecked(&source, options)
         .try_into_module()
-        .expect("PySourceType always parses into a module");
-
-    ParsedModule::new(parsed)
+        .expect("PySourceType always parses into a module")
 }

-/// Cheap cloneable wrapper around the parsed module.
+/// A wrapper around a parsed module.
+///
+/// This type manages instances of the module AST. A particular instance of the AST
+/// is represented with the [`ParsedModuleRef`] type.
 #[derive(Clone)]
 pub struct ParsedModule {
-    inner: Arc<Parsed<ModModule>>,
+    file: File,
+    inner: Arc<ArcSwapOption<indexed::IndexedModule>>,
 }

 impl ParsedModule {
-    pub fn new(parsed: Parsed<ModModule>) -> Self {
+    pub fn new(file: File, parsed: Parsed<ModModule>) -> Self {
         Self {
-            inner: Arc::new(parsed),
+            file,
+            inner: Arc::new(ArcSwapOption::new(Some(indexed::IndexedModule::new(
+                parsed,
+            )))),
         }
     }
+
+    /// Loads a reference to the parsed module.
+    ///
+    /// Note that holding on to the reference will prevent garbage collection
+    /// of the AST. This method will reparse the module if it has been collected.
+    pub fn load(&self, db: &dyn Db) -> ParsedModuleRef {
+        let parsed = match self.inner.load_full() {
+            Some(parsed) => parsed,
+            None => {
+                // Re-parse the file.
+                let parsed = indexed::IndexedModule::new(parsed_module_impl(db, self.file));
+                tracing::debug!(
+                    "File `{}` was reparsed after being collected in the current Salsa revision",
+                    self.file.path(db)
+                );
+
+                self.inner.store(Some(parsed.clone()));
+                parsed
+            }
+        };
+
+        ParsedModuleRef {
+            module: self.clone(),
+            indexed: parsed,
+        }
+    }

-    /// Consumes `self` and returns the Arc storing the parsed module.
-    pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
-        self.inner
+    /// Clear the parsed module, dropping the AST once all references to it are dropped.
+    pub fn clear(&self) {
+        self.inner.store(None);
     }
-}

-impl Deref for ParsedModule {
-    type Target = Parsed<ModModule>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.inner
+    /// Returns a pointer for this [`ParsedModule`].
+    ///
+    /// The pointer uniquely identifies the module within the current Salsa revision,
+    /// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
+    pub fn as_ptr(&self) -> *const () {
+        // Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
+        // `Arc` within the `ArcSwap` may change.
+        Arc::as_ptr(&self.inner).cast()
     }
 }
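The `load` method above is the heart of the new AST garbage-collection scheme: an `ArcSwapOption` slot that can be emptied and lazily refilled on the next access. A reduced model of the same load-or-recompute pattern, using a stand-in `parse` instead of the Python parser (this assumes the `arc-swap` crate added to the workspace earlier in this diff):

```rust
use std::sync::Arc;

use arc_swap::ArcSwapOption;

// The slot can be emptied (`clear`) to release memory; the next `load`
// recomputes the value and re-publishes it for later callers.
struct Cached {
    source: String,
    slot: ArcSwapOption<Vec<String>>,
}

impl Cached {
    fn new(source: String) -> Self {
        let parsed = Arc::new(parse(&source));
        Self { source, slot: ArcSwapOption::new(Some(parsed)) }
    }

    fn load(&self) -> Arc<Vec<String>> {
        match self.slot.load_full() {
            Some(value) => value,
            None => {
                // The cached value was collected; recompute and re-publish it.
                let value = Arc::new(parse(&self.source));
                self.slot.store(Some(value.clone()));
                value
            }
        }
    }

    fn clear(&self) {
        self.slot.store(None);
    }
}

// Stand-in for parsing: split the source into tokens.
fn parse(source: &str) -> Vec<String> {
    source.split_whitespace().map(str::to_owned).collect()
}

fn main() {
    let cached = Cached::new("import os".to_owned());
    let first = cached.load();
    cached.clear(); // drop the cached value; `first` keeps its Arc alive
    let second = cached.load(); // transparently recomputed
    assert_eq!(*first, *second);
}
```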
@@ -77,6 +115,273 @@ impl PartialEq for ParsedModule {

 impl Eq for ParsedModule {}

+/// Cheap cloneable wrapper around an instance of a module AST.
+#[derive(Clone)]
+pub struct ParsedModuleRef {
+    module: ParsedModule,
+    indexed: Arc<indexed::IndexedModule>,
+}
+
+impl ParsedModuleRef {
+    /// Returns a reference to the [`ParsedModule`] that this instance was loaded from.
+    pub fn module(&self) -> &ParsedModule {
+        &self.module
+    }
+
+    /// Returns a reference to the AST node at the given index.
+    pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> {
+        self.indexed.get_by_index(index)
+    }
+}
+
+impl std::ops::Deref for ParsedModuleRef {
+    type Target = Parsed<ModModule>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.indexed.parsed
+    }
+}
+
+mod indexed {
+    use std::sync::Arc;
+
+    use ruff_python_ast::visitor::source_order::*;
+    use ruff_python_ast::*;
+    use ruff_python_parser::Parsed;
+
+    /// A wrapper around the AST that allows access to AST nodes by index.
+    #[derive(Debug)]
+    pub struct IndexedModule {
+        index: Box<[AnyRootNodeRef<'static>]>,
+        pub parsed: Parsed<ModModule>,
+    }
+
+    impl IndexedModule {
+        /// Create a new [`IndexedModule`] from the given AST.
+        #[allow(clippy::unnecessary_cast)]
+        pub fn new(parsed: Parsed<ModModule>) -> Arc<Self> {
+            let mut visitor = Visitor {
+                nodes: Vec::new(),
+                index: 0,
+            };
+
+            let mut inner = Arc::new(IndexedModule {
+                parsed,
+                index: Box::new([]),
+            });
+
+            AnyNodeRef::from(inner.parsed.syntax()).visit_source_order(&mut visitor);
+
+            let index: Box<[AnyRootNodeRef<'_>]> = visitor.nodes.into_boxed_slice();
+
+            // SAFETY: We cast from `Box<[AnyRootNodeRef<'_>]>` to `Box<[AnyRootNodeRef<'static>]>`,
+            // faking the 'static lifetime to create the self-referential struct. The node references
+            // are into the `Arc<Parsed<ModModule>>`, so are valid for as long as the `IndexedModule`
+            // is alive. We make sure to restore the correct lifetime in `get_by_index`.
+            //
+            // Note that we can never move the data within the `Arc` after this point.
+            Arc::get_mut(&mut inner).unwrap().index =
+                unsafe { Box::from_raw(Box::into_raw(index) as *mut [AnyRootNodeRef<'static>]) };
+
+            inner
+        }
+
+        /// Returns the node at the given index.
+        pub fn get_by_index<'ast>(&'ast self, index: NodeIndex) -> AnyRootNodeRef<'ast> {
+            // Note that this method restores the correct lifetime: the nodes are valid for as
+            // long as the reference to `IndexedModule` is alive.
+            self.index[index.as_usize()]
+        }
+    }
+
+    /// A visitor that collects nodes in source order.
+    pub struct Visitor<'a> {
+        pub index: u32,
+        pub nodes: Vec<AnyRootNodeRef<'a>>,
+    }
+
+    impl<'a> Visitor<'a> {
+        fn visit_node<T>(&mut self, node: &'a T)
+        where
+            T: HasNodeIndex + std::fmt::Debug,
+            AnyRootNodeRef<'a>: From<&'a T>,
+        {
+            node.node_index().set(self.index);
+            self.nodes.push(AnyRootNodeRef::from(node));
+            self.index += 1;
+        }
+    }
+
+    impl<'a> SourceOrderVisitor<'a> for Visitor<'a> {
+        #[inline]
+        fn visit_mod(&mut self, module: &'a Mod) {
+            self.visit_node(module);
+            walk_module(self, module);
+        }
+
+        #[inline]
+        fn visit_stmt(&mut self, stmt: &'a Stmt) {
+            self.visit_node(stmt);
+            walk_stmt(self, stmt);
+        }
+
+        #[inline]
+        fn visit_annotation(&mut self, expr: &'a Expr) {
+            self.visit_node(expr);
+            walk_annotation(self, expr);
+        }
+
+        #[inline]
+        fn visit_expr(&mut self, expr: &'a Expr) {
+            self.visit_node(expr);
+            walk_expr(self, expr);
+        }
+
+        #[inline]
+        fn visit_decorator(&mut self, decorator: &'a Decorator) {
+            self.visit_node(decorator);
+            walk_decorator(self, decorator);
+        }
+
+        #[inline]
+        fn visit_comprehension(&mut self, comprehension: &'a Comprehension) {
+            self.visit_node(comprehension);
+            walk_comprehension(self, comprehension);
+        }
+
+        #[inline]
+        fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
+            self.visit_node(except_handler);
+            walk_except_handler(self, except_handler);
+        }
+
+        #[inline]
+        fn visit_arguments(&mut self, arguments: &'a Arguments) {
+            self.visit_node(arguments);
+            walk_arguments(self, arguments);
+        }
+
+        #[inline]
+        fn visit_parameters(&mut self, parameters: &'a Parameters) {
+            self.visit_node(parameters);
+            walk_parameters(self, parameters);
+        }
+
+        #[inline]
+        fn visit_parameter(&mut self, arg: &'a Parameter) {
+            self.visit_node(arg);
+            walk_parameter(self, arg);
+        }
+
+        fn visit_parameter_with_default(
+            &mut self,
+            parameter_with_default: &'a ParameterWithDefault,
+        ) {
+            self.visit_node(parameter_with_default);
+            walk_parameter_with_default(self, parameter_with_default);
+        }
+
+        #[inline]
+        fn visit_keyword(&mut self, keyword: &'a Keyword) {
+            self.visit_node(keyword);
+            walk_keyword(self, keyword);
+        }
+
+        #[inline]
+        fn visit_alias(&mut self, alias: &'a Alias) {
+            self.visit_node(alias);
+            walk_alias(self, alias);
+        }
+
+        #[inline]
+        fn visit_with_item(&mut self, with_item: &'a WithItem) {
+            self.visit_node(with_item);
+            walk_with_item(self, with_item);
+        }
+
+        #[inline]
+        fn visit_type_params(&mut self, type_params: &'a TypeParams) {
+            self.visit_node(type_params);
+            walk_type_params(self, type_params);
+        }
+
+        #[inline]
+        fn visit_type_param(&mut self, type_param: &'a TypeParam) {
+            self.visit_node(type_param);
+            walk_type_param(self, type_param);
+        }
+
+        #[inline]
+        fn visit_match_case(&mut self, match_case: &'a MatchCase) {
+            self.visit_node(match_case);
+            walk_match_case(self, match_case);
+        }
+
+        #[inline]
+        fn visit_pattern(&mut self, pattern: &'a Pattern) {
+            self.visit_node(pattern);
+            walk_pattern(self, pattern);
+        }
+
+        #[inline]
+        fn visit_pattern_arguments(&mut self, pattern_arguments: &'a PatternArguments) {
+            self.visit_node(pattern_arguments);
+            walk_pattern_arguments(self, pattern_arguments);
+        }
+
+        #[inline]
+        fn visit_pattern_keyword(&mut self, pattern_keyword: &'a PatternKeyword) {
+            self.visit_node(pattern_keyword);
+            walk_pattern_keyword(self, pattern_keyword);
+        }
+
+        #[inline]
+        fn visit_elif_else_clause(&mut self, elif_else_clause: &'a ElifElseClause) {
+            self.visit_node(elif_else_clause);
+            walk_elif_else_clause(self, elif_else_clause);
+        }
+
+        #[inline]
+        fn visit_f_string(&mut self, f_string: &'a FString) {
+            self.visit_node(f_string);
+            walk_f_string(self, f_string);
+        }
+
+        #[inline]
+        fn visit_interpolated_string_element(
+            &mut self,
+            interpolated_string_element: &'a InterpolatedStringElement,
+        ) {
+            self.visit_node(interpolated_string_element);
+            walk_interpolated_string_element(self, interpolated_string_element);
+        }
+
+        #[inline]
+        fn visit_t_string(&mut self, t_string: &'a TString) {
+            self.visit_node(t_string);
+            walk_t_string(self, t_string);
+        }
+
+        #[inline]
+        fn visit_string_literal(&mut self, string_literal: &'a StringLiteral) {
+            self.visit_node(string_literal);
+            walk_string_literal(self, string_literal);
+        }
+
+        #[inline]
+        fn visit_bytes_literal(&mut self, bytes_literal: &'a BytesLiteral) {
+            self.visit_node(bytes_literal);
+            walk_bytes_literal(self, bytes_literal);
+        }
+
+        #[inline]
+        fn visit_identifier(&mut self, identifier: &'a Identifier) {
+            self.visit_node(identifier);
+            walk_identifier(self, identifier);
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::Db;
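Setting the `unsafe` self-referential storage aside, the core idea of `IndexedModule` is simple: number every node during one source-order walk, then serve O(1) lookups by that number. A safe, self-contained miniature of the same indexing idea over a toy tree (not ruff's AST, and without the lifetime-erasure trick the real code needs to keep the index inside the same `Arc` as the tree):

```rust
#[derive(Debug)]
enum Node {
    Leaf(&'static str),
    Branch(Vec<Node>),
}

// Pre-order traversal: a node's index is its position in visit order,
// just like the `Visitor` above assigns `node_index` values.
fn assign_indices<'a>(node: &'a Node, table: &mut Vec<&'a Node>) {
    table.push(node);
    if let Node::Branch(children) = node {
        for child in children {
            assign_indices(child, table);
        }
    }
}

fn main() {
    let tree = Node::Branch(vec![
        Node::Leaf("a"),
        Node::Branch(vec![Node::Leaf("b")]),
    ]);
    let mut table = Vec::new();
    assign_indices(&tree, &mut table);
    // root = 0, leaf "a" = 1, inner branch = 2, leaf "b" = 3.
    println!("{:?}", table[3]); // Leaf("b")
}
```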
@@ -98,7 +403,7 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        let parsed = parsed_module(&db, file);
+        let parsed = parsed_module(&db, file).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -114,7 +419,7 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        let parsed = parsed_module(&db, file);
+        let parsed = parsed_module(&db, file).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -130,7 +435,7 @@ mod tests {

         let virtual_file = db.files().virtual_file(&db, path);

-        let parsed = parsed_module(&db, virtual_file.file());
+        let parsed = parsed_module(&db, virtual_file.file()).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -146,7 +451,7 @@ mod tests {

         let virtual_file = db.files().virtual_file(&db, path);

-        let parsed = parsed_module(&db, virtual_file.file());
+        let parsed = parsed_module(&db, virtual_file.file()).load(&db);

         assert!(parsed.has_valid_syntax());

@@ -177,7 +482,7 @@ else:

         let file = vendored_path_to_file(&db, VendoredPath::new("path.pyi")).unwrap();

-        let parsed = parsed_module(&db, file);
+        let parsed = parsed_module(&db, file).load(&db);

         assert!(parsed.has_valid_syntax());
     }
@@ -171,6 +171,21 @@ pub trait System: Debug {
         PatternError,
     >;

+    /// Fetches the environment variable `key` from the current process.
+    ///
+    /// # Errors
+    ///
+    /// Returns [`std::env::VarError::NotPresent`] if:
+    /// - The variable is not set.
+    /// - The variable's name contains an equal sign or NUL (`'='` or `'\0'`).
+    ///
+    /// Returns [`std::env::VarError::NotUnicode`] if the variable's value is not valid
+    /// Unicode.
+    fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
+        let _ = name;
+        Err(std::env::VarError::NotPresent)
+    }
+
     fn as_any(&self) -> &dyn std::any::Any;

     fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
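The default body makes `env_var` opt-in: systems that don't override it behave as if no variable is set, while `OsSystem` (next hunk) defers to the process environment. A sketch of a test system with canned values, pared down to just this one method rather than ruff_db's full `System` trait:

```rust
use std::collections::HashMap;
use std::env::VarError;

// Pared-down stand-in for the trait in the hunk above.
trait System {
    fn env_var(&self, name: &str) -> Result<String, VarError> {
        let _ = name;
        Err(VarError::NotPresent) // default: behave as if nothing is set
    }
}

struct TestSystem {
    vars: HashMap<String, String>,
}

impl System for TestSystem {
    fn env_var(&self, name: &str) -> Result<String, VarError> {
        self.vars.get(name).cloned().ok_or(VarError::NotPresent)
    }
}

struct OsSystem;

impl System for OsSystem {
    fn env_var(&self, name: &str) -> Result<String, VarError> {
        std::env::var(name) // defer to the real process environment
    }
}

fn main() {
    let system = TestSystem {
        vars: HashMap::from([("VIRTUAL_ENV".into(), "/tmp/venv".into())]),
    };
    assert_eq!(system.env_var("VIRTUAL_ENV").unwrap(), "/tmp/venv");
    assert!(system.env_var("MISSING").is_err());
}
```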
@@ -214,6 +214,10 @@ impl System for OsSystem {
             })
         })))
     }
+
+    fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
+        std::env::var(name)
+    }
 }

 impl OsSystem {
@@ -45,6 +45,30 @@ impl SystemPath {
         SystemPath::from_std_path(dunce::simplified(self.as_std_path())).unwrap()
     }

+    /// Returns `true` if the `SystemPath` is absolute, i.e., if it is independent of
+    /// the current directory.
+    ///
+    /// * On Unix, a path is absolute if it starts with the root, so
+    ///   `is_absolute` and [`has_root`] are equivalent.
+    ///
+    /// * On Windows, a path is absolute if it has a prefix and starts with the
+    ///   root: `c:\windows` is absolute, while `c:temp` and `\temp` are not.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use ruff_db::system::SystemPath;
+    ///
+    /// assert!(!SystemPath::new("foo.txt").is_absolute());
+    /// ```
+    ///
+    /// [`has_root`]: Utf8Path::has_root
+    #[inline]
+    #[must_use]
+    pub fn is_absolute(&self) -> bool {
+        self.0.is_absolute()
+    }
+
     /// Extracts the file extension, if possible.
     ///
     /// The extension is:
@@ -538,6 +562,10 @@ impl SystemPathBuf {
         self.0.into_std_path_buf()
     }

+    pub fn into_string(self) -> String {
+        self.0.into_string()
+    }
+
     #[inline]
     pub fn as_path(&self) -> &SystemPath {
         SystemPath::new(&self.0)
@@ -596,6 +624,13 @@ impl AsRef<SystemPath> for Utf8PathBuf {
     }
 }

+impl AsRef<SystemPath> for camino::Utf8Component<'_> {
+    #[inline]
+    fn as_ref(&self) -> &SystemPath {
+        SystemPath::new(self.as_str())
+    }
+}
+
 impl AsRef<SystemPath> for str {
     #[inline]
     fn as_ref(&self) -> &SystemPath {
@@ -626,6 +661,22 @@ impl Deref for SystemPathBuf {
     }
 }

+impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
+    fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
+        let mut buf = SystemPathBuf::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
+impl<P: AsRef<SystemPath>> Extend<P> for SystemPathBuf {
+    fn extend<I: IntoIterator<Item = P>>(&mut self, iter: I) {
+        for path in iter {
+            self.push(path);
+        }
+    }
+}
+
 impl std::fmt::Debug for SystemPath {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         self.0.fmt(f)
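Assuming the `FromIterator`/`Extend` impls above, a `SystemPathBuf` can now be collected straight from components, much as `std::path::PathBuf` can. A usage sketch; the `assert_eq!` presumes `SystemPath` implements `PartialEq`, which this hunk doesn't show:

```rust
use ruff_db::system::{SystemPath, SystemPathBuf};

fn main() {
    // `&str: AsRef<SystemPath>` (shown above), so components collect directly;
    // FromIterator builds the buffer by repeated `push`.
    let path: SystemPathBuf = ["src", "lib", "parsed.rs"].iter().collect();
    assert_eq!(path.as_path(), SystemPath::new("src/lib/parsed.rs"));
}
```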
@@ -13,12 +13,12 @@ pub fn assert_function_query_was_not_run<Db, Q, QDb, I, R>(
     Q: Fn(QDb, I) -> R,
     I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
 {
-    let id = input.as_id().as_u32();
+    let id = input.as_id();
     let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);

     db.attach(|_| {
         if let Some(will_execute_event) = will_execute_event {
-            panic!("Expected query {query_name}({id}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
+            panic!("Expected query {query_name}({id:?}) not to have run but it did: {will_execute_event:?}\n\n{events:#?}");
         }
     });
 }
@@ -65,7 +65,7 @@ pub fn assert_function_query_was_run<Db, Q, QDb, I, R>(
     Q: Fn(QDb, I) -> R,
     I: salsa::plumbing::AsId + std::fmt::Debug + Copy,
 {
-    let id = input.as_id().as_u32();
+    let id = input.as_id();
     let (query_name, will_execute_event) = find_will_execute_event(db, query, input, events);

     db.attach(|_| {
@@ -224,7 +224,7 @@ fn query_was_not_run() {
 }

 #[test]
-#[should_panic(expected = "Expected query len(0) not to have run but it did:")]
+#[should_panic(expected = "Expected query len(Id(0)) not to have run but it did:")]
 fn query_was_not_run_fails_if_query_was_run() {
     use crate::tests::TestDb;
     use salsa::prelude::*;
@@ -287,7 +287,7 @@ fn const_query_was_not_run_fails_if_query_was_run() {
 }

 #[test]
-#[should_panic(expected = "Expected query len(0) to have run but it did not:")]
+#[should_panic(expected = "Expected query len(Id(0)) to have run but it did not:")]
 fn query_was_run_fails_if_query_was_not_run() {
     use crate::tests::TestDb;
     use salsa::prelude::*;
@@ -14,7 +14,6 @@ license = { workspace = true }
 ty = { workspace = true }
 ty_project = { workspace = true, features = ["schemars"] }
 ruff = { workspace = true }
-ruff_diagnostics = { workspace = true }
 ruff_formatter = { workspace = true }
 ruff_linter = { workspace = true, features = ["schemars"] }
 ruff_notebook = { workspace = true }
@@ -10,7 +10,7 @@ use itertools::Itertools;
 use regex::{Captures, Regex};
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};
 use ruff_options_metadata::{OptionEntry, OptionsMetadata};
 use ruff_workspace::options::Options;
@@ -29,7 +29,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
         if let Some(explanation) = rule.explanation() {
             let mut output = String::new();

-            let _ = writeln!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
+            let _ = writeln!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());

             let (linter, _) = Linter::parse_code(&rule.noqa_code().to_string()).unwrap();
             if linter.url().is_some() {
@@ -101,7 +101,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
         let filename = PathBuf::from(ROOT_DIR)
             .join("docs")
             .join("rules")
-            .join(rule.as_ref())
+            .join(&*rule.name())
             .with_extension("md");

         if args.dry_run {
@@ -8,7 +8,7 @@ use std::borrow::Cow;
 use std::fmt::Write;
 use strum::IntoEnumIterator;

-use ruff_diagnostics::FixAvailability;
+use ruff_linter::FixAvailability;
 use ruff_linter::registry::{Linter, Rule, RuleNamespace};
 use ruff_linter::upstream_categories::UpstreamCategoryAndPrefix;
 use ruff_options_metadata::OptionsMetadata;
@@ -18,45 +18,44 @@ const FIX_SYMBOL: &str = "🛠️";
 const PREVIEW_SYMBOL: &str = "🧪";
 const REMOVED_SYMBOL: &str = "❌";
 const WARNING_SYMBOL: &str = "⚠️";
-const STABLE_SYMBOL: &str = "✔️";
 const SPACER: &str = " ";

+/// Style for the rule's fixability and status icons.
+const SYMBOL_STYLE: &str = "style='width: 1em; display: inline-block;'";
+/// Style for the container wrapping the fixability and status icons.
+const SYMBOLS_CONTAINER: &str = "style='display: flex; gap: 0.5rem; justify-content: end;'";
+
 fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>, linter: &Linter) {
-    table_out.push_str("| Code | Name | Message | |");
+    table_out.push_str("| Code | Name | Message | |");
     table_out.push('\n');
-    table_out.push_str("| ---- | ---- | ------- | ------: |");
+    table_out.push_str("| ---- | ---- | ------- | -: |");
     table_out.push('\n');
     for rule in rules {
         let status_token = match rule.group() {
             RuleGroup::Removed => {
-                format!("<span title='Rule has been removed'>{REMOVED_SYMBOL}</span>")
+                format!(
+                    "<span {SYMBOL_STYLE} title='Rule has been removed'>{REMOVED_SYMBOL}</span>"
+                )
             }
             RuleGroup::Deprecated => {
-                format!("<span title='Rule has been deprecated'>{WARNING_SYMBOL}</span>")
+                format!(
+                    "<span {SYMBOL_STYLE} title='Rule has been deprecated'>{WARNING_SYMBOL}</span>"
+                )
             }
             RuleGroup::Preview => {
-                format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
-            }
-            RuleGroup::Stable => {
-                // A full opacity checkmark is a bit aggressive for indicating stable
-                format!("<span title='Rule is stable' style='opacity: 0.6'>{STABLE_SYMBOL}</span>")
+                format!("<span {SYMBOL_STYLE} title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
             }
+            RuleGroup::Stable => format!("<span {SYMBOL_STYLE}></span>"),
         };

         let fix_token = match rule.fixable() {
             FixAvailability::Always | FixAvailability::Sometimes => {
-                format!("<span title='Automatic fix available'>{FIX_SYMBOL}</span>")
-            }
-            FixAvailability::None => {
-                format!(
-                    "<span title='Automatic fix not available' style='opacity: 0.1' aria-hidden='true'>{FIX_SYMBOL}</span>"
-                )
+                format!("<span {SYMBOL_STYLE} title='Automatic fix available'>{FIX_SYMBOL}</span>")
             }
+            FixAvailability::None => format!("<span {SYMBOL_STYLE}></span>"),
         };

-        let tokens = format!("{status_token} {fix_token}");
-
-        let rule_name = rule.as_ref();
+        let rule_name = rule.name();

         // If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
         // brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`
@@ -82,15 +81,14 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
         #[expect(clippy::or_fun_call)]
         let _ = write!(
             table_out,
-            "| {ss}{0}{1}{se} {{ #{0}{1} }} | {ss}{2}{se} | {ss}{3}{se} | {ss}{4}{se} |",
-            linter.common_prefix(),
-            linter.code_for_rule(rule).unwrap(),
-            rule.explanation()
+            "| {ss}{prefix}{code}{se} {{ #{prefix}{code} }} | {ss}{explanation}{se} | {ss}{message}{se} | <div {SYMBOLS_CONTAINER}>{status_token}{fix_token}</div>|",
+            prefix = linter.common_prefix(),
+            code = linter.code_for_rule(rule).unwrap(),
+            explanation = rule
+                .explanation()
                 .is_some()
                 .then_some(format_args!("[{rule_name}](rules/{rule_name}.md)"))
                 .unwrap_or(format_args!("{rule_name}")),
             message,
-            tokens,
         );
         table_out.push('\n');
     }
@@ -104,12 +102,6 @@ pub(crate) fn generate() -> String {
     table_out.push_str("### Legend");
     table_out.push('\n');

-    let _ = write!(
-        &mut table_out,
-        "{SPACER}{STABLE_SYMBOL}{SPACER} The rule is stable."
-    );
-    table_out.push_str("<br />");
-
     let _ = write!(
         &mut table_out,
         "{SPACER}{PREVIEW_SYMBOL}{SPACER} The rule is unstable and is in [\"preview\"](faq.md#what-is-preview)."
@@ -132,7 +124,8 @@ pub(crate) fn generate() -> String {
         &mut table_out,
         "{SPACER}{FIX_SYMBOL}{SPACER} The rule is automatically fixable by the `--fix` command-line option."
     );
-    table_out.push_str("<br />");
+    table_out.push_str("\n\n");
+    table_out.push_str("All rules not marked as preview, deprecated or removed are stable.");
     table_out.push('\n');

     for linter in Linter::iter() {
@@ -16,7 +16,5 @@ doctest = false
 [dependencies]
 ruff_text_size = { workspace = true }

-anyhow = { workspace = true }
-log = { workspace = true }
 is-macro = { workspace = true }
 serde = { workspace = true, optional = true, features = [] }
@@ -1,11 +1,7 @@
-pub use diagnostic::Diagnostic;
 pub use edit::Edit;
 pub use fix::{Applicability, Fix, IsolationLevel};
 pub use source_map::{SourceMap, SourceMarker};
-pub use violation::{AlwaysFixableViolation, FixAvailability, Violation, ViolationMetadata};

-mod diagnostic;
 mod edit;
 mod fix;
 mod source_map;
-mod violation;
@@ -39,6 +39,7 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
                 module,
                 level,
                 range: _,
+                node_index: _,
             }) => {
                 let module = module.as_deref();
                 let level = *level;
@@ -78,7 +79,11 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
                     }
                 }
             }
-            Stmt::Import(ast::StmtImport { names, range: _ }) => {
+            Stmt::Import(ast::StmtImport {
+                names,
+                range: _,
+                node_index: _,
+            }) => {
                 for alias in names {
                     if let Some(module_name) = ModuleName::new(alias.name.as_str()) {
                         self.imports.push(CollectedImport::Import(module_name));
@@ -122,7 +127,12 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {

     fn visit_expr(&mut self, expr: &'ast Expr) {
         if self.string_imports {
-            if let Expr::StringLiteral(ast::ExprStringLiteral { value, range: _ }) = expr {
+            if let Expr::StringLiteral(ast::ExprStringLiteral {
+                value,
+                range: _,
+                node_index: _,
+            }) = expr
+            {
                 // Determine whether the string literal "looks like" an import statement: contains
                 // a dot, and consists solely of valid Python identifiers.
                 let value = value.to_str();
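The heuristic described in the comment above — contains a dot, and every dot-separated segment is a valid identifier — reduces to a small standalone check. A sketch using an ASCII-only identifier test, which simplifies Python's real (Unicode-aware) identifier rules:

```rust
fn looks_like_import(value: &str) -> bool {
    value.contains('.')
        && value.split('.').all(|segment| {
            let mut chars = segment.chars();
            match chars.next() {
                // First character: letter or underscore; the rest may also be digits.
                Some(first) if first == '_' || first.is_ascii_alphabetic() => {
                    chars.all(|c| c == '_' || c.is_ascii_alphanumeric())
                }
                _ => false, // empty segment or invalid leading character
            }
        })
}

fn main() {
    assert!(looks_like_import("airflow.utils.dates"));
    assert!(!looks_like_import("hello world"));
    assert!(!looks_like_import("numpy")); // no dot: too likely a false positive
}
```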
@@ -10,7 +10,7 @@ use ruff_python_ast::PythonVersion;
 use ty_python_semantic::lint::{LintRegistry, RuleSelection};
 use ty_python_semantic::{
     Db, Program, ProgramSettings, PythonPath, PythonPlatform, PythonVersionSource,
-    PythonVersionWithSource, SearchPathSettings, default_lint_registry,
+    PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
 };

 static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
@@ -37,17 +37,18 @@ impl ModuleDb {
     ) -> Result<Self> {
         let mut search_paths = SearchPathSettings::new(src_roots);
         if let Some(venv_path) = venv_path {
-            search_paths.python_path = PythonPath::from_cli_flag(venv_path);
+            search_paths.python_path =
+                PythonPath::sys_prefix(venv_path, SysPrefixPathOrigin::PythonCliFlag);
         }

         let db = Self::default();
         Program::from_settings(
             &db,
             ProgramSettings {
-                python_version: PythonVersionWithSource {
+                python_version: Some(PythonVersionWithSource {
                     version: python_version,
                     source: PythonVersionSource::default(),
-                },
+                }),
                 python_platform: PythonPlatform::default(),
                 search_paths,
             },
@@ -91,7 +92,7 @@ impl Db for ModuleDb {
         !file.path(self).is_vendored_path()
     }

-    fn rule_selection(&self) -> &RuleSelection {
+    fn rule_selection(&self, _file: File) -> &RuleSelection {
         &self.rule_selection
     }
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.11.11"
|
||||
version = "0.12.0"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
||||
@@ -10,22 +10,10 @@ from airflow import (
PY312,
)
from airflow.api_connexion.security import requires_access
from airflow.configuration import (
as_dict,
get,
getboolean,
getfloat,
getint,
has_option,
remove_option,
set,
)
from airflow.contrib.aws_athena_hook import AWSAthenaHook
from airflow.datasets import DatasetAliasEvent
from airflow.hooks.base_hook import BaseHook
from airflow.operators.subdag import SubDagOperator
from airflow.secrets.local_filesystem import LocalFilesystemBackend
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.triggers.external_task import TaskStateTrigger
from airflow.utils import dates
from airflow.utils.dag_cycle_tester import test_cycle
@@ -40,13 +28,10 @@ from airflow.utils.dates import (
)
from airflow.utils.db import create_session
from airflow.utils.decorators import apply_defaults
from airflow.utils.file import TemporaryDirectory, mkdirs
from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream
from airflow.utils.log import secrets_masker
from airflow.utils.file import mkdirs
from airflow.utils.state import SHUTDOWN, terminating_states
from airflow.utils.trigger_rule import TriggerRule
from airflow.www.auth import has_access
from airflow.www.auth import has_access, has_access_dataset
from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key

# airflow root
@@ -55,11 +40,6 @@ PY36, PY37, PY38, PY39, PY310, PY311, PY312
# airflow.api_connexion.security
requires_access


# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set


# airflow.contrib.*
AWSAthenaHook()

@@ -68,10 +48,6 @@ AWSAthenaHook()
DatasetAliasEvent()


# airflow.hooks
BaseHook()


# airflow.operators.subdag.*
SubDagOperator()

@@ -81,10 +57,6 @@ SubDagOperator()
LocalFilesystemBackend()


# airflow.sensors.base_sensor_operator
BaseSensorOperator()


# airflow.triggers.external_task
TaskStateTrigger()

@@ -114,15 +86,8 @@ create_session
apply_defaults

# airflow.utils.file
TemporaryDirectory()
mkdirs

# airflow.utils.helpers
helper_chain
helper_cross_downstream

# airflow.utils.log
secrets_masker

# airflow.utils.state
SHUTDOWN
@@ -135,37 +100,8 @@ TriggerRule.NONE_FAILED_OR_SKIPPED

# airflow.www.auth
has_access
has_access_dataset

# airflow.www.utils
get_sensitive_variables_fields
should_hide_value_for_key

# airflow.operators.python
from airflow.operators.python import get_current_context

get_current_context()

# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri

sanitize_uri

# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri

sanitize_uri

# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri

sanitize_uri

# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier

BaseNotifier()

# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager

BaseAuthManager()

@@ -3,7 +3,6 @@ from __future__ import annotations
from airflow.api_connexion.security import requires_access_dataset
from airflow.auth.managers.models.resource_details import (
DatasetDetails,

)
from airflow.datasets.manager import (
DatasetManager,
@@ -12,15 +11,13 @@ from airflow.datasets.manager import (
)
from airflow.lineage.hook import DatasetLineageInfo
from airflow.metrics.validators import AllowListValidator, BlockListValidator
from airflow.secrets.local_filesystm import load_connections
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET
from airflow.www.auth import has_access_dataset

requires_access_dataset()

DatasetDetails()


DatasetManager()
dataset_manager()
resolve_dataset_manager()
@@ -34,7 +31,6 @@ load_connections()

RESOURCE_DATASET

has_access_dataset()

from airflow.listeners.spec.dataset import (
on_dataset_changed,
@@ -43,3 +39,76 @@ from airflow.listeners.spec.dataset import (

on_dataset_created()
on_dataset_changed()


# airflow.operators.python
from airflow.operators.python import get_current_context

get_current_context()

# airflow.providers.mysql
from airflow.providers.mysql.datasets.mysql import sanitize_uri

sanitize_uri

# airflow.providers.postgres
from airflow.providers.postgres.datasets.postgres import sanitize_uri

sanitize_uri

# airflow.providers.trino
from airflow.providers.trino.datasets.trino import sanitize_uri

sanitize_uri

# airflow.notifications.basenotifier
from airflow.notifications.basenotifier import BaseNotifier

BaseNotifier()

# airflow.auth.manager
from airflow.auth.managers.base_auth_manager import BaseAuthManager

BaseAuthManager()


from airflow.configuration import (
as_dict,
get,
getboolean,
getfloat,
getint,
has_option,
remove_option,
set,
)

# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
from airflow.hooks.base_hook import BaseHook

# airflow.hooks
BaseHook()

from airflow.sensors.base_sensor_operator import BaseSensorOperator

# airflow.sensors.base_sensor_operator
BaseSensorOperator()
BaseHook()

from airflow.utils.helpers import chain as helper_chain
from airflow.utils.helpers import cross_downstream as helper_cross_downstream

# airflow.utils.helpers
helper_chain
helper_cross_downstream

# airflow.utils.file
from airflow.utils.file import TemporaryDirectory

TemporaryDirectory()

from airflow.utils.log import secrets_masker

# airflow.utils.log
secrets_masker

@@ -1,54 +1,54 @@
from __future__ import annotations

from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.openlineage.utils.utils import (
DatasetInfo,
translate_airflow_dataset,
)
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET

AvpEntities.DATASET

# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()

# airflow.secrets.local_filesystem
load_connections()

# airflow.security.permissions
RESOURCE_DATASET

from airflow.providers.amazon.aws.datasets.s3 import (
convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
)
from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset

s3_create_dataset()
s3_convert_dataset_to_openlineage()

from airflow.providers.common.io.dataset.file import (
convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
)
from airflow.providers.common.io.dataset.file import create_dataset as io_create_dataset

from airflow.providers.google.datasets.bigquery import (
create_dataset as bigquery_create_dataset,
)
from airflow.providers.google.datasets.gcs import (
convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
from airflow.providers.openlineage.utils.utils import (
DatasetInfo,
translate_airflow_dataset,
)

AvpEntities.DATASET

s3_create_dataset()
s3_convert_dataset_to_openlineage()

io_create_dataset()
io_convert_dataset_to_openlineage()


# # airflow.providers.google.datasets.bigquery
from airflow.providers.google.datasets.bigquery import (
create_dataset as bigquery_create_dataset,
)

# airflow.providers.google.datasets.bigquery
bigquery_create_dataset()

# airflow.providers.google.datasets.gcs
from airflow.providers.google.datasets.gcs import (
convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset

gcs_create_dataset()
gcs_convert_dataset_to_openlineage()
# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()
#
# airflow.secrets.local_filesystem
load_connections()
#
# airflow.security.permissions
RESOURCE_DATASET

# airflow.timetables
DatasetTriggeredTimetable()
#
# airflow.www.auth
has_access_dataset

@@ -5,35 +5,30 @@ from airflow.hooks.S3_hook import (
provide_bucket_name,
)
from airflow.operators.gcs_to_s3 import GCSToS3Operator
from airflow.operators.google_api_to_s3_transfer import (
GoogleApiToS3Operator,
GoogleApiToS3Transfer,
)
from airflow.operators.redshift_to_s3_operator import (
RedshiftToS3Operator,
RedshiftToS3Transfer,
)
from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Operator
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Operator
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.operators.s3_to_redshift_operator import (
S3ToRedshiftOperator,
S3ToRedshiftTransfer,
)
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftOperator
from airflow.sensors.s3_key_sensor import S3KeySensor

S3Hook()
provide_bucket_name()

GCSToS3Operator()

GoogleApiToS3Operator()
RedshiftToS3Operator()
S3FileTransformOperator()
S3ToRedshiftOperator()
S3KeySensor()

from airflow.operators.google_api_to_s3_transfer import GoogleApiToS3Transfer

GoogleApiToS3Transfer()

RedshiftToS3Operator()
from airflow.operators.redshift_to_s3_operator import RedshiftToS3Transfer

RedshiftToS3Transfer()

S3FileTransformOperator()
from airflow.operators.s3_to_redshift_operator import S3ToRedshiftTransfer

S3ToRedshiftOperator()
S3ToRedshiftTransfer()

S3KeySensor()

@@ -4,10 +4,13 @@ from airflow.hooks.dbapi import (
ConnectorProtocol,
DbApiHook,
)

ConnectorProtocol()
DbApiHook()

from airflow.hooks.dbapi_hook import DbApiHook
from airflow.operators.check_operator import SQLCheckOperator

ConnectorProtocol()
DbApiHook()
SQLCheckOperator()

@@ -114,16 +117,11 @@ from airflow.sensors.sql_sensor import SqlSensor
SqlSensor()


from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.sqlite_operator import SqliteOperator
from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator

JdbcOperator()
MsSqlOperator()
MySqlOperator()
OracleOperator()
PostgresOperator()
SqliteOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()
SQLExecuteQueryOperator()

@@ -12,55 +12,59 @@ from airflow.macros.hive import (
)
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
from airflow.operators.hive_to_mysql import (
HiveToMySqlOperator,
HiveToMySqlTransfer,
)
from airflow.operators.hive_to_mysql import HiveToMySqlOperator
from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
from airflow.operators.mssql_to_hive import (
MsSqlToHiveOperator,
MsSqlToHiveTransfer,
)
from airflow.operators.mysql_to_hive import (
MySqlToHiveOperator,
MySqlToHiveTransfer,
)
from airflow.operators.s3_to_hive_operator import (
S3ToHiveOperator,
S3ToHiveTransfer,
)
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor

HIVE_QUEUE_PRIORITIES
HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()

closest_ds_partition()
max_partition()

HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()
HIVE_QUEUE_PRIORITIES

HiveOperator()

HiveStatsCollectionOperator()

HiveToMySqlOperator()
HiveToMySqlTransfer()

HiveToSambaOperator()

MsSqlToHiveOperator()
MsSqlToHiveTransfer()

from airflow.operators.hive_to_mysql import HiveToMySqlTransfer

HiveToMySqlTransfer()

from airflow.operators.mysql_to_hive import MySqlToHiveOperator

MySqlToHiveOperator()

from airflow.operators.mysql_to_hive import MySqlToHiveTransfer

MySqlToHiveTransfer()

from airflow.operators.mssql_to_hive import MsSqlToHiveOperator

MsSqlToHiveOperator()

from airflow.operators.mssql_to_hive import MsSqlToHiveTransfer

MsSqlToHiveTransfer()

from airflow.operators.s3_to_hive_operator import S3ToHiveOperator

S3ToHiveOperator()

from airflow.operators.s3_to_hive_operator import S3ToHiveTransfer

S3ToHiveTransfer()

from airflow.sensors.hive_partition_sensor import HivePartitionSensor

HivePartitionSensor()

from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor

MetastorePartitionSensor()

from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor

NamedHivePartitionSensor()

@@ -16,14 +16,7 @@ from airflow.kubernetes.kube_client import (
from airflow.kubernetes.kubernetes_helper_functions import (
add_pod_suffix,
annotations_for_logging_task_metadata,
annotations_to_key,
create_pod_id,
get_logs_task_metadata,
rand_str,
)
from airflow.kubernetes.pod import (
Port,
Resources,
)

ALL_NAMESPACES
@@ -37,21 +30,13 @@ _enable_tcp_keepalive()
get_kube_client()

add_pod_suffix()
create_pod_id()

annotations_for_logging_task_metadata()
annotations_to_key()
get_logs_task_metadata()
rand_str()

Port()
Resources()
create_pod_id()


from airflow.kubernetes.pod_generator import (
PodDefaults,
PodGenerator,
PodGeneratorDeprecated,
add_pod_suffix,
datetime_to_label_safe_datestring,
extend_object_field,
@@ -61,18 +46,16 @@ from airflow.kubernetes.pod_generator import (
rand_str,
)

PodDefaults()
PodGenerator()
add_pod_suffix()
datetime_to_label_safe_datestring()
extend_object_field()
label_safe_datestring_to_datetime()
make_safe_label_value()
merge_objects()
PodGenerator()
PodDefaults()
PodGeneratorDeprecated()
add_pod_suffix()
rand_str()


from airflow.kubernetes.pod_generator_deprecated import (
PodDefaults,
PodGenerator,
@@ -90,7 +73,6 @@ make_safe_label_value()
PodLauncher()
PodStatus()


from airflow.kubernetes.pod_launcher_deprecated import (
PodDefaults,
PodLauncher,
@@ -115,3 +97,17 @@ K8SModel()
Secret()
Volume()
VolumeMount()

from airflow.kubernetes.kubernetes_helper_functions import (
annotations_to_key,
get_logs_task_metadata,
rand_str,
)

annotations_to_key()
get_logs_task_metadata()
rand_str()

from airflow.kubernetes.pod_generator import PodGeneratorDeprecated

PodGeneratorDeprecated()

@@ -5,10 +5,6 @@ from airflow.operators.dagrun_operator import (
TriggerDagRunLink,
TriggerDagRunOperator,
)
from airflow.operators.dummy import (
DummyOperator,
EmptyOperator,
)
from airflow.operators.latest_only_operator import LatestOnlyOperator
from airflow.operators.python_operator import (
BranchPythonOperator,
@@ -19,15 +15,12 @@ from airflow.operators.python_operator import (
from airflow.sensors.external_task_sensor import (
ExternalTaskMarker,
ExternalTaskSensor,
ExternalTaskSensorLink,
)

BashOperator()

TriggerDagRunLink()
TriggerDagRunOperator()
DummyOperator()
EmptyOperator()

LatestOnlyOperator()

@@ -38,25 +31,48 @@ ShortCircuitOperator()

ExternalTaskMarker()
ExternalTaskSensor()
ExternalTaskSensorLink()

from airflow.operators.dummy_operator import (
DummyOperator,
EmptyOperator,
)

DummyOperator()
EmptyOperator()

from airflow.hooks.subprocess import SubprocessResult

SubprocessResult()

from airflow.hooks.subprocess import working_directory

working_directory()

from airflow.operators.datetime import target_times_as_dates

target_times_as_dates()

from airflow.operators.trigger_dagrun import TriggerDagRunLink

TriggerDagRunLink()

from airflow.sensors.external_task import ExternalTaskSensorLink

ExternalTaskSensorLink()

from airflow.sensors.time_delta import WaitSensor
WaitSensor()

WaitSensor()

from airflow.operators.dummy import DummyOperator

DummyOperator()

from airflow.operators.dummy import EmptyOperator

EmptyOperator()

from airflow.operators.dummy_operator import DummyOperator

DummyOperator()

from airflow.operators.dummy_operator import EmptyOperator

EmptyOperator()

from airflow.sensors.external_task_sensor import ExternalTaskSensorLink

ExternalTaskSensorLink()

@@ -9,19 +9,12 @@ from airflow.datasets import (
expand_alias_to_datasets,
)
from airflow.datasets.metadata import Metadata
from airflow.decorators import dag, setup, task, task_group, teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
Connection,
Variable,
from airflow.decorators import (
dag,
setup,
task,
task_group,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context

# airflow
DatasetFromRoot()
@@ -39,9 +32,22 @@ dag()
task()
task_group()
setup()
from airflow.decorators import teardown
from airflow.io.path import ObjectStoragePath
from airflow.io.storage import attach
from airflow.models import DAG as DAGFromModel
from airflow.models import (
Connection,
Variable,
)
from airflow.models.baseoperator import chain, chain_linear, cross_downstream
from airflow.models.baseoperatorlink import BaseOperatorLink
from airflow.models.dag import DAG as DAGFromDag

# airflow.decorators
teardown()

# airflow.io
# # airflow.io
ObjectStoragePath()
attach()

@@ -60,6 +66,9 @@ BaseOperatorLink()

# airflow.models.dag
DAGFromDag()
from airflow.timetables.datasets import DatasetOrTimeSchedule
from airflow.utils.dag_parsing_context import get_parsing_context

# airflow.timetables.datasets
DatasetOrTimeSchedule()


@@ -7,49 +7,71 @@ from airflow.operators.bash import BashOperator
from airflow.operators.datetime import BranchDateTimeOperator
from airflow.operators.empty import EmptyOperator
from airflow.operators.latest_only import LatestOnlyOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor

FSHook()
PackageIndexHook()
SubprocessHook()

BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()

LatestOnlyOperator()
BranchDayOfWeekOperator()
DateTimeSensor()

from airflow.operators.python import (
BranchPythonOperator,
PythonOperator,
PythonVirtualenvOperator,
ShortCircuitOperator,
)
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors.date_time import DateTimeSensor, DateTimeSensorAsync
from airflow.sensors.date_time import DateTimeSensorAsync
from airflow.sensors.external_task import (
ExternalTaskMarker,
ExternalTaskSensor,

)
from airflow.sensors.time_sensor import (
TimeSensor,
TimeSensorAsync,
)
from airflow.sensors.filesystem import FileSensor
from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync
from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import DagStateTrigger, WorkflowTrigger
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger

FSHook()
PackageIndexHook()
SubprocessHook()
BashOperator()
BranchDateTimeOperator()
TriggerDagRunOperator()
EmptyOperator()
LatestOnlyOperator()
(
BranchPythonOperator(),
PythonOperator(),
PythonVirtualenvOperator(),
ShortCircuitOperator(),
)
BranchDayOfWeekOperator()
DateTimeSensor(), DateTimeSensorAsync()
ExternalTaskMarker(), ExternalTaskSensor()
BranchPythonOperator()
PythonOperator()
PythonVirtualenvOperator()
ShortCircuitOperator()
DateTimeSensorAsync()
ExternalTaskMarker()
ExternalTaskSensor()
FileSensor()
TimeSensor(), TimeSensorAsync()
TimeDeltaSensor(), TimeDeltaSensorAsync()
TimeSensor()
TimeSensorAsync()

from airflow.sensors.time_delta import (
TimeDeltaSensor,
TimeDeltaSensorAsync,
)
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import (
DagStateTrigger,
WorkflowTrigger,
)
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import (
DateTimeTrigger,
TimeDeltaTrigger,
)

TimeDeltaSensor()
TimeDeltaSensorAsync()
DayOfWeekSensor()
DagStateTrigger(), WorkflowTrigger()
DagStateTrigger()
WorkflowTrigger()
FileTrigger()
DateTimeTrigger(), TimeDeltaTrigger()
DateTimeTrigger()
TimeDeltaTrigger()

@@ -178,3 +178,38 @@ async def unknown_1(other: str = Depends(unknown_unresolved)): ...
async def unknown_2(other: str = Depends(unknown_not_function)): ...
@app.get("/things/{thing_id}")
async def unknown_3(other: str = Depends(unknown_imported)): ...


# Class dependencies
from pydantic import BaseModel
from dataclasses import dataclass

class PydanticParams(BaseModel):
    my_id: int


class InitParams:
    def __init__(self, my_id: int):
        self.my_id = my_id


# Errors
@app.get("/{id}")
async def get_id_pydantic_full(
    params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...


# No errors
@app.get("/{my_id}")
async def get_id_pydantic_full(
    params: Annotated[PydanticParams, Depends(PydanticParams)],
): ...
@app.get("/{my_id}")
async def get_id_pydantic_short(params: Annotated[PydanticParams, Depends()]): ...
@app.get("/{my_id}")
async def get_id_init_not_annotated(params = Depends(InitParams)): ...

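The fixture above suggests the rule now looks through class dependencies (Pydantic models and `__init__`-based classes) when deciding whether a templated path parameter is consumed. A minimal sketch of the tested pattern, assuming FastAPI and Pydantic are available (names are illustrative, not from the change):

from typing import Annotated
from fastapi import Depends, FastAPI
from pydantic import BaseModel

app = FastAPI()

class Params(BaseModel):
    my_id: int

@app.get("/{my_id}")  # ok: `my_id` is consumed through the dependency's fields
async def ok(params: Annotated[Params, Depends()]): ...

@app.get("/{id}")  # flagged: nothing consumes the `id` path parameter
async def bad(params: Annotated[Params, Depends()]): ...
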
@@ -145,3 +145,23 @@ def func():
    sleep = 10

anyio.run(main)


async def test_anyio_async115_helpers():
    import anyio

    await anyio.sleep(delay=1)  # OK
    await anyio.sleep(seconds=1)  # OK

    await anyio.sleep(delay=0)  # ASYNC115
    await anyio.sleep(seconds=0)  # OK


async def test_trio_async115_helpers():
    import trio

    await trio.sleep(seconds=1)  # OK
    await trio.sleep(delay=1)  # OK

    await trio.sleep(seconds=0)  # ASYNC115
    await trio.sleep(delay=0)  # OK

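The asymmetry in these helpers follows from the two libraries' signatures: `trio.sleep` names its delay argument `seconds`, while `anyio.sleep` names it `delay`, so only the keyword that actually binds the delay can make the call a zero-length sleep. A minimal sketch:

import anyio
import trio

async def zero_sleeps():
    await anyio.sleep(delay=0)  # binds anyio's real parameter: a zero-second sleep
    await trio.sleep(seconds=0)  # binds trio's real parameter: a zero-second sleep
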
@@ -108,3 +108,23 @@ async def import_from_anyio():

# catch from import
await sleep(86401)  # error: 116, "async"


async def test_anyio_async116_helpers():
    import anyio

    await anyio.sleep(delay=1)  # OK
    await anyio.sleep(seconds=1)  # OK

    await anyio.sleep(delay=86401)  # ASYNC116
    await anyio.sleep(seconds=86401)  # OK


async def test_trio_async116_helpers():
    import trio

    await trio.sleep(seconds=1)  # OK
    await trio.sleep(delay=1)  # OK

    await trio.sleep(seconds=86401)  # ASYNC116
    await trio.sleep(delay=86401)  # OK

@@ -22,3 +22,8 @@ def my_func():

# Implicit string concatenation
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"

# t-strings - all ok
t"0.0.0.0"
"0.0.0.0" t"0.0.0.0{expr}0.0.0.0"
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0" t"0.0.0.0{expr}0.0.0.0"

@@ -40,3 +40,7 @@ with tempfile.TemporaryDirectory(dir="/dev/shm") as d:

with TemporaryDirectory(dir="/tmp") as d:
    pass

# ok (runtime error from t-string)
with open(t"/foo/bar", "w") as f:
    f.write("def")

@@ -169,3 +169,13 @@ query60 = f"""

# https://github.com/astral-sh/ruff/issues/17967
query61 = f"SELECT * FROM table"  # skip expressionless f-strings

# t-strings
query62 = t"SELECT * FROM table"
query63 = t"""
SELECT *,
foo
FROM ({user_input}) raw
"""
query64 = f"update {t"{table}"} set var = {t"{var}"}"
query65 = t"update {f"{table}"} set var = {f"{var}"}"

@@ -67,3 +67,6 @@ getattr(self.

import builtins
builtins.getattr(foo, "bar")

# Regression test for: https://github.com/astral-sh/ruff/issues/18353
setattr(foo, "__debug__", 0)

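For context on the regression test above (behavior inferred from the linked issue): the usual fix rewrites `setattr` with a constant attribute name into a plain assignment, but `__debug__` is not a valid assignment target, so no fix can be offered there. A sketch:

setattr(foo, "bar", 1)  # fixable: foo.bar = 1
setattr(foo, "__debug__", 0)  # not fixable: `foo.__debug__ = 0` is rejected by the compiler
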
@@ -179,3 +179,17 @@ def func():
    for elem in some_list:
        if some_list.pop() == 2:
            return

# should not error - direct return with mutation (Issue #18399)
def fail_map(mapping):
    for key in mapping:
        return mapping.pop(key)

def success_map(mapping):
    for key in mapping:
        ret = mapping.pop(key)  # should not error
        return ret

def fail_list(seq):
    for val in seq:
        return seq.pop(4)

@@ -91,3 +91,99 @@ _ = "\8""0"  # fix should be "\80"
_ = "\12""8"  # fix should be "\128"
_ = "\12""foo"  # fix should be "\12foo"
_ = "\12" ""  # fix should be "\12"


# Mixed literal + non-literal scenarios
_ = (
"start" +
variable +
"end"
)

_ = (
f"format" +
func_call() +
"literal"
)

_ = (
rf"raw_f{x}" +
r"raw_normal"
)


# Different prefix combinations
_ = (
u"unicode" +
r"raw"
)

_ = (
rb"raw_bytes" +
b"normal_bytes"
)

_ = (
b"bytes" +
b"with_bytes"
)

# Repeated concatenation

_ = ("a" +
"b" +
"c" +
"d" + "e"
)

_ = ("a"
+ "b"
+ "c"
+ "d"
+ "e"
)

_ = (
"start" +
variable +  # comment
"end"
)

_ = (
"start" +
variable
# leading comment
+ "end"
)

_ = (
"first"
+ "second"  # extra spaces around +
)

_ = (
"first" +  # trailing spaces before +
"second"
)

_ = ((
"deep" +
"nesting"
))

_ = (
"contains + plus" +
"another string"
)

_ = (
"start"
# leading comment
+ "end"
)

_ = (
"start" +
# leading comment
"end"
)

@@ -181,3 +181,34 @@ MetaType = TypeVar("MetaType")
class MetaTestClass(type):
    def m(cls: MetaType) -> MetaType:
        return cls


from __future__ import annotations

class BadClassWithStringTypeHints:
    def bad_instance_method_with_string_annotations(self: "_S", arg: str) -> "_S": ...  # PYI019

    @classmethod
    def bad_class_method_with_string_annotations(cls: "type[_S]") -> "_S": ...  # PYI019


    @classmethod
    def bad_class_method_with_mixed_annotations_1(cls: "type[_S]") -> _S: ...  # PYI019


    @classmethod
    def bad_class_method_with_mixed_annotations_1(cls: type[_S]) -> "_S": ...  # PYI019


class BadSubscriptReturnTypeWithStringTypeHints:
    @classmethod
    def m[S](cls: "type[S]") -> "type[S]": ...  # PYI019


class GoodClassWiStringTypeHints:
    @classmethod
    def good_cls_method_with_mixed_annotations(cls: "type[Self]", arg: str) -> Self: ...
    @staticmethod
    def good_static_method_with_string_annotations(arg: "_S") -> "_S": ...
    @classmethod
    def good_class_method_with_args_string_annotations(cls, arg1: "_S", arg2: "_S") -> "_S": ...

@@ -172,3 +172,36 @@ MetaType = TypeVar("MetaType")
class MetaTestClass(type):
    def m(cls: MetaType) -> MetaType:
        return cls


from __future__ import annotations


class BadClassWithStringTypeHints:
    def bad_instance_method_with_string_annotations(self: "_S", arg: str) -> "_S": ...  # PYI019

    @classmethod
    def bad_class_method_with_string_annotations(cls: "type[_S]") -> "_S": ...  # PYI019


    @classmethod
    def bad_class_method_with_mixed_annotations_1(cls: "type[_S]") -> _S: ...  # PYI019


    @classmethod
    def bad_class_method_with_mixed_annotations_1(cls: type[_S]) -> "_S": ...  # PYI019


class BadSubscriptReturnTypeWithStringTypeHints:
    @classmethod
    def m[S](cls: "type[S]") -> "type[S]": ...  # PYI019


class GoodClassWithStringTypeHints:
    @classmethod
    def good_cls_method_with_mixed_annotations(cls: "type[Self]", arg: str) -> Self: ...
    @staticmethod
    def good_static_method_with_string_annotations(arg: "_S") -> "_S": ...
    @classmethod
    def good_class_method_with_args_string_annotations(cls, arg1: "_S", arg2: "_S") -> "_S": ...


@@ -72,3 +72,5 @@ def not_warnings_dot_deprecated(

@not_warnings_dot_deprecated("Not warnings.deprecated, so this one *should* lead to PYI053 in a stub!")
def not_a_deprecated_function() -> None: ...

baz: str = t"51 character stringgggggggggggggggggggggggggggggggg"

@@ -80,3 +80,7 @@ x: TypeAlias = Literal["fooooooooooooooooooooooooooooooooooooooooooooooooooooooo

# Ok
y: TypeAlias = Annotated[int, "metadataaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]

ttoo: str = t"50 character stringggggggggggggggggggggggggggggggg"  # OK

tbar: str = t"51 character stringgggggggggggggggggggggggggggggggg"  # Error: PYI053

@@ -52,3 +52,15 @@ class MyList(Sized, Generic[T]):  # Generic already in last place

class SomeGeneric(Generic[T]):  # Only one generic
    pass


# syntax errors with starred and keyword arguments from
# https://github.com/astral-sh/ruff/issues/18602
class C1(Generic[T], str, **{"metaclass": type}):  # PYI059
    ...

class C2(Generic[T], str, metaclass=type):  # PYI059
    ...

class C3(Generic[T], metaclass=type, *[str]):  # PYI059 but no fix
    ...

@@ -39,3 +39,27 @@ f'\'normal\' {f'nested'} normal'  # Q003
f'\'normal\' {f'nested'} "double quotes"'
f'\'normal\' {f'\'nested\' {'other'} normal'} "double quotes"'  # Q003
f'\'normal\' {f'\'nested\' {'other'} "double quotes"'} normal'  # Q00l



# Same as above, but with t-strings
t'This is a \'string\''  # Q003
t'This is \\ a \\\'string\''  # Q003
t'"This" is a \'string\''
f"This is a 'string'"
f"\"This\" is a 'string'"
fr'This is a \'string\''
fR'This is a \'string\''
foo = (
t'This is a'
t'\'string\''  # Q003
)
t'\'foo\' {'nested'}'  # Q003
t'\'foo\' {t'nested'}'  # Q003
t'\'foo\' {t'\'nested\''} \'\''  # Q003

t'normal {t'nested'} normal'
t'\'normal\' {t'nested'} normal'  # Q003
t'\'normal\' {t'nested'} "double quotes"'
t'\'normal\' {t'\'nested\' {'other'} normal'} "double quotes"'  # Q003
t'\'normal\' {t'\'nested\' {'other'} "double quotes"'} normal'  # Q00l

@@ -37,3 +37,25 @@ f"\"normal\" {f"nested"} normal"  # Q003
f"\"normal\" {f"nested"} 'single quotes'"
f"\"normal\" {f"\"nested\" {"other"} normal"} 'single quotes'"  # Q003
f"\"normal\" {f"\"nested\" {"other"} 'single quotes'"} normal"  # Q003


# Same as above, but with t-strings
t"This is a \"string\""
t"'This' is a \"string\""
f'This is a "string"'
f'\'This\' is a "string"'
fr"This is a \"string\""
fR"This is a \"string\""
foo = (
t"This is a"
t"\"string\""
)
t"\"foo\" {"foo"}"  # Q003
t"\"foo\" {t"foo"}"  # Q003
t"\"foo\" {t"\"foo\""} \"\""  # Q003

t"normal {t"nested"} normal"
t"\"normal\" {t"nested"} normal"  # Q003
t"\"normal\" {t"nested"} 'single quotes'"
t"\"normal\" {t"\"nested\" {"other"} normal"} 'single quotes'"  # Q003
t"\"normal\" {t"\"nested\" {"other"} 'single quotes'"} normal"  # Q003

@@ -421,3 +421,14 @@ def func(a: dict[str, int]) -> list[dict[str, int]]:
    if "services" in a:
        services = a["services"]
        return services

# See: https://github.com/astral-sh/ruff/issues/18411
def f():
    (#=
    x) = 1
    return x

def f():
    x = (1
    )
    return x

15
crates/ruff_linter/resources/test/fixtures/flake8_use_pathlib/PTH211.py
vendored
Normal file
@@ -0,0 +1,15 @@
import os
from pathlib import Path


os.symlink("usr/bin/python", "tmp/python")
os.symlink(b"usr/bin/python", b"tmp/python")
Path("tmp/python").symlink_to("usr/bin/python")  # Ok

os.symlink("usr/bin/python", "tmp/python", target_is_directory=True)
os.symlink(b"usr/bin/python", b"tmp/python", target_is_directory=True)
Path("tmp/python").symlink_to("usr/bin/python", target_is_directory=True)  # Ok

fd = os.open(".", os.O_RDONLY)
os.symlink("source.txt", "link.txt", dir_fd=fd)  # Ok: dir_fd is not supported by pathlib
os.close(fd)
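Worth noting, since it is easy to get backwards: `os.symlink(src, dst)` creates `dst`, while the pathlib spelling puts the link on the left and the target in the call. A minimal sketch of the equivalence the fixture relies on:

import os
from pathlib import Path

os.symlink("usr/bin/python", "tmp/python")  # os form: symlink(target, link)
Path("tmp/python").symlink_to("usr/bin/python")  # pathlib form: link.symlink_to(target)
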
@@ -81,3 +81,9 @@ foo = {}
class Bar(type(foo)):
    def foo_method(self):
        pass

# https://github.com/astral-sh/ruff/issues/18459
class Example:
    @classmethod
    def function(this):
        cls = 1234

@@ -134,3 +134,9 @@ class MyMeta(type):

class MyProtocolMeta(type(Protocol)):
    def __subclasscheck__(cls, other): ...


# https://github.com/astral-sh/ruff/issues/18459
class C:
    def f(this):
        self = 123

@@ -266,3 +266,15 @@ def f():
    result = list()  # this should be replaced with a comprehension
    for i in values:
        result.append(i + 1)  # PERF401

def f():
    src = [1]
    dst = []

    for i in src:
        if True if True else False:
            dst.append(i)

    for i in src:
        if lambda: 0:
            dst.append(i)

@@ -151,3 +151,16 @@ def foo():
    result = {}
    for idx, name in indices, fruit:
        result[name] = idx  # PERF403


def foo():
    src = (("x", 1),)
    dst = {}

    for k, v in src:
        if True if True else False:
            dst[k] = v

    for k, v in src:
        if lambda: 0:
            dst[k] = v
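For reference, a sketch of the rewrites these PERF fixtures exercise (hand-written here, not the rule's actual output):

values = [1, 2, 3]
result = []
for i in values:
    result.append(i + 1)  # PERF401 pattern
result = [i + 1 for i in values]  # suggested list comprehension

pairs = [("x", 1), ("y", 2)]
dst = {}
for k, v in pairs:
    dst[k] = v  # PERF403 pattern
dst = {k: v for k, v in pairs}  # suggested dict comprehension
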
@@ -176,4 +176,17 @@ x = lambda: (
x = lambda: (
    # comment
    y := 10
    )
)

# https://github.com/astral-sh/ruff/issues/18475
foo_tooltip = (
    lambda x, data: f"\nfoo: {data['foo'][int(x)]}"
    if data["foo"] is not None
    else ""
)

foo_tooltip = (
    lambda x, data: f"\nfoo: {data['foo'][int(x)]}" +
    more

)

@@ -1,4 +1,4 @@
# Same as `W605_0.py` but using f-strings instead.
# Same as `W605_0.py` but using f-strings and t-strings instead.

#: W605:1:10
regex = f'\.png$'
@@ -66,3 +66,72 @@ s = f"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"

# Debug text (should trigger)
t = f"{'\InHere'=}"



#: W605:1:10
regex = t'\.png$'

#: W605:2:1
regex = t'''
\.png$
'''

#: W605:2:6
f(
t'\_'
)

#: W605:4:6
t"""
multi-line
literal
with \_ somewhere
in the middle
"""

#: W605:1:38
value = t'new line\nand invalid escape \_ here'


#: Okay
regex = fr'\.png$'
regex = t'\\.png$'
regex = fr'''
\.png$
'''
regex = fr'''
\\.png$
'''
s = t'\\'
regex = t'\w'  # noqa
regex = t'''
\w
'''  # noqa

regex = t'\\\_'
value = t'\{{1}}'
value = t'\{1}'
value = t'{1:\}'
value = t"{t"\{1}"}"
value = rt"{t"\{1}"}"

# Okay
value = rt'\{{1}}'
value = rt'\{1}'
value = rt'{1:\}'
value = t"{rt"\{1}"}"

# Regression tests for https://github.com/astral-sh/ruff/issues/10434
t"{{}}+-\d"
t"\n{{}}+-\d+"
t"\n{{}}<7D>+-\d+"

# See https://github.com/astral-sh/ruff/issues/11491
total = 10
ok = 7
incomplete = 3
s = t"TOTAL: {total}\nOK: {ok}\INCOMPLETE: {incomplete}\n"

# Debug text (should trigger)
t = t"{'\InHere'=}"

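The t-string cases mirror the long-standing W605 behavior: an unrecognized escape such as `\.` in a non-raw literal is invalid, and the usual remedies apply. A minimal sketch:

regex = "\.png$"  # W605: invalid escape sequence
regex = r"\.png$"  # ok: raw string
regex = "\\.png$"  # ok: escaped backslash
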
184
crates/ruff_linter/resources/test/fixtures/pylint/missing_maxsplit_arg.py
vendored
Normal file
@@ -0,0 +1,184 @@
SEQ = "1,2,3"

class Foo(str):
    class_str = "1,2,3"

    def split(self, sep=None, maxsplit=-1) -> list[str]:
        return super().split(sep, maxsplit)

class Bar():
    split = "1,2,3"

# Errors
## Test split called directly on string literal
"1,2,3".split(",")[0]  # [missing-maxsplit-arg]
"1,2,3".split(",")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[0]  # [missing-maxsplit-arg]
"1,2,3".rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on string variable
SEQ.split(",")[0]  # [missing-maxsplit-arg]
SEQ.split(",")[-1]  # [missing-maxsplit-arg]
SEQ.rsplit(",")[0]  # [missing-maxsplit-arg]
SEQ.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on class attribute
Foo.class_str.split(",")[0]  # [missing-maxsplit-arg]
Foo.class_str.split(",")[-1]  # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[0]  # [missing-maxsplit-arg]
Foo.class_str.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test split called on sliced string
"1,2,3"[::-1].split(",")[0]  # [missing-maxsplit-arg]
"1,2,3"[::-1][::-1].split(",")[0]  # [missing-maxsplit-arg]
SEQ[:3].split(",")[0]  # [missing-maxsplit-arg]
Foo.class_str[1:3].split(",")[-1]  # [missing-maxsplit-arg]
"1,2,3"[::-1].rsplit(",")[0]  # [missing-maxsplit-arg]
SEQ[:3].rsplit(",")[0]  # [missing-maxsplit-arg]
Foo.class_str[1:3].rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test sep given as named argument
"1,2,3".split(sep=",")[0]  # [missing-maxsplit-arg]
"1,2,3".split(sep=",")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[0]  # [missing-maxsplit-arg]
"1,2,3".rsplit(sep=",")[-1]  # [missing-maxsplit-arg]

## Special cases
"1,2,3".split("\n")[0]  # [missing-maxsplit-arg]
"1,2,3".split("split")[-1]  # [missing-maxsplit-arg]
"1,2,3".rsplit("rsplit")[0]  # [missing-maxsplit-arg]

## Test class attribute named split
Bar.split.split(",")[0]  # [missing-maxsplit-arg]
Bar.split.split(",")[-1]  # [missing-maxsplit-arg]
Bar.split.rsplit(",")[0]  # [missing-maxsplit-arg]
Bar.split.rsplit(",")[-1]  # [missing-maxsplit-arg]

## Test unpacked dict literal kwargs
"1,2,3".split(**{"sep": ","})[0]  # [missing-maxsplit-arg]


# OK
## Test not accessing the first or last element
### Test split called directly on string literal
"1,2,3".split(",")[1]
"1,2,3".split(",")[-2]
"1,2,3".rsplit(",")[1]
"1,2,3".rsplit(",")[-2]

### Test split called on string variable
SEQ.split(",")[1]
SEQ.split(",")[-2]
SEQ.rsplit(",")[1]
SEQ.rsplit(",")[-2]

### Test split called on class attribute
Foo.class_str.split(",")[1]
Foo.class_str.split(",")[-2]
Foo.class_str.rsplit(",")[1]
Foo.class_str.rsplit(",")[-2]

### Test split called on sliced string
"1,2,3"[::-1].split(",")[1]
SEQ[:3].split(",")[1]
Foo.class_str[1:3].split(",")[-2]
"1,2,3"[::-1].rsplit(",")[1]
SEQ[:3].rsplit(",")[1]
Foo.class_str[1:3].rsplit(",")[-2]

### Test sep given as named argument
"1,2,3".split(sep=",")[1]
"1,2,3".split(sep=",")[-2]
"1,2,3".rsplit(sep=",")[1]
"1,2,3".rsplit(sep=",")[-2]

## Test varying maxsplit argument
### str.split() tests
"1,2,3".split(sep=",", maxsplit=1)[-1]
"1,2,3".split(sep=",", maxsplit=1)[0]
"1,2,3".split(sep=",", maxsplit=2)[-1]
"1,2,3".split(sep=",", maxsplit=2)[0]
"1,2,3".split(sep=",", maxsplit=2)[1]

### str.rsplit() tests
"1,2,3".rsplit(sep=",", maxsplit=1)[-1]
"1,2,3".rsplit(sep=",", maxsplit=1)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[-1]
"1,2,3".rsplit(sep=",", maxsplit=2)[0]
"1,2,3".rsplit(sep=",", maxsplit=2)[1]

## Test user-defined split
Foo("1,2,3").split(",")[0]
Foo("1,2,3").split(",")[-1]
Foo("1,2,3").rsplit(",")[0]
Foo("1,2,3").rsplit(",")[-1]

## Test split called on sliced list
["1", "2", "3"][::-1].split(",")[0]

## Test class attribute named split
Bar.split[0]
Bar.split[-1]
Bar.split[0]
Bar.split[-1]

## Test unpacked dict literal kwargs
"1,2,3".split(",", **{"maxsplit": 1})[0]
"1,2,3".split(**{"sep": ",", "maxsplit": 1})[0]


# TODO

## Test variable split result index
## TODO: These require the ability to resolve a variable name to a value
# Errors
result_index = 0
"1,2,3".split(",")[result_index]  # TODO: [missing-maxsplit-arg]
result_index = -1
"1,2,3".split(",")[result_index]  # TODO: [missing-maxsplit-arg]
# OK
result_index = 1
"1,2,3".split(",")[result_index]
result_index = -2
"1,2,3".split(",")[result_index]


## Test split result index modified in loop
## TODO: These require the ability to recognize being in a loop where:
## - the result of split called on a string is indexed by a variable
## - the variable index above is modified
# OK
result_index = 0
for j in range(3):
    print(SEQ.split(",")[result_index])
    result_index = result_index + 1


## Test accessor
## TODO: These require the ability to get the return type of a method
## (possibly via `typing::is_string`)
class Baz():
    def __init__(self):
        self.my_str = "1,2,3"

    def get_string(self) -> str:
        return self.my_str

# Errors
Baz().get_string().split(",")[0]  # TODO: [missing-maxsplit-arg]
Baz().get_string().split(",")[-1]  # TODO: [missing-maxsplit-arg]
# OK
Baz().get_string().split(",")[1]
Baz().get_string().split(",")[-2]


## Test unpacked dict instance kwargs
## TODO: These require the ability to resolve a dict variable name to a value
# Errors
kwargs_without_maxsplit = {"seq": ","}
"1,2,3".split(**kwargs_without_maxsplit)[0]  # TODO: [missing-maxsplit-arg]
# OK
kwargs_with_maxsplit = {"maxsplit": 1}
"1,2,3".split(",", **kwargs_with_maxsplit)[0]  # TODO: false positive
kwargs_with_maxsplit = {"sep": ",", "maxsplit": 1}
"1,2,3".split(**kwargs_with_maxsplit)[0]  # TODO: false positive
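The rationale the fixture encodes: when only the first or last element of the result is used, passing `maxsplit` avoids splitting the whole string. A minimal sketch:

s = "a,b,c,d"
first = s.split(",", maxsplit=1)[0]  # one split instead of three
last = s.rsplit(",", maxsplit=1)[-1]  # same idea, from the right
assert (first, last) == ("a", "d")
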
@@ -2,5 +2,8 @@ FIRST, FIRST = (1, 2)  # PLW0128
FIRST, (FIRST, SECOND) = (1, (1, 2))  # PLW0128
FIRST, (FIRST, SECOND, (THIRD, FIRST)) = (1, (1, 2))  # PLW0128
FIRST, SECOND, THIRD, FIRST, SECOND = (1, 2, 3, 4)  # PLW0128
FIRST, [FIRST, SECOND] = (1, (1, 2))  # PLW0128
FIRST, [FIRST, SECOND, [THIRD, FIRST]] = (1, (1, 2))  # PLW0128
FIRST, *FIRST = (1, 2)  # PLW0128

FIRST, SECOND, _, _, _ignored = (1, 2, 3, 4, 5)  # OK

@@ -79,3 +79,29 @@ class DataClass:
    def normal(self):
        super(DataClass, self).f()  # OK
        super().f()  # OK (`TypeError` in practice)


# see: https://github.com/astral-sh/ruff/issues/18477
class A:
    def foo(self):
        pass


class B(A):
    def bar(self):
        super(__class__, self).foo()


# see: https://github.com/astral-sh/ruff/issues/18684
class C:
    def f(self):
        super = print
        super(C, self)


import builtins


class C:
    def f(self):
        builtins.super(C, self)

@@ -12,3 +12,4 @@ if True:
if True:
    from __future__ import generator_stop
from __future__ import invalid_module, generators
from __future__ import generators  # comment

@@ -110,6 +110,8 @@ from typing_extensions import CapsuleType
# UP035 on py313+ only
from typing_extensions import deprecated

# UP035 on py313+ only
from typing_extensions import get_type_hints

# https://github.com/astral-sh/ruff/issues/15780
from typing_extensions import is_typeddict

@@ -42,3 +42,8 @@ class ServiceRefOrValue:
# Regression test for: https://github.com/astral-sh/ruff/issues/7201
class ServiceRefOrValue:
    service_specification: Optional[str]is not True = None


# Test for: https://github.com/astral-sh/ruff/issues/18508
# Optional[None] should not be offered a fix
foo: Optional[None] = None

84
crates/ruff_linter/resources/test/fixtures/pyupgrade/UP050.py
vendored
Normal file
@@ -0,0 +1,84 @@
class A:
    ...


class A(metaclass=type):
    ...


class A(
    metaclass=type
):
    ...


class A(
    metaclass=type
    #
):
    ...


class A(
    #
    metaclass=type
):
    ...


class A(
    metaclass=type,
    #
):
    ...


class A(
    #
    metaclass=type,
    #
):
    ...


class B(A, metaclass=type):
    ...


class B(
    A,
    metaclass=type,
):
    ...


class B(
    A,
    # comment
    metaclass=type,
):
    ...


def foo():
    class A(metaclass=type):
        ...


class A(
    metaclass=type  # comment
    ,
):
    ...


type = str

class Foo(metaclass=type):
    ...


import builtins

class A(metaclass=builtins.type):
    ...
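What the new fixture exercises: `type` is already the default metaclass, so spelling it out is redundant and the expected fix simply drops the keyword. A before/after sketch (class names are illustrative):

class Before(metaclass=type):  # flagged: `type` is the default metaclass
    ...

class After:  # equivalent once the keyword is removed
    ...
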
@@ -174,3 +174,43 @@ def _():
    global global_foo
    for [a, b, (global_foo, c)] in d:
        f.write((a, b))


# Test cases for lambda and ternary expressions - https://github.com/astral-sh/ruff/issues/18590

def _():
    with Path("file.txt").open("w", encoding="utf-8") as f:
        for l in lambda: 0:
            f.write(f"[{l}]")


def _():
    with Path("file.txt").open("w", encoding="utf-8") as f:
        for l in (1,) if True else (2,):
            f.write(f"[{l}]")


# don't need to add parentheses when making a function argument
def _():
    with open("file", "w") as f:
        for line in lambda: 0:
            f.write(line)


def _():
    with open("file", "w") as f:
        for line in (1,) if True else (2,):
            f.write(line)


# don't add extra parentheses if they're already parenthesized
def _():
    with open("file", "w") as f:
        for line in (lambda: 0):
            f.write(f"{line}")


def _():
    with open("file", "w") as f:
        for line in ((1,) if True else (2,)):
            f.write(f"{line}")

@@ -43,7 +43,6 @@ def func():

import builtins


with builtins.open("FURB129.py") as f:
    for line in f.readlines():
        pass
@@ -51,7 +50,6 @@ with builtins.open("FURB129.py") as f:

from builtins import open as o


with o("FURB129.py") as f:
    for line in f.readlines():
        pass
@@ -89,3 +87,21 @@ with open("FURB129.py") as f:
        pass
    for _not_line in f.readline():
        pass

# https://github.com/astral-sh/ruff/issues/18231
with open("furb129.py") as f:
    for line in (f).readlines():
        pass

with open("furb129.py") as f:
    [line for line in (f).readlines()]


with open("furb129.py") as f:
    for line in (((f))).readlines():
        pass
    for line in(f).readlines():
        pass

# Test case for issue #17683 (missing space before keyword)
print([line for line in f.readlines()if True])

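The equivalence these cases rely on: a file object already iterates over its lines, so `f.readlines()` only adds an intermediate list. A minimal sketch:

with open("FURB129.py") as f:
    for line in f:  # same lines as f.readlines(), without materializing a list
        pass
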
@@ -74,3 +74,28 @@ async def f(y):
def g():
    for x in (set(),):
        x.add(x)


# Test cases for lambda and ternary expressions - https://github.com/astral-sh/ruff/issues/18590

s = set()

for x in lambda: 0:
    s.discard(-x)

for x in (1,) if True else (2,):
    s.add(-x)

# don't add extra parens
for x in (lambda: 0):
    s.discard(-x)

for x in ((1,) if True else (2,)):
    s.add(-x)

# don't add parens directly in function call
for x in lambda: 0:
    s.discard(x)

for x in (1,) if True else (2,):
    s.add(x)
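These mirror the writelines cases above, but for set mutations in a loop: the fix must preserve parenthesization of lambda and ternary iterables. Assuming the rule rewrites elementwise add/discard loops into one bulk call (the rule name is not shown in this hunk), a sketch:

# Presumed rewrite for an add-per-iteration loop (illustrative only).
s = set()
items = (1, 2)
for x in items:  # flagged form
    s.add(-x)
s.update(-x for x in items)  # presumed suggested form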
@@ -43,3 +43,33 @@ log(1, math.e)
|
||||
|
||||
math.log(1, 2.0001)
|
||||
math.log(1, 10.0001)
|
||||
|
||||
|
||||
# see: https://github.com/astral-sh/ruff/issues/18639
|
||||
math.log(1, 10 # comment
|
||||
)
|
||||
|
||||
math.log(1,
|
||||
10 # comment
|
||||
)
|
||||
|
||||
math.log(1 # comment
|
||||
, # comment
|
||||
10 # comment
|
||||
)
|
||||
|
||||
math.log(
|
||||
1 # comment
|
||||
,
|
||||
10 # comment
|
||||
)
|
||||
|
||||
math.log(4.13e223, 2)
|
||||
math.log(4.14e223, 10)
|
||||
|
||||
|
||||
def print_log(*args):
|
||||
try:
|
||||
print(math.log(*args, math.e))
|
||||
except TypeError as e:
|
||||
print(repr(e))
|
||||
|
||||
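This hunk exercises comment handling for what appears to be Refurb's redundant-log-base check: `math.log(x, base)` with a base of exactly 2, 10, or `math.e` has a dedicated, more precise spelling. A sketch of the distinction:

import math

math.log(8, 2)       # flagged: prefer math.log2(8)
math.log(100, 10)    # flagged: prefer math.log10(100)
math.log(5, math.e)  # flagged: prefer math.log(5)
math.log(1, 2.0001)  # not flagged: base is not exactly 2, 10, or math.e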
53
crates/ruff_linter/resources/test/fixtures/refurb/FURB171_1.py
vendored
Normal file
@@ -0,0 +1,53 @@
# Errors.

if 1 in set([1]):
    print("Single-element set")

if 1 in set((1,)):
    print("Single-element set")

if 1 in set({1}):
    print("Single-element set")

if 1 in frozenset([1]):
    print("Single-element set")

if 1 in frozenset((1,)):
    print("Single-element set")

if 1 in frozenset({1}):
    print("Single-element set")

if 1 in set(set([1])):
    print('Recursive solution')



# Non-errors.

if 1 in set((1, 2)):
    pass

if 1 in set([1, 2]):
    pass

if 1 in set({1, 2}):
    pass

if 1 in frozenset((1, 2)):
    pass

if 1 in frozenset([1, 2]):
    pass

if 1 in frozenset({1, 2}):
    pass

if 1 in set(1,):
    pass

if 1 in set(1,2):
    pass

if 1 in set((x for x in range(2))):
    pass
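The dividing line in this fixture: FURB171 fires only when the `set`/`frozenset` call provably yields a single-element collection, in which case membership is plain equality; multi-element literals, bare non-iterable arguments, and generator inputs are left alone. A sketch of the suggested rewrite:

x = 1
if x in set([1]):  # flagged form
    print("Single-element set")
if x == 1:  # suggested equivalent
    print("Single-element set")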
@@ -56,3 +56,38 @@ def f():

def f():
    queue = deque() # Ok

def f():
    x = 0 or(deque)([])


# regression tests for https://github.com/astral-sh/ruff/issues/18612
def f():
    deque([], *[10]) # RUF037 but no fix
    deque([], **{"maxlen": 10}) # RUF037
    deque([], foo=1) # RUF037


# Somewhat related to the issue, both okay because we can't generally look
# inside *args or **kwargs
def f():
    deque(*([], 10)) # Ok
    deque(**{"iterable": [], "maxlen": 10}) # Ok

# The fix was actually always unsafe in the presence of comments. all of these
# are deleted
def f():
    deque( # a comment in deque, deleted
        [ # a comment _in_ the list, deleted
        ], # a comment after the list, deleted
        maxlen=10, # a comment on maxlen, deleted
    ) # only this is preserved


# `maxlen` can also be passed positionally
def f():
    deque([], 10)


def f():
    deque([], iterable=[])
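The underlying simplification, for context: RUF037 flags a `deque` whose iterable argument is an empty literal, since it contributes nothing; only `maxlen` needs to survive the fix, and `*args`/`**kwargs` forms are opaque to the rule. A sketch:

from collections import deque

q = deque([], maxlen=10)  # flagged
q = deque(maxlen=10)      # suggested; behavior is identical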
@@ -149,23 +149,39 @@ value = not my_dict.get("key", 0) # [RUF056]
value = not my_dict.get("key", 0.0) # [RUF056]
value = not my_dict.get("key", "") # [RUF056]

# testing dict.get call using kwargs
value = not my_dict.get(key="key", default=False) # [RUF056]
value = not my_dict.get(default=[], key="key") # [RUF056]

# testing invalid dict.get call with inline comment
value = not my_dict.get("key", # comment1
                        [] # comment2
                        ) # [RUF056]

# testing invalid dict.get call with kwargs and inline comment
value = not my_dict.get(key="key", # comment1
                        default=False # comment2
                        ) # [RUF056]
value = not my_dict.get(default=[], # comment1
                        key="key" # comment2
                        ) # [RUF056]
# regression tests for https://github.com/astral-sh/ruff/issues/18628
# we should avoid fixes when there are "unknown" arguments present, including
# extra positional arguments, either of the positional-only arguments passed as
# a keyword, or completely unknown keywords.

# testing invalid dict.get calls
value = not my_dict.get(key="key", other="something", default=False)
value = not my_dict.get(default=False, other="something", key="test")
# extra positional
not my_dict.get("key", False, "?!")

# `default` is positional-only, so these are invalid
not my_dict.get("key", default=False)
not my_dict.get(key="key", default=False)
not my_dict.get(default=[], key="key")
not my_dict.get(default=False)
not my_dict.get(key="key", other="something", default=False)
not my_dict.get(default=False, other="something", key="test")

# comments don't really matter here because of the kwargs but include them for
# completeness
not my_dict.get(
    key="key", # comment1
    default=False, # comment2
) # comment 3
not my_dict.get(
    default=[], # comment1
    key="key", # comment2
) # comment 3

# the fix is arguably okay here because the same `takes no keyword arguments`
# TypeError is raised at runtime before and after the fix, but we still bail
# out for having an unrecognized number of arguments
not my_dict.get("key", False, foo=...)
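The rationale behind RUF056, for orientation: under `not`, a falsy default is indistinguishable from no default, because `dict.get` returns `None` (also falsy) when the key is absent. The new cases bail out whenever extra or keyword arguments make the call's meaning uncertain. A sketch of the core rewrite:

my_dict = {}
value = not my_dict.get("key", False)  # flagged
value = not my_dict.get("key")         # suggested; same truthiness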
@@ -94,3 +94,9 @@ def f():
        (exponential := (exponential * base_multiplier) % 3): i + 1 for i in range(2)
    }
    return hash_map


# see: https://github.com/astral-sh/ruff/issues/18507
def f(_x):
    x, = "1"
    print(_x)
25
crates/ruff_linter/resources/test/fixtures/ruff/RUF061_deprecated_call.py
vendored
Normal file
@@ -0,0 +1,25 @@
import warnings
import pytest


def raise_deprecation_warning(s):
    warnings.warn(s, DeprecationWarning)
    return s


def test_ok():
    with pytest.deprecated_call():
        raise_deprecation_warning("")


def test_error_trivial():
    pytest.deprecated_call(raise_deprecation_warning, "deprecated")


def test_error_assign():
    s = pytest.deprecated_call(raise_deprecation_warning, "deprecated")
    print(s)


def test_error_lambda():
    pytest.deprecated_call(lambda: warnings.warn("", DeprecationWarning))
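These RUF061 fixtures back a preference for the context-manager form of pytest's warning and exception helpers over the legacy callable form. A sketch of the rewrite for `deprecated_call`:

import warnings
import pytest

def deprecated():
    warnings.warn("old", DeprecationWarning)

pytest.deprecated_call(deprecated)  # legacy form (flagged)
with pytest.deprecated_call():      # preferred form
    deprecated()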
40
crates/ruff_linter/resources/test/fixtures/ruff/RUF061_raises.py
vendored
Normal file
@@ -0,0 +1,40 @@
import pytest


def func(a, b):
    return a / b


def test_ok():
    with pytest.raises(ValueError):
        raise ValueError


def test_ok_as():
    with pytest.raises(ValueError) as excinfo:
        raise ValueError


def test_error_trivial():
    pytest.raises(ZeroDivisionError, func, 1, b=0)


def test_error_match():
    pytest.raises(ZeroDivisionError, func, 1, b=0).match("division by zero")


def test_error_assign():
    excinfo = pytest.raises(ZeroDivisionError, func, 1, b=0)


def test_error_kwargs():
    pytest.raises(func=func, expected_exception=ZeroDivisionError)


def test_error_multi_statement():
    excinfo = pytest.raises(ValueError, int, "hello")
    assert excinfo.match("^invalid literal")


def test_error_lambda():
    pytest.raises(ZeroDivisionError, lambda: 1 / 0)
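The same rewrite applies to `pytest.raises`; the context-manager form also exposes the exception info more cleanly than assigning the legacy call's return value. A sketch:

import pytest

excinfo = pytest.raises(ZeroDivisionError, lambda: 1 / 0)  # legacy (flagged)
with pytest.raises(ZeroDivisionError) as excinfo:          # preferred
    1 / 0
assert excinfo.match("division by zero")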
25
crates/ruff_linter/resources/test/fixtures/ruff/RUF061_warns.py
vendored
Normal file
@@ -0,0 +1,25 @@
import warnings
import pytest


def raise_user_warning(s):
    warnings.warn(s, UserWarning)
    return s


def test_ok():
    with pytest.warns(UserWarning):
        raise_user_warning("")


def test_error_trivial():
    pytest.warns(UserWarning, raise_user_warning, "warning")


def test_error_assign():
    s = pytest.warns(UserWarning, raise_user_warning, "warning")
    print(s)


def test_error_lambda():
    pytest.warns(UserWarning, lambda: warnings.warn("", UserWarning))
@@ -1,6 +1,6 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_text_size::Ranged;

use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{
@@ -38,92 +38,64 @@ pub(crate) fn bindings(checker: &Checker) {
                .dummy_variable_rgx
                .is_match(binding.name(checker.source()))
            {
                let mut diagnostic = Diagnostic::new(
                    pyflakes::rules::UnusedVariable {
                        name: binding.name(checker.source()).to_string(),
                    },
                    binding.range(),
                );
                diagnostic.try_set_fix(|| {
                    pyflakes::fixes::remove_exception_handler_assignment(binding, checker.locator)
                checker
                    .report_diagnostic(
                        pyflakes::rules::UnusedVariable {
                            name: binding.name(checker.source()).to_string(),
                        },
                        binding.range(),
                    )
                    .try_set_fix(|| {
                        pyflakes::fixes::remove_exception_handler_assignment(
                            binding,
                            checker.locator,
                        )
                        .map(Fix::safe_edit)
                });
                checker.report_diagnostic(diagnostic);
                    });
            }
        }
        if checker.enabled(Rule::InvalidAllFormat) {
            if let Some(diagnostic) = pylint::rules::invalid_all_format(binding) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::invalid_all_format(checker, binding);
        }
        if checker.enabled(Rule::InvalidAllObject) {
            if let Some(diagnostic) = pylint::rules::invalid_all_object(binding) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::invalid_all_object(checker, binding);
        }
        if checker.enabled(Rule::NonAsciiName) {
            if let Some(diagnostic) = pylint::rules::non_ascii_name(binding, checker.locator) {
                checker.report_diagnostic(diagnostic);
            }
            pylint::rules::non_ascii_name(checker, binding);
        }
        if checker.enabled(Rule::UnconventionalImportAlias) {
            if let Some(diagnostic) = flake8_import_conventions::rules::unconventional_import_alias(
            flake8_import_conventions::rules::unconventional_import_alias(
                checker,
                binding,
                &checker.settings.flake8_import_conventions.aliases,
            ) {
                checker.report_diagnostic(diagnostic);
            }
            );
        }
        if checker.enabled(Rule::UnaliasedCollectionsAbcSetImport) {
            if let Some(diagnostic) =
                flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pyi::rules::unaliased_collections_abc_set_import(checker, binding);
        }
        if !checker.source_type.is_stub() && checker.enabled(Rule::UnquotedTypeAlias) {
            flake8_type_checking::rules::unquoted_type_alias(checker, binding);
        }
        if checker.enabled(Rule::UnsortedDunderSlots) {
            if let Some(diagnostic) = ruff::rules::sort_dunder_slots(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::sort_dunder_slots(checker, binding);
        }
        if checker.enabled(Rule::UsedDummyVariable) {
            if let Some(diagnostic) = ruff::rules::used_dummy_variable(checker, binding, binding_id)
            {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::used_dummy_variable(checker, binding, binding_id);
        }
        if checker.enabled(Rule::AssignmentInAssert) {
            if let Some(diagnostic) = ruff::rules::assignment_in_assert(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            ruff::rules::assignment_in_assert(checker, binding);
        }
        if checker.enabled(Rule::PytestUnittestRaisesAssertion) {
            if let Some(diagnostic) =
                flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pytest_style::rules::unittest_raises_assertion_binding(checker, binding);
        }
        if checker.enabled(Rule::ForLoopWrites) {
            if let Some(diagnostic) = refurb::rules::for_loop_writes_binding(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            refurb::rules::for_loop_writes_binding(checker, binding);
        }
        if checker.enabled(Rule::CustomTypeVarForSelf) {
            if let Some(diagnostic) =
                flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding)
            {
                checker.report_diagnostic(diagnostic);
            }
            flake8_pyi::rules::custom_type_var_instead_of_self(checker, binding);
        }
        if checker.enabled(Rule::PrivateTypeParameter) {
            if let Some(diagnostic) = pyupgrade::rules::private_type_parameter(checker, binding) {
                checker.report_diagnostic(diagnostic);
            }
            pyupgrade::rules::private_type_parameter(checker, binding);
        }
    }
}

@@ -1,9 +1,9 @@
use ruff_diagnostics::{Diagnostic, Fix};
use ruff_python_semantic::analyze::visibility;
use ruff_python_semantic::{Binding, BindingKind, Imported, ResolvedReference, ScopeKind};
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;

use crate::Fix;
use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::fix;
@@ -112,12 +112,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                    .map(|id| checker.semantic.reference(*id))
                    .all(ResolvedReference::is_load)
                {
                    checker.report_diagnostic(Diagnostic::new(
                    checker.report_diagnostic(
                        pylint::rules::GlobalVariableNotAssigned {
                            name: (*name).to_string(),
                        },
                        binding.range(),
                    ));
                    );
                }
            }
        }
@@ -146,12 +146,12 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                if scope.kind.is_generator() {
                    continue;
                }
                checker.report_diagnostic(Diagnostic::new(
                checker.report_diagnostic(
                    pylint::rules::RedefinedArgumentFromLocal {
                        name: name.to_string(),
                    },
                    binding.range(),
                ));
                );
            }
        }
    }
@@ -186,13 +186,13 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                    continue;
                }

                checker.report_diagnostic(Diagnostic::new(
                checker.report_diagnostic(
                    pyflakes::rules::ImportShadowedByLoopVar {
                        name: name.to_string(),
                        row: checker.compute_source_row(shadowed.start()),
                    },
                    binding.range(),
                ));
                );
            }
        }
    }
@@ -331,7 +331,7 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
        // Create diagnostics for each statement.
        for (source, entries) in &redefinitions {
            for (shadowed, binding) in entries {
                let mut diagnostic = Diagnostic::new(
                let mut diagnostic = checker.report_diagnostic(
                    pyflakes::rules::RedefinedWhileUnused {
                        name: binding.name(checker.source()).to_string(),
                        row: checker.compute_source_row(shadowed.start()),
@@ -346,8 +346,6 @@ pub(crate) fn deferred_scopes(checker: &Checker) {
                if let Some(fix) = source.as_ref().and_then(|source| fixes.get(source)) {
                    diagnostic.set_fix(fix.clone());
                }

                checker.report_diagnostic(diagnostic);
            }
        }
    }

@@ -15,16 +15,10 @@ pub(crate) fn except_handler(except_handler: &ExceptHandler, checker: &Checker)
            name,
            body,
            range: _,
            node_index: _,
        }) => {
            if checker.enabled(Rule::BareExcept) {
                if let Some(diagnostic) = pycodestyle::rules::bare_except(
                    type_.as_deref(),
                    body,
                    except_handler,
                    checker.locator,
                ) {
                    checker.report_diagnostic(diagnostic);
                }
                pycodestyle::rules::bare_except(checker, type_.as_deref(), body, except_handler);
            }
            if checker.enabled(Rule::RaiseWithoutFromInsideExcept) {
                flake8_bugbear::rules::raise_without_from_inside_except(

Some files were not shown because too many files have changed in this diff.