Compare commits: dcreager/f ... cjm/tvassi (251 commits)
| SHA1 |
|---|
| e3876e6d6d |
| b2de749c32 |
| 9085f18353 |
| 8152ba7cb7 |
| 3d01d3be3e |
| 4510a236d3 |
| 76b6d53d8b |
| f82b72882b |
| d07eefc408 |
| f7237e3b69 |
| 2f9992b6ef |
| 457ec4dddd |
| aa0614509b |
| 9000eb3bfd |
| 7f50b503cf |
| 24d3fc27fb |
| 6f821ac846 |
| d410d12bc5 |
| 89424cce5f |
| fd76d70a31 |
| a4c8e43c5f |
| ada4c4cb1f |
| bb6c7cad07 |
| 47e3aa40b3 |
| 9a6633da0b |
| de78da5ee6 |
| 20d64b9c85 |
| 4850c187ea |
| 3f32446e16 |
| 784daae497 |
| 178c882740 |
| 101e1a5ddd |
| 965a4dd731 |
| 5e2c818417 |
| 90c12f4177 |
| 6e9fb9af38 |
| a507c1b8b3 |
| 5a91badb8b |
| 1945bfdb84 |
| a95c73d5d0 |
| 78b4c3ccf1 |
| 2485afe640 |
| b8ed729f59 |
| 108c470348 |
| 87c64c9eab |
| a10606dda2 |
| d1c6dd9ac1 |
| 073b993ab0 |
| 6a36cd6f02 |
| 3b15af6d4f |
| e95130ad80 |
| 68e32c103f |
| fe4051b2e6 |
| fa628018b2 |
| 8535af8516 |
| b51c4f82ea |
| e6a798b962 |
| 52b0470870 |
| c4a08782cc |
| 91481a8be7 |
| 097af060c9 |
| b7d0b3f9e5 |
| 084352f72c |
| 78d4356301 |
| 96697c98f3 |
| f7cae4ffb5 |
| 675a5af89a |
| ea3f4ac059 |
| 6d2c10cca2 |
| 3cf44e401a |
| 17050e2ec5 |
| a6dc04f96e |
| e515899141 |
| 0c80c56afc |
| b7ce694162 |
| 163d526407 |
| 75effb8ed7 |
| 3353d07938 |
| 41f3f21629 |
| 76ec64d535 |
| b7e69ecbfc |
| 9c57862262 |
| 67ef370733 |
| e17e1e860b |
| 03d8679adf |
| d33a503686 |
| 650cbdd296 |
| d2a238dfad |
| 6e765b4527 |
| c5e41c278c |
| 0eeb02c0c1 |
| f31b1c695c |
| 5679bf00bc |
| a7c358ab5c |
| b6de01b9a5 |
| 18bac94226 |
| 7568eeb7a5 |
| 0e85cbdd91 |
| 7825975972 |
| f584b66824 |
| ad1a8da4d1 |
| 0861ecfa55 |
| d1f359afbb |
| b84b58760e |
| d94be0e780 |
| 8a6787b39e |
| 4a621c2c12 |
| 2bb99df394 |
| f11d9cb509 |
| 549ab74bd6 |
| 81fc7d7d3a |
| 8c68d30c3a |
| 93d6a3567b |
| 1d788981cd |
| 7d46579808 |
| c9a6b1a9d0 |
| 9b9d16c3ba |
| 79f8473e51 |
| ca4fdf452d |
| 3c460a7b9a |
| 31e6576971 |
| c953e7d143 |
| 5096824793 |
| ae7691b026 |
| 504fa20057 |
| f0868ac0c9 |
| 01a31c08f5 |
| 405878a128 |
| 80103a179d |
| 9a8f3cf247 |
| 07718f4788 |
| 1e8881f9af |
| 152a0b6585 |
| 1ad5015e19 |
| 92f95ff494 |
| ceb2bf1168 |
| f521358033 |
| 74081032d9 |
| dbc137c951 |
| 826b2c9ff3 |
| a3e55cfd8f |
| d2246278e6 |
| 6bd1863bf0 |
| 97dc58fc77 |
| 53a9448fb5 |
| 516291b693 |
| b09f00a4ef |
| 03065c245c |
| b45598389d |
| 4729ff2bc8 |
| 1bdb22c139 |
| 1c65e0ad25 |
| 4443f6653c |
| b0d475f353 |
| b578a828ef |
| 64ba39a385 |
| a4e225ee8a |
| 45d0634b01 |
| 4bcf1778fa |
| 6044f04137 |
| 2e95475f57 |
| cfa1505068 |
| 0251679f87 |
| 6ab32a7746 |
| bc0a5aa409 |
| aba21a5d47 |
| b6281a8805 |
| 049280a3bc |
| fa88989ef0 |
| 4c3f389598 |
| 6d3b1d13d6 |
| 3f84e75e20 |
| afc18ff1a1 |
| f1a539dac6 |
| ef0343189c |
| 4eecc40110 |
| cf59cee928 |
| 538393d1f3 |
| 92ecfc908b |
| f7b48510b5 |
| 9937064761 |
| 8d2c79276d |
| 0f47810768 |
| eb1d2518c1 |
| a45a0a92bd |
| 43bd043755 |
| 9a54ee3a1c |
| 25c3be51d2 |
| e71f3ed2c5 |
| ac6219ec38 |
| e93fa7062c |
| 21fd28d713 |
| a01f25107a |
| 48a85c4ed4 |
| 1796ca97d5 |
| e897f37911 |
| 00e73dc331 |
| 7b6222700b |
| bfc1650198 |
| d5410ef9fe |
| 9db63fc58c |
| 61e73481fe |
| e170fe493d |
| e91e2f49db |
| b537552927 |
| 5a719f2d60 |
| e7f38fe74b |
| 624f5c6c22 |
| 8abf93f5fb |
| 5407249467 |
| 0a1f9d090e |
| f9c7908bb7 |
| 99fa850e53 |
| a241321735 |
| b1b8ca3bcd |
| 3fae176345 |
| f36262d970 |
| e45f23b0ec |
| aa46047649 |
| f9da115fdc |
| 3872d57463 |
| 27ada26ddb |
| 810478f68b |
| 17f799424a |
| c12640fea8 |
| 3796b13ea2 |
| ad5a659f29 |
| 27a377f077 |
| b8b624d890 |
| 6dc2d29966 |
| 890ba725d9 |
| 298f43f34e |
| 3b300559ab |
| 14f71ceb83 |
| 4775719abf |
| 6bdffc3cbf |
| 775815ef22 |
| 0299a52fb1 |
| 83d5ad8983 |
| ae6fde152c |
| d2b20f7367 |
| 38a3b056e3 |
| 37a0836bd2 |
| f83295fe51 |
| c4581788b2 |
| 2894aaa943 |
| ed4866a00b |
| 9b5fe51b32 |
| 53ffe7143f |
| 21561000b1 |
| 9c0772d8f0 |
.gitattributes (6 changes, vendored)

````diff
@@ -12,6 +12,12 @@ crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_
 crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
 crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
+
+crates/ruff_linter/resources/test/fixtures/ruff/RUF046_CR.py text eol=cr
+crates/ruff_linter/resources/test/fixtures/ruff/RUF046_LF.py text eol=lf
+
+crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_CR.py text eol=cr
+crates/ruff_linter/resources/test/fixtures/pyupgrade/UP018_LF.py text eol=lf
 
 crates/ruff_python_parser/resources/inline linguist-generated=true
 
 ruff.schema.json -diff linguist-generated=true text=auto eol=lf
````
.github/CODEOWNERS (10 changes, vendored)

````diff
@@ -14,11 +14,11 @@
 # flake8-pyi
 /crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
 
-# Script for fuzzing the parser/red-knot etc.
+# Script for fuzzing the parser/ty etc.
 /python/py-fuzzer/ @AlexWaygood
 
-# red-knot
-/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
+# ty
+/crates/ty* @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
 /crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
-/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
-/crates/red_knot_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
+/scripts/ty_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp @dcreager
+/crates/ty_python_semantic @carljm @AlexWaygood @sharkdp @dcreager
````
.github/actionlint.yaml (1 change, vendored)

````diff
@@ -6,5 +6,6 @@ self-hosted-runner:
   labels:
     - depot-ubuntu-latest-8
     - depot-ubuntu-22.04-16
     - depot-ubuntu-22.04-32
     - github-windows-2025-x86_64-8
+    - github-windows-2025-x86_64-16
````
.github/workflows/build-binaries.yml (16 changes, vendored)

````diff
@@ -43,7 +43,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
@@ -72,7 +72,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -114,7 +114,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: arm64
@@ -170,7 +170,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: ${{ matrix.platform.arch }}
@@ -223,7 +223,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -298,7 +298,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
@@ -363,7 +363,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -429,7 +429,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
````
.github/workflows/build-docker.yml (8 changes, vendored)

````diff
@@ -79,7 +79,7 @@ jobs:
       # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
       - name: Build and push by digest
         id: build
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6
         with:
           context: .
           platforms: ${{ matrix.platform }}
@@ -113,7 +113,7 @@ jobs:
     if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
     steps:
       - name: Download digests
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           path: /tmp/digests
           pattern: digests-*
@@ -231,7 +231,7 @@ jobs:
           ${{ env.TAG_PATTERNS }}
 
       - name: Build and push
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6
+        uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6
         with:
           context: .
           platforms: linux/amd64,linux/arm64
@@ -256,7 +256,7 @@ jobs:
     if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
     steps:
       - name: Download digests
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           path: /tmp/digests
           pattern: digests-*
````
.github/workflows/ci.yaml (113 changes, vendored)

````diff
@@ -36,8 +36,8 @@ jobs:
       code: ${{ steps.check_code.outputs.changed }}
       # Flag that is raised when any code that affects the fuzzer is changed
       fuzz: ${{ steps.check_fuzzer.outputs.changed }}
-      # Flag that is set to "true" when code related to red-knot changes.
-      red_knot: ${{ steps.check_red_knot.outputs.changed }}
+      # Flag that is set to "true" when code related to ty changes.
+      ty: ${{ steps.check_ty.outputs.changed }}
 
       # Flag that is set to "true" when code related to the playground changes.
       playground: ${{ steps.check_playground.outputs.changed }}
@@ -84,7 +84,7 @@ jobs:
           if git diff --quiet "${MERGE_BASE}...HEAD" -- ':Cargo.toml' \
             ':Cargo.lock' \
             ':crates/**' \
-            ':!crates/red_knot*/**' \
+            ':!crates/ty*/**' \
             ':!crates/ruff_python_formatter/**' \
             ':!crates/ruff_formatter/**' \
             ':!crates/ruff_dev/**' \
@@ -145,7 +145,7 @@ jobs:
         run: |
           if git diff --quiet "${MERGE_BASE}...HEAD" -- ':**' \
             ':!**/*.md' \
-            ':crates/red_knot_python_semantic/resources/mdtest/**/*.md' \
+            ':crates/ty_python_semantic/resources/mdtest/**/*.md' \
            ':!docs/**' \
            ':!assets/**' \
            ':.github/workflows/ci.yaml' \
@@ -168,15 +168,15 @@ jobs:
             echo "changed=true" >> "$GITHUB_OUTPUT"
           fi
 
-      - name: Check if the red-knot code changed
-        id: check_red_knot
+      - name: Check if the ty code changed
+        id: check_ty
         env:
           MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
         run: |
           if git diff --quiet "${MERGE_BASE}...HEAD" -- \
             ':Cargo.toml' \
             ':Cargo.lock' \
-            ':crates/red_knot*/**' \
+            ':crates/ty*/**' \
             ':crates/ruff_db/**' \
             ':crates/ruff_annotate_snippets/**' \
             ':crates/ruff_python_ast/**' \
@@ -221,7 +221,7 @@ jobs:
       - name: "Clippy"
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
       - name: "Clippy (wasm)"
-        run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
+        run: cargo clippy -p ruff_wasm -p ty_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
 
   cargo-test-linux:
     name: "cargo test (linux)"
@@ -239,21 +239,21 @@ jobs:
       - name: "Install mold"
         uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-insta
-      - name: Red-knot mdtests (GitHub annotations)
-        if: ${{ needs.determine_changes.outputs.red_knot == 'true' }}
+      - name: ty mdtests (GitHub annotations)
+        if: ${{ needs.determine_changes.outputs.ty == 'true' }}
         env:
           NO_COLOR: 1
           MDTEST_GITHUB_ANNOTATIONS_FORMAT: 1
         # Ignore errors if this step fails; we want to continue to later steps in the workflow anyway.
         # This step is just to get nice GitHub annotations on the PR diff in the files-changed tab.
-        run: cargo test -p red_knot_python_semantic --test mdtest || true
+        run: cargo test -p ty_python_semantic --test mdtest || true
       - name: "Run tests"
         shell: bash
         env:
@@ -268,7 +268,7 @@ jobs:
       # sync, not just public items. Eventually we should do this for all
       # crates; for now add crates here as they are warning-clean to prevent
       # regression.
-      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
+      - run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items
         env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
@@ -276,6 +276,10 @@ jobs:
         with:
           name: ruff
           path: target/debug/ruff
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: ty
+          path: target/debug/ty
 
   cargo-test-linux-release:
     name: "cargo test (linux, release)"
@@ -293,11 +297,11 @@ jobs:
       - name: "Install mold"
         uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-insta
       - name: "Run tests"
@@ -320,7 +324,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-nextest
       - name: "Run tests"
@@ -346,7 +350,7 @@ jobs:
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 20
           cache: "npm"
@@ -358,9 +362,9 @@ jobs:
         run: |
           cd crates/ruff_wasm
           wasm-pack test --node
-      - name: "Test red_knot_wasm"
+      - name: "Test ty_wasm"
         run: |
-          cd crates/red_knot_wasm
+          cd crates/ty_wasm
           wasm-pack test --node
 
   cargo-build-release:
@@ -403,11 +407,11 @@ jobs:
       - name: "Install mold"
         uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-insta
       - name: "Run tests"
@@ -456,7 +460,7 @@ jobs:
         with:
           persist-credentials: false
       - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
-      - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        name: Download Ruff binary to test
        id: download-cached-binary
        with:
@@ -521,11 +525,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
 
-      - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         name: Download comparison Ruff binary
         id: ruff-target
         with:
@@ -632,6 +636,53 @@ jobs:
           name: ecosystem-result
           path: ecosystem-result
 
+  fuzz-ty:
+    name: "Fuzz for new ty panics"
+    runs-on: depot-ubuntu-22.04-16
+    needs:
+      - cargo-test-linux
+      - determine_changes
+    # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.ty == 'true' }}
+    timeout-minutes: 20
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        name: Download new ty binary
+        id: ty-new
+        with:
+          name: ty
+          path: target/debug
+      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+        name: Download baseline ty binary
+        with:
+          name: ty
+          branch: ${{ github.event.pull_request.base.ref }}
+          workflow: "ci.yaml"
+          check_artifacts: true
+      - uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+      - name: Fuzz
+        env:
+          FORCE_COLOR: 1
+          NEW_TY: ${{ steps.ty-new.outputs.download-path }}
+        run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x "${PWD}/ty" "${NEW_TY}/ty"
+
+          (
+            uvx \
+              --python="${PYTHON_VERSION}" \
+              --from=./python/py-fuzzer \
+              fuzz \
+              --test-executable="${NEW_TY}/ty" \
+              --baseline-executable="${PWD}/ty" \
+              --only-new-bugs \
+              --bin=ty \
+              0-500
+          )
+
 cargo-shear:
     name: "cargo shear"
     runs-on: ubuntu-latest
@@ -654,7 +705,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
          architecture: x64
@@ -708,7 +759,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: "3.13"
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
@@ -779,12 +830,12 @@ jobs:
           persist-credentials: false
           repository: "astral-sh/ruff-lsp"
 
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           # installation fails on 3.13 and newer
           python-version: "3.12"
 
-      - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         name: Download development ruff binary
         id: ruff-target
         with:
@@ -821,7 +872,7 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 22
           cache: "npm"
@@ -857,7 +908,7 @@ jobs:
         run: rustup show
 
       - name: "Install codspeed"
-        uses: taiki-e/install-action@09dc018eee06ae1c9e0409786563f534210ceb83 # v2
+        uses: taiki-e/install-action@86c23eed46c17b80677df6d8151545ce3e236c61 # v2
         with:
           tool: cargo-codspeed
````
.github/workflows/daily_property_tests.yaml (10 changes, vendored)

````diff
@@ -38,17 +38,17 @@ jobs:
       - name: "Install mold"
         uses: rui314/setup-mold@e16410e7f8d9e167b74ad5697a9089a35126eb50 # v1
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-      - name: Build Red Knot
+      - name: Build ty
         # A release build takes longer (2 min vs 1 min), but the property tests run much faster in release
         # mode (1.5 min vs 14 min), so the overall time is shorter with a release build.
-        run: cargo build --locked --release --package red_knot_python_semantic --tests
+        run: cargo build --locked --release --package ty_python_semantic --tests
       - name: Run property tests
         shell: bash
         run: |
           export QUICKCHECK_TESTS=100000
           for _ in {1..5}; do
-            cargo test --locked --release --package red_knot_python_semantic -- --ignored list::property_tests
-            cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
+            cargo test --locked --release --package ty_python_semantic -- --ignored list::property_tests
+            cargo test --locked --release --package ty_python_semantic -- --ignored types::property_tests::stable
           done
 
   create-issue-on-failure:
@@ -68,5 +68,5 @@ jobs:
             repo: "ruff",
             title: `Daily property test run failed on ${new Date().toDateString()}`,
             body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-            labels: ["bug", "red-knot", "testing"],
+            labels: ["bug", "ty", "testing"],
           })
````
.github/workflows/mypy_primer.yaml (18 changes, vendored)

````diff
@@ -5,7 +5,7 @@ permissions: {}
 on:
   pull_request:
     paths:
-      - "crates/red_knot*/**"
+      - "crates/ty*/**"
       - "crates/ruff_db"
       - "crates/ruff_python_ast"
       - "crates/ruff_python_parser"
@@ -21,11 +21,12 @@ env:
   CARGO_NET_RETRY: 10
   CARGO_TERM_COLOR: always
   RUSTUP_MAX_RETRIES: 10
   RUST_BACKTRACE: 1
 
 jobs:
   mypy_primer:
     name: Run mypy_primer
-    runs-on: depot-ubuntu-22.04-16
+    runs-on: depot-ubuntu-22.04-32
     timeout-minutes: 20
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -40,19 +41,16 @@ jobs:
       - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
         with:
           workspaces: "ruff"
 
       - name: Install Rust toolchain
         run: rustup show
 
-      - name: Install mypy_primer
-        run: |
-          uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support-v5"
-
       - name: Run mypy_primer
         shell: bash
         run: |
           cd ruff
 
-          PRIMER_SELECTOR="$(paste -s -d'|' crates/red_knot_python_semantic/resources/primer/good.txt)"
+          PRIMER_SELECTOR="$(paste -s -d'|' crates/ty_python_semantic/resources/primer/good.txt)"
 
           echo "new commit"
           git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
@@ -66,9 +64,11 @@ jobs:
 
           echo "Project selector: $PRIMER_SELECTOR"
           # Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
-          uvx mypy_primer \
+          uvx \
+            --from="git+https://github.com/hauntsaninja/mypy_primer@4b15cf3b07db69db67bbfaebfffb2a8a28040933" \
+            mypy_primer \
             --repo ruff \
-            --type-checker knot \
+            --type-checker ty \
             --old base_commit \
             --new "$GITHUB_SHA" \
             --project-selector "/($PRIMER_SELECTOR)\$" \
````
.github/workflows/publish-docs.yml (2 changes, vendored)

````diff
@@ -28,7 +28,7 @@ jobs:
           ref: ${{ inputs.ref }}
           persist-credentials: true
 
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
         with:
           python-version: 3.12
````
.github/workflows/publish-playground.yml (2 changes, vendored)

````diff
@@ -29,7 +29,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 22
           cache: "npm"
````
.github/workflows/publish-pypi.yml (2 changes, vendored)

````diff
@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: "Install uv"
         uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
-      - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
+      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels-*
           path: wheels
````
.github/workflows/publish-ty-playground.yml (file header not captured; name inferred from the paths it references)

````diff
@@ -1,5 +1,5 @@
-# Publish the Red Knot playground.
-name: "[Knot Playground] Release"
+# Publish the ty playground.
+name: "[ty Playground] Release"
 
 permissions: {}
 
@@ -7,12 +7,12 @@ on:
   push:
     branches: [main]
     paths:
-      - "crates/red_knot*/**"
+      - "crates/ty*/**"
       - "crates/ruff_db/**"
       - "crates/ruff_python_ast/**"
       - "crates/ruff_python_parser/**"
       - "playground/**"
-      - ".github/workflows/publish-knot-playground.yml"
+      - ".github/workflows/publish-ty-playground.yml"
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}
@@ -35,7 +35,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 22
       - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
@@ -45,8 +45,8 @@ jobs:
       - name: "Run TypeScript checks"
         run: npm run check
         working-directory: playground
-      - name: "Build Knot playground"
-        run: npm run build --workspace knot-playground
+      - name: "Build ty playground"
+        run: npm run build --workspace ty-playground
         working-directory: playground
       - name: "Deploy to Cloudflare Pages"
         if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
@@ -55,4 +55,4 @@ jobs:
           apiToken: ${{ secrets.CF_API_TOKEN }}
           accountId: ${{ secrets.CF_ACCOUNT_ID }}
           # `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
-          command: pages deploy playground/knot/dist --project-name=knot-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
+          command: pages deploy playground/ty/dist --project-name=ty-playground --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
````
.github/workflows/publish-wasm.yml (2 changes, vendored)

````diff
@@ -45,7 +45,7 @@ jobs:
           jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
           mv /tmp/package.json crates/ruff_wasm/pkg
       - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
-      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version: 20
           registry-url: "https://registry.npmjs.org"
````
.github/workflows/release.yml (29 changes, vendored)

````diff
@@ -40,6 +40,7 @@ permissions:
 # If there's a prerelease-style suffix to the version, then the release(s)
 # will be marked as a prerelease.
 on:
+  pull_request:
   workflow_dispatch:
     inputs:
       tag:
@@ -60,7 +61,7 @@ jobs:
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
         with:
           persist-credentials: false
           submodules: recursive
@@ -68,9 +69,9 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4-prerelease.1/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.5-prerelease.1/cargo-dist-installer.sh | sh"
       - name: Cache dist
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
+        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
           name: cargo-dist-cache
           path: ~/.cargo/bin/dist
@@ -86,7 +87,7 @@ jobs:
           cat plan-dist-manifest.json
           echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
       - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
+        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
           name: artifacts-plan-dist-manifest
           path: plan-dist-manifest.json
@@ -123,19 +124,19 @@ jobs:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
         with:
           persist-credentials: false
           submodules: recursive
       - name: Install cached dist
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
         with:
           name: cargo-dist-cache
           path: ~/.cargo/bin/
       - run: chmod +x ~/.cargo/bin/dist
       # Get all the local artifacts for the global tasks to use (for e.g. checksums)
       - name: Fetch local artifacts
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
         with:
           pattern: artifacts-*
           path: target/distrib/
@@ -153,7 +154,7 @@ jobs:
 
           cp dist-manifest.json "$BUILD_MANIFEST_NAME"
       - name: "Upload artifacts"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
+        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
           name: artifacts-build-global
           path: |
@@ -174,19 +175,19 @@ jobs:
     outputs:
       val: ${{ steps.host.outputs.manifest }}
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
         with:
           persist-credentials: false
           submodules: recursive
       - name: Install cached dist
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
         with:
           name: cargo-dist-cache
           path: ~/.cargo/bin/
       - run: chmod +x ~/.cargo/bin/dist
       # Fetch artifacts from scratch-storage
       - name: Fetch artifacts
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
         with:
           pattern: artifacts-*
           path: target/distrib/
@@ -200,7 +201,7 @@ jobs:
           cat dist-manifest.json
           echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
       - name: "Upload dist-manifest.json"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02
+        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
           # Overwrite the previous copy
           name: artifacts-dist-manifest
@@ -250,13 +251,13 @@ jobs:
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
         with:
           persist-credentials: false
           submodules: recursive
       # Create a GitHub Release while uploading all files to it
       - name: "Download GitHub Artifacts"
-        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
         with:
           pattern: artifacts-*
           path: artifacts
````
.github/workflows/sync_typeshed.yaml (16 changes, vendored)

````diff
@@ -39,13 +39,13 @@ jobs:
       - name: Sync typeshed
         id: sync
         run: |
-          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
-          mkdir ruff/crates/red_knot_vendored/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/ty_vendored/vendor/typeshed
+          mkdir ruff/crates/ty_vendored/vendor/typeshed
+          cp typeshed/README.md ruff/crates/ty_vendored/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/ty_vendored/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/ty_vendored/vendor/typeshed/stdlib
+          rm -rf ruff/crates/ty_vendored/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/ty_vendored/vendor/typeshed/source_commit.txt
       - name: Commit the changes
         id: commit
         if: ${{ steps.sync.outcome == 'success' }}
@@ -79,5 +79,5 @@ jobs:
             repo: "ruff",
             title: `Automated typeshed sync failed on ${new Date().toDateString()}`,
             body: "Run listed here: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
-            labels: ["bug", "red-knot"],
+            labels: ["bug", "ty"],
           })
````
.pre-commit-config.yaml (file header not captured; name inferred from the content)

````diff
@@ -3,8 +3,8 @@ fail_fast: false
 exclude: |
   (?x)^(
     .github/workflows/release.yml|
-    crates/red_knot_vendored/vendor/.*|
-    crates/red_knot_project/resources/.*|
+    crates/ty_vendored/vendor/.*|
+    crates/ty_project/resources/.*|
     crates/ruff_benchmark/resources/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -65,7 +65,7 @@ repos:
       - black==25.1.0
 
   - repo: https://github.com/crate-ci/typos
-    rev: v1.31.1
+    rev: v1.32.0
     hooks:
       - id: typos
 
@@ -79,7 +79,7 @@ repos:
         pass_filenames: false # This makes it a lot faster
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.11.6
+    rev: v0.11.8
     hooks:
       - id: ruff-format
       - id: ruff
````
CHANGELOG.md (61 changes)

````diff
@@ -1,5 +1,66 @@
 # Changelog
 
+## 0.11.8
+
+### Preview features
+
+- \[`airflow`\] Apply auto fixes to cases where the names have changed in Airflow 3 (`AIR302`, `AIR311`) ([#17553](https://github.com/astral-sh/ruff/pull/17553), [#17570](https://github.com/astral-sh/ruff/pull/17570), [#17571](https://github.com/astral-sh/ruff/pull/17571))
+- \[`airflow`\] Extend `AIR301` rule ([#17598](https://github.com/astral-sh/ruff/pull/17598))
+- \[`airflow`\] Update existing `AIR302` rules with better suggestions ([#17542](https://github.com/astral-sh/ruff/pull/17542))
+- \[`refurb`\] Mark fix as safe for `readlines-in-for` (`FURB129`) ([#17644](https://github.com/astral-sh/ruff/pull/17644))
+- [syntax-errors] `nonlocal` declaration at module level ([#17559](https://github.com/astral-sh/ruff/pull/17559))
+- [syntax-errors] Detect single starred expression assignment `x = *y` ([#17624](https://github.com/astral-sh/ruff/pull/17624))
+
+### Bug fixes
+
+- \[`flake8-pyi`\] Ensure `Literal[None,] | Literal[None,]` is not autofixed to `None | None` (`PYI061`) ([#17659](https://github.com/astral-sh/ruff/pull/17659))
+- \[`flake8-use-pathlib`\] Avoid suggesting `Path.iterdir()` for `os.listdir` with file descriptor (`PTH208`) ([#17715](https://github.com/astral-sh/ruff/pull/17715))
+- \[`flake8-use-pathlib`\] Fix `PTH104` false positive when `rename` is passed a file descriptor ([#17712](https://github.com/astral-sh/ruff/pull/17712))
+- \[`flake8-use-pathlib`\] Fix `PTH116` false positive when `stat` is passed a file descriptor ([#17709](https://github.com/astral-sh/ruff/pull/17709))
+- \[`flake8-use-pathlib`\] Fix `PTH123` false positive when `open` is passed a file descriptor from a function call ([#17705](https://github.com/astral-sh/ruff/pull/17705))
+- \[`pycodestyle`\] Fix duplicated diagnostic in `E712` ([#17651](https://github.com/astral-sh/ruff/pull/17651))
+- \[`pylint`\] Detect `global` declarations in module scope (`PLE0118`) ([#17411](https://github.com/astral-sh/ruff/pull/17411))
+- [syntax-errors] Make `async-comprehension-in-sync-comprehension` more specific ([#17460](https://github.com/astral-sh/ruff/pull/17460))
+
+### Configuration
+
+- Add option to disable `typing_extensions` imports ([#17611](https://github.com/astral-sh/ruff/pull/17611))
+
+### Documentation
+
+- Fix example syntax for the `lint.pydocstyle.ignore-var-parameters` option ([#17740](https://github.com/astral-sh/ruff/pull/17740))
+- Add fix safety sections (`ASYNC116`, `FLY002`, `D200`, `RUF005`, `RUF017`, `RUF027`, `RUF028`, `RUF057`) ([#17497](https://github.com/astral-sh/ruff/pull/17497), [#17496](https://github.com/astral-sh/ruff/pull/17496), [#17502](https://github.com/astral-sh/ruff/pull/17502), [#17484](https://github.com/astral-sh/ruff/pull/17484), [#17480](https://github.com/astral-sh/ruff/pull/17480), [#17485](https://github.com/astral-sh/ruff/pull/17485), [#17722](https://github.com/astral-sh/ruff/pull/17722), [#17483](https://github.com/astral-sh/ruff/pull/17483))
+
+### Other changes
+
+- Add Python 3.14 to configuration options ([#17647](https://github.com/astral-sh/ruff/pull/17647))
+- Make syntax error for unparenthesized except tuples version specific to before 3.14 ([#17660](https://github.com/astral-sh/ruff/pull/17660))
+
+## 0.11.7
+
+### Preview features
+
+- \[`airflow`\] Apply auto fixes to cases where the names have changed in Airflow 3 (`AIR301`) ([#17355](https://github.com/astral-sh/ruff/pull/17355))
+- \[`perflint`\] Implement fix for `manual-dict-comprehension` (`PERF403`) ([#16719](https://github.com/astral-sh/ruff/pull/16719))
+- [syntax-errors] Make duplicate parameter names a semantic error ([#17131](https://github.com/astral-sh/ruff/pull/17131))
+
+### Bug fixes
+
+- \[`airflow`\] Fix typos in provider package names (`AIR302`, `AIR312`) ([#17574](https://github.com/astral-sh/ruff/pull/17574))
+- \[`flake8-type-checking`\] Visit keyword arguments in checks involving `typing.cast`/`typing.NewType` arguments ([#17538](https://github.com/astral-sh/ruff/pull/17538))
+- \[`pyupgrade`\] Preserve parenthesis when fixing native literals containing newlines (`UP018`) ([#17220](https://github.com/astral-sh/ruff/pull/17220))
+- \[`refurb`\] Mark the `FURB161` fix unsafe except for integers and booleans ([#17240](https://github.com/astral-sh/ruff/pull/17240))
+
+### Rule changes
+
+- \[`perflint`\] Allow list function calls to be replaced with a comprehension (`PERF401`) ([#17519](https://github.com/astral-sh/ruff/pull/17519))
+- \[`pycodestyle`\] Auto-fix redundant boolean comparison (`E712`) ([#17090](https://github.com/astral-sh/ruff/pull/17090))
+- \[`pylint`\] make fix unsafe if delete comments (`PLR1730`) ([#17459](https://github.com/astral-sh/ruff/pull/17459))
+
+### Documentation
+
+- Add fix safety sections to docs for several rules ([#17410](https://github.com/astral-sh/ruff/pull/17410),[#17440](https://github.com/astral-sh/ruff/pull/17440),[#17441](https://github.com/astral-sh/ruff/pull/17441),[#17443](https://github.com/astral-sh/ruff/pull/17443),[#17444](https://github.com/astral-sh/ruff/pull/17444))
+
 ## 0.11.6
 
 ### Preview features
````
CODE_OF_CONDUCT.md (file header not captured; name inferred from the content)

````diff
@@ -71,8 +71,7 @@ representative at an online or offline event.
 ## Enforcement
 
 Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported to the community leaders responsible for enforcement at
-<charlie.r.marsh@gmail.com>.
+reported to the community leaders responsible for enforcement at <hey@astral.sh>.
 All complaints will be reviewed and investigated promptly and fairly.
 
 All community leaders are obligated to respect the privacy and security of the
````
Cargo.lock (602 changes, generated): file diff suppressed because it is too large.
Cargo.toml (84 changes)

````diff
@@ -35,14 +35,14 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
 ruff_server = { path = "crates/ruff_server" }
 ruff_source_file = { path = "crates/ruff_source_file" }
 ruff_text_size = { path = "crates/ruff_text_size" }
-red_knot_vendored = { path = "crates/red_knot_vendored" }
 ruff_workspace = { path = "crates/ruff_workspace" }
 
-red_knot_ide = { path = "crates/red_knot_ide" }
-red_knot_project = { path = "crates/red_knot_project", default-features = false }
-red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
-red_knot_server = { path = "crates/red_knot_server" }
-red_knot_test = { path = "crates/red_knot_test" }
+ty_ide = { path = "crates/ty_ide" }
+ty_project = { path = "crates/ty_project", default-features = false }
+ty_python_semantic = { path = "crates/ty_python_semantic" }
+ty_server = { path = "crates/ty_server" }
+ty_test = { path = "crates/ty_test" }
+ty_vendored = { path = "crates/ty_vendored" }
 
 aho-corasick = { version = "1.1.3" }
 anstream = { version = "0.6.18" }
@@ -124,7 +124,7 @@ rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "87bf6b6c2d5f6479741271da73bd9d30c2580c26" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "b2b82bccdbef3e7ce7f302c52f43a0c98ac7177a" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
@@ -268,73 +268,3 @@ debug = 1
 # The profile that 'cargo dist' will build with.
 [profile.dist]
 inherits = "release"
-
-# Config for 'dist'
-[workspace.metadata.dist]
-# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
-cargo-dist-version = "0.28.4-prerelease.1"
-# CI backends to support
-ci = "github"
-# The installers to generate for each app
-installers = ["shell", "powershell"]
-# The archive format to use for windows builds (defaults .zip)
-windows-archive = ".zip"
-# The archive format to use for non-windows builds (defaults .tar.xz)
-unix-archive = ".tar.gz"
-# Target platforms to build apps for (Rust target-triple syntax)
-targets = [
-    "aarch64-apple-darwin",
-    "aarch64-pc-windows-msvc",
-    "aarch64-unknown-linux-gnu",
-    "aarch64-unknown-linux-musl",
-    "arm-unknown-linux-musleabihf",
-    "armv7-unknown-linux-gnueabihf",
-    "armv7-unknown-linux-musleabihf",
-    "i686-pc-windows-msvc",
-    "i686-unknown-linux-gnu",
-    "i686-unknown-linux-musl",
-    "powerpc64-unknown-linux-gnu",
-    "powerpc64le-unknown-linux-gnu",
-    "s390x-unknown-linux-gnu",
-    "x86_64-apple-darwin",
-    "x86_64-pc-windows-msvc",
-    "x86_64-unknown-linux-gnu",
-    "x86_64-unknown-linux-musl",
-]
-# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
-auto-includes = false
-# Whether dist should create a Github Release or use an existing draft
-create-release = true
-# Which actions to run on pull requests
-pr-run-mode = "skip"
-# Whether CI should trigger releases with dispatches instead of tag pushes
-dispatch-releases = true
-# Which phase dist should use to create the GitHub release
-github-release = "announce"
-# Whether CI should include auto-generated code to build local artifacts
-build-local-artifacts = false
-# Local artifacts jobs to run in CI
-local-artifacts-jobs = ["./build-binaries", "./build-docker"]
-# Publish jobs to run in CI
-publish-jobs = ["./publish-pypi", "./publish-wasm"]
-# Post-announce jobs to run in CI
-post-announce-jobs = [
-    "./notify-dependents",
-    "./publish-docs",
-    "./publish-playground",
-]
-# Custom permissions for GitHub Jobs
-github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
-# Whether to install an updater program
-install-updater = false
-# Path that installers should place binaries in
-install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
-
-[workspace.metadata.dist.github-custom-runners]
-global = "depot-ubuntu-latest-4"
-
-[workspace.metadata.dist.github-action-commits]
-"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4
-"actions/upload-artifact" = "ea165f8d65b6e75b540449e92b4886f43607fa02" # v4.6.2
-"actions/download-artifact" = "95815c38cf2ff2164869cbab79da8d1f422bc89e" # v4.2.1
-"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3
````
README.md (file header not captured; name inferred from the content)

````diff
@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.11.6/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.11.6/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.11.8/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.11.8/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.11.6
+  rev: v0.11.8
   hooks:
     # Run the linter.
     - id: ruff
````
_typos.toml (file header not captured; name inferred from the content)

````diff
@@ -1,7 +1,7 @@
 [files]
 # https://github.com/crate-ci/typos/issues/868
 extend-exclude = [
-    "crates/red_knot_vendored/vendor/**/*",
+    "crates/ty_vendored/vendor/**/*",
     "**/resources/**/*",
     "**/snapshots/**/*",
 ]
````
crates/red_knot/README.md (deleted; path inferred from the file's relative links)

````diff
@@ -1,25 +0,0 @@
-# Red Knot
-
-Red Knot is an extremely fast type checker.
-Currently, it is a work-in-progress and not ready for user testing.
-
-Red Knot is designed to prioritize good type inference, even in unannotated code,
-and aims to avoid false positives.
-
-While Red Knot will produce similar results to mypy and pyright on many codebases,
-100% compatibility with these tools is a non-goal.
-On some codebases, Red Knot's design decisions lead to different outcomes
-than you would get from running one of these more established tools.
-
-## Contributing
-
-Core type checking tests are written as Markdown code blocks.
-They can be found in [`red_knot_python_semantic/resources/mdtest`][resources-mdtest].
-See [`red_knot_test/README.md`][mdtest-readme] for more information
-on the test framework itself.
-
-The list of open issues can be found [here][open-issues].
-
-[mdtest-readme]: ../red_knot_test/README.md
-[open-issues]: https://github.com/astral-sh/ruff/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Ared-knot
-[resources-mdtest]: ../red_knot_python_semantic/resources/mdtest
````
A deleted mdtest file under crates/red_knot_python_semantic/resources/mdtest/ (exact name not captured)

````diff
@@ -1,83 +0,0 @@
-# Any
-
-## Annotation
-
-`typing.Any` is a way to name the Any type.
-
-```py
-from typing import Any
-
-x: Any = 1
-x = "foo"
-
-def f():
-    reveal_type(x)  # revealed: Any
-```
-
-## Aliased to a different name
-
-If you alias `typing.Any` to another name, we still recognize that as a spelling of the Any type.
-
-```py
-from typing import Any as RenamedAny
-
-x: RenamedAny = 1
-x = "foo"
-
-def f():
-    reveal_type(x)  # revealed: Any
-```
-
-## Shadowed class
-
-If you define your own class named `Any`, using that in a type expression refers to your class, and
-isn't a spelling of the Any type.
-
-```py
-class Any: ...
-
-x: Any
-
-def f():
-    reveal_type(x)  # revealed: Any
-
-# This verifies that we're not accidentally seeing typing.Any, since str is assignable
-# to that but not to our locally defined class.
-y: Any = "not an Any"  # error: [invalid-assignment]
-```
-
-## Subclass
-
-The spec allows you to define subclasses of `Any`.
-
-TODO: Handle assignments correctly. `Subclass` has an unknown superclass, which might be `int`. The
-assignment to `x` should not be allowed, even when the unknown superclass is `int`. The assignment
-to `y` should be allowed, since `Subclass` might have `int` as a superclass, and is therefore
-assignable to `int`.
-
-```py
-from typing import Any
-
-class Subclass(Any): ...
-
-reveal_type(Subclass.__mro__)  # revealed: tuple[Literal[Subclass], Any, Literal[object]]
-
-x: Subclass = 1  # error: [invalid-assignment]
-# TODO: no diagnostic
-y: int = Subclass()  # error: [invalid-assignment]
-
-def _(s: Subclass):
-    reveal_type(s)  # revealed: Subclass
-```
-
-## Invalid
-
-`Any` cannot be parameterized:
-
-```py
-from typing import Any
-
-# error: [invalid-type-form] "Type `typing.Any` expected no type parameter"
-def f(x: Any[int]):
-    reveal_type(x)  # revealed: Unknown
-```
````
A deleted mdtest file under crates/red_knot_python_semantic/resources/mdtest/ (exact name not captured)

````diff
@@ -1,7 +0,0 @@
-# Dictionaries
-
-## Empty dictionary
-
-```py
-reveal_type({})  # revealed: dict
-```
````
A deleted mdtest file under crates/red_knot_python_semantic/resources/mdtest/ (exact name not captured)

````diff
@@ -1,53 +0,0 @@
-# Narrowing with assert statements
-
-## `assert` a value `is None` or `is not None`
-
-```py
-def _(x: str | None, y: str | None):
-    assert x is not None
-    reveal_type(x)  # revealed: str
-    assert y is None
-    reveal_type(y)  # revealed: None
-```
-
-## `assert` a value is truthy or falsy
-
-```py
-def _(x: bool, y: bool):
-    assert x
-    reveal_type(x)  # revealed: Literal[True]
-    assert not y
-    reveal_type(y)  # revealed: Literal[False]
-```
-
-## `assert` with `is` and `==` for literals
-
-```py
-from typing import Literal
-
-def _(x: Literal[1, 2, 3], y: Literal[1, 2, 3]):
-    assert x is 2
-    reveal_type(x)  # revealed: Literal[2]
-    assert y == 2
-    reveal_type(y)  # revealed: Literal[1, 2, 3]
-```
-
-## `assert` with `isinstance`
-
-```py
-def _(x: int | str):
-    assert isinstance(x, int)
-    reveal_type(x)  # revealed: int
-```
-
-## `assert` a value `in` a tuple
-
-```py
-from typing import Literal
-
-def _(x: Literal[1, 2, 3], y: Literal[1, 2, 3]):
-    assert x in (1, 2)
-    reveal_type(x)  # revealed: Literal[1, 2]
-    assert y not in (1, 2)
-    reveal_type(y)  # revealed: Literal[3]
-```
````
@@ -1,47 +0,0 @@
# Narrowing for nested conditionals

## Multiple negative contributions

```py
def _(x: int):
    if x != 1:
        if x != 2:
            if x != 3:
                reveal_type(x)  # revealed: int & ~Literal[1] & ~Literal[2] & ~Literal[3]
```

## Multiple negative contributions with simplification

```py
def _(flag1: bool, flag2: bool):
    x = 1 if flag1 else 2 if flag2 else 3

    if x != 1:
        reveal_type(x)  # revealed: Literal[2, 3]
        if x != 2:
            reveal_type(x)  # revealed: Literal[3]
```

## elif-else blocks

```py
def _(flag1: bool, flag2: bool):
    x = 1 if flag1 else 2 if flag2 else 3

    if x != 1:
        reveal_type(x)  # revealed: Literal[2, 3]
        if x == 2:
            # TODO should be `Literal[2]`
            reveal_type(x)  # revealed: Literal[2, 3]
        elif x == 3:
            reveal_type(x)  # revealed: Literal[3]
        else:
            reveal_type(x)  # revealed: Never

    elif x != 2:
        # TODO should be Literal[1]
        reveal_type(x)  # revealed: Literal[1, 3]
    else:
        # TODO should be Never
        reveal_type(x)  # revealed: Literal[1, 2, 3]
```
@@ -1,28 +0,0 @@
---
source: crates/red_knot_test/src/lib.rs
expression: snapshot
---
---
mdtest name: basic.md - Structures - Unresolvable module import
mdtest path: crates/red_knot_python_semantic/resources/mdtest/import/basic.md
---

# Python source files

## mdtest_snippet.py

```
1 | import zqzqzqzqzqzqzq  # error: [unresolved-import] "Cannot resolve import `zqzqzqzqzqzqzq`"
```

# Diagnostics

```
error: lint:unresolved-import
 --> /src/mdtest_snippet.py:1:8
  |
1 | import zqzqzqzqzqzqzq  # error: [unresolved-import] "Cannot resolve import `zqzqzqzqzqzqzq`"
  |        ^^^^^^^^^^^^^^ Cannot resolve import `zqzqzqzqzqzqzq`
  |

```
@@ -1,32 +0,0 @@
---
source: crates/red_knot_test/src/lib.rs
expression: snapshot
---
---
mdtest name: for.md - For loops - Invalid iterable
mdtest path: crates/red_knot_python_semantic/resources/mdtest/loops/for.md
---

# Python source files

## mdtest_snippet.py

```
1 | nonsense = 123
2 | for x in nonsense:  # error: [not-iterable]
3 |     pass
```

# Diagnostics

```
error: lint:not-iterable
 --> /src/mdtest_snippet.py:2:10
  |
1 | nonsense = 123
2 | for x in nonsense:  # error: [not-iterable]
  |          ^^^^^^^^ Object of type `Literal[123]` is not iterable because it doesn't have an `__iter__` method or a `__getitem__` method
3 |     pass
  |

```
@@ -1,28 +0,0 @@
---
source: crates/red_knot_test/src/lib.rs
expression: snapshot
---
---
mdtest name: unpacking.md - Unpacking - Right hand side not iterable
mdtest path: crates/red_knot_python_semantic/resources/mdtest/diagnostics/unpacking.md
---

# Python source files

## mdtest_snippet.py

```
1 | a, b = 1  # error: [not-iterable]
```

# Diagnostics

```
error: lint:not-iterable
 --> /src/mdtest_snippet.py:1:8
  |
1 | a, b = 1  # error: [not-iterable]
  |        ^ Object of type `Literal[1]` is not iterable because it doesn't have an `__iter__` method or a `__getitem__` method
  |

```
@@ -1,28 +0,0 @@
---
source: crates/red_knot_test/src/lib.rs
expression: snapshot
---
---
mdtest name: unpacking.md - Unpacking - Too few values to unpack
mdtest path: crates/red_knot_python_semantic/resources/mdtest/diagnostics/unpacking.md
---

# Python source files

## mdtest_snippet.py

```
1 | a, b = (1,)  # error: [invalid-assignment]
```

# Diagnostics

```
error: lint:invalid-assignment
 --> /src/mdtest_snippet.py:1:1
  |
1 | a, b = (1,)  # error: [invalid-assignment]
  | ^^^^ Not enough values to unpack (expected 2, got 1)
  |

```
@@ -1,28 +0,0 @@
---
source: crates/red_knot_test/src/lib.rs
expression: snapshot
---
---
mdtest name: unpacking.md - Unpacking - Too many values to unpack
mdtest path: crates/red_knot_python_semantic/resources/mdtest/diagnostics/unpacking.md
---

# Python source files

## mdtest_snippet.py

```
1 | a, b = (1, 2, 3)  # error: [invalid-assignment]
```

# Diagnostics

```
error: lint:invalid-assignment
 --> /src/mdtest_snippet.py:1:1
  |
1 | a, b = (1, 2, 3)  # error: [invalid-assignment]
  | ^^^^ Too many values to unpack (expected 2, got 3)
  |

```
@@ -1,191 +0,0 @@
# Suppressing errors with `knot: ignore`

Type check errors can be suppressed by a `knot: ignore` comment on the same line as the violation.

## Simple `knot: ignore`

```py
a = 4 + test  # knot: ignore
```

## Suppressing a specific code

```py
a = 4 + test  # knot: ignore[unresolved-reference]
```

## Unused suppression

```py
test = 10
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'possibly-unresolved-reference'"
a = test + 3  # knot: ignore[possibly-unresolved-reference]
```

## Unused suppression if the error codes don't match

```py
# error: [unresolved-reference]
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'possibly-unresolved-reference'"
a = test + 3  # knot: ignore[possibly-unresolved-reference]
```

## Suppressed unused comment

```py
# error: [unused-ignore-comment]
a = 10 / 2  # knot: ignore[division-by-zero]
a = 10 / 2  # knot: ignore[division-by-zero, unused-ignore-comment]
a = 10 / 2  # knot: ignore[unused-ignore-comment, division-by-zero]
a = 10 / 2  # knot: ignore[unused-ignore-comment]  # type: ignore
a = 10 / 2  # type: ignore  # knot: ignore[unused-ignore-comment]
```

## Unused ignore comment

```py
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'unused-ignore-comment'"
a = 10 / 0  # knot: ignore[division-by-zero, unused-ignore-comment]
```

## Multiple unused comments

Today, Red Knot emits a diagnostic for every unused code. We might want to group the codes by
comment at some point in the future.

```py
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'division-by-zero'"
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'unresolved-reference'"
a = 10 / 2  # knot: ignore[division-by-zero, unresolved-reference]

# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'invalid-assignment'"
# error: [unused-ignore-comment] "Unused `knot: ignore` directive: 'unresolved-reference'"
a = 10 / 0  # knot: ignore[invalid-assignment, division-by-zero, unresolved-reference]
```

## Multiple suppressions

```py
# fmt: off
def test(a: f"f-string type annotation", b: b"byte-string-type-annotation"): ...  # knot: ignore[fstring-type-annotation, byte-string-type-annotation]
```

## Can't suppress syntax errors

<!-- blacken-docs:off -->

```py
# error: [invalid-syntax]
# error: [unused-ignore-comment]
def test($):  # knot: ignore
    pass
```

<!-- blacken-docs:on -->

## Can't suppress `revealed-type` diagnostics

```py
a = 10
# revealed: Literal[10]
# error: [unknown-rule] "Unknown rule `revealed-type`"
reveal_type(a)  # knot: ignore[revealed-type]
```

## Extra whitespace in type ignore comments is allowed

```py
a = 10 / 0  # knot : ignore
a = 10 / 0  # knot: ignore [ division-by-zero ]
```

## Whitespace is optional

```py
# fmt: off
a = 10 / 0  #knot:ignore[division-by-zero]
```

## Trailing codes comma

Trailing commas in the codes section are allowed:

```py
a = 10 / 0  # knot: ignore[division-by-zero,]
```

## Invalid characters in codes

```py
# error: [division-by-zero]
# error: [invalid-ignore-comment] "Invalid `knot: ignore` comment: expected a alphanumeric character or `-` or `_` as code"
a = 10 / 0  # knot: ignore[*-*]
```

## Trailing whitespace

<!-- blacken-docs:off -->

```py
a = 10 / 0  # knot: ignore[division-by-zero]      
#                                           ^^^^^^ trailing whitespace
```

<!-- blacken-docs:on -->

## Missing comma

A missing comma results in an invalid suppression comment. We may want to recover from this in the
future.

```py
# error: [unresolved-reference]
# error: [invalid-ignore-comment] "Invalid `knot: ignore` comment: expected a comma separating the rule codes"
a = x / 0  # knot: ignore[division-by-zero unresolved-reference]
```

## Missing closing bracket

```py
# error: [unresolved-reference] "Name `x` used when not defined"
# error: [invalid-ignore-comment] "Invalid `knot: ignore` comment: expected a comma separating the rule codes"
a = x / 2  # knot: ignore[unresolved-reference
```

## Empty codes

An empty codes array suppresses no diagnostics and is always useless.

```py
# error: [division-by-zero]
# error: [unused-ignore-comment] "Unused `knot: ignore` without a code"
a = 4 / 0  # knot: ignore[]
```

## File-level suppression comments

File-level suppression comments are currently intentionally unsupported because we've yet to decide
whether they should use a different syntax that also supports enabling rules or changing a rule's
severity: `knot: possibly-undefined-reference=error`

```py
# error: [unused-ignore-comment]
# knot: ignore[division-by-zero]

a = 4 / 0  # error: [division-by-zero]
```

## Unknown rule

```py
# error: [unknown-rule] "Unknown rule `is-equal-14`"
a = 10 + 4  # knot: ignore[is-equal-14]
```

## Code with `lint:` prefix

```py
# error: [unknown-rule] "Unknown rule `lint:division-by-zero`. Did you mean `division-by-zero`?"
# error: [division-by-zero]
a = 10 / 0  # knot: ignore[lint:division-by-zero]
```
@@ -1,23 +0,0 @@
arrow
async-utils
bidict
black
dacite
git-revise
isort
itsdangerous
mypy_primer
packaging
paroxython
porcupine
psycopg
pybind11
pyinstrument
pyp
python-chess
python-htmlgen
rich
scrapy
typeshed-stats
werkzeug
zipp
@@ -1,3 +0,0 @@
The `knot_extensions.pyi` file in this directory will be symlinked into
the `vendor/typeshed/stdlib` directory every time we sync our `typeshed`
stubs (see `.github/workflows/sync_typeshed.yaml`).
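
A minimal sketch of what that symlink step amounts to, assuming the layout described above; the actual logic lives in the workflow file, and this helper is hypothetical:

```py
from pathlib import Path

def link_knot_extensions(crate_dir: Path) -> None:
    # Hypothetical helper mirroring the sync step described above; the real
    # implementation is in .github/workflows/sync_typeshed.yaml.
    source = crate_dir / "knot_extensions.pyi"
    target = crate_dir / "vendor" / "typeshed" / "stdlib" / "knot_extensions.pyi"
    target.unlink(missing_ok=True)  # drop any stale copy from a previous sync
    target.symlink_to(source)
```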

@@ -1 +0,0 @@
f65bdc1acde54fda93c802459280da74518d2eef
@@ -1,6 +1,6 @@
[package]
name = "ruff"
-version = "0.11.6"
+version = "0.11.8"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -77,6 +77,9 @@ test-case = { workspace = true }
# Used via macro expansion.
ignored = ["jiff"]

+[package.metadata.dist]
+dist = true
+
[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = { workspace = true }

@@ -13,7 +13,6 @@ fn main() {

    commit_info(&workspace_root);

-    #[allow(clippy::disallowed_methods)]
    let target = std::env::var("TARGET").unwrap();
    println!("cargo::rustc-env=RUST_HOST_TARGET={target}");
}
@@ -5,6 +5,7 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;

+use crate::commands::completions::config::{OptionString, OptionStringParser};
use anyhow::bail;
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{command, Parser, Subcommand};
@@ -22,7 +23,7 @@ use ruff_linter::settings::types::{
};
use ruff_linter::{RuleParser, RuleSelector, RuleSelectorParser};
use ruff_python_ast as ast;
-use ruff_source_file::{LineIndex, OneIndexed};
+use ruff_source_file::{LineIndex, OneIndexed, PositionEncoding};
use ruff_text_size::TextRange;
use ruff_workspace::configuration::{Configuration, RuleSelection};
use ruff_workspace::options::{Options, PycodestyleOptions};
@@ -31,8 +32,6 @@ use ruff_workspace::resolver::ConfigurationTransformer;
use rustc_hash::FxHashMap;
use toml;

-use crate::commands::completions::config::{OptionString, OptionStringParser};
-
/// All configuration options that can be passed "globally",
/// i.e., can be passed to all subcommands
#[derive(Debug, Default, Clone, clap::Args)]
@@ -94,7 +93,7 @@ pub struct Args {
    pub(crate) global_options: GlobalConfigArgs,
}

-#[allow(clippy::large_enum_variant)]
+#[expect(clippy::large_enum_variant)]
#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Run Ruff on the given files or directories.
@@ -178,11 +177,14 @@ pub struct AnalyzeGraphCommand {
    /// The minimum Python version that should be supported.
    #[arg(long, value_enum)]
    target_version: Option<PythonVersion>,
+    /// Path to a virtual environment to use for resolving additional dependencies
+    #[arg(long)]
+    python: Option<PathBuf>,
}

// The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient
#[derive(Clone, Debug, clap::Parser)]
-#[allow(clippy::struct_excessive_bools)]
+#[expect(clippy::struct_excessive_bools)]
pub struct CheckCommand {
    /// List of files or directories to check.
    #[clap(help = "List of files or directories to check [default: .]")]
@@ -444,7 +446,7 @@ pub struct CheckCommand {
}

#[derive(Clone, Debug, clap::Parser)]
-#[allow(clippy::struct_excessive_bools)]
+#[expect(clippy::struct_excessive_bools)]
pub struct FormatCommand {
    /// List of files or directories to format.
    #[clap(help = "List of files or directories to format [default: .]")]
@@ -558,7 +560,7 @@ pub enum HelpFormat {
    Json,
}

-#[allow(clippy::module_name_repetitions)]
+#[expect(clippy::module_name_repetitions)]
#[derive(Debug, Default, Clone, clap::Args)]
pub struct LogLevelArgs {
    /// Enable verbose logging.
@@ -797,6 +799,7 @@ impl AnalyzeGraphCommand {
        let format_arguments = AnalyzeGraphArgs {
            files: self.files,
            direction: self.direction,
+            python: self.python,
        };

        let cli_overrides = ExplicitConfigOverrides {
@@ -1028,7 +1031,7 @@ Possible choices:

/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
-#[allow(clippy::struct_excessive_bools)]
+#[expect(clippy::struct_excessive_bools)]
pub struct CheckArguments {
    pub add_noqa: bool,
    pub diff: bool,
@@ -1047,7 +1050,7 @@ pub struct CheckArguments {

/// CLI settings that are distinct from configuration (commands, lists of files,
/// etc.).
-#[allow(clippy::struct_excessive_bools)]
+#[expect(clippy::struct_excessive_bools)]
pub struct FormatArguments {
    pub check: bool,
    pub no_cache: bool,
@@ -1070,8 +1073,9 @@ impl FormatRange {
    ///
    /// Returns an empty range if the start range is past the end of `source`.
    pub(super) fn to_text_range(self, source: &str, line_index: &LineIndex) -> TextRange {
-        let start_byte_offset = line_index.offset(self.start.line, self.start.column, source);
-        let end_byte_offset = line_index.offset(self.end.line, self.end.column, source);
+        let start_byte_offset =
+            line_index.offset(self.start.into(), source, PositionEncoding::Utf32);
+        let end_byte_offset = line_index.offset(self.end.into(), source, PositionEncoding::Utf32);

        TextRange::new(start_byte_offset, end_byte_offset)
    }
@@ -1142,6 +1146,15 @@ pub struct LineColumn {
    pub column: OneIndexed,
}

+impl From<LineColumn> for ruff_source_file::SourceLocation {
+    fn from(value: LineColumn) -> Self {
+        Self {
+            line: value.line,
+            character_offset: value.column,
+        }
+    }
+}
+
impl std::fmt::Display for LineColumn {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{line}:{column}", line = self.line, column = self.column)
@@ -1252,12 +1265,12 @@ impl LineColumnParseError {
pub struct AnalyzeGraphArgs {
    pub files: Vec<PathBuf>,
    pub direction: Direction,
+    pub python: Option<PathBuf>,
}

/// Configuration overrides provided via dedicated CLI flags:
/// `--line-length`, `--respect-gitignore`, etc.
#[derive(Clone, Default)]
#[allow(clippy::struct_excessive_bools)]
struct ExplicitConfigOverrides {
    dummy_variable_rgx: Option<Regex>,
    exclude: Option<Vec<FilePattern>>,

@@ -86,7 +86,7 @@ pub(crate) struct Cache {
    changes: Mutex<Vec<Change>>,
    /// The "current" timestamp used as cache for the updates of
    /// [`FileCache::last_seen`]
-    #[allow(clippy::struct_field_names)]
+    #[expect(clippy::struct_field_names)]
    last_seen_cache: u64,
}

@@ -146,7 +146,7 @@ impl Cache {
        Cache::new(path, package)
    }

-    #[allow(clippy::cast_possible_truncation)]
+    #[expect(clippy::cast_possible_truncation)]
    fn new(path: PathBuf, package: PackageCache) -> Self {
        Cache {
            path,
@@ -204,7 +204,7 @@ impl Cache {
    }

    /// Applies the pending changes without storing the cache to disk.
-    #[allow(clippy::cast_possible_truncation)]
+    #[expect(clippy::cast_possible_truncation)]
    pub(crate) fn save(&mut self) -> bool {
        /// Maximum duration for which we keep a file in cache that hasn't been seen.
        const MAX_LAST_SEEN: Duration = Duration::from_secs(30 * 24 * 60 * 60); // 30 days.
@@ -616,7 +616,7 @@ mod tests {
        let settings = Settings {
            cache_dir,
            linter: LinterSettings {
-                unresolved_target_version: PythonVersion::latest(),
+                unresolved_target_version: PythonVersion::latest().into(),
                ..Default::default()
            },
            ..Settings::default()
@@ -834,7 +834,6 @@ mod tests {
    // Regression test for issue #3086.

    #[cfg(unix)]
-    #[allow(clippy::items_after_statements)]
    fn flip_execute_permission_bit(path: &Path) -> io::Result<()> {
        use std::os::unix::fs::PermissionsExt;
        let file = fs::OpenOptions::new().write(true).open(path)?;
@@ -843,7 +842,6 @@ mod tests {
    }

    #[cfg(windows)]
-    #[allow(clippy::items_after_statements)]
    fn flip_read_only_permission(path: &Path) -> io::Result<()> {
        let file = fs::OpenOptions::new().write(true).open(path)?;
        let mut perms = file.metadata()?.permissions();

@@ -75,6 +75,8 @@ pub(crate) fn analyze_graph(
            .target_version
            .as_tuple()
            .into(),
+        args.python
+            .and_then(|python| SystemPathBuf::from_path_buf(python).ok()),
    )?;

    let imports = {

@@ -30,7 +30,6 @@ use crate::cache::{Cache, PackageCacheMap, PackageCaches};
use crate::diagnostics::Diagnostics;

/// Run the linter over a collection of files.
-#[allow(clippy::too_many_arguments)]
pub(crate) fn check(
    files: &[PathBuf],
    pyproject_config: &PyprojectConfig,
@@ -181,7 +180,6 @@ pub(crate) fn check(

/// Wraps [`lint_path`](crate::diagnostics::lint_path) in a [`catch_unwind`](std::panic::catch_unwind) and emits
/// a diagnostic if linting the file panics.
-#[allow(clippy::too_many_arguments)]
fn lint_path(
    path: &Path,
    package: Option<PackageRoot<'_>>,

@@ -5,7 +5,7 @@ use crate::args::HelpFormat;
use ruff_workspace::options::Options;
use ruff_workspace::options_base::OptionsMetadata;

-#[allow(clippy::print_stdout)]
+#[expect(clippy::print_stdout)]
pub(crate) fn config(key: Option<&str>, format: HelpFormat) -> Result<()> {
    match key {
        None => {

@@ -160,7 +160,7 @@ pub(crate) fn format(
                }),
                Err(error) => Err(FormatCommandError::Panic(
                    Some(resolved_file.path().to_path_buf()),
-                    error,
+                    Box::new(error),
                )),
            },
        )
@@ -362,7 +362,7 @@ pub(crate) fn format_source(
        })
    } else {
        // Using `Printed::into_code` requires adding `ruff_formatter` as a direct dependency, and I suspect that Rust can optimize the closure away regardless.
-        #[allow(clippy::redundant_closure_for_method_calls)]
+        #[expect(clippy::redundant_closure_for_method_calls)]
        format_module_source(unformatted, options).map(|formatted| formatted.into_code())
    };
@@ -635,7 +635,7 @@ impl<'a> FormatResults<'a> {
pub(crate) enum FormatCommandError {
    Ignore(#[from] ignore::Error),
    Parse(#[from] DisplayParseError),
-    Panic(Option<PathBuf>, PanicError),
+    Panic(Option<PathBuf>, Box<PanicError>),
    Read(Option<PathBuf>, SourceError),
    Format(Option<PathBuf>, FormatModuleError),
    Write(Option<PathBuf>, SourceError),

@@ -19,7 +19,7 @@ struct Explanation<'a> {
    summary: &'a str,
    message_formats: &'a [&'a str],
    fix: String,
-    #[allow(clippy::struct_field_names)]
+    #[expect(clippy::struct_field_names)]
    explanation: Option<&'a str>,
    preview: bool,
}

@@ -134,7 +134,7 @@ pub fn run(
    {
        let default_panic_hook = std::panic::take_hook();
        std::panic::set_hook(Box::new(move |info| {
-            #[allow(clippy::print_stderr)]
+            #[expect(clippy::print_stderr)]
            {
                eprintln!(
                    r#"
@@ -326,7 +326,7 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
        commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
    if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
        let s = if modifications == 1 { "" } else { "s" };
-        #[allow(clippy::print_stderr)]
+        #[expect(clippy::print_stderr)]
        {
            eprintln!("Added {modifications} noqa directive{s}.");
        }

@@ -241,7 +241,6 @@ impl Printer {
    }

    if !self.flags.intersects(Flags::SHOW_VIOLATIONS) {
-        #[allow(deprecated)]
        if matches!(
            self.format,
            OutputFormat::Full | OutputFormat::Concise | OutputFormat::Grouped

@@ -422,3 +422,153 @@ fn nested_imports() -> Result<()> {

    Ok(())
}

/// Test for venv resolution with the `--python` flag.
///
/// Based on the [albatross-virtual-workspace] example from the uv repo and the report in [#16598].
///
/// [albatross-virtual-workspace]: https://github.com/astral-sh/uv/tree/aa629c4a/scripts/workspaces/albatross-virtual-workspace
/// [#16598]: https://github.com/astral-sh/ruff/issues/16598
#[test]
fn venv() -> Result<()> {
    let tempdir = TempDir::new()?;
    let root = ChildPath::new(tempdir.path());

    // packages
    // ├── albatross
    // │   ├── check_installed_albatross.py
    // │   ├── pyproject.toml
    // │   └── src
    // │       └── albatross
    // │           └── __init__.py
    // └── bird-feeder
    //     ├── check_installed_bird_feeder.py
    //     ├── pyproject.toml
    //     └── src
    //         └── bird_feeder
    //             └── __init__.py

    let packages = root.child("packages");

    let albatross = packages.child("albatross");
    albatross
        .child("check_installed_albatross.py")
        .write_str("from albatross import fly")?;
    albatross
        .child("pyproject.toml")
        .write_str(indoc::indoc! {r#"
            [project]
            name = "albatross"
            version = "0.1.0"
            requires-python = ">=3.12"
            dependencies = ["bird-feeder", "tqdm>=4,<5"]

            [tool.uv.sources]
            bird-feeder = { workspace = true }
        "#})?;
    albatross
        .child("src")
        .child("albatross")
        .child("__init__.py")
        .write_str("import tqdm; from bird_feeder import use")?;

    let bird_feeder = packages.child("bird-feeder");
    bird_feeder
        .child("check_installed_bird_feeder.py")
        .write_str("from bird_feeder import use; from albatross import fly")?;
    bird_feeder
        .child("pyproject.toml")
        .write_str(indoc::indoc! {r#"
            [project]
            name = "bird-feeder"
            version = "1.0.0"
            requires-python = ">=3.12"
            dependencies = ["anyio>=4.3.0,<5"]
        "#})?;
    bird_feeder
        .child("src")
        .child("bird_feeder")
        .child("__init__.py")
        .write_str("import anyio")?;

    let venv = root.child(".venv");
    let bin = venv.child("bin");
    bin.child("python").touch()?;
    let home = format!("home = {}", bin.to_string_lossy());
    venv.child("pyvenv.cfg").write_str(&home)?;
    let site_packages = venv.child("lib").child("python3.12").child("site-packages");
    site_packages
        .child("_albatross.pth")
        .write_str(&albatross.join("src").to_string_lossy())?;
    site_packages
        .child("_bird_feeder.pth")
        .write_str(&bird_feeder.join("src").to_string_lossy())?;
    site_packages.child("tqdm").child("__init__.py").touch()?;

    // without `--python .venv`, the result should only include dependencies within the albatross
    // package
    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(
            command().arg("packages/albatross").current_dir(&root),
            @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "packages/albatross/check_installed_albatross.py": [
            "packages/albatross/src/albatross/__init__.py"
          ],
          "packages/albatross/src/albatross/__init__.py": []
        }

        ----- stderr -----
        "#);
    });

    // with `--python .venv` both workspace and third-party dependencies are included
    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(
            command().args(["--python", ".venv"]).arg("packages/albatross").current_dir(&root),
            @r#"
        success: true
        exit_code: 0
        ----- stdout -----
        {
          "packages/albatross/check_installed_albatross.py": [
            "packages/albatross/src/albatross/__init__.py"
          ],
          "packages/albatross/src/albatross/__init__.py": [
            ".venv/lib/python3.12/site-packages/tqdm/__init__.py",
            "packages/bird-feeder/src/bird_feeder/__init__.py"
          ]
        }

        ----- stderr -----
        "#);
    });

    // test the error message for a non-existent venv. it's important that the `ruff analyze graph`
    // flag matches the ty flag used to generate the error message (`--python`)
    insta::with_settings!({
        filters => INSTA_FILTERS.to_vec(),
    }, {
        assert_cmd_snapshot!(
            command().args(["--python", "none"]).arg("packages/albatross").current_dir(&root),
            @r"
        success: false
        exit_code: 2
        ----- stdout -----

        ----- stderr -----
        ruff failed
          Cause: Invalid search path settings
          Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` could not be canonicalized
        ");
    });

    Ok(())
}

@@ -994,6 +994,7 @@ fn value_given_to_table_key_is_not_inline_table_2() {
    - `lint.extend-per-file-ignores`
    - `lint.exclude`
    - `lint.preview`
+   - `lint.typing-extensions`

    For more information, try '--help'.
    ");
@@ -1899,6 +1900,40 @@ def first_square():
    Ok(())
}

/// Regression test for <https://github.com/astral-sh/ruff/issues/2253>
#[test]
fn add_noqa_parent() -> Result<()> {
    let tempdir = TempDir::new()?;
    let test_path = tempdir.path().join("noqa.py");
    fs::write(
        &test_path,
        r#"
from foo import (  # noqa: F401
    bar
)
"#,
    )?;

    insta::with_settings!({
        filters => vec![(tempdir_filter(&tempdir).as_str(), "[TMP]/")]
    }, {
        assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .arg("--add-noqa")
            .arg("--select=F401")
            .arg("noqa.py")
            .current_dir(&tempdir), @r"
        success: true
        exit_code: 0
        ----- stdout -----

        ----- stderr -----
        ");
    });

    Ok(())
}

/// Infer `3.11` from `requires-python` in `pyproject.toml`.
#[test]
fn requires_python() -> Result<()> {
@@ -2117,7 +2152,7 @@ requires-python = ">= 3.11"
        .arg("test.py")
        .arg("-")
        .current_dir(project_dir)
-        , @r###"
+        , @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -2207,6 +2242,7 @@ requires-python = ">= 3.11"
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -2390,7 +2426,7 @@ requires-python = ">= 3.11"
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -2428,7 +2464,7 @@ requires-python = ">= 3.11"
        .arg("test.py")
        .arg("-")
        .current_dir(project_dir)
-        , @r###"
+        , @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -2518,6 +2554,7 @@ requires-python = ">= 3.11"
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -2701,7 +2738,7 @@ requires-python = ">= 3.11"
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -2790,7 +2827,7 @@ from typing import Union;foo: Union[int, str] = 1
        .args(STDIN_BASE_OPTIONS)
        .arg("test.py")
        .arg("--show-settings")
-        .current_dir(project_dir), @r###"
+        .current_dir(project_dir), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -2881,6 +2918,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -3064,7 +3102,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -3170,7 +3208,7 @@ from typing import Union;foo: Union[int, str] = 1
        .arg("--show-settings")
        .args(["--select","UP007"])
        .arg("foo/test.py")
-        .current_dir(&project_dir), @r###"
+        .current_dir(&project_dir), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -3260,6 +3298,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -3443,7 +3482,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -3475,7 +3514,7 @@ requires-python = ">= 3.11"
        &inner_pyproject,
        r#"
[tool.ruff]
-target-version = "py310"
+target-version = "py310"
"#,
    )?;

@@ -3497,7 +3536,7 @@ from typing import Union;foo: Union[int, str] = 1
        .arg("--show-settings")
        .args(["--select","UP007"])
        .arg("foo/test.py")
-        .current_dir(&project_dir), @r###"
+        .current_dir(&project_dir), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -3587,6 +3626,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -3770,7 +3810,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -3823,7 +3863,7 @@ from typing import Union;foo: Union[int, str] = 1
        .args(STDIN_BASE_OPTIONS)
        .arg("--show-settings")
        .arg("foo/test.py")
-        .current_dir(&project_dir), @r###"
+        .current_dir(&project_dir), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -3890,7 +3930,7 @@ from typing import Union;foo: Union[int, str] = 1
    linter.per_file_ignores = {}
    linter.safety_table.forced_safe = []
    linter.safety_table.forced_unsafe = []
-   linter.unresolved_target_version = 3.9
+   linter.unresolved_target_version = none
    linter.per_file_target_version = {}
    linter.preview = disabled
    linter.explicit_preview_rules = false
@@ -3914,6 +3954,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -4097,7 +4138,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });

    insta::with_settings!({
@@ -4107,7 +4148,7 @@ from typing import Union;foo: Union[int, str] = 1
        .args(STDIN_BASE_OPTIONS)
        .arg("--show-settings")
        .arg("test.py")
-        .current_dir(project_dir.join("foo")), @r###"
+        .current_dir(project_dir.join("foo")), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -4174,7 +4215,7 @@ from typing import Union;foo: Union[int, str] = 1
    linter.per_file_ignores = {}
    linter.safety_table.forced_safe = []
    linter.safety_table.forced_unsafe = []
-   linter.unresolved_target_version = 3.9
+   linter.unresolved_target_version = none
    linter.per_file_target_version = {}
    linter.preview = disabled
    linter.explicit_preview_rules = false
@@ -4198,6 +4239,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -4381,7 +4423,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });
    Ok(())
}
@@ -4444,7 +4486,7 @@ from typing import Union;foo: Union[int, str] = 1
        .args(STDIN_BASE_OPTIONS)
        .arg("--show-settings")
        .arg("test.py")
-        .current_dir(&project_dir), @r###"
+        .current_dir(&project_dir), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
@@ -4535,6 +4577,7 @@ from typing import Union;foo: Union[int, str] = 1
        XXX,
    ]
    linter.typing_modules = []
+   linter.typing_extensions = true

    # Linter Plugins
    linter.flake8_annotations.mypy_init_return = false
@@ -4718,7 +4761,7 @@ from typing import Union;foo: Union[int, str] = 1
    analyze.include_dependencies = {}

    ----- stderr -----
-    "###);
+    "#);
    });

    Ok(())
@@ -4980,6 +5023,53 @@ fn flake8_import_convention_unused_aliased_import_no_conflict() {
        .pass_stdin("1"));
}

// See: https://github.com/astral-sh/ruff/issues/16177
#[test]
fn flake8_pyi_redundant_none_literal() {
    let snippet = r#"
from typing import Literal

# For each of these expressions, Ruff provides a fix for one of the `Literal[None]` elements
# but not both, as if both were autofixed it would result in `None | None`,
# which leads to a `TypeError` at runtime.
a: Literal[None,] | Literal[None,]
b: Literal[None] | Literal[None]
c: Literal[None] | Literal[None,]
d: Literal[None,] | Literal[None]
"#;

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(STDIN_BASE_OPTIONS)
        .args(["--select", "PYI061"])
        .args(["--stdin-filename", "test.py"])
        .arg("--preview")
        .arg("--diff")
        .arg("-")
        .pass_stdin(snippet), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    --- test.py
    +++ test.py
    @@ -4,7 +4,7 @@
     # For each of these expressions, Ruff provides a fix for one of the `Literal[None]` elements
     # but not both, as if both were autofixed it would result in `None | None`,
     # which leads to a `TypeError` at runtime.
    -a: Literal[None,] | Literal[None,]
    -b: Literal[None] | Literal[None]
    -c: Literal[None] | Literal[None,]
    -d: Literal[None,] | Literal[None]
    +a: None | Literal[None,]
    +b: None | Literal[None]
    +c: None | Literal[None,]
    +d: None | Literal[None]

    ----- stderr -----
    Would fix 4 errors.
    ");
}

/// Test that private, old-style `TypeVar` generics
/// 1. Get replaced with PEP 695 type parameters (UP046, UP047)
/// 2. Get renamed to remove leading underscores (UP049)
@@ -5175,8 +5265,8 @@ fn a005_module_shadowing_non_strict() -> Result<()> {
}

/// Test A005 with `strict-checking` unset
-/// TODO(brent) This should currently match the strict version, but after the next minor
-/// release it will match the non-strict version directly above
+///
+/// This should match the non-strict version directly above
#[test]
fn a005_module_shadowing_strict_default() -> Result<()> {
    let tempdir = TempDir::new()?;
@@ -5564,3 +5654,34 @@ fn semantic_syntax_errors() -> Result<()> {

    Ok(())
}

/// Regression test for <https://github.com/astral-sh/ruff/issues/17821>.
///
/// `lint.typing-extensions = false` with Python 3.9 should disable the PYI019 lint because it would
/// try to import `Self` from `typing_extensions`
#[test]
fn combine_typing_extensions_config() {
    let contents = "
from typing import TypeVar
T = TypeVar('T')
class Foo:
    def f(self: T) -> T: ...
";
    assert_cmd_snapshot!(
        Command::new(get_cargo_bin(BIN_NAME))
            .args(STDIN_BASE_OPTIONS)
            .args(["--config", "lint.typing-extensions = false"])
            .arg("--select=PYI019")
            .arg("--target-version=py39")
            .arg("-")
            .pass_stdin(contents),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    All checks passed!

    ----- stderr -----
    "
    );
}

@@ -5,7 +5,6 @@ info:
  args:
    - rule
    - F401
-snapshot_kind: text
---
success: true
exit_code: 0
@@ -84,6 +83,11 @@ else:
    print("numpy is not installed")
```

+## Preview
+When [preview](https://docs.astral.sh/ruff/preview/) is enabled,
+the criterion for determining whether an import is first-party
+is stricter, which could affect the suggested fix. See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc) for more details.
+
## Options
- `lint.ignore-init-module-imports`
- `lint.pyflakes.allowed-unused-imports`

@@ -213,6 +213,7 @@ linter.task_tags = [
    XXX,
]
linter.typing_modules = []
+linter.typing_extensions = true

# Linter Plugins
linter.flake8_annotations.mypy_init_return = false

@@ -33,7 +33,7 @@ name = "formatter"
harness = false

[[bench]]
-name = "red_knot"
+name = "ty"
harness = false

[dependencies]
@@ -49,7 +49,7 @@ ruff_python_ast = { workspace = true }
ruff_python_formatter = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_trivia = { workspace = true }
-red_knot_project = { workspace = true }
+ty_project = { workspace = true }

[lints]
workspace = true

@@ -45,9 +45,9 @@ static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
        target_arch = "powerpc64"
    )
))]
-#[allow(non_upper_case_globals)]
+#[expect(non_upper_case_globals)]
#[export_name = "_rjem_malloc_conf"]
-#[allow(unsafe_code)]
+#[expect(unsafe_code)]
pub static _rjem_malloc_conf: &[u8] = b"dirty_decay_ms:-1,muzzy_decay_ms:-1\0";

fn create_test_cases() -> Vec<TestCase> {

@@ -7,16 +7,16 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use rayon::ThreadPoolBuilder;
use rustc_hash::FxHashSet;

-use red_knot_project::metadata::options::{EnvironmentOptions, Options};
-use red_knot_project::metadata::value::RangedValue;
-use red_knot_project::watch::{ChangeEvent, ChangedKind};
-use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
use ruff_benchmark::TestFile;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::source::source_text;
use ruff_db::system::{MemoryFileSystem, SystemPath, SystemPathBuf, TestSystem};
use ruff_python_ast::PythonVersion;
+use ty_project::metadata::options::{EnvironmentOptions, Options};
+use ty_project::metadata::value::RangedValue;
+use ty_project::watch::{ChangeEvent, ChangedKind};
+use ty_project::{Db, ProjectDatabase, ProjectMetadata};

struct Case {
    db: ProjectDatabase,
@@ -122,7 +122,7 @@ static RAYON_INITIALIZED: std::sync::Once = std::sync::Once::new();
fn setup_rayon() {
    // Initialize the rayon thread pool outside the benchmark because it has a significant cost.
    // We limit the thread pool to only one (the current thread) because we're focused on
-    // where red knot spends time and less about how well the code runs concurrently.
+    // where ty spends time and less about how well the code runs concurrently.
    // We might want to add a benchmark focusing on concurrency to detect congestion in the future.
    RAYON_INITIALIZED.call_once(|| {
        ThreadPoolBuilder::new()
@@ -172,7 +172,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {

    setup_rayon();

-    criterion.bench_function("red_knot_check_file[incremental]", |b| {
+    criterion.bench_function("ty_check_file[incremental]", |b| {
        b.iter_batched_ref(setup, incremental, BatchSize::SmallInput);
    });
}
@@ -180,7 +180,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
fn benchmark_cold(criterion: &mut Criterion) {
    setup_rayon();

-    criterion.bench_function("red_knot_check_file[cold]", |b| {
+    criterion.bench_function("ty_check_file[cold]", |b| {
        b.iter_batched_ref(
            setup_tomllib_case,
            |case| {
@@ -257,7 +257,7 @@ fn setup_micro_case(code: &str) -> Case {
fn benchmark_many_string_assignments(criterion: &mut Criterion) {
    setup_rayon();

-    criterion.bench_function("red_knot_micro[many_string_assignments]", |b| {
+    criterion.bench_function("ty_micro[many_string_assignments]", |b| {
        b.iter_batched_ref(
            || {
                // This is a micro benchmark, but it is effectively identical to a code sample
@@ -213,7 +213,7 @@ macro_rules! impl_cache_key_tuple {

    ( $($name:ident)+) => (
        impl<$($name: CacheKey),+> CacheKey for ($($name,)+) where last_type!($($name,)+): ?Sized {
-            #[allow(non_snake_case)]
+            #[expect(non_snake_case)]
            #[inline]
            fn cache_key(&self, state: &mut CacheKeyHasher) {
                let ($(ref $name,)+) = *self;

@@ -47,7 +47,7 @@ fn struct_ignored_fields() {
    struct NamedFieldsStruct {
        a: String,
        #[cache_key(ignore)]
-        #[allow(unused)]
+        #[expect(unused)]
        b: String,
    }

@@ -20,6 +20,7 @@ ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }

+anstyle = { workspace = true }
camino = { workspace = true }
countme = { workspace = true }
dashmap = { workspace = true }

@@ -3,7 +3,7 @@ use std::{fmt::Formatter, sync::Arc};
use thiserror::Error;

use ruff_annotate_snippets::Level as AnnotateLevel;
-use ruff_text_size::TextRange;
+use ruff_text_size::{Ranged, TextRange};

pub use self::render::DisplayDiagnostic;
use crate::files::File;
@@ -11,6 +11,7 @@ use crate::Db;

use self::render::FileResolver;
mod render;
+mod stylesheet;

/// A collection of information that can be rendered into a diagnostic.
///
@@ -133,20 +134,20 @@ impl Diagnostic {
    /// NOTE: At present, this routine will return the first primary
    /// annotation's message as the primary message when the main diagnostic
    /// message is empty. This is meant to facilitate an incremental migration
-    /// in Red Knot over to the new diagnostic data model. (The old data model
+    /// in ty over to the new diagnostic data model. (The old data model
    /// didn't distinguish between messages on the entire diagnostic and
    /// messages attached to a particular span.)
    pub fn primary_message(&self) -> &str {
        if !self.inner.message.as_str().is_empty() {
            return self.inner.message.as_str();
        }
-        // FIXME: As a special case, while we're migrating Red Knot
+        // FIXME: As a special case, while we're migrating ty
        // to the new diagnostic data model, we'll look for a primary
        // message from the primary annotation. This is because most
-        // Red Knot diagnostics are created with an empty diagnostic
+        // ty diagnostics are created with an empty diagnostic
        // message and instead attach the message to the annotation.
        // Fixing this will require touching basically every diagnostic
-        // in Red Knot, so we do it this way for now to match the old
+        // in ty, so we do it this way for now to match the old
        // semantics. ---AG
        self.primary_annotation()
            .and_then(|ann| ann.get_message())
@@ -164,7 +165,7 @@ impl Diagnostic {
    ///
    /// The reason why we don't just always return both the main diagnostic
    /// message and the primary annotation message is because this was written
-    /// in the midst of an incremental migration of Red Knot over to the new
+    /// in the midst of an incremental migration of ty over to the new
    /// diagnostic data model. At time of writing, diagnostics were still
    /// constructed in the old model where the main diagnostic message and the
    /// primary annotation message were not distinguished from each other. So
@@ -226,6 +227,11 @@ impl Diagnostic {
    pub fn primary_span(&self) -> Option<Span> {
        self.primary_annotation().map(|ann| ann.span.clone())
    }

+    /// Returns the tags from the primary annotation of this diagnostic if it exists.
+    pub fn primary_tags(&self) -> Option<&[DiagnosticTag]> {
+        self.primary_annotation().map(|ann| ann.tags.as_slice())
+    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
@@ -337,6 +343,8 @@ pub struct Annotation {
    /// Whether this annotation is "primary" or not. When it isn't primary, an
    /// annotation is said to be "secondary."
    is_primary: bool,
+    /// The diagnostic tags associated with this annotation.
+    tags: Vec<DiagnosticTag>,
}

impl Annotation {
@@ -354,6 +362,7 @@ impl Annotation {
            span,
            message: None,
            is_primary: true,
+            tags: Vec::new(),
        }
    }
@@ -369,6 +378,7 @@ impl Annotation {
            span,
            message: None,
            is_primary: false,
+            tags: Vec::new(),
        }
    }
@@ -411,6 +421,36 @@ impl Annotation {
    pub fn get_span(&self) -> &Span {
        &self.span
    }

+    /// Returns the tags associated with this annotation.
+    pub fn get_tags(&self) -> &[DiagnosticTag] {
+        &self.tags
+    }
+
+    /// Attaches this tag to this annotation.
+    ///
+    /// It will not replace any existing tags.
+    pub fn tag(mut self, tag: DiagnosticTag) -> Annotation {
+        self.tags.push(tag);
+        self
+    }
+
+    /// Attaches an additional tag to this annotation.
+    pub fn push_tag(&mut self, tag: DiagnosticTag) {
+        self.tags.push(tag);
+    }
}

+/// Tags that can be associated with an annotation.
+///
+/// These tags are used to provide additional information about the annotation
+/// and are passed through to the language server protocol.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum DiagnosticTag {
+    /// Unused or unnecessary code. Used for unused parameters, unreachable code, etc.
+    Unnecessary,
+    /// Deprecated or obsolete code.
+    Deprecated,
+}

/// A string identifier for a lint rule.
@@ -461,6 +501,8 @@ impl PartialEq<&str> for LintName {
/// Uniquely identifies the kind of a diagnostic.
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
pub enum DiagnosticId {
+    Panic,
+
    /// Some I/O operation failed
    Io,
@@ -521,6 +563,7 @@ impl DiagnosticId {

    pub fn as_str(&self) -> Result<&str, DiagnosticAsStrError> {
        Ok(match self {
+            DiagnosticId::Panic => "panic",
            DiagnosticId::Io => "io",
            DiagnosticId::InvalidSyntax => "invalid-syntax",
            DiagnosticId::Lint(name) => {
@@ -601,6 +644,12 @@ impl From<File> for Span {
    }
}

+impl From<crate::files::FileRange> for Span {
+    fn from(file_range: crate::files::FileRange) -> Span {
+        Span::from(file_range.file()).with_range(file_range.range())
+    }
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
pub enum Severity {
    Info,
@@ -625,6 +674,10 @@ impl Severity {
            Severity::Fatal => AnnotateLevel::Error,
        }
    }

+    pub const fn is_fatal(self) -> bool {
+        matches!(self, Severity::Fatal)
+    }
}

/// Configuration for rendering diagnostics.
@@ -845,3 +898,16 @@ pub fn create_unsupported_syntax_diagnostic(
    diag.annotate(Annotation::primary(span).message(err.to_string()));
    diag
}

+/// Creates a `Diagnostic` from a semantic syntax error.
+///
+/// See [`create_parse_diagnostic`] for more details.
+pub fn create_semantic_syntax_diagnostic(
+    file: File,
+    err: &ruff_python_parser::semantic_errors::SemanticSyntaxError,
+) -> Diagnostic {
+    let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
+    let span = Span::from(file).with_range(err.range);
+    diag.annotate(Annotation::primary(span).message(err.to_string()));
+    diag
+}

@@ -7,9 +7,11 @@ use ruff_annotate_snippets::{
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{TextRange, TextSize};

+use crate::diagnostic::stylesheet::{fmt_styled, DiagnosticStylesheet};
use crate::{
    files::File,
    source::{line_index, source_text, SourceText},
+    system::SystemPath,
    Db,
};

@@ -47,6 +49,7 @@ impl<'a> DisplayDiagnostic<'a> {
        } else {
            AnnotateRenderer::plain()
        };

        DisplayDiagnostic {
            config,
            resolver,
@@ -58,31 +61,64 @@ impl<'a> DisplayDiagnostic<'a> {
|
||||
|
||||
impl std::fmt::Display for DisplayDiagnostic<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
if matches!(self.config.format, DiagnosticFormat::Concise) {
|
||||
match self.diag.severity() {
|
||||
Severity::Info => f.write_str("info")?,
|
||||
Severity::Warning => f.write_str("warning")?,
|
||||
Severity::Error => f.write_str("error")?,
|
||||
Severity::Fatal => f.write_str("fatal")?,
|
||||
}
|
||||
let stylesheet = if self.config.color {
|
||||
DiagnosticStylesheet::styled()
|
||||
} else {
|
||||
DiagnosticStylesheet::plain()
|
||||
};
|
||||
|
||||
if matches!(self.config.format, DiagnosticFormat::Concise) {
|
||||
let (severity, severity_style) = match self.diag.severity() {
|
||||
Severity::Info => ("info", stylesheet.info),
|
||||
Severity::Warning => ("warning", stylesheet.warning),
|
||||
Severity::Error => ("error", stylesheet.error),
|
||||
Severity::Fatal => ("fatal", stylesheet.error),
|
||||
};
|
||||
|
||||
write!(
|
||||
f,
|
||||
"{severity}[{id}]",
|
||||
severity = fmt_styled(severity, severity_style),
|
||||
id = fmt_styled(self.diag.id(), stylesheet.emphasis)
|
||||
)?;
|
||||
|
||||
write!(f, "[{rule}]", rule = self.diag.id())?;
|
||||
if let Some(span) = self.diag.primary_span() {
|
||||
write!(f, " {path}", path = self.resolver.path(span.file()))?;
|
||||
write!(
|
||||
f,
|
||||
" {path}",
|
||||
path = fmt_styled(self.resolver.path(span.file()), stylesheet.emphasis)
|
||||
)?;
|
||||
if let Some(range) = span.range() {
|
||||
let input = self.resolver.input(span.file());
|
||||
let start = input.as_source_code().source_location(range.start());
|
||||
write!(f, ":{line}:{col}", line = start.row, col = start.column)?;
|
||||
let start = input.as_source_code().line_column(range.start());
|
||||
|
||||
write!(
|
||||
f,
|
||||
":{line}:{col}",
|
||||
line = fmt_styled(start.line, stylesheet.emphasis),
|
||||
col = fmt_styled(start.column, stylesheet.emphasis),
|
||||
)?;
|
||||
}
|
||||
write!(f, ":")?;
|
||||
}
|
||||
return writeln!(f, " {}", self.diag.concise_message());
|
||||
return writeln!(f, " {message}", message = self.diag.concise_message());
|
||||
}
|
||||
|
||||
let mut renderer = self.annotate_renderer.clone();
|
||||
renderer = renderer
|
||||
.error(stylesheet.error)
|
||||
.warning(stylesheet.warning)
|
||||
.info(stylesheet.info)
|
||||
.note(stylesheet.note)
|
||||
.help(stylesheet.help)
|
||||
.line_no(stylesheet.line_no)
|
||||
.emphasis(stylesheet.emphasis)
|
||||
.none(stylesheet.none);
|
||||
|
||||
let resolved = Resolved::new(&self.resolver, self.diag);
|
||||
let renderable = resolved.to_renderable(self.config.context);
|
||||
for diag in renderable.diagnostics.iter() {
|
||||
writeln!(f, "{}", self.annotate_renderer.render(diag.to_annotate()))?;
|
||||
writeln!(f, "{}", renderer.render(diag.to_annotate()))?;
|
||||
}
|
||||
writeln!(f)
|
||||
}
|
||||
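
For orientation, the concise branch above reduces each diagnostic to a single line of the shape `severity[id] path:line:col: message`. A minimal sketch with made-up values (not the crate's actual API):

```rust
fn main() {
    // Hypothetical diagnostic values, for illustration only.
    let (severity, id, path, line, col, message) =
        ("error", "lint:test-diagnostic", "animals", 5, 1, "main diagnostic message");
    // Mirrors the concise composition: severity[id] path:line:col: message
    println!("{severity}[{id}] {path}:{line}:{col}: {message}");
}
```
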
@@ -286,8 +322,7 @@ impl<'a> ResolvedAnnotation<'a> {
let source = input.as_source_code();
let (range, line_start, line_end) = match (ann.span.range(), ann.message.is_some()) {
// An annotation with no range AND no message is probably(?)
// meaningless, so just ignore it.
(None, false) => return None,
// meaningless, but we should try to render it anyway.
(None, _) => (
TextRange::empty(TextSize::new(0)),
OneIndexed::MIN,
@@ -341,7 +376,7 @@ struct Renderable<'r> {
// (At time of writing, 2025-03-13, we currently render the diagnostic
// ID into the main message of the parent diagnostic. We don't use this
// specific field to do that though.)
#[allow(dead_code)]
#[expect(dead_code)]
id: &'r str,
diagnostics: Vec<RenderableDiagnostic<'r>>,
}
@@ -589,7 +624,7 @@ impl<'r> RenderableAnnotation<'r> {
/// For example, at time of writing (2025-03-07), the plan is (roughly) for
/// Ruff to grow its own interner of file paths so that a `Span` can store an
/// interned ID instead of a (roughly) `Arc<Path>`. This interner is planned
/// to be entirely separate from the Salsa interner used by Red Knot, and so,
/// to be entirely separate from the Salsa interner used by ty, and so,
/// callers will need to pass in a different "resolver" for turning `Span`s
/// into actual file paths/contents. The infrastructure for this isn't fully in
/// place, but this type serves to demarcate the intended abstraction boundary.
@@ -605,7 +640,10 @@ impl<'a> FileResolver<'a> {

/// Returns the path associated with the file given.
fn path(&self, file: File) -> &'a str {
file.path(self.db).as_str()
relativize_path(
self.db.system().current_directory(),
file.path(self.db).as_str(),
)
}

/// Returns the input contents associated with the file given.
@@ -677,6 +715,14 @@ fn context_after(source: &SourceCode<'_, '_>, len: usize, start: OneIndexed) ->
line
}

/// Convert an absolute path to be relative to the current working directory.
fn relativize_path<'p>(cwd: &SystemPath, path: &'p str) -> &'p str {
if let Ok(path) = SystemPath::new(path).strip_prefix(cwd) {
return path.as_str();
}
path
}
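
As a standalone illustration of the relativization used by `path` above, here is a minimal sketch of the same prefix-stripping idea with `std::path` instead of `SystemPath`; the paths are invented:

```rust
use std::path::Path;

/// Minimal analogue of `relativize_path`: strip the cwd prefix when possible,
/// otherwise fall back to the original (absolute) path.
fn relativize<'p>(cwd: &Path, path: &'p Path) -> &'p Path {
    path.strip_prefix(cwd).unwrap_or(path)
}

fn main() {
    let cwd = Path::new("/home/user/project");
    assert_eq!(
        relativize(cwd, Path::new("/home/user/project/animals")),
        Path::new("animals")
    );
    // Paths outside the cwd are left untouched.
    assert_eq!(relativize(cwd, Path::new("/etc/hosts")), Path::new("/etc/hosts"));
    println!("ok");
}
```
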

#[cfg(test)]
mod tests {

@@ -758,7 +804,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -782,7 +828,7 @@ watermelon
env.render(&diag),
@r"
warning: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -802,7 +848,7 @@ watermelon
env.render(&diag),
@r"
info: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -815,6 +861,50 @@ watermelon
);
}

#[test]
fn no_range() {
let mut env = TestEnvironment::new();
env.add("animals", ANIMALS);

let mut builder = env.err();
builder
.diag
.annotate(Annotation::primary(builder.env.path("animals")));
let diag = builder.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> animals:1:1
|
1 | aardvark
| ^
2 | beetle
3 | canary
|
",
);

let mut builder = env.err();
builder.diag.annotate(
Annotation::primary(builder.env.path("animals")).message("primary annotation message"),
);
let diag = builder.build();
insta::assert_snapshot!(
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> animals:1:1
|
1 | aardvark
| ^ primary annotation message
2 | beetle
3 | canary
|
",
);
}

#[test]
fn non_ascii() {
let mut env = TestEnvironment::new();
@@ -825,7 +915,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /non-ascii:5:1
--> non-ascii:5:1
|
3 | ΔΔΔΔΔΔΔΔΔΔΔΔ
4 | ββββββββββββ
@@ -844,7 +934,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /non-ascii:2:2
--> non-ascii:2:2
|
1 | ☃☃☃☃☃☃☃☃☃☃☃☃
2 | 💩💩💩💩💩💩💩💩💩💩💩💩
@@ -868,7 +958,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
4 | dog
5 | elephant
@@ -885,7 +975,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
5 | elephant
| ^^^^^^^^
@@ -900,7 +990,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -917,7 +1007,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:11:1
--> animals:11:1
|
9 | inchworm
10 | jackrabbit
@@ -934,7 +1024,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
1 | aardvark
2 | beetle
@@ -967,14 +1057,14 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
2 | beetle
3 | canary
|
::: /animals:11:1
::: animals:11:1
|
9 | inchworm
10 | jackrabbit
@@ -1011,7 +1101,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -1036,7 +1126,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -1064,13 +1154,13 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
2 | beetle
|
::: /animals:5:1
::: animals:5:1
|
4 | dog
5 | elephant
@@ -1092,7 +1182,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -1117,7 +1207,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -1148,7 +1238,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:1:1
--> animals:1:1
|
1 | aardvark
| ^^^^^^^^
@@ -1156,7 +1246,7 @@ watermelon
3 | canary
4 | dog
|
::: /animals:9:1
::: animals:9:1
|
6 | finch
7 | gorilla
@@ -1186,7 +1276,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /spacey-animals:8:1
--> spacey-animals:8:1
|
7 | dog
8 | elephant
@@ -1203,7 +1293,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /spacey-animals:12:1
--> spacey-animals:12:1
|
11 | gorilla
12 | hippopotamus
@@ -1221,7 +1311,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /spacey-animals:13:1
--> spacey-animals:13:1
|
11 | gorilla
12 | hippopotamus
@@ -1261,12 +1351,12 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /spacey-animals:3:1
--> spacey-animals:3:1
|
3 | beetle
| ^^^^^^
|
::: /spacey-animals:5:1
::: spacey-animals:5:1
|
5 | canary
| ^^^^^^
@@ -1290,7 +1380,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1299,7 +1389,7 @@ watermelon
4 | dog
5 | elephant
|
::: /fruits:3:1
::: fruits:3:1
|
1 | apple
2 | banana
@@ -1327,7 +1417,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1364,7 +1454,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1392,7 +1482,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1402,7 +1492,7 @@ watermelon
5 | elephant
|
warning: sub-diagnostic message
--> /fruits:3:1
--> fruits:3:1
|
1 | apple
2 | banana
@@ -1428,7 +1518,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1438,7 +1528,7 @@ watermelon
5 | elephant
|
warning: sub-diagnostic message
--> /fruits:3:1
--> fruits:3:1
|
1 | apple
2 | banana
@@ -1448,7 +1538,7 @@ watermelon
5 | orange
|
warning: sub-diagnostic message
--> /animals:11:1
--> animals:11:1
|
9 | inchworm
10 | jackrabbit
@@ -1467,7 +1557,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1477,7 +1567,7 @@ watermelon
5 | elephant
|
warning: sub-diagnostic message
--> /animals:11:1
--> animals:11:1
|
9 | inchworm
10 | jackrabbit
@@ -1485,7 +1575,7 @@ watermelon
| ^^^^^^^^
|
warning: sub-diagnostic message
--> /fruits:3:1
--> fruits:3:1
|
1 | apple
2 | banana
@@ -1515,7 +1605,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1525,7 +1615,7 @@ watermelon
5 | elephant
|
warning: sub-diagnostic message
--> /animals:3:1
--> animals:3:1
|
1 | aardvark
2 | beetle
@@ -1551,7 +1641,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1574,7 +1664,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1594,7 +1684,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1614,7 +1704,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:4
--> animals:5:4
|
3 | canary
4 | dog
@@ -1636,7 +1726,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:4
--> animals:5:4
|
3 | canary
4 | dog
@@ -1668,7 +1758,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:4:1
--> animals:4:1
|
2 | beetle
3 | canary
@@ -1697,7 +1787,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:4:1
--> animals:4:1
|
2 | beetle
3 | canary
@@ -1728,7 +1818,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1763,7 +1853,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1791,7 +1881,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
3 | canary
4 | dog
@@ -1823,7 +1913,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:3
--> animals:5:3
|
3 | canary
4 | dog
@@ -1845,7 +1935,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:3
--> animals:5:3
|
3 | canary
4 | dog
@@ -1878,7 +1968,7 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:8:1
--> animals:8:1
|
6 | finch
7 | gorilla
@@ -1887,7 +1977,7 @@ watermelon
9 | inchworm
10 | jackrabbit
|
::: /animals:1:1
::: animals:1:1
|
1 | aardvark
| -------- secondary
@@ -1918,27 +2008,27 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:5:1
--> animals:5:1
|
5 | elephant
| ^^^^^^^^ primary 5
|
::: /animals:9:1
::: animals:9:1
|
9 | inchworm
| ^^^^^^^^ primary 9
|
::: /animals:1:1
::: animals:1:1
|
1 | aardvark
| -------- secondary 1
|
::: /animals:3:1
::: animals:3:1
|
3 | canary
| ------ secondary 3
|
::: /animals:7:1
::: animals:7:1
|
7 | gorilla
| ------- secondary 7
@@ -1962,14 +2052,14 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /fruits:1:1
--> fruits:1:1
|
1 | apple
| ^^^^^ primary
2 | banana
3 | cantelope
|
::: /animals:1:1
::: animals:1:1
|
1 | aardvark
| -------- secondary
@@ -1997,32 +2087,32 @@ watermelon
env.render(&diag),
@r"
error: lint:test-diagnostic: main diagnostic message
--> /animals:11:1
--> animals:11:1
|
11 | kangaroo
| ^^^^^^^^ primary animals 11
|
::: /animals:1:1
::: animals:1:1
|
1 | aardvark
| -------- secondary animals 1
|
::: /animals:3:1
::: animals:3:1
|
3 | canary
| ------ secondary animals 3
|
::: /animals:7:1
::: animals:7:1
|
7 | gorilla
| ------- secondary animals 7
|
::: /fruits:10:1
::: fruits:10:1
|
10 | watermelon
| ^^^^^^^^^^ primary fruits 10
|
::: /fruits:2:1
::: fruits:2:1
|
2 | banana
| ------ secondary fruits 2
@@ -2082,10 +2172,10 @@ watermelon
/// otherwise, the span will end where the next line begins, and this
/// confuses `ruff_annotate_snippets` as of 2025-03-13.)
fn span(&self, path: &str, line_offset_start: &str, line_offset_end: &str) -> Span {
let file = system_path_to_file(&self.db, path).unwrap();
let span = self.path(path);

let text = source_text(&self.db, file);
let line_index = line_index(&self.db, file);
let text = source_text(&self.db, span.file());
let line_index = line_index(&self.db, span.file());
let source = SourceCode::new(text.as_str(), &line_index);

let (line_start, offset_start) = parse_line_offset(line_offset_start);
@@ -2099,7 +2189,13 @@ watermelon
None => source.line_end(line_end) - TextSize::from(1),
Some(offset) => source.line_start(line_end) + offset,
};
Span::from(file).with_range(TextRange::new(start, end))
span.with_range(TextRange::new(start, end))
}

/// Like `span`, but only attaches a file path.
fn path(&self, path: &str) -> Span {
let file = system_path_to_file(&self.db, path).unwrap();
Span::from(file)
}

/// A convenience function for returning a builder for a diagnostic

crates/ruff_db/src/diagnostic/stylesheet.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use anstyle::{AnsiColor, Effects, Style};
use std::fmt::Formatter;

pub(super) const fn fmt_styled<'a, T>(
content: T,
style: anstyle::Style,
) -> impl std::fmt::Display + 'a
where
T: std::fmt::Display + 'a,
{
struct FmtStyled<T> {
content: T,
style: anstyle::Style,
}

impl<T> std::fmt::Display for FmtStyled<T>
where
T: std::fmt::Display,
{
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{style_start}{content}{style_end}",
style_start = self.style.render(),
content = self.content,
style_end = self.style.render_reset()
)
}
}

FmtStyled { content, style }
}
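
`fmt_styled` defers the styling to `Display`, so no intermediate `String` is allocated. A simplified standalone sketch of the same wrapper pattern, assuming only the `anstyle` crate (this is not the crate-private type itself):

```rust
use std::fmt;

use anstyle::{Effects, Style};

// Simplified stand-in for the private wrapper: the ANSI escape codes are
// written straight into the formatter around the content.
struct Styled<T>(T, Style);

impl<T: fmt::Display> fmt::Display for Styled<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}{}{}", self.1.render(), self.0, self.1.render_reset())
    }
}

fn main() {
    let bold = Style::new().effects(Effects::BOLD);
    println!("{}", Styled("emphasized", bold));
}
```
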

#[derive(Clone, Debug)]
pub struct DiagnosticStylesheet {
pub(crate) error: Style,
pub(crate) warning: Style,
pub(crate) info: Style,
pub(crate) note: Style,
pub(crate) help: Style,
pub(crate) line_no: Style,
pub(crate) emphasis: Style,
pub(crate) none: Style,
}

impl Default for DiagnosticStylesheet {
fn default() -> Self {
Self::plain()
}
}

impl DiagnosticStylesheet {
/// Default terminal styling
pub fn styled() -> Self {
let bright_blue = AnsiColor::BrightBlue.on_default();
Self {
error: AnsiColor::BrightRed.on_default().effects(Effects::BOLD),
warning: AnsiColor::Yellow.on_default().effects(Effects::BOLD),
info: bright_blue.effects(Effects::BOLD),
note: AnsiColor::BrightGreen.on_default().effects(Effects::BOLD),
help: AnsiColor::BrightCyan.on_default().effects(Effects::BOLD),
line_no: bright_blue.effects(Effects::BOLD),
emphasis: Style::new().effects(Effects::BOLD),
none: Style::new(),
}
}

pub fn plain() -> Self {
Self {
error: Style::new(),
warning: Style::new(),
info: Style::new(),
note: Style::new(),
help: Style::new(),
line_no: Style::new(),
emphasis: Style::new(),
none: Style::new(),
}
}
}
@@ -94,7 +94,9 @@ impl Files {
.root(db, path)
.map_or(Durability::default(), |root| root.durability(db));

let builder = File::builder(FilePath::System(absolute)).durability(durability);
let builder = File::builder(FilePath::System(absolute))
.durability(durability)
.path_durability(Durability::HIGH);

let builder = match metadata {
Ok(metadata) if metadata.file_type().is_file() => builder
@@ -159,9 +161,11 @@ impl Files {
tracing::trace!("Adding virtual file {}", path);
let virtual_file = VirtualFile(
File::builder(FilePath::SystemVirtual(path.to_path_buf()))
.path_durability(Durability::HIGH)
.status(FileStatus::Exists)
.revision(FileRevision::zero())
.permissions(None)
.permissions_durability(Durability::HIGH)
.new(db),
);
self.inner
@@ -272,7 +276,7 @@ impl std::panic::RefUnwindSafe for Files {}
/// A file that's either stored on the host system's file system or in the vendored file system.
#[salsa::input]
pub struct File {
/// The path of the file.
/// The path of the file (immutable).
#[return_ref]
pub path: FilePath,


@@ -1,11 +1,10 @@
use std::hash::BuildHasherDefault;

use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;

use crate::files::Files;
use crate::system::System;
use crate::vendored::VendoredFileSystem;
use ruff_python_ast::PythonVersion;
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;

pub mod diagnostic;
pub mod display;
@@ -37,6 +36,29 @@ pub trait Upcast<T: ?Sized> {
fn upcast_mut(&mut self) -> &mut T;
}

/// Returns the maximum number of tasks that ty is allowed
/// to process in parallel.
///
/// Returns [`std::thread::available_parallelism`], unless the environment
/// variable `TY_MAX_PARALLELISM` or `RAYON_NUM_THREADS` is set. `TY_MAX_PARALLELISM` takes
/// precedence over `RAYON_NUM_THREADS`.
///
/// Falls back to `1` if `available_parallelism` is not available.
///
/// Setting `TY_MAX_PARALLELISM` to `2` only restricts the number of threads that ty spawns
/// to process work in parallel, for example to index a directory or to check the files of a project.
/// ty can still spawn more threads for other tasks, e.g. to wait for a Ctrl+C signal or
/// to watch the files for changes.
pub fn max_parallelism() -> NonZeroUsize {
std::env::var("TY_MAX_PARALLELISM")
.or_else(|_| std::env::var("RAYON_NUM_THREADS"))
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or_else(|| {
std::thread::available_parallelism().unwrap_or_else(|_| NonZeroUsize::new(1).unwrap())
})
}
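
A hedged standalone sketch of the resolution order documented above; the body is copied for illustration, and the printed value depends on your environment:

```rust
use std::num::NonZeroUsize;

// Resolution order: TY_MAX_PARALLELISM, then RAYON_NUM_THREADS,
// then available_parallelism(), then 1.
fn max_parallelism() -> NonZeroUsize {
    std::env::var("TY_MAX_PARALLELISM")
        .or_else(|_| std::env::var("RAYON_NUM_THREADS"))
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or_else(|| {
            std::thread::available_parallelism().unwrap_or_else(|_| NonZeroUsize::new(1).unwrap())
        })
}

fn main() {
    // With TY_MAX_PARALLELISM=2 in the environment this prints 2,
    // regardless of RAYON_NUM_THREADS or the machine's core count.
    println!("max parallelism: {}", max_parallelism());
}
```
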

#[cfg(test)]
mod tests {
use std::sync::Arc;

@@ -1,12 +1,29 @@
use std::backtrace::BacktraceStatus;
use std::cell::Cell;
use std::panic::Location;
use std::sync::OnceLock;

#[derive(Default, Debug)]
#[derive(Debug)]
pub struct PanicError {
pub location: Option<String>,
pub payload: Option<String>,
pub payload: Payload,
pub backtrace: Option<std::backtrace::Backtrace>,
pub salsa_backtrace: Option<salsa::Backtrace>,
}

#[derive(Debug)]
pub struct Payload(Box<dyn std::any::Any + Send>);

impl Payload {
pub fn as_str(&self) -> Option<&str> {
if let Some(s) = self.0.downcast_ref::<String>() {
Some(s)
} else if let Some(s) = self.0.downcast_ref::<&str>() {
Some(s)
} else {
None
}
}
}
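
Why `as_str` tries two downcasts: the payload type depends on how the panic was raised. A self-contained sketch using only `std` (not the `Payload` type above):

```rust
fn payload_as_str(payload: &(dyn std::any::Any + Send)) -> Option<&str> {
    // `panic!("literal")` produces a `&'static str` payload, while
    // `panic!("{}", x)` produces a `String`, so both must be tried.
    payload
        .downcast_ref::<String>()
        .map(String::as_str)
        .or_else(|| payload.downcast_ref::<&str>().copied())
}

fn main() {
    // Note: the default panic hook still prints the panics to stderr here.
    let err = std::panic::catch_unwind(|| panic!("boom")).unwrap_err();
    assert_eq!(payload_as_str(&*err), Some("boom"));

    let err = std::panic::catch_unwind(|| panic!("count: {}", 3)).unwrap_err();
    assert_eq!(payload_as_str(&*err), Some("count: 3"));
    println!("ok");
}
```
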

impl std::fmt::Display for PanicError {
@@ -15,20 +32,39 @@ impl std::fmt::Display for PanicError {
if let Some(location) = &self.location {
write!(f, " {location}")?;
}
if let Some(payload) = &self.payload {
if let Some(payload) = self.payload.as_str() {
write!(f, ":\n{payload}")?;
}
if let Some(backtrace) = &self.backtrace {
writeln!(f, "\nBacktrace: {backtrace}")?;
match backtrace.status() {
BacktraceStatus::Disabled => {
writeln!(
f,
"\nrun with `RUST_BACKTRACE=1` environment variable to display a backtrace"
)?;
}
BacktraceStatus::Captured => {
writeln!(f, "\nBacktrace: {backtrace}")?;
}
_ => {}
}
}
Ok(())
}
}

#[derive(Default)]
struct CapturedPanicInfo {
backtrace: Option<std::backtrace::Backtrace>,
location: Option<String>,
salsa_backtrace: Option<salsa::Backtrace>,
}

thread_local! {
static CAPTURE_PANIC_INFO: Cell<bool> = const { Cell::new(false) };
static OUR_HOOK_RAN: Cell<bool> = const { Cell::new(false) };
static LAST_PANIC: Cell<Option<PanicError>> = const { Cell::new(None) };
static LAST_BACKTRACE: Cell<CapturedPanicInfo> = const {
Cell::new(CapturedPanicInfo { backtrace: None, location: None, salsa_backtrace: None })
};
}

fn install_hook() {
@@ -36,24 +72,18 @@ fn install_hook() {
ONCE.get_or_init(|| {
let prev = std::panic::take_hook();
std::panic::set_hook(Box::new(move |info| {
OUR_HOOK_RAN.with(|cell| cell.set(true));
let should_capture = CAPTURE_PANIC_INFO.with(Cell::get);
if !should_capture {
return (*prev)(info);
}
let payload = if let Some(s) = info.payload().downcast_ref::<&str>() {
Some(s.to_string())
} else {
info.payload().downcast_ref::<String>().cloned()
};

let location = info.location().map(Location::to_string);
let backtrace = std::backtrace::Backtrace::force_capture();
LAST_PANIC.with(|cell| {
cell.set(Some(PanicError {
payload,
location,
backtrace: Some(backtrace),
}));
let backtrace = Some(std::backtrace::Backtrace::capture());

LAST_BACKTRACE.set(CapturedPanicInfo {
backtrace,
location,
salsa_backtrace: salsa::Backtrace::capture(),
});
}));
});
@@ -70,7 +100,7 @@ fn install_hook() {
/// stderr).
///
/// We assume that there is nothing else running in this process that needs to install a competing
/// panic hook. We are careful to install our custom hook only once, and we do not ever restore
/// panic hook. We are careful to install our custom hook only once, and we do not ever restore
/// the previous hook (since you can always retain the previous hook's behavior by not calling this
/// wrapper).
pub fn catch_unwind<F, R>(f: F) -> Result<R, PanicError>
@@ -78,15 +108,83 @@ where
F: FnOnce() -> R + std::panic::UnwindSafe,
{
install_hook();
OUR_HOOK_RAN.with(|cell| cell.set(false));
let prev_should_capture = CAPTURE_PANIC_INFO.with(|cell| cell.replace(true));
let result = std::panic::catch_unwind(f).map_err(|_| {
let our_hook_ran = OUR_HOOK_RAN.with(Cell::get);
if !our_hook_ran {
panic!("detected a competing panic hook");
let prev_should_capture = CAPTURE_PANIC_INFO.replace(true);
let result = std::panic::catch_unwind(f).map_err(|payload| {
// Try to get the backtrace and location from our custom panic hook.
// The custom panic hook only runs once when `panic!` is called (or similar). It doesn't
// run when the panic is propagated with `std::panic::resume_unwind`. The panic hook
// is also not called when the panic is raised with `std::panic::resume_unwind`, as is the
// case for salsa unwinds (see the ignored test below).
// Because of that, always take the payload from `catch_unwind` because it may have been transformed
// by an inner `std::panic::catch_unwind` handler, and only use the information
// from the custom handler to enrich the error with the backtrace and location.
let CapturedPanicInfo {
location,
backtrace,
salsa_backtrace,
} = LAST_BACKTRACE.with(Cell::take);

PanicError {
location,
payload: Payload(payload),
backtrace,
salsa_backtrace,
}
LAST_PANIC.with(Cell::take).unwrap_or_default()
});
CAPTURE_PANIC_INFO.with(|cell| cell.set(prev_should_capture));
CAPTURE_PANIC_INFO.set(prev_should_capture);
result
}
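
A self-contained sketch of the division of labor described in the comments above: a panic hook captures panic-site context (location, backtrace), while `catch_unwind` supplies the payload. This uses only `std`, not this module's wrappers:

```rust
use std::cell::Cell;
use std::panic::Location;

thread_local! {
    static LAST_LOCATION: Cell<Option<String>> = const { Cell::new(None) };
}

fn main() {
    // The hook runs at the panic site, where the location is still known...
    std::panic::set_hook(Box::new(|info| {
        LAST_LOCATION.set(info.location().map(Location::to_string));
    }));

    // ...while catch_unwind only ever sees the payload.
    let payload = std::panic::catch_unwind(|| panic!("boom")).unwrap_err();
    let location = LAST_LOCATION.take();

    println!(
        "payload: {:?}, location: {:?}",
        payload.downcast_ref::<&str>(),
        location
    );
}
```
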

#[cfg(test)]
mod tests {
use salsa::{Database, Durability};

#[test]
#[ignore = "super::catch_unwind installs a custom panic handler, which could affect test isolation"]
fn no_backtrace_for_salsa_cancelled() {
#[salsa::input]
struct Input {
value: u32,
}

#[salsa::tracked]
fn test_query(db: &dyn Database, input: Input) -> u32 {
loop {
// This should throw a cancelled error
let _ = input.value(db);
}
}

let db = salsa::DatabaseImpl::new();

let input = Input::new(&db, 42);

let result = std::thread::scope(move |scope| {
{
let mut db = db.clone();
scope.spawn(move || {
// This will cancel the other thread by throwing a `salsa::Cancelled` error.
db.synthetic_write(Durability::MEDIUM);
});
}

{
scope.spawn(move || {
super::catch_unwind(|| {
test_query(&db, input);
})
})
}
.join()
.unwrap()
});

match result {
Ok(_) => panic!("Expected query to panic"),
Err(err) => {
// Panics triggered with `resume_unwind` have no backtrace.
assert!(err.backtrace.is_none());
}
}
}
}

@@ -1,19 +1,19 @@
use filetime::FileTime;
use ruff_notebook::{Notebook, NotebookError};
use rustc_hash::FxHashSet;
use std::panic::RefUnwindSafe;
use std::sync::Arc;
use std::{any::Any, path::PathBuf};

use crate::system::{
CaseSensitivity, DirectoryEntry, FileType, GlobError, GlobErrorKind, Metadata, Result, System,
SystemPath, SystemPathBuf, SystemVirtualPath, WritableSystem,
};

use super::walk_directory::{
self, DirectoryWalker, WalkDirectoryBuilder, WalkDirectoryConfiguration,
WalkDirectoryVisitorBuilder, WalkState,
};
use crate::max_parallelism;
use crate::system::{
CaseSensitivity, DirectoryEntry, FileType, GlobError, GlobErrorKind, Metadata, Result, System,
SystemPath, SystemPathBuf, SystemVirtualPath, WritableSystem,
};
use filetime::FileTime;
use ruff_notebook::{Notebook, NotebookError};
use rustc_hash::FxHashSet;
use std::num::NonZeroUsize;
use std::panic::RefUnwindSafe;
use std::sync::Arc;
use std::{any::Any, path::PathBuf};

/// A system implementation that uses the OS file system.
#[derive(Debug, Clone)]
@@ -50,7 +50,6 @@ impl OsSystem {

Self {
// Spreading `..Default` because it isn't possible to feature gate the initializer of a single field.
#[allow(clippy::needless_update)]
inner: Arc::new(OsSystemInner {
cwd: cwd.to_path_buf(),
case_sensitivity,
@@ -427,11 +426,7 @@ impl DirectoryWalker for OsDirectoryWalker {
builder.add(additional_path.as_std_path());
}

builder.threads(
std::thread::available_parallelism()
.map_or(1, std::num::NonZeroUsize::get)
.min(12),
);
builder.threads(max_parallelism().min(NonZeroUsize::new(12).unwrap()).get());

builder.build_parallel().run(|| {
let mut visitor = visitor_builder.build();

@@ -35,7 +35,7 @@ impl WalkDirectoryBuilder {
/// Each additional path is traversed recursively.
/// This should be preferred over building multiple
/// walkers since it enables reusing resources.
#[allow(clippy::should_implement_trait)]
#[expect(clippy::should_implement_trait)]
pub fn add(mut self, path: impl AsRef<SystemPath>) -> Self {
self.paths.push(path.as_ref().to_path_buf());
self

@@ -107,7 +107,7 @@ fn query_name<Q>(_query: &Q) -> &'static str {
.unwrap_or(full_qualified_query_name)
}

/// Sets up logging for the current thread. It captures all `red_knot` and `ruff` events.
/// Sets up logging for the current thread. It captures all `ty` and `ruff` events.
///
/// Useful for capturing the tracing output in a failing test.
///
@@ -128,7 +128,7 @@ pub fn setup_logging() -> LoggingGuard {
/// # Examples
/// ```
/// use ruff_db::testing::setup_logging_with_filter;
/// let _logging = setup_logging_with_filter("red_knot_module_resolver::resolver");
/// let _logging = setup_logging_with_filter("ty_module_resolver::resolver");
/// ```
///
/// # Filter
@@ -148,11 +148,7 @@ impl LoggingBuilder {
pub fn new() -> Self {
Self {
filter: EnvFilter::default()
.add_directive(
"red_knot=trace"
.parse()
.expect("Hardcoded directive to be valid"),
)
.add_directive("ty=trace".parse().expect("Hardcoded directive to be valid"))
.add_directive(
"ruff=trace"
.parse()

@@ -172,7 +172,7 @@ impl Default for VendoredFileSystem {
/// that users of the `VendoredFileSystem` could realistically need.
/// For debugging purposes, however, we want to have all information
/// available.
#[allow(unused)]
#[expect(unused)]
#[derive(Debug)]
struct ZipFileDebugInfo {
crc32_hash: u32,

@@ -11,7 +11,7 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_project = { workspace = true, features = ["schemars"] }
ty_project = { workspace = true, features = ["schemars"] }
ruff = { workspace = true }
ruff_diagnostics = { workspace = true }
ruff_formatter = { workspace = true }

@@ -63,7 +63,6 @@ fn find_pyproject_config(
}

/// Find files that ruff would check so we can format them. Adapted from `ruff`.
#[allow(clippy::type_complexity)]
fn ruff_check_paths<'a>(
pyproject_config: &'a PyprojectConfig,
cli: &FormatArguments,
@@ -135,12 +134,12 @@ impl Statistics {
}

/// We currently prefer the similarity index, but I'd like to keep this around
#[allow(clippy::cast_precision_loss, unused)]
#[expect(clippy::cast_precision_loss, unused)]
pub(crate) fn jaccard_index(&self) -> f32 {
self.intersection as f32 / (self.black_input + self.ruff_output + self.intersection) as f32
}

#[allow(clippy::cast_precision_loss)]
#[expect(clippy::cast_precision_loss)]
pub(crate) fn similarity_index(&self) -> f32 {
self.intersection as f32 / (self.black_input + self.intersection) as f32
}
|
||||
Full,
|
||||
}
|
||||
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
#[expect(clippy::struct_excessive_bools)]
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
/// Like `ruff check`'s files. See `--multi-project` if you want to format an ecosystem
|
||||
@@ -222,7 +221,7 @@ pub(crate) struct Args {
|
||||
#[arg(long)]
|
||||
pub(crate) files_with_errors: Option<u32>,
|
||||
#[clap(flatten)]
|
||||
#[allow(clippy::struct_field_names)]
|
||||
#[expect(clippy::struct_field_names)]
|
||||
pub(crate) log_level_args: LogLevelArgs,
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::{generate_cli_help, generate_docs, generate_json_schema, generate_knot_schema};
|
||||
use crate::{generate_cli_help, generate_docs, generate_json_schema, generate_ty_schema};
|
||||
|
||||
pub(crate) const REGENERATE_ALL_COMMAND: &str = "cargo dev generate-all";
|
||||
|
||||
@@ -33,7 +33,7 @@ impl Mode {
|
||||
|
||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
generate_json_schema::main(&generate_json_schema::Args { mode: args.mode })?;
|
||||
generate_knot_schema::main(&generate_knot_schema::Args { mode: args.mode })?;
|
||||
generate_ty_schema::main(&generate_ty_schema::Args { mode: args.mode })?;
|
||||
generate_cli_help::main(&generate_cli_help::Args { mode: args.mode })?;
|
||||
generate_docs::main(&generate_docs::Args {
|
||||
dry_run: args.mode.is_dry_run(),
|
||||
|
||||
@@ -34,7 +34,6 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
|
||||
RuleGroup::Deprecated => {
|
||||
format!("<span title='Rule has been deprecated'>{WARNING_SYMBOL}</span>")
|
||||
}
|
||||
#[allow(deprecated)]
|
||||
RuleGroup::Preview => {
|
||||
format!("<span title='Rule is in preview'>{PREVIEW_SYMBOL}</span>")
|
||||
}
|
||||
@@ -78,7 +77,7 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
|
||||
se = "</span>";
|
||||
}
|
||||
|
||||
#[allow(clippy::or_fun_call)]
|
||||
#[expect(clippy::or_fun_call)]
|
||||
let _ = write!(
|
||||
table_out,
|
||||
"| {ss}{0}{1}{se} {{ #{0}{1} }} | {ss}{2}{se} | {ss}{3}{se} | {ss}{4}{se} |",
|
||||
|
||||
@@ -9,11 +9,11 @@ use schemars::schema_for;
|
||||
|
||||
use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND};
|
||||
use crate::ROOT_DIR;
|
||||
use red_knot_project::metadata::options::Options;
|
||||
use ty_project::metadata::options::Options;
|
||||
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
/// Write the generated table to stdout (rather than to `knot.schema.json`).
|
||||
/// Write the generated table to stdout (rather than to `ty.schema.json`).
|
||||
#[arg(long, default_value_t, value_enum)]
|
||||
pub(crate) mode: Mode,
|
||||
}
|
||||
@@ -21,7 +21,7 @@ pub(crate) struct Args {
|
||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
let schema = schema_for!(Options);
|
||||
let schema_string = serde_json::to_string_pretty(&schema).unwrap();
|
||||
let filename = "knot.schema.json";
|
||||
let filename = "ty.schema.json";
|
||||
let schema_path = PathBuf::from(ROOT_DIR).join(filename);
|
||||
|
||||
match args.mode {
|
||||
@@ -62,7 +62,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_generate_json_schema() -> Result<()> {
|
||||
let mode = if env::var("KNOT_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
let mode = if env::var("TY_UPDATE_SCHEMA").as_deref() == Ok("1") {
|
||||
Mode::Write
|
||||
} else {
|
||||
Mode::Check
|
||||
@@ -13,9 +13,9 @@ mod generate_all;
|
||||
mod generate_cli_help;
|
||||
mod generate_docs;
|
||||
mod generate_json_schema;
|
||||
mod generate_knot_schema;
|
||||
mod generate_options;
|
||||
mod generate_rules_table;
|
||||
mod generate_ty_schema;
|
||||
mod print_ast;
|
||||
mod print_cst;
|
||||
mod print_tokens;
|
||||
@@ -34,14 +34,14 @@ struct Args {
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
#[expect(clippy::large_enum_variant)]
|
||||
enum Command {
|
||||
/// Run all code and documentation generation steps.
|
||||
GenerateAll(generate_all::Args),
|
||||
/// Generate JSON schema for the TOML configuration file.
|
||||
GenerateJSONSchema(generate_json_schema::Args),
|
||||
/// Generate JSON schema for the Red Knot TOML configuration file.
|
||||
GenerateKnotSchema(generate_knot_schema::Args),
|
||||
/// Generate JSON schema for the ty TOML configuration file.
|
||||
GenerateTySchema(generate_ty_schema::Args),
|
||||
/// Generate a Markdown-compatible table of supported lint rules.
|
||||
GenerateRulesTable,
|
||||
/// Generate a Markdown-compatible listing of configuration options.
|
||||
@@ -82,11 +82,11 @@ fn main() -> Result<ExitCode> {
|
||||
command,
|
||||
global_options,
|
||||
} = Args::parse();
|
||||
#[allow(clippy::print_stdout)]
|
||||
#[expect(clippy::print_stdout)]
|
||||
match command {
|
||||
Command::GenerateAll(args) => generate_all::main(&args)?,
|
||||
Command::GenerateJSONSchema(args) => generate_json_schema::main(&args)?,
|
||||
Command::GenerateKnotSchema(args) => generate_knot_schema::main(&args)?,
|
||||
Command::GenerateTySchema(args) => generate_ty_schema::main(&args)?,
|
||||
Command::GenerateRulesTable => println!("{}", generate_rules_table::generate()),
|
||||
Command::GenerateOptions => println!("{}", generate_options::generate()),
|
||||
Command::GenerateCliHelp(args) => generate_cli_help::main(&args)?,
|
||||
|
||||
@@ -519,7 +519,7 @@ impl TextWidth {
|
||||
let char_width = match c {
|
||||
'\t' => indent_width.value(),
|
||||
'\n' => return TextWidth::Multiline,
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
c => c.width().unwrap_or(0) as u32,
|
||||
};
|
||||
width += char_width;
|
||||
|
||||
@@ -280,7 +280,7 @@ impl Format<IrFormatContext<'_>> for &[FormatElement] {
|
||||
| FormatElement::SourceCodeSlice { .. }) => {
|
||||
fn write_escaped(element: &FormatElement, f: &mut Formatter<IrFormatContext>) {
|
||||
let (text, text_width) = match element {
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
FormatElement::Token { text } => {
|
||||
(*text, TextWidth::Width(Width::new(text.len() as u32)))
|
||||
}
|
||||
|
||||
@@ -379,7 +379,7 @@ impl PartialEq for LabelId {
|
||||
}
|
||||
|
||||
impl LabelId {
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
#[expect(clippy::needless_pass_by_value)]
|
||||
pub fn of<T: LabelDefinition>(label: T) -> Self {
|
||||
Self {
|
||||
value: label.value(),
|
||||
|
||||
@@ -925,7 +925,7 @@ pub struct FormatState<Context> {
|
||||
group_id_builder: UniqueGroupIdBuilder,
|
||||
}
|
||||
|
||||
#[allow(clippy::missing_fields_in_debug)]
|
||||
#[expect(clippy::missing_fields_in_debug)]
|
||||
impl<Context> std::fmt::Debug for FormatState<Context>
|
||||
where
|
||||
Context: std::fmt::Debug,
|
||||
|
||||
@@ -331,7 +331,7 @@ impl<'a> Printer<'a> {
|
||||
FormatElement::Tag(StartVerbatim(kind)) => {
|
||||
if let VerbatimKind::Verbatim { length } = kind {
|
||||
// SAFETY: Ruff only supports formatting files <= 4GB
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
self.state.verbatim_markers.push(TextRange::at(
|
||||
TextSize::from(self.state.buffer.len() as u32),
|
||||
*length,
|
||||
@@ -464,7 +464,7 @@ impl<'a> Printer<'a> {
|
||||
self.push_marker();
|
||||
|
||||
match text {
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
Text::Token(token) => {
|
||||
self.state.buffer.push_str(token);
|
||||
self.state.line_width += token.len() as u32;
|
||||
@@ -831,7 +831,7 @@ impl<'a> Printer<'a> {
|
||||
} else {
|
||||
self.state.buffer.push(char);
|
||||
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
let char_width = if char == '\t' {
|
||||
self.options.indent_width.value()
|
||||
} else {
|
||||
@@ -1480,7 +1480,7 @@ impl<'a, 'print> FitsMeasurer<'a, 'print> {
|
||||
u32::from(indent.level()) * self.options().indent_width() + u32::from(indent.align());
|
||||
|
||||
match text {
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
Text::Token(token) => {
|
||||
self.state.line_width += token.len() as u32;
|
||||
}
|
||||
@@ -1511,7 +1511,7 @@ impl<'a, 'print> FitsMeasurer<'a, 'print> {
|
||||
}
|
||||
}
|
||||
// SAFETY: A u32 is sufficient to format files <= 4GB
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
#[expect(clippy::cast_possible_truncation)]
|
||||
c => c.width().unwrap_or(0) as u32,
|
||||
};
|
||||
self.state.line_width += char_width;
|
||||
|
||||
@@ -10,13 +10,13 @@ authors.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "serde"] }
|
||||
ruff_linter = { workspace = true }
|
||||
ruff_macros = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
ty_python_semantic = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
clap = { workspace = true, optional = true }
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use red_knot_python_semantic::ModuleName;
|
||||
use ruff_python_ast::visitor::source_order::{
|
||||
walk_expr, walk_module, walk_stmt, SourceOrderVisitor,
|
||||
};
|
||||
use ruff_python_ast::{self as ast, Expr, Mod, Stmt};
|
||||
use ty_python_semantic::ModuleName;
|
||||
|
||||
/// Collect all imports for a given Python file.
|
||||
#[derive(Default, Debug)]
|
||||
|
||||
@@ -2,15 +2,16 @@ use anyhow::Result;
|
||||
use std::sync::Arc;
|
||||
use zip::CompressionMethod;
|
||||
|
||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||
use red_knot_python_semantic::{
|
||||
default_lint_registry, Db, Program, ProgramSettings, PythonPlatform, SearchPathSettings,
|
||||
};
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{OsSystem, System, SystemPathBuf};
|
||||
use ruff_db::vendored::{VendoredFileSystem, VendoredFileSystemBuilder};
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||
use ty_python_semantic::{
|
||||
default_lint_registry, Db, Program, ProgramSettings, PythonPath, PythonPlatform,
|
||||
SearchPathSettings,
|
||||
};
|
||||
|
||||
static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
|
||||
let mut builder = VendoredFileSystemBuilder::new(CompressionMethod::Stored);
|
||||
@@ -32,8 +33,12 @@ impl ModuleDb {
|
||||
pub fn from_src_roots(
|
||||
src_roots: Vec<SystemPathBuf>,
|
||||
python_version: PythonVersion,
|
||||
venv_path: Option<SystemPathBuf>,
|
||||
) -> Result<Self> {
|
||||
let search_paths = SearchPathSettings::new(src_roots);
|
||||
let mut search_paths = SearchPathSettings::new(src_roots);
|
||||
if let Some(venv_path) = venv_path {
|
||||
search_paths.python_path = PythonPath::from_cli_flag(venv_path);
|
||||
}
|
||||
|
||||
let db = Self::default();
|
||||
Program::from_settings(
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use red_knot_python_semantic::resolve_module;
|
||||
use ruff_db::files::FilePath;
|
||||
use ty_python_semantic::resolve_module;
|
||||
|
||||
use crate::collector::CollectedImport;
|
||||
use crate::ModuleDb;
|
||||
|
||||
@@ -22,7 +22,7 @@ impl<I: Idx, T> IndexSlice<I, T> {
|
||||
pub const fn from_raw(raw: &[T]) -> &Self {
|
||||
let ptr: *const [T] = raw;
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
// SAFETY: `IndexSlice` is `repr(transparent)` over a normal slice
|
||||
unsafe {
|
||||
&*(ptr as *const Self)
|
||||
@@ -33,7 +33,7 @@ impl<I: Idx, T> IndexSlice<I, T> {
|
||||
pub fn from_raw_mut(raw: &mut [T]) -> &mut Self {
|
||||
let ptr: *mut [T] = raw;
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
// SAFETY: `IndexSlice` is `repr(transparent)` over a normal slice
|
||||
unsafe {
|
||||
&mut *(ptr as *mut Self)
|
||||
@@ -209,5 +209,5 @@ impl<I: Idx, T> Default for &mut IndexSlice<I, T> {
|
||||
|
||||
// Whether `IndexSlice` is `Send` depends only on the data,
|
||||
// not the phantom data.
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
unsafe impl<I: Idx, T> Send for IndexSlice<I, T> where T: Send {}
|
||||
|
||||
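
The `unsafe` casts above are sound because `IndexSlice` is `repr(transparent)` over a plain slice. A minimal standalone sketch of the same newtype-slice cast pattern:

```rust
#[repr(transparent)]
struct Wrapper([u8]);

impl Wrapper {
    // SAFETY: `Wrapper` is `repr(transparent)` over `[u8]`, so the layouts
    // match and the pointer cast is sound.
    fn from_raw(raw: &[u8]) -> &Self {
        let ptr: *const [u8] = raw;
        unsafe { &*(ptr as *const Self) }
    }
}

fn main() {
    let bytes = [1u8, 2, 3];
    let wrapped = Wrapper::from_raw(&bytes);
    assert_eq!(wrapped.0.len(), 3);
    println!("ok");
}
```
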
@@ -179,16 +179,16 @@ impl<I: Idx, T, const N: usize> From<[T; N]> for IndexVec<I, T> {
|
||||
|
||||
// Whether `IndexVec` is `Send` depends only on the data,
|
||||
// not the phantom data.
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
unsafe impl<I: Idx, T> Send for IndexVec<I, T> where T: Send {}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
#[cfg(feature = "salsa")]
|
||||
unsafe impl<I, T> salsa::Update for IndexVec<I, T>
|
||||
where
|
||||
T: salsa::Update,
|
||||
{
|
||||
#[allow(unsafe_code)]
|
||||
#[expect(unsafe_code)]
|
||||
unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
|
||||
let old_vec: &mut IndexVec<I, T> = unsafe { &mut *old_pointer };
|
||||
salsa::Update::maybe_update(&mut old_vec.raw, new_value.raw)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.11.6"
|
||||
version = "0.11.8"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
@@ -76,6 +76,7 @@ insta = { workspace = true, features = ["filters", "json", "redactions"] }
|
||||
test-case = { workspace = true }
|
||||
# Disable colored output in tests
|
||||
colored = { workspace = true, features = ["no-color"] }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
@@ -9,9 +9,7 @@ from airflow import (
|
||||
PY311,
|
||||
PY312,
|
||||
)
|
||||
from airflow.api_connexion.security import requires_access, requires_access_dataset
|
||||
from airflow.auth.managers.base_auth_manager import is_authorized_dataset
|
||||
from airflow.auth.managers.models.resource_details import DatasetDetails
|
||||
from airflow.api_connexion.security import requires_access
|
||||
from airflow.configuration import (
|
||||
as_dict,
|
||||
get,
|
||||
@@ -24,32 +22,13 @@ from airflow.configuration import (
|
||||
)
|
||||
from airflow.contrib.aws_athena_hook import AWSAthenaHook
|
||||
from airflow.datasets import DatasetAliasEvent
|
||||
from airflow.datasets.manager import (
|
||||
DatasetManager,
|
||||
dataset_manager,
|
||||
resolve_dataset_manager,
|
||||
)
|
||||
from airflow.hooks.base_hook import BaseHook
|
||||
from airflow.lineage.hook import DatasetLineageInfo
|
||||
from airflow.listeners.spec.dataset import on_dataset_changed, on_dataset_created
from airflow.metrics.validators import AllowListValidator, BlockListValidator
from airflow.operators.subdag import SubDagOperator
from airflow.providers.amazon.aws.auth_manager.avp.entities import AvpEntities
from airflow.providers.amazon.aws.datasets import s3
from airflow.providers.common.io.datasets import file as common_io_file
from airflow.providers.fab.auth_manager import fab_auth_manager
from airflow.providers.google.datasets import bigquery, gcs
from airflow.providers.mysql.datasets import mysql
from airflow.providers.openlineage.utils.utils import (
    DatasetInfo,
    translate_airflow_dataset,
)
from airflow.providers.postgres.datasets import postgres
from airflow.providers.trino.datasets import trino
from airflow.secrets.local_filesystem import LocalFilesystemBackend, load_connections
from airflow.security.permissions import RESOURCE_DATASET
from airflow.secrets.local_filesystem import LocalFilesystemBackend
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.timetables.simple import DatasetTriggeredTimetable
from airflow.triggers.external_task import TaskStateTrigger
from airflow.utils import dates
from airflow.utils.dag_cycle_tester import test_cycle
@@ -70,19 +49,15 @@ from airflow.utils.helpers import cross_downstream as helper_cross_downstream
from airflow.utils.log import secrets_masker
from airflow.utils.state import SHUTDOWN, terminating_states
from airflow.utils.trigger_rule import TriggerRule
from airflow.www.auth import has_access, has_access_dataset
from airflow.www.auth import has_access
from airflow.www.utils import get_sensitive_variables_fields, should_hide_value_for_key

# airflow root
PY36, PY37, PY38, PY39, PY310, PY311, PY312
DatasetFromRoot()

# airflow.api_connexion.security
requires_access, requires_access_dataset
requires_access

# airflow.auth.managers
is_authorized_dataset
DatasetDetails()

# airflow.configuration
get, getboolean, getfloat, getint, has_option, remove_option, as_dict, set
@@ -95,76 +70,17 @@ AWSAthenaHook()
# airflow.datasets
DatasetAliasEvent()

# airflow.datasets.manager
DatasetManager()
dataset_manager
resolve_dataset_manager

# airflow.hooks
BaseHook()

# airflow.lineage.hook
DatasetLineageInfo()

# airflow.listeners.spec.dataset
on_dataset_changed
on_dataset_created

# airflow.metrics.validators
AllowListValidator()
BlockListValidator()


# airflow.operators.branch_operator
BaseBranchOperator()

# airflow.operators.dagrun_operator
TriggerDagRunLink()
TriggerDagRunOperator()

# airflow.operators.email_operator
EmailOperator()

# airflow.operators.latest_only_operator
LatestOnlyOperator()

# airflow.operators.python_operator
BranchPythonOperator()
PythonOperator()
PythonVirtualenvOperator()
ShortCircuitOperator()

# airflow.operators.subdag.*
SubDagOperator()

# airflow.providers.amazon
AvpEntities.DATASET
s3.create_dataset
s3.convert_dataset_to_openlineage
s3.sanitize_uri

# airflow.providers.common.io
common_io_file.convert_dataset_to_openlineage
common_io_file.create_dataset
common_io_file.sanitize_uri

# airflow.providers.fab
fab_auth_manager.is_authorized_dataset

# airflow.providers.google
bigquery.sanitize_uri

gcs.create_dataset
gcs.sanitize_uri
gcs.convert_dataset_to_openlineage

# airflow.providers.mysql
mysql.sanitize_uri

# airflow.providers.openlineage
DatasetInfo()
translate_airflow_dataset

# airflow.providers.postgres
postgres.sanitize_uri

@@ -174,18 +90,12 @@ trino.sanitize_uri
# airflow.secrets
# get_connection
LocalFilesystemBackend()
load_connections

# airflow.security.permissions
RESOURCE_DATASET

# airflow.sensors.base_sensor_operator
BaseSensorOperator()


# airflow.timetables
DatasetTriggeredTimetable()

# airflow.triggers.external_task
TaskStateTrigger()

@@ -233,10 +143,15 @@ terminating_states
TriggerRule.DUMMY
TriggerRule.NONE_FAILED_OR_SKIPPED


# airflow.www.auth
has_access
has_access_dataset

# airflow.www.utils
get_sensitive_variables_fields
should_hide_value_for_key

# airflow.operators.python
from airflow.operators.python import get_current_context

get_current_context()

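Note: AIR301 flags Airflow 2.x "dataset" names that Airflow 3 renames to "asset" equivalents. A minimal before/after sketch of the core rename; the airflow.sdk path matches the compatibility pattern used in the fixtures below, and the pairing is illustrative rather than an exhaustive mapping:

# Airflow 2.x spelling (what AIR301 flags)
from airflow.datasets import Dataset

# Airflow 3 spelling (Dataset was renamed to Asset)
from airflow.sdk import Asset
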
45 crates/ruff_linter/resources/test/fixtures/airflow/AIR301_names_fix.py vendored Normal file
@@ -0,0 +1,45 @@
from __future__ import annotations

from airflow.api_connexion.security import requires_access_dataset
from airflow.auth.managers.models.resource_details import (
    DatasetDetails,
    is_authorized_dataset,
)
from airflow.datasets.manager import (
    DatasetManager,
    dataset_manager,
    resolve_dataset_manager,
)
from airflow.lineage.hook import DatasetLineageInfo
from airflow.metrics.validators import AllowListValidator, BlockListValidator
from airflow.secrets.local_filesystem import load_connections
from airflow.security.permissions import RESOURCE_DATASET
from airflow.www.auth import has_access_dataset

requires_access_dataset()

DatasetDetails()
is_authorized_dataset()

DatasetManager()
dataset_manager()
resolve_dataset_manager()

DatasetLineageInfo()

AllowListValidator()
BlockListValidator()

load_connections()

RESOURCE_DATASET

has_access_dataset()

from airflow.listeners.spec.dataset import (
    on_dataset_changed,
    on_dataset_created,
)

on_dataset_created()
on_dataset_changed()

@@ -6,3 +6,12 @@ except ModuleNotFoundError:
    from airflow.datasets import Dataset as Asset

Asset

try:
    from airflow.sdk import Asset
except ModuleNotFoundError:
    from airflow import datasets

    Asset = datasets.Dataset

asset = Asset()

54 crates/ruff_linter/resources/test/fixtures/airflow/AIR301_provider_names_fix.py vendored Normal file
@@ -0,0 +1,54 @@
from __future__ import annotations

from airflow.providers.amazon.aws.auth_manager.avp.entities.AvpEntities import DATASET
from airflow.providers.amazon.aws.datasets.s3 import (
    convert_dataset_to_openlineage as s3_convert_dataset_to_openlineage,
)
from airflow.providers.amazon.aws.datasets.s3 import create_dataset as s3_create_dataset
from airflow.providers.common.io.dataset.file import (
    convert_dataset_to_openlineage as io_convert_dataset_to_openlineage,
)
from airflow.providers.common.io.dataset.file import create_dataset as io_create_dataset
from airflow.providers.fab.auth_manager.fab_auth_manager import is_authorized_dataset as fab_is_authorized_dataset
from airflow.providers.google.datasets.bigquery import (
    create_dataset as bigquery_create_dataset,
)
from airflow.providers.google.datasets.gcs import (
    convert_dataset_to_openlineage as gcs_convert_dataset_to_openlineage,
)
from airflow.providers.google.datasets.gcs import create_dataset as gcs_create_dataset
from airflow.providers.openlineage.utils.utils import (
    DatasetInfo,
    translate_airflow_dataset,
)

DATASET

s3_create_dataset()
s3_convert_dataset_to_openlineage()

io_create_dataset()
io_convert_dataset_to_openlineage()

fab_is_authorized_dataset()

# airflow.providers.google.datasets.bigquery
bigquery_create_dataset()
# airflow.providers.google.datasets.gcs
gcs_create_dataset()
gcs_convert_dataset_to_openlineage()
# airflow.providers.openlineage.utils.utils
DatasetInfo()
translate_airflow_dataset()
#
# airflow.secrets.local_filesystem
load_connections()
#
# airflow.security.permissions
RESOURCE_DATASET

# airflow.timetables
DatasetTriggeredTimetable()
#
# airflow.www.auth
has_access_dataset

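Note: the provider names in this fixture follow the same dataset-to-asset rename. A hedged sketch of the direction of the fix, assuming the amazon provider ships an assets module mirroring its datasets module (the create_asset name is an assumption, not a verified signature):

# Before: dataset helper flagged by AIR301
from airflow.providers.amazon.aws.datasets.s3 import create_dataset

# After (assumed provider layout): the asset equivalent
from airflow.providers.amazon.aws.assets.s3 import create_asset
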
@@ -1,481 +0,0 @@
from __future__ import annotations

from airflow.api.auth.backend import basic_auth, kerberos_auth
from airflow.api.auth.backend.basic_auth import auth_current_user
from airflow.auth.managers.fab.api.auth.backend import (
    kerberos_auth as backend_kerberos_auth,
)
from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
from airflow.auth.managers.fab.security_manager import override as fab_override
from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
from airflow.executors.celery_executor import CeleryExecutor, app
from airflow.executors.celery_kubernetes_executor import CeleryKubernetesExecutor
from airflow.executors.dask_executor import DaskExecutor
from airflow.executors.kubernetes_executor_types import (
    ALL_NAMESPACES,
    POD_EXECUTOR_DONE_KEY,
)
from airflow.hooks.dbapi import ConnectorProtocol, DbApiHook
from airflow.hooks.dbapi_hook import DbApiHook as DbApiHook2
from airflow.hooks.docker_hook import DockerHook
from airflow.hooks.druid_hook import DruidDbApiHook, DruidHook
from airflow.hooks.filesystem import FSHook
from airflow.hooks.hive_hooks import (
    HIVE_QUEUE_PRIORITIES,
    HiveCliHook,
    HiveMetastoreHook,
    HiveServer2Hook,
)
from airflow.hooks.http_hook import HttpHook
from airflow.hooks.jdbc_hook import JdbcHook, jaydebeapi
from airflow.hooks.mssql_hook import MsSqlHook
from airflow.hooks.mysql_hook import MySqlHook
from airflow.hooks.oracle_hook import OracleHook
from airflow.hooks.package_index import PackageIndexHook
from airflow.hooks.pig_hook import PigCliHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.hooks.presto_hook import PrestoHook
from airflow.hooks.S3_hook import S3Hook, provide_bucket_name
from airflow.hooks.samba_hook import SambaHook
from airflow.hooks.slack_hook import SlackHook
from airflow.hooks.sqlite_hook import SqliteHook
from airflow.hooks.subprocess import SubprocessHook, SubprocessResult, working_directory
from airflow.hooks.webhdfs_hook import WebHDFSHook
from airflow.hooks.zendesk_hook import ZendeskHook
from airflow.kubernetes.k8s_model import K8SModel, append_to_pod
from airflow.kubernetes.kube_client import (
    _disable_verify_ssl,
    _enable_tcp_keepalive,
    get_kube_client,
)
from airflow.kubernetes.kubernetes_helper_functions import (
    add_pod_suffix,
    annotations_for_logging_task_metadata,
    annotations_to_key,
    create_pod_id,
    get_logs_task_metadata,
    rand_str,
)
from airflow.kubernetes.pod import Port, Resources
from airflow.kubernetes.pod_generator import (
    PodDefaults,
    PodGenerator,
    PodGeneratorDeprecated,
    datetime_to_label_safe_datestring,
    extend_object_field,
    label_safe_datestring_to_datetime,
    make_safe_label_value,
    merge_objects,
)
from airflow.kubernetes.pod_generator import (
    add_pod_suffix as add_pod_suffix2,
)
from airflow.kubernetes.pod_generator import (
    rand_str as rand_str2,
)
from airflow.kubernetes.pod_generator_deprecated import (
    PodDefaults as PodDefaults3,
)
from airflow.kubernetes.pod_generator_deprecated import (
    PodGenerator as PodGenerator2,
)
from airflow.kubernetes.pod_generator_deprecated import (
    make_safe_label_value as make_safe_label_value2,
)
from airflow.kubernetes.pod_launcher import PodLauncher, PodStatus
from airflow.kubernetes.pod_launcher_deprecated import (
    PodDefaults as PodDefaults2,
)
from airflow.kubernetes.pod_launcher_deprecated import (
    PodLauncher as PodLauncher2,
)
from airflow.kubernetes.pod_launcher_deprecated import (
    PodStatus as PodStatus2,
)
from airflow.kubernetes.pod_launcher_deprecated import (
    get_kube_client as get_kube_client2,
)
from airflow.kubernetes.pod_runtime_info_env import PodRuntimeInfoEnv
from airflow.kubernetes.secret import K8SModel2, Secret
from airflow.kubernetes.volume import Volume
from airflow.kubernetes.volume_mount import VolumeMount
from airflow.macros.hive import closest_ds_partition, max_partition
from airflow.operators.bash import BashOperator
from airflow.operators.bash_operator import BashOperator as LegacyBashOperator
from airflow.operators.check_operator import (
    CheckOperator,
    IntervalCheckOperator,
    SQLCheckOperator,
    SQLIntervalCheckOperator,
    SQLThresholdCheckOperator,
    SQLValueCheckOperator,
    ThresholdCheckOperator,
    ValueCheckOperator,
)
from airflow.operators.datetime import BranchDateTimeOperator, target_times_as_dates
from airflow.operators.docker_operator import DockerOperator
from airflow.operators.druid_check_operator import DruidCheckOperator
from airflow.operators.dummy import DummyOperator, EmptyOperator
from airflow.operators.email import EmailOperator
from airflow.operators.email_operator import EmailOperator
from airflow.operators.gcs_to_s3 import GCSToS3Operator
from airflow.operators.google_api_to_s3_transfer import (
    GoogleApiToS3Operator,
    GoogleApiToS3Transfer,
)
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
from airflow.operators.hive_to_druid import HiveToDruidOperator, HiveToDruidTransfer
from airflow.operators.hive_to_mysql import HiveToMySqlOperator, HiveToMySqlTransfer
from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mssql_to_hive import MsSqlToHiveOperator, MsSqlToHiveTransfer
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.mysql_to_hive import MySqlToHiveOperator, MySqlToHiveTransfer
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.papermill_operator import PapermillOperator
from airflow.operators.pig_operator import PigOperator
from airflow.operators.postgres_operator import Mapping, PostgresOperator
from airflow.operators.presto_check_operator import (
    PrestoCheckOperator,
    PrestoIntervalCheckOperator,
    PrestoValueCheckOperator,
)
from airflow.operators.presto_check_operator import (
    SQLCheckOperator as SQLCheckOperator2,
)
from airflow.operators.presto_check_operator import (
    SQLIntervalCheckOperator as SQLIntervalCheckOperator2,
)
from airflow.operators.presto_check_operator import (
    SQLValueCheckOperator as SQLValueCheckOperator2,
)
from airflow.operators.presto_to_mysql import (
    PrestoToMySqlOperator,
    PrestoToMySqlTransfer,
)
from airflow.operators.python import (
    BranchPythonOperator,
    PythonOperator,
    PythonVirtualenvOperator,
    ShortCircuitOperator,
)
from airflow.operators.redshift_to_s3_operator import (
    RedshiftToS3Operator,
    RedshiftToS3Transfer,
)
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.operators.s3_to_hive_operator import S3ToHiveOperator, S3ToHiveTransfer
from airflow.operators.s3_to_redshift_operator import (
    S3ToRedshiftOperator,
    S3ToRedshiftTransfer,
)
from airflow.operators.slack_operator import SlackAPIOperator, SlackAPIPostOperator
from airflow.operators.sql import (
    BaseSQLOperator,
    BranchSQLOperator,
    SQLTableCheckOperator,
    _convert_to_float_if_possible,
    parse_boolean,
)
from airflow.operators.sql import (
    SQLCheckOperator as SQLCheckOperator3,
)
from airflow.operators.sql import (
    SQLColumnCheckOperator as SQLColumnCheckOperator2,
)
from airflow.operators.sql import (
    SQLIntervalCheckOperator as SQLIntervalCheckOperator3,
)
from airflow.operators.sql import (
    SQLThresholdCheckOperator as SQLThresholdCheckOperator2,
)
from airflow.operators.sql import (
    SQLValueCheckOperator as SQLValueCheckOperator3,
)
from airflow.operators.sqlite_operator import SqliteOperator
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.weekday import BranchDayOfWeekOperator
from airflow.sensors import filesystem
from airflow.sensors.date_time import DateTimeSensor, DateTimeSensorAsync
from airflow.sensors.date_time_sensor import DateTimeSensor
from airflow.sensors.external_task import (
    ExternalTaskMarker,
    ExternalTaskSensor,
    ExternalTaskSensorLink,
)
from airflow.sensors.filesystem import FileSensor
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
from airflow.sensors.http_sensor import HttpSensor
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor
from airflow.sensors.sql import SqlSensor
from airflow.sensors.sql_sensor import SqlSensor2
from airflow.sensors.time_delta import TimeDeltaSensor, TimeDeltaSensorAsync, WaitSensor
from airflow.sensors.time_sensor import TimeSensor, TimeSensorAsync
from airflow.sensors.web_hdfs_sensor import WebHdfsSensor
from airflow.sensors.weekday import DayOfWeekSensor
from airflow.triggers.external_task import DagStateTrigger, WorkflowTrigger
from airflow.triggers.file import FileTrigger
from airflow.triggers.temporal import DateTimeTrigger, TimeDeltaTrigger
from airflow.www.security import FabAirflowSecurityManagerOverride

# apache-airflow-providers-amazon
provide_bucket_name()
GCSToS3Operator()
GoogleApiToS3Operator()
GoogleApiToS3Transfer()
RedshiftToS3Operator()
RedshiftToS3Transfer()
S3FileTransformOperator()
S3Hook()
S3KeySensor()
S3ToRedshiftOperator()
S3ToRedshiftTransfer()

# apache-airflow-providers-celery
DEFAULT_CELERY_CONFIG
app
CeleryExecutor()
CeleryKubernetesExecutor()

# apache-airflow-providers-common-sql
_convert_to_float_if_possible()
parse_boolean()
BaseSQLOperator()
BashOperator()
LegacyBashOperator()
BranchSQLOperator()
CheckOperator()
ConnectorProtocol()
DbApiHook()
DbApiHook2()
IntervalCheckOperator()
PrestoCheckOperator()
PrestoIntervalCheckOperator()
PrestoValueCheckOperator()
SQLCheckOperator()
SQLCheckOperator2()
SQLCheckOperator3()
SQLColumnCheckOperator2()
SQLIntervalCheckOperator()
SQLIntervalCheckOperator2()
SQLIntervalCheckOperator3()
SQLTableCheckOperator()
SQLThresholdCheckOperator()
SQLThresholdCheckOperator2()
SQLValueCheckOperator()
SQLValueCheckOperator2()
SQLValueCheckOperator3()
SqlSensor()
SqlSensor2()
ThresholdCheckOperator()
ValueCheckOperator()

# apache-airflow-providers-daskexecutor
DaskExecutor()

# apache-airflow-providers-docker
DockerHook()
DockerOperator()

# apache-airflow-providers-apache-druid
DruidDbApiHook()
DruidHook()
DruidCheckOperator()

# apache-airflow-providers-apache-hdfs
WebHDFSHook()
WebHdfsSensor()

# apache-airflow-providers-apache-hive
HIVE_QUEUE_PRIORITIES
closest_ds_partition()
max_partition()
HiveCliHook()
HiveMetastoreHook()
HiveOperator()
HivePartitionSensor()
HiveServer2Hook()
HiveStatsCollectionOperator()
HiveToDruidOperator()
HiveToDruidTransfer()
HiveToSambaOperator()
S3ToHiveOperator()
S3ToHiveTransfer()
MetastorePartitionSensor()
NamedHivePartitionSensor()

# apache-airflow-providers-http
HttpHook()
HttpSensor()
SimpleHttpOperator()

# apache-airflow-providers-jdbc
jaydebeapi
JdbcHook()
JdbcOperator()

# apache-airflow-providers-fab
basic_auth.CLIENT_AUTH
basic_auth.init_app
basic_auth.auth_current_user
basic_auth.requires_authentication

kerberos_auth.log
kerberos_auth.CLIENT_AUTH
kerberos_auth.find_user
kerberos_auth.init_app
kerberos_auth.requires_authentication
auth_current_user
backend_kerberos_auth
fab_override
FabAuthManager()
FabAirflowSecurityManagerOverride()

# check whether attribute access
basic_auth.auth_current_user

# apache-airflow-providers-cncf-kubernetes
ALL_NAMESPACES
POD_EXECUTOR_DONE_KEY
_disable_verify_ssl()
_enable_tcp_keepalive()
append_to_pod()
annotations_for_logging_task_metadata()
annotations_to_key()
create_pod_id()
datetime_to_label_safe_datestring()
extend_object_field()
get_logs_task_metadata()
label_safe_datestring_to_datetime()
merge_objects()
Port()
Resources()
PodRuntimeInfoEnv()
PodGeneratorDeprecated()
Volume()
VolumeMount()
Secret()

add_pod_suffix()
add_pod_suffix2()
get_kube_client()
get_kube_client2()
make_safe_label_value()
make_safe_label_value2()
rand_str()
rand_str2()
K8SModel()
K8SModel2()
PodLauncher()
PodLauncher2()
PodStatus()
PodStatus2()
PodDefaults()
PodDefaults2()
PodDefaults3()
PodGenerator()
PodGenerator2()


# apache-airflow-providers-microsoft-mssql
MsSqlHook()
MsSqlOperator()
MsSqlToHiveOperator()
MsSqlToHiveTransfer()

# apache-airflow-providers-mysql
HiveToMySqlOperator()
HiveToMySqlTransfer()
MySqlHook()
MySqlOperator()
MySqlToHiveOperator()
MySqlToHiveTransfer()
PrestoToMySqlOperator()
PrestoToMySqlTransfer()

# apache-airflow-providers-oracle
OracleHook()
OracleOperator()

# apache-airflow-providers-papermill
PapermillOperator()

# apache-airflow-providers-apache-pig
PigCliHook()
PigOperator()

# apache-airflow-providers-postgres
Mapping
PostgresHook()
PostgresOperator()

# apache-airflow-providers-presto
PrestoHook()

# apache-airflow-providers-samba
SambaHook()

# apache-airflow-providers-slack
SlackHook()
SlackAPIOperator()
SlackAPIPostOperator()

# apache-airflow-providers-sqlite
SqliteHook()
SqliteOperator()

# apache-airflow-providers-zendesk
ZendeskHook()

# apache-airflow-providers-smtp
EmailOperator()

# apache-airflow-providers-standard
filesystem.FileSensor()
FileSensor()
TriggerDagRunOperator()
ExternalTaskMarker()
ExternalTaskSensor()
BranchDateTimeOperator()
BranchDayOfWeekOperator()
BranchPythonOperator()
DateTimeSensor()
DateTimeSensorAsync()
TimeSensor()
TimeDeltaSensor()
DayOfWeekSensor()
DummyOperator()
EmptyOperator()
ExternalTaskMarker()
ExternalTaskSensor()
ExternalTaskSensorLink()
FileSensor()
FileTrigger()
FSHook()
PackageIndexHook()
SubprocessHook()
ShortCircuitOperator()
TimeDeltaSensor()
TimeSensor()
TriggerDagRunOperator()
WorkflowTrigger()
PythonOperator()
PythonVirtualenvOperator()
DagStateTrigger()
FileTrigger()
DateTimeTrigger()
TimeDeltaTrigger()
SubprocessResult()
SubprocessHook()
TimeDeltaSensor()
TimeDeltaSensorAsync()
WaitSensor()
TimeSensor()
TimeSensorAsync()
BranchDateTimeOperator()
working_directory()
target_times_as_dates()

39 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_amazon.py vendored Normal file
@@ -0,0 +1,39 @@
from __future__ import annotations

from airflow.hooks.S3_hook import (
    S3Hook,
    provide_bucket_name,
)
from airflow.operators.gcs_to_s3 import GCSToS3Operator
from airflow.operators.google_api_to_s3_transfer import (
    GoogleApiToS3Operator,
    GoogleApiToS3Transfer,
)
from airflow.operators.redshift_to_s3_operator import (
    RedshiftToS3Operator,
    RedshiftToS3Transfer,
)
from airflow.operators.s3_file_transform_operator import S3FileTransformOperator
from airflow.operators.s3_to_redshift_operator import (
    S3ToRedshiftOperator,
    S3ToRedshiftTransfer,
)
from airflow.sensors.s3_key_sensor import S3KeySensor

S3Hook()
provide_bucket_name()

GCSToS3Operator()

GoogleApiToS3Operator()
GoogleApiToS3Transfer()

RedshiftToS3Operator()
RedshiftToS3Transfer()

S3FileTransformOperator()

S3ToRedshiftOperator()
S3ToRedshiftTransfer()

S3KeySensor()

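Note: AIR302 flags these core-Airflow paths because the functionality moved into apache-airflow-providers-amazon. A minimal sketch of the replacement imports, assuming a recent amazon provider (module paths can shift between provider versions):

# Provider-package equivalents of the deprecated core paths above;
# verify against the installed provider version.
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.amazon.aws.sensors.s3 import S3KeySensor
from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator

S3Hook()
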
12 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_celery.py vendored Normal file
@@ -0,0 +1,12 @@
from __future__ import annotations

from airflow.config_templates.default_celery import DEFAULT_CELERY_CONFIG
from airflow.executors.celery_executor import (
    CeleryExecutor,
    app,
)

DEFAULT_CELERY_CONFIG

app
CeleryExecutor()

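Note: the Celery executor now lives in apache-airflow-providers-celery. A minimal sketch of the replacement imports, assuming a recent celery provider:

# Provider-package homes for the names exercised above.
from airflow.providers.celery.executors.celery_executor import CeleryExecutor
from airflow.providers.celery.executors.default_celery import DEFAULT_CELERY_CONFIG

CeleryExecutor()
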
129 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_common_sql.py vendored Normal file
@@ -0,0 +1,129 @@
from __future__ import annotations

from airflow.hooks.dbapi import (
    ConnectorProtocol,
    DbApiHook,
)
from airflow.hooks.dbapi_hook import DbApiHook
from airflow.operators.check_operator import SQLCheckOperator

ConnectorProtocol()
DbApiHook()
SQLCheckOperator()


from airflow.operators.check_operator import CheckOperator
from airflow.operators.sql import SQLCheckOperator

SQLCheckOperator()
CheckOperator()


from airflow.operators.druid_check_operator import CheckOperator

CheckOperator()


from airflow.operators.presto_check_operator import CheckOperator

CheckOperator()


from airflow.operators.check_operator import (
    IntervalCheckOperator,
    SQLIntervalCheckOperator,
)
from airflow.operators.druid_check_operator import DruidCheckOperator
from airflow.operators.presto_check_operator import PrestoCheckOperator

DruidCheckOperator()
PrestoCheckOperator()
IntervalCheckOperator()
SQLIntervalCheckOperator()


from airflow.operators.presto_check_operator import (
    IntervalCheckOperator,
    PrestoIntervalCheckOperator,
)
from airflow.operators.sql import SQLIntervalCheckOperator

IntervalCheckOperator()
SQLIntervalCheckOperator()
PrestoIntervalCheckOperator()


from airflow.operators.check_operator import (
    SQLThresholdCheckOperator,
    ThresholdCheckOperator,
)

SQLThresholdCheckOperator()
ThresholdCheckOperator()


from airflow.operators.sql import SQLThresholdCheckOperator

SQLThresholdCheckOperator()


from airflow.operators.check_operator import (
    SQLValueCheckOperator,
    ValueCheckOperator,
)

SQLValueCheckOperator()
ValueCheckOperator()


from airflow.operators.presto_check_operator import (
    PrestoValueCheckOperator,
    ValueCheckOperator,
)
from airflow.operators.sql import SQLValueCheckOperator

SQLValueCheckOperator()
ValueCheckOperator()
PrestoValueCheckOperator()


from airflow.operators.sql import (
    BaseSQLOperator,
    BranchSQLOperator,
    SQLColumnCheckOperator,
    SQLTableCheckOperator,
    _convert_to_float_if_possible,
    parse_boolean,
)

BaseSQLOperator()
BranchSQLOperator()
SQLTableCheckOperator()
SQLColumnCheckOperator()
_convert_to_float_if_possible()
parse_boolean()


from airflow.sensors.sql import SqlSensor

SqlSensor()


from airflow.sensors.sql_sensor import SqlSensor

SqlSensor()


from airflow.operators.jdbc_operator import JdbcOperator
from airflow.operators.mssql_operator import MsSqlOperator
from airflow.operators.mysql_operator import MySqlOperator
from airflow.operators.oracle_operator import OracleOperator
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.sqlite_operator import SqliteOperator

JdbcOperator()
MsSqlOperator()
MySqlOperator()
OracleOperator()
PostgresOperator()
SqliteOperator()

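Note: these shims all resolve to apache-airflow-providers-common-sql. A minimal sketch of the modern imports; the class names stay the same and only the import path changes:

# Provider-package homes for the common-sql names exercised above.
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.common.sql.operators.sql import (
    SQLCheckOperator,
    SQLTableCheckOperator,
)
from airflow.providers.common.sql.sensors.sql import SqlSensor
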
5 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_daskexecutor.py vendored Normal file
@@ -0,0 +1,5 @@
from __future__ import annotations

from airflow.executors.dask_executor import DaskExecutor

DaskExecutor()

7 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_docker.py vendored Normal file
@@ -0,0 +1,7 @@
from __future__ import annotations

from airflow.hooks.docker_hook import DockerHook
from airflow.operators.docker_operator import DockerOperator

DockerHook()
DockerOperator()

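Note: the Docker hook and operator moved to apache-airflow-providers-docker. Replacement imports, assuming a recent docker provider:

from airflow.providers.docker.hooks.docker import DockerHook
from airflow.providers.docker.operators.docker import DockerOperator
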
16 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_druid.py vendored Normal file
@@ -0,0 +1,16 @@
from __future__ import annotations

from airflow.hooks.druid_hook import (
    DruidDbApiHook,
    DruidHook,
)
from airflow.operators.hive_to_druid import (
    HiveToDruidOperator,
    HiveToDruidTransfer,
)

DruidDbApiHook()
DruidHook()

HiveToDruidOperator()
HiveToDruidTransfer()

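Note: the Druid pieces moved to apache-airflow-providers-apache-druid. A sketch of the modern imports (the transfers path follows the usual provider convention; verify against the installed version):

from airflow.providers.apache.druid.hooks.druid import DruidDbApiHook, DruidHook
from airflow.providers.apache.druid.transfers.hive_to_druid import HiveToDruidOperator
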
55 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_fab.py vendored Normal file
@@ -0,0 +1,55 @@
from __future__ import annotations

from airflow.api.auth.backend.basic_auth import (
    CLIENT_AUTH,
    auth_current_user,
    init_app,
    requires_authentication,
)

CLIENT_AUTH
init_app()
auth_current_user()
requires_authentication()

from airflow.api.auth.backend.kerberos_auth import (
    CLIENT_AUTH,
    find_user,
    init_app,
    log,
    requires_authentication,
)

log()
CLIENT_AUTH
find_user()
init_app()
requires_authentication()

from airflow.auth.managers.fab.api.auth.backend.kerberos_auth import (
    CLIENT_AUTH,
    find_user,
    init_app,
    log,
    requires_authentication,
)

log()
CLIENT_AUTH
find_user()
init_app()
requires_authentication()

from airflow.auth.managers.fab.fab_auth_manager import FabAuthManager
from airflow.auth.managers.fab.security_manager.override import (
    MAX_NUM_DATABASE_USER_SESSIONS,
    FabAirflowSecurityManagerOverride,
)

FabAuthManager()
MAX_NUM_DATABASE_USER_SESSIONS
FabAirflowSecurityManagerOverride()

from airflow.www.security import FabAirflowSecurityManagerOverride

FabAirflowSecurityManagerOverride()

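Note: the Flask-AppBuilder auth manager moved to apache-airflow-providers-fab. A sketch of the modern imports, assuming a recent fab provider:

from airflow.providers.fab.auth_manager.fab_auth_manager import FabAuthManager
from airflow.providers.fab.auth_manager.security_manager.override import (
    FabAirflowSecurityManagerOverride,
)
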
7 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_hdfs.py vendored Normal file
@@ -0,0 +1,7 @@
from __future__ import annotations

from airflow.hooks.webhdfs_hook import WebHDFSHook
from airflow.sensors.web_hdfs_sensor import WebHdfsSensor

WebHDFSHook()
WebHdfsSensor()

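Note: the WebHDFS hook and sensor moved to apache-airflow-providers-apache-hdfs. A sketch of the modern imports (module names assumed from the provider's usual layout):

from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor
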
66 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_hive.py vendored Normal file
@@ -0,0 +1,66 @@
from __future__ import annotations

from airflow.hooks.hive_hooks import (
    HIVE_QUEUE_PRIORITIES,
    HiveCliHook,
    HiveMetastoreHook,
    HiveServer2Hook,
)
from airflow.macros.hive import (
    closest_ds_partition,
    max_partition,
)
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.hive_stats_operator import HiveStatsCollectionOperator
from airflow.operators.hive_to_mysql import (
    HiveToMySqlOperator,
    HiveToMySqlTransfer,
)
from airflow.operators.hive_to_samba_operator import HiveToSambaOperator
from airflow.operators.mssql_to_hive import (
    MsSqlToHiveOperator,
    MsSqlToHiveTransfer,
)
from airflow.operators.mysql_to_hive import (
    MySqlToHiveOperator,
    MySqlToHiveTransfer,
)
from airflow.operators.s3_to_hive_operator import (
    S3ToHiveOperator,
    S3ToHiveTransfer,
)
from airflow.sensors.hive_partition_sensor import HivePartitionSensor
from airflow.sensors.metastore_partition_sensor import MetastorePartitionSensor
from airflow.sensors.named_hive_partition_sensor import NamedHivePartitionSensor

closest_ds_partition()
max_partition()

HiveCliHook()
HiveMetastoreHook()
HiveServer2Hook()
HIVE_QUEUE_PRIORITIES

HiveOperator()

HiveStatsCollectionOperator()

HiveToMySqlOperator()
HiveToMySqlTransfer()

HiveToSambaOperator()

MsSqlToHiveOperator()
MsSqlToHiveTransfer()

MySqlToHiveOperator()
MySqlToHiveTransfer()

S3ToHiveOperator()
S3ToHiveTransfer()

HivePartitionSensor()

MetastorePartitionSensor()

NamedHivePartitionSensor()

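Note: everything here maps to apache-airflow-providers-apache-hive. A minimal sketch of the replacement imports:

from airflow.providers.apache.hive.hooks.hive import (
    HiveCliHook,
    HiveMetastoreHook,
    HiveServer2Hook,
)
from airflow.providers.apache.hive.operators.hive import HiveOperator
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
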
9 crates/ruff_linter/resources/test/fixtures/airflow/AIR302_http.py vendored Normal file
@@ -0,0 +1,9 @@
from __future__ import annotations

from airflow.hooks.http_hook import HttpHook
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.sensors.http_sensor import HttpSensor

HttpHook()
SimpleHttpOperator()
HttpSensor()

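Note: the HTTP hook, operator, and sensor moved to apache-airflow-providers-http. Replacement imports:

from airflow.providers.http.hooks.http import HttpHook
from airflow.providers.http.operators.http import SimpleHttpOperator
from airflow.providers.http.sensors.http import HttpSensor
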
Some files were not shown because too many files have changed in this diff.