Compare commits
308 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b0e26e6fc8 | ||
|
|
e9941cd714 | ||
|
|
43bf1a8907 | ||
|
|
fda8b1f884 | ||
|
|
2d3f557875 | ||
|
|
bd27bfab5d | ||
|
|
155d34bbb9 | ||
|
|
04c887c8fc | ||
|
|
af43bd4b0f | ||
|
|
614917769e | ||
|
|
8b23086eac | ||
|
|
948549fcdc | ||
|
|
e67f7f243d | ||
|
|
c617b2a48a | ||
|
|
1685d95ed2 | ||
|
|
575deb5d4d | ||
|
|
edce559431 | ||
|
|
62e358e929 | ||
|
|
81bfcc9899 | ||
|
|
74309008fd | ||
|
|
a255d79087 | ||
|
|
70bd10614f | ||
|
|
bf0fd04e4e | ||
|
|
a69dfd4a74 | ||
|
|
c2e17d0399 | ||
|
|
10fef8bd5d | ||
|
|
246a6df87d | ||
|
|
3e702e12f7 | ||
|
|
91e2d9a139 | ||
|
|
5137fcc9c8 | ||
|
|
83651deac7 | ||
|
|
6dfe125f44 | ||
|
|
f96dfc179f | ||
|
|
76d2e56501 | ||
|
|
30d80d9746 | ||
|
|
5a67d3269b | ||
|
|
02d1e6a94a | ||
|
|
48ec3a8add | ||
|
|
289a938ae8 | ||
|
|
3e5ab6cf38 | ||
|
|
48d33595b9 | ||
|
|
23ee7a954e | ||
|
|
d4a7c098dc | ||
|
|
0c5f03a059 | ||
|
|
239bfb6de7 | ||
|
|
3c3ec6755c | ||
|
|
4c05f2c8b4 | ||
|
|
d594796e3a | ||
|
|
b5ef2844ef | ||
|
|
06183bd8a1 | ||
|
|
4068006c5f | ||
|
|
145c97c94f | ||
|
|
84748be163 | ||
|
|
9e017634cb | ||
|
|
56ae73a925 | ||
|
|
be07424e80 | ||
|
|
579ef01294 | ||
|
|
90487b8cbd | ||
|
|
f3d8c023d3 | ||
|
|
b63c2e126b | ||
|
|
a6402fb51e | ||
|
|
b3b2c982cd | ||
|
|
abb3c6ea95 | ||
|
|
224fe75a76 | ||
|
|
dc29f52750 | ||
|
|
d9cbf2fe44 | ||
|
|
3f6c65e78c | ||
|
|
976c37a849 | ||
|
|
a378ff38dc | ||
|
|
d8bca0d3a2 | ||
|
|
6f1cf5b686 | ||
|
|
8639f8c1a6 | ||
|
|
f1b2e85339 | ||
|
|
6d61c8aa16 | ||
|
|
8a7ba5d2df | ||
|
|
6fcbe8efb4 | ||
|
|
c40b37aa36 | ||
|
|
ef0e2a6e1b | ||
|
|
4fb1416bf4 | ||
|
|
8a860b89b4 | ||
|
|
f96fa6b0e2 | ||
|
|
4cd2b9926e | ||
|
|
11a2929ed7 | ||
|
|
187974eff4 | ||
|
|
14ba469fc0 | ||
|
|
6fd10e2fe7 | ||
|
|
e0f3eaf1dd | ||
|
|
c84c690f1e | ||
|
|
0d649f9afd | ||
|
|
82c01aa662 | ||
|
|
9f446faa6c | ||
|
|
b94d6cf567 | ||
|
|
cd0c97211c | ||
|
|
0e71c9e3bb | ||
|
|
24c90d6953 | ||
|
|
fbff4dec3a | ||
|
|
f3dac27e9a | ||
|
|
e4cefd9bf9 | ||
|
|
9e4ee98109 | ||
|
|
557d583e32 | ||
|
|
f98eebdbab | ||
|
|
c606bf014e | ||
|
|
e8fce20736 | ||
|
|
5a30ec0df6 | ||
|
|
fab1b0d546 | ||
|
|
66abef433b | ||
|
|
fa22bd604a | ||
|
|
0c9165fc3a | ||
|
|
9f6147490b | ||
|
|
b7571c3e24 | ||
|
|
d178d115f3 | ||
|
|
6501782678 | ||
|
|
bca4341dcc | ||
|
|
31ede11774 | ||
|
|
ba9f881687 | ||
|
|
4357a0a3c2 | ||
|
|
c18afa93b3 | ||
|
|
8f04202ee4 | ||
|
|
efe54081d6 | ||
|
|
ac23c99744 | ||
|
|
e5c7d87461 | ||
|
|
de62e39eba | ||
|
|
d285717da8 | ||
|
|
545e9deba3 | ||
|
|
e3d792605f | ||
|
|
1f303a5eb6 | ||
|
|
07d13c6b4a | ||
|
|
e1838aac29 | ||
|
|
4ba847f250 | ||
|
|
13e9fc9362 | ||
|
|
3fda2d17c7 | ||
|
|
931fa06d85 | ||
|
|
e53ac7985d | ||
|
|
e25e7044ba | ||
|
|
b80de52592 | ||
|
|
2917534279 | ||
|
|
f6b2cd5588 | ||
|
|
302fe76c2b | ||
|
|
a90e404c3f | ||
|
|
8358ad8d25 | ||
|
|
2b8b1ef178 | ||
|
|
2efa3fbb62 | ||
|
|
b9da4305e6 | ||
|
|
87043a2415 | ||
|
|
f684b6fff4 | ||
|
|
47f39ed1a0 | ||
|
|
aecdb8c144 | ||
|
|
3c52d2d1bd | ||
|
|
942d6eeb9f | ||
|
|
4ccacc80f9 | ||
|
|
b2bb119c6a | ||
|
|
cef12f4925 | ||
|
|
aa7ac2ce0f | ||
|
|
70d9c90827 | ||
|
|
adfa723464 | ||
|
|
844c07f1f0 | ||
|
|
11d20a1a51 | ||
|
|
e9079e7d95 | ||
|
|
c400725713 | ||
|
|
1081694140 | ||
|
|
52f526eb38 | ||
|
|
dc05b38165 | ||
|
|
8c3c5ee5e3 | ||
|
|
b46cc6ac0b | ||
|
|
8b925ea626 | ||
|
|
1b180c8342 | ||
|
|
afeb217452 | ||
|
|
c0b3dd3745 | ||
|
|
5f6607bf54 | ||
|
|
a6deca44b5 | ||
|
|
0dbceccbc1 | ||
|
|
48680e10b6 | ||
|
|
b0c88a2a42 | ||
|
|
b9c53a74f9 | ||
|
|
6a4d207db7 | ||
|
|
42c35b6f44 | ||
|
|
9e79d64d62 | ||
|
|
582857f292 | ||
|
|
9bbeb793e5 | ||
|
|
dbbe7a773c | ||
|
|
5f09d4a90a | ||
|
|
f8c20258ae | ||
|
|
d8538d8c98 | ||
|
|
3642381489 | ||
|
|
1f07880d5c | ||
|
|
d81b6cd334 | ||
|
|
d99210c049 | ||
|
|
577653551c | ||
|
|
38a385fb6f | ||
|
|
cd2ae5aa2d | ||
|
|
41694f21c6 | ||
|
|
fccbe56d23 | ||
|
|
c46555da41 | ||
|
|
0a27c9dabd | ||
|
|
3c9e76eb66 | ||
|
|
80f5cdcf66 | ||
|
|
35fe0e90da | ||
|
|
157b49a8ee | ||
|
|
8a6e223df5 | ||
|
|
5a48da53da | ||
|
|
58005b590c | ||
|
|
884835e386 | ||
|
|
efd4407f7f | ||
|
|
761588a60e | ||
|
|
e1eb188049 | ||
|
|
ff19629b11 | ||
|
|
cd80c9d907 | ||
|
|
abb34828bd | ||
|
|
cab7caf80b | ||
|
|
d470f29093 | ||
|
|
1fbed6c325 | ||
|
|
4dcb7ddafe | ||
|
|
5be90c3a67 | ||
|
|
d0dca7bfcf | ||
|
|
78210b198b | ||
|
|
4a2310b595 | ||
|
|
fc392c663a | ||
|
|
81d3c419e9 | ||
|
|
a6a3d3f656 | ||
|
|
c847cad389 | ||
|
|
81e5830585 | ||
|
|
2b58705cc1 | ||
|
|
9f3235a37f | ||
|
|
62d650226b | ||
|
|
5d8a391a3e | ||
|
|
ed7b98cf9b | ||
|
|
6591775cd9 | ||
|
|
1f82731856 | ||
|
|
874da9c400 | ||
|
|
375cead202 | ||
|
|
9ec690b8f8 | ||
|
|
a48d779c4e | ||
|
|
ba6c7f6897 | ||
|
|
8095ff0e55 | ||
|
|
24cd592a1d | ||
|
|
a40bc6a460 | ||
|
|
577de6c599 | ||
|
|
d8b1afbc6e | ||
|
|
9a3001b571 | ||
|
|
ec2c7cad0e | ||
|
|
924741cb11 | ||
|
|
77e8da7497 | ||
|
|
5e64863895 | ||
|
|
78e4753d74 | ||
|
|
eb55b9b5a0 | ||
|
|
0eb36e4345 | ||
|
|
5fcf0afff4 | ||
|
|
b946cfd1f7 | ||
|
|
95c8f5fd0f | ||
|
|
89aa804b2d | ||
|
|
f789b12705 | ||
|
|
3e36a7ab81 | ||
|
|
5c548dcc04 | ||
|
|
bd30701980 | ||
|
|
2b6d66b793 | ||
|
|
147ea399fd | ||
|
|
907047bf4b | ||
|
|
13a1483f1e | ||
|
|
be69f61b3e | ||
|
|
f1f3bd1cd3 | ||
|
|
3bef23669f | ||
|
|
f82ee8ea59 | ||
|
|
b8a65182dd | ||
|
|
fc15d8a3bd | ||
|
|
b3b5c19105 | ||
|
|
f8aae9b1d6 | ||
|
|
9180635171 | ||
|
|
3ef4b3bf32 | ||
|
|
5a3886c8b5 | ||
|
|
813ec23ecd | ||
|
|
13883414af | ||
|
|
84d4f114ef | ||
|
|
1c586b29e2 | ||
|
|
d76a8518c2 | ||
|
|
5f0ee2670a | ||
|
|
f8ca6c3316 | ||
|
|
ba7b023f26 | ||
|
|
e947d163b2 | ||
|
|
1cf4d2ff69 | ||
|
|
2308522f38 | ||
|
|
438f3d967b | ||
|
|
5bf4759cff | ||
|
|
2e9e96338e | ||
|
|
5fa7ace1f5 | ||
|
|
704868ca83 | ||
|
|
dc71c8a484 | ||
|
|
2499297392 | ||
|
|
7b9189bb2c | ||
|
|
d4cf61d98b | ||
|
|
5d91ba0b10 | ||
|
|
a7e9f0c4b9 | ||
|
|
c7d48e10e6 | ||
|
|
94dee2a36d | ||
|
|
555a5c9319 | ||
|
|
1279c20ee1 | ||
|
|
ce3af27f59 | ||
|
|
71da1d6df5 | ||
|
|
e598240f04 | ||
|
|
c9b84e2a85 | ||
|
|
d3f1c8e536 | ||
|
|
eea6b31980 | ||
|
|
b8dc780bdc | ||
|
|
93fdf7ed36 | ||
|
|
b19f388249 | ||
|
|
de947deee7 | ||
|
|
c0c4ae14ac | ||
|
|
645ce7e5ec | ||
|
|
1430f21283 |
@@ -17,4 +17,7 @@ indent_size = 4
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.md]
|
||||
max_line_length = 100
|
||||
max_line_length = 100
|
||||
|
||||
[*.toml]
|
||||
indent_size = 4
|
||||
5
.github/CODEOWNERS
vendored
5
.github/CODEOWNERS
vendored
@@ -13,9 +13,10 @@
|
||||
# flake8-pyi
|
||||
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood
|
||||
|
||||
# Script for fuzzing the parser
|
||||
/scripts/fuzz-parser/ @AlexWaygood
|
||||
# Script for fuzzing the parser/red-knot etc.
|
||||
/python/py-fuzzer/ @AlexWaygood
|
||||
|
||||
# red-knot
|
||||
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
|
||||
|
||||
84
.github/workflows/ci.yaml
vendored
84
.github/workflows/ci.yaml
vendored
@@ -32,6 +32,8 @@ jobs:
|
||||
# Flag that is raised when any code is changed
|
||||
# This is superset of the linter and formatter
|
||||
code: ${{ steps.changed.outputs.code_any_changed }}
|
||||
# Flag that is raised when any code that affects the fuzzer is changed
|
||||
fuzz: ${{ steps.changed.outputs.fuzz_any_changed }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
@@ -49,7 +51,7 @@ jobs:
|
||||
- crates/ruff_text_size/**
|
||||
- crates/ruff_python_ast/**
|
||||
- crates/ruff_python_parser/**
|
||||
- scripts/fuzz-parser/**
|
||||
- python/py-fuzzer/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
linter:
|
||||
@@ -79,9 +81,15 @@ jobs:
|
||||
- python/**
|
||||
- .github/workflows/ci.yaml
|
||||
|
||||
fuzz:
|
||||
- fuzz/Cargo.toml
|
||||
- fuzz/Cargo.lock
|
||||
- fuzz/fuzz_targets/**
|
||||
|
||||
code:
|
||||
- "**/*"
|
||||
- "!**/*.md"
|
||||
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
|
||||
- "!docs/**"
|
||||
- "!assets/**"
|
||||
|
||||
@@ -115,7 +123,7 @@ jobs:
|
||||
|
||||
cargo-test-linux:
|
||||
name: "cargo test (linux)"
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
@@ -157,9 +165,36 @@ jobs:
|
||||
name: ruff
|
||||
path: target/debug/ruff
|
||||
|
||||
cargo-test-linux-release:
|
||||
name: "cargo test (linux, release)"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@v1
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-test-windows:
|
||||
name: "cargo test (windows)"
|
||||
runs-on: windows-latest
|
||||
runs-on: windows-latest-xlarge
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
@@ -197,6 +232,8 @@ jobs:
|
||||
cache: "npm"
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
with:
|
||||
version: v0.13.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Test ruff_wasm"
|
||||
run: |
|
||||
@@ -210,8 +247,7 @@ jobs:
|
||||
cargo-build-release:
|
||||
name: "cargo build (release)"
|
||||
runs-on: macos-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -255,11 +291,11 @@ jobs:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-fuzz:
|
||||
name: "cargo fuzz"
|
||||
cargo-fuzz-build:
|
||||
name: "cargo fuzz build"
|
||||
runs-on: ubuntu-latest
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.ref == 'refs/heads/main' || needs.determine_changes.outputs.fuzz == 'true' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -278,7 +314,7 @@ jobs:
|
||||
- run: cargo fuzz build -s none
|
||||
|
||||
fuzz-parser:
|
||||
name: "Fuzz the parser"
|
||||
name: "fuzz parser"
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
@@ -289,13 +325,7 @@ jobs:
|
||||
FORCE_COLOR: 1
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
- name: Install uv
|
||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
- name: Install Python requirements
|
||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
||||
- uses: astral-sh/setup-uv@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
name: Download Ruff binary to test
|
||||
id: download-cached-binary
|
||||
@@ -307,7 +337,15 @@ jobs:
|
||||
# Make executable, since artifact download doesn't preserve this
|
||||
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
||||
|
||||
python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
|
||||
(
|
||||
uvx \
|
||||
--python=${{ env.PYTHON_VERSION }} \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable=${{ steps.download-cached-binary.outputs.download-path }}/ruff \
|
||||
--bin=ruff \
|
||||
0-500
|
||||
)
|
||||
|
||||
scripts:
|
||||
name: "test scripts"
|
||||
@@ -331,7 +369,7 @@ jobs:
|
||||
|
||||
ecosystem:
|
||||
name: "ecosystem"
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
@@ -352,7 +390,7 @@ jobs:
|
||||
name: ruff
|
||||
path: target/debug
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
name: Download baseline Ruff binary
|
||||
with:
|
||||
name: ruff
|
||||
@@ -528,7 +566,7 @@ jobs:
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v3
|
||||
uses: astral-sh/setup-uv@v4
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
@@ -561,12 +599,12 @@ jobs:
|
||||
run: rustup show
|
||||
- name: "Cache rust"
|
||||
uses: Swatinem/rust-cache@v2
|
||||
- name: "Formatter progress"
|
||||
- name: "Run checks"
|
||||
run: scripts/formatter_ecosystem_checks.sh
|
||||
- name: "Github step summary"
|
||||
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
|
||||
run: cat target/formatter-ecosystem/stats.txt > $GITHUB_STEP_SUMMARY
|
||||
- name: "Remove checkouts from cache"
|
||||
run: rm -r target/progress_projects
|
||||
run: rm -r target/formatter-ecosystem
|
||||
|
||||
check-ruff-lsp:
|
||||
name: "test ruff-lsp"
|
||||
|
||||
19
.github/workflows/daily_fuzz.yaml
vendored
19
.github/workflows/daily_fuzz.yaml
vendored
@@ -32,13 +32,7 @@ jobs:
|
||||
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Install uv
|
||||
run: curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
- name: Install Python requirements
|
||||
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
|
||||
- uses: astral-sh/setup-uv@v4
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
@@ -49,7 +43,16 @@ jobs:
|
||||
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
|
||||
run: cargo build --locked
|
||||
- name: Fuzz
|
||||
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
|
||||
run: |
|
||||
(
|
||||
uvx \
|
||||
--python=3.12 \
|
||||
--from=./python/py-fuzzer \
|
||||
fuzz \
|
||||
--test-executable=target/debug/ruff \
|
||||
--bin=ruff \
|
||||
$(shuf -i 0-9999999999999999999 -n 1000)
|
||||
)
|
||||
|
||||
create-issue-on-failure:
|
||||
name: Create an issue if the daily fuzz surfaced any bugs
|
||||
|
||||
4
.github/workflows/pr-comment.yaml
vendored
4
.github/workflows/pr-comment.yaml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
name: Download pull request number
|
||||
with:
|
||||
name: pr-number
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v6
|
||||
- uses: dawidd6/action-download-artifact@v7
|
||||
name: "Download ecosystem results"
|
||||
id: download-ecosystem-result
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
|
||||
2
.github/workflows/publish-playground.yml
vendored
2
.github/workflows/publish-playground.yml
vendored
@@ -47,7 +47,7 @@ jobs:
|
||||
working-directory: playground
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.11.0
|
||||
uses: cloudflare/wrangler-action@v3.13.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
|
||||
2
.github/workflows/publish-pypi.yml
vendored
2
.github/workflows/publish-pypi.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
id-token: write
|
||||
steps:
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@v3
|
||||
uses: astral-sh/setup-uv@v4
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: wheels-*
|
||||
|
||||
36
.github/workflows/release.yml
vendored
36
.github/workflows/release.yml
vendored
@@ -1,4 +1,4 @@
|
||||
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
|
||||
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
|
||||
#
|
||||
# Copyright 2022-2024, axodotdev
|
||||
# SPDX-License-Identifier: MIT or Apache-2.0
|
||||
@@ -6,7 +6,7 @@
|
||||
# CI that:
|
||||
#
|
||||
# * checks for a Git Tag that looks like a release
|
||||
# * builds artifacts with cargo-dist (archives, installers, hashes)
|
||||
# * builds artifacts with dist (archives, installers, hashes)
|
||||
# * uploads those artifacts to temporary workflow zip
|
||||
# * on success, uploads the artifacts to a GitHub Release
|
||||
#
|
||||
@@ -24,10 +24,10 @@ permissions:
|
||||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (cargo-dist-able) packages in the workspace with that version (this mode is
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
@@ -48,7 +48,7 @@ on:
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
@@ -62,16 +62,16 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cargo-dist
|
||||
- name: Install dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
|
||||
- name: Cache cargo-dist
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/cargo-dist
|
||||
path: ~/.cargo/bin/dist
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
@@ -79,8 +79,8 @@ jobs:
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
@@ -124,12 +124,12 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached cargo-dist
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
@@ -140,8 +140,8 @@ jobs:
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
@@ -174,12 +174,12 @@ jobs:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cached cargo-dist
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/cargo-dist
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
@@ -191,7 +191,7 @@ jobs:
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
@@ -17,12 +17,12 @@ exclude: |
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.22
|
||||
rev: v0.23
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.18
|
||||
rev: 0.7.19
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
@@ -36,7 +36,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.42.0
|
||||
rev: v0.43.0
|
||||
hooks:
|
||||
- id: markdownlint-fix
|
||||
exclude: |
|
||||
@@ -53,13 +53,13 @@ repos:
|
||||
files: '^crates/.*/resources/mdtest/.*\.md'
|
||||
exclude: |
|
||||
(?x)^(
|
||||
.*?invalid(_.+)_syntax.md
|
||||
.*?invalid(_.+)*_syntax\.md
|
||||
)$
|
||||
additional_dependencies:
|
||||
- black==24.10.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.27.0
|
||||
rev: v1.28.1
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -73,7 +73,7 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.7.2
|
||||
rev: v0.8.1
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
@@ -83,7 +83,7 @@ repos:
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.3.3
|
||||
rev: v3.4.1
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
|
||||
@@ -1,5 +1,30 @@
|
||||
# Breaking Changes
|
||||
|
||||
## 0.8.0
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
## 0.7.0
|
||||
|
||||
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
|
||||
@@ -167,7 +192,7 @@ flag or `unsafe-fixes` configuration option can be used to enable unsafe fixes.
|
||||
|
||||
See the [docs](https://docs.astral.sh/ruff/configuration/#fix-safety) for details.
|
||||
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
### Remove formatter-conflicting rules from the default rule set ([#7900](https://github.com/astral-sh/ruff/pull/7900))
|
||||
|
||||
Previously, Ruff enabled all implemented rules in Pycodestyle (`E`) by default. Ruff now only includes the
|
||||
Pycodestyle prefixes `E4`, `E7`, and `E9` to exclude rules that conflict with automatic formatters. Consequently,
|
||||
|
||||
234
CHANGELOG.md
234
CHANGELOG.md
@@ -1,5 +1,231 @@
|
||||
# Changelog
|
||||
|
||||
## 0.8.2
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`airflow`\] Avoid deprecated values (`AIR302`) ([#14582](https://github.com/astral-sh/ruff/pull/14582))
|
||||
- \[`airflow`\] Extend removed names for `AIR302` ([#14734](https://github.com/astral-sh/ruff/pull/14734))
|
||||
- \[`ruff`\] Extend `unnecessary-regular-expression` to non-literal strings (`RUF055`) ([#14679](https://github.com/astral-sh/ruff/pull/14679))
|
||||
- \[`ruff`\] Implement `used-dummy-variable` (`RUF052`) ([#14611](https://github.com/astral-sh/ruff/pull/14611))
|
||||
- \[`ruff`\] Implement `unnecessary-cast-to-int` (`RUF046`) ([#14697](https://github.com/astral-sh/ruff/pull/14697))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`airflow`\] Check `AIR001` from builtin or providers `operators` module ([#14631](https://github.com/astral-sh/ruff/pull/14631))
|
||||
- \[`flake8-pytest-style`\] Remove `@` in `pytest.mark.parametrize` rule messages ([#14770](https://github.com/astral-sh/ruff/pull/14770))
|
||||
- \[`pandas-vet`\] Skip rules if the `panda` module hasn't been seen ([#14671](https://github.com/astral-sh/ruff/pull/14671))
|
||||
- \[`pylint`\] Fix false negatives for `ascii` and `sorted` in `len-as-condition` (`PLC1802`) ([#14692](https://github.com/astral-sh/ruff/pull/14692))
|
||||
- \[`refurb`\] Guard `hashlib` imports and mark `hashlib-digest-hex` fix as safe (`FURB181`) ([#14694](https://github.com/astral-sh/ruff/pull/14694))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-import-conventions`\] Improve syntax check for aliases supplied in configuration for `unconventional-import-alias` (`ICN001`) ([#14745](https://github.com/astral-sh/ruff/pull/14745))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Revert: [pyflakes] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) (#14615) ([#14726](https://github.com/astral-sh/ruff/pull/14726))
|
||||
- \[`pep8-naming`\] Avoid false positive for `class Bar(type(foo))` (`N804`) ([#14683](https://github.com/astral-sh/ruff/pull/14683))
|
||||
- \[`pycodestyle`\] Handle f-strings properly for `invalid-escape-sequence` (`W605`) ([#14748](https://github.com/astral-sh/ruff/pull/14748))
|
||||
- \[`pylint`\] Ignore `@overload` in `PLR0904` ([#14730](https://github.com/astral-sh/ruff/pull/14730))
|
||||
- \[`refurb`\] Handle non-finite decimals in `verbose-decimal-constructor` (`FURB157`) ([#14596](https://github.com/astral-sh/ruff/pull/14596))
|
||||
- \[`ruff`\] Avoid emitting `assignment-in-assert` when all references to the assigned variable are themselves inside `assert`s (`RUF018`) ([#14661](https://github.com/astral-sh/ruff/pull/14661))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Improve docs for `flake8-use-pathlib` rules ([#14741](https://github.com/astral-sh/ruff/pull/14741))
|
||||
- Improve error messages and docs for `flake8-comprehensions` rules ([#14729](https://github.com/astral-sh/ruff/pull/14729))
|
||||
- \[`flake8-type-checking`\] Expands `TC006` docs to better explain itself ([#14749](https://github.com/astral-sh/ruff/pull/14749))
|
||||
|
||||
## 0.8.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Avoid invalid syntax for format-spec with quotes for all Python versions ([#14625](https://github.com/astral-sh/ruff/pull/14625))
|
||||
- Formatter: Consider quotes inside format-specs when choosing the quotes for an f-string ([#14493](https://github.com/astral-sh/ruff/pull/14493))
|
||||
- Formatter: Do not consider f-strings with escaped newlines as multiline ([#14624](https://github.com/astral-sh/ruff/pull/14624))
|
||||
- Formatter: Fix f-string formatting in assignment statement ([#14454](https://github.com/astral-sh/ruff/pull/14454))
|
||||
- Formatter: Fix unnecessary space around power operator (`**`) in overlong f-string expressions ([#14489](https://github.com/astral-sh/ruff/pull/14489))
|
||||
- \[`airflow`\] Avoid implicit `schedule` argument to `DAG` and `@dag` (`AIR301`) ([#14581](https://github.com/astral-sh/ruff/pull/14581))
|
||||
- \[`flake8-builtins`\] Exempt private built-in modules (`A005`) ([#14505](https://github.com/astral-sh/ruff/pull/14505))
|
||||
- \[`flake8-pytest-style`\] Fix `pytest.mark.parametrize` rules to check calls instead of decorators ([#14515](https://github.com/astral-sh/ruff/pull/14515))
|
||||
- \[`flake8-type-checking`\] Implement `runtime-cast-value` (`TC006`) ([#14511](https://github.com/astral-sh/ruff/pull/14511))
|
||||
- \[`flake8-type-checking`\] Implement `unquoted-type-alias` (`TC007`) and `quoted-type-alias` (`TC008`) ([#12927](https://github.com/astral-sh/ruff/pull/12927))
|
||||
- \[`flake8-use-pathlib`\] Recommend `Path.iterdir()` over `os.listdir()` (`PTH208`) ([#14509](https://github.com/astral-sh/ruff/pull/14509))
|
||||
- \[`pylint`\] Extend `invalid-envvar-default` to detect `os.environ.get` (`PLW1508`) ([#14512](https://github.com/astral-sh/ruff/pull/14512))
|
||||
- \[`pylint`\] Implement `len-test` (`PLC1802`) ([#14309](https://github.com/astral-sh/ruff/pull/14309))
|
||||
- \[`refurb`\] Fix bug where methods defined using lambdas were flagged by `FURB118` ([#14639](https://github.com/astral-sh/ruff/pull/14639))
|
||||
- \[`ruff`\] Auto-add `r` prefix when string has no backslashes for `unraw-re-pattern` (`RUF039`) ([#14536](https://github.com/astral-sh/ruff/pull/14536))
|
||||
- \[`ruff`\] Implement `invalid-assert-message-literal-argument` (`RUF040`) ([#14488](https://github.com/astral-sh/ruff/pull/14488))
|
||||
- \[`ruff`\] Implement `unnecessary-nested-literal` (`RUF041`) ([#14323](https://github.com/astral-sh/ruff/pull/14323))
|
||||
- \[`ruff`\] Implement `unnecessary-regular-expression` (`RUF055`) ([#14659](https://github.com/astral-sh/ruff/pull/14659))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Ignore more rules for stub files ([#14541](https://github.com/astral-sh/ruff/pull/14541))
|
||||
- \[`pep8-naming`\] Eliminate false positives for single-letter names (`N811`, `N814`) ([#14584](https://github.com/astral-sh/ruff/pull/14584))
|
||||
- \[`pyflakes`\] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) ([#14615](https://github.com/astral-sh/ruff/pull/14615))
|
||||
- \[`ruff`\] Detect redirected-noqa in file-level comments (`RUF101`) ([#14635](https://github.com/astral-sh/ruff/pull/14635))
|
||||
- \[`ruff`\] Mark fixes for `unsorted-dunder-all` and `unsorted-dunder-slots` as unsafe when there are complex comments in the sequence (`RUF022`, `RUF023`) ([#14560](https://github.com/astral-sh/ruff/pull/14560))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid fixing code to `None | None` for `redundant-none-literal` (`PYI061`) and `never-union` (`RUF020`) ([#14583](https://github.com/astral-sh/ruff/pull/14583), [#14589](https://github.com/astral-sh/ruff/pull/14589))
|
||||
- \[`flake8-bugbear`\] Fix `mutable-contextvar-default` to resolve annotated function calls properly (`B039`) ([#14532](https://github.com/astral-sh/ruff/pull/14532))
|
||||
- \[`flake8-pyi`, `ruff`\] Fix traversal of nested literals and unions (`PYI016`, `PYI051`, `PYI055`, `PYI062`, `RUF041`) ([#14641](https://github.com/astral-sh/ruff/pull/14641))
|
||||
- \[`flake8-pyi`\] Avoid rewriting invalid type expressions in `unnecessary-type-union` (`PYI055`) ([#14660](https://github.com/astral-sh/ruff/pull/14660))
|
||||
- \[`flake8-type-checking`\] Avoid syntax errors and type checking problem for quoted annotations autofix (`TC003`, `TC006`) ([#14634](https://github.com/astral-sh/ruff/pull/14634))
|
||||
- \[`pylint`\] Do not wrap function calls in parentheses in the fix for unnecessary-dunder-call (`PLC2801`) ([#14601](https://github.com/astral-sh/ruff/pull/14601))
|
||||
- \[`ruff`\] Handle `attrs`'s `auto_attribs` correctly (`RUF009`) ([#14520](https://github.com/astral-sh/ruff/pull/14520))
|
||||
|
||||
## 0.8.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.8.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
### Removed Rules
|
||||
|
||||
The following deprecated rules have been removed:
|
||||
|
||||
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
|
||||
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
|
||||
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`)
|
||||
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
|
||||
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
|
||||
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`flake8-type-checking`](https://docs.astral.sh/ruff/rules/#flake8-type-checking-tc): `TCH` to `TC`
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`builtin-import-shadowing`](https://docs.astral.sh/ruff/rules/builtin-import-shadowing/) (`A004`)
|
||||
- [`mutable-contextvar-default`](https://docs.astral.sh/ruff/rules/mutable-contextvar-default/) (`B039`)
|
||||
- [`fast-api-redundant-response-model`](https://docs.astral.sh/ruff/rules/fast-api-redundant-response-model/) (`FAST001`)
|
||||
- [`fast-api-non-annotated-dependency`](https://docs.astral.sh/ruff/rules/fast-api-non-annotated-dependency/) (`FAST002`)
|
||||
- [`dict-index-missing-items`](https://docs.astral.sh/ruff/rules/dict-index-missing-items/) (`PLC0206`)
|
||||
- [`pep484-style-positional-only-parameter`](https://docs.astral.sh/ruff/rules/pep484-style-positional-only-parameter/) (`PYI063`)
|
||||
- [`redundant-final-literal`](https://docs.astral.sh/ruff/rules/redundant-final-literal/) (`PYI064`)
|
||||
- [`bad-version-info-order`](https://docs.astral.sh/ruff/rules/bad-version-info-order/) (`PYI066`)
|
||||
- [`parenthesize-chained-operators`](https://docs.astral.sh/ruff/rules/parenthesize-chained-operators/) (`RUF021`)
|
||||
- [`unsorted-dunder-all`](https://docs.astral.sh/ruff/rules/unsorted-dunder-all/) (`RUF022`)
|
||||
- [`unsorted-dunder-slots`](https://docs.astral.sh/ruff/rules/unsorted-dunder-slots/) (`RUF023`)
|
||||
- [`assert-with-print-message`](https://docs.astral.sh/ruff/rules/assert-with-print-message/) (`RUF030`)
|
||||
- [`unnecessary-default-type-args`](https://docs.astral.sh/ruff/rules/unnecessary-default-type-args/) (`UP043`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`ambiguous-variable-name`](https://docs.astral.sh/ruff/rules/ambiguous-variable-name/) (`E741`): Violations in stub files are now ignored. Stub authors typically don't control variable names.
|
||||
- [`printf-string-formatting`](https://docs.astral.sh/ruff/rules/printf-string-formatting/) (`UP031`): Report all `printf`-like usages even if no autofix is available
|
||||
|
||||
The following fixes have been stabilized:
|
||||
|
||||
- [`zip-instead-of-pairwise`](https://docs.astral.sh/ruff/rules/zip-instead-of-pairwise/) (`RUF007`)
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-datetimez`\] Exempt `min.time()` and `max.time()` (`DTZ901`) ([#14394](https://github.com/astral-sh/ruff/pull/14394))
|
||||
- \[`flake8-pie`\] Mark fix as unsafe if the following statement is a string literal (`PIE790`) ([#14393](https://github.com/astral-sh/ruff/pull/14393))
|
||||
- \[`flake8-pyi`\] New rule `redundant-none-literal` (`PYI061`) ([#14316](https://github.com/astral-sh/ruff/pull/14316))
|
||||
- \[`flake8-pyi`\] Add autofix for `redundant-numeric-union` (`PYI041`) ([#14273](https://github.com/astral-sh/ruff/pull/14273))
|
||||
- \[`ruff`\] New rule `map-int-version-parsing` (`RUF048`) ([#14373](https://github.com/astral-sh/ruff/pull/14373))
|
||||
- \[`ruff`\] New rule `redundant-bool-literal` (`RUF038`) ([#14319](https://github.com/astral-sh/ruff/pull/14319))
|
||||
- \[`ruff`\] New rule `unraw-re-pattern` (`RUF039`) ([#14446](https://github.com/astral-sh/ruff/pull/14446))
|
||||
- \[`pycodestyle`\] Exempt `pytest.importorskip()` calls (`E402`) ([#14474](https://github.com/astral-sh/ruff/pull/14474))
|
||||
- \[`pylint`\] Autofix suggests using sets when possible (`PLR1714`) ([#14372](https://github.com/astral-sh/ruff/pull/14372))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- [`invalid-pyproject-toml`](https://docs.astral.sh/ruff/rules/invalid-pyproject-toml/) (`RUF200`): Updated to reflect the provisionally accepted [PEP 639](https://peps.python.org/pep-0639/).
|
||||
- \[`flake8-pyi`\] Avoid panic in unfixable case (`PYI041`) ([#14402](https://github.com/astral-sh/ruff/pull/14402))
|
||||
- \[`flake8-type-checking`\] Correctly handle quotes in subscript expression when generating an autofix ([#14371](https://github.com/astral-sh/ruff/pull/14371))
|
||||
- \[`pylint`\] Suggest correct autofix for `__contains__` (`PLC2801`) ([#14424](https://github.com/astral-sh/ruff/pull/14424))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Ruff now emits a warning instead of an error when a configuration [`ignore`](https://docs.astral.sh/ruff/settings/#lint_ignore)s a rule that has been removed ([#14435](https://github.com/astral-sh/ruff/pull/14435))
|
||||
- Ruff now validates that `lint.flake8-import-conventions.aliases` only uses valid module names and aliases ([#14477](https://github.com/astral-sh/ruff/pull/14477))
|
||||
|
||||
## 0.7.4
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-datetimez`\] Detect usages of `datetime.max`/`datetime.min` (`DTZ901`) ([#14288](https://github.com/astral-sh/ruff/pull/14288))
|
||||
- \[`flake8-logging`\] Implement `root-logger-calls` (`LOG015`) ([#14302](https://github.com/astral-sh/ruff/pull/14302))
|
||||
- \[`flake8-no-pep420`\] Detect empty implicit namespace packages (`INP001`) ([#14236](https://github.com/astral-sh/ruff/pull/14236))
|
||||
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI019`) ([#14238](https://github.com/astral-sh/ruff/pull/14238))
|
||||
- \[`perflint`\] Implement quick-fix for `manual-list-comprehension` (`PERF401`) ([#13919](https://github.com/astral-sh/ruff/pull/13919))
|
||||
- \[`pylint`\] Implement `shallow-copy-environ` (`W1507`) ([#14241](https://github.com/astral-sh/ruff/pull/14241))
|
||||
- \[`ruff`\] Implement `none-not-at-end-of-union` (`RUF036`) ([#14314](https://github.com/astral-sh/ruff/pull/14314))
|
||||
- \[`ruff`\] Implementation `unsafe-markup-call` from `flake8-markupsafe` plugin (`RUF035`) ([#14224](https://github.com/astral-sh/ruff/pull/14224))
|
||||
- \[`ruff`\] Report problems for `attrs` dataclasses (`RUF008`, `RUF009`) ([#14327](https://github.com/astral-sh/ruff/pull/14327))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-boolean-trap`\] Exclude dunder methods that define operators (`FBT001`) ([#14203](https://github.com/astral-sh/ruff/pull/14203))
|
||||
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI034`) ([#14217](https://github.com/astral-sh/ruff/pull/14217))
|
||||
- \[`flake8-pyi`\] Always autofix `duplicate-union-members` (`PYI016`) ([#14270](https://github.com/astral-sh/ruff/pull/14270))
|
||||
- \[`flake8-pyi`\] Improve autofix for nested and mixed type unions for `unnecessary-type-union` (`PYI055`) ([#14272](https://github.com/astral-sh/ruff/pull/14272))
|
||||
- \[`flake8-pyi`\] Mark fix as unsafe when type annotation contains comments for `duplicate-literal-member` (`PYI062`) ([#14268](https://github.com/astral-sh/ruff/pull/14268))
|
||||
|
||||
### Server
|
||||
|
||||
- Use the current working directory to resolve settings from `ruff.configuration` ([#14352](https://github.com/astral-sh/ruff/pull/14352))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid conflicts between `PLC014` (`useless-import-alias`) and `I002` (`missing-required-import`) by considering `lint.isort.required-imports` for `PLC014` ([#14287](https://github.com/astral-sh/ruff/pull/14287))
|
||||
- \[`flake8-type-checking`\] Skip quoting annotation if it becomes invalid syntax (`TCH001`)
|
||||
- \[`flake8-pyi`\] Avoid using `typing.Self` in stub files pre-Python 3.11 (`PYI034`) ([#14230](https://github.com/astral-sh/ruff/pull/14230))
|
||||
- \[`flake8-pytest-style`\] Flag `pytest.raises` call with keyword argument `expected_exception` (`PT011`) ([#14298](https://github.com/astral-sh/ruff/pull/14298))
|
||||
- \[`flake8-simplify`\] Infer "unknown" truthiness for literal iterables whose items are all unpacks (`SIM222`) ([#14263](https://github.com/astral-sh/ruff/pull/14263))
|
||||
- \[`flake8-type-checking`\] Fix false positives for `typing.Annotated` (`TCH001`) ([#14311](https://github.com/astral-sh/ruff/pull/14311))
|
||||
- \[`pylint`\] Allow `await` at the top-level scope of a notebook (`PLE1142`) ([#14225](https://github.com/astral-sh/ruff/pull/14225))
|
||||
- \[`pylint`\] Fix miscellaneous issues in `await-outside-async` detection (`PLE1142`) ([#14218](https://github.com/astral-sh/ruff/pull/14218))
|
||||
- \[`pyupgrade`\] Avoid applying PEP 646 rewrites in invalid contexts (`UP044`) ([#14234](https://github.com/astral-sh/ruff/pull/14234))
|
||||
- \[`pyupgrade`\] Detect permutations in redundant open modes (`UP015`) ([#14255](https://github.com/astral-sh/ruff/pull/14255))
|
||||
- \[`refurb`\] Avoid triggering `hardcoded-string-charset` for reordered sets (`FURB156`) ([#14233](https://github.com/astral-sh/ruff/pull/14233))
|
||||
- \[`refurb`\] Further special cases added to `verbose-decimal-constructor` (`FURB157`) ([#14216](https://github.com/astral-sh/ruff/pull/14216))
|
||||
- \[`refurb`\] Use `UserString` instead of non-existent `UserStr` (`FURB189`) ([#14209](https://github.com/astral-sh/ruff/pull/14209))
|
||||
- \[`ruff`\] Avoid treating lowercase letters as `# noqa` codes (`RUF100`) ([#14229](https://github.com/astral-sh/ruff/pull/14229))
|
||||
- \[`ruff`\] Do not report when `Optional` has no type arguments (`RUF013`) ([#14181](https://github.com/astral-sh/ruff/pull/14181))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add "Notebook behavior" section for `F704`, `PLE1142` ([#14266](https://github.com/astral-sh/ruff/pull/14266))
|
||||
- Document comment policy around fix safety ([#14300](https://github.com/astral-sh/ruff/pull/14300))
|
||||
|
||||
## 0.7.3
|
||||
|
||||
### Preview features
|
||||
@@ -68,7 +294,7 @@
|
||||
### Preview features
|
||||
|
||||
- Fix `E221` and `E222` to flag missing or extra whitespace around `==` operator ([#13890](https://github.com/astral-sh/ruff/pull/13890))
|
||||
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
|
||||
- Formatter: Alternate quotes for strings inside f-strings in preview ([#13860](https://github.com/astral-sh/ruff/pull/13860))
|
||||
- Formatter: Join implicit concatenated strings when they fit on a line ([#13663](https://github.com/astral-sh/ruff/pull/13663))
|
||||
- \[`pylint`\] Restrict `iteration-over-set` to only work on sets of literals (`PLC0208`) ([#13731](https://github.com/astral-sh/ruff/pull/13731))
|
||||
|
||||
@@ -929,7 +1155,7 @@ The following deprecated CLI commands have been removed:
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644))
|
||||
- \[`flake8-pyi`\] Implement `PYI063` ([#11699](https://github.com/astral-sh/ruff/pull/11699))
|
||||
- \[`flake8-pyi`\] Implement `pep484-style-positional-only-parameter` (`PYI063`) ([#11699](https://github.com/astral-sh/ruff/pull/11699))
|
||||
- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))
|
||||
|
||||
### Rule changes
|
||||
@@ -1083,7 +1309,7 @@ To read more about this exciting milestone, check out our [blog post](https://as
|
||||
### Preview features
|
||||
|
||||
- \[`pycodestyle`\] Ignore end-of-line comments when determining blank line rules ([#11342](https://github.com/astral-sh/ruff/pull/11342))
|
||||
- \[`pylint`\] Detect `pathlib.Path.open` calls in `unspecified-encoding` (`PLW1514`) ([#11288](https://github.com/astral-sh/ruff/pull/11288))
|
||||
- \[`pylint`\] Detect `pathlib.Path.open` calls in `unspecified-encoding` (`PLW1514`) ([#11288](https://github.com/astral-sh/ruff/pull/11288))
|
||||
- \[`flake8-pyi`\] Implement `PYI059` (`generic-not-last-base-class`) ([#11233](https://github.com/astral-sh/ruff/pull/11233))
|
||||
- \[`flake8-pyi`\] Implement `PYI062` (`duplicate-literal-member`) ([#11269](https://github.com/astral-sh/ruff/pull/11269))
|
||||
|
||||
@@ -1458,7 +1684,7 @@ To setup `ruff server` with your editor, refer to the [README.md](https://github
|
||||
- \[`pycodestyle`\] Do not ignore lines before the first logical line in blank lines rules. ([#10382](https://github.com/astral-sh/ruff/pull/10382))
|
||||
- \[`pycodestyle`\] Do not trigger `E225` and `E275` when the next token is a ')' ([#10315](https://github.com/astral-sh/ruff/pull/10315))
|
||||
- \[`pylint`\] Avoid false-positive slot non-assignment for `__dict__` (`PLE0237`) ([#10348](https://github.com/astral-sh/ruff/pull/10348))
|
||||
- Gate f-string struct size test for Rustc \< 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))
|
||||
- Gate f-string struct size test for Rustc < 1.76 ([#10371](https://github.com/astral-sh/ruff/pull/10371))
|
||||
|
||||
### Documentation
|
||||
|
||||
|
||||
@@ -139,7 +139,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).
|
||||
|
||||
1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
|
||||
`#[violation]` to see examples.
|
||||
`#[derive(ViolationMetadata)]` to see examples.
|
||||
|
||||
1. In that file, define a function that adds the violation to the diagnostic list as appropriate
|
||||
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
|
||||
@@ -863,7 +863,7 @@ each configuration file.
|
||||
|
||||
The package root is used to determine a file's "module path". Consider, again, `baz.py`. In that
|
||||
case, `./my_project/src/foo` was identified as the package root, so the module path for `baz.py`
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
would resolve to `foo.bar.baz` — as computed by taking the relative path from the package root
|
||||
(inclusive of the root itself). The module path can be thought of as "the path you would use to
|
||||
import the module" (e.g., `import foo.bar.baz`).
|
||||
|
||||
|
||||
743
Cargo.lock
generated
743
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
29
Cargo.toml
29
Cargo.toml
@@ -65,7 +65,8 @@ compact_str = "0.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.3.0" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.8.0" }
|
||||
@@ -81,7 +82,7 @@ hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = {version = "2.6.0" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
@@ -110,7 +111,7 @@ pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
@@ -136,7 +137,7 @@ strum_macros = { version = "0.26.0" }
|
||||
syn = { version = "2.0.55" }
|
||||
tempfile = { version = "3.9.0" }
|
||||
test-case = { version = "3.3.1" }
|
||||
thiserror = { version = "1.0.58" }
|
||||
thiserror = { version = "2.0.0" }
|
||||
tikv-jemallocator = { version = "0.6.0" }
|
||||
toml = { version = "0.8.11" }
|
||||
tracing = { version = "0.1.40" }
|
||||
@@ -150,7 +151,7 @@ tracing-tree = { version = "0.4.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode-width = { version = "0.2.0" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
@@ -247,10 +248,10 @@ debug = 1
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
# Config for 'cargo dist'
|
||||
# Config for 'dist'
|
||||
[workspace.metadata.dist]
|
||||
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.22.1"
|
||||
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.25.2-prerelease.3"
|
||||
# CI backends to support
|
||||
ci = "github"
|
||||
# The installers to generate for each app
|
||||
@@ -281,13 +282,13 @@ targets = [
|
||||
]
|
||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
||||
auto-includes = false
|
||||
# Whether cargo-dist should create a GitHub Release or use an existing draft
|
||||
# Whether dist should create a Github Release or use an existing draft
|
||||
create-release = true
|
||||
# Which actions to run on pull requests
|
||||
pr-run-mode = "skip"
|
||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
||||
dispatch-releases = true
|
||||
# Which phase cargo-dist should use to create the GitHub release
|
||||
# Which phase dist should use to create the GitHub release
|
||||
github-release = "announce"
|
||||
# Whether CI should include auto-generated code to build local artifacts
|
||||
build-local-artifacts = false
|
||||
@@ -296,14 +297,10 @@ local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
||||
# Publish jobs to run in CI
|
||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||
# Post-announce jobs to run in CI
|
||||
post-announce-jobs = [
|
||||
"./notify-dependents",
|
||||
"./publish-docs",
|
||||
"./publish-playground",
|
||||
]
|
||||
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
||||
# Custom permissions for GitHub Jobs
|
||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||
# Whether to install an updater program
|
||||
install-updater = false
|
||||
# Path that installers should place binaries in
|
||||
install-path = "CARGO_HOME"
|
||||
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
|
||||
|
||||
10
README.md
10
README.md
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.7.3/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.7.3/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.8.2/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.8.2/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.7.3
|
||||
rev: v0.8.2
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -238,8 +238,8 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
[files]
|
||||
# https://github.com/crate-ci/typos/issues/868
|
||||
extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
|
||||
extend-exclude = [
|
||||
"crates/red_knot_vendored/vendor/**/*",
|
||||
"**/resources/**/*",
|
||||
"**/snapshots/**/*",
|
||||
"crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
|
||||
]
|
||||
|
||||
[default.extend-words]
|
||||
"arange" = "arange" # e.g. `numpy.arange`
|
||||
|
||||
36
clippy.toml
@@ -1,21 +1,25 @@
|
||||
doc-valid-idents = [
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"SNMPv1",
|
||||
"SNMPv2",
|
||||
"SNMPv3",
|
||||
"PyFlakes"
|
||||
]
|
||||
|
||||
ignore-interior-mutability = [
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
]
|
||||
|
||||
@@ -34,6 +34,7 @@ tracing-tree = { workspace = true }
|
||||
[dev-dependencies]
|
||||
filetime = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -103,7 +103,7 @@ called **once**.
|
||||
|
||||
## Profiling
|
||||
|
||||
Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.
|
||||
Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.
|
||||
|
||||
```bash
|
||||
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
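# Hedged follow-up (not part of this diff): assuming the `inferno` crate's
# `inferno-flamegraph` binary is installed, the folded stacks can be rendered
# as an SVG flame graph for inspection.
inferno-flamegraph < tracing.folded > tracing-flamegraph.svg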
|
||||
|
||||
@@ -183,10 +183,10 @@ fn run() -> anyhow::Result<ExitStatus> {
|
||||
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let cli_configuration = args.to_configuration(&cwd);
|
||||
let workspace_metadata = WorkspaceMetadata::from_path(
|
||||
let workspace_metadata = WorkspaceMetadata::discover(
|
||||
system.current_directory(),
|
||||
&system,
|
||||
Some(cli_configuration.clone()),
|
||||
Some(&cli_configuration),
|
||||
)?;
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
|
||||
@@ -4,8 +4,8 @@
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum TargetVersion {
|
||||
Py37,
|
||||
#[default]
|
||||
Py38,
|
||||
#[default]
|
||||
Py39,
|
||||
Py310,
|
||||
Py311,
|
||||
@@ -46,3 +46,17 @@ impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::target_version::TargetVersion;
|
||||
use red_knot_python_semantic::PythonVersion;
|
||||
|
||||
#[test]
|
||||
fn same_default_as_python_version() {
|
||||
assert_eq!(
|
||||
PythonVersion::from(TargetVersion::default()),
|
||||
PythonVersion::default()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
#![allow(clippy::disallowed_names)]
|
||||
|
||||
use std::io::Write;
|
||||
use std::time::Duration;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
|
||||
use red_knot_workspace::db::{Db, RootDatabase};
|
||||
use red_knot_workspace::watch::{directory_watcher, ChangeEvent, WorkspaceWatcher};
|
||||
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
@@ -19,7 +17,7 @@ use ruff_db::Upcast;
|
||||
struct TestCase {
|
||||
db: RootDatabase,
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
changes_receiver: crossbeam::channel::Receiver<Vec<watch::ChangeEvent>>,
|
||||
changes_receiver: crossbeam::channel::Receiver<Vec<ChangeEvent>>,
|
||||
/// The temporary directory that contains the test files.
|
||||
/// We need to hold on to it in the test case or the temp files get deleted.
|
||||
_temp_dir: tempfile::TempDir,
|
||||
@@ -40,45 +38,87 @@ impl TestCase {
|
||||
&self.db
|
||||
}
|
||||
|
||||
fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
|
||||
self.try_stop_watch(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
#[track_caller]
|
||||
fn stop_watch<M>(&mut self, matcher: M) -> Vec<ChangeEvent>
|
||||
where
|
||||
M: MatchEvent,
|
||||
{
|
||||
// `#[track_caller]` is unstable for closures, which is why this is a named fn.
|
||||
#[track_caller]
|
||||
fn panic_with_formatted_events(events: Vec<ChangeEvent>) -> Vec<ChangeEvent> {
|
||||
panic!(
|
||||
"Didn't observe expected change:\n{}",
|
||||
events
|
||||
.into_iter()
|
||||
.map(|event| format!(" - {event:?}"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
)
|
||||
}
|
||||
|
||||
self.try_stop_watch(matcher, Duration::from_secs(10))
|
||||
.unwrap_or_else(panic_with_formatted_events)
|
||||
}
|
||||
|
||||
fn try_stop_watch(&mut self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
|
||||
fn try_stop_watch<M>(
|
||||
&mut self,
|
||||
mut matcher: M,
|
||||
timeout: Duration,
|
||||
) -> Result<Vec<ChangeEvent>, Vec<ChangeEvent>>
|
||||
where
|
||||
M: MatchEvent,
|
||||
{
|
||||
tracing::debug!("Try stopping watch with timeout {:?}", timeout);
|
||||
|
||||
let watcher = self
|
||||
.watcher
|
||||
.take()
|
||||
.expect("Cannot call `stop_watch` more than once");
|
||||
|
||||
let mut all_events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(timeout)
|
||||
.unwrap_or_default();
|
||||
watcher.flush();
|
||||
watcher.stop();
|
||||
let start = Instant::now();
|
||||
let mut all_events = Vec::new();
|
||||
|
||||
loop {
|
||||
let events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(Duration::from_millis(100))
|
||||
.unwrap_or_default();
|
||||
|
||||
if events
|
||||
.iter()
|
||||
.any(|event| matcher.match_event(event) || event.is_rescan())
|
||||
{
|
||||
all_events.extend(events);
|
||||
break;
|
||||
}
|
||||
|
||||
all_events.extend(events);
|
||||
|
||||
if start.elapsed() > timeout {
|
||||
return Err(all_events);
|
||||
}
|
||||
}
|
||||
|
||||
watcher.flush();
|
||||
tracing::debug!("Flushed file watcher");
|
||||
watcher.stop();
|
||||
tracing::debug!("Stopping file watcher");
|
||||
|
||||
// Consume remaining events
|
||||
for event in &self.changes_receiver {
|
||||
all_events.extend(event);
|
||||
}
|
||||
|
||||
if all_events.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(all_events)
|
||||
Ok(all_events)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
|
||||
fn take_watch_changes(&self) -> Vec<ChangeEvent> {
|
||||
self.try_take_watch_changes(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
}
|
||||
|
||||
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
|
||||
let Some(watcher) = &self.watcher else {
|
||||
return None;
|
||||
};
|
||||
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<ChangeEvent>> {
|
||||
let watcher = self.watcher.as_ref()?;
|
||||
|
||||
let mut all_events = self
|
||||
.changes_receiver
|
||||
@@ -100,7 +140,7 @@ impl TestCase {
|
||||
Some(all_events)
|
||||
}
|
||||
|
||||
fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
|
||||
fn apply_changes(&mut self, changes: Vec<ChangeEvent>) {
|
||||
self.db.apply_changes(changes, Some(&self.configuration));
|
||||
}
|
||||
|
||||
@@ -110,8 +150,8 @@ impl TestCase {
|
||||
) -> anyhow::Result<()> {
|
||||
let program = Program::get(self.db());
|
||||
|
||||
self.configuration.search_paths = configuration.clone();
|
||||
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
|
||||
let new_settings = configuration.to_settings(self.db.workspace().root(&self.db));
|
||||
self.configuration.search_paths = configuration;
|
||||
|
||||
program.update_search_paths(&mut self.db, &new_settings)?;
|
||||
|
||||
@@ -136,6 +176,23 @@ impl TestCase {
|
||||
}
|
||||
}
|
||||
|
||||
trait MatchEvent {
|
||||
fn match_event(&mut self, event: &ChangeEvent) -> bool;
|
||||
}
|
||||
|
||||
fn event_for_file(name: &str) -> impl MatchEvent + '_ {
|
||||
|event: &ChangeEvent| event.file_name() == Some(name)
|
||||
}
|
||||
|
||||
impl<F> MatchEvent for F
|
||||
where
|
||||
F: FnMut(&ChangeEvent) -> bool,
|
||||
{
|
||||
fn match_event(&mut self, event: &ChangeEvent) -> bool {
|
||||
(*self)(event)
|
||||
}
|
||||
}
|
||||
|
||||
trait SetupFiles {
|
||||
fn setup(self, root_path: &SystemPath, workspace_path: &SystemPath) -> anyhow::Result<()>;
|
||||
}
|
||||
@@ -204,7 +261,9 @@ where
|
||||
.as_utf8_path()
|
||||
.canonicalize_utf8()
|
||||
.with_context(|| "Failed to canonicalize root path.")?,
|
||||
);
|
||||
)
|
||||
.simplified()
|
||||
.to_path_buf();
|
||||
|
||||
let workspace_path = root_path.join("workspace");
|
||||
|
||||
@@ -241,8 +300,7 @@ where
|
||||
search_paths,
|
||||
};
|
||||
|
||||
let workspace =
|
||||
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
|
||||
let workspace = WorkspaceMetadata::discover(&workspace_path, &system, Some(&configuration))?;
|
||||
|
||||
let db = RootDatabase::new(workspace, system)?;
|
||||
|
||||
@@ -310,7 +368,7 @@ fn new_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -333,7 +391,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -355,7 +413,7 @@ fn changed_file() -> anyhow::Result<()> {
|
||||
|
||||
update_file(&foo_path, "print('Version 2')")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
assert!(!changes.is_empty());
|
||||
|
||||
@@ -380,7 +438,7 @@ fn deleted_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::remove_file(foo_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -412,7 +470,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
|
||||
trash_path.join("foo.py").as_std_path(),
|
||||
)?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -444,7 +502,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), foo_in_workspace_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -472,7 +530,7 @@ fn rename_file() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("bar.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -516,7 +574,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
|
||||
.with_context(|| "Failed to move sub directory")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -575,7 +633,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_path.as_std_path(), trashed_sub.as_std_path())
|
||||
.with_context(|| "Failed to move the sub directory to the trash")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -637,7 +695,8 @@ fn directory_renamed() -> anyhow::Result<()> {
|
||||
std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path())
|
||||
.with_context(|| "Failed to move the sub directory")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
// Linux and Windows only emit an event for the newly created root directory, but not for every new component.
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -710,7 +769,7 @@ fn directory_deleted() -> anyhow::Result<()> {
|
||||
std::fs::remove_dir_all(sub_path.as_std_path())
|
||||
.with_context(|| "Failed to remove the sub directory")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("sub"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -747,7 +806,7 @@ fn search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("a.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -778,7 +837,7 @@ fn add_search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("a.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -807,9 +866,9 @@ fn remove_search_path() -> anyhow::Result<()> {
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
let changes = case.try_stop_watch(Duration::from_millis(100));
|
||||
let changes = case.try_stop_watch(|_: &ChangeEvent| true, Duration::from_millis(100));
|
||||
|
||||
assert_eq!(changes, None);
|
||||
assert_eq!(changes, Err(vec![]));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -847,7 +906,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
|
||||
"os: 3.0-",
|
||||
)?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("VERSIONS"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -900,7 +959,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
|
||||
// Write to the hard link target.
|
||||
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -971,7 +1030,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
|
||||
// Write to the hard link target.
|
||||
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(ChangeEvent::is_changed);
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1010,7 +1069,7 @@ mod unix {
|
||||
)
|
||||
.with_context(|| "Failed to set file permissions.")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("foo.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1108,7 +1167,7 @@ mod unix {
|
||||
update_file(baz_workspace, "def baz(): print('Version 3')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1179,7 +1238,7 @@ mod unix {
|
||||
update_file(&patched_bar_baz, "def baz(): print('Version 2')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1287,7 +1346,7 @@ mod unix {
|
||||
update_file(&baz_original, "def baz(): print('Version 2')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
let changes = case.stop_watch(event_for_file("baz.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
@@ -1311,3 +1370,137 @@ mod unix {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_packages_delete_root() -> anyhow::Result<()> {
|
||||
let mut case = setup(|root: &SystemPath, workspace_root: &SystemPath| {
|
||||
std::fs::write(
|
||||
workspace_root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
std::fs::write(
|
||||
root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "outer"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
assert_eq!(
|
||||
case.db().workspace().root(case.db()),
|
||||
&*case.workspace_path("")
|
||||
);
|
||||
|
||||
std::fs::remove_file(case.workspace_path("pyproject.toml").as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch(ChangeEvent::is_deleted);
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
// It should now pick up the outer workspace.
|
||||
assert_eq!(case.db().workspace().root(case.db()), case.root_path());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn added_package() -> anyhow::Result<()> {
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
std::fs::create_dir(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to create folder for package 'b'")?;
|
||||
|
||||
// It seems that the file watcher won't pick up on file changes shortly after the folder
|
||||
// was created... I suspect this is because most file watchers don't support recursive
|
||||
// file watching. Instead, file-watching libraries manually implement recursive file watching
|
||||
// by setting a watcher for each directory. But doing this obviously "lags" behind.
|
||||
case.take_watch_changes();
|
||||
|
||||
std::fs::write(
|
||||
case.workspace_path("packages/b/pyproject.toml")
|
||||
.as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
)
|
||||
.context("failed to write pyproject.toml for package b")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("pyproject.toml"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn removed_package() -> anyhow::Result<()> {
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/b/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
std::fs::remove_dir_all(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to remove package 'b'")?;
|
||||
|
||||
let changes = case.stop_watch(ChangeEvent::is_deleted);
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -13,7 +13,8 @@ license = { workspace = true }
|
||||
[dependencies]
|
||||
ruff_db = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_python_ast = { workspace = true, features = ["salsa"] }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_python_stdlib = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
@@ -32,6 +33,7 @@ thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
smallvec = { workspace = true }
|
||||
static_assertions = { workspace = true }
|
||||
test-case = { workspace = true }
|
||||
@@ -47,6 +49,8 @@ anyhow = { workspace = true }
|
||||
dir-test = { workspace = true }
|
||||
insta = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
quickcheck = { version = "1.0.3", default-features = false }
|
||||
quickcheck_macros = { version = "1.0.0" }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
# Any
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Any` is a way to name the Any type.
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
x: Any = 1
|
||||
x = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
```
|
||||
|
||||
## Aliased to a different name
|
||||
|
||||
If you alias `typing.Any` to another name, we still recognize that as a spelling of the Any type.
|
||||
|
||||
```py
|
||||
from typing import Any as RenamedAny
|
||||
|
||||
x: RenamedAny = 1
|
||||
x = "foo"
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
```
|
||||
|
||||
## Shadowed class
|
||||
|
||||
If you define your own class named `Any`, using that in a type expression refers to your class, and
|
||||
isn't a spelling of the Any type.
|
||||
|
||||
```py
|
||||
class Any:
|
||||
pass
|
||||
|
||||
x: Any
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: Any
|
||||
|
||||
# This verifies that we're not accidentally seeing typing.Any, since str is assignable
|
||||
# to that but not to our locally defined class.
|
||||
y: Any = "not an Any" # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
## Subclass
|
||||
|
||||
The spec allows you to define subclasses of `Any`.
|
||||
|
||||
TODO: Handle assignments correctly. `Subclass` has an unknown superclass, which might be `int`. The
|
||||
assignment to `x` should not be allowed, even when the unknown superclass is `int`. The assignment
|
||||
to `y` should be allowed, since `Subclass` might have `int` as a superclass, and is therefore
|
||||
assignable to `int`.
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
class Subclass(Any):
|
||||
pass
|
||||
|
||||
reveal_type(Subclass.__mro__) # revealed: tuple[Literal[Subclass], Any, Literal[object]]
|
||||
|
||||
x: Subclass = 1 # error: [invalid-assignment]
|
||||
# TODO: no diagnostic
|
||||
y: int = Subclass() # error: [invalid-assignment]
|
||||
|
||||
def f() -> Subclass:
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: Subclass
|
||||
```
|
||||
@@ -0,0 +1,128 @@
|
||||
# `LiteralString`
|
||||
|
||||
`LiteralString` represents a string that is either defined directly within the source code or is
|
||||
made up of such components.
|
||||
|
||||
Parts of the test cases defined here were adapted from [the specification's examples][1].
|
||||
|
||||
## Usages
|
||||
|
||||
### Valid places
|
||||
|
||||
It can be used anywhere a type is accepted:
|
||||
|
||||
```py
|
||||
from typing import LiteralString
|
||||
|
||||
x: LiteralString
|
||||
|
||||
def f():
|
||||
reveal_type(x) # revealed: LiteralString
|
||||
```
|
||||
|
||||
### Within `Literal`
|
||||
|
||||
`LiteralString` cannot be used within `Literal`:
|
||||
|
||||
```py
|
||||
from typing import Literal, LiteralString
|
||||
|
||||
bad_union: Literal["hello", LiteralString] # error: [invalid-literal-parameter]
|
||||
bad_nesting: Literal[LiteralString] # error: [invalid-literal-parameter]
|
||||
```
|
||||
|
||||
### Parametrized
|
||||
|
||||
`LiteralString` cannot be parametrized.
|
||||
|
||||
```py
|
||||
from typing import LiteralString
|
||||
|
||||
a: LiteralString[str] # error: [invalid-type-parameter]
|
||||
b: LiteralString["foo"] # error: [invalid-type-parameter]
|
||||
```
|
||||
|
||||
### As a base class
|
||||
|
||||
Subclassing `LiteralString` leads to a runtime error.
|
||||
|
||||
```py
|
||||
from typing import LiteralString
|
||||
|
||||
class C(LiteralString): ... # error: [invalid-base]
|
||||
```
|
||||
|
||||
## Inference
|
||||
|
||||
### Common operations
|
||||
|
||||
```py
|
||||
foo: LiteralString = "foo"
|
||||
reveal_type(foo) # revealed: Literal["foo"]
|
||||
|
||||
bar: LiteralString = "bar"
|
||||
reveal_type(foo + bar) # revealed: Literal["foobar"]
|
||||
|
||||
baz: LiteralString = "baz"
|
||||
baz += foo
|
||||
reveal_type(baz) # revealed: Literal["bazfoo"]
|
||||
|
||||
qux = (foo, bar)
|
||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||
|
||||
# TODO: Infer "LiteralString"
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(call todo)
|
||||
|
||||
template: LiteralString = "{}, {}"
|
||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||
# TODO: Infer `LiteralString`
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(call todo)
|
||||
```
|
||||
|
||||
### Assignability
|
||||
|
||||
`Literal[""]` is assignable to `LiteralString`, and `LiteralString` is assignable to `str`, but not
|
||||
vice versa.
|
||||
|
||||
```py
|
||||
def coinflip() -> bool:
|
||||
return True
|
||||
|
||||
foo_1: Literal["foo"] = "foo"
|
||||
bar_1: LiteralString = foo_1 # fine
|
||||
|
||||
foo_2 = "foo" if coinflip() else "bar"
|
||||
reveal_type(foo_2) # revealed: Literal["foo", "bar"]
|
||||
bar_2: LiteralString = foo_2 # fine
|
||||
|
||||
foo_3: LiteralString = "foo" * 1_000_000_000
|
||||
bar_3: str = foo_2 # fine
|
||||
|
||||
baz_1: str = str()
|
||||
qux_1: LiteralString = baz_1 # error: [invalid-assignment]
|
||||
|
||||
baz_2: LiteralString = "baz" * 1_000_000_000
|
||||
qux_2: Literal["qux"] = baz_2 # error: [invalid-assignment]
|
||||
|
||||
baz_3 = "foo" if coinflip() else 1
|
||||
reveal_type(baz_3) # revealed: Literal["foo"] | Literal[1]
|
||||
qux_3: LiteralString = baz_3 # error: [invalid-assignment]
|
||||
```
|
||||
|
||||
### Narrowing
|
||||
|
||||
```py
|
||||
lorem: LiteralString = "lorem" * 1_000_000_000
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
|
||||
reveal_type(lorem) # revealed: LiteralString
|
||||
|
||||
if "" < lorem == "ipsum":
|
||||
reveal_type(lorem) # revealed: Literal["ipsum"]
|
||||
```
|
||||
|
||||
[1]: https://typing.readthedocs.io/en/latest/spec/literal.html#literalstring
|
||||
@@ -0,0 +1,62 @@
|
||||
# NoReturn & Never
|
||||
|
||||
`NoReturn` is used to annotate the return type for functions that never return. `Never` is the
|
||||
bottom type, representing the empty set of Python objects. These two annotations can be used
|
||||
interchangeably.
|
||||
|
||||
## Function Return Type Annotation
|
||||
|
||||
```py
|
||||
from typing import NoReturn
|
||||
|
||||
def stop() -> NoReturn:
|
||||
raise RuntimeError("no way")
|
||||
|
||||
# revealed: Never
|
||||
reveal_type(stop())
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import NoReturn, Never, Any
|
||||
|
||||
# error: [invalid-type-parameter] "Type `typing.Never` expected no type parameter"
|
||||
x: Never[int]
|
||||
a1: NoReturn
|
||||
# TODO: Test that `Never` is only available in Python >= 3.11
|
||||
a2: Never
|
||||
b1: Any
|
||||
b2: int
|
||||
|
||||
def f():
|
||||
# revealed: Never
|
||||
reveal_type(a1)
|
||||
# revealed: Never
|
||||
reveal_type(a2)
|
||||
|
||||
# Never is assignable to all types.
|
||||
v1: int = a1
|
||||
v2: str = a1
|
||||
# Other types are not assignable to Never except for Never (and Any).
|
||||
v3: Never = b1
|
||||
v4: Never = a2
|
||||
v5: Any = b2
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Never`"
|
||||
v6: Never = 1
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import NoReturn, Never
|
||||
|
||||
x: NoReturn
|
||||
y: Never
|
||||
|
||||
def f():
|
||||
# revealed: Never
|
||||
reveal_type(x)
|
||||
# revealed: Never
|
||||
reveal_type(y)
|
||||
```
|
||||
@@ -0,0 +1,47 @@
|
||||
# Optional
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Optional` is equivalent to a union of the given type with `None`.
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int]
|
||||
a1: Optional[bool]
|
||||
a2: Optional[Optional[bool]]
|
||||
a3: Optional[None]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
# revealed: bool | None
|
||||
reveal_type(a1)
|
||||
# revealed: bool | None
|
||||
reveal_type(a2)
|
||||
# revealed: None
|
||||
reveal_type(a3)
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int] = 1
|
||||
a = None
|
||||
# error: [invalid-assignment] "Object of type `Literal[""]` is not assignable to `int | None`"
|
||||
a = ""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Optional
|
||||
|
||||
a: Optional[int]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -9,10 +9,10 @@ Ts = TypeVarTuple("Ts")
|
||||
|
||||
def append_int(*args: *Ts) -> tuple[*Ts, int]:
|
||||
# TODO: should show some representation of the variadic generic type
|
||||
reveal_type(args) # revealed: @Todo
|
||||
reveal_type(args) # revealed: @Todo(function parameter type)
|
||||
|
||||
return (*args, 1)
|
||||
|
||||
# TODO should be tuple[Literal[True], Literal["a"], int]
|
||||
reveal_type(append_int(True, "a")) # revealed: @Todo
|
||||
reveal_type(append_int(True, "a")) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
@@ -0,0 +1,219 @@
|
||||
# String annotations
|
||||
|
||||
## Simple
|
||||
|
||||
```py
|
||||
def f() -> "int":
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
## Nested
|
||||
|
||||
```py
|
||||
def f() -> "'int'":
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
## Type expression
|
||||
|
||||
```py
|
||||
def f1() -> "int | str":
|
||||
return 1
|
||||
|
||||
def f2() -> "tuple[int, str]":
|
||||
return 1
|
||||
|
||||
reveal_type(f1()) # revealed: int | str
|
||||
reveal_type(f2()) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Partial
|
||||
|
||||
```py
|
||||
def f() -> tuple[int, "str"]:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Deferred
|
||||
|
||||
```py
|
||||
def f() -> "Foo":
|
||||
return Foo()
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: Foo
|
||||
```
|
||||
|
||||
## Deferred (undefined)
|
||||
|
||||
```py
|
||||
# error: [unresolved-reference]
|
||||
def f() -> "Foo":
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Partial deferred
|
||||
|
||||
```py
|
||||
def f() -> int | "Foo":
|
||||
return 1
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: int | Foo
|
||||
```
|
||||
|
||||
## `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f1() -> Literal["Foo", "Bar"]:
|
||||
return "Foo"
|
||||
|
||||
def f2() -> 'Literal["Foo", "Bar"]':
|
||||
return "Foo"
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f1()) # revealed: Literal["Foo", "Bar"]
|
||||
reveal_type(f2()) # revealed: Literal["Foo", "Bar"]
|
||||
```
|
||||
|
||||
## Various string kinds
|
||||
|
||||
```py
|
||||
# error: [annotation-raw-string] "Type expressions cannot use raw string literal"
|
||||
def f1() -> r"int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-f-string] "Type expressions cannot use f-strings"
|
||||
def f2() -> f"int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-byte-string] "Type expressions cannot use bytes literal"
|
||||
def f3() -> b"int":
|
||||
return 1
|
||||
|
||||
def f4() -> "int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-implicit-concat] "Type expressions cannot span multiple string literals"
|
||||
def f5() -> "in" "t":
|
||||
return 1
|
||||
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f6() -> "\N{LATIN SMALL LETTER I}nt":
|
||||
return 1
|
||||
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f7() -> "\x69nt":
|
||||
return 1
|
||||
|
||||
def f8() -> """int""":
|
||||
return 1
|
||||
|
||||
# error: [annotation-byte-string] "Type expressions cannot use bytes literal"
|
||||
def f9() -> "b'int'":
|
||||
return 1
|
||||
|
||||
reveal_type(f1()) # revealed: Unknown
|
||||
reveal_type(f2()) # revealed: Unknown
|
||||
reveal_type(f3()) # revealed: Unknown
|
||||
reveal_type(f4()) # revealed: int
|
||||
reveal_type(f5()) # revealed: Unknown
|
||||
reveal_type(f6()) # revealed: Unknown
|
||||
reveal_type(f7()) # revealed: Unknown
|
||||
reveal_type(f8()) # revealed: int
|
||||
reveal_type(f9()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Various string kinds in `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f() -> Literal["a", r"b", b"c", "d" "e", "\N{LATIN SMALL LETTER F}", "\x67", """h"""]:
|
||||
return "normal"
|
||||
|
||||
reveal_type(f()) # revealed: Literal["a", "b", "de", "f", "g", "h"] | Literal[b"c"]
|
||||
```
|
||||
|
||||
## Class variables
|
||||
|
||||
```py
|
||||
MyType = int
|
||||
|
||||
class Aliases:
|
||||
MyType = str
|
||||
|
||||
forward: "MyType"
|
||||
not_forward: MyType
|
||||
|
||||
reveal_type(Aliases.forward) # revealed: str
|
||||
reveal_type(Aliases.not_forward) # revealed: str
|
||||
```
|
||||
|
||||
## Annotated assignment
|
||||
|
||||
```py
|
||||
a: "int" = 1
|
||||
b: "'int'" = 1
|
||||
c: "Foo"
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Foo`"
|
||||
d: "Foo" = 1
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
c = Foo()
|
||||
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[1]
|
||||
reveal_type(c) # revealed: Foo
|
||||
reveal_type(d) # revealed: Foo
|
||||
```
|
||||
|
||||
## Parameter
|
||||
|
||||
TODO: Add tests once parameter inference is supported
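A minimal, hedged sketch of the shape such a test could take once parameter inference is supported; the revealed type mentioned in the comment is an assumption about future behavior, not current output.

```py
class Foo: ...

def f(v: "Foo"):
    # Hypothetical: once parameter inference is supported, this should reveal `Foo`.
    reveal_type(v)
```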
|
||||
|
||||
## Invalid expressions
|
||||
|
||||
The expressions in these string annotations aren't valid in this context, but we
|
||||
shouldn't panic.
|
||||
|
||||
```py
|
||||
a: "1 or 2"
|
||||
b: "(x := 1)"
|
||||
c: "1 + 2"
|
||||
d: "lambda x: x"
|
||||
e: "x if True else y"
|
||||
f: "{'a': 1, 'b': 2}"
|
||||
g: "{1, 2}"
|
||||
h: "[i for i in range(5)]"
|
||||
i: "{i for i in range(5)}"
|
||||
j: "{i: i for i in range(5)}"
|
||||
k: "(i for i in range(5))"
|
||||
l: "await 1"
|
||||
# error: [forward-annotation-syntax-error]
|
||||
m: "yield 1"
|
||||
# error: [forward-annotation-syntax-error]
|
||||
n: "yield from 1"
|
||||
o: "1 < 2"
|
||||
p: "call()"
|
||||
r: "[1, 2]"
|
||||
s: "(1, 2)"
|
||||
```
|
||||
@@ -1,9 +0,0 @@
|
||||
# String annotations
|
||||
|
||||
```py
|
||||
def f() -> "int":
|
||||
return 1
|
||||
|
||||
# TODO: We do not support string annotations, but we should not panic if we encounter them
|
||||
reveal_type(f()) # revealed: @Todo
|
||||
```
|
||||
@@ -0,0 +1,61 @@
|
||||
# Union
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Union` can be used to construct union types, the same as the `|` operator.
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a1: Union[int, bool]
|
||||
a2: Union[int, Union[float, str]]
|
||||
a3: Union[int, None]
|
||||
a4: Union[Union[float, str]]
|
||||
a5: Union[int]
|
||||
a6: Union[()]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
|
||||
# revealed: int
|
||||
reveal_type(a1)
|
||||
# revealed: int | float | str
|
||||
reveal_type(a2)
|
||||
# revealed: int | None
|
||||
reveal_type(a3)
|
||||
# revealed: float | str
|
||||
reveal_type(a4)
|
||||
# revealed: int
|
||||
reveal_type(a5)
|
||||
# revealed: Never
|
||||
reveal_type(a6)
|
||||
```
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a = 1
|
||||
a = ""
|
||||
a1: Union[int, bool]
|
||||
a1 = 1
|
||||
a1 = True
|
||||
# error: [invalid-assignment] "Object of type `Literal[b""]` is not assignable to `int | str`"
|
||||
a = b""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Union
|
||||
|
||||
a: Union[int, str]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -51,12 +51,12 @@ reveal_type(c) # revealed: tuple[str, int]
|
||||
reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
|
||||
|
||||
# TODO: homogenous tuples, PEP-646 tuples
|
||||
reveal_type(e) # revealed: @Todo
|
||||
reveal_type(f) # revealed: @Todo
|
||||
reveal_type(g) # revealed: @Todo
|
||||
reveal_type(e) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(f) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
# TODO: support more kinds of type expressions in annotations
|
||||
reveal_type(h) # revealed: @Todo
|
||||
reveal_type(h) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
reveal_type(i) # revealed: tuple[str | int, str | int]
|
||||
reveal_type(j) # revealed: tuple[str | int]
|
||||
@@ -110,3 +110,29 @@ c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = ((
|
||||
# error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `tuple[tuple[int, int], int]`"
|
||||
c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = "foo"
|
||||
```
|
||||
|
||||
## Future annotations are deferred
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
x: Foo
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
## Annotations in stub files are deferred
|
||||
|
||||
```pyi path=main.pyi
|
||||
x: Foo
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
@@ -35,6 +35,7 @@ class C:
|
||||
if flag:
|
||||
x = 2
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[2]
|
||||
reveal_type(C.y) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
@@ -9,14 +9,21 @@ def bool_instance() -> bool:
|
||||
flag = bool_instance()
|
||||
|
||||
if flag:
|
||||
class C:
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
else:
|
||||
class C:
|
||||
class C1:
|
||||
x = 2
|
||||
|
||||
reveal_type(C.x) # revealed: Literal[1, 2]
|
||||
class C2:
|
||||
if flag:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(C1.x) # revealed: Literal[1, 2]
|
||||
reveal_type(C2.x) # revealed: Literal[3, 4]
|
||||
```
|
||||
|
||||
## Inherited attributes
|
||||
@@ -53,3 +60,77 @@ reveal_type(A.__mro__)
|
||||
# `E` is earlier in the MRO than `F`, so we should use the type of `E.X`
|
||||
reveal_type(A.X) # revealed: Literal[42]
|
||||
```
|
||||
|
||||
## Unions with possibly unbound paths
|
||||
|
||||
### Definite boundness within a class
|
||||
|
||||
In this example, the `x` attribute is not defined in the `C2` element of the union:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2: ...
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
flag1 = bool_instance()
|
||||
flag2 = bool_instance()
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 3]
|
||||
```
|
||||
|
||||
### Possibly-unbound within a class
|
||||
|
||||
We raise the same diagnostic if the attribute is possibly-unbound in at least one element of the
|
||||
union:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2:
|
||||
if bool_instance():
|
||||
x = 2
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
flag1 = bool_instance()
|
||||
flag2 = bool_instance()
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## Unions with all paths unbound
|
||||
|
||||
If the symbol is unbound in all elements of the union, we detect that:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1: ...
|
||||
class C2: ...
|
||||
|
||||
flag = bool_instance()
|
||||
|
||||
C = C1 if flag else C2
|
||||
|
||||
# error: [unresolved-attribute] "Type `Literal[C1, C2]` has no attribute `x`"
|
||||
reveal_type(C.x) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -317,7 +317,7 @@ reveal_type(1 + A()) # revealed: int
|
||||
reveal_type(A() + "foo") # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type("foo" + A()) # revealed: @Todo
|
||||
reveal_type("foo" + A()) # revealed: @Todo(return type)
|
||||
|
||||
reveal_type(A() + b"foo") # revealed: A
|
||||
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
||||
@@ -325,7 +325,7 @@ reveal_type(b"foo" + A()) # revealed: bytes
|
||||
|
||||
reveal_type(A() + ()) # revealed: A
|
||||
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
||||
reveal_type(() + A()) # revealed: @Todo
|
||||
reveal_type(() + A()) # revealed: @Todo(return type)
|
||||
|
||||
literal_string_instance = "foo" * 1_000_000_000
|
||||
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
||||
@@ -334,7 +334,7 @@ reveal_type(literal_string_instance) # revealed: LiteralString
|
||||
reveal_type(A() + literal_string_instance) # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Operations involving instances of classes inheriting from `Any`
|
||||
|
||||
@@ -16,7 +16,16 @@ async def get_int_async() -> int:
|
||||
return 42
|
||||
|
||||
# TODO: we don't yet support `types.CoroutineType`, should be generic `Coroutine[Any, Any, int]`
|
||||
reveal_type(get_int_async()) # revealed: @Todo
|
||||
reveal_type(get_int_async()) # revealed: @Todo(generic types.CoroutineType)
|
||||
```
|
||||
|
||||
## Generic
|
||||
|
||||
```py
|
||||
def get_int[T]() -> int:
|
||||
return 42
|
||||
|
||||
reveal_type(get_int()) # revealed: int
|
||||
```
|
||||
|
||||
## Decorated
|
||||
@@ -35,7 +44,7 @@ def bar() -> str:
|
||||
return "bar"
|
||||
|
||||
# TODO: should reveal `int`, as the decorator replaces `bar` with `foo`
|
||||
reveal_type(bar()) # revealed: @Todo
|
||||
reveal_type(bar()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Invalid callable
|
||||
|
||||
@@ -58,7 +58,9 @@ reveal_type(c >= d) # revealed: Literal[True]
|
||||
#### Results with Ambiguity
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool: ...
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
@@ -134,23 +136,158 @@ reveal_type(c >= c) # revealed: Literal[True]
|
||||
|
||||
#### Non Boolean Rich Comparisons
|
||||
|
||||
Rich comparison methods defined in a class affect tuple comparisons as well. Proper type inference
|
||||
should be possible even in cases where these methods return non-boolean types.
|
||||
|
||||
Note: Tuples use lexicographic comparisons. If the `==` result for all paired elements in the tuple
|
||||
is True, the comparison then considers the tuple’s length. Regardless of the return type of the
|
||||
dunder methods, the final result can still be a boolean value.
|
||||
|
||||
(In CPython, `==` and `!=` on tuples always produce boolean results, regardless of the return type
|
||||
of the dunder methods.)
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, o) -> str: ...
|
||||
def __ne__(self, o) -> int: ...
|
||||
def __lt__(self, o) -> float: ...
|
||||
def __le__(self, o) -> object: ...
|
||||
def __gt__(self, o) -> tuple: ...
|
||||
def __ge__(self, o) -> list: ...
|
||||
def __eq__(self, o: object) -> str:
|
||||
return "hello"
|
||||
|
||||
def __ne__(self, o: object) -> bytes:
|
||||
return b"world"
|
||||
|
||||
def __lt__(self, o: A) -> float:
|
||||
return 3.14
|
||||
|
||||
def __le__(self, o: A) -> complex:
|
||||
return complex(0.5, -0.5)
|
||||
|
||||
def __gt__(self, o: A) -> tuple:
|
||||
return (1, 2, 3)
|
||||
|
||||
def __ge__(self, o: A) -> list:
|
||||
return [1, 2, 3]
|
||||
|
||||
a = (A(), A())
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: float | Literal[False]
|
||||
reveal_type(a <= a) # revealed: complex | Literal[True]
|
||||
reveal_type(a > a) # revealed: tuple | Literal[False]
|
||||
reveal_type(a >= a) # revealed: list | Literal[True]
|
||||
|
||||
# If lexicographic comparison is finished before comparing A()
|
||||
b = ("1_foo", A())
|
||||
c = ("2_bar", A())
|
||||
|
||||
reveal_type(b == c) # revealed: Literal[False]
|
||||
reveal_type(b != c) # revealed: Literal[True]
|
||||
reveal_type(b < c) # revealed: Literal[True]
|
||||
reveal_type(b <= c) # revealed: Literal[True]
|
||||
reveal_type(b > c) # revealed: Literal[False]
|
||||
reveal_type(b >= c) # revealed: Literal[False]
|
||||
|
||||
class B:
|
||||
def __lt__(self, o: B) -> set:
|
||||
return set()
|
||||
|
||||
reveal_type((A(), B()) < (A(), B())) # revealed: float | set | Literal[False]
|
||||
```
|
||||
|
||||
#### Special Handling of Eq and NotEq in Lexicographic Comparisons
|
||||
|
||||
> Example: `(int_instance(), "foo") == (int_instance(), "bar")`
|
||||
|
||||
`Eq` and `NotEq` have unique behavior compared to other operators in lexicographic comparisons.
|
||||
Specifically, for `Eq`, if any non-equal pair exists within the tuples being compared, we can
|
||||
immediately conclude that the result is `False`. Likewise, for `NotEq`, if any non-equal pair
|
||||
exists, we can immediately conclude that the result is `True`.
|
||||
|
||||
In contrast, with operators like `<` and `>`, the comparison must consider each pair of elements
|
||||
sequentially, and the final outcome might remain ambiguous until all pairs are compared.
|
||||
|
||||
```py
|
||||
def str_instance() -> str:
|
||||
return "hello"
|
||||
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
reveal_type("foo" == "bar") # revealed: Literal[False]
|
||||
reveal_type(("foo",) == ("bar",)) # revealed: Literal[False]
|
||||
reveal_type((4, "foo") == (4, "bar")) # revealed: Literal[False]
|
||||
reveal_type((int_instance(), "foo") == (int_instance(), "bar")) # revealed: Literal[False]
|
||||
|
||||
a = (str_instance(), int_instance(), "foo")
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: bool
|
||||
reveal_type(a <= a) # revealed: bool
|
||||
reveal_type(a > a) # revealed: bool
|
||||
reveal_type(a >= a) # revealed: bool
|
||||
|
||||
b = (str_instance(), int_instance(), "bar")
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: bool
|
||||
reveal_type(a <= b) # revealed: bool
|
||||
reveal_type(a > b) # revealed: bool
|
||||
reveal_type(a >= b) # revealed: bool
|
||||
|
||||
c = (str_instance(), int_instance(), "foo", "different_length")
|
||||
reveal_type(a == c) # revealed: Literal[False]
|
||||
reveal_type(a != c) # revealed: Literal[True]
|
||||
reveal_type(a < c) # revealed: bool
|
||||
reveal_type(a <= c) # revealed: bool
|
||||
reveal_type(a > c) # revealed: bool
|
||||
reveal_type(a >= c) # revealed: bool
|
||||
```
|
||||
|
||||
#### Error Propagation
|
||||
|
||||
Errors occurring within a tuple comparison should propagate outward. However, if the tuple
|
||||
comparison can be decided before the error-producing elements are reached, the error should not be raised.
|
||||
|
||||
```py
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
def str_instance() -> str:
|
||||
return "hello"
|
||||
|
||||
class A: ...
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`"
|
||||
A() < A()
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`"
|
||||
A() <= A()
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`"
|
||||
A() > A()
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`"
|
||||
A() >= A()
|
||||
|
||||
a = (0, int_instance(), A())
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a < a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a <= a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a > a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a >= a) # revealed: Unknown
|
||||
|
||||
# Comparison between `a` and `b` should only involve the first elements, `Literal[0]` and `Literal[99999]`,
|
||||
# and should terminate immediately.
|
||||
b = (99999, int_instance(), A())
|
||||
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
### Membership Test Comparisons
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class A: ...
|
||||
|
||||
a = 1 in 7 # error: "Operator `in` is not supported for types `Literal[1]` and `Literal[7]`"
|
||||
reveal_type(a) # revealed: bool
|
||||
|
||||
@@ -33,4 +35,8 @@ reveal_type(e) # revealed: bool
|
||||
f = (1, 2) < (1, "hello")
|
||||
# TODO: should be Unknown, once operand type check is implemented
|
||||
reveal_type(f) # revealed: bool
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[bool, A]` with `tuple[bool, A]`"
|
||||
g = (bool_instance(), A()) < (bool_instance(), A())
|
||||
reveal_type(g) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -41,21 +41,20 @@ except EXCEPTIONS as f:
|
||||
## Dynamic exception types
|
||||
|
||||
```py
|
||||
# TODO: we should not emit these `call-possibly-unbound-method` errors for `tuple.__class_getitem__`
|
||||
def foo(
|
||||
x: type[AttributeError],
|
||||
y: tuple[type[OSError], type[RuntimeError]], # error: [call-possibly-unbound-method]
|
||||
z: tuple[type[BaseException], ...], # error: [call-possibly-unbound-method]
|
||||
y: tuple[type[OSError], type[RuntimeError]],
|
||||
z: tuple[type[BaseException], ...],
|
||||
):
|
||||
try:
|
||||
help()
|
||||
except x as e:
|
||||
# TODO: should be `AttributeError`
|
||||
reveal_type(e) # revealed: @Todo
|
||||
reveal_type(e) # revealed: @Todo(exception type)
|
||||
except y as f:
|
||||
# TODO: should be `OSError | RuntimeError`
|
||||
reveal_type(f) # revealed: @Todo
|
||||
reveal_type(f) # revealed: @Todo(exception type)
|
||||
except z as g:
|
||||
# TODO: should be `BaseException`
|
||||
reveal_type(g) # revealed: @Todo
|
||||
reveal_type(g) # revealed: @Todo(exception type)
|
||||
```
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
# Attribute access
|
||||
|
||||
## Boundness
|
||||
|
||||
```py
|
||||
def flag() -> bool: ...
|
||||
|
||||
class A:
|
||||
always_bound = 1
|
||||
|
||||
if flag():
|
||||
union = 1
|
||||
else:
|
||||
union = "abc"
|
||||
|
||||
if flag():
|
||||
possibly_unbound = "abc"
|
||||
|
||||
reveal_type(A.always_bound) # revealed: Literal[1]
|
||||
|
||||
reveal_type(A.union) # revealed: Literal[1] | Literal["abc"]
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `possibly_unbound` on type `Literal[A]` is possibly unbound"
|
||||
reveal_type(A.possibly_unbound) # revealed: Literal["abc"]
|
||||
|
||||
# error: [unresolved-attribute] "Type `Literal[A]` has no attribute `non_existent`"
|
||||
reveal_type(A.non_existent) # revealed: Unknown
|
||||
```
|
||||
@@ -22,3 +22,22 @@ reveal_type(1 if None else 2) # revealed: Literal[2]
|
||||
reveal_type(1 if "" else 2) # revealed: Literal[2]
|
||||
reveal_type(1 if 0 else 2) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Leaked Narrowing Constraint
|
||||
|
||||
(issue #14588)
|
||||
|
||||
The test inside an if expression should not affect code outside of the expression.
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
x: Literal[42, "hello"] = 42 if bool_instance() else "hello"
|
||||
|
||||
reveal_type(x) # revealed: Literal[42] | Literal["hello"]
|
||||
|
||||
_ = ... if isinstance(x, str) else ...
|
||||
|
||||
reveal_type(x) # revealed: Literal[42] | Literal["hello"]
|
||||
```
|
||||
|
||||
@@ -0,0 +1,219 @@
|
||||
# Length (`len()`)
|
||||
|
||||
## Literal and constructed iterables
|
||||
|
||||
### Strings and bytes literals
|
||||
|
||||
```py
|
||||
reveal_type(len("no\rmal")) # revealed: Literal[6]
|
||||
reveal_type(len(r"aw stri\ng")) # revealed: Literal[10]
|
||||
reveal_type(len(r"conca\t" "ena\tion")) # revealed: Literal[14]
|
||||
reveal_type(len(b"ytes lite" rb"al")) # revealed: Literal[11]
|
||||
reveal_type(len("𝒰𝕹🄸©🕲𝕕ℇ")) # revealed: Literal[7]
|
||||
|
||||
reveal_type( # revealed: Literal[7]
|
||||
len(
|
||||
"""foo
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[9]
|
||||
len(
|
||||
r"""foo\r
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[7]
|
||||
len(
|
||||
b"""foo
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
reveal_type( # revealed: Literal[9]
|
||||
len(
|
||||
rb"""foo\r
|
||||
bar"""
|
||||
)
|
||||
)
|
||||
```
|
||||
|
||||
### Tuples
|
||||
|
||||
```py
|
||||
reveal_type(len(())) # revealed: Literal[0]
|
||||
reveal_type(len((1,))) # revealed: Literal[1]
|
||||
reveal_type(len((1, 2))) # revealed: Literal[2]
|
||||
|
||||
# TODO: Handle constructor calls
|
||||
reveal_type(len(tuple())) # revealed: int
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[0]
|
||||
reveal_type(len((*[],))) # revealed: Literal[1]
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[1]
|
||||
reveal_type( # revealed: Literal[2]
|
||||
len(
|
||||
(
|
||||
*[],
|
||||
1,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[2]
|
||||
reveal_type(len((*[], 1, 2))) # revealed: Literal[3]
|
||||
|
||||
# TODO: Handle star unpacks; Should be: Literal[0]
|
||||
reveal_type(len((*[], *{}))) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
### Lists, sets and dictionaries
|
||||
|
||||
```py
|
||||
reveal_type(len([])) # revealed: int
|
||||
reveal_type(len([1])) # revealed: int
|
||||
reveal_type(len([1, 2])) # revealed: int
|
||||
reveal_type(len([*{}, *dict()])) # revealed: int
|
||||
|
||||
reveal_type(len({})) # revealed: int
|
||||
reveal_type(len({**{}})) # revealed: int
|
||||
reveal_type(len({**{}, **{}})) # revealed: int
|
||||
|
||||
reveal_type(len({1})) # revealed: int
|
||||
reveal_type(len({1, 2})) # revealed: int
|
||||
reveal_type(len({*[], 2})) # revealed: int
|
||||
|
||||
reveal_type(len(list())) # revealed: int
|
||||
reveal_type(len(set())) # revealed: int
|
||||
reveal_type(len(dict())) # revealed: int
|
||||
reveal_type(len(frozenset())) # revealed: int
|
||||
```
|
||||
|
||||
## `__len__`
|
||||
|
||||
The returned value of `__len__` is implicitly and recursively converted to `int`.
|
||||
|
||||
### Literal integers
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class Zero:
|
||||
def __len__(self) -> Literal[0]: ...
|
||||
|
||||
class ZeroOrOne:
|
||||
def __len__(self) -> Literal[0, 1]: ...
|
||||
|
||||
class ZeroOrTrue:
|
||||
def __len__(self) -> Literal[0, True]: ...
|
||||
|
||||
class OneOrFalse:
|
||||
def __len__(self) -> Literal[1] | Literal[False]: ...
|
||||
|
||||
class OneOrFoo:
|
||||
def __len__(self) -> Literal[1, "foo"]: ...
|
||||
|
||||
class ZeroOrStr:
|
||||
def __len__(self) -> Literal[0] | str: ...
|
||||
|
||||
reveal_type(len(Zero())) # revealed: Literal[0]
|
||||
reveal_type(len(ZeroOrOne())) # revealed: Literal[0, 1]
|
||||
reveal_type(len(ZeroOrTrue())) # revealed: Literal[0, 1]
|
||||
reveal_type(len(OneOrFalse())) # revealed: Literal[0, 1]
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(OneOrFoo())) # revealed: int
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(ZeroOrStr())) # revealed: int
|
||||
```
|
||||
|
||||
### Literal booleans
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class LiteralTrue:
|
||||
def __len__(self) -> Literal[True]: ...
|
||||
|
||||
class LiteralFalse:
|
||||
def __len__(self) -> Literal[False]: ...
|
||||
|
||||
reveal_type(len(LiteralTrue())) # revealed: Literal[1]
|
||||
reveal_type(len(LiteralFalse())) # revealed: Literal[0]
|
||||
```
|
||||
|
||||
### Enums
|
||||
|
||||
```py
|
||||
from enum import Enum, auto
|
||||
from typing import Literal
|
||||
|
||||
class SomeEnum(Enum):
|
||||
AUTO = auto()
|
||||
INT = 2
|
||||
STR = "4"
|
||||
TUPLE = (8, "16")
|
||||
INT_2 = 3_2
|
||||
|
||||
class Auto:
|
||||
def __len__(self) -> Literal[SomeEnum.AUTO]: ...
|
||||
|
||||
class Int:
|
||||
def __len__(self) -> Literal[SomeEnum.INT]: ...
|
||||
|
||||
class Str:
|
||||
def __len__(self) -> Literal[SomeEnum.STR]: ...
|
||||
|
||||
class Tuple:
|
||||
def __len__(self) -> Literal[SomeEnum.TUPLE]: ...
|
||||
|
||||
class IntUnion:
|
||||
def __len__(self) -> Literal[SomeEnum.INT, SomeEnum.INT_2]: ...
|
||||
|
||||
reveal_type(len(Auto())) # revealed: int
|
||||
reveal_type(len(Int())) # revealed: Literal[2]
|
||||
reveal_type(len(Str())) # revealed: int
|
||||
reveal_type(len(Tuple())) # revealed: int
|
||||
reveal_type(len(IntUnion())) # revealed: Literal[2, 32]
|
||||
```
|
||||
|
||||
### Negative integers
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class Negative:
|
||||
def __len__(self) -> Literal[-1]: ...
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(Negative())) # revealed: int
|
||||
```
|
||||
|
||||
### Wrong signature
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class SecondOptionalArgument:
|
||||
def __len__(self, v: int = 0) -> Literal[0]: ...
|
||||
|
||||
class SecondRequiredArgument:
|
||||
def __len__(self, v: int) -> Literal[1]: ...
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(SecondOptionalArgument())) # revealed: Literal[0]
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(SecondRequiredArgument())) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### No `__len__`
|
||||
|
||||
```py
|
||||
class NoDunderLen:
|
||||
pass
|
||||
|
||||
# TODO: Emit a diagnostic
|
||||
reveal_type(len(NoDunderLen())) # revealed: int
|
||||
```
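For context (an aside, not part of the diff): at runtime, `len()` raises `TypeError` for an object without `__len__`, which is the behavior the TODO's future diagnostic would reflect.

```py
class NoDunderLen:
    pass

try:
    len(NoDunderLen())
except TypeError as exc:
    # At runtime: "object of type 'NoDunderLen' has no len()"
    print(exc)
```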
|
||||
@@ -6,13 +6,9 @@ Basic PEP 695 generics
|
||||
|
||||
```py
|
||||
class MyBox[T]:
|
||||
# TODO: `T` is defined here
|
||||
# error: [unresolved-reference] "Name `T` used when not defined"
|
||||
data: T
|
||||
box_model_number = 695
|
||||
|
||||
# TODO: `T` is defined here
|
||||
# error: [unresolved-reference] "Name `T` used when not defined"
|
||||
def __init__(self, data: T):
|
||||
self.data = data
|
||||
|
||||
@@ -22,7 +18,7 @@ box: MyBox[int] = MyBox(5)
|
||||
wrong_innards: MyBox[int] = MyBox("five")
|
||||
|
||||
# TODO reveal int
|
||||
reveal_type(box.data) # revealed: @Todo
|
||||
reveal_type(box.data) # revealed: @Todo(instance attributes)
|
||||
|
||||
reveal_type(MyBox.box_model_number) # revealed: Literal[695]
|
||||
```
|
||||
@@ -31,24 +27,19 @@ reveal_type(MyBox.box_model_number) # revealed: Literal[695]
|
||||
|
||||
```py
|
||||
class MyBox[T]:
|
||||
# TODO: `T` is defined here
|
||||
# error: [unresolved-reference] "Name `T` used when not defined"
|
||||
data: T
|
||||
|
||||
# TODO: `T` is defined here
|
||||
# error: [unresolved-reference] "Name `T` used when not defined"
|
||||
def __init__(self, data: T):
|
||||
self.data = data
|
||||
|
||||
# TODO not error on the subscripting or the use of type param
|
||||
# error: [unresolved-reference] "Name `T` used when not defined"
|
||||
# TODO not error on the subscripting
|
||||
# error: [non-subscriptable]
|
||||
class MySecureBox[T](MyBox[T]): ...
|
||||
|
||||
secure_box: MySecureBox[int] = MySecureBox(5)
|
||||
reveal_type(secure_box) # revealed: MySecureBox
|
||||
# TODO reveal int
|
||||
reveal_type(secure_box.data) # revealed: @Todo
|
||||
reveal_type(secure_box.data) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Cyclical class definition
|
||||
@@ -66,3 +57,23 @@ class S[T](Seq[S]): ... # error: [non-subscriptable]
|
||||
|
||||
reveal_type(S) # revealed: Literal[S]
|
||||
```
|
||||
|
||||
## Type params
|
||||
|
||||
A PEP 695 type variable defines a value of type `typing.TypeVar`.
|
||||
|
||||
```py
|
||||
def f[T]():
|
||||
reveal_type(T) # revealed: T
|
||||
reveal_type(T.__name__) # revealed: Literal["T"]
|
||||
```
|
||||
|
||||
## Minimum two constraints
|
||||
|
||||
A typevar with fewer than two constraints emits a diagnostic:
|
||||
|
||||
```py
|
||||
# error: [invalid-typevar-constraints] "TypeVar must have at least two constrained types"
|
||||
def f[T: (int,)]():
|
||||
pass
|
||||
```
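For contrast with the diagnostic case above, a minimal sketch (not part of the diff) of a constrained type variable that satisfies the two-constraint minimum; the function name and body are illustrative only.

```py
def pick[T: (int, str)](a: T, b: T) -> T:
    # `T` is constrained to exactly `int` or `str`; two constrained types are
    # supplied, so no `invalid-typevar-constraints` diagnostic is expected.
    return a if a else b
```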
|
||||
|
||||
@@ -21,6 +21,7 @@ reveal_type(y)
|
||||
```
|
||||
|
||||
```py
|
||||
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound` is possibly unbound"
|
||||
from maybe_unbound import x, y
|
||||
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
@@ -50,6 +51,7 @@ reveal_type(y)
|
||||
Importing an annotated name prefers the declared type over the inferred type:
|
||||
|
||||
```py
|
||||
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound_annotated` is possibly unbound"
|
||||
from maybe_unbound_annotated import x, y
|
||||
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
# Syntax errors
|
||||
|
||||
Test cases to ensure that red knot does not panic if there are syntax errors in the source code.
|
||||
|
||||
## Keyword as identifiers
|
||||
|
||||
When keywords are used as identifiers, the parser recovers from this syntax error by emitting an
|
||||
error and including the text value of the keyword to create the `Identifier` node.
|
||||
|
||||
### Name expression
|
||||
|
||||
```py
|
||||
# error: [invalid-syntax]
|
||||
pass = 1
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
type pass = 1
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
def True(for):
|
||||
# error: [invalid-syntax]
|
||||
pass
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `pass` used when not defined"
|
||||
for while in pass:
|
||||
pass
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `in` used when not defined"
|
||||
while in:
|
||||
pass
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `match` used when not defined"
|
||||
match while:
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `case` used when not defined"
|
||||
case in:
|
||||
# error: [invalid-syntax]
|
||||
# error: [invalid-syntax]
|
||||
pass
|
||||
```
|
||||
|
||||
### Attribute expression
|
||||
|
||||
```py
|
||||
# TODO: Check when support for attribute expressions is added
|
||||
|
||||
# error: [invalid-syntax]
|
||||
# error: [unresolved-reference] "Name `foo` used when not defined"
|
||||
for x in foo.pass:
|
||||
pass
|
||||
```
|
||||
@@ -51,6 +51,8 @@ invalid1: Literal[3 + 4]
|
||||
invalid2: Literal[4 + 3j]
|
||||
# error: [invalid-literal-parameter]
|
||||
invalid3: Literal[(3, 4)]
|
||||
|
||||
hello = "hello"
|
||||
invalid4: Literal[
|
||||
1 + 2, # error: [invalid-literal-parameter]
|
||||
"foo",
|
||||
@@ -76,7 +78,7 @@ from other import Literal
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
reveal_type(a1) # revealed: @Todo
|
||||
reveal_type(a1) # revealed: @Todo(generics)
|
||||
```
|
||||
|
||||
## Detecting typing_extensions.Literal
|
||||
|
||||
@@ -18,7 +18,7 @@ async def foo():
|
||||
pass
|
||||
|
||||
# TODO: should reveal `Unknown` because `__aiter__` is not defined
|
||||
# revealed: @Todo
|
||||
# revealed: @Todo(async iterables/iterators)
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x)
|
||||
```
|
||||
@@ -40,6 +40,6 @@ async def foo():
|
||||
pass
|
||||
|
||||
# error: [possibly-unresolved-reference]
|
||||
# revealed: @Todo
|
||||
# revealed: @Todo(async iterables/iterators)
|
||||
reveal_type(x)
|
||||
```
|
||||
@@ -52,3 +52,29 @@ else:
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
reveal_type(y) # revealed: Literal[1, 2, 4]
|
||||
```
|
||||
|
||||
## Nested while loops
|
||||
|
||||
```py
|
||||
def flag() -> bool:
|
||||
return True
|
||||
|
||||
x = 1
|
||||
|
||||
while flag():
|
||||
x = 2
|
||||
|
||||
while flag():
|
||||
x = 3
|
||||
if flag():
|
||||
break
|
||||
else:
|
||||
x = 4
|
||||
|
||||
if flag():
|
||||
break
|
||||
else:
|
||||
x = 5
|
||||
|
||||
reveal_type(x) # revealed: Literal[3, 4, 5]
|
||||
```
|
||||
|
||||
@@ -171,7 +171,7 @@ def f(*args, **kwargs) -> int: ...
|
||||
class A(metaclass=f): ...
|
||||
|
||||
# TODO should be `type[int]`
|
||||
reveal_type(A.__class__) # revealed: @Todo
|
||||
reveal_type(A.__class__) # revealed: @Todo(metaclass not a class)
|
||||
```
|
||||
|
||||
## Cyclic
|
||||
|
||||
@@ -256,7 +256,7 @@ class O: ...
|
||||
class X(O): ...
|
||||
class Y(O): ...
|
||||
|
||||
if bool():
|
||||
if returns_bool():
|
||||
foo = Y
|
||||
else:
|
||||
foo = object
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
## Narrowing for `bool(..)` checks
|
||||
|
||||
```py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x = 1 if flag() else None
|
||||
|
||||
# valid invocation, positive
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None):
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
# valid invocation, negative
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if not bool(x is not None):
|
||||
reveal_type(x) # revealed: None
|
||||
|
||||
# no args/narrowing
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if not bool():
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
|
||||
# invalid invocation, too many positional args
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None, 5): # TODO diagnostic
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
|
||||
# invalid invocation, too many kwargs
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
if bool(x is not None, y=5): # TODO diagnostic
|
||||
reveal_type(x) # revealed: Literal[1] | None
|
||||
```
|
||||
@@ -102,6 +102,9 @@ else:
|
||||
### Handling of `None`
|
||||
|
||||
```py
|
||||
# TODO: this error should ideally go away once we (1) understand `sys.version_info` branches,
|
||||
# and (2) set the target Python version for this test to 3.10.
|
||||
# error: [possibly-unbound-import] "Member `NoneType` of module `types` is possibly unbound"
|
||||
from types import NoneType
|
||||
|
||||
def flag() -> bool: ...
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
# Consolidating narrowed types after if statement
|
||||
|
||||
## After if-else statements, narrowing has no effect if the variable is not mutated in any branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int | None
|
||||
```
|
||||
|
||||
## Narrowing can have a persistent effect if the variable is mutated in one branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 10
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## An if statement without an explicit `else` branch is equivalent to one with a no-op `else` branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
y = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
|
||||
if y is None:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
reveal_type(y) # revealed: int | None
|
||||
```
|
||||
|
||||
## An if-elif without an explicit `else` branch is equivalent to one with an empty `else` branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
elif x > 50:
|
||||
x = 50
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
152  crates/red_knot_python_semantic/resources/mdtest/narrow/type.md  Normal file
@@ -0,0 +1,152 @@
|
||||
# Narrowing for checks involving `type(x)`
|
||||
|
||||
## `type(x) is C`
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is A:
|
||||
reveal_type(x) # revealed: A
|
||||
else:
|
||||
# It would be wrong to infer `B` here. The type
|
||||
# of `x` could be a subclass of `A`, so we need
|
||||
# to infer the full union type:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
|
||||
|
||||
## `type(x) is not C`
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is not A:
|
||||
# Same reasoning as above: no narrowing should occur here.
|
||||
reveal_type(x) # revealed: A | B
|
||||
else:
|
||||
reveal_type(x) # revealed: A
|
||||
```
|
||||
|
||||
## `type(x) == C`, `type(x) != C`
|
||||
|
||||
No narrowing can occur for equality comparisons, since there might be a custom `__eq__`
|
||||
implementation on the metaclass.
|
||||
|
||||
TODO: Narrowing might be possible in some cases where the classes themselves are `@final` or their
|
||||
metaclass is `@final`.
|
||||
|
||||
```py
|
||||
class IsEqualToEverything(type):
|
||||
def __eq__(cls, other):
|
||||
return True
|
||||
|
||||
class A(metaclass=IsEqualToEverything): ...
|
||||
class B(metaclass=IsEqualToEverything): ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return B()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) == A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
|
||||
if type(x) != A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
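A purely illustrative sketch of the `@final` case mentioned in the TODO above (not part of the diff); the class names are hypothetical. When a class is final and its metaclass is plain `type` (default identity-based `__eq__`), an equality check can only succeed when the types are identical, so narrowing would be sound in principle.

```py
from typing import final

@final
class C: ...

class D: ...

def f(x: C | D) -> None:
    if type(x) == C:
        # `C` has no subclasses and no custom metaclass `__eq__`, so this
        # comparison behaves like `type(x) is C`; a checker could narrow
        # `x` to `C` here without being unsound.
        ...
```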
|
||||
|
||||
## No narrowing for custom `type` callable
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def type(x):
|
||||
return int
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
else:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
|
||||
|
||||
## No narrowing for multiple arguments
|
||||
|
||||
No narrowing should occur if `type` is used to dynamically create a class:
|
||||
|
||||
```py
|
||||
def get_str_or_int() -> str | int:
|
||||
return "test"
|
||||
|
||||
x = get_str_or_int()
|
||||
|
||||
if type(x, (), {}) is str:
|
||||
reveal_type(x) # revealed: str | int
|
||||
else:
|
||||
reveal_type(x) # revealed: str | int
|
||||
```
|
||||
|
||||
## No narrowing for keyword arguments
|
||||
|
||||
`type` can't be used with a keyword argument:
|
||||
|
||||
```py
|
||||
def get_str_or_int() -> str | int:
|
||||
return "test"
|
||||
|
||||
x = get_str_or_int()
|
||||
|
||||
# TODO: we could issue a diagnostic here
|
||||
if type(object=x) is str:
|
||||
reveal_type(x) # revealed: str | int
|
||||
```
|
||||
|
||||
## Narrowing if `type` is aliased
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias_for_type = type
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if alias_for_type(x) is A:
|
||||
reveal_type(x) # revealed: A
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
```py
|
||||
class Base: ...
|
||||
class Derived(Base): ...
|
||||
|
||||
def get_base() -> Base:
|
||||
return Base()
|
||||
|
||||
x = get_base()
|
||||
|
||||
if type(x) is Base:
|
||||
    # Ideally, this could be narrower, but there is no way to
    # express a constraint like `Base & ~ProperSubtypeOf[Base]`.
|
||||
reveal_type(x) # revealed: Base
|
||||
```
|
||||
@@ -0,0 +1,13 @@
|
||||
# Regression test for #14334
|
||||
|
||||
Regression test for [this issue](https://github.com/astral-sh/ruff/issues/14334).
|
||||
|
||||
```py path=base.py
|
||||
# error: [invalid-base]
|
||||
class Base(2): ...
|
||||
```
|
||||
|
||||
```py path=a.py
|
||||
# No error here
|
||||
from base import Base
|
||||
```
|
||||
@@ -17,8 +17,7 @@ reveal_type(__doc__) # revealed: str | None
|
||||
# (needs support for `*` imports)
|
||||
reveal_type(__spec__) # revealed: Unknown | None
|
||||
|
||||
# TODO: generics
|
||||
reveal_type(__path__) # revealed: @Todo
|
||||
reveal_type(__path__) # revealed: @Todo(generics)
|
||||
|
||||
class X:
|
||||
reveal_type(__name__) # revealed: str
|
||||
@@ -64,7 +63,7 @@ reveal_type(typing.__class__) # revealed: Literal[type]
|
||||
|
||||
# TODO: needs support for attribute access on instances, properties and generics;
|
||||
# should be `dict[str, Any]`
|
||||
reveal_type(typing.__dict__) # revealed: @Todo
|
||||
reveal_type(typing.__dict__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
Typeshed includes a fake `__getattr__` method in the stub for `types.ModuleType` to help out with
|
||||
@@ -74,6 +73,7 @@ we're dealing with:
|
||||
```py path=__getattr__.py
|
||||
import typing
|
||||
|
||||
# error: [unresolved-attribute]
|
||||
reveal_type(typing.__getattr__) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -95,8 +95,8 @@ from foo import __dict__ as foo_dict
|
||||
|
||||
# TODO: needs support for attribute access on instances, properties, and generics;
|
||||
# should be `dict[str, Any]` for both of these:
|
||||
reveal_type(foo.__dict__) # revealed: @Todo
|
||||
reveal_type(foo_dict) # revealed: @Todo
|
||||
reveal_type(foo.__dict__) # revealed: @Todo(instance attributes)
|
||||
reveal_type(foo_dict) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Conditionally global or `ModuleType` attribute
|
||||
|
||||
@@ -27,7 +27,7 @@ def int_instance() -> int:
|
||||
|
||||
a = b"abcde"[int_instance()]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(a) # revealed: @Todo
|
||||
reveal_type(a) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Slices
|
||||
@@ -47,11 +47,11 @@ def int_instance() -> int: ...
|
||||
|
||||
byte_slice1 = b[int_instance() : int_instance()]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(byte_slice1) # revealed: @Todo
|
||||
reveal_type(byte_slice1) # revealed: @Todo(return type)
|
||||
|
||||
def bytes_instance() -> bytes: ...
|
||||
|
||||
byte_slice2 = bytes_instance()[0:5]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(byte_slice2) # revealed: @Todo
|
||||
reveal_type(byte_slice2) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
@@ -21,10 +21,11 @@ reveal_type(Identity[0]) # revealed: str
|
||||
## Class getitem union
|
||||
|
||||
```py
|
||||
flag = True
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class UnionClassGetItem:
|
||||
if flag:
|
||||
if bool_instance():
|
||||
|
||||
def __class_getitem__(cls, item: int) -> str:
|
||||
return item
|
||||
@@ -59,9 +60,10 @@ reveal_type(x[0]) # revealed: str | int
|
||||
## Class getitem with unbound method union
|
||||
|
||||
```py
|
||||
flag = True
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
if flag:
|
||||
if bool_instance():
|
||||
class Spam:
|
||||
def __class_getitem__(self, x: int) -> str:
|
||||
return "foo"
|
||||
@@ -77,9 +79,10 @@ reveal_type(Spam[42])
|
||||
## TODO: Class getitem non-class union
|
||||
|
||||
```py
|
||||
flag = True
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
if flag:
|
||||
if bool_instance():
|
||||
class Eggs:
|
||||
def __class_getitem__(self, x: int) -> str:
|
||||
return "foo"
|
||||
|
||||
@@ -30,10 +30,11 @@ reveal_type(Identity()[0]) # revealed: int
|
||||
## Getitem union
|
||||
|
||||
```py
|
||||
flag = True
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class Identity:
|
||||
if flag:
|
||||
if bool_instance():
|
||||
|
||||
def __getitem__(self, index: int) -> int:
|
||||
return index
|
||||
|
||||
@@ -12,13 +12,13 @@ x = [1, 2, 3]
|
||||
reveal_type(x) # revealed: list
|
||||
|
||||
# TODO reveal int
|
||||
reveal_type(x[0]) # revealed: @Todo
|
||||
reveal_type(x[0]) # revealed: @Todo(return type)
|
||||
|
||||
# TODO reveal list
|
||||
reveal_type(x[0:1]) # revealed: @Todo
|
||||
reveal_type(x[0:1]) # revealed: @Todo(return type)
|
||||
|
||||
# TODO error
|
||||
reveal_type(x["a"]) # revealed: @Todo
|
||||
reveal_type(x["a"]) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Assignments within list assignment
|
||||
|
||||
@@ -23,7 +23,7 @@ def int_instance() -> int: ...
|
||||
|
||||
a = "abcde"[int_instance()]
|
||||
# TODO: Support overloads... Should be `str`
|
||||
reveal_type(a) # revealed: @Todo
|
||||
reveal_type(a) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Slices
|
||||
@@ -78,13 +78,13 @@ def int_instance() -> int: ...
|
||||
|
||||
substring1 = s[int_instance() : int_instance()]
|
||||
# TODO: Support overloads... Should be `LiteralString`
|
||||
reveal_type(substring1) # revealed: @Todo
|
||||
reveal_type(substring1) # revealed: @Todo(return type)
|
||||
|
||||
def str_instance() -> str: ...
|
||||
|
||||
substring2 = str_instance()[0:5]
|
||||
# TODO: Support overloads... Should be `str`
|
||||
reveal_type(substring2) # revealed: @Todo
|
||||
reveal_type(substring2) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Unsupported slice types
|
||||
|
||||
@@ -71,5 +71,5 @@ def int_instance() -> int: ...
|
||||
|
||||
tuple_slice = t[int_instance() : int_instance()]
|
||||
# TODO: Support overloads... Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
|
||||
reveal_type(tuple_slice) # revealed: @Todo
|
||||
reveal_type(tuple_slice) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
@@ -0,0 +1,138 @@
|
||||
# `sys.version_info`
|
||||
|
||||
## The type of `sys.version_info`
|
||||
|
||||
The type of `sys.version_info` is `sys._version_info`, at least according to typeshed's stubs (which
|
||||
we treat as the single source of truth for the standard library). This is quite a complicated type
|
||||
in typeshed, so there are many things we don't fully understand about the type yet; this is the
|
||||
source of several TODOs in this test file. Many of these TODOs should be naturally fixed as we
|
||||
implement more type-system features in the future.
|
||||
|
||||
```py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info) # revealed: _version_info
|
||||
```
|
||||
|
||||
## Literal types from comparisons
|
||||
|
||||
Comparing `sys.version_info` with a 2-element tuple of literal integers always produces a `Literal`
|
||||
type:
|
||||
|
||||
```py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info >= (3, 9)) # revealed: Literal[True]
|
||||
reveal_type((3, 9) <= sys.version_info) # revealed: Literal[True]
|
||||
|
||||
reveal_type(sys.version_info > (3, 9)) # revealed: Literal[True]
|
||||
reveal_type((3, 9) < sys.version_info) # revealed: Literal[True]
|
||||
|
||||
reveal_type(sys.version_info < (3, 9)) # revealed: Literal[False]
|
||||
reveal_type((3, 9) > sys.version_info) # revealed: Literal[False]
|
||||
|
||||
reveal_type(sys.version_info <= (3, 9)) # revealed: Literal[False]
|
||||
reveal_type((3, 9) >= sys.version_info) # revealed: Literal[False]
|
||||
|
||||
reveal_type(sys.version_info == (3, 9)) # revealed: Literal[False]
|
||||
reveal_type((3, 9) == sys.version_info) # revealed: Literal[False]
|
||||
|
||||
reveal_type(sys.version_info != (3, 9)) # revealed: Literal[True]
|
||||
reveal_type((3, 9) != sys.version_info) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## Non-literal types from comparisons
|
||||
|
||||
Comparing `sys.version_info` with tuples of other lengths will sometimes produce `Literal` types,
|
||||
sometimes not:
|
||||
|
||||
```py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info >= (3, 9, 1)) # revealed: bool
|
||||
reveal_type(sys.version_info >= (3, 9, 1, "final", 0)) # revealed: bool
|
||||
|
||||
# TODO: While this won't fail at runtime, the user has probably made a mistake
|
||||
# if they're comparing a tuple of length >5 with `sys.version_info`
|
||||
# (`sys.version_info` is a tuple of length 5). It might be worth
|
||||
# emitting a lint diagnostic of some kind warning them about the probable error?
|
||||
reveal_type(sys.version_info >= (3, 9, 1, "final", 0, 5)) # revealed: bool
|
||||
|
||||
reveal_type(sys.version_info == (3, 8, 1, "finallllll", 0)) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
## Imports and aliases
|
||||
|
||||
Comparisons with `sys.version_info` still produce literal types, even if the symbol is aliased to
|
||||
another name:
|
||||
|
||||
```py
|
||||
from sys import version_info
|
||||
from sys import version_info as foo
|
||||
|
||||
reveal_type(version_info >= (3, 9)) # revealed: Literal[True]
|
||||
reveal_type(foo >= (3, 9)) # revealed: Literal[True]
|
||||
|
||||
bar = version_info
|
||||
reveal_type(bar >= (3, 9)) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## Non-stdlib modules named `sys`
|
||||
|
||||
Only comparisons with the symbol `version_info` from the `sys` module produce literal types:
|
||||
|
||||
```py path=package/__init__.py
|
||||
```
|
||||
|
||||
```py path=package/sys.py
|
||||
version_info: tuple[int, int] = (4, 2)
|
||||
```
|
||||
|
||||
```py path=package/script.py
|
||||
from .sys import version_info
|
||||
|
||||
reveal_type(version_info >= (3, 9)) # revealed: bool
|
||||
```
|
||||
|
||||
## Accessing fields by name
|
||||
|
||||
The fields of `sys.version_info` can be accessed by name:
|
||||
|
||||
```py path=a.py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info.major >= 3) # revealed: Literal[True]
|
||||
reveal_type(sys.version_info.minor >= 9) # revealed: Literal[True]
|
||||
reveal_type(sys.version_info.minor >= 10) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
But the `micro`, `releaselevel` and `serial` fields are inferred as `@Todo` until we support
|
||||
properties on instance types:
|
||||
|
||||
```py path=b.py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info.micro) # revealed: @Todo(instance attributes)
|
||||
reveal_type(sys.version_info.releaselevel) # revealed: @Todo(instance attributes)
|
||||
reveal_type(sys.version_info.serial) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Accessing fields by index/slice
|
||||
|
||||
The fields of `sys.version_info` can be accessed by index or by slice:
|
||||
|
||||
```py
|
||||
import sys
|
||||
|
||||
reveal_type(sys.version_info[0] < 3) # revealed: Literal[False]
|
||||
reveal_type(sys.version_info[1] > 9) # revealed: Literal[False]
|
||||
|
||||
# revealed: tuple[Literal[3], Literal[9], int, Literal["alpha", "beta", "candidate", "final"], int]
|
||||
reveal_type(sys.version_info[:5])
|
||||
|
||||
reveal_type(sys.version_info[:2] >= (3, 9)) # revealed: Literal[True]
|
||||
reveal_type(sys.version_info[0:2] >= (3, 10)) # revealed: Literal[False]
|
||||
reveal_type(sys.version_info[:3] >= (3, 10, 1)) # revealed: Literal[False]
|
||||
reveal_type(sys.version_info[3] == "final") # revealed: bool
|
||||
reveal_type(sys.version_info[3] == "finalllllll") # revealed: Literal[False]
|
||||
```
|
||||
@@ -0,0 +1,71 @@
|
||||
# Type aliases
|
||||
|
||||
## Basic
|
||||
|
||||
```py
|
||||
type IntOrStr = int | str
|
||||
|
||||
reveal_type(IntOrStr) # revealed: typing.TypeAliasType
|
||||
reveal_type(IntOrStr.__name__) # revealed: Literal["IntOrStr"]
|
||||
|
||||
x: IntOrStr = 1
|
||||
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: int | str
|
||||
```
|
||||
|
||||
## `__value__` attribute
|
||||
|
||||
```py
|
||||
type IntOrStr = int | str
|
||||
|
||||
# TODO: This should either fall back to the specified type from typeshed,
|
||||
# which is `Any`, or be the actual type of the runtime value expression
|
||||
# `int | str`, i.e. `types.UnionType`.
|
||||
reveal_type(IntOrStr.__value__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Invalid assignment
|
||||
|
||||
```py
|
||||
type OptionalInt = int | None
|
||||
|
||||
# error: [invalid-assignment]
|
||||
x: OptionalInt = "1"
|
||||
```
|
||||
|
||||
## Type aliases in type aliases
|
||||
|
||||
```py
|
||||
type IntOrStr = int | str
|
||||
type IntOrStrOrBytes = IntOrStr | bytes
|
||||
|
||||
x: IntOrStrOrBytes = 1
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: int | str | bytes
|
||||
```
|
||||
|
||||
## Aliased type aliases
|
||||
|
||||
```py
|
||||
type IntOrStr = int | str
|
||||
MyIntOrStr = IntOrStr
|
||||
|
||||
x: MyIntOrStr = 1
|
||||
|
||||
# error: [invalid-assignment]
|
||||
y: MyIntOrStr = None
|
||||
```
|
||||
|
||||
## Generic type aliases
|
||||
|
||||
```py
|
||||
type ListOrSet[T] = list[T] | set[T]
|
||||
|
||||
# TODO: Should be `tuple[typing.TypeVar | typing.ParamSpec | typing.TypeVarTuple, ...]`,
|
||||
# as specified in the `typeshed` stubs.
|
||||
reveal_type(ListOrSet.__type_params__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
@@ -1,4 +1,6 @@
|
||||
# Unary Operations
|
||||
# Invert, UAdd, USub
|
||||
|
||||
## Instance
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
@@ -10,8 +10,6 @@ reveal_type(not not None) # revealed: Literal[False]
|
||||
## Function
|
||||
|
||||
```py
|
||||
from typing import reveal_type
|
||||
|
||||
def f():
|
||||
return 1
|
||||
|
||||
@@ -115,3 +113,101 @@ reveal_type(not ()) # revealed: Literal[True]
|
||||
reveal_type(not ("hello",)) # revealed: Literal[False]
|
||||
reveal_type(not (1, "hello")) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
## Instance
|
||||
|
||||
The `not` operator is inferred based on
<https://docs.python.org/3/library/stdtypes.html#truth-value-testing>: an instance is truthy or falsy
according to what its `__bool__` method returns.
|
||||
|
||||
At runtime, the `__len__` method is a fallback for `__bool__`, but we can't make use of that. If we
|
||||
have a class that defines `__len__` but not `__bool__`, it is possible that any subclass could add a
|
||||
`__bool__` method that would invalidate whatever conclusion we drew from `__len__`. So instances of
|
||||
classes without a `__bool__` method, with or without `__len__`, must be inferred as unknown
|
||||
truthiness.
|
||||
|
||||
```py
|
||||
class AlwaysTrue:
|
||||
def __bool__(self) -> Literal[True]:
|
||||
return True
|
||||
|
||||
# revealed: Literal[False]
|
||||
reveal_type(not AlwaysTrue())
|
||||
|
||||
class AlwaysFalse:
|
||||
def __bool__(self) -> Literal[False]:
|
||||
return False
|
||||
|
||||
# revealed: Literal[True]
|
||||
reveal_type(not AlwaysFalse())
|
||||
|
||||
# We don't get into a cycle if someone sets their `__bool__` method to the `bool` builtin:
|
||||
class BoolIsBool:
|
||||
__bool__ = bool
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not BoolIsBool())
|
||||
|
||||
# At runtime, no `__bool__` and no `__len__` means truthy, but we can't rely on that, because
|
||||
# a subclass could add a `__bool__` method.
|
||||
class NoBoolMethod: ...
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not NoBoolMethod())
|
||||
|
||||
# And we can't rely on `__len__` for the same reason: a subclass could add `__bool__`.
|
||||
class LenZero:
|
||||
def __len__(self) -> Literal[0]:
|
||||
return 0
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not LenZero())
|
||||
|
||||
class LenNonZero:
|
||||
def __len__(self) -> Literal[1]:
|
||||
return 1
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not LenNonZero())
|
||||
|
||||
class WithBothLenAndBool1:
|
||||
def __bool__(self) -> Literal[False]:
|
||||
return False
|
||||
|
||||
def __len__(self) -> Literal[2]:
|
||||
return 2
|
||||
|
||||
# revealed: Literal[True]
|
||||
reveal_type(not WithBothLenAndBool1())
|
||||
|
||||
class WithBothLenAndBool2:
|
||||
def __bool__(self) -> Literal[True]:
|
||||
return True
|
||||
|
||||
def __len__(self) -> Literal[0]:
|
||||
return 0
|
||||
|
||||
# revealed: Literal[False]
|
||||
reveal_type(not WithBothLenAndBool2())
|
||||
|
||||
# TODO: raise diagnostic when __bool__ method is not valid: [unsupported-operator] "Method __bool__ for type `MethodBoolInvalid` should return `bool`, returned type `int`"
|
||||
# https://docs.python.org/3/reference/datamodel.html#object.__bool__
|
||||
class MethodBoolInvalid:
|
||||
def __bool__(self) -> int:
|
||||
return 0
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not MethodBoolInvalid())
|
||||
|
||||
# Don't trust a possibly-unbound `__bool__` method:
|
||||
def get_flag() -> bool:
|
||||
return True
|
||||
|
||||
class PossiblyUnboundBool:
|
||||
if get_flag():
|
||||
def __bool__(self) -> Literal[False]:
|
||||
return False
|
||||
|
||||
# revealed: bool
|
||||
reveal_type(not PossiblyUnboundBool())
|
||||
```
|
||||
|
||||
@@ -84,7 +84,7 @@ reveal_type(b) # revealed: Literal[2]
|
||||
[a, *b, c, d] = (1, 2)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
# TODO: Should be list[Any] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: Literal[2]
|
||||
reveal_type(d) # revealed: Unknown
|
||||
```
|
||||
@@ -95,7 +95,7 @@ reveal_type(d) # revealed: Unknown
|
||||
[a, *b, c] = (1, 2)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
# TODO: Should be list[Any] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
@@ -105,7 +105,7 @@ reveal_type(c) # revealed: Literal[2]
|
||||
[a, *b, c] = (1, 2, 3)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
# TODO: Should be list[int] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: Literal[3]
|
||||
```
|
||||
|
||||
@@ -115,7 +115,7 @@ reveal_type(c) # revealed: Literal[3]
|
||||
[a, *b, c, d] = (1, 2, 3, 4, 5, 6)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
# TODO: Should be list[int] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: Literal[5]
|
||||
reveal_type(d) # revealed: Literal[6]
|
||||
```
|
||||
@@ -127,7 +127,7 @@ reveal_type(d) # revealed: Literal[6]
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[2]
|
||||
# TODO: Should be list[int] once support for assigning to starred expression is added
|
||||
reveal_type(c) # revealed: @Todo
|
||||
reveal_type(c) # revealed: @Todo(starred unpacking)
|
||||
```
|
||||
|
||||
### Starred expression (6)
|
||||
@@ -138,7 +138,7 @@ reveal_type(c) # revealed: @Todo
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: @Todo
|
||||
reveal_type(d) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(e) # revealed: Unknown
|
||||
reveal_type(f) # revealed: Unknown
|
||||
```
|
||||
@@ -222,7 +222,7 @@ reveal_type(b) # revealed: LiteralString
|
||||
(a, *b, c, d) = "ab"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: LiteralString
|
||||
reveal_type(d) # revealed: Unknown
|
||||
```
|
||||
@@ -233,7 +233,7 @@ reveal_type(d) # revealed: Unknown
|
||||
(a, *b, c) = "ab"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
# TODO: Should be list[Any] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: LiteralString
|
||||
```
|
||||
|
||||
@@ -243,7 +243,7 @@ reveal_type(c) # revealed: LiteralString
|
||||
(a, *b, c) = "abc"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: LiteralString
|
||||
```
|
||||
|
||||
@@ -253,7 +253,7 @@ reveal_type(c) # revealed: LiteralString
|
||||
(a, *b, c, d) = "abcdef"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: LiteralString
|
||||
reveal_type(d) # revealed: LiteralString
|
||||
```
|
||||
@@ -265,5 +265,44 @@ reveal_type(d) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: LiteralString
|
||||
# TODO: Should be list[int] once support for assigning to starred expression is added
|
||||
reveal_type(c) # revealed: @Todo
|
||||
reveal_type(c) # revealed: @Todo(starred unpacking)
|
||||
```
|
||||
|
||||
### Unicode
|
||||
|
||||
```py
|
||||
# TODO: Add diagnostic (need more values to unpack)
|
||||
(a, b) = "é"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Unicode escape (1)
|
||||
|
||||
```py
|
||||
# TODO: Add diagnostic (need more values to unpack)
|
||||
(a, b) = "\u9E6C"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Unicode escape (2)
|
||||
|
||||
```py
|
||||
# TODO: Add diagnostic (need more values to unpack)
|
||||
(a, b) = "\U0010FFFF"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Surrogates
|
||||
|
||||
```py
|
||||
(a, b) = "\uD800\uDFFF"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: LiteralString
|
||||
```
|
||||
|
||||
@@ -17,5 +17,5 @@ class Manager:
|
||||
|
||||
async def test():
|
||||
async with Manager() as f:
|
||||
reveal_type(f) # revealed: @Todo
|
||||
reveal_type(f) # revealed: @Todo(async with statement)
|
||||
```
|
||||
@@ -11,8 +11,13 @@ pub trait Db: SourceDb + Upcast<dyn SourceDb> {
|
||||
pub(crate) mod tests {
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::ProgramSettings;
|
||||
|
||||
use anyhow::Context;
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
|
||||
use ruff_db::system::{DbWithTestSystem, System, SystemPathBuf, TestSystem};
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
@@ -108,4 +113,66 @@ pub(crate) mod tests {
|
||||
events.push(event);
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TestDbBuilder<'a> {
|
||||
/// Target Python version
|
||||
python_version: PythonVersion,
|
||||
/// Path to a custom typeshed directory
|
||||
custom_typeshed: Option<SystemPathBuf>,
|
||||
/// Path and content pairs for files that should be present
|
||||
files: Vec<(&'a str, &'a str)>,
|
||||
}
|
||||
|
||||
impl<'a> TestDbBuilder<'a> {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
python_version: PythonVersion::default(),
|
||||
custom_typeshed: None,
|
||||
files: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn with_python_version(mut self, version: PythonVersion) -> Self {
|
||||
self.python_version = version;
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn with_custom_typeshed(mut self, path: &str) -> Self {
|
||||
self.custom_typeshed = Some(SystemPathBuf::from(path));
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn with_file(mut self, path: &'a str, content: &'a str) -> Self {
|
||||
self.files.push((path, content));
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn build(self) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system().create_directory_all(&src_root)?;
|
||||
|
||||
db.write_files(self.files)
|
||||
.context("Failed to write test files")?;
|
||||
|
||||
let mut search_paths = SearchPathSettings::new(src_root);
|
||||
search_paths.custom_typeshed = self.custom_typeshed;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: self.python_version,
|
||||
search_paths,
|
||||
},
|
||||
)
|
||||
.context("Failed to configure Program settings")?;
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
TestDbBuilder::new().build().expect("valid TestDb setup")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -416,7 +416,7 @@ impl<'db> Iterator for SearchPathIterator<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> FusedIterator for SearchPathIterator<'db> {}
|
||||
impl FusedIterator for SearchPathIterator<'_> {}
|
||||
|
||||
/// Represents a single `.pth` file in a `site-packages` directory.
|
||||
/// One or more lines in a `.pth` file may be a (relative or absolute)
|
||||
|
||||
@@ -459,11 +459,11 @@ foo: 3.8- # trailing comment
|
||||
";
|
||||
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
|
||||
assert_eq!(parsed_versions.len(), 3);
|
||||
assert_snapshot!(parsed_versions.to_string(), @r###"
|
||||
assert_snapshot!(parsed_versions.to_string(), @r"
|
||||
bar: 2.7-3.10
|
||||
bar.baz: 3.1-3.9
|
||||
foo: 3.8-
|
||||
"###
|
||||
"
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
use ruff_python_ast::{AnyNodeRef, NodeKind};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use ruff_python_ast::AnyNodeRef;
|
||||
|
||||
/// Compact key for a node for use in a hash map.
|
||||
///
|
||||
/// Compares two nodes by their kind and text range.
|
||||
/// Stores the memory address of the node, because using the range and the kind
|
||||
/// of the node is not enough to uniquely identify them in ASTs resulting from
|
||||
/// invalid syntax. For example, parsing the input `for` results in a `StmtFor`
|
||||
/// AST node where both the `target` and the `iter` field are `ExprName` nodes
|
||||
/// with the same (empty) range `3..3`.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub(super) struct NodeKey {
|
||||
kind: NodeKind,
|
||||
range: TextRange,
|
||||
}
|
||||
pub(super) struct NodeKey(usize);
|
||||
|
||||
impl NodeKey {
|
||||
pub(super) fn from_node<'a, N>(node: N) -> Self
|
||||
@@ -16,9 +16,6 @@ impl NodeKey {
|
||||
N: Into<AnyNodeRef<'a>>,
|
||||
{
|
||||
let node = node.into();
|
||||
NodeKey {
|
||||
kind: node.kind(),
|
||||
range: node.range(),
|
||||
}
|
||||
NodeKey(node.as_ptr().as_ptr() as usize)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,6 +54,7 @@ impl Program {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
|
||||
pub struct ProgramSettings {
|
||||
pub target_version: PythonVersion,
|
||||
pub search_paths: SearchPathSettings,
|
||||
@@ -61,6 +62,7 @@ pub struct ProgramSettings {
|
||||
|
||||
/// Configures the search paths for module resolution.
|
||||
#[derive(Eq, PartialEq, Debug, Clone)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
|
||||
pub struct SearchPathSettings {
|
||||
/// List of user-provided paths that should take first priority in the module resolution.
|
||||
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
|
||||
@@ -91,6 +93,7 @@ impl SearchPathSettings {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
|
||||
pub enum SitePackages {
|
||||
Derived {
|
||||
venv_path: SystemPathBuf,
|
||||
|
||||
@@ -5,6 +5,7 @@ use std::fmt;
|
||||
/// Unlike the `TargetVersion` enums in the CLI crates,
|
||||
/// this does not necessarily represent a Python version that we actually support.
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
|
||||
pub struct PythonVersion {
|
||||
pub major: u8,
|
||||
pub minor: u8,
|
||||
@@ -38,7 +39,7 @@ impl PythonVersion {
|
||||
|
||||
impl Default for PythonVersion {
|
||||
fn default() -> Self {
|
||||
Self::PY38
|
||||
Self::PY39
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -49,64 +49,50 @@ fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
|
||||
semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
|
||||
}
|
||||
|
||||
pub trait HasScopedUseId {
|
||||
/// The type of the ID uniquely identifying the use.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the use in `scope`.
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedUseId;
|
||||
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
type Id = ScopedUseId;
|
||||
pub trait HasScopedUseId {
|
||||
/// Returns the ID that uniquely identifies the use in `scope`.
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId;
|
||||
}
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_use_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasScopedUseId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedUseId;
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.use_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasScopedAstId {
|
||||
/// The type of the ID uniquely identifying the node.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the node in `scope`.
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
impl<T: HasScopedAstId> HasScopedAstId for Box<T> {
|
||||
type Id = <T as HasScopedAstId>::Id;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
self.as_ref().scoped_ast_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
/// Uniquely identifies an [`ast::Expr`] in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedExpressionId;
|
||||
|
||||
pub trait HasScopedExpressionId {
|
||||
/// Returns the ID that uniquely identifies the node in `scope`.
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId;
|
||||
}
|
||||
|
||||
impl<T: HasScopedExpressionId> HasScopedExpressionId for Box<T> {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
self.as_ref().scoped_expression_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_has_scoped_expression_id {
|
||||
($ty: ty) => {
|
||||
impl HasScopedAstId for $ty {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedExpressionId for $ty {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_ast_id(db, scope)
|
||||
expression_ref.scoped_expression_id(db, scope)
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -146,29 +132,20 @@ impl_has_scoped_expression_id!(ast::ExprSlice);
|
||||
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
|
||||
impl_has_scoped_expression_id!(ast::Expr);
|
||||
|
||||
impl HasScopedAstId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedExpressionId for ast::ExpressionRef<'_> {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.expression_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct AstIdsBuilder {
|
||||
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
|
||||
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
|
||||
}
|
||||
|
||||
impl AstIdsBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
expressions_map: FxHashMap::default(),
|
||||
uses_map: FxHashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds `expr` to the expression ids map and returns its id.
|
||||
pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
|
||||
let expression_id = self.expressions_map.len().into();
|
||||
|
||||
@@ -36,12 +36,25 @@ use super::definition::{
|
||||
|
||||
mod except_handlers;
|
||||
|
||||
/// Are we in a state where a `break` statement is allowed?
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
enum LoopState {
|
||||
InLoop,
|
||||
NotInLoop,
|
||||
}
|
||||
|
||||
impl LoopState {
|
||||
fn is_inside(self) -> bool {
|
||||
matches!(self, LoopState::InLoop)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct SemanticIndexBuilder<'db> {
|
||||
// Builder state
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
module: &'db ParsedModule,
|
||||
scope_stack: Vec<FileScopeId>,
|
||||
scope_stack: Vec<(FileScopeId, LoopState)>,
|
||||
/// The assignments we're currently visiting, with
|
||||
/// the most recent visit at the end of the Vec
|
||||
current_assignments: Vec<CurrentAssignment<'db>>,
|
||||
@@ -103,9 +116,24 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
*self
|
||||
.scope_stack
|
||||
.last()
|
||||
.map(|(scope, _)| scope)
|
||||
.expect("Always to have a root scope")
|
||||
}
|
||||
|
||||
fn loop_state(&self) -> LoopState {
|
||||
self.scope_stack
|
||||
.last()
|
||||
.expect("Always to have a root scope")
|
||||
.1
|
||||
}
|
||||
|
||||
fn set_inside_loop(&mut self, state: LoopState) {
|
||||
self.scope_stack
|
||||
.last_mut()
|
||||
.expect("Always to have a root scope")
|
||||
.1 = state;
|
||||
}
|
||||
|
||||
fn push_scope(&mut self, node: NodeWithScopeRef) {
|
||||
let parent = self.current_scope();
|
||||
self.push_scope_with_parent(node, Some(parent));
|
||||
@@ -124,22 +152,23 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
self.try_node_context_stack_manager.enter_nested_scope();
|
||||
|
||||
let file_scope_id = self.scopes.push(scope);
|
||||
self.symbol_tables.push(SymbolTableBuilder::new());
|
||||
self.use_def_maps.push(UseDefMapBuilder::new());
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
|
||||
self.symbol_tables.push(SymbolTableBuilder::default());
|
||||
self.use_def_maps.push(UseDefMapBuilder::default());
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::default());
|
||||
|
||||
let scope_id = ScopeId::new(self.db, self.file, file_scope_id, countme::Count::default());
|
||||
|
||||
self.scope_ids_by_scope.push(scope_id);
|
||||
self.scopes_by_node.insert(node.node_key(), file_scope_id);
|
||||
let previous = self.scopes_by_node.insert(node.node_key(), file_scope_id);
|
||||
debug_assert_eq!(previous, None);
|
||||
|
||||
debug_assert_eq!(ast_id_scope, file_scope_id);
|
||||
|
||||
self.scope_stack.push(file_scope_id);
|
||||
self.scope_stack.push((file_scope_id, LoopState::NotInLoop));
|
||||
}
|
||||
|
||||
fn pop_scope(&mut self) -> FileScopeId {
|
||||
let id = self.scope_stack.pop().expect("Root scope to be present");
|
||||
let (id, _) = self.scope_stack.pop().expect("Root scope to be present");
|
||||
let children_end = self.scopes.next_index();
|
||||
let scope = &mut self.scopes[id];
|
||||
scope.descendents = scope.descendents.start..children_end;
|
||||
@@ -373,6 +402,11 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
if let Some(default) = default {
|
||||
self.visit_expr(default);
|
||||
}
|
||||
match type_param {
|
||||
ast::TypeParam::TypeVar(node) => self.add_definition(symbol, node),
|
||||
ast::TypeParam::ParamSpec(node) => self.add_definition(symbol, node),
|
||||
ast::TypeParam::TypeVarTuple(node) => self.add_definition(symbol, node),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -584,6 +618,27 @@ where
|
||||
},
|
||||
);
|
||||
}
|
||||
ast::Stmt::TypeAlias(type_alias) => {
|
||||
let symbol = self.add_symbol(
|
||||
type_alias
|
||||
.name
|
||||
.as_name_expr()
|
||||
.map(|name| name.id.clone())
|
||||
.unwrap_or("<unknown>".into()),
|
||||
);
|
||||
self.add_definition(symbol, type_alias);
|
||||
self.visit_expr(&type_alias.name);
|
||||
|
||||
self.with_type_params(
|
||||
NodeWithScopeRef::TypeAliasTypeParameters(type_alias),
|
||||
type_alias.type_params.as_ref(),
|
||||
|builder| {
|
||||
builder.push_scope(NodeWithScopeRef::TypeAlias(type_alias));
|
||||
builder.visit_expr(&type_alias.value);
|
||||
builder.pop_scope()
|
||||
},
|
||||
);
|
||||
}
|
||||
ast::Stmt::Import(node) => {
|
||||
for alias in &node.names {
|
||||
let symbol_name = if let Some(asname) = &alias.asname {
|
||||
@@ -671,9 +726,18 @@ where
|
||||
if let Some(value) = &node.value {
|
||||
self.visit_expr(value);
|
||||
}
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://docs.python.org/3/library/ast.html#ast.AnnAssign
|
||||
if matches!(
|
||||
*node.target,
|
||||
ast::Expr::Attribute(_) | ast::Expr::Subscript(_) | ast::Expr::Name(_)
|
||||
) {
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(&node.target);
|
||||
}
|
||||
}
|
||||
ast::Stmt::AugAssign(
|
||||
aug_assign @ ast::StmtAugAssign {
|
||||
@@ -685,9 +749,18 @@ where
|
||||
) => {
|
||||
debug_assert_eq!(&self.current_assignments, &[]);
|
||||
self.visit_expr(value);
|
||||
self.push_assignment(aug_assign.into());
|
||||
self.visit_expr(target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://docs.python.org/3/library/ast.html#ast.AugAssign
|
||||
if matches!(
|
||||
**target,
|
||||
ast::Expr::Attribute(_) | ast::Expr::Subscript(_) | ast::Expr::Name(_)
|
||||
) {
|
||||
self.push_assignment(aug_assign.into());
|
||||
self.visit_expr(target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(target);
|
||||
}
|
||||
}
|
||||
ast::Stmt::If(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
@@ -696,7 +769,22 @@ where
|
||||
let mut constraints = vec![constraint];
|
||||
self.visit_body(&node.body);
|
||||
let mut post_clauses: Vec<FlowSnapshot> = vec![];
|
||||
for clause in &node.elif_else_clauses {
|
||||
let elif_else_clauses = node
|
||||
.elif_else_clauses
|
||||
.iter()
|
||||
.map(|clause| (clause.test.as_ref(), clause.body.as_slice()));
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
.is_some_and(|clause| clause.test.is_none());
|
||||
let elif_else_clauses = elif_else_clauses.chain(if has_else {
|
||||
// if there's an `else` clause already, we don't need to add another
|
||||
None
|
||||
} else {
|
||||
// if there's no `else` branch, we should add a no-op `else` branch
|
||||
Some((None, Default::default()))
|
||||
});
|
||||
for (clause_test, clause_body) in elif_else_clauses {
|
||||
// snapshot after every block except the last; the last one will just become
|
||||
// the state that we merge the other snapshots into
|
||||
post_clauses.push(self.flow_snapshot());
|
||||
@@ -706,24 +794,15 @@ where
|
||||
for constraint in &constraints {
|
||||
self.record_negated_constraint(*constraint);
|
||||
}
|
||||
if let Some(elif_test) = &clause.test {
|
||||
if let Some(elif_test) = clause_test {
|
||||
self.visit_expr(elif_test);
|
||||
constraints.push(self.record_expression_constraint(elif_test));
|
||||
}
|
||||
self.visit_body(&clause.body);
|
||||
self.visit_body(clause_body);
|
||||
}
|
||||
for post_clause_state in post_clauses {
|
||||
self.flow_merge(post_clause_state);
|
||||
}
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
.is_some_and(|clause| clause.test.is_none());
|
||||
if !has_else {
|
||||
// if there's no else clause, then it's possible we took none of the branches,
|
||||
// and the pre_if state can reach here
|
||||
self.flow_merge(pre_if);
|
||||
}
|
||||
}
|
||||
ast::Stmt::While(ast::StmtWhile {
|
||||
test,
|
||||
@@ -740,7 +819,10 @@ where
|
||||
|
||||
// TODO: definitions created inside the body should be fully visible
|
||||
// to other statements/expressions inside the body --Alex/Carl
|
||||
let outer_loop_state = self.loop_state();
|
||||
self.set_inside_loop(LoopState::InLoop);
|
||||
self.visit_body(body);
|
||||
self.set_inside_loop(outer_loop_state);
|
||||
|
||||
// Get the break states from the body of this loop, and restore the saved outer
|
||||
// ones.
|
||||
@@ -779,7 +861,9 @@ where
|
||||
self.visit_body(body);
|
||||
}
|
||||
ast::Stmt::Break(_) => {
|
||||
self.loop_break_states.push(self.flow_snapshot());
|
||||
if self.loop_state().is_inside() {
|
||||
self.loop_break_states.push(self.flow_snapshot());
|
||||
}
|
||||
}
|
||||
|
||||
ast::Stmt::For(
|
||||
@@ -806,7 +890,10 @@ where
|
||||
// TODO: Definitions created by loop variables
|
||||
// (and definitions created inside the body)
|
||||
// are fully visible to other statements/expressions inside the body --Alex/Carl
|
||||
let outer_loop_state = self.loop_state();
|
||||
self.set_inside_loop(LoopState::InLoop);
|
||||
self.visit_body(body);
|
||||
self.set_inside_loop(outer_loop_state);
|
||||
|
||||
let break_states =
|
||||
std::mem::replace(&mut self.loop_break_states, saved_break_states);
|
||||
@@ -1067,9 +1154,15 @@ where
|
||||
ast::Expr::Named(node) => {
|
||||
// TODO walrus in comprehensions is implicitly nonlocal
|
||||
self.visit_expr(&node.value);
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://peps.python.org/pep-0572/#differences-between-assignment-expressions-and-assignment-statements
|
||||
if node.target.is_name_expr() {
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(&node.target);
|
||||
}
|
||||
}
|
||||
ast::Expr::Lambda(lambda) => {
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
@@ -1102,8 +1195,8 @@ where
|
||||
// AST inspection, so we can't simplify here, need to record test expression for
|
||||
// later checking)
|
||||
self.visit_expr(test);
|
||||
let constraint = self.record_expression_constraint(test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
let constraint = self.record_expression_constraint(test);
|
||||
self.visit_expr(body);
|
||||
let post_body = self.flow_snapshot();
|
||||
self.flow_restore(pre_if);
|
||||
|
||||
@@ -83,6 +83,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
For(ForStmtDefinitionNodeRef<'a>),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Class(&'a ast::StmtClassDef),
|
||||
TypeAlias(&'a ast::StmtTypeAlias),
|
||||
NamedExpression(&'a ast::ExprNamed),
|
||||
Assignment(AssignmentDefinitionNodeRef<'a>),
|
||||
AnnotatedAssignment(&'a ast::StmtAnnAssign),
|
||||
@@ -92,6 +93,9 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
WithItem(WithItemDefinitionNodeRef<'a>),
|
||||
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
|
||||
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
|
||||
TypeVar(&'a ast::TypeParamTypeVar),
|
||||
ParamSpec(&'a ast::TypeParamParamSpec),
|
||||
TypeVarTuple(&'a ast::TypeParamTypeVarTuple),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
@@ -106,6 +110,12 @@ impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtTypeAlias> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::StmtTypeAlias) -> Self {
|
||||
Self::TypeAlias(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
|
||||
fn from(node: &'a ast::ExprNamed) -> Self {
|
||||
Self::NamedExpression(node)
|
||||
@@ -130,6 +140,24 @@ impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamTypeVar> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamTypeVar) -> Self {
|
||||
Self::TypeVar(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamParamSpec> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamParamSpec) -> Self {
|
||||
Self::ParamSpec(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamTypeVarTuple> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamTypeVarTuple) -> Self {
|
||||
Self::TypeVarTuple(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
|
||||
Self::ImportFrom(node_ref)
|
||||
@@ -244,6 +272,9 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
DefinitionNodeRef::Class(class) => {
|
||||
DefinitionKind::Class(AstNodeRef::new(parsed, class))
|
||||
}
|
||||
DefinitionNodeRef::TypeAlias(type_alias) => {
|
||||
DefinitionKind::TypeAlias(AstNodeRef::new(parsed, type_alias))
|
||||
}
|
||||
DefinitionNodeRef::NamedExpression(named) => {
|
||||
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
|
||||
}
|
||||
@@ -317,6 +348,15 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
handler: AstNodeRef::new(parsed, handler),
|
||||
is_star,
|
||||
}),
|
||||
DefinitionNodeRef::TypeVar(node) => {
|
||||
DefinitionKind::TypeVar(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
DefinitionNodeRef::ParamSpec(node) => {
|
||||
DefinitionKind::ParamSpec(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
DefinitionNodeRef::TypeVarTuple(node) => {
|
||||
DefinitionKind::TypeVarTuple(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -328,6 +368,7 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
}
|
||||
Self::Function(node) => node.into(),
|
||||
Self::Class(node) => node.into(),
|
||||
Self::TypeAlias(node) => node.into(),
|
||||
Self::NamedExpression(node) => node.into(),
|
||||
Self::Assignment(AssignmentDefinitionNodeRef {
|
||||
value: _,
|
||||
@@ -356,6 +397,9 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
identifier.into()
|
||||
}
|
||||
Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
|
||||
Self::TypeVar(node) => node.into(),
|
||||
Self::ParamSpec(node) => node.into(),
|
||||
Self::TypeVarTuple(node) => node.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -401,6 +445,7 @@ pub enum DefinitionKind<'db> {
|
||||
ImportFrom(ImportFromDefinitionKind),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Class(AstNodeRef<ast::StmtClassDef>),
|
||||
TypeAlias(AstNodeRef<ast::StmtTypeAlias>),
|
||||
NamedExpression(AstNodeRef<ast::ExprNamed>),
|
||||
Assignment(AssignmentDefinitionKind<'db>),
|
||||
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
|
||||
@@ -412,6 +457,9 @@ pub enum DefinitionKind<'db> {
|
||||
WithItem(WithItemDefinitionKind),
|
||||
MatchPattern(MatchPatternDefinitionKind),
|
||||
ExceptHandler(ExceptHandlerDefinitionKind),
|
||||
TypeVar(AstNodeRef<ast::TypeParamTypeVar>),
|
||||
ParamSpec(AstNodeRef<ast::TypeParamParamSpec>),
|
||||
TypeVarTuple(AstNodeRef<ast::TypeParamTypeVarTuple>),
|
||||
}
|
||||
|
||||
impl DefinitionKind<'_> {
|
||||
@@ -420,8 +468,12 @@ impl DefinitionKind<'_> {
|
||||
// functions, classes, and imports always bind, and we consider them declarations
|
||||
DefinitionKind::Function(_)
|
||||
| DefinitionKind::Class(_)
|
||||
| DefinitionKind::TypeAlias(_)
|
||||
| DefinitionKind::Import(_)
|
||||
| DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
| DefinitionKind::ImportFrom(_)
|
||||
| DefinitionKind::TypeVar(_)
|
||||
| DefinitionKind::ParamSpec(_)
|
||||
| DefinitionKind::TypeVarTuple(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
// a parameter always binds a value, but is only a declaration if annotated
|
||||
DefinitionKind::Parameter(parameter) => {
|
||||
if parameter.annotation.is_some() {
|
||||
@@ -643,6 +695,12 @@ impl From<&ast::StmtClassDef> for DefinitionNodeKey {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::StmtTypeAlias> for DefinitionNodeKey {
|
||||
fn from(node: &ast::StmtTypeAlias) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::ExprName> for DefinitionNodeKey {
|
||||
fn from(node: &ast::ExprName) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
@@ -696,3 +754,21 @@ impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
|
||||
Self(NodeKey::from_node(handler))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamTypeVar> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamTypeVar) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamParamSpec> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamParamSpec) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamTypeVarTuple> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamTypeVarTuple) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -116,14 +116,11 @@ impl<'db> ScopeId<'db> {
|
||||
// Type parameter scopes behave like function scopes in terms of name resolution; CPython
|
||||
// symbol table also uses the term "function-like" for these scopes.
|
||||
matches!(
|
||||
self.node(db),
|
||||
NodeWithScopeKind::ClassTypeParameters(_)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(_)
|
||||
| NodeWithScopeKind::Function(_)
|
||||
| NodeWithScopeKind::ListComprehension(_)
|
||||
| NodeWithScopeKind::SetComprehension(_)
|
||||
| NodeWithScopeKind::DictComprehension(_)
|
||||
| NodeWithScopeKind::GeneratorExpression(_)
|
||||
self.node(db).scope_kind(),
|
||||
ScopeKind::Annotation
|
||||
| ScopeKind::Function
|
||||
| ScopeKind::TypeAlias
|
||||
| ScopeKind::Comprehension
|
||||
)
|
||||
}
|
||||
|
||||
@@ -144,6 +141,12 @@ impl<'db> ScopeId<'db> {
|
||||
}
|
||||
NodeWithScopeKind::Function(function)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
|
||||
NodeWithScopeKind::TypeAlias(type_alias)
|
||||
| NodeWithScopeKind::TypeAliasTypeParameters(type_alias) => type_alias
|
||||
.name
|
||||
.as_name_expr()
|
||||
.map(|name| name.id.as_str())
|
||||
.unwrap_or("<type alias>"),
|
||||
NodeWithScopeKind::Lambda(_) => "<lambda>",
|
||||
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
|
||||
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
|
||||
@@ -201,6 +204,7 @@ pub enum ScopeKind {
|
||||
Class,
|
||||
Function,
|
||||
Comprehension,
|
||||
TypeAlias,
|
||||
}
|
||||
|
||||
impl ScopeKind {
|
||||
@@ -210,7 +214,7 @@ impl ScopeKind {
|
||||
}
|
||||
|
||||
/// Symbol table for a specific [`Scope`].
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SymbolTable {
|
||||
/// The symbols in this scope.
|
||||
symbols: IndexVec<ScopedSymbolId, Symbol>,
|
||||
@@ -220,13 +224,6 @@ pub struct SymbolTable {
|
||||
}
|
||||
|
||||
impl SymbolTable {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
symbols: IndexVec::new(),
|
||||
symbols_by_name: SymbolMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
self.symbols.shrink_to_fit();
|
||||
}
|
||||
@@ -278,18 +275,12 @@ impl PartialEq for SymbolTable {
|
||||
|
||||
impl Eq for SymbolTable {}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct SymbolTableBuilder {
|
||||
table: SymbolTable,
|
||||
}
|
||||
|
||||
impl SymbolTableBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
table: SymbolTable::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
|
||||
let hash = SymbolTable::hash_name(&name);
|
||||
let entry = self
|
||||
@@ -339,6 +330,8 @@ pub(crate) enum NodeWithScopeRef<'a> {
|
||||
Lambda(&'a ast::ExprLambda),
|
||||
FunctionTypeParameters(&'a ast::StmtFunctionDef),
|
||||
ClassTypeParameters(&'a ast::StmtClassDef),
|
||||
TypeAlias(&'a ast::StmtTypeAlias),
|
||||
TypeAliasTypeParameters(&'a ast::StmtTypeAlias),
|
||||
ListComprehension(&'a ast::ExprListComp),
|
||||
SetComprehension(&'a ast::ExprSetComp),
|
||||
DictComprehension(&'a ast::ExprDictComp),
|
||||
@@ -360,6 +353,12 @@ impl NodeWithScopeRef<'_> {
|
||||
NodeWithScopeRef::Function(function) => {
|
||||
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
|
||||
}
|
||||
NodeWithScopeRef::TypeAlias(type_alias) => {
|
||||
NodeWithScopeKind::TypeAlias(AstNodeRef::new(module, type_alias))
|
||||
}
|
||||
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
|
||||
NodeWithScopeKind::TypeAliasTypeParameters(AstNodeRef::new(module, type_alias))
|
||||
}
|
||||
NodeWithScopeRef::Lambda(lambda) => {
|
||||
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
|
||||
}
|
||||
@@ -400,6 +399,12 @@ impl NodeWithScopeRef<'_> {
|
||||
NodeWithScopeRef::ClassTypeParameters(class) => {
|
||||
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
|
||||
}
|
||||
NodeWithScopeRef::TypeAlias(type_alias) => {
|
||||
NodeWithScopeKey::TypeAlias(NodeKey::from_node(type_alias))
|
||||
}
|
||||
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
|
||||
NodeWithScopeKey::TypeAliasTypeParameters(NodeKey::from_node(type_alias))
|
||||
}
|
||||
NodeWithScopeRef::ListComprehension(comprehension) => {
|
||||
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
@@ -424,6 +429,8 @@ pub enum NodeWithScopeKind {
|
||||
ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
|
||||
TypeAliasTypeParameters(AstNodeRef<ast::StmtTypeAlias>),
|
||||
TypeAlias(AstNodeRef<ast::StmtTypeAlias>),
|
||||
Lambda(AstNodeRef<ast::ExprLambda>),
|
||||
ListComprehension(AstNodeRef<ast::ExprListComp>),
|
||||
SetComprehension(AstNodeRef<ast::ExprSetComp>),
|
||||
@@ -436,9 +443,11 @@ impl NodeWithScopeKind {
|
||||
match self {
|
||||
Self::Module => ScopeKind::Module,
|
||||
Self::Class(_) => ScopeKind::Class,
|
||||
Self::Function(_) => ScopeKind::Function,
|
||||
Self::Lambda(_) => ScopeKind::Function,
|
||||
Self::FunctionTypeParameters(_) | Self::ClassTypeParameters(_) => ScopeKind::Annotation,
|
||||
Self::Function(_) | Self::Lambda(_) => ScopeKind::Function,
|
||||
Self::FunctionTypeParameters(_)
|
||||
| Self::ClassTypeParameters(_)
|
||||
| Self::TypeAliasTypeParameters(_) => ScopeKind::Annotation,
|
||||
Self::TypeAlias(_) => ScopeKind::TypeAlias,
|
||||
Self::ListComprehension(_)
|
||||
| Self::SetComprehension(_)
|
||||
| Self::DictComprehension(_)
|
||||
@@ -459,6 +468,13 @@ impl NodeWithScopeKind {
|
||||
_ => panic!("expected function"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_type_alias(&self) -> &ast::StmtTypeAlias {
|
||||
match self {
|
||||
Self::TypeAlias(type_alias) => type_alias.node(),
|
||||
_ => panic!("expected type alias"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
@@ -468,6 +484,8 @@ pub(crate) enum NodeWithScopeKey {
|
||||
ClassTypeParameters(NodeKey),
|
||||
Function(NodeKey),
|
||||
FunctionTypeParameters(NodeKey),
|
||||
TypeAlias(NodeKey),
|
||||
TypeAliasTypeParameters(NodeKey),
|
||||
Lambda(NodeKey),
|
||||
ListComprehension(NodeKey),
|
||||
SetComprehension(NodeKey),
|
||||
|
||||
@@ -277,7 +277,7 @@ impl<'db> UseDefMap<'db> {
|
||||
|
||||
pub(crate) fn use_boundness(&self, use_id: ScopedUseId) -> Boundness {
|
||||
if self.bindings_by_use[use_id].may_be_unbound() {
|
||||
Boundness::MayBeUnbound
|
||||
Boundness::PossiblyUnbound
|
||||
} else {
|
||||
Boundness::Bound
|
||||
}
|
||||
@@ -292,7 +292,7 @@ impl<'db> UseDefMap<'db> {
|
||||
|
||||
pub(crate) fn public_boundness(&self, symbol: ScopedSymbolId) -> Boundness {
|
||||
if self.public_symbols[symbol].may_be_unbound() {
|
||||
Boundness::MayBeUnbound
|
||||
Boundness::PossiblyUnbound
|
||||
} else {
|
||||
Boundness::Bound
|
||||
}
|
||||
@@ -400,7 +400,7 @@ pub(crate) struct ConstraintsIterator<'map, 'db> {
|
||||
constraint_ids: ConstraintIdIterator<'map>,
|
||||
}
|
||||
|
||||
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
|
||||
impl<'db> Iterator for ConstraintsIterator<'_, 'db> {
|
||||
type Item = Constraint<'db>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
@@ -424,7 +424,7 @@ impl DeclarationsIterator<'_, '_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
|
||||
impl<'db> Iterator for DeclarationsIterator<'_, 'db> {
|
||||
type Item = Definition<'db>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
@@ -459,10 +459,6 @@ pub(super) struct UseDefMapBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> UseDefMapBuilder<'db> {
|
||||
pub(super) fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
|
||||
let new_symbol = self.symbol_states.push(SymbolState::undefined());
|
||||
debug_assert_eq!(symbol, new_symbol);
|
||||
|
||||
@@ -401,7 +401,7 @@ pub(super) struct DeclarationIdIterator<'a> {
|
||||
inner: DeclarationsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for DeclarationIdIterator<'a> {
|
||||
impl Iterator for DeclarationIdIterator<'_> {
|
||||
type Item = ScopedDefinitionId;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
|
||||
@@ -6,7 +6,7 @@ use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::types::{binding_ty, infer_scope_types, Type};
|
||||
use crate::Db;
|
||||
@@ -54,7 +54,7 @@ impl HasTy for ast::ExpressionRef<'_> {
|
||||
let file_scope = index.expression_scope_id(*self);
|
||||
let scope = file_scope.to_scope_id(model.db, model.file);
|
||||
|
||||
let expression_id = self.scoped_ast_id(model.db, scope);
|
||||
let expression_id = self.scoped_expression_id(model.db, scope);
|
||||
infer_scope_types(model.db, scope).expression_ty(expression_id)
|
||||
}
|
||||
}
|
||||
@@ -166,31 +166,15 @@ impl_binding_has_ty!(ast::ParameterWithDefault);
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
db.write_files(files)?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(db)
|
||||
}
|
||||
use crate::db::tests::TestDbBuilder;
|
||||
use crate::{HasTy, SemanticModel};
|
||||
|
||||
#[test]
|
||||
fn function_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "def test(): pass")
|
||||
.build()?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
@@ -207,7 +191,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn class_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "class Test: pass")
|
||||
.build()?;
|
||||
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
@@ -224,10 +210,10 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn alias_ty() -> anyhow::Result<()> {
|
||||
let db = setup_db([
|
||||
("/src/foo.py", "class Test: pass"),
|
||||
("/src/bar.py", "from foo import Test"),
|
||||
])?;
|
||||
let db = TestDbBuilder::new()
|
||||
.with_file("/src/foo.py", "class Test: pass")
|
||||
.with_file("/src/bar.py", "from foo import Test")
|
||||
.build()?;
|
||||
|
||||
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
|
||||
|
||||
|
||||
@@ -732,7 +732,20 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(_))
|
||||
Err(SitePackagesDiscoveryError::VenvDirCanonicalizationError(..))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reject_venv_that_is_not_a_directory() {
|
||||
let system = TestSystem::default();
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file("/.venv", "")
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(..))
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
@@ -8,32 +8,38 @@ use crate::Db;
|
||||
|
||||
/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum CoreStdlibModule {
|
||||
pub(crate) enum CoreStdlibModule {
|
||||
Builtins,
|
||||
Types,
|
||||
Typeshed,
|
||||
TypingExtensions,
|
||||
Typing,
|
||||
Sys,
|
||||
}
|
||||
|
||||
impl CoreStdlibModule {
|
||||
fn name(self) -> ModuleName {
|
||||
let module_name = match self {
|
||||
pub(crate) const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Builtins => "builtins",
|
||||
Self::Types => "types",
|
||||
Self::Typing => "typing",
|
||||
Self::Typeshed => "_typeshed",
|
||||
Self::TypingExtensions => "typing_extensions",
|
||||
};
|
||||
ModuleName::new_static(module_name)
|
||||
.unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
|
||||
Self::Sys => "sys",
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn name(self) -> ModuleName {
|
||||
let self_as_str = self.as_str();
|
||||
ModuleName::new_static(self_as_str)
|
||||
.unwrap_or_else(|| panic!("{self_as_str} should be a valid module name!"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in a given core module
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the given core module cannot be resolved for some reason
|
||||
fn core_module_symbol<'db>(
|
||||
pub(crate) fn core_module_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
core_module: CoreStdlibModule,
|
||||
symbol: &str,
|
||||
@@ -51,29 +57,14 @@ pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db>
|
||||
core_module_symbol(db, CoreStdlibModule::Builtins, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `types` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `types` module isn't available for some reason.
|
||||
#[inline]
|
||||
pub(crate) fn types_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Types, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `typing` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `typing` module isn't available for some reason.
|
||||
#[inline]
|
||||
#[allow(dead_code)] // currently only used in tests
|
||||
#[cfg(test)]
|
||||
pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Typing, symbol)
|
||||
}
|
||||
/// Lookup the type of `symbol` in the `_typeshed` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `_typeshed` module isn't available for some reason.
|
||||
#[inline]
|
||||
pub(crate) fn typeshed_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Typeshed, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `typing_extensions` module namespace.
|
||||
///
|
||||
|
||||
@@ -6,7 +6,16 @@ use crate::{
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub(crate) enum Boundness {
|
||||
Bound,
|
||||
MayBeUnbound,
|
||||
PossiblyUnbound,
|
||||
}
|
||||
|
||||
impl Boundness {
|
||||
pub(crate) fn or(self, other: Boundness) -> Boundness {
|
||||
match (self, other) {
|
||||
(Boundness::Bound, _) | (_, Boundness::Bound) => Boundness::Bound,
|
||||
(Boundness::PossiblyUnbound, Boundness::PossiblyUnbound) => Boundness::PossiblyUnbound,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The result of a symbol lookup, which can either be a (possibly unbound) type
|
||||
@@ -17,14 +26,14 @@ pub(crate) enum Boundness {
|
||||
/// bound = 1
|
||||
///
|
||||
/// if flag:
|
||||
/// maybe_unbound = 2
|
||||
/// possibly_unbound = 2
|
||||
/// ```
|
||||
///
|
||||
/// If we look up symbols in this scope, we would get the following results:
|
||||
/// ```rs
|
||||
/// bound: Symbol::Type(Type::IntLiteral(1), Boundness::Bound),
|
||||
/// maybe_unbound: Symbol::Type(Type::IntLiteral(2), Boundness::MayBeUnbound),
|
||||
/// non_existent: Symbol::Unbound,
|
||||
/// bound: Symbol::Type(Type::IntLiteral(1), Boundness::Bound),
|
||||
/// possibly_unbound: Symbol::Type(Type::IntLiteral(2), Boundness::PossiblyUnbound),
|
||||
/// non_existent: Symbol::Unbound,
|
||||
/// ```
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum Symbol<'db> {
|
||||
@@ -37,21 +46,18 @@ impl<'db> Symbol<'db> {
|
||||
matches!(self, Symbol::Unbound)
|
||||
}
|
||||
|
||||
pub(crate) fn may_be_unbound(&self) -> bool {
|
||||
pub(crate) fn possibly_unbound(&self) -> bool {
|
||||
match self {
|
||||
Symbol::Type(_, Boundness::MayBeUnbound) | Symbol::Unbound => true,
|
||||
Symbol::Type(_, Boundness::PossiblyUnbound) | Symbol::Unbound => true,
|
||||
Symbol::Type(_, Boundness::Bound) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn unwrap_or_unknown(&self) -> Type<'db> {
|
||||
match self {
|
||||
Symbol::Type(ty, _) => *ty,
|
||||
Symbol::Unbound => Type::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_type(&self) -> Option<Type<'db>> {
|
||||
/// Returns the type of the symbol, ignoring possible unboundness.
|
||||
///
|
||||
/// If the symbol is *definitely* unbound, this function will return `None`. Otherwise,
|
||||
/// if there is at least one control-flow path where the symbol is bound, return the type.
|
||||
pub(crate) fn ignore_possibly_unbound(&self) -> Option<Type<'db>> {
|
||||
match self {
|
||||
Symbol::Type(ty, _) => Some(*ty),
|
||||
Symbol::Unbound => None,
|
||||
@@ -61,28 +67,80 @@ impl<'db> Symbol<'db> {
|
||||
#[cfg(test)]
|
||||
#[track_caller]
|
||||
pub(crate) fn expect_type(self) -> Type<'db> {
|
||||
self.as_type()
|
||||
self.ignore_possibly_unbound()
|
||||
.expect("Expected a (possibly unbound) type, not an unbound symbol")
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn replace_unbound_with(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
replacement: &Symbol<'db>,
|
||||
) -> Symbol<'db> {
|
||||
match replacement {
|
||||
Symbol::Type(replacement, _) => Symbol::Type(
|
||||
match self {
|
||||
Symbol::Type(ty, Boundness::Bound) => ty,
|
||||
Symbol::Type(ty, Boundness::MayBeUnbound) => {
|
||||
UnionType::from_elements(db, [*replacement, ty])
|
||||
}
|
||||
Symbol::Unbound => *replacement,
|
||||
},
|
||||
Boundness::Bound,
|
||||
),
|
||||
pub(crate) fn or_fall_back_to(self, db: &'db dyn Db, fallback: &Symbol<'db>) -> Symbol<'db> {
|
||||
match fallback {
|
||||
Symbol::Type(fallback_ty, fallback_boundness) => match self {
|
||||
Symbol::Type(_, Boundness::Bound) => self,
|
||||
Symbol::Type(ty, boundness @ Boundness::PossiblyUnbound) => Symbol::Type(
|
||||
UnionType::from_elements(db, [*fallback_ty, ty]),
|
||||
fallback_boundness.or(boundness),
|
||||
),
|
||||
Symbol::Unbound => fallback.clone(),
|
||||
},
|
||||
Symbol::Unbound => self,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::db::tests::setup_db;
|
||||
|
||||
#[test]
|
||||
fn test_symbol_or_fall_back_to() {
|
||||
use Boundness::{Bound, PossiblyUnbound};
|
||||
|
||||
let db = setup_db();
|
||||
let ty1 = Type::IntLiteral(1);
|
||||
let ty2 = Type::IntLiteral(2);
|
||||
|
||||
// Start from an unbound symbol
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Unbound
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Type(ty1, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Type(ty1, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
|
||||
// Start from a possibly unbound symbol
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
.or_fall_back_to(&db, &Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty2, ty1]), PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, &Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty2, ty1]), Bound)
|
||||
);
|
||||
|
||||
// Start from a definitely bound symbol
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -73,7 +73,8 @@ impl<'db> UnionBuilder<'db> {
|
||||
// supertype of bool. Therefore, we are done.
|
||||
break;
|
||||
}
|
||||
if ty.is_subtype_of(self.db, *element) {
|
||||
|
||||
if ty.is_same_gradual_form(*element) || ty.is_subtype_of(self.db, *element) {
|
||||
return self;
|
||||
} else if element.is_subtype_of(self.db, ty) {
|
||||
to_remove.push(index);
|
||||
@@ -128,7 +129,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![InnerIntersectionBuilder::new()],
|
||||
intersections: vec![InnerIntersectionBuilder::default()],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,10 +232,6 @@ struct InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> InnerIntersectionBuilder<'db> {
|
||||
fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Adds a positive type to this intersection.
|
||||
fn add_positive(&mut self, db: &'db dyn Db, new_positive: Type<'db>) {
|
||||
if let Type::Intersection(other) = new_positive {
|
||||
@@ -253,7 +250,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.find(|element| element.is_boolean_literal())
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!value));
|
||||
return;
|
||||
}
|
||||
@@ -263,7 +260,9 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
let mut to_remove = SmallVec::<[usize; 1]>::new();
|
||||
for (index, existing_positive) in self.positive.iter().enumerate() {
|
||||
// S & T = S if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_positive) {
|
||||
if existing_positive.is_subtype_of(db, new_positive)
|
||||
|| existing_positive.is_same_gradual_form(new_positive)
|
||||
{
|
||||
return;
|
||||
}
|
||||
// same rule, reverse order
|
||||
@@ -272,7 +271,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
// A & B = Never if A and B are disjoint
|
||||
if new_positive.is_disjoint_from(db, *existing_positive) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -285,7 +284,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for (index, existing_negative) in self.negative.iter().enumerate() {
|
||||
// S & ~T = Never if S <: T
|
||||
if new_positive.is_subtype_of(db, *existing_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -313,7 +312,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
self.add_positive(db, *neg);
|
||||
}
|
||||
}
|
||||
ty @ (Type::Any | Type::Unknown | Type::Todo) => {
|
||||
ty @ (Type::Any | Type::Unknown | Type::Todo(_)) => {
|
||||
// Adding any of these types to the negative side of an intersection
|
||||
// is equivalent to adding it to the positive side. We do this to
|
||||
// simplify the representation.
|
||||
@@ -326,7 +325,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.any(|pos| *pos == KnownClass::Bool.to_instance(db)) =>
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!bool));
|
||||
}
|
||||
_ => {
|
||||
@@ -348,7 +347,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for existing_positive in &self.positive {
|
||||
// S & ~T = Never if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -379,36 +378,14 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::stdlib::typing_symbol;
|
||||
use crate::types::{global_symbol, KnownClass, StringLiteralType, UnionBuilder};
|
||||
use crate::ProgramSettings;
|
||||
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::types::{global_symbol, todo_type, KnownClass, UnionBuilder};
|
||||
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use test_case::test_case;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union() {
|
||||
let db = setup_db();
|
||||
@@ -501,6 +478,17 @@ mod tests {
|
||||
assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
let t0 = KnownClass::Str.to_instance(&db);
|
||||
let t1 = Type::Unknown;
|
||||
|
||||
let u = UnionType::from_elements(&db, [t0, t1, t1]);
|
||||
|
||||
assert_eq!(u.expect_union().elements(&db), &[t0, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_subsume_multiple() {
|
||||
let db = setup_db();
|
||||
@@ -608,6 +596,42 @@ mod tests {
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_multiple_unknown() {
|
||||
let db = setup_db();
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Unknown)
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Unknown);
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.add_negative(Type::Unknown)
|
||||
.build();
|
||||
assert_eq!(
|
||||
ty,
|
||||
IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::Unknown)
|
||||
.add_positive(Type::IntLiteral(0))
|
||||
.build()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersection_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
@@ -630,59 +654,85 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn intersection_negation_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
let st = typing_symbol(&db, "Sized").expect_type().to_instance(&db);
|
||||
let ht = typing_symbol(&db, "Hashable")
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
// sh_t: Sized & Hashable
|
||||
let sh_t = IntersectionBuilder::new(&db)
|
||||
.add_positive(st)
|
||||
.add_positive(ht)
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
|
||||
// intersection: A & B
|
||||
let intersection = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_positive(b)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(sh_t.pos_vec(&db), &[st, ht]);
|
||||
assert_eq!(sh_t.neg_vec(&db), &[]);
|
||||
assert_eq!(intersection.pos_vec(&db), &[a, b]);
|
||||
assert_eq!(intersection.neg_vec(&db), &[]);
|
||||
|
||||
// ~sh_t => ~Sized | ~Hashable
|
||||
let not_s_h_t = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Intersection(sh_t))
|
||||
// ~intersection => ~A | ~B
|
||||
let negated_intersection = IntersectionBuilder::new(&db)
|
||||
.add_negative(Type::Intersection(intersection))
|
||||
.build()
|
||||
.expect_union();
|
||||
|
||||
// should have as elements: (~Sized),(~Hashable)
|
||||
let not_st = st.negate(&db);
|
||||
let not_ht = ht.negate(&db);
|
||||
assert_eq!(not_s_h_t.elements(&db), &[not_st, not_ht]);
|
||||
// should have as elements ~A and ~B
|
||||
let not_a = a.negate(&db);
|
||||
let not_b = b.negate(&db);
|
||||
assert_eq!(negated_intersection.elements(&db), &[not_a, not_b]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_intersection_negation_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
let it = KnownClass::Int.to_instance(&db);
|
||||
let st = typing_symbol(&db, "Sized").expect_type().to_instance(&db);
|
||||
let ht = typing_symbol(&db, "Hashable")
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/module.py",
|
||||
r#"
|
||||
class A: ...
|
||||
class B: ...
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
let module = ruff_db::files::system_path_to_file(&db, "/src/module.py").unwrap();
|
||||
|
||||
let a = global_symbol(&db, module, "A")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
// s_not_h_t: Sized & ~Hashable
|
||||
let s_not_h_t = IntersectionBuilder::new(&db)
|
||||
.add_positive(st)
|
||||
.add_negative(ht)
|
||||
let b = global_symbol(&db, module, "B")
|
||||
.expect_type()
|
||||
.to_instance(&db);
|
||||
let int = KnownClass::Int.to_instance(&db);
|
||||
|
||||
// a_not_b: A & ~B
|
||||
let a_not_b = IntersectionBuilder::new(&db)
|
||||
.add_positive(a)
|
||||
.add_negative(b)
|
||||
.build()
|
||||
.expect_intersection();
|
||||
assert_eq!(s_not_h_t.pos_vec(&db), &[st]);
|
||||
assert_eq!(s_not_h_t.neg_vec(&db), &[ht]);
|
||||
assert_eq!(a_not_b.pos_vec(&db), &[a]);
|
||||
assert_eq!(a_not_b.neg_vec(&db), &[b]);
|
||||
|
||||
// let's build int & ~(Sized & ~Hashable)
|
||||
let tt = IntersectionBuilder::new(&db)
|
||||
.add_positive(it)
|
||||
.add_negative(Type::Intersection(s_not_h_t))
|
||||
// let's build
|
||||
// int & ~(A & ~B)
|
||||
// = int & ~(A & ~B)
|
||||
// = int & (~A | B)
|
||||
// = (int & ~A) | (int & B)
|
||||
let t = IntersectionBuilder::new(&db)
|
||||
.add_positive(int)
|
||||
.add_negative(Type::Intersection(a_not_b))
|
||||
.build();
|
||||
|
||||
// int & ~(Sized & ~Hashable)
|
||||
// -> int & (~Sized | Hashable)
|
||||
// -> (int & ~Sized) | (int & Hashable)
|
||||
assert_eq!(tt.display(&db).to_string(), "int & ~Sized | int & Hashable");
|
||||
assert_eq!(t.display(&db).to_string(), "int & ~A | int & B");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -775,7 +825,7 @@ mod tests {
|
||||
.build();
|
||||
assert_eq!(ty, s);
|
||||
|
||||
let literal = Type::StringLiteral(StringLiteralType::new(&db, "a"));
|
||||
let literal = Type::string_literal(&db, "a");
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(literal)
|
||||
@@ -878,7 +928,7 @@ mod tests {
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(Type::StringLiteral(StringLiteralType::new(&db, "a")))
|
||||
.add_negative(Type::string_literal(&db, "a"))
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
@@ -912,7 +962,7 @@ mod tests {
|
||||
let db = setup_db();
|
||||
|
||||
let t_p = KnownClass::Int.to_instance(&db);
|
||||
let t_n = Type::StringLiteral(StringLiteralType::new(&db, "t_n"));
|
||||
let t_n = Type::string_literal(&db, "t_n");
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t_p)
|
||||
@@ -991,7 +1041,7 @@ mod tests {
|
||||
|
||||
#[test_case(Type::Any)]
|
||||
#[test_case(Type::Unknown)]
|
||||
#[test_case(Type::Todo)]
|
||||
#[test_case(todo_type!())]
|
||||
fn build_intersection_t_and_negative_t_does_not_simplify(ty: Type) {
|
||||
let db = setup_db();
|
||||
|
||||
|
||||
@@ -73,10 +73,6 @@ pub struct TypeCheckDiagnostics {
|
||||
}
|
||||
|
||||
impl TypeCheckDiagnostics {
|
||||
pub fn new() -> Self {
|
||||
Self { inner: Vec::new() }
|
||||
}
|
||||
|
||||
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
|
||||
self.inner.push(Arc::new(diagnostic));
|
||||
}
|
||||
@@ -148,7 +144,7 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
Self {
|
||||
db,
|
||||
file,
|
||||
diagnostics: TypeCheckDiagnostics::new(),
|
||||
diagnostics: TypeCheckDiagnostics::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
//! Display implementations for types.
|
||||
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::fmt::{self, Display, Formatter, Write};
|
||||
|
||||
use ruff_db::display::FormatterJoinExtension;
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_python_literal::escape::AsciiEscape;
|
||||
|
||||
use crate::types::{
|
||||
ClassLiteralType, InstanceType, IntersectionType, KnownClass, SubclassOfType, Type, UnionType,
|
||||
ClassLiteralType, InstanceType, IntersectionType, KnownClass, StringLiteralType,
|
||||
SubclassOfType, Type, UnionType,
|
||||
};
|
||||
use crate::Db;
|
||||
use rustc_hash::FxHashMap;
|
||||
@@ -66,14 +67,17 @@ impl Display for DisplayRepresentation<'_> {
|
||||
Type::Any => f.write_str("Any"),
|
||||
Type::Never => f.write_str("Never"),
|
||||
Type::Unknown => f.write_str("Unknown"),
|
||||
Type::Instance(InstanceType { class })
|
||||
if class.is_known(self.db, KnownClass::NoneType) =>
|
||||
{
|
||||
f.write_str("None")
|
||||
Type::Instance(InstanceType { class }) => {
|
||||
let representation = match class.known(self.db) {
|
||||
Some(KnownClass::NoneType) => "None",
|
||||
Some(KnownClass::NoDefaultType) => "NoDefault",
|
||||
_ => class.name(self.db),
|
||||
};
|
||||
f.write_str(representation)
|
||||
}
|
||||
// `[Type::Todo]`'s display should be explicit that is not a valid display of
|
||||
// any other type
|
||||
Type::Todo => f.write_str("@Todo"),
|
||||
Type::Todo(todo) => write!(f, "@Todo{todo}"),
|
||||
Type::ModuleLiteral(file) => {
|
||||
write!(f, "<module '{:?}'>", file.path(self.db))
|
||||
}
|
||||
@@ -82,16 +86,13 @@ impl Display for DisplayRepresentation<'_> {
|
||||
Type::SubclassOf(SubclassOfType { class }) => {
|
||||
write!(f, "type[{}]", class.name(self.db))
|
||||
}
|
||||
Type::Instance(InstanceType { class }) => f.write_str(class.name(self.db)),
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.as_str()),
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.repr(self.db)),
|
||||
Type::FunctionLiteral(function) => f.write_str(function.name(self.db)),
|
||||
Type::Union(union) => union.display(self.db).fmt(f),
|
||||
Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
|
||||
Type::IntLiteral(n) => n.fmt(f),
|
||||
Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
|
||||
Type::StringLiteral(string) => {
|
||||
write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
|
||||
}
|
||||
Type::StringLiteral(string) => string.display(self.db).fmt(f),
|
||||
Type::LiteralString => f.write_str("LiteralString"),
|
||||
Type::BytesLiteral(bytes) => {
|
||||
let escape =
|
||||
@@ -288,7 +289,7 @@ struct DisplayMaybeNegatedType<'db> {
|
||||
negated: bool,
|
||||
}
|
||||
|
||||
impl<'db> Display for DisplayMaybeNegatedType<'db> {
|
||||
impl Display for DisplayMaybeNegatedType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
if self.negated {
|
||||
f.write_str("~")?;
|
||||
@@ -318,7 +319,7 @@ pub(crate) struct DisplayTypeArray<'b, 'db> {
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl<'db> Display for DisplayTypeArray<'_, 'db> {
|
||||
impl Display for DisplayTypeArray<'_, '_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.join(", ")
|
||||
.entries(self.types.iter().map(|ty| ty.display(self.db)))
|
||||
@@ -326,36 +327,40 @@ impl<'db> Display for DisplayTypeArray<'_, 'db> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> StringLiteralType<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplayStringLiteralType<'db> {
|
||||
DisplayStringLiteralType { db, ty: self }
|
||||
}
|
||||
}
|
||||
|
||||
struct DisplayStringLiteralType<'db> {
|
||||
ty: &'db StringLiteralType<'db>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplayStringLiteralType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let value = self.ty.value(self.db);
|
||||
f.write_char('"')?;
|
||||
for ch in value.chars() {
|
||||
match ch {
|
||||
// `escape_debug` will escape even single quotes, which is not necessary for our
|
||||
// use case as we are already using double quotes to wrap the string.
|
||||
'\'' => f.write_char('\'')?,
|
||||
_ => write!(f, "{}", ch.escape_debug())?,
|
||||
}
|
||||
}
|
||||
f.write_char('"')
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::types::{
|
||||
global_symbol, BytesLiteralType, SliceLiteralType, StringLiteralType, Type, UnionType,
|
||||
};
|
||||
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
use crate::db::tests::setup_db;
|
||||
use crate::types::{global_symbol, SliceLiteralType, StringLiteralType, Type, UnionType};
|
||||
|
||||
#[test]
|
||||
fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
|
||||
@@ -380,12 +385,12 @@ mod tests {
|
||||
Type::Unknown,
|
||||
Type::IntLiteral(-1),
|
||||
global_symbol(&db, mod_file, "A").expect_type(),
|
||||
Type::StringLiteral(StringLiteralType::new(&db, "A")),
|
||||
Type::BytesLiteral(BytesLiteralType::new(&db, [0u8].as_slice())),
|
||||
Type::BytesLiteral(BytesLiteralType::new(&db, [7u8].as_slice())),
|
||||
Type::string_literal(&db, "A"),
|
||||
Type::bytes_literal(&db, &[0u8]),
|
||||
Type::bytes_literal(&db, &[7u8]),
|
||||
Type::IntLiteral(0),
|
||||
Type::IntLiteral(1),
|
||||
Type::StringLiteral(StringLiteralType::new(&db, "B")),
|
||||
Type::string_literal(&db, "B"),
|
||||
global_symbol(&db, mod_file, "foo").expect_type(),
|
||||
global_symbol(&db, mod_file, "bar").expect_type(),
|
||||
global_symbol(&db, mod_file, "B").expect_type(),
|
||||
@@ -451,4 +456,28 @@ mod tests {
|
||||
"slice[None, None, Literal[2]]"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_literal_display() {
|
||||
let db = setup_db();
|
||||
|
||||
assert_eq!(
|
||||
Type::StringLiteral(StringLiteralType::new(&db, r"\n"))
|
||||
.display(&db)
|
||||
.to_string(),
|
||||
r#"Literal["\\n"]"#
|
||||
);
|
||||
assert_eq!(
|
||||
Type::StringLiteral(StringLiteralType::new(&db, "'"))
|
||||
.display(&db)
|
||||
.to_string(),
|
||||
r#"Literal["'"]"#
|
||||
);
|
||||
assert_eq!(
|
||||
Type::StringLiteral(StringLiteralType::new(&db, r#"""#))
|
||||
.display(&db)
|
||||
.to_string(),
|
||||
r#"Literal["\""]"#
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user