Compare commits: 0.5.1...perf-node-
439 Commits
Commit SHA1s:

82f33db5e6, f873d2ac12, ecd9e6a650, 785c39927b, a35cdbb275, 0c98b5949c, e5f37a8254, 5c5dfc11f0,
678045e1aa, dedefd73da, 37a60460ed, 0bd258a370, 9baab8672a, c65e3310d5, 38c19fb96e, abb4cdbf3d,
fc811f5168, 1a8f29ea41, aefaddeae7, df09045176, 049cda2ff3, 358792f2c9, e6d5a7af37, f5bff82e70,
ab44152eb5, f4c8c7eb70, 65de8f2c9b, e6226436fd, 0345d46759, 4d0d3b00cb, 2be1c4ff04, edd86d5603,
78ad7959ca, d72ecd6ded, 8617a508bd, c88bd4e884, fbcda90316, 169d4390cb, 80ade591df, 4881d32c80,
81a2220ce1, 900e98b584, f9d8189670, 52ba94191a, 96802d6a7f, dd0a7ec73e, 25f5ae44c4, 251efe5c41,
6359e55383, a9847af6e8, d61d75d4fa, 499c0bd875, 4cb30b598f, aba0d83c11, c319414e54, ef1f6d98a0,
b850b812de, a87b27c075, 9b73532b11, d8debb7a36, bd4a947b29, f121f8b31b, 80efb865e9, 52d27befe8,
6ed06afd28, b9da31610a, ac7b1770e2, e4c2859c0f, 6dcd743111, 73160dc8b6, 15aa5a6d57, 33512a4249,
d8ebb03591, 2e211c5c22, 9fd8aaaf29, d110bd4e60, eb9c7ae869, 7defc0d136, 45f459bafd, 99e946a005,
78a7ac0722, fa2f3f9f2f, 3898d737d8, c487149b7d, bebed67bf1, 3ddcad64f5, 05c35b6975, 7fc39ad624,
2520ebb145, 89c8b49027, e05953a991, d0ac38f9d3, ff53db3d99, 899a52390b, 82a3e69b8a, 7027344dfc,
fb9f0c448f, 75131c6f4a, 4b9ddc4a06, 99dc208b00, 540023262e, 2ea79572ae, aa0db338d9, a99a45868c,
fabf19fdc9, 59f712a566, 1d080465de, 3481e16cdf, d7e9280e1e, f237d36d2f, 12f22b1fdd, 47d05ee9ea,
9caec36b59, cb364780b3, 71b8bf211f, 109b9cc4f9, 5d02627794, 65444bb00e, 8822a79b4d, 2df4d23113,
603b62607a, 2b71fc4510, 1b78d872ec, feba5031dc, 0c2b88f224, cf1a57df5a, 597c5f9124, 69e1c567d4,
37b9bac403, 83db48d316, c4e651921b, b595346213, 253474b312, a176679b24, 1f51048fa4, 2abfab0f9b,
64f1f3468d, ffaa35eafe, c906b0183b, bc5b9b81dd, 221ea662e0, d28c5afd14, f1de08c2a0, 33e9a6a54e,
f577e03021, f53733525c, 2daa914334, 6d9205e346, df7345e118, dc6aafecc2, 5107a50ae7, a631d600ac,
f34b9a77f0, 7997da47f5, d380b37a09, b14fee9320, 037e817450, 7fcfedd430, 50ff5c7544, 90e5bc2bd9,
aae9619d3d, 7fa76a2b2b, 14dd6d980e, 846f57fd15, 8e6aa78796, e91a0fe94a, d2c627efb3, 10e977d5f5,
f0318ff889, 5cc3fed9a8, 39dd732e27, 52630a1d55, 7b5fd63ce8, 5499821c67, 7ee7c68f36, 2393d19f91,
a8e2ba508e, 0b4d3ce39b, 0a345dc627, ff2aa3ea00, 0d3bad877d, 756060d676, b647f3fba8, 82e69ebf23,
2c79045342, 3497f5257b, 25aabec814, 0e71485ea9, 43a9d282f7, 6f357b8b45, 73d9f11a9c, d6c6db5a44,
56d985a972, b3e0655cc9, 06baffec9e, 67a2ae800a, 7a2c75e2fc, 9ee44637ca, 733341ab39, 341a25eec1,
38e178e914, daccb3f4f3, c858afe03a, 3c1c3199d0, fbfe2cb2f5, 1c311e4fdb, 12177a42e3, dfb08856eb,
94d817e1a5, 9296bd4e3f, da824ba316, 012198a1b0, fbab04fbe1, 9aa43d5f91, 966563c79b, 27edadec29,
2e2b1b460f, a3e67abf4c, ee0518e8f7, d774a3bd48, 7e6b19048e, 8e383b9587, 3f49ab126f, c1bc7f4dee,
a44d579f21, a3900d2b0b, 83b1c48a93, 138e70bd5c, ee103ffb25, 18f87b9497, adc8d4e1e7, 90db361199,
4738135801, 264cd750e9, 7a4419a2a5, ac1666d6e2, 459c85ba27, aaa56eb0bd, f3c14a4276, 3169d408fa,
a2286c8e47, fb9f566f56, 381bd1ff4a, 2f54d05d97, e18b4e42d3, 9495331a5f, e1076db7d0, 1986c9e8e2,
d7e80dc955, 87d09f77cd, bd37ef13b8, ec23c974db, 122e5ab428, 2f2149aca8, 9d5c31e7da, 25f3ad6238,
79926329a4, 9cdc578dd9, 665c75f7ab, f37b39d6cc, e18c45c310, d930052de8, 7ad4df9e9f, 425761e960,
4b69271809, bf23d38a21, 49f51583fa, 1fe4a5faed, 998bfe0847, 6f4db8675b, 71f7aa4971, 9f72f474e6,
10c993e21a, 2d3914296d, 7571da8778, 2ceac5f868, 5ce80827d2, e047b9685a, fc16d8d04d, 175e5d7b88,
c03f257ed7, 6bbb4a28c2, 2ce3e3ae60, 2a64cccb61, 928ffd6650, e52be0951a, 889073578e, eac965ecaf,
8659f2f4ea, c1b292a0dc, 3af6ccb720, f0fc6a95fe, f96a3c71ff, b9b7deff17, 40d9324f5a, a9f8bd59b2,
143e172431, b2d3a05ee4, ef1ca0dd38, c7b13bb8fc, dbbe3526ef, f22c8ab811, 2a8f95c437, ea2d51c2bb,
ed238e0c76, 3ace12943e, 978909fcf4, f8735e1ee8, d70ceb6a56, fc7d9e95b8, b578fca9cb, 8d3146c2b2,
fa5c841154, f8fcbc19d9, 97fdd48208, 731ed2e40b, 3a742c17f8, 053243635c, 82355712c3, 4bc73dd87e,
53b84ab054, 3664f85f45, 2c1926beeb, 4bcc96ae51, c0a2b49bac, ca22248628, d8cf8ac2ef, 1c7b84059e,
f82bb67555, 5f96f69151, ad19b3fd0e, a62e2d2000, d61747093c, 0ba7fc63d0, fa5b19d4b6, 181e7b3c0d,
519eca9fe7, f0d589d7a3, 512c8b2cc5, 811f78d94d, 8f1be31289, 8cfbac71a4, 9460857932, a028ca22f0,
7953f6aa79, 764d9ab4ee, 9b9d701500, 648cca199b, 2e77b775b0, ebe5b06c95, b2a49d8140, 985a999234,
1df51b1fbf, 1435b0f022, e39298dcbc, 1de8ff3308, 72e02206d6, 80f0116641, 79b535587b, 6e0cbe0f35,
91338ae902, 0c72577b5d, fe04f2b09d, 073588b48e, 9a2dafb43d, 595b1aa4a1, 30cef67b45, d0c5925672,
b1487b6b4f, 85ae02d62e, 9a817a2922, ecd4b4d943, b9a8cd390f, 2348714081, 3817b207cf, b1cf9ea663,
8ad10b9307, 9c5524a9a2, 1530223311, b9671522c4, 9918202422, 42e7147860, 25feab93f8, dc8db1afb0,
18c364d5df, 7a7c601d5e, 3bfbbbc78c, 1a3ee45b23, 65848869d5, 456d6a2fb2, 940df67823, e58713e2ac,
aa5c53b38b, 4e6ecb2348, 6febd96dfe, 17e84d5f40, b6545ce5d6, 90e9aae3f4, bd01004a42, d0298dc26d,
bbb9fe1692, 5b21922420, abcf07c8c5, e8b5341c97, 880c31d164, d365f1a648, 4cc7bc9d32, 0bb2fc6eec,
855d62cdde, 88abc6aed8, 6fa4e32ad3, 000dabcd88, f8ff42a13d, b5834d57af, 3d3ff10bb9, ac04380f36,
16a63c88cf, 10f07d88a2, 1e04bd0b73, 2041b0e5fb, bf3d903939, 64855c5f06, b5ab4ce293, c396b9f08b,
9ed3893e6d, 30c9604c1d, 7e4a1c2b33, e379160941, 38b503ebcc, dac476f2c0, 754e5d6a7d, d9c15e7a12,
757c75752e, 9d61727289, a62a432a48, 8198723201, 7df10ea3e9, 0e44235981, 7b50061b43
.github/renovate.json5 (vendored, 27 changes)

@@ -8,15 +8,32 @@
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
   },
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
   pip_requirements: {
     // The default for this package manager is to run on all requirements.txt files:
     // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
     // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
     // outside the `docs/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
     // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
     // a "negative glob pattern".
     // See:
     // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
     // - https://docs.renovatebot.com/configuration-options/#ignorepaths
     // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
     ignorePaths: ["!docs/requirements*.txt"]
   },
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
     fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {

@@ -48,6 +65,14 @@
     matchManagers: ["cargo"],
     enabled: false,
   },
   {
     // `mkdocs-material` requires a manual update to keep the version in sync
     // with `mkdocs-material-insider`.
     // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
     matchManagers: ["pip_requirements"],
     matchPackagePatterns: ["mkdocs-material"],
     enabled: false,
   },
   {
     groupName: "pre-commit dependencies",
     matchManagers: ["pre-commit"],
.github/workflows/ci.yaml (vendored, 17 changes)

@@ -111,7 +111,7 @@ jobs:
      - name: "Clippy"
        run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
      - name: "Clippy (wasm)"
-       run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
+       run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings

  cargo-test-linux:
    name: "cargo test (linux)"

@@ -142,6 +142,13 @@
      # Check for broken links in the documentation.
      - run: cargo doc --all --no-deps
        env:
          RUSTDOCFLAGS: "-D warnings"
      # Use --document-private-items so that all our doc comments are kept in
      # sync, not just public items. Eventually we should do this for all
      # crates; for now add crates here as they are warning-clean to prevent
      # regression.
      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
        env:
          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
          RUSTDOCFLAGS: "-D warnings"
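For reference, these documentation checks can be reproduced locally with a direct transcription of the workflow steps above (a sketch; it assumes you are at the repository root with a working Rust toolchain):

```sh
# Fail on rustdoc warnings, mirroring the CI configuration.
RUSTDOCFLAGS="-D warnings" cargo doc --all --no-deps
RUSTDOCFLAGS="-D warnings" cargo doc --no-deps \
  -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
```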
@@ -191,10 +198,14 @@
          cache-dependency-path: playground/package-lock.json
      - uses: jetli/wasm-pack-action@v0.4.0
      - uses: Swatinem/rust-cache@v2
-     - name: "Run wasm-pack"
+     - name: "Test ruff_wasm"
        run: |
          cd crates/ruff_wasm
          wasm-pack test --node
+     - name: "Test red_knot_wasm"
+       run: |
+         cd crates/red_knot_wasm
+         wasm-pack test --node

  cargo-build-release:
    name: "cargo build (release)"

@@ -619,7 +630,7 @@ jobs:
        run: cargo codspeed build --features codspeed -p ruff_benchmark

      - name: "Run benchmarks"
-       uses: CodSpeedHQ/action@v2
+       uses: CodSpeedHQ/action@v3
        with:
          run: cargo codspeed run
          token: ${{ secrets.CODSPEED_TOKEN }}
.github/workflows/pr-comment.yaml (vendored, 2 changes)

@@ -23,6 +23,7 @@ jobs:
          name: pr-number
          run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
          if_no_artifact_found: ignore
+         allow_forks: true

      - name: Parse pull request number
        id: pr-number

@@ -43,6 +44,7 @@
          path: pr/ecosystem
          workflow_conclusion: completed
          if_no_artifact_found: ignore
+         allow_forks: true

      - name: Generate comment content
        id: generate-comment
.github/workflows/publish-docs.yml (vendored, 107 changes)

@@ -21,42 +21,131 @@ jobs:
  mkdocs:
    runs-on: ubuntu-latest
    env:
      CF_API_TOKEN_EXISTS: ${{ secrets.CF_API_TOKEN != '' }}
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ inputs.ref }}

      - uses: actions/setup-python@v5
        with:
          python-version: 3.12

      - name: "Set docs version"
        run: |
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
          # if version is missing, exit with error
          if [[ -z "$version" ]]; then
            echo "Can't build docs without a version."
            exit 1
          fi

          # Use version as display name for now
          display_name="$version"

          echo "version=$version" >> $GITHUB_ENV
          echo "display_name=$display_name" >> $GITHUB_ENV

      - name: "Set branch name"
        run: |
          version="${{ env.version }}"
          display_name="${{ env.display_name }}"
          timestamp="$(date +%s)"

          # create branch_display_name from display_name by replacing all
          # characters disallowed in git branch names with hyphens
          branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"

          echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
          echo "timestamp=$timestamp" >> $GITHUB_ENV
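As a rough illustration of that sanitization pipeline (the tag below is made up), any character outside `[:alnum:]`, `.`, and `_` becomes a hyphen, and runs of hyphens are squeezed to one; note that `tr -c` also replaces the trailing newline from `echo`, so the result keeps one trailing hyphen:

```sh
echo "v0.6.1 (rc/1)" | tr -c '[:alnum:]._' '-' | tr -s '-'
# prints: v0.6.1-rc-1-
```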
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@v0.9.0
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
|
||||
- name: "Copy README File"
|
||||
run: |
|
||||
python scripts/transform_readme.py --target mkdocs
|
||||
python scripts/generate_mkdocs.py
|
||||
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
- name: "Deploy to Cloudflare Pages"
|
||||
if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
|
||||
uses: cloudflare/wrangler-action@v3.6.1
|
||||
with:
|
||||
apiToken: ${{ secrets.CF_API_TOKEN }}
|
||||
accountId: ${{ secrets.CF_ACCOUNT_ID }}
|
||||
# `github.head_ref` is only set during pull requests and for manual runs or tags we use `main` to deploy to production
|
||||
command: pages deploy site --project-name=astral-docs --branch ${{ github.head_ref || 'main' }} --commit-hash ${GITHUB_SHA}
|
||||
|
||||
- name: "Clone docs repo"
|
||||
run: |
|
||||
version="${{ env.version }}"
|
||||
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/ruff && mkdir -p astral-docs/site && cp -r site/ruff astral-docs/site/
|
||||
|
||||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b $branch_name
|
||||
git add site/ruff
|
||||
git commit -m "Update ruff documentation for $version"
|
||||
|
||||
- name: "Create Pull Request"
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
version="${{ env.version }}"
|
||||
display_name="${{ env.display_name }}"
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
# set the PR title
|
||||
pull_request_title="Update ruff documentation for $display_name"
|
||||
|
||||
# Delete any existing pull requests that are open for this version
|
||||
# by checking against pull_request_title because the new PR will
|
||||
# supersede the old one.
|
||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
||||
xargs -I {} gh pr close {}
|
||||
|
||||
# push the branch to GitHub
|
||||
git push origin $branch_name
|
||||
|
||||
# create the PR
|
||||
gh pr create --base main --head $branch_name \
|
||||
--title "$pull_request_title" \
|
||||
--body "Automated documentation update for $display_name" \
|
||||
--label "documentation"
|
||||
|
||||
- name: "Merge Pull Request"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
branch_name="${{ env.branch_name }}"
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
sleep 10
|
||||
gh pr merge --squash $branch_name
|
||||
|
||||
.github/workflows/publish-playground.yml (vendored, 2 changes)

@@ -47,7 +47,7 @@ jobs:
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       uses: cloudflare/wrangler-action@v3.6.1
+       uses: cloudflare/wrangler-action@v3.7.0
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
.github/workflows/publish-wasm.yml (vendored, new file, 55 changes)

@@ -0,0 +1,55 @@
# Build and publish ruff-api for wasm.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish
# job within `cargo-dist`.
name: "Build and publish wasm"

on:
  workflow_dispatch:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

env:
  CARGO_INCREMENTAL: 0
  CARGO_NET_RETRY: 10
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

jobs:
  ruff_wasm:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    strategy:
      matrix:
        target: [web, bundler, nodejs]
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup target add wasm32-unknown-unknown
      - uses: jetli/wasm-pack-action@v0.4.0
      - uses: jetli/wasm-bindgen-action@v0.2.0
      - name: "Run wasm-pack build"
        run: wasm-pack build --target ${{ matrix.target }} crates/ruff_wasm
      - name: "Rename generated package"
        run: | # Replace the package name w/ jq
          jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json
          mv /tmp/package.json crates/ruff_wasm/pkg
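For illustration, here is roughly what that rename does for the `nodejs` target; the input `package.json` contents below are hypothetical, and `jq` rewrites only the `name` field while passing the rest of the object through unchanged:

```sh
# Hypothetical wasm-pack output: {"name": "ruff_wasm", "version": "0.6.1", ...}
jq '.name="@astral-sh/ruff-wasm-nodejs"' crates/ruff_wasm/pkg/package.json
# prints the same object with: "name": "@astral-sh/ruff-wasm-nodejs"
```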
      - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg
      - uses: actions/setup-node@v4
        with:
          node-version: 18
          registry-url: "https://registry.npmjs.org"
      - name: "Publish (dry-run)"
        if: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
        run: npm publish --dry-run crates/ruff_wasm/pkg
      - name: "Publish"
        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
        run: npm publish --provenance --access public crates/ruff_wasm/pkg
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
.github/workflows/release.yml (vendored, 18 changes)

@@ -214,16 +214,32 @@ jobs:
      "id-token": "write"
      "packages": "write"

+ custom-publish-wasm:
+   needs:
+     - plan
+     - host
+   if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
+   uses: ./.github/workflows/publish-wasm.yml
+   with:
+     plan: ${{ needs.plan.outputs.val }}
+   secrets: inherit
+   # publish jobs get escalated permissions
+   permissions:
+     "contents": "read"
+     "id-token": "write"
+     "packages": "write"

  # Create a GitHub Release while uploading all files to it
  announce:
    needs:
      - plan
      - host
      - custom-publish-pypi
+     - custom-publish-wasm
    # use "always() && ..." to allow us to wait for all publish jobs while
    # still allowing individual publish jobs to skip themselves (for prereleases).
    # "host" however must run to completion, no skipping allowed!
-   if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
+   if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-wasm.result == 'skipped' || needs.custom-publish-wasm.result == 'success') }}
    runs-on: "ubuntu-20.04"
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/sync_typeshed.yaml (vendored, 14 changes)

@@ -37,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
-         rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
-         mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
-         cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
-         cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
-         cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
-         rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
-         git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
+         rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
+         mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
+         cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
+         cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
+         cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
+         rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
+         git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
.gitignore (vendored, 8 changes)

@@ -21,6 +21,14 @@ flamegraph.svg
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
/target*

+ # samply profiles
+ profile.json

+ # tracing-flame traces
+ tracing.folded
+ tracing-flamechart.svg
+ tracing-flamegraph.svg

###
# Rust.gitignore
###
.markdownlint.yaml

@@ -14,6 +14,9 @@ MD041: false
# MD013/line-length
MD013: false

+ # MD014/commands-show-output
+ MD014: false

# MD024/no-duplicate-heading
MD024:
  # Allow when nested under different parents e.g. CHANGELOG.md
.pre-commit-config.yaml

@@ -2,9 +2,12 @@ fail_fast: true

exclude: |
  (?x)^(
    crates/red_knot_module_resolver/vendor/.*|
    crates/red_knot_python_semantic/vendor/.*|
    crates/red_knot_workspace/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
    crates/ruff_notebook/resources/.*|
    crates/ruff_server/resources/.*|
    crates/ruff/resources/.*|
    crates/ruff_python_formatter/resources/.*|
    crates/ruff_python_formatter/tests/snapshots/.*|

@@ -14,7 +17,7 @@ exclude: |

repos:
  - repo: https://github.com/abravalheri/validate-pyproject
-   rev: v0.18
+   rev: v0.19
    hooks:
      - id: validate-pyproject

@@ -42,7 +45,7 @@ repos:
      )$

  - repo: https://github.com/crate-ci/typos
-   rev: v1.22.9
+   rev: v1.23.6
    hooks:
      - id: typos

@@ -56,18 +59,13 @@ repos:
      pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.5.0
+   rev: v0.6.1
    hooks:
      - id: ruff-format
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
        types_or: [python, pyi]
        require_serial: true
-       exclude: |
-         (?x)^(
-           crates/ruff_linter/resources/.*|
-           crates/ruff_python_formatter/resources/.*
-         )$

  # Prettier
  - repo: https://github.com/pre-commit/mirrors-prettier
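To exercise this hook configuration locally before pushing, the standard pre-commit invocation applies (ordinary `pre-commit` usage, not something specific to this diff):

```sh
pip install pre-commit      # once
pre-commit run --all-files  # run every configured hook against the repo
```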
BREAKING_CHANGES.md

@@ -1,5 +1,43 @@
# Breaking Changes

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

  ```toml
  [tool.ruff.lint.per-file-ignores]
  "*.ipynb" = ["E501"] # disable line-too-long in notebooks
  ```

  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
  Notebook files and not format them:

  ```toml
  [tool.ruff.format]
  exclude = ["*.ipynb"]
  ```

  And, conversely, the following would only format Jupyter Notebook files and not lint them:

  ```toml
  [tool.ruff.lint]
  exclude = ["*.ipynb"]
  ```

  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

  ```toml
  [tool.ruff]
  extend-exclude = ["*.ipynb"]
  ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
CHANGELOG.md (343 changes)

@@ -1,5 +1,348 @@
# Changelog

## 0.6.1

This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
Ruff changed its behavior to lint and format Jupyter notebooks by default;
however, due to an oversight, these files were still excluded by default if
Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).

### Preview features

- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))

### Rule changes

- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))

### Server

- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))

### Other changes

- \[`pep8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
## 0.6.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!

### Breaking changes

See also the "Remapped rules" section, which may result in disabled rules.

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

### Deprecations

The following rules are now deprecated:

- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)

### Remapped rules

The following rules have been remapped to new rule codes:

- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`

### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)

The following behaviors have been stabilized:

- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.

The following fixes have been stabilized:

- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)

### Preview features

- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))

### Rule changes

- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))

### Server

- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))

### Bug fixes

- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))
## 0.5.7

### Preview features

- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))

### Rule changes

- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))

### Server

- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))

### CLI

- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))

### Bug fixes

- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
- \[`flake8-comprehensions`\] Set comprehensions not a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
## 0.5.6

Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
You can opt out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.

```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```

### Preview features

- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))

### Rule changes

- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))

### Server

- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))

### Configuration

- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))

### Bug fixes

- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))

### Other changes

- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
- Improve consistency between linter rules in determining whether a function is a property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
## 0.5.5

### Preview features

- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency` (`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))

### Rule changes

- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))

### Formatter

- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))

### Server

- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))

### Bug fixes

- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))

### Documentation

- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
## 0.5.4

### Rule changes

- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))

### Bug fixes

- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))

### Documentation

- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
## 0.5.3

**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
[documentation](https://docs.astral.sh/ruff/editors), including [setup guides for your editor of
choice](https://docs.astral.sh/ruff/editors/setup) and [the language server
itself](https://docs.astral.sh/ruff/editors/settings)**.

### Preview features

- Formatter: Insert empty line between suite and alternative branch after function/class definition ([#12294](https://github.com/astral-sh/ruff/pull/12294))
- \[`pyupgrade`\] Implement `unnecessary-default-type-args` (`UP043`) ([#12371](https://github.com/astral-sh/ruff/pull/12371))

### Rule changes

- \[`flake8-bugbear`\] Detect enumerate iterations in `loop-iterator-mutation` (`B909`) ([#12366](https://github.com/astral-sh/ruff/pull/12366))
- \[`flake8-bugbear`\] Remove `discard`, `remove`, and `pop` allowance for `loop-iterator-mutation` (`B909`) ([#12365](https://github.com/astral-sh/ruff/pull/12365))
- \[`pylint`\] Allow `repeated-equality-comparison` for mixed operations (`PLR1714`) ([#12369](https://github.com/astral-sh/ruff/pull/12369))
- \[`pylint`\] Ignore `self` and `cls` when counting arguments (`PLR0913`) ([#12367](https://github.com/astral-sh/ruff/pull/12367))
- \[`pylint`\] Use UTF-8 as default encoding in `unspecified-encoding` fix (`PLW1514`) ([#12370](https://github.com/astral-sh/ruff/pull/12370))

### Server

- Build settings index in parallel for the native server ([#12299](https://github.com/astral-sh/ruff/pull/12299))
- Use fallback settings when indexing the project ([#12362](https://github.com/astral-sh/ruff/pull/12362))
- Consider `--preview` flag for `server` subcommand for the linter and formatter ([#12208](https://github.com/astral-sh/ruff/pull/12208))

### Bug fixes

- \[`flake8-comprehensions`\] Allow additional arguments for `sum` and `max` comprehensions (`C419`) ([#12364](https://github.com/astral-sh/ruff/pull/12364))
- \[`pylint`\] Avoid dropping extra boolean operations in `repeated-equality-comparison` (`PLR1714`) ([#12368](https://github.com/astral-sh/ruff/pull/12368))
- \[`pylint`\] Consider expression before statement when determining binding kind (`PLR1704`) ([#12346](https://github.com/astral-sh/ruff/pull/12346))

### Documentation

- Add docs for Ruff language server ([#12344](https://github.com/astral-sh/ruff/pull/12344))
- Migrate to standalone docs repo ([#12341](https://github.com/astral-sh/ruff/pull/12341))
- Update versioning policy for editor integration ([#12375](https://github.com/astral-sh/ruff/pull/12375))

### Other changes

- Publish Wasm API to npm ([#12317](https://github.com/astral-sh/ruff/pull/12317))
## 0.5.2

### Preview features

- Use `space` separator before parenthesized expressions in comprehensions with leading comments ([#12282](https://github.com/astral-sh/ruff/pull/12282))
- \[`flake8-async`\] Update `ASYNC100` to include `anyio` and `asyncio` ([#12221](https://github.com/astral-sh/ruff/pull/12221))
- \[`flake8-async`\] Update `ASYNC109` to include `anyio` and `asyncio` ([#12236](https://github.com/astral-sh/ruff/pull/12236))
- \[`flake8-async`\] Update `ASYNC110` to include `anyio` and `asyncio` ([#12261](https://github.com/astral-sh/ruff/pull/12261))
- \[`flake8-async`\] Update `ASYNC115` to include `anyio` and `asyncio` ([#12262](https://github.com/astral-sh/ruff/pull/12262))
- \[`flake8-async`\] Update `ASYNC116` to include `anyio` and `asyncio` ([#12266](https://github.com/astral-sh/ruff/pull/12266))

### Rule changes

- \[`flake8-return`\] Exempt properties from explicit return rule (`RET501`) ([#12243](https://github.com/astral-sh/ruff/pull/12243))
- \[`numpy`\] Add `np.NAN`-to-`np.nan` diagnostic ([#12292](https://github.com/astral-sh/ruff/pull/12292))
- \[`refurb`\] Make `list-reverse-copy` an unsafe fix ([#12303](https://github.com/astral-sh/ruff/pull/12303))

### Server

- Consider `include` and `extend-include` settings in native server ([#12252](https://github.com/astral-sh/ruff/pull/12252))
- Include nested configurations in settings reloading ([#12253](https://github.com/astral-sh/ruff/pull/12253))

### CLI

- Omit code frames for fixes with empty ranges ([#12304](https://github.com/astral-sh/ruff/pull/12304))
- Warn about formatter incompatibility for `D203` ([#12238](https://github.com/astral-sh/ruff/pull/12238))

### Bug fixes

- Make cache-write failures non-fatal on Windows ([#12302](https://github.com/astral-sh/ruff/pull/12302))
- Treat `not` operations as boolean tests ([#12301](https://github.com/astral-sh/ruff/pull/12301))
- \[`flake8-bandit`\] Avoid `S310` violations for HTTP-safe f-strings ([#12305](https://github.com/astral-sh/ruff/pull/12305))
- \[`flake8-bandit`\] Support explicit string concatenations in `S310` HTTP detection ([#12315](https://github.com/astral-sh/ruff/pull/12315))
- \[`flake8-bandit`\] Fix `S113` false positive for `httpx` without `timeout` argument ([#12213](https://github.com/astral-sh/ruff/pull/12213))
- \[`pycodestyle`\] Remove "non-obvious" allowance for `E721` ([#12300](https://github.com/astral-sh/ruff/pull/12300))
- \[`pyflakes`\] Consider `with` blocks as single-item branches for redefinition analysis ([#12311](https://github.com/astral-sh/ruff/pull/12311))
- \[`refurb`\] Restrict forwarding for `newline` argument in `open()` calls to Python versions >= 3.10 ([#12244](https://github.com/astral-sh/ruff/pull/12244))

### Documentation

- Update help and documentation to reflect `--output-format full` default ([#12248](https://github.com/astral-sh/ruff/pull/12248))

### Performance

- Use more threads when discovering Python files ([#12258](https://github.com/astral-sh/ruff/pull/12258))

## 0.5.1

### Preview features
CONTRIBUTING.md

@@ -2,35 +2,6 @@

Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

- - [The Basics](#the-basics)
- - [Prerequisites](#prerequisites)
- - [Development](#development)
- - [Project Structure](#project-structure)
- - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- - [Rule naming convention](#rule-naming-convention)
- - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
- - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- - [MkDocs](#mkdocs)
- - [Release Process](#release-process)
- - [Creating a new release](#creating-a-new-release)
- - [Ecosystem CI](#ecosystem-ci)
- - [Benchmarking and Profiling](#benchmarking-and-profiling)
- - [CPython Benchmark](#cpython-benchmark)
- - [Microbenchmarks](#microbenchmarks)
- - [Benchmark-driven Development](#benchmark-driven-development)
- - [PR Summary](#pr-summary)
- - [Tips](#tips)
- - [Profiling Projects](#profiling-projects)
- - [Linux](#linux)
- - [Mac](#mac)
- - [`cargo dev`](#cargo-dev)
- - [Subsystems](#subsystems)
- - [Compilation Pipeline](#compilation-pipeline)
- - [Import Categorization](#import-categorization)
- - [Project root](#project-root)
- - [Package root](#package-root)
- - [Import categorization](#import-categorization-1)

## The Basics

Ruff welcomes contributions in the form of pull requests.
@@ -333,22 +304,34 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release

1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`

1. Run `./scripts/release.sh`; this command will:

   - Generate a temporary virtual environment with `rooster`
   - Generate a changelog entry in `CHANGELOG.md`
   - Update versions in `pyproject.toml` and `Cargo.toml`
   - Update references to versions in the `README.md` and documentation
   - Display contributors for the release

1. The changelog should then be editorialized for consistency

   - Often, labels will be missing from pull requests; they will need to be manually organized into the proper section
   - Changes should be edited to be user-facing descriptions, avoiding internal details

1. Highlight any breaking changes in `BREAKING_CHANGES.md`

1. Run `cargo check`. This should update the lock file with new versions.

1. Create a pull request with the changelog and version updates

1. Merge the PR

1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:

   - The new version number (without starting `v`)

1. The release workflow will do the following:

   1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
      uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
      make sure you're [re-running all the failed
@@ -359,14 +342,25 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
   1. Attach artifacts to draft GitHub release
   1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
      downstream jobs manually if needed.

1. Verify the GitHub release:

   1. The Changelog should match the content of `CHANGELOG.md`
   1. Append the contributors from the `bump.sh` script
   1. Append the contributors from the `scripts/release.sh` script

1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).

   1. One can determine whether an update is needed by checking whether
      `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
   1. Once run successfully, you should follow the link in the output to create a PR.
   1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.

1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
   [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
   the release instructions in those repositories. `ruff-lsp` should always be updated
   before `ruff-vscode`.

   This step is generally not required for a patch release, but should always be done
   for a minor release.

## Ecosystem CI

@@ -389,7 +383,7 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> \[!NOTE\]
> **Note**
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
@@ -905,15 +899,11 @@ There are three ways in which an import can be categorized as "first-party":
   package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
   automatically. This check is as simple as comparing the first segment of the current file's
   module path to the first segment of the import.
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
   sets the directories to scan when identifying first-party imports. The algorithm is
   straightforward: given an import, like `import foo`, iterate over the directories enumerated in
   the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
   file `foo.py` (see the sketch after this section).

   By default, `src` is set to the project root. In the above example, we'd want to set
   `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
   as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
   unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
   the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
   explicitly.
   By default, `src` is set to the project root, along with the `"src"` subdirectory in the project root.
   This ensures that Ruff supports both flat and "src" layouts out of the box.

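To make the `src`-root heuristic above concrete, here is a minimal sketch; the function and names are illustrative, not Ruff's actual internals:

```rust
use std::path::PathBuf;

/// Sketch of the `src`-root check described above: an import like
/// `import foo` counts as first-party if any configured source root
/// contains a `foo/` directory or a `foo.py` file.
fn is_first_party(src_roots: &[PathBuf], module: &str) -> bool {
    src_roots.iter().any(|root| {
        root.join(module).is_dir() || root.join(format!("{module}.py")).is_file()
    })
}

fn main() {
    let roots = vec![PathBuf::from("./src")];
    println!("import foo is first-party: {}", is_first_party(&roots, "foo"));
}
```
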
595 Cargo.lock (generated; file diff suppressed because it is too large)
28 Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.75"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -35,9 +35,9 @@ ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace" }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
@@ -50,14 +50,14 @@ cachedir = { version = "0.3.1" }
camino = { version = "1.1.7" }
chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clap_complete_command = { version = "0.6.0" }
clearscreen = { version = "3.0.0" }
codspeed-criterion-compat = { version = "2.6.0", default-features = false }
colored = { version = "2.1.0" }
console_error_panic_hook = { version = "0.1.7" }
console_log = { version = "1.0.0" }
countme = { version = "3.0.1" }
compact_str = "0.7.1"
compact_str = "0.8.0"
criterion = { version = "0.5.1", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
@@ -72,7 +72,6 @@ hashbrown = "0.14.3"
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
indexmap = { version = "2.2.6" }
indicatif = { version = "0.17.8" }
indoc = { version = "2.0.4" }
insta = { version = "1.35.1" }
@@ -95,6 +94,7 @@ mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "6.1.1" }
once_cell = { version = "1.19.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -128,12 +128,13 @@ syn = { version = "2.0.55" }
tempfile = { version = "3.9.0" }
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.5.0" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
@@ -152,11 +153,12 @@ walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
zip = { version = "0.6.6", default-features = false }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -272,10 +274,10 @@ build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi"]
publish-jobs = ["./publish-pypi", "./publish-wasm"]
# Announcement jobs to run in CI
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" } }
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false

25 LICENSE
@@ -1371,3 +1371,28 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- pydoclint, licensed as follows:
  """
  MIT License

  Copyright (c) 2023 jsh9

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to deal
  in the Software without restriction, including without limitation the rights
  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  copies of the Software, and to permit persons to whom the Software is
  furnished to do so, subject to the following conditions:

  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.

  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
  """

17 README.md
@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.5.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.1/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.1
  rev: v0.6.1
  hooks:
    # Run the linter.
    - id: ruff
@@ -179,8 +179,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):
@@ -424,6 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -434,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)

@@ -1,6 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

@@ -10,4 +10,12 @@ doc-valid-idents = [
    "SCREAMING_SNAKE_CASE",
    "SQLAlchemy",
    "StackOverflow",
    "PyCharm",
]

ignore-interior-mutability = [
    # Interned is read-only. The wrapped `Rc` never gets updated.
    "ruff_formatter::format_element::Interned",
    # The expression is read-only.
    "ruff_python_ast::hashable::HashableExpr",
]

@@ -8,29 +8,33 @@ documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true
default-run = "red_knot"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }

ruff_db = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }

anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true

BIN crates/red_knot/docs/tracing-flamegraph.png (new binary file, 40 KiB; not shown)
123 crates/red_knot/docs/tracing.md (new file)
@@ -0,0 +1,123 @@
# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels; an example invocation follows the list.

- default: Only show errors and warnings.
- `-v` activates `info!`: shows generally useful information, such as the paths of configuration files and the detected platform. The volume of messages is low, so this is something you might enable in CI by default; `cargo build`, for example, shows which packages are fresh at this level.
- `-vv` activates `debug!` and timestamps: this should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: at this level, you're logging everything. Most of this output is wasted and it's really slow; we dump, e.g., the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

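For example (hypothetical invocations; the project path is illustrative):

```bash
# Default: errors and warnings only.
red_knot --current-directory ./my_project

# -vv: debug messages plus timestamps, usually enough for bug reports.
red_knot -vv --current-directory ./my_project
```
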
## `RED_KNOT_LOG`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Show the salsa `execute: my_query` messages in addition to all red knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know at the time of entering the span
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values; in our case, the source text and parsed AST.
This very quickly leads to extremely long outputs.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially when using `warn` or `error`, because it isn't guaranteed
that the query will execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation failed,
because the message would only be shown when linting the file the first time, but not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.

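A minimal sketch of that pattern in plain Rust (not Salsa's actual API; all names here are illustrative): the failure travels in the query's return value, so it reappears even when the result is served from a cache:

```rust
/// Result of a hypothetical lint query: diagnostics plus any non-fatal
/// failures hit while producing them.
struct LintResult {
    diagnostics: Vec<String>,
    failures: Vec<String>,
}

fn lint_file(source: &str) -> LintResult {
    let mut result = LintResult {
        diagnostics: Vec::new(),
        failures: Vec::new(),
    };

    if source.is_empty() {
        // Instead of `tracing::warn!(...)`, record the failure in the result
        // so it is replayed on cache hits, not just on the first run.
        result.failures.push("empty source: skipped semantic lints".to_string());
        return result;
    }

    if source.contains('\t') {
        result.diagnostics.push("tab character found".to_string());
    }

    result
}

fn main() {
    let result = lint_file("");
    for failure in &result.failures {
        eprintln!("warning: {failure}");
    }
}
```
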
## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.

## Release builds

`trace!` events are removed in release builds.

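This works at compile time through a Cargo feature on the `tracing` crate; the crate's `Cargo.toml` change shown earlier in this diff enables it. The relevant line:

```toml
# Statically caps the maximum level in release builds, so `trace!` calls
# compile away entirely.
tracing = { workspace = true, features = ["release_max_level_debug"] }
```
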
## Profiling

Red Knot generates a folded stack trace to the current directory named `tracing.folded` when setting the environment variable `RED_KNOT_LOG_PROFILE` to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```



See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
@@ -1,10 +0,0 @@
use red_knot_python_semantic::Db as SemanticDb;
use ruff_db::Upcast;
use salsa::DbWithJar;

use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled};

pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}

#[salsa::jar(db=Db)]
pub struct Jar(lint_syntax, lint_semantic, unwind_if_cancelled);
@@ -1,52 +0,0 @@
use rustc_hash::FxHashSet;

use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::VfsFile;

use crate::db::Jar;

pub mod db;
pub mod lint;
pub mod program;
pub mod watch;

#[derive(Debug, Clone)]
pub struct Workspace {
    root: FileSystemPathBuf,
    /// The files that are open in the workspace.
    ///
    /// * Editor: The files that are actively being edited in the editor (the user has a tab open with the file).
    /// * CLI: The resolved files passed as arguments to the CLI.
    open_files: FxHashSet<VfsFile>,
}

impl Workspace {
    pub fn new(root: FileSystemPathBuf) -> Self {
        Self {
            root,
            open_files: FxHashSet::default(),
        }
    }

    pub fn root(&self) -> &FileSystemPath {
        self.root.as_path()
    }

    // TODO having the content in workspace feels wrong.
    pub fn open_file(&mut self, file_id: VfsFile) {
        self.open_files.insert(file_id);
    }

    pub fn close_file(&mut self, file_id: VfsFile) {
        self.open_files.remove(&file_id);
    }

    // TODO introduce an `OpenFile` type instead of using an anonymous tuple.
    pub fn open_files(&self) -> impl Iterator<Item = VfsFile> + '_ {
        self.open_files.iter().copied()
    }

    pub fn is_file_open(&self, file_id: VfsFile) -> bool {
        self.open_files.contains(&file_id)
    }
}
254 crates/red_knot/src/logging.rs (new file)
@@ -0,0 +1,254 @@
//! Sets up logging for Red Knot

use anyhow::Context;
use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::EnvFilter;

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns [`VerbosityLevel::Default`] if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> VerbosityLevel {
        match self.verbose {
            0 => VerbosityLevel::Default,
            1 => VerbosityLevel::Verbose,
            2 => VerbosityLevel::ExtraVerbose,
            _ => VerbosityLevel::Trace,
        }
    }
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    /// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN).
    Default,

    /// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO).
    /// Corresponds to `-v`.
    Verbose,

    /// Enables a more verbose tracing format and emits Ruff and Red Knot events up to [`DEBUG`](tracing::Level::DEBUG).
    /// Corresponds to `-vv`
    ExtraVerbose,

    /// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
    Trace,
}

impl VerbosityLevel {
    const fn level_filter(self) -> LevelFilter {
        match self {
            VerbosityLevel::Default => LevelFilter::WARN,
            VerbosityLevel::Verbose => LevelFilter::INFO,
            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
            VerbosityLevel::Trace => LevelFilter::TRACE,
        }
    }

    pub(crate) const fn is_trace(self) -> bool {
        matches!(self, VerbosityLevel::Trace)
    }

    pub(crate) const fn is_extra_verbose(self) -> bool {
        matches!(self, VerbosityLevel::ExtraVerbose)
    }
}

pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
    use tracing_subscriber::prelude::*;

    // The `RED_KNOT_LOG` environment variable overrides the default log level.
    let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
        EnvFilter::builder()
            .parse(log_env_variable)
            .context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
    } else {
        match level {
            VerbosityLevel::Default => {
                // Show warning traces
                EnvFilter::default().add_directive(LevelFilter::WARN.into())
            }
            level => {
                let level_filter = level.level_filter();

                // Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
                let filter = EnvFilter::default().add_directive(
                    format!("red_knot={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                );

                filter.add_directive(
                    format!("ruff={level_filter}")
                        .parse()
                        .expect("Hardcoded directive to be valid"),
                )
            }
        }
    };

    let (profiling_layer, guard) = setup_profile();

    let registry = tracing_subscriber::registry()
        .with(filter)
        .with(profiling_layer);

    if level.is_trace() {
        let subscriber = registry.with(
            tracing_tree::HierarchicalLayer::default()
                .with_indent_lines(true)
                .with_indent_amount(2)
                .with_bracketed_fields(true)
                .with_thread_ids(true)
                .with_targets(true)
                .with_writer(std::io::stderr)
                .with_timer(tracing_tree::time::Uptime::default()),
        );

        subscriber.init();
    } else {
        let subscriber = registry.with(
            tracing_subscriber::fmt::layer()
                .event_format(RedKnotFormat {
                    display_level: true,
                    display_timestamp: level.is_extra_verbose(),
                    show_spans: false,
                })
                .with_writer(std::io::stderr),
        );

        subscriber.init();
    }

    Ok(TracingGuard {
        _flame_guard: guard,
    })
}

#[allow(clippy::type_complexity)]
fn setup_profile<S>() -> (
    Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
    Option<tracing_flame::FlushGuard<BufWriter<File>>>,
)
where
    S: Subscriber + for<'span> LookupSpan<'span>,
{
    if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
        let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
            .expect("Flame layer to be created");
        (Some(layer), Some(guard))
    } else {
        (None, None)
    }
}

pub(crate) struct TracingGuard {
    _flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
}

struct RedKnotFormat {
    display_timestamp: bool,
    display_level: bool,
    show_spans: bool,
}

/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
impl<S, N> FormatEvent<S, N> for RedKnotFormat
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    N: for<'a> FormatFields<'a> + 'static,
{
    fn format_event(
        &self,
        ctx: &FmtContext<'_, S, N>,
        mut writer: Writer<'_>,
        event: &Event<'_>,
    ) -> fmt::Result {
        let meta = event.metadata();
        let ansi = writer.has_ansi_escapes();

        if self.display_timestamp {
            let timestamp = chrono::Local::now()
                .format("%Y-%m-%d %H:%M:%S.%f")
                .to_string();
            if ansi {
                write!(writer, "{} ", timestamp.dimmed())?;
            } else {
                write!(
                    writer,
                    "{} ",
                    chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
                )?;
            }
        }

        if self.display_level {
            let level = meta.level();
            // Same colors as tracing
            if ansi {
                let formatted_level = level.to_string();
                match *level {
                    tracing::Level::TRACE => {
                        write!(writer, "{} ", formatted_level.purple().bold())?;
                    }
                    tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
                    tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
                    tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
                    tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
                }
            } else {
                write!(writer, "{level} ")?;
            }
        }

        if self.show_spans {
            let span = event.parent();
            let mut seen = false;

            let span = span
                .and_then(|id| ctx.span(id))
                .or_else(|| ctx.lookup_current());

            let scope = span.into_iter().flat_map(|span| span.scope().from_root());

            for span in scope {
                seen = true;
                if ansi {
                    write!(writer, "{}:", span.metadata().name().bold())?;
                } else {
                    write!(writer, "{}:", span.metadata().name())?;
                }
            }

            if seen {
                writer.write_char(' ')?;
            }
        }

        ctx.field_format().format_fields(writer.by_ref(), event)?;

        writeln!(writer)
    }
}
@@ -1,72 +1,190 @@
use std::process::{ExitCode, Termination};
use std::sync::Mutex;

use anyhow::{anyhow, Context};
use clap::Parser;
use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::ParallelDatabase;
use tracing::subscriber::Interest;
use tracing::{Level, Metadata};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
use tracing_subscriber::{Layer, Registry};
use tracing_tree::time::Uptime;
use salsa::plumbing::ZalsaDatabase;

use red_knot::program::{FileWatcherChange, Program};
use red_knot::watch::FileWatcher;
use red_knot::Workspace;
use red_knot_module_resolver::{set_module_resolution_settings, ModuleResolutionSettings};
use ruff_db::file_system::{FileSystem, FileSystemPath, OsFileSystem};
use ruff_db::vfs::system_path_to_file;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::VirtualEnvironment;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;

#[allow(
    clippy::print_stdout,
    clippy::unnecessary_wraps,
    clippy::print_stderr,
    clippy::dbg_macro
use crate::logging::{setup_tracing, Verbosity};

mod logging;
mod target_version;
mod verbosity;

#[derive(Debug, Parser)]
#[command(
    author,
    name = "red-knot",
    about = "An extremely fast Python type checker."
)]
pub fn main() -> anyhow::Result<()> {
    countme::enable(true);
    setup_tracing();
#[command(version)]
struct Args {
    #[command(subcommand)]
    pub(crate) command: Option<Command>,

    let arguments: Vec<_> = std::env::args().collect();
    #[arg(
        long,
        help = "Changes the current working directory.",
        long_help = "Changes the current working directory before any specified operations. This affects the workspace and configuration discovery.",
        value_name = "PATH"
    )]
    current_directory: Option<SystemPathBuf>,

    if arguments.len() < 2 {
        eprintln!("Usage: red_knot <path>");
        return Err(anyhow::anyhow!("Invalid arguments"));
    #[arg(
        long,
        help = "Path to the virtual environment the project uses",
        long_help = "\
Path to the virtual environment the project uses. \
If provided, red-knot will use the `site-packages` directory of this virtual environment \
to resolve type information for the project's third-party dependencies.",
        value_name = "PATH"
    )]
    venv_path: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "DIRECTORY",
        help = "Custom directory to use for stdlib typeshed stubs"
    )]
    custom_typeshed_dir: Option<SystemPathBuf>,

    #[arg(
        long,
        value_name = "PATH",
        help = "Additional path to use as a module-resolution source (can be passed multiple times)"
    )]
    extra_search_path: Vec<SystemPathBuf>,

    #[arg(
        long,
        help = "Python version to assume when resolving types",
        default_value_t = TargetVersion::default(),
        value_name="VERSION")
    ]
    target_version: TargetVersion,

    #[clap(flatten)]
    verbosity: Verbosity,

    #[arg(
        long,
        help = "Run in watch mode by re-running whenever files change",
        short = 'W'
    )]
    watch: bool,
}

#[derive(Debug, clap::Subcommand)]
pub enum Command {
    /// Start the language server
    Server,
}

#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
pub fn main() -> ExitStatus {
    run().unwrap_or_else(|error| {
        use std::io::Write;

        // Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
        let mut stderr = std::io::stderr().lock();

        // This communicates that this isn't a linter error but Red Knot itself hard-errored for
        // some reason (e.g. failed to resolve the configuration)
        writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
        // Currently we generally only see one error, but e.g. with io errors when resolving
        // the configuration it is helpful to chain errors ("resolving configuration failed" ->
        // "failed to read file: subdir/pyproject.toml")
        for cause in error.chain() {
            writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
        }

        ExitStatus::Error
    })
}

fn run() -> anyhow::Result<ExitStatus> {
    let Args {
        command,
        current_directory,
        custom_typeshed_dir,
        extra_search_path: extra_paths,
        venv_path,
        target_version,
        verbosity,
        watch,
    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

    if matches!(command, Some(Command::Server)) {
        return run_server().map(|()| ExitStatus::Success);
    }

    let fs = OsFileSystem;
    let entry_point = FileSystemPath::new(&arguments[1]);
    let verbosity = verbosity.level();
    countme::enable(verbosity.is_trace());
    let _guard = setup_tracing(verbosity)?;

    if !fs.exists(entry_point) {
        eprintln!("The entry point does not exist.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }
    // The base path that all CLI arguments are relative to.
    let cli_base_path = {
        let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
                    path.display()
                )
            })?
    };

    if !fs.is_file(entry_point) {
        eprintln!("The entry point is not a file.");
        return Err(anyhow::anyhow!("Invalid arguments"));
    }
    let cwd = current_directory
        .map(|cwd| {
            if cwd.as_std_path().is_dir() {
                Ok(SystemPath::absolute(&cwd, &cli_base_path))
            } else {
                Err(anyhow!(
                    "Provided current-directory path '{cwd}' is not a directory."
                ))
            }
        })
        .transpose()?
        .unwrap_or_else(|| cli_base_path.clone());

    let entry_point = entry_point.to_path_buf();
    let system = OsSystem::new(cwd.clone());
    let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;

    let workspace_folder = entry_point.parent().unwrap();
    let workspace = Workspace::new(workspace_folder.to_path_buf());
    // TODO: Verify the remaining search path settings eagerly.
    let site_packages = venv_path
        .map(|path| {
            VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
                .and_then(|venv| venv.site_packages_directories(&system))
        })
        .transpose()?
        .unwrap_or_default();

    let workspace_search_path = workspace.root().to_path_buf();

    let mut program = Program::new(workspace, fs);

    set_module_resolution_settings(
        &mut program,
        ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: workspace_search_path,
            site_packages: None,
            custom_typeshed: None,
    // TODO: Respect the settings from the workspace metadata when resolving the program settings.
    let program_settings = ProgramSettings {
        target_version: target_version.into(),
        search_paths: SearchPathSettings {
            extra_paths,
            src_root: workspace_metadata.root().to_path_buf(),
            custom_typeshed: custom_typeshed_dir,
            site_packages,
        },
    );
    };

    let entry_id = system_path_to_file(&program, entry_point.clone()).unwrap();
    program.workspace_mut().open_file(entry_id);
    // TODO: Use the `program_settings` to compute the key for the database's persistent
    // cache and load the cache if it exists.
    let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;

    let (main_loop, main_loop_cancellation_token) = MainLoop::new();

@@ -80,121 +198,158 @@ pub fn main() -> anyhow::Result<()> {
        }
    })?;

    let file_changes_notifier = main_loop.file_changes_notifier();
    let exit_status = if watch {
        main_loop.watch(&mut db)?
    } else {
        main_loop.run(&mut db)
    };

    // Watch for file changes and re-trigger the analysis.
    let mut file_watcher = FileWatcher::new(move |changes| {
        file_changes_notifier.notify(changes);
    })?;
    tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());

    file_watcher.watch_folder(workspace_folder.as_std_path())?;
    std::mem::forget(db);

    main_loop.run(&mut program);
    Ok(exit_status)
}

    println!("{}", countme::get_all());
#[derive(Copy, Clone)]
pub enum ExitStatus {
    /// Checking was successful and there were no errors.
    Success = 0,

    Ok(())
    /// Checking was successful but there were errors.
    Failure = 1,

    /// Checking failed.
    Error = 2,
}

impl Termination for ExitStatus {
    fn report(self) -> ExitCode {
        ExitCode::from(self as u8)
    }
}

struct MainLoop {
    orchestrator_sender: crossbeam_channel::Sender<OrchestratorMessage>,
    main_loop_receiver: crossbeam_channel::Receiver<MainLoopMessage>,
    /// Sender that can be used to send messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,

    /// Receiver for the messages sent **to** the main loop.
    receiver: crossbeam_channel::Receiver<MainLoopMessage>,

    /// The file system watcher, if running in watch mode.
    watcher: Option<WorkspaceWatcher>,
}

impl MainLoop {
    fn new() -> (Self, MainLoopCancellationToken) {
        let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
        let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);

        let mut orchestrator = Orchestrator {
            receiver: orchestrator_receiver,
            sender: main_loop_sender.clone(),
            revision: 0,
        };

        std::thread::spawn(move || {
            orchestrator.run();
        });
        let (sender, receiver) = crossbeam_channel::bounded(10);

        (
            Self {
                orchestrator_sender,
                main_loop_receiver,
            },
            MainLoopCancellationToken {
                sender: main_loop_sender,
                sender: sender.clone(),
                receiver,
                watcher: None,
            },
            MainLoopCancellationToken { sender },
        )
    }

    fn file_changes_notifier(&self) -> FileChangesNotifier {
        FileChangesNotifier {
            sender: self.orchestrator_sender.clone(),
        }
    fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
        tracing::debug!("Starting watch mode");
        let sender = self.sender.clone();
        let watcher = watch::directory_watcher(move |event| {
            sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
        })?;

        self.watcher = Some(WorkspaceWatcher::new(watcher, db));

        self.run(db);

        Ok(ExitStatus::Success)
    }

    #[allow(clippy::print_stderr)]
    fn run(self, program: &mut Program) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Run)
            .unwrap();
    fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
        self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();

        for message in &self.main_loop_receiver {
            tracing::trace!("Main Loop: Tick");
        let result = self.main_loop(db);

        tracing::debug!("Exiting main loop");

        result
    }

    fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
        // Schedule the first check.
        tracing::debug!("Starting main loop");

        let mut revision = 0u64;

        while let Ok(message) = self.receiver.recv() {
            match message {
                MainLoopMessage::CheckProgram { revision } => {
                    let program = program.snapshot();
                    let sender = self.orchestrator_sender.clone();
                MainLoopMessage::CheckWorkspace => {
                    let db = db.snapshot();
                    let sender = self.sender.clone();

                    // Spawn a new task that checks the program. This needs to be done in a separate thread
                    // Spawn a new task that checks the workspace. This needs to be done in a separate thread
                    // to prevent blocking the main loop here.
                    rayon::spawn(move || {
                        if let Ok(result) = program.check() {
                        if let Ok(result) = db.check() {
                            // Send the result back to the main loop for printing.
                            sender
                                .send(OrchestratorMessage::CheckProgramCompleted {
                                    diagnostics: result,
                                    revision,
                                })
                                .send(MainLoopMessage::CheckCompleted { result, revision })
                                .unwrap();
                        }
                    });
                }
                MainLoopMessage::ApplyChanges(changes) => {
                    // Automatically cancels any pending queries and waits for them to complete.
                    program.apply_changes(changes);

                MainLoopMessage::CheckCompleted {
                    result,
                    revision: check_revision,
                } => {
                    let has_diagnostics = !result.is_empty();
                    if check_revision == revision {
                        for diagnostic in result {
                            tracing::error!("{}", diagnostic);
                        }
                    } else {
                        tracing::debug!(
                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
                        );
                    }

                    if self.watcher.is_none() {
                        return if has_diagnostics {
                            ExitStatus::Failure
                        } else {
                            ExitStatus::Success
                        };
                    }

                    tracing::trace!("Counts after last check:\n{}", countme::get_all());
                }
                MainLoopMessage::CheckCompleted(diagnostics) => {
                    eprintln!("{}", diagnostics.join("\n"));
                    eprintln!("{}", countme::get_all());

                MainLoopMessage::ApplyChanges(changes) => {
                    revision += 1;
                    // Automatically cancels any pending queries and waits for them to complete.
                    db.apply_changes(changes);
                    if let Some(watcher) = self.watcher.as_mut() {
                        watcher.update(db);
                    }
                    self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
                }
                MainLoopMessage::Exit => {
                    eprintln!("{}", countme::get_all());
                    return;
                    // Cancel any pending queries and wait for them to complete.
                    // TODO: Don't use Salsa internal APIs
                    // [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
                    let _ = db.zalsa_mut();
                    return ExitStatus::Success;
                }
            }

            tracing::debug!("Waiting for next main loop message.");
        }
    }
}

impl Drop for MainLoop {
    fn drop(&mut self) {
        self.orchestrator_sender
            .send(OrchestratorMessage::Shutdown)
            .unwrap();
    }
}

#[derive(Debug, Clone)]
struct FileChangesNotifier {
    sender: crossbeam_channel::Sender<OrchestratorMessage>,
}

impl FileChangesNotifier {
    fn notify(&self, changes: Vec<FileWatcherChange>) {
        self.sender
            .send(OrchestratorMessage::FileChanges(changes))
            .unwrap();
        ExitStatus::Success
    }
}

@@ -209,165 +364,11 @@ impl MainLoopCancellationToken {
    }
}

struct Orchestrator {
    /// Sends messages to the main loop.
    sender: crossbeam_channel::Sender<MainLoopMessage>,
    /// Receives messages from the main loop.
    receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
    revision: usize,
}

impl Orchestrator {
    #[allow(clippy::print_stderr)]
    fn run(&mut self) {
        while let Ok(message) = self.receiver.recv() {
            match message {
                OrchestratorMessage::Run => {
                    self.sender
                        .send(MainLoopMessage::CheckProgram {
                            revision: self.revision,
                        })
                        .unwrap();
                }

                OrchestratorMessage::CheckProgramCompleted {
                    diagnostics,
                    revision,
                } => {
                    // Only take the diagnostics if they are for the latest revision.
                    if self.revision == revision {
                        self.sender
                            .send(MainLoopMessage::CheckCompleted(diagnostics))
                            .unwrap();
                    } else {
                        tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
                    }
                }

                OrchestratorMessage::FileChanges(changes) => {
                    // Request cancellation, but wait until all analysis tasks have completed to
                    // avoid stale messages in the next main loop.

                    self.revision += 1;
                    self.debounce_changes(changes);
                }
                OrchestratorMessage::Shutdown => {
                    return self.shutdown();
                }
            }
        }
    }

    fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
        loop {
            // Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 100ms.
            crossbeam_channel::select! {
                recv(self.receiver) -> message => {
                    match message {
                        Ok(OrchestratorMessage::Shutdown) => {
                            return self.shutdown();
                        }
                        Ok(OrchestratorMessage::FileChanges(file_changes)) => {
                            changes.extend(file_changes);
                        }

                        Ok(OrchestratorMessage::CheckProgramCompleted { .. }) => {
                            // disregard any outdated completion message.
                        }
                        Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),

                        Err(_) => {
                            // There are no more senders, no point in waiting for more messages
                            return;
                        }
                    }
                },
                default(std::time::Duration::from_millis(10)) => {
                    // No more file changes after 10 ms, send the changes and schedule a new analysis
                    self.sender.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
                    self.sender.send(MainLoopMessage::CheckProgram { revision: self.revision}).unwrap();
                    return;
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn shutdown(&self) {
        tracing::trace!("Shutting down orchestrator.");
    }
}

/// Message sent from the orchestrator to the main loop.
#[derive(Debug)]
enum MainLoopMessage {
    CheckProgram { revision: usize },
    CheckCompleted(Vec<String>),
    ApplyChanges(Vec<FileWatcherChange>),
    CheckWorkspace,
    CheckCompleted { result: Vec<String>, revision: u64 },
    ApplyChanges(Vec<watch::ChangeEvent>),
    Exit,
}

#[derive(Debug)]
enum OrchestratorMessage {
    Run,
    Shutdown,

    CheckProgramCompleted {
        diagnostics: Vec<String>,
        revision: usize,
    },

    FileChanges(Vec<FileWatcherChange>),
}

fn setup_tracing() {
    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter {
                trace_level: Level::TRACE,
            }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}

@@ -1,32 +0,0 @@
use ruff_db::vfs::VfsFile;
use salsa::Cancelled;

use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::program::Program;

impl Program {
    /// Checks all open files in the workspace and its dependencies.
    #[tracing::instrument(level = "debug", skip_all)]
    pub fn check(&self) -> Result<Vec<String>, Cancelled> {
        self.with_db(|db| {
            let mut result = Vec::new();
            for open_file in db.workspace.open_files() {
                result.extend_from_slice(&db.check_file_impl(open_file));
            }

            result
        })
    }

    #[tracing::instrument(level = "debug", skip(self))]
    pub fn check_file(&self, file: VfsFile) -> Result<Diagnostics, Cancelled> {
        self.with_db(|db| db.check_file_impl(file))
    }

    fn check_file_impl(&self, file: VfsFile) -> Diagnostics {
        let mut diagnostics = Vec::new();
        diagnostics.extend_from_slice(lint_syntax(self, file));
        diagnostics.extend_from_slice(lint_semantic(self, file));
        Diagnostics::from(diagnostics)
    }
}
@@ -1,131 +0,0 @@
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::sync::Arc;

use salsa::{Cancelled, Database};

use red_knot_module_resolver::{Db as ResolverDb, Jar as ResolverJar};
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
use ruff_db::file_system::{FileSystem, FileSystemPathBuf};
use ruff_db::vfs::{Vfs, VfsFile, VfsPath};
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};

use crate::db::{Db, Jar};
use crate::Workspace;

mod check;

#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
pub struct Program {
    storage: salsa::Storage<Program>,
    vfs: Vfs,
    fs: Arc<dyn FileSystem + Send + Sync + RefUnwindSafe>,
    workspace: Workspace,
}

impl Program {
    pub fn new<Fs>(workspace: Workspace, file_system: Fs) -> Self
    where
        Fs: FileSystem + 'static + Send + Sync + RefUnwindSafe,
    {
        Self {
            storage: salsa::Storage::default(),
            vfs: Vfs::default(),
            fs: Arc::new(file_system),
            workspace,
        }
    }

    pub fn apply_changes<I>(&mut self, changes: I)
    where
        I: IntoIterator<Item = FileWatcherChange>,
    {
        for change in changes {
            VfsFile::touch_path(self, &VfsPath::file_system(change.path));
        }
    }

    pub fn workspace(&self) -> &Workspace {
        &self.workspace
    }

    pub fn workspace_mut(&mut self) -> &mut Workspace {
        &mut self.workspace
    }

    #[allow(clippy::unnecessary_wraps)]
    fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
    where
        F: FnOnce(&Program) -> T + UnwindSafe,
    {
        // TODO: Catch in `Cancelled::catch`
        // See https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60
        Ok(f(self))
    }
}

impl Upcast<dyn SemanticDb> for Program {
    fn upcast(&self) -> &(dyn SemanticDb + 'static) {
        self
    }
}

impl Upcast<dyn SourceDb> for Program {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
}

impl Upcast<dyn ResolverDb> for Program {
    fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        self
    }
}

impl ResolverDb for Program {}

impl SemanticDb for Program {}

impl SourceDb for Program {
    fn file_system(&self) -> &dyn FileSystem {
        &*self.fs
    }

    fn vfs(&self) -> &Vfs {
        &self.vfs
    }
}

impl Database for Program {}

impl Db for Program {}

impl salsa::ParallelDatabase for Program {
    fn snapshot(&self) -> salsa::Snapshot<Self> {
        salsa::Snapshot::new(Self {
            storage: self.storage.snapshot(),
            vfs: self.vfs.snapshot(),
            fs: self.fs.clone(),
            workspace: self.workspace.clone(),
        })
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    path: FileSystemPathBuf,
    #[allow(unused)]
    kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: FileSystemPathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}
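To illustrate the `apply_changes` contract, a sketch using only APIs that appear in this diff (`FileSystemPath::new(..).to_path_buf()` is used the same way in the resolver tests below); the concrete path is a placeholder:

```rust
use ruff_db::file_system::FileSystemPath;

// Hypothetical change event; in the real main loop these batches come from
// the file watcher in `watch.rs`.
let change = FileWatcherChange::new(
    FileSystemPath::new("src/foo.py").to_path_buf(),
    FileChangeKind::Modified,
);

// Needs `&mut Program`: touching the `VfsFile` bumps its revision so that
// dependent salsa queries re-run on the next check.
program.apply_changes([change]);
```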
crates/red_knot/src/target_version.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}
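A short sketch of the conversions this file provides; the assertions rely only on the `Display` and `From` impls above:

```rust
let target = TargetVersion::default();
assert_eq!(target.to_string(), "py38"); // `Py38` is the `#[default]` variant

// Convert into the semantic crate's version type for analysis.
let _version: red_knot_python_semantic::PythonVersion = TargetVersion::Py312.into();
```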
crates/red_knot/src/verbosity.rs (new file, 1 line)
@@ -0,0 +1 @@
@@ -1,82 +0,0 @@
use std::path::Path;

use crate::program::{FileChangeKind, FileWatcherChange};
use anyhow::Context;
use notify::event::{CreateKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
use ruff_db::file_system::FileSystemPath;

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |changes: notify::Result<Event>| {
            match changes {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if path.is_file() {
                            if let Some(fs_path) = FileSystemPath::from_std_path(&path) {
                                changes.push(FileWatcherChange::new(
                                    fs_path.to_path_buf(),
                                    change_kind,
                                ));
                            }
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}
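A minimal sketch of wiring up the watcher; a closure works as the handler thanks to the blanket `EventHandler` impl above. The watched directory is an assumption:

```rust
use std::path::Path;

fn watch_src() -> anyhow::Result<()> {
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        // In red_knot's main loop this would forward the batch to the
        // orchestrator; here we only log it.
        tracing::debug!("observed {} file change(s)", changes.len());
    })?;

    watcher.watch_folder(Path::new("src"))?;

    // Keep `watcher` alive for as long as events should be delivered;
    // dropping it stops the underlying `notify` watcher.
    Ok(())
}
```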
crates/red_knot/tests/file_watching.rs (new file, 1239 lines; diff suppressed because it is too large)
@@ -1,35 +0,0 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

compact_str = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
@@ -1,156 +0,0 @@
use ruff_db::Upcast;

use crate::resolver::{
    file_to_module,
    internal::{ModuleNameIngredient, ModuleResolverSearchPaths},
    resolve_module_query,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    ModuleResolverSearchPaths,
    resolve_module_query,
    file_to_module,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
    use ruff_db::vfs::Vfs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        file_system: TestFileSystem,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
        vfs: Vfs,
    }

    impl TestDb {
        #[allow(unused)]
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
                events: sync::Arc::default(),
                vfs: Vfs::with_stubbed_vendored(),
            }
        }

        /// Returns the memory file system.
        ///
        /// ## Panics
        /// If this test db isn't using a memory file system.
        #[allow(unused)]
        pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
            if let TestFileSystem::Memory(fs) = &self.file_system {
                fs
            } else {
                panic!("The test db is not using a memory file system");
            }
        }

        /// Uses the real file system instead of the memory file system.
        ///
        /// This is useful for testing advanced file system features like permissions, symlinks, etc.
        ///
        /// Note that any files written to the memory file system won't be copied over.
        #[allow(unused)]
        pub(crate) fn with_os_file_system(&mut self) {
            self.file_system = TestFileSystem::Os(OsFileSystem);
        }

        #[allow(unused)]
        pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
            &mut self.vfs
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn file_system(&self) -> &dyn ruff_db::file_system::FileSystem {
            self.file_system.inner()
        }

        fn vfs(&self) -> &ruff_db::vfs::Vfs {
            &self.vfs
        }
    }

    impl Db for TestDb {}

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                file_system: self.file_system.snapshot(),
                events: self.events.clone(),
                vfs: self.vfs.snapshot(),
            })
        }
    }

    enum TestFileSystem {
        Memory(MemoryFileSystem),
        #[allow(unused)]
        Os(OsFileSystem),
    }

    impl TestFileSystem {
        fn inner(&self) -> &dyn FileSystem {
            match self {
                Self::Memory(inner) => inner,
                Self::Os(inner) => inner,
            }
        }

        fn snapshot(&self) -> Self {
            match self {
                Self::Memory(inner) => Self::Memory(inner.snapshot()),
                Self::Os(inner) => Self::Os(inner.snapshot()),
            }
        }
    }
}
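The event log above enables a caching-assertion pattern used by the resolver tests further down: clear the log, run the query under test, then inspect which salsa events fired. Sketch (file setup and the query are elided placeholders):

```rust
let mut db = TestDb::new();
// ... write files and configure settings ...

db.clear_salsa_events();
// ... run the query under test ...
let events = db.take_salsa_events();

// No `WillExecute` event means every query was answered from the salsa cache.
assert!(!events
    .iter()
    .any(|event| matches!(event.kind, salsa::EventKind::WillExecute { .. })));
```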
@@ -1,9 +0,0 @@
mod db;
mod module;
mod resolver;
mod typeshed;

pub use db::{Db, Jar};
pub use module::{Module, ModuleKind, ModuleName};
pub use resolver::{resolve_module, set_module_resolution_settings, ModuleResolutionSettings};
pub use typeshed::versions::TypeshedVersions;
@@ -1,343 +0,0 @@
use compact_str::ToCompactString;
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

use ruff_db::file_system::FileSystemPath;
use ruff_db::vfs::{VfsFile, VfsPath};
use ruff_python_stdlib::identifiers::is_identifier;

use crate::Db;

/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(compact_str::CompactString);

impl ModuleName {
    /// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
    /// module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid Python identifier.
    #[inline]
    pub fn new(name: &str) -> Option<Self> {
        Self::is_valid_name(name).then(|| Self(compact_str::CompactString::from(name)))
    }

    /// Creates a new module name for `name` where `name` is a static string.
    /// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid Python identifier.
    ///
    /// ## Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
    /// assert_eq!(ModuleName::new_static("..foo"), None);
    /// assert_eq!(ModuleName::new_static(".foo"), None);
    /// assert_eq!(ModuleName::new_static("foo."), None);
    /// assert_eq!(ModuleName::new_static("foo..bar"), None);
    /// assert_eq!(ModuleName::new_static("2000"), None);
    /// ```
    #[inline]
    pub fn new_static(name: &'static str) -> Option<Self> {
        // TODO(Micha): Use CompactString::const_new once we upgrade to 0.8 https://github.com/ParkMyCar/compact_str/pull/336
        Self::is_valid_name(name).then(|| Self(compact_str::CompactString::from(name)))
    }

    fn is_valid_name(name: &str) -> bool {
        if name.is_empty() {
            return false;
        }

        name.split('.').all(is_identifier)
    }

    /// An iterator over the components of the module name.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

    /// The name of this module's immediate parent, if it has a parent.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
    /// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
    /// ```
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;
        Some(Self(parent.to_compact_string()))
    }

    /// Returns `true` if the name starts with `other`.
    ///
    /// This is equivalent to checking if `self` is a sub-module of `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
    /// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
    /// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    /// ```
    pub fn starts_with(&self, other: &ModuleName) -> bool {
        let mut self_components = self.components();
        let other_components = other.components();

        for other_component in other_components {
            if self_components.next() != Some(other_component) {
                return false;
            }
        }

        true
    }

    #[inline]
    pub fn as_str(&self) -> &str {
        &self.0
    }

    pub(crate) fn from_relative_path(path: &FileSystemPath) -> Option<Self> {
        let path = if path.ends_with("__init__.py") || path.ends_with("__init__.pyi") {
            path.parent()?
        } else {
            path
        };

        let name = if let Some(parent) = path.parent() {
            let mut name = compact_str::CompactString::with_capacity(path.as_str().len());

            for component in parent.components() {
                name.push_str(component.as_os_str().to_str()?);
                name.push('.');
            }

            // SAFETY: Unwrap is safe here or `parent` would have returned `None`.
            name.push_str(path.file_stem().unwrap());

            name
        } else {
            path.file_stem()?.to_compact_string()
        };

        Some(Self(name))
    }
}

impl Deref for ModuleName {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq<str> for ModuleName {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<ModuleName> for str {
    fn eq(&self, other: &ModuleName) -> bool {
        self == other.as_str()
    }
}

impl std::fmt::Display for ModuleName {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
    inner: Arc<ModuleInner>,
}

impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: ModuleSearchPath,
        file: VfsFile,
    ) -> Self {
        Self {
            inner: Arc::new(ModuleInner {
                name,
                kind,
                search_path,
                file,
            }),
        }
    }

    /// The absolute name of the module (e.g. `foo.bar`)
    pub fn name(&self) -> &ModuleName {
        &self.inner.name
    }

    /// The file containing the source code that defines this module
    pub fn file(&self) -> VfsFile {
        self.inner.file
    }

    /// The search path from which the module was resolved.
    pub fn search_path(&self) -> &ModuleSearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
    pub fn kind(&self) -> ModuleKind {
        self.inner.kind
    }
}

impl std::fmt::Debug for Module {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file())
            .field("search_path", &self.search_path())
            .finish()
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: ModuleSearchPath,
    file: VfsFile,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
    /// A single-file module (e.g. `foo.py` or `foo.pyi`)
    Module,

    /// A Python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

/// A search path in which to search for modules.
/// Corresponds to a path in [`sys.path`](https://docs.python.org/3/library/sys_path_init.html) at runtime.
///
/// Cloning a search path is cheap because it's an `Arc`.
#[derive(Clone, PartialEq, Eq)]
pub struct ModuleSearchPath {
    inner: Arc<ModuleSearchPathInner>,
}

impl ModuleSearchPath {
    pub fn new<P>(path: P, kind: ModuleSearchPathKind) -> Self
    where
        P: Into<VfsPath>,
    {
        Self {
            inner: Arc::new(ModuleSearchPathInner {
                path: path.into(),
                kind,
            }),
        }
    }

    /// Determine whether this is a first-party, third-party or standard-library search path
    pub fn kind(&self) -> ModuleSearchPathKind {
        self.inner.kind
    }

    /// Return the location of the search path on the file system
    pub fn path(&self) -> &VfsPath {
        &self.inner.path
    }
}

impl std::fmt::Debug for ModuleSearchPath {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ModuleSearchPath")
            .field("path", &self.inner.path)
            .field("kind", &self.kind())
            .finish()
    }
}

#[derive(Eq, PartialEq)]
struct ModuleSearchPathInner {
    path: VfsPath,
    kind: ModuleSearchPathKind,
}

/// Enumeration of the different kinds of search paths type checkers are expected to support.
///
/// N.B. Although we don't implement `Ord` for this enum, the variants are ordered in terms of the
/// priority that we want to give these modules when resolving them.
/// This is roughly [the order given in the typing spec], but typeshed's stubs
/// for the standard library are moved higher up to match Python's semantics at runtime.
///
/// [the order given in the typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleSearchPathKind {
    /// "Extra" paths provided by the user in a config file, env var or CLI flag.
    /// E.g. mypy's `MYPYPATH` env var, or pyright's `stubPath` configuration setting
    Extra,

    /// Files in the project we're directly being invoked on
    FirstParty,

    /// The `stdlib` directory of typeshed (either vendored or custom)
    StandardLibrary,

    /// Stubs or runtime modules installed in site-packages
    SitePackagesThirdParty,

    /// Vendored third-party stubs from typeshed
    VendoredThirdParty,
}
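To summarize the path-to-name mapping implemented by `from_relative_path` above (the function is `pub(crate)`, so this is a comment-only trace over hypothetical paths):

```rust
// foo/bar/__init__.py -> Some("foo.bar")   (the `__init__` component is dropped)
// foo/bar.py          -> Some("foo.bar")   (the `.py` extension is stripped)
// foo.py              -> Some("foo")
```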
@@ -1,935 +0,0 @@
use std::ops::Deref;

use ruff_db::file_system::{FileSystem, FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::{system_path_to_file, vfs_path_to_file, VfsFile, VfsPath};

use crate::module::{Module, ModuleKind, ModuleName, ModuleSearchPath, ModuleSearchPathKind};
use crate::resolver::internal::ModuleResolverSearchPaths;
use crate::Db;

const TYPESHED_STDLIB_DIRECTORY: &str = "stdlib";

/// Configures the module search paths for the module resolver.
///
/// Must be called before calling any other module resolution functions.
pub fn set_module_resolution_settings(db: &mut dyn Db, config: ModuleResolutionSettings) {
    // There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
    // thread can mutate the `Db` while we're in this call, so using `try_get` to test if
    // the settings have already been set is safe.
    if let Some(existing) = ModuleResolverSearchPaths::try_get(db) {
        existing
            .set_search_paths(db)
            .to(config.into_ordered_search_paths());
    } else {
        ModuleResolverSearchPaths::new(db, config.into_ordered_search_paths());
    }
}

/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
    let interned_name = internal::ModuleNameIngredient::new(db, module_name);

    resolve_module_query(db, interned_name)
}

/// Salsa query that resolves an interned [`ModuleNameIngredient`] to a module.
///
/// This query should not be called directly. Instead, use [`resolve_module`]. It only exists
/// because Salsa requires the module name to be an ingredient.
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
    db: &'db dyn Db,
    module_name: internal::ModuleNameIngredient<'db>,
) -> Option<Module> {
    let _span = tracing::trace_span!("resolve_module", ?module_name).entered();

    let name = module_name.name(db);

    let (search_path, module_file, kind) = resolve_name(db, name)?;

    let module = Module::new(name.clone(), kind, search_path, module_file);

    Some(module)
}

/// Resolves the module for the given path.
///
/// Returns `None` if the path is not a module locatable via `sys.path`.
#[allow(unused)]
pub(crate) fn path_to_module(db: &dyn Db, path: &VfsPath) -> Option<Module> {
    // It's not entirely clear at first sight why this method calls `file_to_module` instead of
    // it being the other way round, considering that the first thing that `file_to_module` does
    // is to retrieve the file's path.
    //
    // The reason is that `file_to_module` is a tracked Salsa query and salsa queries require that
    // all arguments are Salsa ingredients (something stored in Salsa). `Path`s aren't salsa ingredients, but
    // `VfsFile` is. So what we do here is to retrieve the `path`'s `VfsFile` so that we can make
    // use of Salsa's caching and invalidation.
    let file = vfs_path_to_file(db.upcast(), path)?;
    file_to_module(db, file)
}

/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via `sys.path`.
#[salsa::tracked]
pub(crate) fn file_to_module(db: &dyn Db, file: VfsFile) -> Option<Module> {
    let _span = tracing::trace_span!("file_to_module", ?file).entered();

    let path = file.path(db.upcast());

    let search_paths = module_search_paths(db);

    let relative_path = search_paths
        .iter()
        .find_map(|root| match (root.path(), path) {
            (VfsPath::FileSystem(root_path), VfsPath::FileSystem(path)) => {
                let relative_path = path.strip_prefix(root_path).ok()?;
                Some(relative_path)
            }
            (VfsPath::Vendored(_), VfsPath::Vendored(_)) => {
                todo!("Add support for vendored modules")
            }
            (VfsPath::Vendored(_), VfsPath::FileSystem(_))
            | (VfsPath::FileSystem(_), VfsPath::Vendored(_)) => None,
        })?;

    let module_name = ModuleName::from_relative_path(relative_path)?;

    // Resolve the module name to see if Python would resolve the name to the same path.
    // If it doesn't, then that means that multiple modules have the same name in different
    // root paths, but that the module corresponding to `path` is in a lower priority search path,
    // in which case we ignore it.
    let module = resolve_module(db, module_name)?;

    if file == module.file() {
        Some(module)
    } else {
        // This path is for a module with the same name but with a different precedence. For example:
        // ```
        // src/foo.py
        // src/foo/__init__.py
        // ```
        // The module name of `src/foo.py` is `foo`, but the module loaded by Python is `src/foo/__init__.py`.
        // That means we need to ignore `src/foo.py` even though it resolves to the same module name.
        None
    }
}

/// Configures the search paths that are used to resolve modules.
#[derive(Eq, PartialEq, Debug)]
pub struct ModuleResolutionSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<FileSystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: FileSystemPathBuf,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<FileSystemPathBuf>,

    /// Optional path to standard-library typeshed stubs.
    /// Currently this has to be a directory that exists on disk.
    ///
    /// (TODO: fall back to vendored stubs if no custom directory is provided.)
    pub custom_typeshed: Option<FileSystemPathBuf>,
}

impl ModuleResolutionSettings {
    /// Implementation of PEP 561's module resolution order
    /// (with some small, deliberate differences)
    fn into_ordered_search_paths(self) -> OrderedSearchPaths {
        let ModuleResolutionSettings {
            extra_paths,
            workspace_root,
            site_packages,
            custom_typeshed,
        } = self;

        let mut paths: Vec<_> = extra_paths
            .into_iter()
            .map(|path| ModuleSearchPath::new(path, ModuleSearchPathKind::Extra))
            .collect();

        paths.push(ModuleSearchPath::new(
            workspace_root,
            ModuleSearchPathKind::FirstParty,
        ));

        // TODO fall back to vendored typeshed stubs if no custom typeshed directory is provided by the user
        if let Some(custom_typeshed) = custom_typeshed {
            paths.push(ModuleSearchPath::new(
                custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY),
                ModuleSearchPathKind::StandardLibrary,
            ));
        }

        // TODO vendor typeshed's third-party stubs as well as the stdlib and fall back to them as a final step
        if let Some(site_packages) = site_packages {
            paths.push(ModuleSearchPath::new(
                site_packages,
                ModuleSearchPathKind::SitePackagesThirdParty,
            ));
        }

        OrderedSearchPaths(paths)
    }
}

/// A resolved module resolution order, implementing PEP 561
/// (with some small, deliberate differences)
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub(crate) struct OrderedSearchPaths(Vec<ModuleSearchPath>);

impl Deref for OrderedSearchPaths {
    type Target = [ModuleSearchPath];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
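Putting the pieces together, a sketch of configuring the resolver; it mirrors the `create_resolver` helper in the tests below, and `db` is assumed to be a value implementing `Db`:

```rust
use ruff_db::file_system::FileSystemPath;

let settings = ModuleResolutionSettings {
    extra_paths: vec![],
    workspace_root: FileSystemPath::new("src").to_path_buf(),
    site_packages: Some(FileSystemPath::new("site_packages").to_path_buf()),
    custom_typeshed: Some(FileSystemPath::new("typeshed").to_path_buf()),
};

// Resulting order: `src` (FirstParty), `typeshed/stdlib` (StandardLibrary),
// then `site_packages` (SitePackagesThirdParty).
set_module_resolution_settings(&mut db, settings);
```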

// The singleton methods generated by salsa are all `pub` instead of `pub(crate)` which triggers
// `unreachable_pub`. Work around this by creating a module and allow `unreachable_pub` for it.
// Salsa also generates uses of `_db` variables for `interned`, which triggers `clippy::used_underscore_binding`. Suppress that too.
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
    use crate::module::ModuleName;
    use crate::resolver::OrderedSearchPaths;

    #[salsa::input(singleton)]
    pub(crate) struct ModuleResolverSearchPaths {
        #[return_ref]
        pub(super) search_paths: OrderedSearchPaths,
    }

    /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
    ///
    /// This is needed because Salsa requires that all query arguments are salsa ingredients.
    #[salsa::interned]
    pub(crate) struct ModuleNameIngredient<'db> {
        #[return_ref]
        pub(super) name: ModuleName,
    }
}

fn module_search_paths(db: &dyn Db) -> &[ModuleSearchPath] {
    ModuleResolverSearchPaths::get(db).search_paths(db)
}

/// Given a module name and a list of search paths in which to look up modules,
/// attempt to resolve the module name.
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(ModuleSearchPath, VfsFile, ModuleKind)> {
    let search_paths = module_search_paths(db);

    for search_path in search_paths {
        let mut components = name.components();
        let module_name = components.next_back()?;

        let VfsPath::FileSystem(fs_search_path) = search_path.path() else {
            todo!("Vendored search paths are not yet supported");
        };

        match resolve_package(db.file_system(), fs_search_path, components) {
            Ok(resolved_package) => {
                let mut package_path = resolved_package.path;

                package_path.push(module_name);

                // The directory must contain an `__init__.pyi` or `__init__.py`, or it isn't a package.
                let kind = if db.file_system().is_directory(&package_path) {
                    package_path.push("__init__");
                    ModuleKind::Package
                } else {
                    ModuleKind::Module
                };

                // TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
                let stub = package_path.with_extension("pyi");

                if let Some(stub) = system_path_to_file(db.upcast(), &stub) {
                    return Some((search_path.clone(), stub, kind));
                }

                let module = package_path.with_extension("py");

                if let Some(module) = system_path_to_file(db.upcast(), &module) {
                    return Some((search_path.clone(), module, kind));
                }

                // For regular packages, don't search the next search path. All files of that
                // package must be in the same location.
                if resolved_package.kind.is_regular_package() {
                    return None;
                }
            }
            Err(parent_kind) => {
                if parent_kind.is_regular_package() {
                    // For regular packages, don't search the next search path.
                    return None;
                }
            }
        }
    }

    None
}

fn resolve_package<'a, I>(
    fs: &dyn FileSystem,
    module_search_path: &FileSystemPath,
    components: I,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
{
    let mut package_path = module_search_path.to_path_buf();

    // `true` if inside a folder that is a namespace package (has no `__init__.py`).
    // Namespace packages are special because they can be spread across multiple search paths.
    // https://peps.python.org/pep-0420/
    let mut in_namespace_package = false;

    // `true` if resolving a sub-package. For example, `true` when resolving `bar` of `foo.bar`.
    let mut in_sub_package = false;

    // For `foo.bar.baz`, test that `foo` and `bar` both contain a `__init__.py`.
    for folder in components {
        package_path.push(folder);

        let has_init_py = fs.is_file(&package_path.join("__init__.py"))
            || fs.is_file(&package_path.join("__init__.pyi"));

        if has_init_py {
            in_namespace_package = false;
        } else if fs.is_directory(&package_path) {
            // A directory without an `__init__.py` is a namespace package, continue with the next folder.
            in_namespace_package = true;
        } else if in_namespace_package {
            // Package not found but it is part of a namespace package.
            return Err(PackageKind::Namespace);
        } else if in_sub_package {
            // A regular sub-package wasn't found.
            return Err(PackageKind::Regular);
        } else {
            // We couldn't find `foo` for `foo.bar.baz`, search the next search path.
            return Err(PackageKind::Root);
        }

        in_sub_package = true;
    }

    let kind = if in_namespace_package {
        PackageKind::Namespace
    } else if in_sub_package {
        PackageKind::Regular
    } else {
        PackageKind::Root
    };

    Ok(ResolvedPackage {
        kind,
        path: package_path,
    })
}

#[derive(Debug)]
struct ResolvedPackage {
    path: FileSystemPathBuf,
    kind: PackageKind,
}

#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum PackageKind {
    /// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
    Root,

    /// A regular sub-package where the parent contains an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
    Regular,

    /// A sub-package in a namespace package. A namespace package is a package without an `__init__.py`.
    ///
    /// For example, `bar` in `foo.bar` if the `foo` directory contains no `__init__.py`.
    Namespace,
}

impl PackageKind {
    const fn is_regular_package(self) -> bool {
        matches!(self, PackageKind::Regular)
    }
}
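A worked trace of `resolve_package` under an assumed layout, to make the `PackageKind` outcomes concrete:

```rust
// Assumed layout under the search path `src`:
//   src/foo/__init__.py
//   src/foo/bar/          (directory without an __init__.py)
//   src/foo/bar/baz.py
//
// Resolving `foo.bar.baz` calls resolve_package(fs, "src", ["foo", "bar"]):
//   push "foo": __init__.py found        -> in_namespace_package = false
//   push "bar": directory, no __init__   -> in_namespace_package = true
// Result: Ok(ResolvedPackage { path: "src/foo/bar", kind: Namespace }).
// Because the package is not regular, `resolve_name` may continue with the
// next search path if no matching module file is found here.
```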

#[cfg(test)]
mod tests {
    use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
    use ruff_db::vfs::{system_path_to_file, VfsFile, VfsPath};

    use crate::db::tests::TestDb;
    use crate::module::{ModuleKind, ModuleName};

    use super::{
        path_to_module, resolve_module, set_module_resolution_settings, ModuleResolutionSettings,
        TYPESHED_STDLIB_DIRECTORY,
    };

    struct TestCase {
        db: TestDb,

        src: FileSystemPathBuf,
        custom_typeshed: FileSystemPathBuf,
        site_packages: FileSystemPathBuf,
    }

    fn create_resolver() -> std::io::Result<TestCase> {
        let mut db = TestDb::new();

        let src = FileSystemPath::new("src").to_path_buf();
        let site_packages = FileSystemPath::new("site_packages").to_path_buf();
        let custom_typeshed = FileSystemPath::new("typeshed").to_path_buf();

        let fs = db.memory_file_system();

        fs.create_directory_all(&src)?;
        fs.create_directory_all(&site_packages)?;
        fs.create_directory_all(&custom_typeshed)?;

        let settings = ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages.clone()),
            custom_typeshed: Some(custom_typeshed.clone()),
        };

        set_module_resolution_settings(&mut db, settings);

        Ok(TestCase {
            db,
            src,
            custom_typeshed,
            site_packages,
        })
    }

    #[test]
    fn first_party_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_path = src.join("foo.py");
        db.memory_file_system()
            .write_file(&foo_path, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        assert_eq!(
            Some(&foo_module),
            resolve_module(&db, foo_module_name.clone()).as_ref()
        );

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(ModuleKind::Module, foo_module.kind());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_path))
        );

        Ok(())
    }

    #[test]
    fn stdlib() -> anyhow::Result<()> {
        let TestCase {
            db,
            custom_typeshed,
            ..
        } = create_resolver()?;

        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
        let functools_path = stdlib_dir.join("functools.py");
        db.memory_file_system()
            .write_file(&functools_path, "def update_wrapper(): ...")?;

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();

        assert_eq!(
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );

        assert_eq!(&stdlib_dir, functools_module.search_path().path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(&functools_path.clone(), functools_module.file().path(&db));

        assert_eq!(
            Some(functools_module),
            path_to_module(&db, &VfsPath::FileSystem(functools_path))
        );

        Ok(())
    }

    #[test]
    fn first_party_precedence_over_stdlib() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            custom_typeshed,
            ..
        } = create_resolver()?;

        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
        let stdlib_functools_path = stdlib_dir.join("functools.py");
        let first_party_functools_path = src.join("functools.py");

        db.memory_file_system().write_files([
            (&stdlib_functools_path, "def update_wrapper(): ..."),
            (&first_party_functools_path, "def update_wrapper(): ..."),
        ])?;

        let functools_module_name = ModuleName::new_static("functools").unwrap();
        let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();

        assert_eq!(
            Some(&functools_module),
            resolve_module(&db, functools_module_name).as_ref()
        );
        assert_eq!(&src, functools_module.search_path().path());
        assert_eq!(ModuleKind::Module, functools_module.kind());
        assert_eq!(
            &first_party_functools_path.clone(),
            functools_module.file().path(&db)
        );

        assert_eq!(
            Some(functools_module),
            path_to_module(&db, &VfsPath::FileSystem(first_party_functools_path))
        );

        Ok(())
    }

    // TODO: Port typeshed test case. Porting isn't possible at the moment because the vendored zip
    // is part of the red knot crate
    // #[test]
    // fn typeshed_zip_created_at_build_time() -> anyhow::Result<()> {
    //     // The file path here is hardcoded in this crate's `build.rs` script.
    //     // Luckily this crate will fail to build if this file isn't available at build time.
    //     const TYPESHED_ZIP_BYTES: &[u8] =
    //         include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
    //     assert!(!TYPESHED_ZIP_BYTES.is_empty());
    //     let mut typeshed_zip_archive = ZipArchive::new(Cursor::new(TYPESHED_ZIP_BYTES))?;
    //
    //     let path_to_functools = Path::new("stdlib").join("functools.pyi");
    //     let mut functools_module_stub = typeshed_zip_archive
    //         .by_name(path_to_functools.to_str().unwrap())
    //         .unwrap();
    //     assert!(functools_module_stub.is_file());
    //
    //     let mut functools_module_stub_source = String::new();
    //     functools_module_stub.read_to_string(&mut functools_module_stub_source)?;
    //
    //     assert!(functools_module_stub_source.contains("def update_wrapper("));
    //     Ok(())
    // }

    #[test]
    fn resolve_package() -> anyhow::Result<()> {
        let TestCase { src, db, .. } = create_resolver()?;

        let foo_dir = src.join("foo");
        let foo_path = foo_dir.join("__init__.py");

        db.memory_file_system()
            .write_file(&foo_path, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!("foo", foo_module.name());
        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_path, foo_module.file().path(&db));

        assert_eq!(
            Some(&foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_path)).as_ref()
        );

        // Resolving by directory doesn't resolve to the init file.
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_dir)));

        Ok(())
    }

    #[test]
    fn package_priority_over_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_dir = src.join("foo");
        let foo_init = foo_dir.join("__init__.py");

        db.memory_file_system()
            .write_file(&foo_init, "print('Hello, world!')")?;

        let foo_py = src.join("foo.py");
        db.memory_file_system()
            .write_file(&foo_py, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_init, foo_module.file().path(&db));
        assert_eq!(ModuleKind::Package, foo_module.kind());

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_init))
        );
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));

        Ok(())
    }

    #[test]
    fn typing_stub_over_module() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo_stub = src.join("foo.pyi");
        let foo_py = src.join("foo.py");
        db.memory_file_system()
            .write_files([(&foo_stub, "x: int"), (&foo_py, "print('Hello, world!')")])?;

        let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo.search_path().path());
        assert_eq!(&foo_stub, foo.file().path(&db));

        assert_eq!(
            Some(foo),
            path_to_module(&db, &VfsPath::FileSystem(foo_stub))
        );
        assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));

        Ok(())
    }

    #[test]
    fn sub_packages() -> anyhow::Result<()> {
        let TestCase { db, src, .. } = create_resolver()?;

        let foo = src.join("foo");
        let bar = foo.join("bar");
        let baz = bar.join("baz.py");

        db.memory_file_system().write_files([
            (&foo.join("__init__.py"), ""),
            (&bar.join("__init__.py"), ""),
            (&baz, "print('Hello, world!')"),
        ])?;

        let baz_module =
            resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();

        assert_eq!(&src, baz_module.search_path().path());
        assert_eq!(&baz, baz_module.file().path(&db));

        assert_eq!(
            Some(baz_module),
            path_to_module(&db, &VfsPath::FileSystem(baz))
        );

        Ok(())
    }

    #[test]
    fn namespace_package() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        // From [PEP420](https://peps.python.org/pep-0420/#nested-namespace-packages).
        // But uses `src` for `project1` and `site_packages` for `project2`.
        // ```
        // src
        //   parent
        //     child
        //       one.py
        // site_packages
        //   parent
        //     child
        //       two.py
        // ```

        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

        db.memory_file_system().write_files([
            (&one, "print('Hello, world!')"),
            (&two, "print('Hello, world!')"),
        ])?;

        let one_module =
            resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();

        assert_eq!(
            Some(one_module),
            path_to_module(&db, &VfsPath::FileSystem(one))
        );

        let two_module =
            resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap()).unwrap();
        assert_eq!(
            Some(two_module),
            path_to_module(&db, &VfsPath::FileSystem(two))
        );

        Ok(())
    }

    #[test]
    fn regular_package_in_namespace_package() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        // Adapted test case from the [PEP420 examples](https://peps.python.org/pep-0420/#nested-namespace-packages).
        // The `src/parent/child` package is a regular package. Therefore, `site_packages/parent/child/two.py` should not be resolved.
        // ```
        // src
        //   parent
        //     child
        //       one.py
        // site_packages
        //   parent
        //     child
        //       two.py
        // ```

        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

        db.memory_file_system().write_files([
            (&child1.join("__init__.py"), "print('Hello, world!')"),
            (&one, "print('Hello, world!')"),
            (&two, "print('Hello, world!')"),
        ])?;

        let one_module =
            resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();

        assert_eq!(
            Some(one_module),
            path_to_module(&db, &VfsPath::FileSystem(one))
        );

        assert_eq!(
            None,
            resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap())
        );
        Ok(())
    }

    #[test]
    fn module_search_path_priority() -> anyhow::Result<()> {
        let TestCase {
            db,
            src,
            site_packages,
            ..
        } = create_resolver()?;

        let foo_src = src.join("foo.py");
        let foo_site_packages = site_packages.join("foo.py");

        db.memory_file_system()
            .write_files([(&foo_src, ""), (&foo_site_packages, "")])?;

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo_src, foo_module.file().path(&db));

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo_src))
        );
        assert_eq!(
            None,
            path_to_module(&db, &VfsPath::FileSystem(foo_site_packages))
        );

        Ok(())
    }

    #[test]
    #[cfg(target_family = "unix")]
    fn symlink() -> anyhow::Result<()> {
        let TestCase {
            mut db,
            src,
            site_packages,
            custom_typeshed,
        } = create_resolver()?;

        db.with_os_file_system();

        let temp_dir = tempfile::tempdir()?;
        let root = FileSystemPath::from_std_path(temp_dir.path()).unwrap();

        let src = root.join(src);
        let site_packages = root.join(site_packages);
        let custom_typeshed = root.join(custom_typeshed);

        let foo = src.join("foo.py");
        let bar = src.join("bar.py");

        std::fs::create_dir_all(src.as_std_path())?;
        std::fs::create_dir_all(site_packages.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.as_std_path())?;

        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

        let settings = ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages),
            custom_typeshed: Some(custom_typeshed),
        };

        set_module_resolution_settings(&mut db, settings);

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();

        // `foo` and `bar` shouldn't resolve to the same file
        assert_ne!(foo_module, bar_module);

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo, foo_module.file().path(&db));

        assert_eq!(&src, bar_module.search_path().path());
        assert_eq!(&bar, bar_module.file().path(&db));
        assert_eq!(&foo, foo_module.file().path(&db));

        assert_ne!(&foo_module, &bar_module);

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo))
        );
        assert_eq!(
            Some(bar_module),
            path_to_module(&db, &VfsPath::FileSystem(bar))
        );

        Ok(())
    }

    #[test]
    fn deleting_an_unrelated_file_doesnt_change_module_resolution() -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;

        let foo_path = src.join("foo.py");
        let bar_path = src.join("bar.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&bar_path, "y = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        let bar = system_path_to_file(&db, &bar_path).expect("bar.py to exist");

        db.clear_salsa_events();

        // Delete `bar.py`
        db.memory_file_system().remove_file(&bar_path)?;
        bar.touch(&mut db);

        // Re-query the foo module. The foo module should still be cached because `bar.py` isn't relevant
        // for resolving `foo`.

        let foo_module2 = resolve_module(&db, foo_module_name);

        assert!(!db
            .take_salsa_events()
            .iter()
            .any(|event| { matches!(event.kind, salsa::EventKind::WillExecute { .. }) }));

        assert_eq!(Some(foo_module), foo_module2);

        Ok(())
    }

    #[test]
    fn adding_a_file_that_the_module_resolution_depends_on_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);

        // Now write the foo file
        db.memory_file_system().write_file(&foo_path, "x = 1")?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_path.clone()));
        let foo_file = system_path_to_file(&db, &foo_path).expect("foo.py to exist");

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(foo_file, foo_module.file());

        Ok(())
    }

    #[test]
    fn removing_a_file_that_the_module_resolution_depends_on_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");
        let foo_init_path = src.join("foo/__init__.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&foo_init_path, "x = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).expect("foo module to exist");

        assert_eq!(&foo_init_path, foo_module.file().path(&db));

        // Delete `foo/__init__.py` and the `foo` folder. `foo` should now resolve to `foo.py`
        db.memory_file_system().remove_file(&foo_init_path)?;
        db.memory_file_system()
            .remove_directory(foo_init_path.parent().unwrap())?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_init_path.clone()));

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(&foo_path, foo_module.file().path(&db));

        Ok(())
    }
}
@@ -1 +0,0 @@
dcab6e88883c629ede9637fb011958f8b4918f52
@@ -1,117 +0,0 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020]  # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000]  # OS X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
    IO_REPARSE_TAG_SYMLINK: int
    IO_REPARSE_TAG_MOUNT_POINT: int
    IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
    FILE_ATTRIBUTE_ARCHIVE: Literal[32]
    FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
    FILE_ATTRIBUTE_DEVICE: Literal[64]
    FILE_ATTRIBUTE_DIRECTORY: Literal[16]
    FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
    FILE_ATTRIBUTE_HIDDEN: Literal[2]
    FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
    FILE_ATTRIBUTE_NORMAL: Literal[128]
    FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
    FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
    FILE_ATTRIBUTE_OFFLINE: Literal[4096]
    FILE_ATTRIBUTE_READONLY: Literal[1]
    FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
    FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
    FILE_ATTRIBUTE_SYSTEM: Literal[4]
    FILE_ATTRIBUTE_TEMPORARY: Literal[256]
    FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
    SF_SETTABLE: Literal[0x3FFF0000]
    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
    # SF_RESTRICTED: Literal[0x00080000]
    SF_FIRMLINK: Literal[0x00800000]
    SF_DATALESS: Literal[0x40000000]

    SF_SUPPORTED: Literal[0x9F0000]
    SF_SYNTHETIC: Literal[0xC0000000]

    UF_TRACKED: Literal[0x00000040]
    UF_DATAVAULT: Literal[0x00000080]
    UF_SETTABLE: Literal[0x0000FFFF]
@@ -1,20 +0,0 @@
import enum
import sys
from typing import Literal

LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
ACCEPT_RETRY_DELAY: Literal[1]
DEBUG_STACK_DEPTH: Literal[10]
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
if sys.version_info >= (3, 11):
    SSL_SHUTDOWN_TIMEOUT: float
    FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
    FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
if sys.version_info >= (3, 12):
    THREAD_JOIN_TIMEOUT: Literal[300]

class _SendfileMode(enum.Enum):
    UNSUPPORTED = 1
    TRY_NATIVE = 2
    FALLBACK = 3
@@ -1,20 +0,0 @@
import functools
import traceback
from collections.abc import Iterable
from types import FrameType, FunctionType
from typing import Any, overload
from typing_extensions import TypeAlias

class _HasWrapper:
    __wrapper__: _HasWrapper | FunctionType

_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any]

@overload
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
@overload
def _get_function_source(func: object) -> tuple[str, int] | None: ...
def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...
def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...
@@ -1,32 +0,0 @@
from ._base import (
    ALL_COMPLETED as ALL_COMPLETED,
    FIRST_COMPLETED as FIRST_COMPLETED,
    FIRST_EXCEPTION as FIRST_EXCEPTION,
    BrokenExecutor as BrokenExecutor,
    CancelledError as CancelledError,
    Executor as Executor,
    Future as Future,
    InvalidStateError as InvalidStateError,
    TimeoutError as TimeoutError,
    as_completed as as_completed,
    wait as wait,
)
from .process import ProcessPoolExecutor as ProcessPoolExecutor
from .thread import ThreadPoolExecutor as ThreadPoolExecutor

__all__ = (
    "FIRST_COMPLETED",
    "FIRST_EXCEPTION",
    "ALL_COMPLETED",
    "CancelledError",
    "TimeoutError",
    "BrokenExecutor",
    "Future",
    "Executor",
    "wait",
    "as_completed",
    "ProcessPoolExecutor",
    "ThreadPoolExecutor",
)

def __dir__() -> tuple[str, ...]: ...
@@ -1,16 +0,0 @@
from typing import Any, TypeVar

__all__ = ["Error", "copy", "deepcopy"]

_T = TypeVar("_T")

# None in CPython but non-None in Jython
PyStringMap: Any

# Note: memo and _nil are internal kwargs.
def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ...
def copy(x: _T) -> _T: ...

class Error(Exception): ...

error = Error
@@ -1,99 +0,0 @@
from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.dist import Distribution
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Any, ClassVar, Literal, overload

class Command:
    distribution: Distribution
    # Any to work around variance issues
    sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]
    def __init__(self, dist: Distribution) -> None: ...
    @abstractmethod
    def initialize_options(self) -> None: ...
    @abstractmethod
    def finalize_options(self) -> None: ...
    @abstractmethod
    def run(self) -> None: ...
    def announce(self, msg: str, level: int = 1) -> None: ...
    def debug_print(self, msg: str) -> None: ...
    def ensure_string(self, option: str, default: str | None = None) -> None: ...
    def ensure_string_list(self, option: str | list[str]) -> None: ...
    def ensure_filename(self, option: str) -> None: ...
    def ensure_dirname(self, option: str) -> None: ...
    def get_command_name(self) -> str: ...
    def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
    def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
    def reinitialize_command(self, command: Command | str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
    def run_command(self, command: str) -> None: ...
    def get_sub_commands(self) -> list[str]: ...
    def warn(self, msg: str) -> None: ...
    def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ...
    def mkpath(self, name: str, mode: int = 0o777) -> None: ...
    @overload
    def copy_file(
        self,
        infile: StrPath,
        outfile: _StrPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_StrPathT | str, bool]: ...
    @overload
    def copy_file(
        self,
        infile: BytesPath,
        outfile: _BytesPathT,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        link: str | None = None,
        level: Unused = 1,
    ) -> tuple[_BytesPathT | bytes, bool]: ...
    def copy_tree(
        self,
        infile: StrPath,
        outfile: str,
        preserve_mode: bool | Literal[0, 1] = 1,
        preserve_times: bool | Literal[0, 1] = 1,
        preserve_symlinks: bool | Literal[0, 1] = 0,
        level: Unused = 1,
    ) -> list[str]: ...
    @overload
    def move_file(self, src: StrPath, dst: _StrPathT, level: Unused = 1) -> _StrPathT | str: ...
    @overload
    def move_file(self, src: BytesPath, dst: _BytesPathT, level: Unused = 1) -> _BytesPathT | bytes: ...
    def spawn(self, cmd: Iterable[str], search_path: bool | Literal[0, 1] = 1, level: Unused = 1) -> None: ...
    @overload
    def make_archive(
        self,
        base_name: str,
        format: str,
        root_dir: StrOrBytesPath | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    @overload
    def make_archive(
        self,
        base_name: StrPath,
        format: str,
        root_dir: StrOrBytesPath,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    def make_file(
        self,
        infiles: str | list[str] | tuple[str, ...],
        outfile: StrOrBytesPath,
        func: Callable[..., object],
        args: list[Any],
        exec_msg: str | None = None,
        skip_msg: str | None = None,
        level: Unused = 1,
    ) -> None: ...
    def ensure_finalized(self) -> None: ...
    def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ...
@@ -1,25 +0,0 @@
from typing import Any

from ..cmd import Command

def show_formats() -> None: ...

class bdist(Command):
    description: str
    user_options: Any
    boolean_options: Any
    help_options: Any
    no_format_option: Any
    default_format: Any
    format_commands: Any
    format_command: Any
    bdist_base: Any
    plat_name: Any
    formats: Any
    dist_dir: Any
    skip_build: int
    group: Any
    owner: Any
    def initialize_options(self) -> None: ...
    def finalize_options(self) -> None: ...
    def run(self) -> None: ...
@@ -1,149 +0,0 @@
from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
from collections.abc import Iterable, Mapping
from distutils.cmd import Command
from re import Pattern
from typing import IO, Any, ClassVar, Literal, TypeVar, overload
from typing_extensions import TypeAlias

command_re: Pattern[str]

_OptionsList: TypeAlias = list[tuple[str, str | None, str, int] | tuple[str, str | None, str]]
_CommandT = TypeVar("_CommandT", bound=Command)

class DistributionMetadata:
    def __init__(self, path: StrOrBytesPath | None = None) -> None: ...
    name: str | None
    version: str | None
    author: str | None
    author_email: str | None
    maintainer: str | None
    maintainer_email: str | None
    url: str | None
    license: str | None
    description: str | None
    long_description: str | None
    keywords: str | list[str] | None
    platforms: str | list[str] | None
    classifiers: str | list[str] | None
    download_url: str | None
    provides: list[str] | None
    requires: list[str] | None
    obsoletes: list[str] | None
    def read_pkg_file(self, file: IO[str]) -> None: ...
    def write_pkg_info(self, base_dir: StrPath) -> None: ...
    def write_pkg_file(self, file: SupportsWrite[str]) -> None: ...
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def set_requires(self, value: Iterable[str]) -> None: ...
    def get_provides(self) -> list[str]: ...
    def set_provides(self, value: Iterable[str]) -> None: ...
    def get_obsoletes(self) -> list[str]: ...
    def set_obsoletes(self, value: Iterable[str]) -> None: ...

class Distribution:
    cmdclass: dict[str, type[Command]]
    metadata: DistributionMetadata
    def __init__(self, attrs: Mapping[str, Any] | None = None) -> None: ...
    def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
    def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
    global_options: ClassVar[_OptionsList]
    common_usage: ClassVar[str]
    display_options: ClassVar[_OptionsList]
    display_option_names: ClassVar[list[str]]
    negative_opt: ClassVar[dict[str, str]]
    verbose: int
    dry_run: int
    help: int
    command_packages: list[str] | None
    script_name: str | None
    script_args: list[str] | None
    command_options: dict[str, dict[str, tuple[str, str]]]
    dist_files: list[tuple[str, str, str]]
    packages: Incomplete
    package_data: dict[str, list[str]]
    package_dir: Incomplete
    py_modules: Incomplete
    libraries: Incomplete
    headers: Incomplete
    ext_modules: Incomplete
    ext_package: Incomplete
    include_dirs: Incomplete
    extra_path: Incomplete
    scripts: Incomplete
    data_files: Incomplete
    password: str
    command_obj: Incomplete
    have_run: Incomplete
    want_user_cfg: bool
    def dump_option_dicts(
        self, header: Incomplete | None = None, commands: Incomplete | None = None, indent: str = ""
    ) -> None: ...
    def find_config_files(self): ...
    commands: Incomplete
    def parse_command_line(self): ...
    def finalize_options(self) -> None: ...
    def handle_display_options(self, option_order): ...
    def print_command_list(self, commands, header, max_length) -> None: ...
    def print_commands(self) -> None: ...
    def get_command_list(self): ...
    def get_command_packages(self): ...
    def get_command_class(self, command: str) -> type[Command]: ...
    @overload
    def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
    @overload
    def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...
    def announce(self, msg, level: int = 2) -> None: ...
    def run_commands(self) -> None: ...
    def run_command(self, command: str) -> None: ...
    def has_pure_modules(self) -> bool: ...
    def has_ext_modules(self) -> bool: ...
    def has_c_libraries(self) -> bool: ...
    def has_modules(self) -> bool: ...
    def has_headers(self) -> bool: ...
    def has_scripts(self) -> bool: ...
    def has_data_files(self) -> bool: ...
    def is_pure(self) -> bool: ...

    # Getter methods generated in __init__
    def get_name(self) -> str: ...
    def get_version(self) -> str: ...
    def get_fullname(self) -> str: ...
    def get_author(self) -> str: ...
    def get_author_email(self) -> str: ...
    def get_maintainer(self) -> str: ...
    def get_maintainer_email(self) -> str: ...
    def get_contact(self) -> str: ...
    def get_contact_email(self) -> str: ...
    def get_url(self) -> str: ...
    def get_license(self) -> str: ...
    def get_licence(self) -> str: ...
    def get_description(self) -> str: ...
    def get_long_description(self) -> str: ...
    def get_keywords(self) -> str | list[str]: ...
    def get_platforms(self) -> str | list[str]: ...
    def get_classifiers(self) -> str | list[str]: ...
    def get_download_url(self) -> str: ...
    def get_requires(self) -> list[str]: ...
    def get_provides(self) -> list[str]: ...
    def get_obsoletes(self) -> list[str]: ...
@@ -1,67 +0,0 @@
ENDMARKER: int
NAME: int
NUMBER: int
STRING: int
NEWLINE: int
INDENT: int
DEDENT: int
LPAR: int
RPAR: int
LSQB: int
RSQB: int
COLON: int
COMMA: int
SEMI: int
PLUS: int
MINUS: int
STAR: int
SLASH: int
VBAR: int
AMPER: int
LESS: int
GREATER: int
EQUAL: int
DOT: int
PERCENT: int
BACKQUOTE: int
LBRACE: int
RBRACE: int
EQEQUAL: int
NOTEQUAL: int
LESSEQUAL: int
GREATEREQUAL: int
TILDE: int
CIRCUMFLEX: int
LEFTSHIFT: int
RIGHTSHIFT: int
DOUBLESTAR: int
PLUSEQUAL: int
MINEQUAL: int
STAREQUAL: int
SLASHEQUAL: int
PERCENTEQUAL: int
AMPEREQUAL: int
VBAREQUAL: int
CIRCUMFLEXEQUAL: int
LEFTSHIFTEQUAL: int
RIGHTSHIFTEQUAL: int
DOUBLESTAREQUAL: int
DOUBLESLASH: int
DOUBLESLASHEQUAL: int
OP: int
COMMENT: int
NL: int
RARROW: int
AT: int
ATEQUAL: int
AWAIT: int
ASYNC: int
ERRORTOKEN: int
COLONEQUAL: int
N_TOKENS: int
NT_OFFSET: int
tok_name: dict[int, str]

def ISTERMINAL(x: int) -> bool: ...
def ISNONTERMINAL(x: int) -> bool: ...
def ISEOF(x: int) -> bool: ...
@@ -1,19 +0,0 @@
import sys
from collections.abc import Callable, Iterable
from typing import Literal
from typing_extensions import TypeAlias

if sys.platform != "win32":
    __all__ = ["openpty", "fork", "spawn"]
    _Reader: TypeAlias = Callable[[int], bytes]

    STDIN_FILENO: Literal[0]
    STDOUT_FILENO: Literal[1]
    STDERR_FILENO: Literal[2]

    CHILD: Literal[0]
    def openpty() -> tuple[int, int]: ...
    def master_open() -> tuple[int, str]: ...  # deprecated, use openpty()
    def slave_open(tty_name: str) -> int: ...  # deprecated, use openpty()
    def fork() -> tuple[int, int]: ...
    def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...
@@ -1,55 +0,0 @@
import sys
from typing import Literal, overload

if sys.platform != "win32":
    LOG_ALERT: Literal[1]
    LOG_AUTH: Literal[32]
    LOG_AUTHPRIV: Literal[80]
    LOG_CONS: Literal[2]
    LOG_CRIT: Literal[2]
    LOG_CRON: Literal[72]
    LOG_DAEMON: Literal[24]
    LOG_DEBUG: Literal[7]
    LOG_EMERG: Literal[0]
    LOG_ERR: Literal[3]
    LOG_INFO: Literal[6]
    LOG_KERN: Literal[0]
    LOG_LOCAL0: Literal[128]
    LOG_LOCAL1: Literal[136]
    LOG_LOCAL2: Literal[144]
    LOG_LOCAL3: Literal[152]
    LOG_LOCAL4: Literal[160]
    LOG_LOCAL5: Literal[168]
    LOG_LOCAL6: Literal[176]
    LOG_LOCAL7: Literal[184]
    LOG_LPR: Literal[48]
    LOG_MAIL: Literal[16]
    LOG_NDELAY: Literal[8]
    LOG_NEWS: Literal[56]
    LOG_NOTICE: Literal[5]
    LOG_NOWAIT: Literal[16]
    LOG_ODELAY: Literal[4]
    LOG_PERROR: Literal[32]
    LOG_PID: Literal[1]
    LOG_SYSLOG: Literal[40]
    LOG_USER: Literal[8]
    LOG_UUCP: Literal[64]
    LOG_WARNING: Literal[4]

    if sys.version_info >= (3, 13):
        LOG_FTP: Literal[88]
        LOG_INSTALL: Literal[112]
        LOG_LAUNCHD: Literal[192]
        LOG_NETINFO: Literal[96]
        LOG_RAS: Literal[120]
        LOG_REMOTEAUTH: Literal[104]

    def LOG_MASK(pri: int, /) -> int: ...
    def LOG_UPTO(pri: int, /) -> int: ...
    def closelog() -> None: ...
    def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
    def setlogmask(maskpri: int, /) -> int: ...
    @overload
    def syslog(priority: int, message: str) -> None: ...
    @overload
    def syslog(message: str) -> None: ...
@@ -1,80 +0,0 @@
from typing import Literal

# These are not actually bools. See #4669
NO: bool
YES: bool
TRUE: bool
FALSE: bool
ON: bool
OFF: bool
N: Literal["n"]
S: Literal["s"]
W: Literal["w"]
E: Literal["e"]
NW: Literal["nw"]
SW: Literal["sw"]
NE: Literal["ne"]
SE: Literal["se"]
NS: Literal["ns"]
EW: Literal["ew"]
NSEW: Literal["nsew"]
CENTER: Literal["center"]
NONE: Literal["none"]
X: Literal["x"]
Y: Literal["y"]
BOTH: Literal["both"]
LEFT: Literal["left"]
TOP: Literal["top"]
RIGHT: Literal["right"]
BOTTOM: Literal["bottom"]
RAISED: Literal["raised"]
SUNKEN: Literal["sunken"]
FLAT: Literal["flat"]
RIDGE: Literal["ridge"]
GROOVE: Literal["groove"]
SOLID: Literal["solid"]
HORIZONTAL: Literal["horizontal"]
VERTICAL: Literal["vertical"]
NUMERIC: Literal["numeric"]
CHAR: Literal["char"]
WORD: Literal["word"]
BASELINE: Literal["baseline"]
INSIDE: Literal["inside"]
OUTSIDE: Literal["outside"]
SEL: Literal["sel"]
SEL_FIRST: Literal["sel.first"]
SEL_LAST: Literal["sel.last"]
END: Literal["end"]
INSERT: Literal["insert"]
CURRENT: Literal["current"]
ANCHOR: Literal["anchor"]
ALL: Literal["all"]
NORMAL: Literal["normal"]
DISABLED: Literal["disabled"]
ACTIVE: Literal["active"]
HIDDEN: Literal["hidden"]
CASCADE: Literal["cascade"]
CHECKBUTTON: Literal["checkbutton"]
COMMAND: Literal["command"]
RADIOBUTTON: Literal["radiobutton"]
SEPARATOR: Literal["separator"]
SINGLE: Literal["single"]
BROWSE: Literal["browse"]
MULTIPLE: Literal["multiple"]
EXTENDED: Literal["extended"]
DOTBOX: Literal["dotbox"]
UNDERLINE: Literal["underline"]
PIESLICE: Literal["pieslice"]
CHORD: Literal["chord"]
ARC: Literal["arc"]
FIRST: Literal["first"]
LAST: Literal["last"]
BUTT: Literal["butt"]
PROJECTING: Literal["projecting"]
ROUND: Literal["round"]
BEVEL: Literal["bevel"]
MITER: Literal["miter"]
MOVETO: Literal["moveto"]
SCROLL: Literal["scroll"]
UNITS: Literal["units"]
PAGES: Literal["pages"]
@@ -1,28 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from typing import Literal, overload

if sys.platform == "win32":
    SND_APPLICATION: Literal[128]
    SND_FILENAME: Literal[131072]
    SND_ALIAS: Literal[65536]
    SND_LOOP: Literal[8]
    SND_MEMORY: Literal[4]
    SND_PURGE: Literal[64]
    SND_ASYNC: Literal[1]
    SND_NODEFAULT: Literal[2]
    SND_NOSTOP: Literal[16]
    SND_NOWAIT: Literal[8192]

    MB_ICONASTERISK: Literal[64]
    MB_ICONEXCLAMATION: Literal[48]
    MB_ICONHAND: Literal[16]
    MB_ICONQUESTION: Literal[32]
    MB_OK: Literal[0]
    def Beep(frequency: int, duration: int) -> None: ...
    # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible
    @overload
    def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ...
    @overload
    def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ...
    def MessageBeep(type: int = 0) -> None: ...
@@ -11,23 +11,42 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
indexmap = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
anyhow = { workspace = true }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true
@@ -1,9 +1,9 @@
 # Red Knot

-A work-in-progress multifile module resolver for Ruff.
+Semantic analysis for the red-knot project.

 ## Vendored types for the stdlib

-This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
+This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

 The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
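For orientation, here is a minimal, hedged sketch of reading those vendored stubs through this crate's public API. It assumes `vendored_typeshed_stubs()` (exported from `lib.rs` in this diff) returns a `VendoredFileSystem` with a `read_to_string` method and that `VendoredPath::new` exists; the exact signatures may differ.

```rust
// Hedged sketch: peek at the vendored typeshed zip via the public API.
use red_knot_python_semantic::vendored_typeshed_stubs;
use ruff_db::vendored::VendoredPath; // assumed path type from ruff_db

fn main() {
    let stubs = vendored_typeshed_stubs();
    // `stdlib/VERSIONS` records which Python versions each stdlib module supports.
    let versions = stubs
        .read_to_string(VendoredPath::new("stdlib/VERSIONS"))
        .expect("vendored typeshed should ship stdlib/VERSIONS");
    println!("{versions}");
}
```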
@@ -3,7 +3,7 @@
 //!
 //! This script should be automatically run at build time
 //! whenever the script itself changes, or whenever any files
-//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
+//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

 use std::fs::File;
 use std::path::Path;
@@ -23,8 +23,21 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
 fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
     let mut zip = ZipWriter::new(writer);

+    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
+    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
+    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
+    // (WASM itself is already slower than a native build for a specific platform).
+    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
+    // architecture of the system running the build script and not the architecture of the build-target.
+    // That's why we use the `TARGET` environment variable here.
+    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
+        CompressionMethod::Deflated
+    } else {
+        CompressionMethod::Zstd
+    };
+
     let options = FileOptions::default()
-        .compression_method(CompressionMethod::Zstd)
+        .compression_method(method)
         .unix_permissions(0o644);

     for entry in walkdir::WalkDir::new(directory_path) {
@@ -27,12 +27,13 @@ pub struct AstNodeRef<T> {

 #[allow(unsafe_code)]
 impl<T> AstNodeRef<T> {
-    /// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to which
-    /// the `AstNodeRef` belongs.
+    /// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to
+    /// which the `AstNodeRef` belongs.
     ///
     /// ## Safety
-    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the [`ParsedModule`] to
-    /// which `node` belongs. It's the caller's responsibility to ensure that the invariant `node belongs to parsed` is upheld.
+    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
+    /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
+    /// the invariant `node belongs to parsed` is upheld.

     pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
         Self {
@@ -43,8 +44,8 @@ impl<T> AstNodeRef<T> {

     /// Returns a reference to the wrapped node.
     pub fn node(&self) -> &T {
-        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still alive
-        // and not moved.
+        // SAFETY: Holding on to `parsed` ensures that the AST to which `node` belongs is still
+        // alive and not moved.
         unsafe { self.node.as_ref() }
     }
 }
crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db, builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
@@ -1,56 +1,30 @@
use salsa::DbWithJar;

use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};

use red_knot_module_resolver::Db as ResolverDb;

use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{public_symbols_map, PublicSymbolId, ScopeId};
use crate::semantic_index::{root_scope, semantic_index, symbol_table};
use crate::types::{infer_types, public_symbol_ty};

#[salsa::jar(db=Db)]
pub struct Jar(
    ScopeId<'_>,
    PublicSymbolId<'_>,
    Definition<'_>,
    symbol_table,
    root_scope,
    semantic_index,
    infer_types,
    public_symbol_ty,
    public_symbols_map,
);

/// Database giving access to semantic information about a Python program.
pub trait Db:
    SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn SourceDb> + Upcast<dyn ResolverDb>
{
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
    fn is_file_open(&self, file: File) -> bool;
}

#[cfg(test)]
pub(crate) mod tests {
    use std::fmt::Formatter;
    use std::marker::PhantomData;
    use std::sync::Arc;

    use salsa::id::AsId;
    use salsa::ingredient::Ingredient;
    use salsa::storage::HasIngredientsFor;
    use salsa::DebugWithDb;
    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Upcast};

    use red_knot_module_resolver::{Db as ResolverDb, Jar as ResolverJar};
    use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
    use ruff_db::vfs::Vfs;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use super::Db;

    use super::{Db, Jar};

    #[salsa::db(Jar, ResolverJar, SourceJar)]
    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        vfs: Vfs,
        file_system: TestFileSystem,
        files: Files,
        system: TestSystem,
        vendored: VendoredFileSystem,
        events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
    }

@@ -58,29 +32,13 @@ pub(crate) mod tests {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().clone(),
                events: std::sync::Arc::default(),
                vfs: Vfs::with_stubbed_vendored(),
                files: Files::default(),
            }
        }

        /// Returns the memory file system.
        ///
        /// ## Panics
        /// If this test db isn't using a memory file system.
        pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
            if let TestFileSystem::Memory(fs) = &self.file_system {
                fs
            } else {
                panic!("The test db is not using a memory file system");
            }
        }

        #[allow(unused)]
        pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
            &mut self.vfs
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
@@ -101,16 +59,28 @@ pub(crate) mod tests {
        }
    }

    impl SourceDb for TestDb {
        fn file_system(&self) -> &dyn FileSystem {
            match &self.file_system {
                TestFileSystem::Memory(fs) => fs,
                TestFileSystem::Os(fs) => fs,
            }
    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn vfs(&self) -> &Vfs {
            &self.vfs
        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    #[salsa::db]
    impl SourceDb for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

@@ -118,144 +88,25 @@ pub(crate) mod tests {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl Upcast<dyn ResolverDb> for TestDb {
        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    impl red_knot_module_resolver::Db for TestDb {}
    impl Db for TestDb {}
    #[salsa::db]
    impl Db for TestDb {
        fn is_file_open(&self, file: File) -> bool {
            !file.path(self).is_vendored_path()
        }
    }

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
        fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
            let event = event();
            tracing::trace!("event: {event:?}");
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                vfs: self.vfs.snapshot(),
                file_system: match &self.file_system {
                    TestFileSystem::Memory(memory) => TestFileSystem::Memory(memory.snapshot()),
                    TestFileSystem::Os(fs) => TestFileSystem::Os(fs.snapshot()),
                },
                events: self.events.clone(),
            })
        }
    }

    enum TestFileSystem {
        Memory(MemoryFileSystem),
        #[allow(dead_code)]
        Os(OsFileSystem),
    }

    pub(crate) fn assert_will_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        will_run_function_query(db, to_function, input, events, true);
    }

    pub(crate) fn assert_will_not_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        will_run_function_query(db, to_function, input, events, false);
    }

    fn will_run_function_query<'db, C, Db, Jar>(
        db: &'db Db,
        to_function: impl FnOnce(&C) -> &salsa::function::FunctionIngredient<C>,
        input: &C::Input<'db>,
        events: &[salsa::Event],
        should_run: bool,
    ) where
        C: salsa::function::Configuration<Jar = Jar>
            + salsa::storage::IngredientsFor<Jar = Jar, Ingredients = C>,
        Jar: HasIngredientsFor<C>,
        Db: salsa::DbWithJar<Jar>,
        C::Input<'db>: AsId,
    {
        let (jar, _) =
            <_ as salsa::storage::HasJar<<C as salsa::storage::IngredientsFor>::Jar>>::jar(db);
        let ingredient = jar.ingredient();

        let function_ingredient = to_function(ingredient);

        let ingredient_index =
            <salsa::function::FunctionIngredient<C> as Ingredient<Db>>::ingredient_index(
                function_ingredient,
            );

        let did_run = events.iter().any(|event| {
            if let salsa::EventKind::WillExecute { database_key } = event.kind {
                database_key.ingredient_index() == ingredient_index
                    && database_key.key_index() == input.as_id()
            } else {
                false
            }
        });

        if should_run && !did_run {
            panic!(
                "Expected query {:?} to run but it didn't",
                DebugIdx {
                    db: PhantomData::<Db>,
                    value_id: input.as_id(),
                    ingredient: function_ingredient,
                }
            );
        } else if !should_run && did_run {
            panic!(
                "Expected query {:?} not to run but it did",
                DebugIdx {
                    db: PhantomData::<Db>,
                    value_id: input.as_id(),
                    ingredient: function_ingredient,
                }
            );
        }
    }

    struct DebugIdx<'a, I, Db>
    where
        I: Ingredient<Db>,
    {
        value_id: salsa::Id,
        ingredient: &'a I,
        db: PhantomData<Db>,
    }

    impl<'a, I, Db> std::fmt::Debug for DebugIdx<'a, I, Db>
    where
        I: Ingredient<Db>,
    {
        fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
            self.ingredient.fmt_index(Some(self.value_id), f)
        }
    }
}
@@ -2,14 +2,23 @@ use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub use db::{Db, Jar};
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub mod types;

type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

@@ -1,10 +0,0 @@
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher;

pub mod ast_node_ref;
mod node_key;
pub mod semantic_index;
pub mod types;

pub(crate) type FxIndexSet<V> = indexmap::set::IndexSet<V, BuildHasherDefault<FxHasher>>;
crates/red_knot_python_semantic/src/module_name.rs (new file, 216 lines)
@@ -0,0 +1,216 @@
use std::fmt;
use std::ops::Deref;

use compact_str::{CompactString, ToCompactString};

use ruff_python_stdlib::identifiers::is_identifier;

/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(compact_str::CompactString);

impl ModuleName {
    /// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
    /// module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    #[inline]
    #[must_use]
    pub fn new(name: &str) -> Option<Self> {
        Self::is_valid_name(name).then(|| Self(CompactString::from(name)))
    }

    /// Creates a new module name for `name` where `name` is a static string.
    /// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    ///
    /// ## Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
    /// assert_eq!(ModuleName::new_static("..foo"), None);
    /// assert_eq!(ModuleName::new_static(".foo"), None);
    /// assert_eq!(ModuleName::new_static("foo."), None);
    /// assert_eq!(ModuleName::new_static("foo..bar"), None);
    /// assert_eq!(ModuleName::new_static("2000"), None);
    /// ```
    #[inline]
    #[must_use]
    pub fn new_static(name: &'static str) -> Option<Self> {
        Self::is_valid_name(name).then(|| Self(CompactString::const_new(name)))
    }

    #[must_use]
    fn is_valid_name(name: &str) -> bool {
        !name.is_empty() && name.split('.').all(is_identifier)
    }

    /// An iterator over the components of the module name:
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
    #[must_use]
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

    /// The name of this module's immediate parent, if it has a parent.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
    /// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
    /// ```
    #[must_use]
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;
        Some(Self(parent.to_compact_string()))
    }

    /// Returns `true` if the name starts with `other`.
    ///
    /// This is equivalent to checking if `self` is a sub-module of `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
    /// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
    /// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    /// ```
    #[must_use]
    pub fn starts_with(&self, other: &ModuleName) -> bool {
        let mut self_components = self.components();
        let other_components = other.components();

        for other_component in other_components {
            if self_components.next() != Some(other_component) {
                return false;
            }
        }

        true
    }

    #[must_use]
    #[inline]
    pub fn as_str(&self) -> &str {
        &self.0
    }

    /// Construct a [`ModuleName`] from a sequence of parts.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
    /// assert_eq!(&*ModuleName::from_components(["a", "b", "c"]).unwrap(), "a.b.c");
    ///
    /// assert_eq!(ModuleName::from_components(["a-b"]), None);
    /// assert_eq!(ModuleName::from_components(["a", "a-b"]), None);
    /// assert_eq!(ModuleName::from_components(["a", "b", "a-b-c"]), None);
    /// ```
    #[must_use]
    pub fn from_components<'a>(components: impl IntoIterator<Item = &'a str>) -> Option<Self> {
        let mut components = components.into_iter();
        let first_part = components.next()?;
        if !is_identifier(first_part) {
            return None;
        }
        let name = if let Some(second_part) = components.next() {
            if !is_identifier(second_part) {
                return None;
            }
            let mut name = format!("{first_part}.{second_part}");
            for part in components {
                if !is_identifier(part) {
                    return None;
                }
                name.push('.');
                name.push_str(part);
            }
            CompactString::from(&name)
        } else {
            CompactString::from(first_part)
        };
        Some(Self(name))
    }

    /// Extend `self` with the components of `other`
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// let mut module_name = ModuleName::new_static("foo").unwrap();
    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
    /// assert_eq!(&module_name, "foo.bar");
    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
    /// ```
    pub fn extend(&mut self, other: &ModuleName) {
        self.0.push('.');
        self.0.push_str(other);
    }
}

impl Deref for ModuleName {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq<str> for ModuleName {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<ModuleName> for str {
    fn eq(&self, other: &ModuleName) -> bool {
        self == other.as_str()
    }
}

impl std::fmt::Display for ModuleName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}
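Since `parent()` returns `None` for a top-level module, walking a dotted name's enclosing packages is a short loop. This usage sketch relies only on the `ModuleName` API shown above:

```rust
use red_knot_python_semantic::ModuleName;

// Print the enclosing packages of a name like `foo.bar.baz`,
// innermost first: `foo.bar`, then `foo`.
fn print_ancestors(name: &ModuleName) {
    let mut current = name.parent();
    while let Some(parent) = current {
        println!("{parent}");
        current = parent.parent();
    }
}
```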
crates/red_knot_python_semantic/src/module_resolver/mod.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use std::iter::FusedIterator;

pub(crate) use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;

mod module;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
    SystemModuleSearchPathsIter {
        inner: search_paths(db),
    }
}

pub struct SystemModuleSearchPathsIter<'db> {
    inner: SearchPathIterator<'db>,
}

impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
    type Item = &'db SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;

            if let Some(system_path) = next.as_system_path() {
                return Some(system_path);
            }
        }
    }
}

impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
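A consumer-side sketch of the iterator above; the `db` setup is elided, and `SystemPath` implementing `Display` is an assumption:

```rust
use red_knot_python_semantic::{system_module_search_paths, Db};

// List every module search path that points at the real file system.
// Vendored (in-zip) search paths are skipped by the iterator itself.
fn dump_search_paths(db: &dyn Db) {
    for path in system_module_search_paths(db) {
        println!("search path: {path}");
    }
}
```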
@@ -0,0 +1,85 @@
use std::fmt::Formatter;
use std::sync::Arc;

use ruff_db::files::File;

use super::path::SearchPath;
use crate::module_name::ModuleName;

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
    inner: Arc<ModuleInner>,
}

impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: SearchPath,
        file: File,
    ) -> Self {
        Self {
            inner: Arc::new(ModuleInner {
                name,
                kind,
                search_path,
                file,
            }),
        }
    }

    /// The absolute name of the module (e.g. `foo.bar`)
    pub fn name(&self) -> &ModuleName {
        &self.inner.name
    }

    /// The file containing the source code that defines this module
    pub fn file(&self) -> File {
        self.inner.file
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
    pub fn kind(&self) -> ModuleKind {
        self.inner.kind
    }
}

impl std::fmt::Debug for Module {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file())
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: SearchPath,
    file: File,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
    /// A single-file module (e.g. `foo.py` or `foo.pyi`)
    Module,

    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

impl ModuleKind {
    pub const fn is_package(self) -> bool {
        matches!(self, ModuleKind::Package)
    }
}
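One design note worth spelling out: because `Module` stores its data behind an `Arc<ModuleInner>`, passing modules around is cheap. A small sketch, using only the types defined in this file:

```rust
// Cloning is a refcount bump, not a deep copy; equality still compares the
// inner data, because the derived `PartialEq` delegates through the `Arc`.
fn share(module: &Module) -> Module {
    let alias = module.clone();
    debug_assert_eq!(&alias, module);
    alias
}
```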
crates/red_knot_python_semantic/src/module_resolver/path.rs (new file, 1098 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/resolver.rs (new file, 1731 lines; diff suppressed because it is too large)
crates/red_knot_python_semantic/src/module_resolver/state.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use ruff_db::vendored::VendoredFileSystem;

use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::python_version::PythonVersion;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: PythonVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
            target_version,
        }
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}
crates/red_knot_python_semantic/src/module_resolver/testing.rs (new file, 302 lines)
@@ -0,0 +1,302 @@
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::ProgramSettings;

/// A test case for the module resolver.
///
/// You generally shouldn't construct instances of this struct directly;
/// instead, use the [`TestCaseBuilder`].
pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: PythonVersion,
}

/// A `(file_name, file_contents)` tuple
pub(crate) type FileSpec = (&'static str, &'static str);

/// Specification for a typeshed mock to be created as part of a test
#[derive(Debug, Clone, Copy, Default)]
pub(crate) struct MockedTypeshed {
    /// The stdlib files to be created in the typeshed mock
    pub(crate) stdlib_files: &'static [FileSpec],

    /// The contents of the `stdlib/VERSIONS` file
    /// to be created in the typeshed mock
    pub(crate) versions: &'static str,
}

#[derive(Debug)]
pub(crate) struct VendoredTypeshed;

#[derive(Debug)]
pub(crate) struct UnspecifiedTypeshed;

/// A builder for a module-resolver test case.
///
/// The builder takes care of creating a [`TestDb`]
/// instance, applying the module resolver settings,
/// and creating mock directories for the stdlib, `site-packages`,
/// first-party code, etc.
///
/// For simple tests that do not involve typeshed,
/// test cases can be created as follows:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
///     .with_src_files(...)
///     .build();
///
/// let test_case2 = TestCaseBuilder::new()
///     .with_site_packages_files(...)
///     .build();
/// ```
///
/// Any test can specify the target Python version that should be used
/// in the module resolver settings:
///
/// ```rs
/// let test_case = TestCaseBuilder::new()
///     .with_src_files(...)
///     .with_target_version(...)
///     .build();
/// ```
///
/// For tests checking that standard-library module resolution is working
/// correctly, you should usually create a [`MockedTypeshed`] instance
/// and pass it to the [`TestCaseBuilder::with_custom_typeshed`] method.
/// If you need to check something that involves the vendored typeshed stubs
/// we include as part of the binary, you can instead use the
/// [`TestCaseBuilder::with_vendored_typeshed`] method.
/// For either of these, you should almost always try to be explicit
/// about the Python version you want to be specified in the module-resolver
/// settings for the test:
///
/// ```rs
/// const TYPESHED: MockedTypeshed = MockedTypeshed { ... };
///
/// let test_case = TestCaseBuilder::new()
///     .with_custom_typeshed(TYPESHED)
///     .with_target_version(...)
///     .build();
///
/// let test_case2 = TestCaseBuilder::new()
///     .with_vendored_typeshed()
///     .with_target_version(...)
///     .build();
/// ```
///
/// If you have not called one of those options, the `stdlib` field
/// on the [`TestCase`] instance created from `.build()` will be set
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
    typeshed_option: T,
    target_version: PythonVersion,
    first_party_files: Vec<FileSpec>,
    site_packages_files: Vec<FileSpec>,
}

impl<T> TestCaseBuilder<T> {
    /// Specify files to be created in the `src` mock directory
    pub(crate) fn with_src_files(mut self, files: &[FileSpec]) -> Self {
        self.first_party_files.extend(files.iter().copied());
        self
    }

    /// Specify files to be created in the `site-packages` mock directory
    pub(crate) fn with_site_packages_files(mut self, files: &[FileSpec]) -> Self {
        self.site_packages_files.extend(files.iter().copied());
        self
    }

    /// Specify the target Python version the module resolver should assume
    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
        self.target_version = target_version;
        self
    }

    fn write_mock_directory(
        db: &mut TestDb,
        location: impl AsRef<SystemPath>,
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
                .map(|(relative_path, contents)| (root.join(relative_path), contents)),
        )
        .unwrap();
        root
    }
}

impl TestCaseBuilder<UnspecifiedTypeshed> {
    pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
        Self {
            typeshed_option: UnspecifiedTypeshed,
            target_version: PythonVersion::default(),
            first_party_files: vec![],
            site_packages_files: vec![],
        }
    }

    /// Use the vendored stdlib stubs included in the Ruff binary for this test case
    pub(crate) fn with_vendored_typeshed(self) -> TestCaseBuilder<VendoredTypeshed> {
        let TestCaseBuilder {
            typeshed_option: _,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;
        TestCaseBuilder {
            typeshed_option: VendoredTypeshed,
            target_version,
            first_party_files,
            site_packages_files,
        }
    }

    /// Use a mock typeshed directory for this test case
    pub(crate) fn with_custom_typeshed(
        self,
        typeshed: MockedTypeshed,
    ) -> TestCaseBuilder<MockedTypeshed> {
        let TestCaseBuilder {
            typeshed_option: _,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;
        TestCaseBuilder {
            typeshed_option: typeshed,
            target_version,
            first_party_files,
            site_packages_files,
        }
    }

    pub(crate) fn build(self) -> TestCase<()> {
        let TestCase {
            db,
            src,
            stdlib: _,
            site_packages,
            target_version,
        } = self.with_custom_typeshed(MockedTypeshed::default()).build();
        TestCase {
            db,
            src,
            stdlib: (),
            site_packages,
            target_version,
        }
    }
}

impl TestCaseBuilder<MockedTypeshed> {
    pub(crate) fn build(self) -> TestCase<SystemPathBuf> {
        let TestCaseBuilder {
            typeshed_option,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;

        let mut db = TestDb::new();

        let site_packages =
            Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        Program::from_settings(
            &db,
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: vec![site_packages.clone()],
                },
            },
        )
        .expect("Valid program settings");

        TestCase {
            db,
            src,
            stdlib: typeshed.join("stdlib"),
            site_packages,
            target_version,
        }
    }

    fn build_typeshed_mock(db: &mut TestDb, typeshed_to_build: &MockedTypeshed) -> SystemPathBuf {
        let typeshed = SystemPathBuf::from("/typeshed");
        let MockedTypeshed {
            stdlib_files,
            versions,
        } = typeshed_to_build;
        Self::write_mock_directory(
            db,
            typeshed.join("stdlib"),
            stdlib_files
                .iter()
                .copied()
                .chain(std::iter::once(("VERSIONS", *versions))),
        );
        typeshed
    }
}

impl TestCaseBuilder<VendoredTypeshed> {
    pub(crate) fn build(self) -> TestCase<VendoredPathBuf> {
        let TestCaseBuilder {
            typeshed_option: VendoredTypeshed,
            target_version,
            first_party_files,
            site_packages_files,
        } = self;

        let mut db = TestDb::new();

        let site_packages =
            Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        Program::from_settings(
            &db,
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: None,
                    site_packages: vec![site_packages.clone()],
                },
            },
        )
        .expect("Valid search path settings");

        TestCase {
            db,
            src,
            stdlib: VendoredPathBuf::from("stdlib"),
            site_packages,
            target_version,
        }
    }
}
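As a concrete illustration of the builder API above, here is a hedged sketch of a test setup that mocks typeshed; the stub file name, `VERSIONS` line, and `main.py` contents are all invented for the example:

```rs
const TYPESHED: MockedTypeshed = MockedTypeshed {
    stdlib_files: &[("asyncio/__init__.pyi", "")],
    versions: "asyncio: 3.8-",
};

fn example_case() {
    // Builds a TestDb with /src, /site-packages and /typeshed mock directories
    // and registers the corresponding Program settings.
    let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
        .with_src_files(&[("main.py", "import asyncio")])
        .with_custom_typeshed(TYPESHED)
        .with_target_version(PythonVersion::PY38)
        .build();
    // With a MockedTypeshed, `stdlib` is a SystemPathBuf pointing at /typeshed/stdlib.
    let _ = (db, stdlib);
}
```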
@@ -0,0 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};

mod vendored;
mod versions;
@@ -1,17 +1,25 @@
-pub(crate) mod versions;
+use once_cell::sync::Lazy;
+
+use ruff_db::vendored::VendoredFileSystem;
+
+// The file path here is hardcoded in this crate's `build.rs` script.
+// Luckily this crate will fail to build if this file isn't available at build time.
+static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
+
+pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
+    static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
+        Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
+    &VENDORED_TYPESHED_STUBS
+}

 #[cfg(test)]
 mod tests {
     use std::io::{self, Read};
     use std::path::Path;

     use ruff_db::vendored::VendoredFileSystem;
-    use ruff_db::vfs::VendoredPath;
+    use ruff_db::vendored::VendoredPath;

-    // The file path here is hardcoded in this crate's `build.rs` script.
-    // Luckily this crate will fail to build if this file isn't available at build time.
-    const TYPESHED_ZIP_BYTES: &[u8] =
-        include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
     use super::*;

     #[test]
     fn typeshed_zip_created_at_build_time() {
@@ -34,7 +42,7 @@ mod tests {
     #[test]
     fn typeshed_vfs_consistent_with_vendored_stubs() {
         let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
-        let vendored_typeshed_stubs = VendoredFileSystem::new(TYPESHED_ZIP_BYTES).unwrap();
+        let vendored_typeshed_stubs = vendored_typeshed_stubs();

         let mut empty_iterator = true;
         for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
@@ -64,7 +72,7 @@ mod tests {

         let vendored_path_kind = vendored_typeshed_stubs
             .metadata(vendored_path)
-            .unwrap_or_else(|| {
+            .unwrap_or_else(|_| {
                 panic!(
                     "Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem!
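The `vendored_typeshed_stubs()` accessor added above lazily unpacks the zipped typeshed archive once and then hands out a shared `VendoredFileSystem`. A minimal sketch of reading one stub through it; the `read_to_string` call mirrors the one this PR uses for `stdlib/VERSIONS`, while the `inspect_vendored_stub` helper is invented for illustration:

```rs
fn inspect_vendored_stub() {
    let stubs = vendored_typeshed_stubs();
    // Paths are relative to the root of the zipped typeshed directory.
    let source = stubs.read_to_string("stdlib/builtins.pyi").unwrap();
    println!("builtins.pyi is {} bytes in the vendored zip", source.len());
}
```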
@@ -1,16 +1,96 @@
+use std::cell::OnceCell;
 use std::collections::BTreeMap;
 use std::fmt;
 use std::num::{NonZeroU16, NonZeroUsize};
 use std::ops::{RangeFrom, RangeInclusive};
 use std::str::FromStr;

+use once_cell::sync::Lazy;
+use ruff_db::system::SystemPath;
 use rustc_hash::FxHashMap;

-use crate::module::ModuleName;
+use ruff_db::files::{system_path_to_file, File};

-#[derive(Debug, PartialEq, Eq)]
-pub struct TypeshedVersionsParseError {
-    line_number: NonZeroU16,
+use super::vendored::vendored_typeshed_stubs;
+use crate::db::Db;
+use crate::module_name::ModuleName;
+use crate::python_version::PythonVersion;
+
+#[derive(Debug)]
+pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
+
+impl<'db> LazyTypeshedVersions<'db> {
+    #[must_use]
+    pub(crate) fn new() -> Self {
+        Self(OnceCell::new())
+    }
+
+    /// Query whether a module exists at runtime in the stdlib on a certain Python version.
+    ///
+    /// Simply probing whether a file exists in typeshed is insufficient for this question,
+    /// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
+    /// will still be available (either in a custom typeshed dir or in our vendored copy)
+    /// even if the user specified Python 3.8 as the target version.
+    ///
+    /// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
+    /// as to whether the module exists on the specified target version. However, VERSIONS does not
+    /// provide comprehensive information on all submodules, meaning that this method sometimes
+    /// returns [`TypeshedVersionsQueryResult::MaybeExists`].
+    /// See [`TypeshedVersionsQueryResult`] for more details.
+    #[must_use]
+    pub(crate) fn query_module(
+        &self,
+        db: &'db dyn Db,
+        module: &ModuleName,
+        stdlib_root: Option<&SystemPath>,
+        target_version: PythonVersion,
+    ) -> TypeshedVersionsQueryResult {
+        let versions = self.0.get_or_init(|| {
+            let versions_path = if let Some(system_path) = stdlib_root {
+                system_path.join("VERSIONS")
+            } else {
+                return &VENDORED_VERSIONS;
+            };
+            let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
+                todo!(
+                    "Still need to figure out how to handle VERSIONS files being deleted \
+                    from custom typeshed directories! Expected a file to exist at {versions_path}"
+                )
+            };
+            // TODO(Alex/Micha): If VERSIONS is invalid,
+            // this should invalidate not just the specific module resolution we're currently attempting,
+            // but all type inference that depends on any standard-library types.
+            // Unwrapping here is not correct...
+            parse_typeshed_versions(db, versions_file).as_ref().unwrap()
+        });
+        versions.query_module(module, target_version)
+    }
+}
+
+#[salsa::tracked(return_ref)]
+pub(crate) fn parse_typeshed_versions(
+    db: &dyn Db,
+    versions_file: File,
+) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
+    // TODO: Handle IO errors
+    let file_content = versions_file
+        .read_to_string(db.upcast())
+        .unwrap_or_default();
+    file_content.parse()
+}
+
+static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
+    TypeshedVersions::from_str(
+        &vendored_typeshed_stubs()
+            .read_to_string("stdlib/VERSIONS")
+            .unwrap(),
+    )
+    .unwrap()
+});
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub(crate) struct TypeshedVersionsParseError {
+    line_number: Option<NonZeroU16>,
     reason: TypeshedVersionsParseErrorKind,
 }
@@ -20,10 +100,14 @@ impl fmt::Display for TypeshedVersionsParseError {
             line_number,
             reason,
         } = self;
-        write!(
-            f,
-            "Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
-        )
+        if let Some(line_number) = line_number {
+            write!(
+                f,
+                "Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
+            )
+        } else {
+            write!(f, "Error while parsing typeshed's VERSIONS file: {reason}")
+        }
     }
 }
@@ -37,8 +121,8 @@ impl std::error::Error for TypeshedVersionsParseError {
     }
 }

-#[derive(Debug, PartialEq, Eq)]
-pub enum TypeshedVersionsParseErrorKind {
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub(super) enum TypeshedVersionsParseErrorKind {
     TooManyLines(NonZeroUsize),
     UnexpectedNumberOfColons,
     InvalidModuleName(String),
@@ -81,38 +165,94 @@ impl fmt::Display for TypeshedVersionsParseErrorKind {
 }

 #[derive(Debug, PartialEq, Eq)]
-pub struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);
+pub(crate) struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);

 impl TypeshedVersions {
-    pub fn len(&self) -> usize {
-        self.0.len()
+    #[must_use]
+    fn exact(&self, module_name: &ModuleName) -> Option<&PyVersionRange> {
+        self.0.get(module_name)
     }

-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-
-    pub fn contains_module(&self, module_name: &ModuleName) -> bool {
-        self.0.contains_key(module_name)
-    }
-
-    pub fn module_exists_on_version(
+    #[must_use]
+    fn query_module(
         &self,
-        module: ModuleName,
-        version: impl Into<PyVersion>,
-    ) -> bool {
-        let version = version.into();
-        let mut module: Option<ModuleName> = Some(module);
-        while let Some(module_to_try) = module {
-            if let Some(range) = self.0.get(&module_to_try) {
-                return range.contains(version);
+        module: &ModuleName,
+        target_version: PythonVersion,
+    ) -> TypeshedVersionsQueryResult {
+        if let Some(range) = self.exact(module) {
+            if range.contains(target_version) {
+                TypeshedVersionsQueryResult::Exists
+            } else {
+                TypeshedVersionsQueryResult::DoesNotExist
             }
-            module = module_to_try.parent();
+        } else {
+            let mut module = module.parent();
+            while let Some(module_to_try) = module {
+                if let Some(range) = self.exact(&module_to_try) {
+                    return {
+                        if range.contains(target_version) {
+                            TypeshedVersionsQueryResult::MaybeExists
+                        } else {
+                            TypeshedVersionsQueryResult::DoesNotExist
+                        }
+                    };
+                }
+                module = module_to_try.parent();
+            }
+            TypeshedVersionsQueryResult::DoesNotExist
         }
-        false
     }
 }

+/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
+/// "Does this module exist in the stdlib at runtime on a certain target version?"
+#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
+pub(crate) enum TypeshedVersionsQueryResult {
+    /// The module definitely exists in the stdlib at runtime on the user-specified target version.
+    ///
+    /// For example:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the `asyncio.tasks` module exists in the stdlib
+    /// - The VERSIONS file contains the line `asyncio.tasks: 3.8-`
+    Exists,
+
+    /// The module definitely does not exist in the stdlib on the user-specified target version.
+    ///
+    /// For example:
+    /// - We're querying whether the `foo` module exists in the stdlib
+    /// - There is no top-level `foo` module in VERSIONS
+    ///
+    /// OR:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the module `importlib.abc` exists in the stdlib
+    /// - The VERSIONS file contains the line `importlib.abc: 3.10-`,
+    ///   indicating that the module was added in 3.10
+    ///
+    /// OR:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the module `collections.abc` exists in the stdlib
+    /// - The VERSIONS file does not contain any information about the `collections.abc` submodule,
+    ///   but *does* contain the line `collections: 3.10-`,
+    ///   indicating that the entire `collections` package was added in Python 3.10.
+    DoesNotExist,
+
+    /// The module potentially exists in the stdlib and, if it does,
+    /// it definitely exists on the user-specified target version.
+    ///
+    /// This variant is only relevant for submodules,
+    /// for which the typeshed VERSIONS file does not provide comprehensive information.
+    /// (The VERSIONS file is guaranteed to provide information about all top-level stdlib modules and packages,
+    /// but not necessarily about all submodules within each top-level package.)
+    ///
+    /// For example:
+    /// - The target version is Python 3.8
+    /// - We're querying whether the `asyncio.staggered` module exists in the stdlib
+    /// - The typeshed VERSIONS file contains the line `asyncio: 3.8-`,
+    ///   indicating that the `asyncio` package was added in Python 3.8,
+    ///   but does not contain any explicit information about the `asyncio.staggered` submodule.
+    MaybeExists,
+}
+
 impl FromStr for TypeshedVersions {
     type Err = TypeshedVersionsParseError;
@@ -125,7 +265,7 @@ impl FromStr for TypeshedVersions {

         let Ok(line_number) = NonZeroU16::try_from(line_number) else {
             return Err(TypeshedVersionsParseError {
-                line_number: NonZeroU16::MAX,
+                line_number: None,
                 reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
             });
         };
@@ -141,14 +281,14 @@
         let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
         else {
             return Err(TypeshedVersionsParseError {
-                line_number,
+                line_number: Some(line_number),
                 reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
             });
         };

         let Some(module_name) = ModuleName::new(module_name) else {
             return Err(TypeshedVersionsParseError {
-                line_number,
+                line_number: Some(line_number),
                 reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                     module_name.to_string(),
                 ),
@@ -159,7 +299,7 @@
             Ok(version) => map.insert(module_name, version),
             Err(reason) => {
                 return Err(TypeshedVersionsParseError {
-                    line_number,
+                    line_number: Some(line_number),
                     reason,
                 })
             }
@@ -180,14 +320,15 @@ impl fmt::Display for TypeshedVersions {
     }
 }

-#[derive(Debug, Clone, Eq, PartialEq)]
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
 enum PyVersionRange {
-    AvailableFrom(RangeFrom<PyVersion>),
-    AvailableWithin(RangeInclusive<PyVersion>),
+    AvailableFrom(RangeFrom<PythonVersion>),
+    AvailableWithin(RangeInclusive<PythonVersion>),
 }

 impl PyVersionRange {
-    fn contains(&self, version: PyVersion) -> bool {
+    #[must_use]
+    fn contains(&self, version: PythonVersion) -> bool {
         match self {
             Self::AvailableFrom(inner) => inner.contains(&version),
             Self::AvailableWithin(inner) => inner.contains(&version),
@@ -201,9 +342,14 @@ impl FromStr for PyVersionRange {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let mut parts = s.split('-').map(str::trim);
         match (parts.next(), parts.next(), parts.next()) {
-            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
+            (Some(lower), Some(""), None) => {
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                Ok(Self::AvailableFrom(lower..))
+            }
             (Some(lower), Some(upper), None) => {
-                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                let upper = PythonVersion::from_versions_file_string(upper)?;
+                Ok(Self::AvailableWithin(lower..=upper))
             }
             _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
         }
@@ -221,87 +367,20 @@ impl fmt::Display for PyVersionRange {
     }
 }

-#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub struct PyVersion {
-    major: u8,
-    minor: u8,
-}
-
-impl FromStr for PyVersion {
-    type Err = TypeshedVersionsParseErrorKind;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
+impl PythonVersion {
+    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
         let mut parts = s.split('.').map(str::trim);
         let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
             return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                 s.to_string(),
             ));
         };
-        let major = match u8::from_str(major) {
-            Ok(major) => major,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        let minor = match u8::from_str(minor) {
-            Ok(minor) => minor,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        Ok(Self { major, minor })
-    }
-}
-
-impl fmt::Display for PyVersion {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let PyVersion { major, minor } = self;
-        write!(f, "{major}.{minor}")
-    }
-}
-
-// TODO: unify with the PythonVersion enum in the linter/formatter crates?
-#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
-pub enum SupportedPyVersion {
-    Py37,
-    #[default]
-    Py38,
-    Py39,
-    Py310,
-    Py311,
-    Py312,
-    Py313,
-}
-
-impl From<SupportedPyVersion> for PyVersion {
-    fn from(value: SupportedPyVersion) -> Self {
-        match value {
-            SupportedPyVersion::Py37 => PyVersion { major: 3, minor: 7 },
-            SupportedPyVersion::Py38 => PyVersion { major: 3, minor: 8 },
-            SupportedPyVersion::Py39 => PyVersion { major: 3, minor: 9 },
-            SupportedPyVersion::Py310 => PyVersion {
-                major: 3,
-                minor: 10,
-            },
-            SupportedPyVersion::Py311 => PyVersion {
-                major: 3,
-                minor: 11,
-            },
-            SupportedPyVersion::Py312 => PyVersion {
-                major: 3,
-                minor: 12,
-            },
-            SupportedPyVersion::Py313 => PyVersion {
-                major: 3,
-                minor: 13,
-            },
-        }
-    }
-}
+        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
+            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
+                version: s.to_string(),
+                err: int_parse_error,
+            }
+        })
+    }
+}
@@ -310,14 +389,26 @@ mod tests {
     use std::num::{IntErrorKind, NonZeroU16};
     use std::path::Path;

-    use super::*;
-
     use insta::assert_snapshot;

+    use super::*;
+
     const TYPESHED_STDLIB_DIR: &str = "stdlib";

     #[allow(unsafe_code)]
-    const ONE: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(1) };
+    const ONE: Option<NonZeroU16> = Some(unsafe { NonZeroU16::new_unchecked(1) });
+
+    impl TypeshedVersions {
+        #[must_use]
+        fn contains_exact(&self, module: &ModuleName) -> bool {
+            self.exact(module).is_some()
+        }
+
+        #[must_use]
+        fn len(&self) -> usize {
+            self.0.len()
+        }
+    }

     #[test]
     fn can_parse_vendored_versions_file() {
@@ -334,23 +425,36 @@
         let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
         let audioop = ModuleName::new_static("audioop").unwrap();

-        assert!(versions.contains_module(&asyncio));
-        assert!(versions.module_exists_on_version(asyncio, SupportedPyVersion::Py310));
-
-        assert!(versions.contains_module(&asyncio_staggered));
-        assert!(
-            versions.module_exists_on_version(asyncio_staggered.clone(), SupportedPyVersion::Py38)
+        assert!(versions.contains_exact(&asyncio));
+        assert_eq!(
+            versions.query_module(&asyncio, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::Exists
         );
-        assert!(!versions.module_exists_on_version(asyncio_staggered, SupportedPyVersion::Py37));
-
-        assert!(versions.contains_module(&audioop));
-        assert!(versions.module_exists_on_version(audioop.clone(), SupportedPyVersion::Py312));
-        assert!(!versions.module_exists_on_version(audioop, SupportedPyVersion::Py313));
+        assert!(versions.contains_exact(&asyncio_staggered));
+        assert_eq!(
+            versions.query_module(&asyncio_staggered, PythonVersion::PY38),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            versions.query_module(&asyncio_staggered, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+
+        assert!(versions.contains_exact(&audioop));
+        assert_eq!(
+            versions.query_module(&audioop, PythonVersion::PY312),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            versions.query_module(&audioop, PythonVersion::PY313),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
     }

     #[test]
     fn typeshed_versions_consistent_with_vendored_stubs() {
-        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
+        const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
         let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
         let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
@@ -393,7 +497,7 @@
             let top_level_module = ModuleName::new(top_level_module)
                 .unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));

-            assert!(vendored_typeshed_versions.contains_module(&top_level_module));
+            assert!(vendored_typeshed_versions.contains_exact(&top_level_module));
         }

         assert!(
@@ -426,30 +530,102 @@ foo: 3.8- # trailing comment
 foo: 3.8-
 "###
         );
+    }

-        let foo = ModuleName::new_static("foo").unwrap();
+    #[test]
+    fn version_within_range_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
+        let bar = ModuleName::new_static("bar").unwrap();
+
+        assert!(parsed_versions.contains_exact(&bar));
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn version_from_range_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("foo: 3.8-").unwrap();
+        let foo = ModuleName::new_static("foo").unwrap();
+
+        assert!(parsed_versions.contains_exact(&foo));
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY38),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&foo, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::Exists
+        );
+    }
+
+    #[test]
+    fn explicit_submodule_parsed_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar.baz: 3.1-3.9").unwrap();
+        let bar_baz = ModuleName::new_static("bar.baz").unwrap();
+
+        assert!(parsed_versions.contains_exact(&bar_baz));
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
+            TypeshedVersionsQueryResult::Exists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn implicit_submodule_queried_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("bar: 2.7-3.10").unwrap();
+        let bar_eggs = ModuleName::new_static("bar.eggs").unwrap();
+
+        assert!(!parsed_versions.contains_exact(&bar_eggs));
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::MaybeExists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
+            TypeshedVersionsQueryResult::MaybeExists
+        );
+        assert_eq!(
+            parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+    }
+
+    #[test]
+    fn nonexistent_module_queried_correctly() {
+        let parsed_versions = TypeshedVersions::from_str("eggs: 3.8-").unwrap();
+        let spam = ModuleName::new_static("spam").unwrap();

-        assert!(parsed_versions.contains_module(&foo));
-        assert!(!parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py38));
-        assert!(parsed_versions.module_exists_on_version(foo, SupportedPyVersion::Py311));
-
-        assert!(parsed_versions.contains_module(&bar));
-        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py310));
-        assert!(!parsed_versions.module_exists_on_version(bar, SupportedPyVersion::Py311));
-
-        assert!(parsed_versions.contains_module(&bar_baz));
-        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py37));
-        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py39));
-        assert!(!parsed_versions.module_exists_on_version(bar_baz, SupportedPyVersion::Py310));
-
-        assert!(!parsed_versions.contains_module(&spam));
-        assert!(!parsed_versions.module_exists_on_version(spam.clone(), SupportedPyVersion::Py37));
-        assert!(!parsed_versions.module_exists_on_version(spam, SupportedPyVersion::Py313));
+        assert!(!parsed_versions.contains_exact(&spam));
+        assert_eq!(
+            parsed_versions.query_module(&spam, PythonVersion::PY37),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
+        assert_eq!(
+            parsed_versions.query_module(&spam, PythonVersion::PY313),
+            TypeshedVersionsQueryResult::DoesNotExist
+        );
     }

     #[test]
@@ -465,7 +641,7 @@ foo: 3.8- # trailing comment
         assert_eq!(
             TypeshedVersions::from_str(&massive_versions_file),
             Err(TypeshedVersionsParseError {
-                line_number: NonZeroU16::MAX,
+                line_number: None,
                 reason: TypeshedVersionsParseErrorKind::TooManyLines(
                     NonZeroUsize::new(too_many + 1 - offset).unwrap()
                 )
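The three `TypeshedVersionsQueryResult` variants introduced in this diff can be exercised directly through `TypeshedVersions::from_str`, exactly as the new tests do. A compact sketch tying the doc-comment examples together; the `versions_semantics_demo` function and its VERSIONS line are invented for illustration:

```rs
fn versions_semantics_demo() {
    // `asyncio` is listed explicitly; `asyncio.staggered` is not.
    let versions = TypeshedVersions::from_str("asyncio: 3.8-").unwrap();

    let pkg = ModuleName::new_static("asyncio").unwrap();
    let sub = ModuleName::new_static("asyncio.staggered").unwrap();

    // Top-level entry: VERSIONS gives an unambiguous answer.
    assert_eq!(
        versions.query_module(&pkg, PythonVersion::PY38),
        TypeshedVersionsQueryResult::Exists
    );
    // Unlisted submodule of a listed package: only "maybe".
    assert_eq!(
        versions.query_module(&sub, PythonVersion::PY38),
        TypeshedVersionsQueryResult::MaybeExists
    );
    // Outside the parent package's range: definitely absent.
    assert_eq!(
        versions.query_module(&sub, PythonVersion::PY37),
        TypeshedVersionsQueryResult::DoesNotExist
    );
}
```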
@@ -1,4 +1,4 @@
-use ruff_python_ast::{AnyNodeRef, NodeKind};
+use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
 use ruff_text_size::{Ranged, TextRange};

 /// Compact key for a node for use in a hash map.
@@ -11,7 +11,19 @@ pub(super) struct NodeKey {
 }

 impl NodeKey {
-    pub(super) fn from_node<'a, N>(node: N) -> Self
+    #[inline]
+    pub(super) fn from_node<'a, N>(node: &N) -> Self
+    where
+        N: AstNode,
+    {
+        NodeKey {
+            kind: node.kind(),
+            range: node.range(),
+        }
+    }
+
+    #[inline]
+    pub(super) fn from_ref<'a, N>(node: N) -> Self
     where
         N: Into<AnyNodeRef<'a>>,
     {
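`NodeKey` identifies an AST node by its `NodeKind` plus source `TextRange`, which is enough to distinguish any two nodes in one file. A hedged sketch of the invariant that the split into `from_node`/`from_ref` preserves, assuming `ruff_python_ast` implements `AstNode` for `ExprName` and provides a `From<&ExprName>` conversion to `AnyNodeRef`, as the trait bounds above suggest:

```rs
fn node_key_agreement(expr: &ruff_python_ast::ExprName) {
    // Building the key from a typed node reference...
    let a = NodeKey::from_node(expr);
    // ...or from a type-erased AnyNodeRef must yield the same key,
    // since both paths read the same kind() and range().
    let b = NodeKey::from_ref(ruff_python_ast::AnyNodeRef::from(expr));
    assert!(a == b);
}
```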
crates/red_knot_python_semantic/src/program.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::SystemPathBuf;

use crate::module_resolver::SearchPaths;
use crate::Db;

#[salsa::input(singleton)]
pub struct Program {
    pub target_version: PythonVersion,

    #[default]
    #[return_ref]
    pub(crate) search_paths: SearchPaths,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
        let ProgramSettings {
            target_version,
            search_paths,
        } = settings;

        tracing::info!("Target version: Python {target_version}");

        let search_paths = SearchPaths::from_settings(db, search_paths)
            .with_context(|| "Invalid search path settings")?;

        Ok(Program::builder(target_version)
            .durability(Durability::HIGH)
            .search_paths(search_paths)
            .new(db))
    }

    pub fn update_search_paths(
        &self,
        db: &mut dyn Db,
        search_path_settings: SearchPathSettings,
    ) -> anyhow::Result<()> {
        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

        if self.search_paths(db) != &search_paths {
            tracing::debug!("Update search paths");
            self.set_search_paths(db).to(search_paths);
        }

        Ok(())
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's `MYPYPATH` environment variable,
    /// or pyright's `stubPath` configuration setting.
    pub extra_paths: Vec<SystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub src_root: SystemPathBuf,

    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The paths to the user's `site-packages` directories, where third-party packages from `PyPI` are installed.
    pub site_packages: Vec<SystemPathBuf>,
}
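The settings structs above are plain data; a `Program` is then created once per database via `Program::from_settings`. A hedged sketch of wiring them together, with invented paths (`configure` is a hypothetical caller, not part of this PR):

```rs
fn configure(db: &dyn Db) -> anyhow::Result<Program> {
    let settings = ProgramSettings {
        target_version: PythonVersion::PY312,
        search_paths: SearchPathSettings {
            // Highest-priority user-provided paths (like mypy's MYPYPATH).
            extra_paths: vec![],
            // First-party code root.
            src_root: SystemPathBuf::from("/workspace/src"),
            // None means: fall back to the vendored typeshed stubs.
            custom_typeshed: None,
            site_packages: vec![SystemPathBuf::from(
                "/workspace/.venv/lib/python3.12/site-packages",
            )],
        },
    };
    Program::from_settings(db, settings)
}
```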
crates/red_knot_python_semantic/src/python_version.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use std::fmt;

/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
    pub major: u8,
    pub minor: u8,
}

impl PythonVersion {
    pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
    pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
    pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
    pub const PY310: PythonVersion = PythonVersion {
        major: 3,
        minor: 10,
    };
    pub const PY311: PythonVersion = PythonVersion {
        major: 3,
        minor: 11,
    };
    pub const PY312: PythonVersion = PythonVersion {
        major: 3,
        minor: 12,
    };
    pub const PY313: PythonVersion = PythonVersion {
        major: 3,
        minor: 13,
    };

    pub fn free_threaded_build_available(self) -> bool {
        self >= PythonVersion::PY313
    }
}

impl Default for PythonVersion {
    fn default() -> Self {
        Self::PY38
    }
}

impl TryFrom<(&str, &str)> for PythonVersion {
    type Error = std::num::ParseIntError;

    fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
        let (major, minor) = value;
        Ok(Self {
            major: major.parse()?,
            minor: minor.parse()?,
        })
    }
}

impl fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PythonVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}
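Because `PythonVersion` derives `Ord` on `(major, minor)` in that field order, version gates like `free_threaded_build_available` reduce to plain comparisons. A small sketch (the demo function and its string inputs are invented):

```rs
fn python_version_demo() -> Result<(), std::num::ParseIntError> {
    let v = PythonVersion::try_from(("3", "13"))?;
    // Tuple-style derive: (3, 9) < (3, 10) < (3, 13) compare correctly.
    assert!(PythonVersion::PY39 < PythonVersion::PY310);
    assert!(v >= PythonVersion::PY313);
    assert!(v.free_threaded_build_available());
    assert_eq!(v.to_string(), "3.13");
    Ok(())
}
```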
@@ -2,25 +2,31 @@ use std::iter::FusedIterator;
 use std::sync::Arc;

 use rustc_hash::FxHashMap;
+use salsa::plumbing::AsId;

+use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
-use ruff_db::vfs::VfsFile;
 use ruff_index::{IndexSlice, IndexVec};

 use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
 use crate::semantic_index::ast_ids::AstIds;
 use crate::semantic_index::builder::SemanticIndexBuilder;
-use crate::semantic_index::definition::{Definition, DefinitionNodeKey, DefinitionNodeRef};
+use crate::semantic_index::definition::{Definition, DefinitionNodeKey};
+use crate::semantic_index::expression::Expression;
 use crate::semantic_index::symbol::{
-    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, PublicSymbolId, Scope, ScopeId,
-    ScopedSymbolId, SymbolTable,
+    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
 };
 use crate::semantic_index::use_def::UseDefMap;
 use crate::Db;

 pub mod ast_ids;
 mod builder;
 pub mod definition;
+pub mod expression;
 pub mod symbol;
 mod use_def;

+pub(crate) use self::use_def::{DefinitionWithConstraints, DefinitionWithConstraintsIterator};
+
 type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
@@ -28,8 +34,8 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
 ///
 /// Prefer using [`symbol_table`] when working with symbols from a single scope.
 #[salsa::tracked(return_ref, no_eq)]
-pub(crate) fn semantic_index(db: &dyn Db, file: VfsFile) -> SemanticIndex<'_> {
-    let _span = tracing::trace_span!("semantic_index", ?file).entered();
+pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
+    let _span = tracing::trace_span!("semantic_index", file = %file.path(db)).entered();

     let parsed = parsed_module(db.upcast(), file);
@@ -42,57 +48,65 @@ pub(crate) fn semantic_index(db: &dyn Db, file: VfsFile) -> SemanticIndex<'_> {
 /// Salsa can avoid invalidating dependent queries if this scope's symbol table
 /// is unchanged.
 #[salsa::tracked]
-pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable<'db>> {
-    let _span = tracing::trace_span!("symbol_table", ?scope).entered();
-    let index = semantic_index(db, scope.file(db));
+pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
+    let file = scope.file(db);
+    let _span =
+        tracing::trace_span!("symbol_table", scope=?scope.as_id(), file=%file.path(db)).entered();
+    let index = semantic_index(db, file);

     index.symbol_table(scope.file_scope_id(db))
 }

-/// Returns the root scope of `file`.
+/// Returns the use-def map for a specific `scope`.
+///
+/// Using [`use_def_map`] over [`semantic_index`] has the advantage that
+/// Salsa can avoid invalidating dependent queries if this scope's use-def map
+/// is unchanged.
 #[salsa::tracked]
-pub(crate) fn root_scope(db: &dyn Db, file: VfsFile) -> ScopeId<'_> {
-    let _span = tracing::trace_span!("root_scope", ?file).entered();
+pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
+    let file = scope.file(db);
+    let _span =
+        tracing::trace_span!("use_def_map", scope=?scope.as_id(), file=%file.path(db)).entered();
+    let index = semantic_index(db, file);

-    FileScopeId::root().to_scope_id(db, file)
+    index.use_def_map(scope.file_scope_id(db))
 }

-/// Returns the symbol with the given name in `file`'s public scope or `None` if
-/// no symbol with the given name exists.
-pub(crate) fn public_symbol<'db>(
-    db: &'db dyn Db,
-    file: VfsFile,
-    name: &str,
-) -> Option<PublicSymbolId<'db>> {
-    let root_scope = root_scope(db, file);
-    let symbol_table = symbol_table(db, root_scope);
-    let local = symbol_table.symbol_id_by_name(name)?;
-    Some(local.to_public_symbol(db, file))
+/// Returns the module global scope of `file`.
+#[salsa::tracked]
+pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
+    let _span = tracing::trace_span!("global_scope", file = %file.path(db)).entered();
+
+    FileScopeId::global().to_scope_id(db, file)
 }

-/// The symbol tables for an entire file.
+/// The symbol tables and use-def maps for all scopes in a file.
 #[derive(Debug)]
 pub(crate) struct SemanticIndex<'db> {
     /// List of all symbol tables in this file, indexed by scope.
-    symbol_tables: IndexVec<FileScopeId, Arc<SymbolTable<'db>>>,
+    symbol_tables: IndexVec<FileScopeId, Arc<SymbolTable>>,

     /// List of all scopes in this file.
     scopes: IndexVec<FileScopeId, Scope>,

-    /// Maps expressions to their corresponding scope.
-    /// We can't use [`ExpressionId`] here, because the challenge is how to get from
-    /// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
+    /// Map expressions to their corresponding scope.
     scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,

-    /// Maps from a node creating a definition node to its definition.
+    /// Map from a node creating a definition to its definition.
     definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,

+    /// Map from a standalone expression to its [`Expression`] ingredient.
+    expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
+
     /// Map from nodes that create a scope to the scope they create.
     scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,

     /// Map from the file-local [`FileScopeId`] to the salsa-ingredient [`ScopeId`].
     scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,

+    /// Use-def map for each scope in this file.
+    use_def_maps: IndexVec<FileScopeId, Arc<UseDefMap<'db>>>,
+
     /// Lookup table to map between node ids and ast nodes.
     ///
     /// Note: We should not depend on this map when analysing other files or
@@ -103,12 +117,20 @@ pub(crate) struct SemanticIndex<'db> {
 impl<'db> SemanticIndex<'db> {
     /// Returns the symbol table for a specific scope.
     ///
-    /// Use the Salsa cached [`symbol_table`] query if you only need the
+    /// Use the Salsa cached [`symbol_table()`] query if you only need the
     /// symbol table for a single scope.
-    pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable<'db>> {
+    pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
         self.symbol_tables[scope_id].clone()
     }

+    /// Returns the use-def map for a specific scope.
+    ///
+    /// Use the Salsa cached [`use_def_map()`] query if you only need the
+    /// use-def map for a single scope.
+    pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
+        self.use_def_maps[scope_id].clone()
+    }
+
     pub(crate) fn ast_ids(&self, scope_id: FileScopeId) -> &AstIds {
         &self.ast_ids[scope_id]
     }
@@ -132,6 +154,10 @@ impl<'db> SemanticIndex<'db> {
         &self.scopes[id]
     }

+    pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
+        self.scope_ids_by_scope.iter().copied()
+    }
+
     /// Returns the id of the parent scope.
     pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
         let scope = self.scope(scope_id);
@@ -157,16 +183,28 @@
     }

     /// Returns an iterator over all ancestors of `scope`, starting with `scope` itself.
+    #[allow(unused)]
     pub(crate) fn ancestor_scopes(&self, scope: FileScopeId) -> AncestorsIter {
         AncestorsIter::new(self, scope)
     }

-    /// Returns the [`Definition`] salsa ingredient for `definition_node`.
-    pub(crate) fn definition<'def>(
+    /// Returns the [`Definition`] salsa ingredient for `definition_key`.
+    pub(crate) fn definition(
         &self,
-        definition_node: impl Into<DefinitionNodeRef<'def>>,
+        definition_key: impl Into<DefinitionNodeKey>,
     ) -> Definition<'db> {
-        self.definitions_by_node[&definition_node.into().key()]
+        self.definitions_by_node[&definition_key.into()]
     }

+    /// Returns the [`Expression`] ingredient for an expression node.
+    /// Panics if we have no expression ingredient for that node. We can only call this method for
+    /// standalone-inferable expressions, which we call `add_standalone_expression` for in
+    /// [`SemanticIndexBuilder`].
+    pub(crate) fn expression(
+        &self,
+        expression_key: impl Into<ExpressionNodeKey>,
+    ) -> Expression<'db> {
+        self.expressions_by_node[&expression_key.into()]
+    }
+
     /// Returns the id of the scope that `node` creates. This is different from [`Definition::scope`] which
@@ -176,8 +214,6 @@ impl<'db> SemanticIndex<'db> {
     }
 }

-/// ID that uniquely identifies an expression inside a [`Scope`].
-
 pub struct AncestorsIter<'a> {
     scopes: &'a IndexSlice<FileScopeId, Scope>,
     next_id: Option<FileScopeId>,
@@ -272,24 +308,44 @@ impl FusedIterator for ChildrenIter<'_> {}

 #[cfg(test)]
 mod tests {
+    use ruff_db::files::{system_path_to_file, File};
     use ruff_db::parsed::parsed_module;
-    use ruff_db::vfs::{system_path_to_file, VfsFile};
+    use ruff_db::system::DbWithTestSystem;
     use ruff_python_ast as ast;
     use ruff_text_size::{Ranged, TextRange};

     use crate::db::tests::TestDb;
-    use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
-    use crate::semantic_index::{root_scope, semantic_index, symbol_table};
+    use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
+    use crate::semantic_index::definition::{Definition, DefinitionKind};
+    use crate::semantic_index::symbol::{
+        FileScopeId, Scope, ScopeKind, ScopedSymbolId, SymbolTable,
+    };
+    use crate::semantic_index::use_def::UseDefMap;
+    use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
+    use crate::Db;

+    impl UseDefMap<'_> {
+        fn first_public_definition(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
+            self.public_definitions(symbol)
+                .next()
+                .map(|constrained_definition| constrained_definition.definition)
+        }
+
+        fn first_use_definition(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
+            self.use_definitions(use_id)
+                .next()
+                .map(|constrained_definition| constrained_definition.definition)
+        }
+    }
+
     struct TestCase {
         db: TestDb,
-        file: VfsFile,
+        file: File,
     }

     fn test_case(content: impl ToString) -> TestCase {
-        let db = TestDb::new();
-        db.memory_file_system()
-            .write_file("test.py", content)
-            .unwrap();
+        let mut db = TestDb::new();
+        db.write_file("test.py", content).unwrap();

         let file = system_path_to_file(&db, "test.py").unwrap();
@@ -306,95 +362,128 @@ mod tests {
|
||||
#[test]
|
||||
fn empty() {
|
||||
let TestCase { db, file } = test_case("");
|
||||
let root_table = symbol_table(&db, root_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
let root_names = names(&root_table);
|
||||
let global_names = names(&global_table);
|
||||
|
||||
assert_eq!(root_names, Vec::<&str>::new());
|
||||
assert_eq!(global_names, Vec::<&str>::new());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple() {
|
||||
let TestCase { db, file } = test_case("x");
|
||||
let root_table = symbol_table(&db, root_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&root_table), vec!["x"]);
|
||||
assert_eq!(names(&global_table), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn annotation_only() {
|
||||
let TestCase { db, file } = test_case("x: int");
|
||||
let root_table = symbol_table(&db, root_scope(&db, file));
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert_eq!(names(&root_table), vec!["int", "x"]);
|
||||
assert_eq!(names(&global_table), vec!["int", "x"]);
|
||||
// TODO record definition
|
||||
}
|
||||
|
||||
    #[test]
    fn import() {
        let TestCase { db, file } = test_case("import foo");
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let scope = global_scope(&db, file);
+       let global_table = symbol_table(&db, scope);

-       assert_eq!(names(&root_table), vec!["foo"]);
-       let foo = root_table.symbol_by_name("foo").unwrap();
+       assert_eq!(names(&global_table), vec!["foo"]);
+       let foo = global_table.symbol_id_by_name("foo").unwrap();

-       assert_eq!(foo.definitions().len(), 1);
+       let use_def = use_def_map(&db, scope);
+       let definition = use_def.first_public_definition(foo).unwrap();
+       assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
    }

    #[test]
    fn import_sub() {
        let TestCase { db, file } = test_case("import foo.bar");
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let global_table = symbol_table(&db, global_scope(&db, file));

-       assert_eq!(names(&root_table), vec!["foo"]);
+       assert_eq!(names(&global_table), vec!["foo"]);
    }

    #[test]
    fn import_as() {
        let TestCase { db, file } = test_case("import foo.bar as baz");
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let global_table = symbol_table(&db, global_scope(&db, file));

-       assert_eq!(names(&root_table), vec!["baz"]);
+       assert_eq!(names(&global_table), vec!["baz"]);
    }

    #[test]
    fn import_from() {
        let TestCase { db, file } = test_case("from bar import foo");
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let scope = global_scope(&db, file);
+       let global_table = symbol_table(&db, scope);

-       assert_eq!(names(&root_table), vec!["foo"]);
-       assert_eq!(
-           root_table
-               .symbol_by_name("foo")
-               .unwrap()
-               .definitions()
-               .len(),
-           1
-       );
+       assert_eq!(names(&global_table), vec!["foo"]);
        assert!(
-           root_table
+           global_table
                .symbol_by_name("foo")
-               .is_some_and(|symbol| { symbol.is_defined() || !symbol.is_used() }),
+               .is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
            "symbols that are defined get the defined flag"
        );

+       let use_def = use_def_map(&db, scope);
+       let definition = use_def
+           .first_public_definition(
+               global_table
+                   .symbol_id_by_name("foo")
+                   .expect("symbol to exist"),
+           )
+           .unwrap();
+       assert!(matches!(
+           definition.node(&db),
+           DefinitionKind::ImportFrom(_)
+       ));
    }

    #[test]
    fn assign() {
        let TestCase { db, file } = test_case("x = foo");
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let scope = global_scope(&db, file);
+       let global_table = symbol_table(&db, scope);

-       assert_eq!(names(&root_table), vec!["foo", "x"]);
-       assert_eq!(
-           root_table.symbol_by_name("x").unwrap().definitions().len(),
-           1
-       );
+       assert_eq!(names(&global_table), vec!["foo", "x"]);
        assert!(
-           root_table
+           global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
            "a symbol used but not defined in a scope should have only the used flag"
        );
+       let use_def = use_def_map(&db, scope);
+       let definition = use_def
+           .first_public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
+           .unwrap();
+       assert!(matches!(
+           definition.node(&db),
+           DefinitionKind::Assignment(_)
+       ));
    }

    #[test]
    fn augmented_assignment() {
        let TestCase { db, file } = test_case("x += 1");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(names(&global_table), vec!["x"]);

        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(
            definition.node(&db),
            DefinitionKind::AugmentedAssignment(_)
        ));
    }

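// Note: `x += 1` both reads and writes `x`, so the builder below records the
// target as used *and* defined. Roughly, and only as an assumption about the
// intended semantics rather than documented behavior:
//
//     x += 1    # ~ x = x.__iadd__(1): the load of `x` is the "use",
//               #   the rebinding is the "definition"
//
// which is why the test above asserts an `AugmentedAssignment` definition kind
// for the sole public definition of `x`.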
    #[test]
@@ -406,26 +495,32 @@ class C:
    y = 2
",
        );
-       let root_table = symbol_table(&db, root_scope(&db, file));
+       let global_table = symbol_table(&db, global_scope(&db, file));

-       assert_eq!(names(&root_table), vec!["C", "y"]);
+       assert_eq!(names(&global_table), vec!["C", "y"]);

        let index = semantic_index(&db, file);

-       let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
-       assert_eq!(scopes.len(), 1);
-
-       let (class_scope_id, class_scope) = scopes[0];
+       let [(class_scope_id, class_scope)] = index
+           .child_scopes(FileScopeId::global())
+           .collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope")
+       };
        assert_eq!(class_scope.kind(), ScopeKind::Class);

        assert_eq!(class_scope_id.to_scope_id(&db, file).name(&db), "C");

        let class_table = index.symbol_table(class_scope_id);
        assert_eq!(names(&class_table), vec!["x"]);
-       assert_eq!(
-           class_table.symbol_by_name("x").unwrap().definitions().len(),
-           1
-       );
+       let use_def = index.use_def_map(class_scope_id);
+       let definition = use_def
+           .first_public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
+           .unwrap();
+       assert!(matches!(
+           definition.node(&db),
+           DefinitionKind::Assignment(_)
+       ));
    }

    #[test]
@@ -438,27 +533,261 @@ y = 2
",
        );
        let index = semantic_index(&db, file);
-       let root_table = index.symbol_table(FileScopeId::root());
+       let global_table = index.symbol_table(FileScopeId::global());

-       assert_eq!(names(&root_table), vec!["func", "y"]);
+       assert_eq!(names(&global_table), vec!["func", "y"]);

-       let scopes = index.child_scopes(FileScopeId::root()).collect::<Vec<_>>();
-       assert_eq!(scopes.len(), 1);
-
-       let (function_scope_id, function_scope) = scopes[0];
+       let [(function_scope_id, function_scope)] = index
+           .child_scopes(FileScopeId::global())
+           .collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope")
+       };
        assert_eq!(function_scope.kind(), ScopeKind::Function);
        assert_eq!(function_scope_id.to_scope_id(&db, file).name(&db), "func");

        let function_table = index.symbol_table(function_scope_id);
        assert_eq!(names(&function_table), vec!["x"]);
-       assert_eq!(
-           function_table
-               .symbol_by_name("x")
-               .unwrap()
-               .definitions()
-               .len(),
-           1
-       );

+       let use_def = index.use_def_map(function_scope_id);
+       let definition = use_def
+           .first_public_definition(
+               function_table
+                   .symbol_id_by_name("x")
+                   .expect("symbol exists"),
+           )
+           .unwrap();
+       assert!(matches!(
+           definition.node(&db),
+           DefinitionKind::Assignment(_)
+       ));
    }

    #[test]
    fn function_parameter_symbols() {
        let TestCase { db, file } = test_case(
            "
def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&global_table), vec!["f", "str", "int"]);

        let [(function_scope_id, _function_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("Expected a function scope")
        };

        let function_table = index.symbol_table(function_scope_id);
        assert_eq!(
            names(&function_table),
            vec!["a", "b", "c", "args", "d", "kwargs"],
        );

        let use_def = index.use_def_map(function_scope_id);
        for name in ["a", "b", "c", "d"] {
            let definition = use_def
                .first_public_definition(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                definition.node(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let definition = use_def
                .first_public_definition(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
        }
    }

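// Note: `str` and `int` appear in the *global* symbol table above because
// parameter annotations and default values are evaluated in the enclosing
// scope, not inside the function body scope; only the parameter names
// themselves (`a` through `kwargs`) are bound in the function's own scope.
// This mirrors Python's scoping rules for `def` statements.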
    #[test]
    fn lambda_parameter_symbols() {
        let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");

        let index = semantic_index(&db, file);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert!(names(&global_table).is_empty());

        let [(lambda_scope_id, _lambda_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("Expected a lambda scope")
        };

        let lambda_table = index.symbol_table(lambda_scope_id);
        assert_eq!(
            names(&lambda_table),
            vec!["a", "b", "c", "args", "d", "kwargs"],
        );

        let use_def = index.use_def_map(lambda_scope_id);
        for name in ["a", "b", "c", "d"] {
            let definition = use_def
                .first_public_definition(
                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                definition.node(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let definition = use_def
                .first_public_definition(
                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
        }
    }

    /// Test case to validate that the comprehension scope is correctly identified and that the target
    /// variable is defined only in the comprehension scope and not in the global scope.
    #[test]
    fn comprehension_scope() {
        let TestCase { db, file } = test_case(
            "
[x for x in iter1]
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["iter1"]);

        let [(comprehension_scope_id, comprehension_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            comprehension_scope_id.to_scope_id(&db, file).name(&db),
            "<listcomp>"
        );

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
    }

    /// Test case to validate that the `x` variable used in the comprehension is referencing the
    /// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
    #[test]
    fn multiple_generators() {
        let TestCase { db, file } = test_case(
            "
[x for x in iter1 for x in iter2]
",
        );

        let index = semantic_index(&db, file);
        let [(comprehension_scope_id, _)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        let use_def = index.use_def_map(comprehension_scope_id);

        let module = parsed_module(&db, file).syntax();
        let element = module.body[0]
            .as_expr_stmt()
            .unwrap()
            .value
            .as_list_comp_expr()
            .unwrap()
            .elt
            .as_name_expr()
            .unwrap();
        let element_use_id =
            element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));

        let definition = use_def.first_use_definition(element_use_id).unwrap();
        let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
            panic!("expected generator definition")
        };
        let ast::Comprehension { target, .. } = comprehension.node();
        let name = target.as_name_expr().unwrap().id().as_str();

        assert_eq!(name, "x");
        assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
    }

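// Note: the range 23..24 picks out the *second* `x` target in the test source
// (offsets count from the leading newline in the string). In Python, each
// `for` clause rebinds the target in the same comprehension scope, so the
// element expression sees the innermost binding:
//
//     [x for x in iter1 for x in iter2]
//     # evaluates roughly like:
//     # result = []
//     # for x in iter1:
//     #     for x in iter2:
//     #         result.append(x)   # <- this `x` is the inner loop's target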
    /// Test case to validate that the nested comprehension creates a new scope which is a child of
    /// the outer comprehension scope and the variables are correctly defined in the respective
    /// scopes.
    #[test]
    fn nested_generators() {
        let TestCase { db, file } = test_case(
            "
[{x for x in iter2} for y in iter1]
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["iter1"]);

        let [(comprehension_scope_id, comprehension_scope)] = index
            .child_scopes(FileScopeId::global())
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one child scope")
        };

        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            comprehension_scope_id.to_scope_id(&db, file).name(&db),
            "<listcomp>"
        );

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);

        let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
            .child_scopes(comprehension_scope_id)
            .collect::<Vec<_>>()[..]
        else {
            panic!("expected one inner generator scope")
        };

        assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
        assert_eq!(
            inner_comprehension_scope_id
                .to_scope_id(&db, file)
                .name(&db),
            "<setcomp>"
        );

        let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);

        assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
    }

    #[test]
@@ -472,14 +801,15 @@ def func():
",
        );
        let index = semantic_index(&db, file);
-       let root_table = index.symbol_table(FileScopeId::root());
+       let global_table = index.symbol_table(FileScopeId::global());

-       assert_eq!(names(&root_table), vec!["func"]);
-       let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
-       assert_eq!(scopes.len(), 2);
-
-       let (func_scope1_id, func_scope_1) = scopes[0];
-       let (func_scope2_id, func_scope_2) = scopes[1];
+       assert_eq!(names(&global_table), vec!["func"]);
+       let [(func_scope1_id, func_scope_1), (func_scope2_id, func_scope_2)] = index
+           .child_scopes(FileScopeId::global())
+           .collect::<Vec<_>>()[..]
+       else {
+           panic!("expected two child scopes");
+       };

        assert_eq!(func_scope_1.kind(), ScopeKind::Function);

@@ -491,14 +821,16 @@ def func():
        let func2_table = index.symbol_table(func_scope2_id);
        assert_eq!(names(&func1_table), vec!["x"]);
        assert_eq!(names(&func2_table), vec!["y"]);
-       assert_eq!(
-           root_table
-               .symbol_by_name("func")
-               .unwrap()
-               .definitions()
-               .len(),
-           2
-       );

+       let use_def = index.use_def_map(FileScopeId::global());
+       let definition = use_def
+           .first_public_definition(
+               global_table
+                   .symbol_id_by_name("func")
+                   .expect("symbol exists"),
+           )
+           .unwrap();
+       assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
    }

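// Note: with two `def func(): ...` statements in the same scope, the old
// symbol-table API reported *two* definitions; the use-def map instead answers
// "which definition is visible at the end of the scope". Assuming that is what
// "public" means here, `first_public_definition` returns only the live one:
//
//     def func(): x = 1
//     def func(): y = 2
//     func  # <- resolves to the second definition; the first is dead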
    #[test]
@@ -511,22 +843,27 @@ def func[T]():
        );

        let index = semantic_index(&db, file);
-       let root_table = index.symbol_table(FileScopeId::root());
+       let global_table = index.symbol_table(FileScopeId::global());

-       assert_eq!(names(&root_table), vec!["func"]);
+       assert_eq!(names(&global_table), vec!["func"]);

-       let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
-       assert_eq!(scopes.len(), 1);
-       let (ann_scope_id, ann_scope) = scopes[0];
+       let [(ann_scope_id, ann_scope)] = index
+           .child_scopes(FileScopeId::global())
+           .collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope");
+       };

        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope_id.to_scope_id(&db, file).name(&db), "func");
        let ann_table = index.symbol_table(ann_scope_id);
        assert_eq!(names(&ann_table), vec!["T"]);

-       let scopes: Vec<_> = index.child_scopes(ann_scope_id).collect();
-       assert_eq!(scopes.len(), 1);
-       let (func_scope_id, func_scope) = scopes[0];
+       let [(func_scope_id, func_scope)] =
+           index.child_scopes(ann_scope_id).collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope");
+       };
        assert_eq!(func_scope.kind(), ScopeKind::Function);
        assert_eq!(func_scope_id.to_scope_id(&db, file).name(&db), "func");
        let func_table = index.symbol_table(func_scope_id);
@@ -543,14 +880,17 @@ class C[T]:
        );

        let index = semantic_index(&db, file);
-       let root_table = index.symbol_table(FileScopeId::root());
+       let global_table = index.symbol_table(FileScopeId::global());

-       assert_eq!(names(&root_table), vec!["C"]);
+       assert_eq!(names(&global_table), vec!["C"]);

-       let scopes: Vec<_> = index.child_scopes(FileScopeId::root()).collect();
-       assert_eq!(scopes.len(), 1);
-       let (ann_scope_id, ann_scope) = scopes[0];
+       let [(ann_scope_id, ann_scope)] = index
+           .child_scopes(FileScopeId::global())
+           .collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope");
+       };

        assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
        assert_eq!(ann_scope_id.to_scope_id(&db, file).name(&db), "C");
        let ann_table = index.symbol_table(ann_scope_id);
@@ -562,48 +902,47 @@ class C[T]:
            "type parameters are defined by the scope that introduces them"
        );

-       let scopes: Vec<_> = index.child_scopes(ann_scope_id).collect();
-       assert_eq!(scopes.len(), 1);
-       let (class_scope_id, class_scope) = scopes[0];
+       let [(class_scope_id, class_scope)] =
+           index.child_scopes(ann_scope_id).collect::<Vec<_>>()[..]
+       else {
+           panic!("expected one child scope");
+       };

        assert_eq!(class_scope.kind(), ScopeKind::Class);
        assert_eq!(class_scope_id.to_scope_id(&db, file).name(&db), "C");
        assert_eq!(names(&index.symbol_table(class_scope_id)), vec!["x"]);
    }

-   // TODO: After porting the control flow graph.
-   // #[test]
-   // fn reachability_trivial() {
-   //     let parsed = parse("x = 1; x");
-   //     let ast = parsed.syntax();
-   //     let index = SemanticIndex::from_ast(ast);
-   //     let table = &index.symbol_table;
-   //     let x_sym = table
-   //         .root_symbol_id_by_name("x")
-   //         .expect("x symbol should exist");
-   //     let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
-   //         panic!("should be an expr")
-   //     };
-   //     let x_defs: Vec<_> = index
-   //         .reachable_definitions(x_sym, x_use)
-   //         .map(|constrained_definition| constrained_definition.definition)
-   //         .collect();
-   //     assert_eq!(x_defs.len(), 1);
-   //     let Definition::Assignment(node_key) = &x_defs[0] else {
-   //         panic!("def should be an assignment")
-   //     };
-   //     let Some(def_node) = node_key.resolve(ast.into()) else {
-   //         panic!("node key should resolve")
-   //     };
-   //     let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
-   //         value: ast::Number::Int(num),
-   //         ..
-   //     }) = &*def_node.value
-   //     else {
-   //         panic!("should be a number literal")
-   //     };
-   //     assert_eq!(*num, 1);
-   // }
+   #[test]
+   fn reachability_trivial() {
+       let TestCase { db, file } = test_case("x = 1; x");
+       let parsed = parsed_module(&db, file);
+       let scope = global_scope(&db, file);
+       let ast = parsed.syntax();
+       let ast::Stmt::Expr(ast::StmtExpr {
+           value: x_use_expr, ..
+       }) = &ast.body[1]
+       else {
+           panic!("should be an expr")
+       };
+       let ast::Expr::Name(x_use_expr_name) = x_use_expr.as_ref() else {
+           panic!("expected a Name");
+       };
+       let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
+       let use_def = use_def_map(&db, scope);
+       let definition = use_def.first_use_definition(x_use_id).unwrap();
+       let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
+           panic!("should be an assignment definition")
+       };
+       let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
+           value: ast::Number::Int(num),
+           ..
+       }) = &*assignment.assignment().value
+       else {
+           panic!("should be a number literal")
+       };
+       assert_eq!(*num, 1);
+   }

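// Note: the rewritten test drives the new use-def API end to end: find the AST
// node for the use of `x`, map it to a `ScopedUseId`, then ask the use-def map
// which definition reaches that use. In Python terms:
//
//     x = 1; x
//     #      ^ this use resolves to the `x = 1` assignment, whose right-hand
//     #        side is the int literal `1` that the test unwraps and checks.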
    #[test]
    fn expression_scope() {
@@ -617,7 +956,7 @@ class C[T]:
        let x = &x_stmt.targets[0];

        assert_eq!(index.expression_scope(x).kind(), ScopeKind::Module);
-       assert_eq!(index.expression_scope_id(x), FileScopeId::root());
+       assert_eq!(index.expression_scope_id(x), FileScopeId::global());

        let def = ast.body[1].as_function_def_stmt().unwrap();
        let y_stmt = def.body[0].as_assign_stmt().unwrap();
@@ -631,7 +970,7 @@ class C[T]:
    fn scope_names<'a>(
        scopes: impl Iterator<Item = (FileScopeId, &'a Scope)>,
        db: &'a dyn Db,
-       file: VfsFile,
+       file: File,
    ) -> Vec<&'a str> {
        scopes
            .into_iter()
@@ -654,16 +993,16 @@ def x():

        let index = semantic_index(&db, file);

-       let descendents = index.descendent_scopes(FileScopeId::root());
+       let descendents = index.descendent_scopes(FileScopeId::global());
        assert_eq!(
            scope_names(descendents, &db, file),
            vec!["Test", "foo", "bar", "baz", "x"]
        );

-       let children = index.child_scopes(FileScopeId::root());
+       let children = index.child_scopes(FileScopeId::global());
        assert_eq!(scope_names(children, &db, file), vec!["Test", "x"]);

-       let test_class = index.child_scopes(FileScopeId::root()).next().unwrap().0;
+       let test_class = index.child_scopes(FileScopeId::global()).next().unwrap().0;
        let test_child_scopes = index.child_scopes(test_class);
        assert_eq!(
            scope_names(test_child_scopes, &db, file),
@@ -671,7 +1010,7 @@ def x():
        );

        let bar_scope = index
-           .descendent_scopes(FileScopeId::root())
+           .descendent_scopes(FileScopeId::global())
            .nth(2)
            .unwrap()
            .0;
@@ -682,4 +1021,28 @@ def x():
            vec!["bar", "foo", "Test", "<module>"]
        );
    }

    #[test]
    fn match_stmt_symbols() {
        let TestCase { db, file } = test_case(
            "
match subject:
    case a: ...
    case [b, c, *d]: ...
    case e as f: ...
    case {'x': g, **h}: ...
    case Foo(i, z=j): ...
    case k | l: ...
    case _: ...
",
        );

        let global_table = symbol_table(&db, global_scope(&db, file));

        assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
        assert_eq!(
            names(&global_table),
            vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
        );
    }
}

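// Note: in `match` statements only capture-style patterns bind names: plain
// captures (`a`), sequence elements and stars (`b`, `c`, `*d`), `as` targets
// (`f`), mapping values and `**rest` (`g`, `h`), class-pattern arguments
// (`i`, `j`), and or-pattern alternatives (`k`, `l`). The class name `Foo` is
// looked up rather than bound, which is why the test asserts it is only "used",
// and the wildcard `_` binds nothing at all.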
@@ -1,6 +1,6 @@
use rustc_hash::FxHashMap;

-use ruff_index::{newtype_index, Idx};
+use ruff_index::newtype_index;
use ruff_python_ast as ast;
use ruff_python_ast::ExpressionRef;
@@ -26,20 +26,56 @@ use crate::Db;
/// ```
#[derive(Debug)]
pub(crate) struct AstIds {
-   /// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
+   /// Maps expressions to their expression id.
    expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
    /// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
    uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

impl AstIds {
    fn expression_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedExpressionId {
        self.expressions_map[&key.into()]
    }

    fn use_id(&self, key: impl Into<ExpressionNodeKey>) -> ScopedUseId {
        self.uses_map[&key.into()]
    }
}

fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
    semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
}

pub trait HasScopedUseId {
    /// The type of the ID uniquely identifying the use.
    type Id: Copy;

    /// Returns the ID that uniquely identifies the use in `scope`.
    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}

/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedUseId;

impl HasScopedUseId for ast::ExprName {
    type Id = ScopedUseId;

    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
        let expression_ref = ExpressionRef::from(self);
        expression_ref.scoped_use_id(db, scope)
    }
}

impl HasScopedUseId for ast::ExpressionRef<'_> {
    type Id = ScopedUseId;

    fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
        let ast_ids = ast_ids(db, scope);
        ast_ids.use_id(*self)
    }
}

pub trait HasScopedAstId {
    /// The type of the ID uniquely identifying the node.
    type Id: Copy;
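// Note: a minimal sketch of how these pieces compose, under the assumption that
// `semantic_index` is the same Salsa query used elsewhere in this diff. Given a
// name expression in some scope:
//
//     // let name_expr: &ast::ExprName = /* some name node */;
//     // let use_id: ScopedUseId = name_expr.scoped_use_id(db, scope);
//
// `use_id` is dense per scope, so downstream tables such as the use-def map can
// be plain index-keyed vectors rather than hash maps.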
@@ -110,38 +146,43 @@ impl HasScopedAstId for ast::ExpressionRef<'_> {

#[derive(Debug)]
pub(super) struct AstIdsBuilder {
-   next_id: ScopedExpressionId,
    expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
+   uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

impl AstIdsBuilder {
    pub(super) fn new() -> Self {
        Self {
-           next_id: ScopedExpressionId::new(0),
            expressions_map: FxHashMap::default(),
+           uses_map: FxHashMap::default(),
        }
    }

-   /// Adds `expr` to the AST ids map and returns its id.
-   ///
-   /// ## Safety
-   /// The function is marked as unsafe because it calls [`AstNodeRef::new`] which requires
-   /// that `expr` is a child of `parsed`.
-   #[allow(unsafe_code)]
+   /// Adds `expr` to the expression ids map and returns its id.
    pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
-       let expression_id = self.next_id;
-       self.next_id = expression_id + 1;
+       let expression_id = self.expressions_map.len().into();

        self.expressions_map.insert(expr.into(), expression_id);

        expression_id
    }

+   /// Adds `expr` to the use ids map and returns its id.
+   pub(super) fn record_use(&mut self, expr: &ast::Expr) -> ScopedUseId {
+       let use_id = self.uses_map.len().into();
+
+       self.uses_map.insert(expr.into(), use_id);
+
+       use_id
+   }

    pub(super) fn finish(mut self) -> AstIds {
        self.expressions_map.shrink_to_fit();
        self.uses_map.shrink_to_fit();

        AstIds {
            expressions_map: self.expressions_map,
            uses_map: self.uses_map,
        }
    }
}
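// Note: using `self.expressions_map.len().into()` as the next id works because
// ids are handed out densely and every `record_*` call inserts exactly one new
// key, so `len()` before the insert equals what the removed `next_id` counter
// would have been. The implicit assumption (which holds for a single AST walk)
// is that the same node is never recorded twice; a duplicate insert would make
// `len()` fall out of sync with the ids already handed out.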
@@ -156,12 +197,14 @@ pub(crate) mod node_key {
    pub(crate) struct ExpressionNodeKey(NodeKey);

    impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
        #[inline]
        fn from(value: ast::ExpressionRef<'_>) -> Self {
-           Self(NodeKey::from_node(value))
+           Self(NodeKey::from_ref(value))
        }
    }

    impl From<&ast::Expr> for ExpressionNodeKey {
        #[inline]
        fn from(value: &ast::Expr) -> Self {
            Self(NodeKey::from_node(value))
        }
@@ -2,62 +2,73 @@ use std::sync::Arc;

use rustc_hash::FxHashMap;

+use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
-use ruff_db::vfs::VfsFile;
use ruff_index::IndexVec;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
-use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
+use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
+use ruff_python_ast::AnyParameterRef;

use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
-use crate::semantic_index::definition::{Definition, DefinitionNodeKey, DefinitionNodeRef};
+use crate::semantic_index::definition::{
+    AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
+    DefinitionNodeRef, ImportFromDefinitionNodeRef,
+};
+use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
    SymbolTableBuilder,
};
+use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;

-pub(super) struct SemanticIndexBuilder<'db, 'ast> {
+pub(super) struct SemanticIndexBuilder<'db> {
    // Builder state
    db: &'db dyn Db,
-   file: VfsFile,
+   file: File,
    module: &'db ParsedModule,
    scope_stack: Vec<FileScopeId>,
-   /// the target we're currently inferring
-   current_target: Option<CurrentTarget<'ast>>,
+   /// The assignment we're currently visiting.
+   current_assignment: Option<CurrentAssignment<'db>>,
+   /// Flow states at each `break` in the current loop.
+   loop_break_states: Vec<FlowSnapshot>,

    // Semantic Index fields
    scopes: IndexVec<FileScopeId, Scope>,
    scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
-   symbol_tables: IndexVec<FileScopeId, SymbolTableBuilder<'db>>,
+   symbol_tables: IndexVec<FileScopeId, SymbolTableBuilder>,
    ast_ids: IndexVec<FileScopeId, AstIdsBuilder>,
+   use_def_maps: IndexVec<FileScopeId, UseDefMapBuilder<'db>>,
    scopes_by_node: FxHashMap<NodeWithScopeKey, FileScopeId>,
    scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
    definitions_by_node: FxHashMap<DefinitionNodeKey, Definition<'db>>,
+   expressions_by_node: FxHashMap<ExpressionNodeKey, Expression<'db>>,
}

-impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast>
-where
-    'db: 'ast,
-{
-    pub(super) fn new(db: &'db dyn Db, file: VfsFile, parsed: &'db ParsedModule) -> Self {
+impl<'db> SemanticIndexBuilder<'db> {
+    pub(super) fn new(db: &'db dyn Db, file: File, parsed: &'db ParsedModule) -> Self {
        let mut builder = Self {
            db,
            file,
            module: parsed,
            scope_stack: Vec::new(),
-           current_target: None,
+           current_assignment: None,
+           loop_break_states: vec![],

            scopes: IndexVec::new(),
            symbol_tables: IndexVec::new(),
            ast_ids: IndexVec::new(),
            scope_ids_by_scope: IndexVec::new(),
+           use_def_maps: IndexVec::new(),

            scopes_by_expression: FxHashMap::default(),
            scopes_by_node: FxHashMap::default(),
            definitions_by_node: FxHashMap::default(),
+           expressions_by_node: FxHashMap::default(),
        };

        builder.push_scope_with_parent(NodeWithScopeRef::Module, None);
@@ -72,16 +83,12 @@ where
            .expect("Always to have a root scope")
    }

-   fn push_scope(&mut self, node: NodeWithScopeRef<'ast>) {
+   fn push_scope(&mut self, node: NodeWithScopeRef) {
        let parent = self.current_scope();
        self.push_scope_with_parent(node, Some(parent));
    }

-   fn push_scope_with_parent(
-       &mut self,
-       node: NodeWithScopeRef<'ast>,
-       parent: Option<FileScopeId>,
-   ) {
+   fn push_scope_with_parent(&mut self, node: NodeWithScopeRef, parent: Option<FileScopeId>) {
        let children_start = self.scopes.next_index() + 1;

        let scope = Scope {
@@ -92,13 +99,18 @@ where

        let file_scope_id = self.scopes.push(scope);
        self.symbol_tables.push(SymbolTableBuilder::new());
+       self.use_def_maps.push(UseDefMapBuilder::new());
        let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());

-       #[allow(unsafe_code)]
-       // SAFETY: `node` is guaranteed to be a child of `self.module`
-       let scope_id = ScopeId::new(self.db, self.file, file_scope_id, unsafe {
-           node.to_kind(self.module.clone())
-       });
+       let scope_id = ScopeId::new(
+           self.db,
+           self.file,
+           file_scope_id,
+           unsafe { node.to_kind(self.module.clone()) },
+           countme::Count::default(),
+       );

        self.scope_ids_by_scope.push(scope_id);
        self.scopes_by_node.insert(node.node_key(), file_scope_id);
@@ -116,83 +128,133 @@ where
        id
    }

-   fn current_symbol_table(&mut self) -> &mut SymbolTableBuilder<'db> {
+   fn current_symbol_table(&mut self) -> &mut SymbolTableBuilder {
        let scope_id = self.current_scope();
        &mut self.symbol_tables[scope_id]
    }

+   fn current_use_def_map_mut(&mut self) -> &mut UseDefMapBuilder<'db> {
+       let scope_id = self.current_scope();
+       &mut self.use_def_maps[scope_id]
+   }
+
+   fn current_use_def_map(&self) -> &UseDefMapBuilder<'db> {
+       let scope_id = self.current_scope();
+       &self.use_def_maps[scope_id]
+   }
+
    fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
        let scope_id = self.current_scope();
        &mut self.ast_ids[scope_id]
    }

-   fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
-       let symbol_table = self.current_symbol_table();
-       symbol_table.add_or_update_symbol(name, flags)
+   fn flow_snapshot(&self) -> FlowSnapshot {
+       self.current_use_def_map().snapshot()
    }

+   fn flow_restore(&mut self, state: FlowSnapshot) {
+       self.current_use_def_map_mut().restore(state);
+   }
+
+   fn flow_merge(&mut self, state: FlowSnapshot) {
+       self.current_use_def_map_mut().merge(state);
+   }
+
+   fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
+       let symbol_table = self.current_symbol_table();
+       let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
+       if added {
+           let use_def_map = self.current_use_def_map_mut();
+           use_def_map.add_symbol(symbol_id);
+       }
+       symbol_id
+   }
+
-   fn add_definition(
+   fn add_definition<'a>(
        &mut self,
-       definition_node: impl Into<DefinitionNodeRef<'ast>>,
-       symbol_id: ScopedSymbolId,
+       symbol: ScopedSymbolId,
+       definition_node: impl Into<DefinitionNodeRef<'a>>,
    ) -> Definition<'db> {
-       let definition_node = definition_node.into();
+       let definition_node: DefinitionNodeRef<'_> = definition_node.into();
        let definition = Definition::new(
            self.db,
            self.file,
            self.current_scope(),
-           symbol_id,
+           symbol,
            #[allow(unsafe_code)]
            unsafe {
                definition_node.into_owned(self.module.clone())
            },
+           countme::Count::default(),
        );

        self.definitions_by_node
            .insert(definition_node.key(), definition);
+       self.current_use_def_map_mut()
+           .record_definition(symbol, definition);

        definition
    }

-   fn add_or_update_symbol_with_definition(
-       &mut self,
-       name: Name,
-       definition: impl Into<DefinitionNodeRef<'ast>>,
-   ) -> (ScopedSymbolId, Definition<'db>) {
-       let symbol_table = self.current_symbol_table();
-
-       let id = symbol_table.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
-       let definition = self.add_definition(definition, id);
-       self.current_symbol_table().add_definition(id, definition);
-       (id, definition)
+   fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
+       let expression = self.add_standalone_expression(constraint_node);
+       self.current_use_def_map_mut().record_constraint(expression);
+
+       expression
    }

+   /// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
+   /// standalone (type narrowing tests, RHS of an assignment.)
+   fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
+       let expression = Expression::new(
+           self.db,
+           self.file,
+           self.current_scope(),
+           #[allow(unsafe_code)]
+           unsafe {
+               AstNodeRef::new(self.module.clone(), expression_node)
+           },
+           countme::Count::default(),
+       );
+       self.expressions_by_node
+           .insert(expression_node.into(), expression);
+       expression
+   }

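// Note: a rough model of the flow-state API introduced above, as an assumption
// about `UseDefMapBuilder`'s contract rather than its actual representation:
//
//     // let pre = self.flow_snapshot();   // save per-symbol live definitions
//     // /* ...visit one branch... */
//     // self.flow_restore(pre.clone());   // rewind to the saved state
//     // /* ...visit the other branch... */
//     // self.flow_merge(post_branch);     // union: a definition stays live
//     //                                   // if it was live on either path
//
// The statement visitors below (`if`, `while`, `break`) are built entirely out
// of these three operations.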
    fn with_type_params(
        &mut self,
-       with_params: &WithTypeParams<'ast>,
+       with_scope: NodeWithScopeRef,
+       type_params: Option<&'db ast::TypeParams>,
        nested: impl FnOnce(&mut Self) -> FileScopeId,
    ) -> FileScopeId {
-       let type_params = with_params.type_parameters();
-
        if let Some(type_params) = type_params {
-           let with_scope = match with_params {
-               WithTypeParams::ClassDef { node, .. } => {
-                   NodeWithScopeRef::ClassTypeParameters(node)
-               }
-               WithTypeParams::FunctionDef { node, .. } => {
-                   NodeWithScopeRef::FunctionTypeParameters(node)
-               }
-           };
-
            self.push_scope(with_scope);

            for type_param in &type_params.type_params {
-               let name = match type_param {
-                   ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
-                   ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
-                   ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
+               let (name, bound, default) = match type_param {
+                   ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
+                       range: _,
+                       name,
+                       bound,
+                       default,
+                   }) => (name, bound, default),
+                   ast::TypeParam::ParamSpec(ast::TypeParamParamSpec {
+                       name, default, ..
+                   }) => (name, &None, default),
+                   ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple {
+                       name,
+                       default,
+                       ..
+                   }) => (name, &None, default),
                };
                // TODO create Definition for typevars
                self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
+               if let Some(bound) = bound {
+                   self.visit_expr(bound);
+               }
+               if let Some(default) = default {
+                   self.visit_expr(default);
+               }
            }
        }

@@ -205,6 +267,66 @@ where
        nested_scope
    }

    /// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
    /// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
    ///
    /// [`Comprehension`]: ast::Comprehension
    fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
        let mut generators_iter = generators.iter();

        let Some(generator) = generators_iter.next() else {
            unreachable!("Expression must contain at least one generator");
        };

        // The `iter` of the first generator is evaluated in the outer scope, while all subsequent
        // nodes are evaluated in the inner scope.
        self.visit_expr(&generator.iter);
        self.push_scope(scope);

        self.current_assignment = Some(CurrentAssignment::Comprehension {
            node: generator,
            first: true,
        });
        self.visit_expr(&generator.target);
        self.current_assignment = None;

        for expr in &generator.ifs {
            self.visit_expr(expr);
        }

        for generator in generators_iter {
            self.visit_expr(&generator.iter);

            self.current_assignment = Some(CurrentAssignment::Comprehension {
                node: generator,
                first: false,
            });
            self.visit_expr(&generator.target);
            self.current_assignment = None;

            for expr in &generator.ifs {
                self.visit_expr(expr);
            }
        }
    }

    fn declare_parameter(&mut self, parameter: AnyParameterRef) {
        let symbol =
            self.add_or_update_symbol(parameter.name().id().clone(), SymbolFlags::IS_DEFINED);

        let definition = self.add_definition(symbol, parameter);

        if let AnyParameterRef::NonVariadic(with_default) = parameter {
            // Insert a mapping from the parameter to the same definition.
            // This ensures that calling `HasTy::ty` on the inner parameter returns
            // a valid type (and doesn't panic)
            self.definitions_by_node.insert(
                DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
                definition,
            );
        }
    }

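// Note: why the first `iter` is special. In Python,
//
//     [x + y for x in iter1 for y in x]
//
// evaluates `iter1` eagerly in the *enclosing* scope, while the second
// clause's iterable (`x` here) is evaluated inside the comprehension's own
// scope, where the first target is already bound. Visiting `generator.iter`
// before `push_scope` for the first clause only, as `visit_generators` does
// above, mirrors exactly that rule.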
    pub(super) fn build(mut self) -> SemanticIndex<'db> {
        let module = self.module;
        self.visit_body(module.suite());
@@ -213,7 +335,7 @@ where
        self.pop_scope();
        assert!(self.scope_stack.is_empty());

-       assert!(self.current_target.is_none());
+       assert!(self.current_assignment.is_none());

        let mut symbol_tables: IndexVec<_, _> = self
            .symbol_tables
@@ -221,6 +343,12 @@ where
            .map(|builder| Arc::new(builder.finish()))
            .collect();

+       let mut use_def_maps: IndexVec<_, _> = self
+           .use_def_maps
+           .into_iter()
+           .map(|builder| Arc::new(builder.finish()))
+           .collect();
+
        let mut ast_ids: IndexVec<_, _> = self
            .ast_ids
            .into_iter()
@@ -228,8 +356,9 @@ where
            .collect();

        self.scopes.shrink_to_fit();
-       ast_ids.shrink_to_fit();
        symbol_tables.shrink_to_fit();
+       use_def_maps.shrink_to_fit();
+       ast_ids.shrink_to_fit();
        self.scopes_by_expression.shrink_to_fit();
        self.definitions_by_node.shrink_to_fit();

@@ -240,17 +369,19 @@ where
            symbol_tables,
            scopes: self.scopes,
            definitions_by_node: self.definitions_by_node,
+           expressions_by_node: self.expressions_by_node,
            scope_ids_by_scope: self.scope_ids_by_scope,
            ast_ids,
            scopes_by_expression: self.scopes_by_expression,
            scopes_by_node: self.scopes_by_node,
+           use_def_maps,
        }
    }
}

-impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db, 'ast>
+impl<'db, 'ast> Visitor<'ast> for SemanticIndexBuilder<'db>
where
-   'db: 'ast,
+   'ast: 'db,
{
    fn visit_stmt(&mut self, stmt: &'ast ast::Stmt) {
        match stmt {
@@ -259,20 +390,36 @@ where
                    self.visit_decorator(decorator);
                }

-               self.add_or_update_symbol_with_definition(
-                   function_def.name.id.clone(),
-                   function_def,
-               );
+               let symbol = self
+                   .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
+               self.add_definition(symbol, function_def);

+               // The default value of the parameters needs to be evaluated in the
+               // enclosing scope.
+               for default in function_def
+                   .parameters
+                   .iter_non_variadic_params()
+                   .filter_map(|param| param.default.as_deref())
+               {
+                   self.visit_expr(default);
+               }

                self.with_type_params(
-                   &WithTypeParams::FunctionDef { node: function_def },
+                   NodeWithScopeRef::FunctionTypeParameters(function_def),
+                   function_def.type_params.as_deref(),
                    |builder| {
                        builder.visit_parameters(&function_def.parameters);
-                       for expr in &function_def.returns {
+                       if let Some(expr) = &function_def.returns {
                            builder.visit_annotation(expr);
                        }

                        builder.push_scope(NodeWithScopeRef::Function(function_def));

+                       // Add symbols and definitions for the parameters to the function scope.
+                       for parameter in &*function_def.parameters {
+                           builder.declare_parameter(parameter);
+                       }

                        builder.visit_body(&function_def.body);
                        builder.pop_scope()
                    },
@@ -283,54 +430,139 @@ where
                    self.visit_decorator(decorator);
                }

-               self.add_or_update_symbol_with_definition(class.name.id.clone(), class);
+               let symbol =
+                   self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
+               self.add_definition(symbol, class);

-               self.with_type_params(&WithTypeParams::ClassDef { node: class }, |builder| {
-                   if let Some(arguments) = &class.arguments {
-                       builder.visit_arguments(arguments);
-                   }
-
-                   builder.push_scope(NodeWithScopeRef::Class(class));
-                   builder.visit_body(&class.body);
-
-                   builder.pop_scope()
-               });
+               self.with_type_params(
+                   NodeWithScopeRef::ClassTypeParameters(class),
+                   class.type_params.as_deref(),
+                   |builder| {
+                       if let Some(arguments) = &class.arguments {
+                           builder.visit_arguments(arguments);
+                       }
+
+                       builder.push_scope(NodeWithScopeRef::Class(class));
+                       builder.visit_body(&class.body);
+
+                       builder.pop_scope()
+                   },
+               );
            }
-           ast::Stmt::Import(ast::StmtImport { names, .. }) => {
-               for alias in names {
+           ast::Stmt::Import(node) => {
+               for alias in &node.names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.clone()
                    } else {
                        Name::new(alias.name.id.split('.').next().unwrap())
                    };

-                   self.add_or_update_symbol_with_definition(symbol_name, alias);
+                   let symbol = self.add_or_update_symbol(symbol_name, SymbolFlags::IS_DEFINED);
+                   self.add_definition(symbol, alias);
                }
            }
-           ast::Stmt::ImportFrom(ast::StmtImportFrom {
-               module: _,
-               names,
-               level: _,
-               ..
-           }) => {
-               for alias in names {
+           ast::Stmt::ImportFrom(node) => {
+               for (alias_index, alias) in node.names.iter().enumerate() {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        &asname.id
                    } else {
                        &alias.name.id
                    };

-                   self.add_or_update_symbol_with_definition(symbol_name.clone(), alias);
+                   let symbol =
+                       self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
+                   self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
                }
            }
            ast::Stmt::Assign(node) => {
-               debug_assert!(self.current_target.is_none());
+               debug_assert!(self.current_assignment.is_none());
                self.visit_expr(&node.value);
+               self.add_standalone_expression(&node.value);
+               self.current_assignment = Some(node.into());
                for target in &node.targets {
-                   self.current_target = Some(CurrentTarget::Expr(target));
                    self.visit_expr(target);
                }
-               self.current_target = None;
+               self.current_assignment = None;
            }
+           ast::Stmt::AnnAssign(node) => {
+               debug_assert!(self.current_assignment.is_none());
+               // TODO deferred annotation visiting
+               self.visit_expr(&node.annotation);
+               if let Some(value) = &node.value {
+                   self.visit_expr(value);
+               }
+               self.current_assignment = Some(node.into());
+               self.visit_expr(&node.target);
+               self.current_assignment = None;
+           }
+           ast::Stmt::AugAssign(
+               aug_assign @ ast::StmtAugAssign {
+                   range: _,
+                   target,
+                   op: _,
+                   value,
+               },
+           ) => {
+               debug_assert!(self.current_assignment.is_none());
+               self.visit_expr(value);
+               self.current_assignment = Some(aug_assign.into());
+               self.visit_expr(target);
+               self.current_assignment = None;
+           }
+           ast::Stmt::If(node) => {
+               self.visit_expr(&node.test);
+               let pre_if = self.flow_snapshot();
+               self.add_constraint(&node.test);
+               self.visit_body(&node.body);
+               let mut post_clauses: Vec<FlowSnapshot> = vec![];
+               for clause in &node.elif_else_clauses {
+                   // snapshot after every block except the last; the last one will just become
+                   // the state that we merge the other snapshots into
+                   post_clauses.push(self.flow_snapshot());
+                   // we can only take an elif/else branch if none of the previous ones were
+                   // taken, so the block entry state is always `pre_if`
+                   self.flow_restore(pre_if.clone());
+                   self.visit_elif_else_clause(clause);
+               }
+               for post_clause_state in post_clauses {
+                   self.flow_merge(post_clause_state);
+               }
+               let has_else = node
+                   .elif_else_clauses
+                   .last()
+                   .is_some_and(|clause| clause.test.is_none());
+               if !has_else {
+                   // if there's no else clause, then it's possible we took none of the branches,
+                   // and the pre_if state can reach here
+                   self.flow_merge(pre_if);
+               }
+           }
+           ast::Stmt::While(node) => {
+               self.visit_expr(&node.test);
+
+               let pre_loop = self.flow_snapshot();
+
+               // Save aside any break states from an outer loop
+               let saved_break_states = std::mem::take(&mut self.loop_break_states);
+               self.visit_body(&node.body);
+               // Get the break states from the body of this loop, and restore the saved outer
+               // ones.
+               let break_states =
+                   std::mem::replace(&mut self.loop_break_states, saved_break_states);
+
+               // We may execute the `else` clause without ever executing the body, so merge in
+               // the pre-loop state before visiting `else`.
+               self.flow_merge(pre_loop);
+               self.visit_body(&node.orelse);
+
+               // Breaking out of a while loop bypasses the `else` clause, so merge in the break
+               // states after visiting `else`.
+               for break_state in break_states {
+                   self.flow_merge(break_state);
+               }
+           }
+           ast::Stmt::Break(_) => {
+               self.loop_break_states.push(self.flow_snapshot());
+           }
            _ => {
                walk_stmt(self, stmt);
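// Note: a small Python example of what the `If` and `While` arms above compute.
//
//     x = 1
//     if cond():
//         x = 2
//     elif other():
//         x = 3
//     # here the use-def map records {x = 2, x = 3, x = 1} as the possible
//     # definitions of `x`: each elif/else clause restarts from `pre_if`, and
//     # because there is no `else`, `pre_if` itself is merged back in.
//
//     while cond():
//         y = 1
//         if done():
//             break
//     else:
//         y = 2
//     # `break` snapshots are merged *after* the loop's `else` body, because
//     # breaking out of the loop skips the `else` clause entirely.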
@@ -344,90 +576,230 @@ where
|
||||
self.current_ast_ids().record_expression(expr);
|
||||
|
||||
match expr {
|
||||
ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
|
||||
let flags = match ctx {
|
||||
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
|
||||
let mut flags = match ctx {
|
||||
ast::ExprContext::Load => SymbolFlags::IS_USED,
|
||||
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
|
||||
ast::ExprContext::Invalid => SymbolFlags::empty(),
|
||||
};
|
||||
match self.current_target {
|
||||
Some(target) if flags.contains(SymbolFlags::IS_DEFINED) => {
|
||||
self.add_or_update_symbol_with_definition(id.clone(), target);
|
||||
}
|
||||
_ => {
|
||||
self.add_or_update_symbol(id.clone(), flags);
|
||||
if matches!(
|
||||
self.current_assignment,
|
||||
Some(CurrentAssignment::AugAssign(_))
|
||||
) && !ctx.is_invalid()
|
||||
{
|
||||
// For augmented assignment, the target expression is also used, so we should
|
||||
// record that as a use.
|
||||
flags |= SymbolFlags::IS_USED;
|
||||
}
|
||||
let symbol = self.add_or_update_symbol(id.clone(), flags);
|
||||
if flags.contains(SymbolFlags::IS_DEFINED) {
|
||||
match self.current_assignment {
|
||||
Some(CurrentAssignment::Assign(assignment)) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
AssignmentDefinitionNodeRef {
|
||||
assignment,
|
||||
target: name_node,
|
||||
},
|
||||
);
|
||||
}
|
||||
Some(CurrentAssignment::AnnAssign(ann_assign)) => {
|
||||
self.add_definition(symbol, ann_assign);
|
||||
}
|
||||
Some(CurrentAssignment::AugAssign(aug_assign)) => {
|
||||
self.add_definition(symbol, aug_assign);
|
||||
}
|
||||
Some(CurrentAssignment::Named(named)) => {
|
||||
// TODO(dhruvmanila): If the current scope is a comprehension, then the
|
||||
// named expression is implicitly nonlocal. This is yet to be
|
||||
// implemented.
|
||||
self.add_definition(symbol, named);
|
||||
}
|
||||
Some(CurrentAssignment::Comprehension { node, first }) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
ComprehensionDefinitionNodeRef { node, first },
|
||||
);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
if flags.contains(SymbolFlags::IS_USED) {
|
||||
let use_id = self.current_ast_ids().record_use(expr);
|
||||
self.current_use_def_map_mut().record_use(symbol, use_id);
|
||||
}
|
||||
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
ast::Expr::Named(node) => {
|
||||
debug_assert!(self.current_target.is_none());
|
||||
self.current_target = Some(CurrentTarget::ExprNamed(node));
|
||||
debug_assert!(self.current_assignment.is_none());
|
||||
self.current_assignment = Some(node.into());
|
||||
// TODO walrus in comprehensions is implicitly nonlocal
|
||||
self.visit_expr(&node.target);
|
||||
self.current_target = None;
|
||||
self.current_assignment = None;
|
||||
self.visit_expr(&node.value);
|
||||
}
|
||||
ast::Expr::Lambda(lambda) => {
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
// The default value of the parameters needs to be evaluated in the
|
||||
// enclosing scope.
|
||||
for default in parameters
|
||||
.iter_non_variadic_params()
|
||||
.filter_map(|param| param.default.as_deref())
|
||||
{
|
||||
self.visit_expr(default);
|
||||
}
|
||||
self.visit_parameters(parameters);
|
||||
}
|
||||
self.push_scope(NodeWithScopeRef::Lambda(lambda));
|
||||
|
||||
// Add symbols and definitions for the parameters to the lambda scope.
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
for parameter in &**parameters {
|
||||
self.declare_parameter(parameter);
|
||||
}
|
||||
}
|
||||
|
||||
self.visit_expr(lambda.body.as_ref());
|
||||
}
|
||||
ast::Expr::If(ast::ExprIf {
|
||||
body, test, orelse, ..
|
||||
}) => {
|
||||
// TODO detect statically known truthy or falsy test (via type inference, not naive
|
||||
// AST inspection, so we can't simplify here, need to record test expression in CFG
|
||||
// for later checking)
|
||||
|
||||
// AST inspection, so we can't simplify here, need to record test expression for
|
||||
// later checking)
|
||||
self.visit_expr(test);
|
||||
|
||||
// let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());
|
||||
|
||||
// self.set_current_flow_node(if_branch);
|
||||
// self.insert_constraint(test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
self.visit_expr(body);
|
||||
|
||||
// let post_body = self.current_flow_node();
|
||||
|
||||
// self.set_current_flow_node(if_branch);
|
||||
let post_body = self.flow_snapshot();
|
||||
self.flow_restore(pre_if);
|
||||
self.visit_expr(orelse);
|
||||
|
||||
// let post_else = self
|
||||
// .flow_graph_builder
|
||||
// .add_phi(self.current_flow_node(), post_body);
|
||||
|
||||
// self.set_current_flow_node(post_else);
|
||||
self.flow_merge(post_body);
|
||||
}
|
||||
ast::Expr::ListComp(
|
||||
list_comprehension @ ast::ExprListComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::ListComprehension(list_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::SetComp(
|
||||
set_comprehension @ ast::ExprSetComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::SetComprehension(set_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::Generator(
|
||||
generator @ ast::ExprGenerator {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::DictComp(
|
||||
dict_comprehension @ ast::ExprDictComp {
|
||||
key,
|
||||
value,
|
||||
generators,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::DictComprehension(dict_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(key);
|
||||
self.visit_expr(value);
|
||||
}
|
||||
_ => {
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum WithTypeParams<'node> {
|
||||
ClassDef { node: &'node ast::StmtClassDef },
|
||||
FunctionDef { node: &'node ast::StmtFunctionDef },
|
||||
}
|
||||
|
||||
impl<'node> WithTypeParams<'node> {
|
||||
fn type_parameters(&self) -> Option<&'node ast::TypeParams> {
|
||||
match self {
|
||||
WithTypeParams::ClassDef { node, .. } => node.type_params.as_deref(),
|
||||
WithTypeParams::FunctionDef { node, .. } => node.type_params.as_deref(),
|
||||
if matches!(
|
||||
expr,
|
||||
ast::Expr::Lambda(_)
|
||||
| ast::Expr::ListComp(_)
|
||||
| ast::Expr::SetComp(_)
|
||||
| ast::Expr::Generator(_)
|
||||
| ast::Expr::DictComp(_)
|
||||
) {
|
||||
self.pop_scope();
|
||||
}
|
||||
}
|
||||
|
||||
    fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
        // Intentionally avoid walking default expressions, as we handle them in the enclosing
        // scope.
        for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
            self.visit_parameter(parameter);
        }
    }

    fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
        if let ast::Pattern::MatchAs(ast::PatternMatchAs {
            name: Some(name), ..
        })
        | ast::Pattern::MatchStar(ast::PatternMatchStar {
            name: Some(name),
            range: _,
        })
        | ast::Pattern::MatchMapping(ast::PatternMatchMapping {
            rest: Some(name), ..
        }) = pattern
        {
            // TODO(dhruvmanila): Add definition
            self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
        }

        walk_pattern(self, pattern);
    }
}

#[derive(Copy, Clone, Debug)]
enum CurrentTarget<'a> {
    Expr(&'a ast::Expr),
    ExprNamed(&'a ast::ExprNamed),
enum CurrentAssignment<'a> {
    Assign(&'a ast::StmtAssign),
    AnnAssign(&'a ast::StmtAnnAssign),
    AugAssign(&'a ast::StmtAugAssign),
    Named(&'a ast::ExprNamed),
    Comprehension {
        node: &'a ast::Comprehension,
        first: bool,
    },
}

impl<'a> From<CurrentTarget<'a>> for DefinitionNodeRef<'a> {
    fn from(val: CurrentTarget<'a>) -> Self {
        match val {
            CurrentTarget::Expr(expression) => DefinitionNodeRef::Target(expression),
            CurrentTarget::ExprNamed(named) => DefinitionNodeRef::NamedExpression(named),
        }
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtAssign) -> Self {
        Self::Assign(value)
    }
}

impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtAnnAssign) -> Self {
        Self::AnnAssign(value)
    }
}

impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtAugAssign) -> Self {
        Self::AugAssign(value)
    }
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
    fn from(value: &'a ast::ExprNamed) -> Self {
        Self::Named(value)
    }
}

@@ -1,66 +1,144 @@
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_db::vfs::VfsFile;
use ruff_python_ast as ast;

use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::symbol::{FileScopeId, ScopedSymbolId};
use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
use crate::Db;

#[salsa::tracked]
pub struct Definition<'db> {
    /// The file in which the definition is defined.
    /// The file in which the definition occurs.
    #[id]
    pub(super) file: VfsFile,
    pub(crate) file: File,

    /// The scope in which the definition is defined.
    /// The scope in which the definition occurs.
    #[id]
    pub(crate) scope: FileScopeId,
    pub(crate) file_scope: FileScopeId,

    /// The id of the corresponding symbol. Mainly used as ID.
    /// The symbol defined.
    #[id]
    symbol_id: ScopedSymbolId,
    pub(crate) symbol: ScopedSymbolId,

    #[no_eq]
    #[return_ref]
    pub(crate) node: DefinitionKind,

    #[no_eq]
    count: countme::Count<Definition<'static>>,
}

impl<'db> Definition<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
    Alias(&'a ast::Alias),
    Import(&'a ast::Alias),
    ImportFrom(ImportFromDefinitionNodeRef<'a>),
    Function(&'a ast::StmtFunctionDef),
    Class(&'a ast::StmtClassDef),
    NamedExpression(&'a ast::ExprNamed),
    Target(&'a ast::Expr),
    Assignment(AssignmentDefinitionNodeRef<'a>),
    AnnotatedAssignment(&'a ast::StmtAnnAssign),
    AugmentedAssignment(&'a ast::StmtAugAssign),
    Comprehension(ComprehensionDefinitionNodeRef<'a>),
    Parameter(ast::AnyParameterRef<'a>),
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::Alias) -> Self {
        Self::Alias(node)
    }
}
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtFunctionDef) -> Self {
        Self::Function(node)
    }
}

impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtClassDef) -> Self {
        Self::Class(node)
    }
}

impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::ExprNamed) -> Self {
        Self::NamedExpression(node)
    }
}

impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtAnnAssign) -> Self {
        Self::AnnotatedAssignment(node)
    }
}

impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtAugAssign) -> Self {
        Self::AugmentedAssignment(node)
    }
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node_ref: &'a ast::Alias) -> Self {
        Self::Import(node_ref)
    }
}

impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
        Self::ImportFrom(node_ref)
    }
}

impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
        Self::Assignment(node_ref)
    }
}

impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
        Self::Comprehension(node)
    }
}

impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: ast::AnyParameterRef<'a>) -> Self {
        Self::Parameter(node)
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::StmtImportFrom,
    pub(crate) alias_index: usize,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct AssignmentDefinitionNodeRef<'a> {
    pub(crate) assignment: &'a ast::StmtAssign,
    pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::Comprehension,
    pub(crate) first: bool,
}

impl DefinitionNodeRef<'_> {
    #[allow(unsafe_code)]
    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
        match self {
            DefinitionNodeRef::Alias(alias) => {
                DefinitionKind::Alias(AstNodeRef::new(parsed, alias))
            DefinitionNodeRef::Import(alias) => {
                DefinitionKind::Import(AstNodeRef::new(parsed, alias))
            }
            DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
                DefinitionKind::ImportFrom(ImportFromDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    alias_index,
                })
            }
            DefinitionNodeRef::Function(function) => {
                DefinitionKind::Function(AstNodeRef::new(parsed, function))
@@ -71,33 +149,182 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::NamedExpression(named) => {
                DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
            }
            DefinitionNodeRef::Target(target) => {
                DefinitionKind::Target(AstNodeRef::new(parsed, target))
            DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
                DefinitionKind::Assignment(AssignmentDefinitionKind {
                    assignment: AstNodeRef::new(parsed.clone(), assignment),
                    target: AstNodeRef::new(parsed, target),
                })
            }
            DefinitionNodeRef::AnnotatedAssignment(assign) => {
                DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
            }
            DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
                DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
            }
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
                DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    first,
                })
            }
            DefinitionNodeRef::Parameter(parameter) => match parameter {
                ast::AnyParameterRef::Variadic(parameter) => {
                    DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
                }
                ast::AnyParameterRef::NonVariadic(parameter) => {
                    DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
                }
            },
        }
    }
}

impl DefinitionNodeRef<'_> {
    pub(super) fn key(self) -> DefinitionNodeKey {
        match self {
            Self::Alias(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Function(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Class(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::NamedExpression(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Target(node) => DefinitionNodeKey(NodeKey::from_node(node)),
            Self::Import(node) => node.into(),
            Self::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
                (&node.names[alias_index]).into()
            }
            Self::Function(node) => node.into(),
            Self::Class(node) => node.into(),
            Self::NamedExpression(node) => node.into(),
            Self::Assignment(AssignmentDefinitionNodeRef {
                assignment: _,
                target,
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
            Self::AugmentedAssignment(node) => node.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
            Self::Parameter(node) => match node {
                ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
                ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
            },
        }
    }
}

#[derive(Clone, Debug)]
pub enum DefinitionKind {
    Alias(AstNodeRef<ast::Alias>),
    Import(AstNodeRef<ast::Alias>),
    ImportFrom(ImportFromDefinitionKind),
    Function(AstNodeRef<ast::StmtFunctionDef>),
    Class(AstNodeRef<ast::StmtClassDef>),
    NamedExpression(AstNodeRef<ast::ExprNamed>),
    Target(AstNodeRef<ast::Expr>),
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
    AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
    Comprehension(ComprehensionDefinitionKind),
    Parameter(AstNodeRef<ast::Parameter>),
    ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
    node: AstNodeRef<ast::Comprehension>,
    first: bool,
}

impl ComprehensionDefinitionKind {
    pub(crate) fn node(&self) -> &ast::Comprehension {
        self.node.node()
    }

    pub(crate) fn is_first(&self) -> bool {
        self.first
    }
}

#[derive(Clone, Debug)]
pub struct ImportFromDefinitionKind {
    node: AstNodeRef<ast::StmtImportFrom>,
    alias_index: usize,
}

impl ImportFromDefinitionKind {
    pub(crate) fn import(&self) -> &ast::StmtImportFrom {
        self.node.node()
    }

    pub(crate) fn alias(&self) -> &ast::Alias {
        &self.node.node().names[self.alias_index]
    }
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct AssignmentDefinitionKind {
    assignment: AstNodeRef<ast::StmtAssign>,
    target: AstNodeRef<ast::ExprName>,
}

impl AssignmentDefinitionKind {
    pub(crate) fn assignment(&self) -> &ast::StmtAssign {
        self.assignment.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(super) struct DefinitionNodeKey(NodeKey);
pub(crate) struct DefinitionNodeKey(NodeKey);

impl From<&ast::Alias> for DefinitionNodeKey {
    fn from(node: &ast::Alias) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtFunctionDef> for DefinitionNodeKey {
    fn from(node: &ast::StmtFunctionDef) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtClassDef> for DefinitionNodeKey {
    fn from(node: &ast::StmtClassDef) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ExprName> for DefinitionNodeKey {
    fn from(node: &ast::ExprName) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ExprNamed> for DefinitionNodeKey {
    fn from(node: &ast::ExprNamed) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
    fn from(node: &ast::StmtAnnAssign) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
    fn from(node: &ast::StmtAugAssign) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Comprehension> for DefinitionNodeKey {
    fn from(node: &ast::Comprehension) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Parameter> for DefinitionNodeKey {
    fn from(node: &ast::Parameter) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
    fn from(node: &ast::ParameterWithDefault) -> Self {
        Self(NodeKey::from_node(node))
    }
}

@@ -0,0 +1,34 @@
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
use ruff_db::files::File;
use ruff_python_ast as ast;
use salsa;

/// An independently type-inferable expression.
///
/// Includes constraint expressions (e.g. if tests) and the RHS of an unpacking assignment.
#[salsa::tracked]
pub(crate) struct Expression<'db> {
    /// The file in which the expression occurs.
    #[id]
    pub(crate) file: File,

    /// The scope in which the expression occurs.
    #[id]
    pub(crate) file_scope: FileScopeId,

    /// The expression node.
    #[no_eq]
    #[return_ref]
    pub(crate) node_ref: AstNodeRef<ast::Expr>,

    #[no_eq]
    count: countme::Count<Expression<'static>>,
}

impl<'db> Expression<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}
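
As a rough illustration of this ingredient (hedged: `db`, `file`, `file_scope`, and `node_ref` are assumed bindings, and the constructor is the one Salsa generates for tracked structs, taking every field in declaration order), creating and resolving an `Expression` might look like:

// Hypothetical usage sketch, not part of the diff: construct one tracked
// Expression ingredient and resolve its file-local scope to a ScopeId.
let expression = Expression::new(db, file, file_scope, node_ref, countme::Count::default());
let scope = expression.scope(db);
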
@@ -3,8 +3,8 @@ use std::ops::Range;

use bitflags::bitflags;
use hashbrown::hash_map::RawEntryMut;
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_db::vfs::VfsFile;
use ruff_index::{newtype_index, IndexVec};
use ruff_python_ast::name::Name;
use ruff_python_ast::{self as ast};
@@ -12,33 +12,23 @@ use rustc_hash::FxHasher;

use crate::ast_node_ref::AstNodeRef;
use crate::node_key::NodeKey;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::{root_scope, semantic_index, symbol_table, SymbolMap};
use crate::semantic_index::{semantic_index, SymbolMap};
use crate::Db;

#[derive(Eq, PartialEq, Debug)]
pub struct Symbol<'db> {
pub struct Symbol {
    name: Name,
    flags: SymbolFlags,
    /// The nodes that define this symbol, in source order.
    ///
    /// TODO: Use smallvec here, but it creates the same lifetime issues as in [QualifiedName](https://github.com/astral-sh/ruff/blob/5109b50bb3847738eeb209352cf26bda392adf62/crates/ruff_python_ast/src/name.rs#L562-L569)
    definitions: Vec<Definition<'db>>,
}

impl<'db> Symbol<'db> {
impl Symbol {
    fn new(name: Name) -> Self {
        Self {
            name,
            flags: SymbolFlags::empty(),
            definitions: Vec::new(),
        }
    }

    fn push_definition(&mut self, definition: Definition<'db>) {
        self.definitions.push(definition);
    }

    fn insert_flags(&mut self, flags: SymbolFlags) {
        self.flags.insert(flags);
    }
@@ -57,10 +47,6 @@ impl<'db> Symbol<'db> {
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    }

    pub fn definitions(&self) -> &[Definition] {
        &self.definitions
    }
}

bitflags! {
@@ -75,15 +61,6 @@ bitflags! {
    }
}

/// ID that uniquely identifies a public symbol defined in a module's root scope.
#[salsa::tracked]
pub struct PublicSymbolId<'db> {
    #[id]
    pub(crate) file: VfsFile,
    #[id]
    pub(crate) scoped_symbol_id: ScopedSymbolId,
}

/// ID that uniquely identifies a symbol in a file.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct FileSymbolId {
@@ -111,53 +88,11 @@ impl From<FileSymbolId> for ScopedSymbolId {
#[newtype_index]
pub struct ScopedSymbolId;

impl ScopedSymbolId {
    /// Converts the symbol to a public symbol.
    ///
    /// # Panics
    /// May panic if the symbol does not belong to `file` or is not a symbol of `file`'s root scope.
    pub(crate) fn to_public_symbol(self, db: &dyn Db, file: VfsFile) -> PublicSymbolId {
        let symbols = public_symbols_map(db, file);
        symbols.public(self)
    }
}

#[salsa::tracked(return_ref)]
pub(crate) fn public_symbols_map(db: &dyn Db, file: VfsFile) -> PublicSymbolsMap<'_> {
    let _span = tracing::trace_span!("public_symbols_map", ?file).entered();

    let module_scope = root_scope(db, file);
    let symbols = symbol_table(db, module_scope);

    let public_symbols: IndexVec<_, _> = symbols
        .symbol_ids()
        .map(|id| PublicSymbolId::new(db, file, id))
        .collect();

    PublicSymbolsMap {
        symbols: public_symbols,
    }
}

/// Maps [`LocalSymbolId`] of a file's root scope to the corresponding [`PublicSymbolId`] (Salsa ingredients).
#[derive(Eq, PartialEq, Debug)]
pub(crate) struct PublicSymbolsMap<'db> {
    symbols: IndexVec<ScopedSymbolId, PublicSymbolId<'db>>,
}

impl<'db> PublicSymbolsMap<'db> {
    /// Resolve the [`PublicSymbolId`] for the module-level `symbol_id`.
    fn public(&self, symbol_id: ScopedSymbolId) -> PublicSymbolId<'db> {
        self.symbols[symbol_id]
    }
}

/// A cross-module identifier of a scope that can be used as a salsa query parameter.
#[salsa::tracked]
pub struct ScopeId<'db> {
    #[allow(clippy::used_underscore_binding)]
    #[id]
    pub file: VfsFile,
    pub file: File,
    #[id]
    pub file_scope_id: FileScopeId,

@@ -165,9 +100,27 @@ pub struct ScopeId<'db> {
    #[no_eq]
    #[return_ref]
    pub node: NodeWithScopeKind,

    #[no_eq]
    count: countme::Count<ScopeId<'static>>,
}

impl<'db> ScopeId<'db> {
    pub(crate) fn is_function_like(self, db: &'db dyn Db) -> bool {
        // Type parameter scopes behave like function scopes in terms of name resolution; CPython
        // symbol table also uses the term "function-like" for these scopes.
        matches!(
            self.node(db),
            NodeWithScopeKind::ClassTypeParameters(_)
                | NodeWithScopeKind::FunctionTypeParameters(_)
                | NodeWithScopeKind::Function(_)
                | NodeWithScopeKind::ListComprehension(_)
                | NodeWithScopeKind::SetComprehension(_)
                | NodeWithScopeKind::DictComprehension(_)
                | NodeWithScopeKind::GeneratorExpression(_)
        )
    }

    #[cfg(test)]
    pub(crate) fn name(self, db: &'db dyn Db) -> &'db str {
        match self.node(db) {
@@ -177,6 +130,11 @@ impl<'db> ScopeId<'db> {
            }
            NodeWithScopeKind::Function(function)
            | NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
            NodeWithScopeKind::Lambda(_) => "<lambda>",
            NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
            NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
            NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
            NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
        }
    }
}
@@ -186,12 +144,12 @@ impl<'db> ScopeId<'db> {
pub struct FileScopeId;

impl FileScopeId {
    /// Returns the scope id of the Root scope.
    pub fn root() -> Self {
    /// Returns the scope id of the module-global scope.
    pub fn global() -> Self {
        FileScopeId::from_u32(0)
    }

    pub fn to_scope_id(self, db: &dyn Db, file: VfsFile) -> ScopeId<'_> {
    pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
        let index = semantic_index(db, file);
        index.scope_ids_by_scope[self]
    }
@@ -220,19 +178,26 @@ pub enum ScopeKind {
    Annotation,
    Class,
    Function,
    Comprehension,
}

impl ScopeKind {
    pub const fn is_comprehension(self) -> bool {
        matches!(self, ScopeKind::Comprehension)
    }
}

/// Symbol table for a specific [`Scope`].
#[derive(Debug)]
pub struct SymbolTable<'db> {
pub struct SymbolTable {
    /// The symbols in this scope.
    symbols: IndexVec<ScopedSymbolId, Symbol<'db>>,
    symbols: IndexVec<ScopedSymbolId, Symbol>,

    /// The symbols indexed by name.
    symbols_by_name: SymbolMap,
}

impl<'db> SymbolTable<'db> {
impl SymbolTable {
    fn new() -> Self {
        Self {
            symbols: IndexVec::new(),
@@ -244,21 +209,21 @@ impl<'db> SymbolTable<'db> {
        self.symbols.shrink_to_fit();
    }

    pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol<'db> {
    pub(crate) fn symbol(&self, symbol_id: impl Into<ScopedSymbolId>) -> &Symbol {
        &self.symbols[symbol_id.into()]
    }

    pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> + 'db {
    #[allow(unused)]
    pub(crate) fn symbol_ids(&self) -> impl Iterator<Item = ScopedSymbolId> {
        self.symbols.indices()
    }

    pub fn symbols(&self) -> impl Iterator<Item = &Symbol<'db>> {
    pub fn symbols(&self) -> impl Iterator<Item = &Symbol> {
        self.symbols.iter()
    }

    /// Returns the symbol named `name`.
    #[allow(unused)]
    pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol<'db>> {
    pub(crate) fn symbol_by_name(&self, name: &str) -> Option<&Symbol> {
        let id = self.symbol_id_by_name(name)?;
        Some(self.symbol(id))
    }
@@ -282,21 +247,21 @@ impl<'db> SymbolTable<'db> {
    }
}

impl PartialEq for SymbolTable<'_> {
impl PartialEq for SymbolTable {
    fn eq(&self, other: &Self) -> bool {
        // We don't need to compare the symbols_by_name because the name is already captured in `Symbol`.
        self.symbols == other.symbols
    }
}

impl Eq for SymbolTable<'_> {}
impl Eq for SymbolTable {}

#[derive(Debug)]
pub(super) struct SymbolTableBuilder<'db> {
    table: SymbolTable<'db>,
pub(super) struct SymbolTableBuilder {
    table: SymbolTable,
}

impl<'db> SymbolTableBuilder<'db> {
impl SymbolTableBuilder {
    pub(super) fn new() -> Self {
        Self {
            table: SymbolTable::new(),
@@ -307,7 +272,7 @@ impl<'db> SymbolTableBuilder<'db> {
        &mut self,
        name: Name,
        flags: SymbolFlags,
    ) -> ScopedSymbolId {
    ) -> (ScopedSymbolId, bool) {
        let hash = SymbolTable::hash_name(&name);
        let entry = self
            .table
@@ -320,7 +285,7 @@ impl<'db> SymbolTableBuilder<'db> {
                let symbol = &mut self.table.symbols[*entry.key()];
                symbol.insert_flags(flags);

                *entry.key()
                (*entry.key(), false)
            }
            RawEntryMut::Vacant(entry) => {
                let mut symbol = Symbol::new(name);
@@ -330,16 +295,12 @@ impl<'db> SymbolTableBuilder<'db> {
                entry.insert_with_hasher(hash, id, (), |id| {
                    SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
                });
                id
                (id, true)
            }
        }
    }
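
One consequence of the new `(ScopedSymbolId, bool)` return is worth spelling out: the boolean distinguishes a newly inserted symbol from an update to an existing one. A hedged caller sketch (the builder field names are hypothetical; only `add_or_update_symbol` and `UseDefMapBuilder::add_symbol` appear in this diff):

// Hypothetical caller sketch: register only newly created symbols with the
// use-def map, keeping both IndexVecs aligned on ScopedSymbolId.
let (symbol_id, added) = self.symbol_table_builder.add_or_update_symbol(name, flags);
if added {
    self.use_def_map_builder.add_symbol(symbol_id);
}
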

    pub(super) fn add_definition(&mut self, symbol: ScopedSymbolId, definition: Definition<'db>) {
        self.table.symbols[symbol].push_definition(definition);
    }

    pub(super) fn finish(mut self) -> SymbolTable<'db> {
    pub(super) fn finish(mut self) -> SymbolTable {
        self.table.shrink_to_fit();
        self.table
    }
@@ -351,8 +312,13 @@ pub(crate) enum NodeWithScopeRef<'a> {
    Module,
    Class(&'a ast::StmtClassDef),
    Function(&'a ast::StmtFunctionDef),
    Lambda(&'a ast::ExprLambda),
    FunctionTypeParameters(&'a ast::StmtFunctionDef),
    ClassTypeParameters(&'a ast::StmtClassDef),
    ListComprehension(&'a ast::ExprListComp),
    SetComprehension(&'a ast::ExprSetComp),
    DictComprehension(&'a ast::ExprDictComp),
    GeneratorExpression(&'a ast::ExprGenerator),
}

impl NodeWithScopeRef<'_> {
@@ -370,11 +336,26 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Function(function) => {
                NodeWithScopeKind::Function(AstNodeRef::new(module, function))
            }
            NodeWithScopeRef::Lambda(lambda) => {
                NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
            }
            NodeWithScopeRef::FunctionTypeParameters(function) => {
                NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
            }
            NodeWithScopeRef::ClassTypeParameters(class) => {
                NodeWithScopeKind::Class(AstNodeRef::new(module, class))
                NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
            }
            NodeWithScopeRef::ListComprehension(comprehension) => {
                NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::SetComprehension(comprehension) => {
                NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::DictComprehension(comprehension) => {
                NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
            }
            NodeWithScopeRef::GeneratorExpression(generator) => {
                NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
            }
        }
    }
@@ -384,8 +365,13 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Module => ScopeKind::Module,
            NodeWithScopeRef::Class(_) => ScopeKind::Class,
            NodeWithScopeRef::Function(_) => ScopeKind::Function,
            NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
            NodeWithScopeRef::FunctionTypeParameters(_)
            | NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
            NodeWithScopeRef::ListComprehension(_)
            | NodeWithScopeRef::SetComprehension(_)
            | NodeWithScopeRef::DictComprehension(_)
            | NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
        }
    }

@@ -396,12 +382,27 @@ impl NodeWithScopeRef<'_> {
            NodeWithScopeRef::Function(function) => {
                NodeWithScopeKey::Function(NodeKey::from_node(function))
            }
            NodeWithScopeRef::Lambda(lambda) => {
                NodeWithScopeKey::Lambda(NodeKey::from_node(lambda))
            }
            NodeWithScopeRef::FunctionTypeParameters(function) => {
                NodeWithScopeKey::FunctionTypeParameters(NodeKey::from_node(function))
            }
            NodeWithScopeRef::ClassTypeParameters(class) => {
                NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
            }
            NodeWithScopeRef::ListComprehension(comprehension) => {
                NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::SetComprehension(comprehension) => {
                NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::DictComprehension(comprehension) => {
                NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
            }
            NodeWithScopeRef::GeneratorExpression(generator) => {
                NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
            }
        }
    }
}
@@ -414,6 +415,11 @@ pub enum NodeWithScopeKind {
    ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
    Function(AstNodeRef<ast::StmtFunctionDef>),
    FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
    Lambda(AstNodeRef<ast::ExprLambda>),
    ListComprehension(AstNodeRef<ast::ExprListComp>),
    SetComprehension(AstNodeRef<ast::ExprSetComp>),
    DictComprehension(AstNodeRef<ast::ExprDictComp>),
    GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -423,4 +429,9 @@ pub(crate) enum NodeWithScopeKey {
    ClassTypeParameters(NodeKey),
    Function(NodeKey),
    FunctionTypeParameters(NodeKey),
    Lambda(NodeKey),
    ListComprehension(NodeKey),
    SetComprehension(NodeKey),
    DictComprehension(NodeKey),
    GeneratorExpression(NodeKey),
}

crates/red_knot_python_semantic/src/semantic_index/use_def.rs (new file, 375 lines)
@@ -0,0 +1,375 @@
//! Build a map from each use of a symbol to the definitions visible from that use, and the
//! type-narrowing constraints that apply to each definition.
//!
//! Let's take this code sample:
//!
//! ```python
//! x = 1
//! x = 2
//! y = x
//! if flag:
//!     x = 3
//! else:
//!     x = 4
//! z = x
//! ```
//!
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
//! as likely superfluous.)
//!
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
//! must consider both definitions as visible, which means eventually we would (in type inference)
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
//! and `Literal[4]` -- for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! definition(s) is/are visible from that use. In
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//!
//! Another case we need to handle is when a symbol is referenced from a different scope (the most
//! obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//!
//! Technically, public use of a symbol could also occur from any point in control flow of the
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
//! import, or via a function call partway through the local scope that ends up using a symbol from
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
//! means a given symbol could have a different type every place it's referenced throughout the
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
//! execution before its symbols are made visible to other scopes; for instance, most imports will
//! import from a complete module, not a partially-executed module. (We may want to get a little
//! smarter than this in the future, in particular for closures, but for now this is where we
//! start.)
//!
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope; effectively we have an implicit "use" of every symbol at the
//! end of the scope.
//!
//! We also need to know, for a given definition of a symbol, what type-narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     y = x
//! ```
//!
//! At the use of `x` in `y = x`, the visible definition of `x` is `1 if flag else None`, which
//! would infer as the type `Literal[1] | None`. But the constraint `x is not None` dominates this
//! use, which means we can rule out the possibility that `x` is `None` here, which should give us
//! the type `Literal[1]` for this use.
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
//! visible definitions at that use, or at the end of the scope for that symbol, with a list of the
//! dominating constraints for each of those definitions.
//!
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, the values in `definitions_by_use` and `public_definitions` are a [`SymbolState`]
//! struct which uses bit-sets to track definitions and constraints in terms of
//! [`ScopedDefinitionId`] and [`ScopedConstraintId`], which are indices into the `all_definitions`
//! and `all_constraints` indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
//! unnecessarily increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
//! state, and constraints are irrelevant to it, we can represent it more efficiently: we use the
//! `may_be_unbound` boolean on the [`SymbolState`] struct. If this flag is `true`, it means the
//! symbol/use really has one additional visible "definition", which is the unbound state. If this
//! flag is `false`, it means we've eliminated the possibility of unbound: every path we've
//! followed includes a definition for this symbol.
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the `SymbolState` for that symbol. When we hit a use of a
//! symbol, it records the current state for that symbol for that use. When we reach the end of the
//! scope, it records the state for each symbol as the public definitions of that symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no visible
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
//! visible definitions for that use of `x` are just the `x = 2` definition.
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
//! all symbols, which we'll need later. Then we record `flag` as a possible constraint on the
//! currently visible definition (`x = 2`), and go ahead and visit the `if` body. When we see `x =
//! 3`, it replaces `x = 2` (constrained by `flag`) as the sole visible definition of `x`. At the
//! end of the `if` body, we take another snapshot of the currently-visible definitions; we'll call
//! this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
//! sole visible definition of `x`.
//!
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
//!
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
//!
//! (In the future we may have some other questions we want to answer as well, such as "is this
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
use self::symbol_state::{
    ConstraintIdIterator, DefinitionIdWithConstraintsIterator, ScopedConstraintId,
    ScopedDefinitionId, SymbolState,
};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;

mod bitset;
mod symbol_state;

/// Applicable definitions and constraints for every use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    /// Array of [`Definition`] in this scope.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Array of constraints (as [`Expression`]) in this scope.
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// [`SymbolState`] visible at a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// [`SymbolState`] visible at end of scope for each symbol.
    public_definitions: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(
        &self,
        use_id: ScopedUseId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.definitions_by_use[use_id].visible_definitions(),
        }
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound()
    }

    pub(crate) fn public_definitions(
        &self,
        symbol: ScopedSymbolId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.public_definitions[symbol].visible_definitions(),
        }
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound()
    }
}
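
A hedged sketch of how a consumer (for example, type inference) might walk this API; the helper name `narrowed_types_for_use` and the inference steps are hypothetical, while the `UseDefMap` methods and iterator items come from the code above:

// Hypothetical consumer sketch (not part of this diff): walk the visible
// definitions for one use, applying each dominating narrowing constraint.
fn narrowed_types_for_use<'db>(map: &UseDefMap<'db>, use_id: ScopedUseId) {
    for DefinitionWithConstraints { definition, constraints } in map.use_definitions(use_id) {
        // Infer a type for `definition` here, then narrow it by each
        // constraint expression that dominates this use.
        for _constraint in constraints {
            // apply narrowing for `_constraint`
        }
    }
    if map.use_may_be_unbound(use_id) {
        // The union for this use must also include "unbound" (likely surfaced
        // as a possibly-unresolved-reference diagnostic).
    }
}
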

#[derive(Debug)]
pub(crate) struct DefinitionWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    inner: DefinitionIdWithConstraintsIterator<'map>,
}

impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
    type Item = DefinitionWithConstraints<'map, 'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            .map(|def_id_with_constraints| DefinitionWithConstraints {
                definition: self.all_definitions[def_id_with_constraints.definition],
                constraints: ConstraintsIterator {
                    all_constraints: self.all_constraints,
                    constraint_ids: def_id_with_constraints.constraint_ids,
                },
            })
    }
}

impl std::iter::FusedIterator for DefinitionWithConstraintsIterator<'_, '_> {}

pub(crate) struct DefinitionWithConstraints<'map, 'db> {
    pub(crate) definition: Definition<'db>,
    pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}

pub(crate) struct ConstraintsIterator<'map, 'db> {
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Expression<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.constraint_ids
            .next()
            .map(|constraint_id| self.all_constraints[constraint_id])
    }
}

impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Append-only array of [`Definition`]; None is unbound.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Append-only array of constraints (as [`Expression`]).
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self::default()
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(SymbolState::unbound());
        debug_assert_eq!(symbol, new_symbol);
    }

    pub(super) fn record_definition(
        &mut self,
        symbol: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_id = self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = SymbolState::with(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Expression<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for definitions in &mut self.definitions_by_symbol {
            definitions.add_constraint(constraint_id);
        }
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound".
        self.definitions_by_symbol
            .resize(num_symbols, SymbolState::unbound());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());

        let mut snapshot_definitions_iter = snapshot.definitions_by_symbol.into_iter();
        for current in &mut self.definitions_by_symbol {
            if let Some(snapshot) = snapshot_definitions_iter.next() {
                current.merge(snapshot);
            } else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.add_unbound();
            }
        }
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.all_constraints.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            all_constraints: self.all_constraints,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
        }
    }
}
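
A hedged driver sketch tying the builder API above to the if/else walkthrough in the module docs; the `x`, `def_*`, and `flag_expr` bindings are hypothetical stand-ins for the IDs and ingredients that `SemanticIndexBuilder` would actually supply:

// Hypothetical driver sketch (the real caller is SemanticIndexBuilder): the
// builder calls corresponding to `x = 2` followed by `if flag: x = 3` / `else: x = 4`.
let mut builder = UseDefMapBuilder::new();
builder.add_symbol(x);                     // `x` starts as may-be-unbound
builder.record_definition(x, def_x_eq_2);  // x = 2
let pre_if = builder.snapshot();           // state before the branch
builder.record_constraint(flag_expr);      // `flag` recorded against currently visible defs
builder.record_definition(x, def_x_eq_3);  // x = 3
let post_body = builder.snapshot();        // state at end of the `if` body
builder.restore(pre_if);                   // `else` starts from the pre-if state
builder.record_definition(x, def_x_eq_4);  // x = 4
builder.merge(post_body);                  // either branch may have executed
let map = builder.finish();                // visible defs for `x` are now {x = 3, x = 4}
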
@@ -0,0 +1,228 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
    /// Bit-set (in 64-bit blocks) for the first 64 * B entries.
    Inline([u64; B]),

    /// Overflow beyond 64 * B.
    Heap(Vec<u64>),
}

impl<const B: usize> Default for BitSet<B> {
    fn default() -> Self {
        // B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
        // truncating casts to u32 below safe. This would be better as a const assertion, but
        // that's not possible on stable with const generic params. (B should never really be
        // anywhere close to this large.)
        assert!(B * 64 < (u32::MAX as usize));
        // This implementation requires usize >= 32 bits.
        static_assertions::const_assert!(usize::BITS >= 32);
        Self::Inline([0; B])
    }
}

impl<const B: usize> BitSet<B> {
    /// Create and return a new [`BitSet`] with a single `value` inserted.
    pub(super) fn with(value: u32) -> Self {
        let mut bitset = Self::default();
        bitset.insert(value);
        bitset
    }

    /// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
    fn resize(&mut self, value: u32) {
        let num_blocks_needed = (value / 64) + 1;
        match self {
            Self::Inline(blocks) => {
                let mut vec = blocks.to_vec();
                vec.resize(num_blocks_needed as usize, 0);
                *self = Self::Heap(vec);
            }
            Self::Heap(vec) => {
                vec.resize(num_blocks_needed as usize, 0);
            }
        }
    }

    fn blocks_mut(&mut self) -> &mut [u64] {
        match self {
            Self::Inline(blocks) => blocks.as_mut_slice(),
            Self::Heap(blocks) => blocks.as_mut_slice(),
        }
    }

    fn blocks(&self) -> &[u64] {
        match self {
            Self::Inline(blocks) => blocks.as_slice(),
            Self::Heap(blocks) => blocks.as_slice(),
        }
    }

    /// Insert a value into the [`BitSet`].
    ///
    /// Return true if the value was newly inserted, false if already present.
    pub(super) fn insert(&mut self, value: u32) -> bool {
        let value_usize = value as usize;
        let (block, index) = (value_usize / 64, value_usize % 64);
        if block >= self.blocks().len() {
            self.resize(value);
        }
        let blocks = self.blocks_mut();
        let missing = blocks[block] & (1 << index) == 0;
        blocks[block] |= 1 << index;
        missing
    }

    /// Intersect in-place with another [`BitSet`].
    pub(super) fn intersect(&mut self, other: &BitSet<B>) {
        let my_blocks = self.blocks_mut();
        let other_blocks = other.blocks();
        let min_len = my_blocks.len().min(other_blocks.len());
        for i in 0..min_len {
            my_blocks[i] &= other_blocks[i];
        }
        for block in my_blocks.iter_mut().skip(min_len) {
            *block = 0;
        }
    }

    /// Return an iterator over the values (in ascending order) in this [`BitSet`].
    pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
        let blocks = self.blocks();
        BitSetIterator {
            blocks,
            current_block_index: 0,
            current_block: blocks[0],
        }
    }
}
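
A small usage sketch of the inline-to-heap spill described above; it mirrors the `iter_overflow` test further down and assumes only the `BitSet` API defined in this file:

// Sketch: values below 64 * B live in the inline array; the first value beyond
// that range converts the set to the Heap representation.
let mut set = BitSet::<1>::with(3);   // bit 3 of inline block 0
set.insert(40);                       // still Inline: 40 / 64 == block 0
assert!(matches!(set, BitSet::Inline(_)));
set.insert(70);                       // 70 / 64 == block 1, beyond B == 1 inline blocks
assert!(matches!(set, BitSet::Heap(_)));
assert_eq!(set.iter().collect::<Vec<_>>(), vec![3, 40, 70]);
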
|
||||
/// Iterator over values in a [`BitSet`].
|
||||
#[derive(Debug)]
|
||||
pub(super) struct BitSetIterator<'a, const B: usize> {
|
||||
/// The blocks we are iterating over.
|
||||
blocks: &'a [u64],
|
||||
|
||||
/// The index of the block we are currently iterating through.
|
||||
current_block_index: usize,
|
||||
|
||||
/// The block we are currently iterating through (and zeroing as we go.)
|
||||
current_block: u64,
|
||||
}
|
||||
|
||||
impl<const B: usize> Iterator for BitSetIterator<'_, B> {
|
||||
type Item = u32;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while self.current_block == 0 {
|
||||
if self.current_block_index + 1 >= self.blocks.len() {
|
||||
return None;
|
||||
}
|
||||
self.current_block_index += 1;
|
||||
self.current_block = self.blocks[self.current_block_index];
|
||||
}
|
||||
let lowest_bit_set = self.current_block.trailing_zeros();
|
||||
// reset the lowest set bit, without a data dependency on `lowest_bit_set`
|
||||
self.current_block &= self.current_block.wrapping_sub(1);
|
||||
// SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
|
||||
// than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
|
||||
// current_block_index` and the final value here must fit in u32.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
Some(lowest_bit_set + (64 * self.current_block_index) as u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::BitSet;
|
||||
|
||||
fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
|
||||
assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn iter() {
|
||||
let mut b = BitSet::<1>::with(3);
|
||||
b.insert(27);
|
||||
b.insert(6);
|
||||
assert!(matches!(b, BitSet::Inline(_)));
|
||||
assert_bitset(&b, &[3, 6, 27]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn iter_overflow() {
|
||||
let mut b = BitSet::<1>::with(140);
|
||||
b.insert(100);
|
||||
b.insert(129);
|
||||
assert!(matches!(b, BitSet::Heap(_)));
|
||||
assert_bitset(&b, &[100, 129, 140]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(23);
|
||||
b2.insert(5);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_mixed_1() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(89);
|
||||
b2.insert(5);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_mixed_2() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(23);
|
||||
b2.insert(89);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_heap() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(89);
|
||||
b2.insert(90);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_heap_2() {
|
||||
let mut b1 = BitSet::<1>::with(89);
|
||||
let mut b2 = BitSet::<1>::with(89);
|
||||
b1.insert(91);
|
||||
b2.insert(90);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[89]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_blocks() {
|
||||
let mut b = BitSet::<2>::with(120);
|
||||
b.insert(45);
|
||||
assert!(matches!(b, BitSet::Inline(_)));
|
||||
assert_bitset(&b, &[45, 120]);
|
||||
}
|
||||
}
|
||||
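An aside on the iterator above (an illustration, not part of the diff): it relies on two classic bit tricks, `trailing_zeros` to find the lowest set bit and `x & (x - 1)` to clear it. A self-contained sketch of the same loop, specialized to a single `u64` block:

```rust
/// Yield the indices of all set bits in `block`, in ascending order, using the
/// same technique as `BitSetIterator::next` above, specialized to one u64.
fn set_bits(mut block: u64) -> impl Iterator<Item = u32> {
    std::iter::from_fn(move || {
        if block == 0 {
            return None;
        }
        let lowest = block.trailing_zeros(); // index of the lowest set bit
        block &= block.wrapping_sub(1); // clear that bit, no dependency on `lowest`
        Some(lowest)
    })
}

fn main() {
    // Bits 3, 6, and 27 set in one block, matching the `iter` test above.
    let block = (1u64 << 3) | (1 << 6) | (1 << 27);
    assert_eq!(set_bits(block).collect::<Vec<_>>(), vec![3, 6, 27]);

    // The block/index addressing used by `insert`: value 129 lives in
    // block 2 (129 / 64) at bit 1 (129 % 64).
    assert_eq!((129 / 64, 129 % 64), (2, 1));
}
```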
@@ -0,0 +1,374 @@
//! Track visible definitions of a symbol, and applicable constraints per definition.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between definitions and constraints, not just a single
//! set of currently dominating constraints (where "dominating" means "control flow must have
//! passed through it to reach this point"), because we can have dominating constraints that apply
//! to some definitions but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     if flag2:
//!         x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! definition of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each definition, an index into a list of dominating constraints,
//! either, because we can have definitions which are still visible, but subject to constraints
//! that are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//!     x = 1 if flag2 else None
//!     assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` definition, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of definitions and constraints when
//! needed.
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;

/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;

/// A newtype-index for a constraint expression in a particular scope.
#[newtype_index]
pub(super) struct ScopedConstraintId;

/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_DEFINITION_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing visible definitions of a symbol in a scope.
type Definitions = BitSet<INLINE_DEFINITION_BLOCKS>;
type DefinitionsIterator<'a> = BitSetIterator<'a, INLINE_DEFINITION_BLOCKS>;

/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;

/// Can keep inline this many visible definitions per symbol at a given time; more will go to heap.
const INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL: usize = 4;

/// One [`BitSet`] of applicable [`ScopedConstraintId`] per visible definition.
type InlineConstraintArray =
    [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;

/// Visible definitions and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
    /// [`BitSet`]: which [`ScopedDefinitionId`] are visible for this symbol?
    visible_definitions: Definitions,

    /// For each definition, which [`ScopedConstraintId`] apply?
    ///
    /// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
    /// definition in `visible_definitions`.
    constraints: Constraints,

    /// Could the symbol be unbound at this point?
    may_be_unbound: bool,
}

/// A single [`ScopedDefinitionId`] with an iterator of its applicable [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraints<'a> {
    pub(super) definition: ScopedDefinitionId,
    pub(super) constraint_ids: ConstraintIdIterator<'a>,
}

impl SymbolState {
    /// Return a new [`SymbolState`] representing an unbound symbol.
    pub(super) fn unbound() -> Self {
        Self {
            visible_definitions: Definitions::default(),
            constraints: Constraints::default(),
            may_be_unbound: true,
        }
    }

    /// Return a new [`SymbolState`] representing a symbol with a single visible definition.
    pub(super) fn with(definition_id: ScopedDefinitionId) -> Self {
        let mut constraints = Constraints::with_capacity(1);
        constraints.push(BitSet::default());
        Self {
            visible_definitions: Definitions::with(definition_id.into()),
            constraints,
            may_be_unbound: false,
        }
    }

    /// Add Unbound as a possibility for this symbol.
    pub(super) fn add_unbound(&mut self) {
        self.may_be_unbound = true;
    }

    /// Add given constraint to all currently-visible definitions.
    pub(super) fn add_constraint(&mut self, constraint_id: ScopedConstraintId) {
        for bitset in &mut self.constraints {
            bitset.insert(constraint_id.into());
        }
    }

    /// Merge another [`SymbolState`] into this one.
    pub(super) fn merge(&mut self, b: SymbolState) {
        let mut a = Self {
            visible_definitions: Definitions::default(),
            constraints: Constraints::default(),
            may_be_unbound: self.may_be_unbound || b.may_be_unbound,
        };
        std::mem::swap(&mut a, self);
        let mut a_defs_iter = a.visible_definitions.iter();
        let mut b_defs_iter = b.visible_definitions.iter();
        let mut a_constraints_iter = a.constraints.into_iter();
        let mut b_constraints_iter = b.constraints.into_iter();

        let mut opt_a_def: Option<u32> = a_defs_iter.next();
        let mut opt_b_def: Option<u32> = b_defs_iter.next();

        // Iterate through the definitions from `a` and `b`, always processing the lower definition
        // ID first, and pushing each definition onto the merged `SymbolState` with its
        // constraints. If a definition is found in both `a` and `b`, push it with the intersection
        // of the constraints from the two paths; a constraint that applies from only one possible
        // path is irrelevant.

        // Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
        let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
            merged.visible_definitions.insert(def);
            // SAFETY: we only ever create SymbolState with either no definitions and no constraint
            // bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
            // `::merge` always pushes one definition and one constraint bitset together (just
            // below), so the number of definitions and the number of constraint bitsets can never
            // get out of sync.
            let constraints = constraints_iter
                .next()
                .expect("definitions and constraints length mismatch");
            merged.constraints.push(constraints);
        };

        loop {
            match (opt_a_def, opt_b_def) {
                (Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
                    std::cmp::Ordering::Less => {
                        // Next definition ID is only in `a`, push it to `self` and advance `a`.
                        push(a_def, &mut a_constraints_iter, self);
                        opt_a_def = a_defs_iter.next();
                    }
                    std::cmp::Ordering::Greater => {
                        // Next definition ID is only in `b`, push it to `self` and advance `b`.
                        push(b_def, &mut b_constraints_iter, self);
                        opt_b_def = b_defs_iter.next();
                    }
                    std::cmp::Ordering::Equal => {
                        // Next definition is in both; push to `self` and intersect constraints.
                        push(a_def, &mut b_constraints_iter, self);
                        // SAFETY: we only ever create SymbolState with either no definitions and
                        // no constraint bitsets (`::unbound`) or one definition and one constraint
                        // bitset (`::with`), and `::merge` always pushes one definition and one
                        // constraint bitset together (just below), so the number of definitions
                        // and the number of constraint bitsets can never get out of sync.
                        let a_constraints = a_constraints_iter
                            .next()
                            .expect("definitions and constraints length mismatch");
                        // If the same definition is visible through both paths, any constraint
                        // that applies on only one path is irrelevant to the resulting type from
                        // unioning the two paths, so we intersect the constraints.
                        self.constraints
                            .last_mut()
                            .unwrap()
                            .intersect(&a_constraints);
                        opt_a_def = a_defs_iter.next();
                        opt_b_def = b_defs_iter.next();
                    }
                },
                (Some(a_def), None) => {
                    // We've exhausted `b`, just push the def from `a` and move on to the next.
                    push(a_def, &mut a_constraints_iter, self);
                    opt_a_def = a_defs_iter.next();
                }
                (None, Some(b_def)) => {
                    // We've exhausted `a`, just push the def from `b` and move on to the next.
                    push(b_def, &mut b_constraints_iter, self);
                    opt_b_def = b_defs_iter.next();
                }
                (None, None) => break,
            }
        }
    }

    /// Get iterator over visible definitions with constraints.
    pub(super) fn visible_definitions(&self) -> DefinitionIdWithConstraintsIterator {
        DefinitionIdWithConstraintsIterator {
            definitions: self.visible_definitions.iter(),
            constraints: self.constraints.iter(),
        }
    }

    /// Could the symbol be unbound?
    pub(super) fn may_be_unbound(&self) -> bool {
        self.may_be_unbound
    }
}

/// The default state of a symbol (if we've seen no definitions of it) is unbound.
impl Default for SymbolState {
    fn default() -> Self {
        SymbolState::unbound()
    }
}

#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraintsIterator<'a> {
    definitions: DefinitionsIterator<'a>,
    constraints: ConstraintsIterator<'a>,
}

impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
    type Item = DefinitionIdWithConstraints<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        match (self.definitions.next(), self.constraints.next()) {
            (None, None) => None,
            (Some(def), Some(constraints)) => Some(DefinitionIdWithConstraints {
                definition: ScopedDefinitionId::from_u32(def),
                constraint_ids: ConstraintIdIterator {
                    wrapped: constraints.iter(),
                },
            }),
            // SAFETY: see above.
            _ => unreachable!("definitions and constraints length mismatch"),
        }
    }
}

impl std::iter::FusedIterator for DefinitionIdWithConstraintsIterator<'_> {}

#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
    wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
}

impl Iterator for ConstraintIdIterator<'_> {
    type Item = ScopedConstraintId;

    fn next(&mut self) -> Option<Self::Item> {
        self.wrapped.next().map(ScopedConstraintId::from_u32)
    }
}

impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}

#[cfg(test)]
mod tests {
    use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};

    impl SymbolState {
        pub(crate) fn assert(&self, may_be_unbound: bool, expected: &[&str]) {
            assert_eq!(self.may_be_unbound(), may_be_unbound);
            let actual = self
                .visible_definitions()
                .map(|def_id_with_constraints| {
                    format!(
                        "{}<{}>",
                        def_id_with_constraints.definition.as_u32(),
                        def_id_with_constraints
                            .constraint_ids
                            .map(ScopedConstraintId::as_u32)
                            .map(|idx| idx.to_string())
                            .collect::<Vec<_>>()
                            .join(", ")
                    )
                })
                .collect::<Vec<_>>();
            assert_eq!(actual, expected);
        }
    }

    #[test]
    fn unbound() {
        let cd = SymbolState::unbound();

        cd.assert(true, &[]);
    }

    #[test]
    fn with() {
        let cd = SymbolState::with(ScopedDefinitionId::from_u32(0));

        cd.assert(false, &["0<>"]);
    }

    #[test]
    fn add_unbound() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_unbound();

        cd.assert(true, &["0<>"]);
    }

    #[test]
    fn add_constraint() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_constraint(ScopedConstraintId::from_u32(0));

        cd.assert(false, &["0<0>"]);
    }

    #[test]
    fn merge() {
        // merging the same definition with the same constraint keeps the constraint
        let mut cd0a = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0a.add_constraint(ScopedConstraintId::from_u32(0));

        let mut cd0b = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0b.add_constraint(ScopedConstraintId::from_u32(0));

        cd0a.merge(cd0b);
        let mut cd0 = cd0a;
        cd0.assert(false, &["0<0>"]);

        // merging the same definition with differing constraints drops all constraints
        let mut cd1a = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1a.add_constraint(ScopedConstraintId::from_u32(1));

        let mut cd1b = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1b.add_constraint(ScopedConstraintId::from_u32(2));

        cd1a.merge(cd1b);
        let cd1 = cd1a;
        cd1.assert(false, &["1<>"]);

        // merging a constrained definition with unbound keeps both
        let mut cd2a = SymbolState::with(ScopedDefinitionId::from_u32(2));
        cd2a.add_constraint(ScopedConstraintId::from_u32(3));

        let cd2b = SymbolState::unbound();

        cd2a.merge(cd2b);
        let cd2 = cd2a;
        cd2.assert(true, &["2<3>"]);

        // merging different definitions keeps them each with their existing constraints
        cd0.merge(cd2);
        let cd = cd0;
        cd.assert(true, &["0<0>", "2<3>"]);
    }
}
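The loop in `merge` is a classic two-pointer merge over two ascending streams. A self-contained sketch of the same strategy (an illustration, not from the diff), with plain vectors and `HashSet`s standing in for the bitset-backed `Definitions` and `Constraints`:

```rust
use std::collections::HashSet;

/// Walk two sorted lists of (definition, constraints) pairs in ascending
/// definition order. One-sided entries are kept as-is; when a definition
/// occurs on both sides, only the constraints that hold on *both* paths
/// survive, mirroring `SymbolState::merge` above.
fn merge(
    a: Vec<(u32, HashSet<u32>)>,
    b: Vec<(u32, HashSet<u32>)>,
) -> Vec<(u32, HashSet<u32>)> {
    let (mut a, mut b) = (a.into_iter().peekable(), b.into_iter().peekable());
    let mut merged = Vec::new();
    loop {
        match (a.peek(), b.peek()) {
            (Some((ad, _)), Some((bd, _))) if ad < bd => merged.push(a.next().unwrap()),
            (Some((ad, _)), Some((bd, _))) if ad > bd => merged.push(b.next().unwrap()),
            (Some(_), Some(_)) => {
                // Same definition on both paths: intersect the constraint sets.
                let (def, ac) = a.next().unwrap();
                let (_, bc) = b.next().unwrap();
                merged.push((def, ac.intersection(&bc).cloned().collect()));
            }
            (Some(_), None) => merged.push(a.next().unwrap()),
            (None, Some(_)) => merged.push(b.next().unwrap()),
            (None, None) => return merged,
        }
    }
}

fn main() {
    // Definition 0 carries constraint 0 on one path, constraints {0, 1} on the
    // other; only constraint 0 survives. Definition 2 is one-sided and kept.
    let a = vec![(0, HashSet::from([0])), (2, HashSet::from([3]))];
    let b = vec![(0, HashSet::from([0, 1]))];
    let out = merge(a, b);
    assert_eq!(out[0], (0, HashSet::from([0])));
    assert_eq!(out[1], (2, HashSet::from([3])));
}
```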
@@ -1,38 +1,46 @@
-use red_knot_module_resolver::{resolve_module, Module, ModuleName};
-use ruff_db::vfs::VfsFile;
+use ruff_db::files::{File, FilePath};
+use ruff_db::source::line_index;
 use ruff_python_ast as ast;
-use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
+use ruff_python_ast::{Expr, ExpressionRef};
+use ruff_source_file::LineIndex;

+use crate::module_name::ModuleName;
+use crate::module_resolver::{resolve_module, Module};
 use crate::semantic_index::ast_ids::HasScopedAstId;
-use crate::semantic_index::symbol::PublicSymbolId;
-use crate::semantic_index::{public_symbol, semantic_index};
-use crate::types::{infer_types, public_symbol_ty, Type, TypingContext};
+use crate::semantic_index::semantic_index;
+use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
 use crate::Db;

 pub struct SemanticModel<'db> {
     db: &'db dyn Db,
-    file: VfsFile,
+    file: File,
 }

 impl<'db> SemanticModel<'db> {
-    pub fn new(db: &'db dyn Db, file: VfsFile) -> Self {
+    pub fn new(db: &'db dyn Db, file: File) -> Self {
         Self { db, file }
     }

     // TODO we don't actually want to expose the Db directly to lint rules, but we need to find a
     // solution for exposing information from types
     pub fn db(&self) -> &dyn Db {
         self.db
     }

+    pub fn file_path(&self) -> &FilePath {
+        self.file.path(self.db)
+    }
+
+    pub fn line_index(&self) -> LineIndex {
+        line_index(self.db.upcast(), self.file)
+    }
+
     pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
-        resolve_module(self.db.upcast(), module_name)
+        resolve_module(self.db, module_name)
     }

-    pub fn public_symbol(&self, module: &Module, symbol_name: &str) -> Option<PublicSymbolId<'db>> {
-        public_symbol(self.db, module.file(), symbol_name)
-    }
-
-    pub fn public_symbol_ty(&self, symbol: PublicSymbolId<'db>) -> Type<'db> {
-        public_symbol_ty(self.db, symbol)
-    }
-
-    pub fn typing_context(&self) -> TypingContext<'db, '_> {
-        TypingContext::global(self.db)
+    pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
+        global_symbol_ty_by_name(self.db, module.file(), symbol_name)
     }
 }

@@ -51,7 +59,7 @@ impl HasTy for ast::ExpressionRef<'_> {
         let scope = file_scope.to_scope_id(model.db, model.file);

         let expression_id = self.scoped_ast_id(model.db, scope);
-        infer_types(model.db, scope).expression_ty(expression_id)
+        infer_scope_types(model.db, scope).expression_ty(expression_id)
     }
 }

@@ -139,74 +147,61 @@ impl HasTy for ast::Expr {
     }
 }

-impl HasTy for ast::StmtFunctionDef {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
-    }
-}
-
-impl HasTy for StmtClassDef {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
-    }
-}
-
-impl HasTy for ast::Alias {
-    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
-        let index = semantic_index(model.db, model.file);
-        let definition = index.definition(self);
-
-        let scope = definition.scope(model.db).to_scope_id(model.db, model.file);
-        let types = infer_types(model.db, scope);
-
-        types.definition_ty(definition)
-    }
-}
+macro_rules! impl_definition_has_ty {
+    ($ty: ty) => {
+        impl HasTy for $ty {
+            #[inline]
+            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
+                let index = semantic_index(model.db, model.file);
+                let definition = index.definition(self);
+                definition_ty(model.db, definition)
+            }
+        }
+    };
+}
+
+impl_definition_has_ty!(ast::StmtFunctionDef);
+impl_definition_has_ty!(ast::StmtClassDef);
+impl_definition_has_ty!(ast::Alias);
+impl_definition_has_ty!(ast::Parameter);
+impl_definition_has_ty!(ast::ParameterWithDefault);

 #[cfg(test)]
 mod tests {
-    use red_knot_module_resolver::{set_module_resolution_settings, ModuleResolutionSettings};
-    use ruff_db::file_system::FileSystemPathBuf;
+    use ruff_db::files::system_path_to_file;
     use ruff_db::parsed::parsed_module;
-    use ruff_db::vfs::system_path_to_file;
+    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

     use crate::db::tests::TestDb;
+    use crate::program::{Program, SearchPathSettings};
+    use crate::python_version::PythonVersion;
     use crate::types::Type;
-    use crate::{HasTy, SemanticModel};
+    use crate::{HasTy, ProgramSettings, SemanticModel};

-    fn setup_db() -> TestDb {
+    fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
         let mut db = TestDb::new();
-        set_module_resolution_settings(
-            &mut db,
-            ModuleResolutionSettings {
-                extra_paths: vec![],
-                workspace_root: FileSystemPathBuf::from("/src"),
-                site_packages: None,
-                custom_typeshed: None,
-            },
-        );
+        db.write_files(files)?;

-        db
+        Program::from_settings(
+            &db,
+            ProgramSettings {
+                target_version: PythonVersion::default(),
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: SystemPathBuf::from("/src"),
+                    site_packages: vec![],
+                    custom_typeshed: None,
+                },
+            },
+        )?;
+
+        Ok(db)
     }

     #[test]
     fn function_ty() -> anyhow::Result<()> {
-        let db = setup_db();
+        let db = setup_db([("/src/foo.py", "def test(): pass")])?;

-        db.memory_file_system()
-            .write_file("/src/foo.py", "def test(): pass")?;
         let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

         let ast = parsed_module(&db, foo);

@@ -222,10 +217,8 @@ mod tests {

     #[test]
     fn class_ty() -> anyhow::Result<()> {
-        let db = setup_db();
+        let db = setup_db([("/src/foo.py", "class Test: pass")])?;

-        db.memory_file_system()
-            .write_file("/src/foo.py", "class Test: pass")?;
         let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

         let ast = parsed_module(&db, foo);

@@ -241,12 +234,11 @@ mod tests {

     #[test]
     fn alias_ty() -> anyhow::Result<()> {
-        let db = setup_db();
-
-        db.memory_file_system().write_files([
+        let db = setup_db([
             ("/src/foo.py", "class Test: pass"),
             ("/src/bar.py", "from foo import Test"),
         ])?;

         let bar = system_path_to_file(&db, "/src/bar.py").unwrap();

         let ast = parsed_module(&db, bar);
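For reference (an illustration, not part of the diff): each `impl_definition_has_ty!` invocation in the hunk above expands to an ordinary trait impl. Written out by hand, the `ast::Alias` case is equivalent to:

```rust
impl HasTy for ast::Alias {
    #[inline]
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        // Identical body for every definition-backed node: look up the node's
        // definition in the semantic index, then ask for that definition's type.
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
    }
}
```

The macro exists so the five node kinds share one body; adding a sixth definition-backed node is a one-line change.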
File diff suppressed because it is too large

crates/red_knot_python_semantic/src/types/builder.rs (new file, 471 lines)
@@ -0,0 +1,471 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//!   * No single-element union types (should just be the contained type instead.)
//!   * No single-positive-element intersection types. Single-negative-element are OK, we don't
//!     have a standalone negation type so there's no other representation for this.
//!   * The same type should never appear more than once in a union or intersection. (This should
//!     be expanded to cover subtyping -- see below -- but for now we only implement it for type
//!     identity.)
//!   * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//!     than a union-of-intersections. Unions cannot contain other unions (the inner union just
//!     flattens into the outer one), intersections cannot contain other intersections (also
//!     flattens), and intersections cannot contain unions (the intersection distributes over the
//!     union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//!   * No type in a union can be a subtype of any other type in the union (just eliminate the
//!     subtype from the union).
//!   * No type in an intersection can be a supertype of any other type in the intersection (just
//!     eliminate the supertype from the intersection).
//!   * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}

impl<'db> UnionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
        }
    }

    /// Adds a type to this union.
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(&union.elements(self.db));
            }
            Type::Never => {}
            _ => {
                self.elements.insert(ty);
            }
        }

        self
    }

    pub(crate) fn build(self) -> Type<'db> {
        match self.elements.len() {
            0 => Type::Never,
            1 => self.elements[0],
            _ => Type::Union(UnionType::new(self.db, self.elements)),
        }
    }
}

#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    db: &'db dyn Db,
}

impl<'db> IntersectionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![InnerIntersectionBuilder::new()],
        }
    }

    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![],
        }
    }

    pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF), we distribute the union and get `(T1 & T2 & T3) | (T1 & T2 & T4)`. If
            // `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)` and we add
            // `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 & T2 & T6) |
            // (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            let mut builder = UnionBuilder::new(self.db);
            for inner in self.intersections {
                builder = builder.add(inner.build(self.db));
            }
            builder.build()
        }
    }
}

#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    positive: FxOrderSet<Type<'db>>,
    negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            Type::Never => {}
            Type::Unbound => {}
            _ => {
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }

    fn simplify(&mut self) {
        // TODO this should be generalized based on subtyping, for now we just handle a few cases

        // Never is a subtype of all types
        if self.positive.contains(&Type::Never) {
            self.positive.retain(Type::is_never);
            self.negative.clear();
        }

        if self.positive.contains(&Type::Unbound) {
            self.positive.retain(Type::is_unbound);
            self.negative.clear();
        }

        // None intersects only with object
        for pos in &self.positive {
            if let Type::Instance(_) = pos {
                // could be `object` type
            } else {
                self.negative.remove(&Type::None);
                break;
            }
        }
    }

    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use crate::db::tests::TestDb;

    fn setup_db() -> TestDb {
        TestDb::new()
    }

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().collect()
        }
    }

    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }

    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().collect()
        }
    }

    #[test]
    fn build_intersection() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
    }

    #[test]
    fn build_intersection_flatten_positive() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
    }

    #[test]
    fn build_intersection_flatten_negative() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
    }

    #[test]
    fn intersection_distributes_over_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

        let Type::Union(union) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);
        assert_eq!(i1.pos_vec(&db), &[ta, t1]);
    }

    #[test]
    fn build_intersection_self_negation() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::None)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_negative_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::Never)
            .build();

        assert_eq!(ty, Type::None);
    }

    #[test]
    fn build_intersection_simplify_positive_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_positive(Type::Never)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_positive_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::Unbound);
    }

    #[test]
    fn build_intersection_simplify_negative_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }

    #[test]
    fn build_intersection_simplify_negative_none() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::None)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }
}
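The distribution step in `add_positive` is plain Boolean algebra: `&` distributes over `|`, which is what keeps the structure in DNF. A standalone sketch of that one step (an illustration, not the crate's API), modeling a DNF formula as a union (outer `Vec`) of intersections (inner `Vec`s) of opaque type names:

```rust
/// Distribute an added union over a DNF formula (a union of intersections):
/// `dnf & (t1 | t2 | ...)` is the union, over each `t`, of every intersection
/// in `dnf` extended with `t`. Mirrors `IntersectionBuilder::add_positive`.
fn add_union_to_dnf(
    dnf: Vec<Vec<&'static str>>,
    union: &[&'static str],
) -> Vec<Vec<&'static str>> {
    let mut result = Vec::new();
    for &elem in union {
        for inter in &dnf {
            let mut new_inter = inter.clone();
            new_inter.push(elem);
            result.push(new_inter);
        }
    }
    result
}

fn main() {
    // (T1 & T2) with (T3 | T4) added: (T1 & T2 & T3) | (T1 & T2 & T4),
    // matching the distribution described in `add_positive` above.
    let dnf = vec![vec!["T1", "T2"]];
    let out = add_union_to_dnf(dnf, &["T3", "T4"]);
    assert_eq!(out, vec![vec!["T1", "T2", "T3"], vec!["T1", "T2", "T4"]]);
}
```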
crates/red_knot_python_semantic/src/types/diagnostic.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
    // TODO: Don't use string keys for rules
    pub(super) rule: String,
    pub(super) message: String,
    pub(super) range: TextRange,
    pub(super) file: File,
}

impl TypeCheckDiagnostic {
    pub fn rule(&self) -> &str {
        &self.rule
    }

    pub fn message(&self) -> &str {
        &self.message
    }

    pub fn file(&self) -> File {
        self.file
    }
}

impl Ranged for TypeCheckDiagnostic {
    fn range(&self) -> TextRange {
        self.range
    }
}

/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider making
/// [`TypeCheckDiagnostic`] a Salsa struct to have them arena-allocated (once the Tables
/// refactor is done). Using a Salsa struct does have the downside that it leaks the Salsa
/// dependency into diagnostics, and each Salsa struct comes with an overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
    inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}

impl TypeCheckDiagnostics {
    pub fn new() -> Self {
        Self { inner: Vec::new() }
    }

    pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
        self.inner.push(Arc::new(diagnostic));
    }

    pub(crate) fn shrink_to_fit(&mut self) {
        self.inner.shrink_to_fit();
    }
}

impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
    }
}

impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner.extend(iter);
    }
}

impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner
            .extend(iter.into_iter().map(std::sync::Arc::clone));
    }
}

impl std::fmt::Debug for TypeCheckDiagnostics {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.inner.fmt(f)
    }
}

impl Deref for TypeCheckDiagnostics {
    type Target = [std::sync::Arc<TypeCheckDiagnostic>];

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl IntoIterator for TypeCheckDiagnostics {
    type Item = Arc<TypeCheckDiagnostic>;
    type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.into_iter()
    }
}

impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
    type Item = &'a Arc<TypeCheckDiagnostic>;
    type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.iter()
    }
}
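The `Arc` wrapper plus the borrowed `Extend` impl is what lets per-expression diagnostics be re-aggregated into a per-file collection without deep-cloning the messages. A standalone analogue (a sketch, not the crate's API) showing that only the reference count moves:

```rust
use std::sync::Arc;

// Standalone analogue of `TypeCheckDiagnostics` above: a collection of Arc'd
// diagnostics that absorbs borrowed collections by cloning only the Arc
// (a refcount bump), never the underlying diagnostic payload.
#[derive(Debug, Default)]
struct Diagnostics {
    inner: Vec<Arc<String>>,
}

impl<'a> Extend<&'a Arc<String>> for Diagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<String>>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(Arc::clone));
    }
}

fn main() {
    let scope_diags = Diagnostics {
        inner: vec![Arc::new("example diagnostic message".to_string())],
    };
    let mut file_diags = Diagnostics::default();
    // Re-aggregate without copying the message string itself.
    file_diags.extend(&scope_diags.inner);
    assert_eq!(Arc::strong_count(&scope_diags.inner[0]), 2);
}
```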
@@ -2,18 +2,19 @@

 use std::fmt::{Display, Formatter};

-use crate::types::{IntersectionType, Type, TypingContext, UnionType};
+use crate::types::{IntersectionType, Type, UnionType};
+use crate::Db;

-impl Type<'_> {
-    pub fn display<'a>(&'a self, context: &'a TypingContext) -> DisplayType<'a> {
-        DisplayType { ty: self, context }
+impl<'db> Type<'db> {
+    pub fn display(&'db self, db: &'db dyn Db) -> DisplayType<'db> {
+        DisplayType { ty: self, db }
     }
 }

 #[derive(Copy, Clone)]
-pub struct DisplayType<'a> {
-    ty: &'a Type<'a>,
-    context: &'a TypingContext<'a, 'a>,
+pub struct DisplayType<'db> {
+    ty: &'db Type<'db>,
+    db: &'db dyn Db,
 }

 impl Display for DisplayType<'_> {

@@ -24,43 +25,19 @@ impl Display for DisplayType<'_> {
             Type::Unknown => f.write_str("Unknown"),
             Type::Unbound => f.write_str("Unbound"),
             Type::None => f.write_str("None"),
-            Type::Module(module_id) => {
-                write!(
-                    f,
-                    "<module '{:?}'>",
-                    module_id
-                        .scope
-                        .file(self.context.db)
-                        .path(self.context.db.upcast())
-                )
+            Type::Module(file) => {
+                write!(f, "<module '{:?}'>", file.path(self.db))
             }
             // TODO functions and classes should display using a fully qualified name
-            Type::Class(class_id) => {
-                let class = class_id.lookup(self.context);
-
-                f.write_str("Literal[")?;
-                f.write_str(class.name())?;
-                f.write_str("]")
-            }
-            Type::Instance(class_id) => {
-                let class = class_id.lookup(self.context);
-                f.write_str(class.name())
-            }
-            Type::Function(function_id) => {
-                let function = function_id.lookup(self.context);
-                f.write_str(function.name())
-            }
-            Type::Union(union_id) => {
-                let union = union_id.lookup(self.context);
-
-                union.display(self.context).fmt(f)
-            }
-            Type::Intersection(intersection_id) => {
-                let intersection = intersection_id.lookup(self.context);
-
-                intersection.display(self.context).fmt(f)
-            }
+            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
+            Type::Instance(class) => f.write_str(&class.name(self.db)),
+            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
+            Type::Union(union) => union.display(self.db).fmt(f),
+            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
             Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
             Type::BooleanLiteral(boolean) => {
                 write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
             }
         }
     }
 }

@@ -71,15 +48,15 @@ impl std::fmt::Debug for DisplayType<'_> {
     }
 }

-impl UnionType<'_> {
-    fn display<'a>(&'a self, context: &'a TypingContext<'a, 'a>) -> DisplayUnionType<'a> {
-        DisplayUnionType { context, ty: self }
+impl<'db> UnionType<'db> {
+    fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
+        DisplayUnionType { db, ty: self }
     }
 }

-struct DisplayUnionType<'a> {
-    ty: &'a UnionType<'a>,
-    context: &'a TypingContext<'a, 'a>,
+struct DisplayUnionType<'db> {
+    ty: &'db UnionType<'db>,
+    db: &'db dyn Db,
 }

 impl Display for DisplayUnionType<'_> {

@@ -87,7 +64,7 @@ impl Display for DisplayUnionType<'_> {
         let union = self.ty;

         let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
-            .elements
+            .elements(self.db)
             .iter()
             .copied()
             .partition(|ty| matches!(ty, Type::IntLiteral(_)));

@@ -121,7 +98,7 @@ impl Display for DisplayUnionType<'_> {
                 f.write_str(" | ")?;
             };
             first = false;
-            write!(f, "{}", ty.display(self.context))?;
+            write!(f, "{}", ty.display(self.db))?;
         }

         Ok(())

@@ -134,15 +111,15 @@ impl std::fmt::Debug for DisplayUnionType<'_> {
     }
 }

-impl IntersectionType<'_> {
-    fn display<'a>(&'a self, context: &'a TypingContext<'a, 'a>) -> DisplayIntersectionType<'a> {
-        DisplayIntersectionType { ty: self, context }
+impl<'db> IntersectionType<'db> {
+    fn display(&'db self, db: &'db dyn Db) -> DisplayIntersectionType<'db> {
+        DisplayIntersectionType { db, ty: self }
     }
 }

-struct DisplayIntersectionType<'a> {
-    ty: &'a IntersectionType<'a>,
-    context: &'a TypingContext<'a, 'a>,
+struct DisplayIntersectionType<'db> {
+    ty: &'db IntersectionType<'db>,
+    db: &'db dyn Db,
 }

 impl Display for DisplayIntersectionType<'_> {

@@ -150,10 +127,10 @@ impl Display for DisplayIntersectionType<'_> {
         let mut first = true;
         for (neg, ty) in self
             .ty
-            .positive
+            .positive(self.db)
             .iter()
             .map(|ty| (false, ty))
-            .chain(self.ty.negative.iter().map(|ty| (true, ty)))
+            .chain(self.ty.negative(self.db).iter().map(|ty| (true, ty)))
         {
             if !first {
                 f.write_str(" & ")?;

@@ -162,7 +139,7 @@ impl Display for DisplayIntersectionType<'_> {
             if neg {
                 f.write_str("~")?;
             };
-            write!(f, "{}", ty.display(self.context))?;
+            write!(f, "{}", ty.display(self.db))?;
         }
         Ok(())
     }
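One detail worth calling out in the `DisplayUnionType` hunks: int literals are partitioned out of the union's members so they can be rendered as a single grouped entry. A standalone sketch of that grouping rule (an assumption about the final rendering, consistent with the partition shown above):

```rust
// Standalone sketch of the union-display grouping: int literals are collected
// into one `Literal[...]` group, and the remaining members are joined with " | ".
fn display_union(members: &[&str]) -> String {
    let (ints, others): (Vec<&str>, Vec<&str>) = members
        .iter()
        .copied()
        .partition(|m| m.parse::<i64>().is_ok());
    let mut parts = Vec::new();
    if !ints.is_empty() {
        parts.push(format!("Literal[{}]", ints.join(", ")));
    }
    parts.extend(others.into_iter().map(String::from));
    parts.join(" | ")
}

fn main() {
    assert_eq!(display_union(&["1", "2", "str"]), "Literal[1, 2] | str");
}
```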
File diff suppressed because it is too large

crates/red_knot_python_semantic/src/types/narrow.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type, TypeInference};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use std::sync::Arc;

/// Return the type constraint that `test` (if true) would place on `definition`, if any.
///
/// For example, if we have this code:
///
/// ```python
/// y = 1 if flag else None
/// x = 1 if flag else None
/// if x is not None:
///     ...
/// ```
///
/// The `test` expression `x is not None` places the constraint "not None" on the definition of
/// `x`, so in that case we'd return `Some(Type::Intersection(negative=[Type::None]))`.
///
/// But if we called this with the same `test` expression, but the `definition` of `y`, no
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
    definition: Definition<'db>,
) -> Option<Type<'db>> {
    all_narrowing_constraints(db, test)
        .get(&definition.symbol(db))
        .copied()
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, test).finish()
}

type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;

struct NarrowingConstraintsBuilder<'db> {
    db: &'db dyn Db,
    expression: Expression<'db>,
    constraints: NarrowingConstraints<'db>,
}

impl<'db> NarrowingConstraintsBuilder<'db> {
    fn new(db: &'db dyn Db, expression: Expression<'db>) -> Self {
        Self {
            db,
            expression,
            constraints: NarrowingConstraints::default(),
        }
    }

    fn finish(mut self) -> NarrowingConstraints<'db> {
        if let ast::Expr::Compare(expr_compare) = self.expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare);
        }
        // TODO other test expression kinds

        self.constraints.shrink_to_fit();
        self.constraints
    }

    fn symbols(&self) -> Arc<SymbolTable> {
        symbol_table(self.db, self.scope())
    }

    fn scope(&self) -> ScopeId<'db> {
        self.expression.scope(self.db)
    }

    fn inference(&self) -> &'db TypeInference<'db> {
        infer_expression_types(self.db, self.expression)
    }

    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare) {
        let ast::ExprCompare {
            range: _,
            left,
            ops,
            comparators,
        } = expr_compare;

        if let ast::Expr::Name(ast::ExprName {
            range: _,
            id,
            ctx: _,
        }) = left.as_ref()
        {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();
            let scope = self.scope();
            let inference = self.inference();
            for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
                let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
                if matches!(op, ast::CmpOp::IsNot) {
                    let ty = IntersectionBuilder::new(self.db)
                        .add_negative(comp_ty)
                        .build();
                    self.constraints.insert(symbol, ty);
                };
                // TODO other comparison types
            }
        }
    }
}
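As a rough mental model of what `all_narrowing_constraints` produces for the doc example above: a map from each narrowed symbol to its constraint, with only the `is not` shape handled so far. A standalone sketch (strings stand in for the crate's interned symbol IDs and types):

```rust
use std::collections::HashMap;

// Toy analogue of `all_narrowing_constraints`: parse a test of the shape
// `<name> is not <comparator>` and record the negative constraint for that
// name, mirroring the single `CmpOp::IsNot` arm implemented above.
fn narrowing_constraints(test: &str) -> HashMap<String, String> {
    let mut constraints = HashMap::new();
    if let Some((name, comparator)) = test.split_once(" is not ") {
        // `~T` here plays the role of an intersection with one negative element.
        constraints.insert(name.to_string(), format!("~{comparator}"));
    }
    constraints
}

fn main() {
    let c = narrowing_constraints("x is not None");
    // The definition of `x` gets the constraint `~None`; `y` gets nothing,
    // matching the `narrowing_constraint` doc comment above.
    assert_eq!(c.get("x").map(String::as_str), Some("~None"));
    assert_eq!(c.get("y"), None);
}
```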
crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f
@@ -35,6 +35,8 @@ _dummy_threading: 3.0-3.8
 _heapq: 3.0-
 _imp: 3.0-
 _interpchannels: 3.13-
+_interpqueues: 3.13-
+_interpreters: 3.13-
 _json: 3.0-
 _locale: 3.0-
 _lsprof: 3.0-

@@ -112,6 +114,7 @@ curses: 3.0-
 dataclasses: 3.7-
 datetime: 3.0-
 dbm: 3.0-
+dbm.sqlite3: 3.13-
 decimal: 3.0-
 difflib: 3.0-
 dis: 3.0-

@@ -155,6 +158,7 @@ importlib: 3.0-
 importlib._abc: 3.10-
 importlib.metadata: 3.8-
 importlib.metadata._meta: 3.10-
+importlib.metadata.diagnose: 3.13-
 importlib.readers: 3.10-
 importlib.resources: 3.7-
 importlib.resources.abc: 3.11-

@@ -753,9 +753,11 @@ class Constant(expr):
     __match_args__ = ("value", "kind")
     value: Any  # None, str, bytes, bool, int, float, complex, Ellipsis
     kind: str | None
-    # Aliases for value, for backwards compatibility
-    s: Any
-    n: int | float | complex
+    if sys.version_info < (3, 14):
+        # Aliases for value, for backwards compatibility
+        s: Any
+        n: int | float | complex

     def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

 class NamedExpr(expr):

@@ -1,13 +1,12 @@
 import sys
 from abc import abstractmethod
 from types import MappingProxyType
-from typing import (  # noqa: Y022,Y038,Y057
+from typing import (  # noqa: Y022,Y038
     AbstractSet as Set,
     AsyncGenerator as AsyncGenerator,
     AsyncIterable as AsyncIterable,
     AsyncIterator as AsyncIterator,
     Awaitable as Awaitable,
-    ByteString as ByteString,
     Callable as Callable,
     Collection as Collection,
     Container as Container,

@@ -59,8 +58,12 @@ __all__ = [
     "ValuesView",
     "Sequence",
     "MutableSequence",
-    "ByteString",
 ]
+if sys.version_info < (3, 14):
+    from typing import ByteString as ByteString  # noqa: Y057
+
+    __all__ += ["ByteString"]

 if sys.version_info >= (3, 12):
     __all__ += ["Buffer"]

@@ -70,6 +73,8 @@ _VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.
 @final
 class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
     def __eq__(self, value: object, /) -> bool: ...
+    if sys.version_info >= (3, 13):
+        def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
     if sys.version_info >= (3, 10):
         @property
         def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@@ -83,6 +88,8 @@ class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
 @final
 class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
     def __eq__(self, value: object, /) -> bool: ...
+    if sys.version_info >= (3, 13):
+        def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
     if sys.version_info >= (3, 10):
         @property
         def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
Some files were not shown because too many files have changed in this diff.