Compare commits
294 Commits: dhruv/synt...jane/docs/
SHA1: 7f9c31720b, 6034a4e0ba, e7a7da9ae9, 2fda0038d8, 800ff0a693, 55f4812051,
47c9ed07f2, 7cb2619ef5, 83fe44728b, 00e456ead4, 2853751344, 7109214b57,
d930e97212, 9c1b6ec411, 692309ebd7, 68a8978454, cd2af3be73, e2e98d005c,
32ccc38365, 35151080b1, 53a80a5c11, 446ad0ba44, d897811f00, 5d6b26ed33,
49e5357dac, e02c44e46c, 86cbf2d594, b79f1ed7f5, 068b75cc8e, c3f61a012e,
0c8b5eb17a, 375d2c87b2, f846fc9e07, 92b145e56a, 715609663a, 519a278899,
91d091bb81, 79d72e6479, 81160320de, b1e7bf76da, ad4a88657b, 611f4e5c5f,
791f6a1820, 3d0230f469, da79bac33c, 8de0cd6565, 3277d031f8, 736a4ead14,
27ebff36ec, 96da136e6a, 4667d8697c, 690e94f4fb, 9fd84e63bc, 3ab7a8da73,
927069c12f, c8ff89c73c, 4c05d7a6d4, b54922fd73, 3f884b4b34, b456051be8,
2dfbf118d7, ed948eaefb, 22733cb7c7, a26bd01be2, b617d90651, cdc7c71449,
ff3bf583b2, 2e7c3454e0, 1d73d60bd3, f666d79cd7, 26ac805e6d, 98b13b9844,
13ad24b13e, 104608b2f7, 1e0642fac8, 5e5a81b05f, c53d55a483, ffc98522cd,
8499abfa7f, 1f654ee729, 355d26f05c, 027ea899ce, 61c568268a, 01754a1209,
e684f6b1e0, 142eda7dc6, d9c0590169, f8f0053a6c, e7c4d28c5e, 19cd9d7d8a,
c50577f1d7, 2d6d85e993, 4f49e918a9, d681a45b08, 89bb07c251, fe462b30e7,
c5bc368e43, 73370fe798, 22b6488550, d4dd96d1f4, efbf7b14b5, 9dc226be97,
bcbddac21c, 4ed3aed8d3, 60ea72a6bc, a525b4be3d, 93973b96cb, db8f2c2d9f,
5c0df7a150, 7d5cf1811b, 4e9d771aa0, 507f5c1137, 4e92102922, 08b548626a,
0d06900cec, 521a358a4d, 134aa7c7d5, 33e44c25ad, 48163dcaca, e78b9dc7fe,
141d1a8cdf, 8b9bbc0c84, 87ea06e360, 74246f4acc, b3b2f57d8e, 549cc1e437,
7509a48eab, af821ecda1, ccc418cc49, b98ab1b0b6, ed947792cf, ee1621b2f9,
32ca704956, 37d8de3316, 4157c8635b, d22f3402e1, ea27445479, 540d76892f,
cd101c83ae, b2fc0df6db, 93eefb1417, 303ef02f93, 1b7d08c2c9, fcaa62f0d9,
f144edeefa, 6c1fa1d440, 5a5a588a72, 084e5464fb, 31f97329c0, b46e9e825a,
9b2cf569b2, 5806bc915d, b0b4706e2d, a8cf7096ff, 895eb3ef48, 2e0a9755e0,
b021b5babe, eed6d784df, 8338db6c12, d056d09547, 1645be018d, 2c865023ac,
2567e14b7a, 3b19df04d7, 6ffb96171a, 64165bee43, 0c75548146, b56a577f25,
e1133a24ed, 2f8ac1e9b3, 3fb2028506, 3f9ee31efb, b02d3f3fd9, 2b28889ca9,
8db147c09d, a58bde6958, f4e23d2dff, 4a155e2b22, bf5b62edac, c69a789aa5,
140c408a92, 27085a93d9, a9b6c4f269, ded010cf9c, 436dc18b15, 9599bd7622,
ec3f523924, 010434015e, 25131da2c3, 712783825d, 94a3c53841, 0ea2519e80,
6d79ddc0aa, 9f3e609278, b36dd1aa51, fd9d68051e, 99834ee93d, b80bf22c4d,
312f6640b8, 91a5fdee7a, 1ad5f9c038, e914bc300b, 27f6f048f0, d62a617938,
16a926d138, 05566c6075, 7ce17b7736, f9a64503c8, 8a25531a71, 9b6d2ce1f2,
889667ad84, 5b500fc4dc, 685d11a909, dcabd04caf, 3aa7e35a4c, b0a751012e,
bd46cd1fcf, a8d1328c1a, e35deee583, 921bc15542, e14096f0a8, 5f976cae07,
7659114eb3, 163c374242, 204c59e353, 531ae5227c, e0169d8dea, 9a3b9f9fb5,
49a5a9ccc2, 69d9212817, 4a305588e9, 16acd4913f, 3989cb8b56, a38c05bf13,
ab107ef1f3, b36c713279, 34a5063aa2, adc0a5d126, e28e737296, 37ad994318,
246a3388ee, 6be00d5775, 9200dfc79f, 5dcde88099, 7794eb2bde, 40bfae4f99,
7b064b25b2, 9993115f63, f0a21c9161, f26c155de5, c3fa826b0a, 8b69794f1d,
4e7c84df1d, 99c400000a, b5d147d219, 77da4615c1, 627d230688, 0eef834e89,
650c578e07, 9567fddf69, ab6d9d4658, 677893226a, 33fd50027c, 3e30962077,
81275a6c3d, 52c946a4c5, ebdaf5765a, 9a93409e1c, 102b9d930f, 550aa871d3,
3c22a3bdcc, 6263923915, 94abea4b08, 519a65007f, 573facd2ba, 3cb2e677aa,
f0046ab28e, 5bb9720a10, 9ff18bf9d3, aa906b9c75, 3476e2f359, 8848eca3c6,
b0731ef9cb, 84531d1644, 83b8b62e3e, 7225732859, 403f0dccd8, 46fcd19ca6,
d9ec3d56b0, cd87b787d9, dd6d411026, cfceb437a8, 48b0660228, 24899efe50
@@ -1,3 +1,10 @@
 [alias]
 dev = "run --package ruff_dev --bin ruff_dev"
 benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
+
+# statically link the C runtime so the executable does not depend on
+# that shared/dynamic library.
+#
+# See: https://github.com/astral-sh/ruff/issues/11503
+[target.'cfg(all(target_env="msvc", target_os = "windows"))']
+rustflags = ["-C", "target-feature=+crt-static"]
.gitattributes (4 changes, vendored)
@@ -8,6 +8,10 @@ crates/ruff_linter/resources/test/fixtures/pycodestyle/W391_3.py text eol=crlf
 crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_crlf.py text eol=crlf
 crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap text eol=crlf
 
+crates/ruff_python_parser/resources/invalid/re_lexing/line_continuation_windows_eol.py text eol=crlf
+crates/ruff_python_parser/resources/invalid/re_lex_logical_token_windows_eol.py text eol=crlf
+crates/ruff_python_parser/resources/invalid/re_lex_logical_token_mac_eol.py text eol=cr
+
 crates/ruff_python_parser/resources/inline linguist-generated=true
 
 ruff.schema.json linguist-generated=true text=auto eol=lf
.github/CODEOWNERS (3 changes, vendored)
@@ -15,3 +15,6 @@
 
 # Script for fuzzing the parser
 /scripts/fuzz-parser/ @AlexWaygood
+
+# red-knot
+/crates/red_knot/ @carljm @MichaReiser
.github/renovate.json5 (7 changes, vendored)
@@ -41,6 +41,13 @@
     description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
     enabled: false,
   },
+  {
+    // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
+    // See: https://github.com/astral-sh/uv/issues/3642
+    matchPackagePatterns: ["zip"],
+    matchManagers: ["cargo"],
+    enabled: false,
+  },
   {
     groupName: "pre-commit dependencies",
     matchManagers: ["pre-commit"],
.github/workflows/ci.yaml (44 changes, vendored)
@@ -167,6 +167,9 @@ jobs:
       - uses: Swatinem/rust-cache@v2
       - name: "Run tests"
         shell: bash
+        env:
+          # Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
+          RUSTUP_WINDOWS_PATH_ADD_BIN: 1
         run: |
           cargo nextest run --all-features --profile ci
           cargo test --all-features --doc
@@ -209,6 +212,38 @@
       - name: "Build"
         run: cargo build --release --locked
 
+  cargo-build-msrv:
+    name: "cargo build (msrv)"
+    runs-on: ubuntu-latest
+    needs: determine_changes
+    if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
+    timeout-minutes: 20
+    steps:
+      - uses: actions/checkout@v4
+      - uses: SebRollen/toml-action@v1.2.0
+        id: msrv
+        with:
+          file: "Cargo.toml"
+          field: "workspace.package.rust-version"
+      - name: "Install Rust toolchain"
+        run: rustup default ${{ steps.msrv.outputs.value }}
+      - name: "Install mold"
+        uses: rui314/setup-mold@v1
+      - name: "Install cargo nextest"
+        uses: taiki-e/install-action@v2
+        with:
+          tool: cargo-nextest
+      - name: "Install cargo insta"
+        uses: taiki-e/install-action@v2
+        with:
+          tool: cargo-insta
+      - uses: Swatinem/rust-cache@v2
+      - name: "Run tests"
+        shell: bash
+        env:
+          NEXTEST_PROFILE: "ci"
+        run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest
+
   cargo-fuzz:
     name: "cargo fuzz"
     runs-on: ubuntu-latest
@@ -222,10 +257,13 @@
       - uses: Swatinem/rust-cache@v2
        with:
          workspaces: "fuzz -> target"
-      - name: "Install cargo-fuzz"
-        uses: taiki-e/install-action@v2
-        with:
-          tool: cargo-fuzz@0.11.2
+      - name: "Install cargo-binstall"
+        uses: cargo-bins/cargo-binstall@main
+      - name: "Install cargo-fuzz"
+        # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
+        run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
       - run: cargo fuzz build -s none
 
   fuzz-parser:
@@ -303,7 +341,7 @@ jobs:
           name: ruff
           path: target/debug
 
-      - uses: dawidd6/action-download-artifact@v3
+      - uses: dawidd6/action-download-artifact@v6
        name: Download baseline Ruff binary
        with:
          name: ruff
.github/workflows/docs.yaml (2 changes, vendored)
@@ -47,7 +47,7 @@ jobs:
        run: mkdocs build --strict -f mkdocs.public.yml
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       uses: cloudflare/wrangler-action@v3.5.0
+       uses: cloudflare/wrangler-action@v3.6.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
.github/workflows/playground.yaml (2 changes, vendored)
@@ -40,7 +40,7 @@ jobs:
        working-directory: playground
      - name: "Deploy to Cloudflare Pages"
        if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-       uses: cloudflare/wrangler-action@v3.5.0
+       uses: cloudflare/wrangler-action@v3.6.1
        with:
          apiToken: ${{ secrets.CF_API_TOKEN }}
          accountId: ${{ secrets.CF_ACCOUNT_ID }}
.github/workflows/pr-comment.yaml (12 changes, vendored)
@@ -17,7 +17,7 @@ jobs:
   comment:
     runs-on: ubuntu-latest
     steps:
-      - uses: dawidd6/action-download-artifact@v3
+      - uses: dawidd6/action-download-artifact@v6
        name: Download pull request number
        with:
          name: pr-number
@@ -32,7 +32,7 @@ jobs:
            echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
          fi
 
-      - uses: dawidd6/action-download-artifact@v3
+      - uses: dawidd6/action-download-artifact@v6
        name: "Download ecosystem results"
        id: download-ecosystem-result
        if: steps.pr-number.outputs.pr-number
@@ -48,6 +48,14 @@ jobs:
        id: generate-comment
        if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
        run: |
+          # Guard against malicious ecosystem results that symlink to a secret
+          # file on this runner
+          if [[ -L pr/ecosystem/ecosystem-result ]]
+          then
+            echo "Error: ecosystem-result cannot be a symlink"
+            exit 1
+          fi
+
          # Note this identifier is used to find the comment to update on
          # subsequent runs
          echo '<!-- generated-comment ecosystem -->' >> comment.txt
.github/workflows/release.yaml (5 changes, vendored)
@@ -163,6 +163,9 @@ jobs:
        with:
          target: ${{ matrix.platform.target }}
          args: --release --locked --out dist
+        env:
+          # aarch64 build fails, see https://github.com/PyO3/maturin/issues/2110
+          XWIN_VERSION: 16
      - name: "Test wheel"
        if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
        shell: bash
@@ -569,7 +572,7 @@ jobs:
          fi
 
      - name: "Build and push Docker image"
-       uses: docker/build-push-action@v5
+       uses: docker/build-push-action@v6
        with:
          context: .
          platforms: linux/amd64,linux/arm64
.github/workflows/sync_typeshed.yaml (14 changes, vendored)
@@ -37,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
-          rm -rf ruff/crates/red_knot/vendor/typeshed
-          mkdir ruff/crates/red_knot/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
+          mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
+          rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
@@ -2,7 +2,7 @@ fail_fast: true
 
 exclude: |
   (?x)^(
-    crates/red_knot/vendor/.*|
+    crates/red_knot_module_resolver/vendor/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
     crates/ruff/resources/.*|
@@ -14,7 +14,7 @@ exclude: |
 
 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.17
+    rev: v0.18
    hooks:
      - id: validate-pyproject
 
@@ -32,7 +32,7 @@ repos:
    )$
 
  - repo: https://github.com/igorshubovych/markdownlint-cli
-   rev: v0.40.0
+   rev: v0.41.0
    hooks:
      - id: markdownlint-fix
        exclude: |
@@ -42,7 +42,7 @@ repos:
    )$
 
  - repo: https://github.com/crate-ci/typos
-   rev: v1.21.0
+   rev: v1.22.9
    hooks:
      - id: typos
 
@@ -56,7 +56,7 @@ repos:
    pass_filenames: false # This makes it a lot faster
 
  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.4.4
+   rev: v0.4.10
    hooks:
      - id: ruff-format
      - id: ruff
.vscode/extensions.json (5 changes, vendored, new file)
@@ -0,0 +1,5 @@
+{
+  "recommendations": [
+    "rust-lang.rust-analyzer"
+  ]
+}
.vscode/settings.json (6 changes, vendored, new file)
@@ -0,0 +1,6 @@
+{
+  "rust-analyzer.check.extraArgs": [
+    "--all-features"
+  ],
+  "rust-analyzer.check.command": "clippy",
+}
CHANGELOG.md (216 changes)
@@ -1,5 +1,217 @@
 # Changelog
 
+## 0.4.10
+
+### Parser
+
+- Implement re-lexing logic for better error recovery ([#11845](https://github.com/astral-sh/ruff/pull/11845))
+
+### Rule changes
+
+- \[`flake8-copyright`\] Update `CPY001` to check the first 4096 bytes instead of 1024 ([#11927](https://github.com/astral-sh/ruff/pull/11927))
+- \[`pycodestyle`\] Update `E999` to show all syntax errors instead of just the first one ([#11900](https://github.com/astral-sh/ruff/pull/11900))
+
+### Server
+
+- Add tracing setup guide to Helix documentation ([#11883](https://github.com/astral-sh/ruff/pull/11883))
+- Add tracing setup guide to Neovim documentation ([#11884](https://github.com/astral-sh/ruff/pull/11884))
+- Defer notebook cell deletion to avoid an error message ([#11864](https://github.com/astral-sh/ruff/pull/11864))
+
+### Security
+
+- Guard against malicious ecosystem comment artifacts ([#11879](https://github.com/astral-sh/ruff/pull/11879))
+
+## 0.4.9
+
+### Preview features
+
+- \[`pylint`\] Implement `consider-dict-items` (`C0206`) ([#11688](https://github.com/astral-sh/ruff/pull/11688))
+- \[`refurb`\] Implement `repeated-global` (`FURB154`) ([#11187](https://github.com/astral-sh/ruff/pull/11187))
+
+### Rule changes
+
+- \[`pycodestyle`\] Adapt fix for `E203` to work identically to `ruff format` ([#10999](https://github.com/astral-sh/ruff/pull/10999))
+
+### Formatter
+
+- Fix formatter instability for lines only consisting of zero-width characters ([#11748](https://github.com/astral-sh/ruff/pull/11748))
+
+### Server
+
+- Add supported commands in server capabilities ([#11850](https://github.com/astral-sh/ruff/pull/11850))
+- Use real file path when available in `ruff server` ([#11800](https://github.com/astral-sh/ruff/pull/11800))
+- Improve error message when a command is run on an unavailable document ([#11823](https://github.com/astral-sh/ruff/pull/11823))
+- Introduce the `ruff.printDebugInformation` command ([#11831](https://github.com/astral-sh/ruff/pull/11831))
+- Tracing system now respects log level and trace level, with options to log to a file ([#11747](https://github.com/astral-sh/ruff/pull/11747))
+
+### CLI
+
+- Handle non-printable characters in diff view ([#11687](https://github.com/astral-sh/ruff/pull/11687))
+
+### Bug fixes
+
+- \[`refurb`\] Avoid suggesting starmap when arguments are used outside call (`FURB140`) ([#11830](https://github.com/astral-sh/ruff/pull/11830))
+- \[`flake8-bugbear`\] Avoid panic in `B909` when checking large loop blocks ([#11772](https://github.com/astral-sh/ruff/pull/11772))
+- \[`refurb`\] Fix misbehavior of `operator.itemgetter` when getter param is a tuple (`FURB118`) ([#11774](https://github.com/astral-sh/ruff/pull/11774))
+
+## 0.4.8
+
+### Performance
+
+- Linter performance has been improved by around 10% on some microbenchmarks by refactoring the lexer and parser to maintain synchronicity between them ([#11457](https://github.com/astral-sh/ruff/pull/11457))
+
+### Preview features
+
+- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644))
+- \[`flake8-pyi`\] Implement `PYI063` ([#11699](https://github.com/astral-sh/ruff/pull/11699))
+- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))
+
+### Rule changes
+
+- \[`pyupgrade`\] Update `UP035` for Python 3.13 and the latest version of `typing_extensions` ([#11693](https://github.com/astral-sh/ruff/pull/11693))
+- \[`numpy`\] Update `NPY001` rule for NumPy 2.0 ([#11735](https://github.com/astral-sh/ruff/pull/11735))
+
+### Server
+
+- Formatting a document with syntax problems no longer spams a visible error popup ([#11745](https://github.com/astral-sh/ruff/pull/11745))
+
+### CLI
+
+- Add RDJson support for `--output-format` flag ([#11682](https://github.com/astral-sh/ruff/pull/11682))
+
+### Bug fixes
+
+- \[`pyupgrade`\] Write empty string in lieu of panic when fixing `UP032` ([#11696](https://github.com/astral-sh/ruff/pull/11696))
+- \[`flake8-simplify`\] Simplify double negatives in `SIM103` ([#11684](https://github.com/astral-sh/ruff/pull/11684))
+- Ensure the expression generator adds a newline before `type` statements ([#11720](https://github.com/astral-sh/ruff/pull/11720))
+- Respect per-file ignores for blanket and redirected noqa rules ([#11728](https://github.com/astral-sh/ruff/pull/11728))
+
+## 0.4.7
+
+### Preview features
+
+- \[`flake8-pyi`\] Implement `PYI064` ([#11325](https://github.com/astral-sh/ruff/pull/11325))
+- \[`flake8-pyi`\] Implement `PYI066` ([#11541](https://github.com/astral-sh/ruff/pull/11541))
+- \[`flake8-pyi`\] Implement `PYI057` ([#11486](https://github.com/astral-sh/ruff/pull/11486))
+- \[`pyflakes`\] Enable `F822` in `__init__.py` files by default ([#11370](https://github.com/astral-sh/ruff/pull/11370))
+
+### Formatter
+
+- Fix incorrect placement of trailing stub function comments ([#11632](https://github.com/astral-sh/ruff/pull/11632))
+
+### Server
+
+- Respect file exclusions in `ruff server` ([#11590](https://github.com/astral-sh/ruff/pull/11590))
+- Add support for documents that do not exist on disk ([#11588](https://github.com/astral-sh/ruff/pull/11588))
+- Add Vim and Kate setup guide for `ruff server` ([#11615](https://github.com/astral-sh/ruff/pull/11615))
+
+### Bug fixes
+
+- Avoid removing newlines between docstring headers and rST blocks ([#11609](https://github.com/astral-sh/ruff/pull/11609))
+- Infer indentation with imports when logical indent is absent ([#11608](https://github.com/astral-sh/ruff/pull/11608))
+- Use char index rather than position for indent slice ([#11645](https://github.com/astral-sh/ruff/pull/11645))
+- \[`flake8-comprehension`\] Strip parentheses around generators in `C400` ([#11607](https://github.com/astral-sh/ruff/pull/11607))
+- Mark `repeated-isinstance-calls` as unsafe on Python 3.10 and later ([#11622](https://github.com/astral-sh/ruff/pull/11622))
+
+## 0.4.6
+
+### Breaking changes
+
+- Use project-relative paths when calculating GitLab fingerprints ([#11532](https://github.com/astral-sh/ruff/pull/11532))
+- Bump minimum supported Windows version to Windows 10 ([#11613](https://github.com/astral-sh/ruff/pull/11613))
+
+### Preview features
+
+- \[`flake8-async`\] Sleep with >24 hour interval should usually sleep forever (`ASYNC116`) ([#11498](https://github.com/astral-sh/ruff/pull/11498))
+
+### Rule changes
+
+- \[`numpy`\] Add missing functions to NumPy 2.0 migration rule ([#11528](https://github.com/astral-sh/ruff/pull/11528))
+- \[`mccabe`\] Consider irrefutable pattern similar to `if .. else` for `C901` ([#11565](https://github.com/astral-sh/ruff/pull/11565))
+- Consider `match`-`case` statements for `C901`, `PLR0912`, and `PLR0915` ([#11521](https://github.com/astral-sh/ruff/pull/11521))
+- Remove empty strings when converting to f-string (`UP032`) ([#11524](https://github.com/astral-sh/ruff/pull/11524))
+- \[`flake8-bandit`\] `request-without-timeout` should warn for `requests.request` ([#11548](https://github.com/astral-sh/ruff/pull/11548))
+- \[`flake8-self`\] Ignore sunder accesses in `flake8-self` rules ([#11546](https://github.com/astral-sh/ruff/pull/11546))
+- \[`pyupgrade`\] Lint for `TypeAliasType` usages (`UP040`) ([#11530](https://github.com/astral-sh/ruff/pull/11530))
+
+### Server
+
+- Respect excludes in `ruff server` configuration discovery ([#11551](https://github.com/astral-sh/ruff/pull/11551))
+- Use default settings if initialization options is empty or not provided ([#11566](https://github.com/astral-sh/ruff/pull/11566))
+- `ruff server` correctly treats `.pyi` files as stub files ([#11535](https://github.com/astral-sh/ruff/pull/11535))
+- `ruff server` searches for configuration in parent directories ([#11537](https://github.com/astral-sh/ruff/pull/11537))
+- `ruff server`: An empty code action filter no longer returns notebook source actions ([#11526](https://github.com/astral-sh/ruff/pull/11526))
+
+### Bug fixes
+
+- \[`flake8-logging-format`\] Fix autofix title in `logging-warn` (`G010`) ([#11514](https://github.com/astral-sh/ruff/pull/11514))
+- \[`refurb`\] Avoid recommending `operator.itemgetter` with dependence on lambda arguments ([#11574](https://github.com/astral-sh/ruff/pull/11574))
+- \[`flake8-simplify`\] Avoid recommending context manager in `__enter__` implementations ([#11575](https://github.com/astral-sh/ruff/pull/11575))
+- Create intermediary directories for `--output-file` ([#11550](https://github.com/astral-sh/ruff/pull/11550))
+- Propagate reads on global variables ([#11584](https://github.com/astral-sh/ruff/pull/11584))
+- Treat all `singledispatch` arguments as runtime-required ([#11523](https://github.com/astral-sh/ruff/pull/11523))
+
+## 0.4.5
+
+### Ruff's language server is now in Beta
+
+`v0.4.5` marks the official Beta release of `ruff server`, an integrated language server built into Ruff.
+`ruff server` supports the same feature set as `ruff-lsp`, powering linting, formatting, and
+code fixes in Ruff's editor integrations -- but with superior performance and
+no installation required. We'd love your feedback!
+
+You can enable `ruff server` in the [VS Code extension](https://github.com/astral-sh/ruff-vscode?tab=readme-ov-file#enabling-the-rust-based-language-server) today.
+
+To read more about this exciting milestone, check out our [blog post](https://astral.sh/blog/ruff-v0.4.5)!
+
+### Rule changes
+
+- \[`flake8-future-annotations`\] Reword `future-rewritable-type-annotation` (`FA100`) message ([#11381](https://github.com/astral-sh/ruff/pull/11381))
+- \[`isort`\] Expanded the set of standard-library modules to include `_string`, etc. ([#11374](https://github.com/astral-sh/ruff/pull/11374))
+- \[`pycodestyle`\] Consider soft keywords for `E27` rules ([#11446](https://github.com/astral-sh/ruff/pull/11446))
+- \[`pyflakes`\] Recommend adding unused import bindings to `__all__` ([#11314](https://github.com/astral-sh/ruff/pull/11314))
+- \[`pyflakes`\] Update documentation and deprecate `ignore_init_module_imports` ([#11436](https://github.com/astral-sh/ruff/pull/11436))
+- \[`pyupgrade`\] Mark quotes as unnecessary for non-evaluated annotations ([#11485](https://github.com/astral-sh/ruff/pull/11485))
+
+### Formatter
+
+- Avoid multiline quotes warning with `quote-style = preserve` ([#11490](https://github.com/astral-sh/ruff/pull/11490))
+
+### Server
+
+- Support Jupyter Notebook files ([#11206](https://github.com/astral-sh/ruff/pull/11206))
+- Support `noqa` comment code actions ([#11276](https://github.com/astral-sh/ruff/pull/11276))
+- Fix automatic configuration reloading ([#11492](https://github.com/astral-sh/ruff/pull/11492))
+- Fix several issues with configuration in Neovim and Helix ([#11497](https://github.com/astral-sh/ruff/pull/11497))
+
+### CLI
+
+- Add `--output-format` as a CLI option for `ruff config` ([#11438](https://github.com/astral-sh/ruff/pull/11438))
+
+### Bug fixes
+
+- Avoid `PLE0237` for property with setter ([#11377](https://github.com/astral-sh/ruff/pull/11377))
+- Avoid `TCH005` for `if` stmt with `elif`/`else` block ([#11376](https://github.com/astral-sh/ruff/pull/11376))
+- Avoid flagging `__future__` annotations as required for non-evaluated type annotations ([#11414](https://github.com/astral-sh/ruff/pull/11414))
+- Check for ruff executable in 'bin' directory as installed by 'pip install --target'. ([#11450](https://github.com/astral-sh/ruff/pull/11450))
+- Sort edits prior to deduplicating in quotation fix ([#11452](https://github.com/astral-sh/ruff/pull/11452))
+- Treat escaped newline as valid sequence ([#11465](https://github.com/astral-sh/ruff/pull/11465))
+- \[`flake8-pie`\] Preserve parentheses in `unnecessary-dict-kwargs` ([#11372](https://github.com/astral-sh/ruff/pull/11372))
+- \[`pylint`\] Ignore `__slots__` with dynamic values ([#11488](https://github.com/astral-sh/ruff/pull/11488))
+- \[`pylint`\] Remove `try` body from branch counting ([#11487](https://github.com/astral-sh/ruff/pull/11487))
+- \[`refurb`\] Respect operator precedence in `FURB110` ([#11464](https://github.com/astral-sh/ruff/pull/11464))
+
+### Documentation
+
+- Add `--preview` to the README ([#11395](https://github.com/astral-sh/ruff/pull/11395))
+- Add Python 3.13 to list of allowed Python versions ([#11411](https://github.com/astral-sh/ruff/pull/11411))
+- Simplify Neovim setup documentation ([#11489](https://github.com/astral-sh/ruff/pull/11489))
+- Update CONTRIBUTING.md to reflect the new parser ([#11434](https://github.com/astral-sh/ruff/pull/11434))
+- Update server documentation with new migration guide ([#11499](https://github.com/astral-sh/ruff/pull/11499))
+- \[`pycodestyle`\] Clarify motivation for `E713` and `E714` ([#11483](https://github.com/astral-sh/ruff/pull/11483))
+- \[`pyflakes`\] Update docs to describe WAI behavior (F541) ([#11362](https://github.com/astral-sh/ruff/pull/11362))
+- \[`pylint`\] Clearly indicate what is counted as a branch ([#11423](https://github.com/astral-sh/ruff/pull/11423))
+
 ## 0.4.4
 
 ### Preview features
@@ -74,6 +286,10 @@
 - Avoid allocations for isort module names ([#11251](https://github.com/astral-sh/ruff/pull/11251))
 - Build a separate ARM wheel for macOS ([#11149](https://github.com/astral-sh/ruff/pull/11149))
 
+### Windows
+
+- Increase the minimum requirement to Windows 10.
+
 ## 0.4.2
 
 ### Rule changes
 
@@ -101,6 +101,8 @@ pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting,
 These checks will run on GitHub Actions when you open your pull request, but running them locally
 will save you time and expedite the merge process.
 
+If you're using VS Code, you can also install the recommended [rust-analyzer](https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer) extension to get these checks while editing.
+
 Note that many code changes also require updating the snapshot tests, which is done interactively
 after running `cargo test` like so:
 
@@ -349,7 +351,9 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
    - The commit hash of the merged release pull request on `main`
 1. The release workflow will do the following:
    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
-      uploaded anything, you can restart after pushing a fix.
+      uploaded anything, you can restart after pushing a fix. If you just need to rerun the build,
+      make sure you're [re-running all the failed
+      jobs](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-failed-jobs-in-a-workflow) and not just a single failed job.
    1. Upload to PyPI.
    1. Create and push the Git tag (as extracted from `pyproject.toml`). We create the Git tag only
      after building the wheels and uploading to PyPI, since we can't delete or modify the tag ([#4468](https://github.com/astral-sh/ruff/issues/4468)).
Cargo.lock (497 changes, generated)
File diff suppressed because it is too large
Cargo.toml (33 changes)
@@ -4,7 +4,7 @@ resolver = "2"
 
 [workspace.package]
 edition = "2021"
-rust-version = "1.71"
+rust-version = "1.75"
 homepage = "https://docs.astral.sh/ruff"
 documentation = "https://docs.astral.sh/ruff"
 repository = "https://github.com/astral-sh/ruff"
@@ -14,6 +14,7 @@ license = "MIT"
 [workspace.dependencies]
 ruff = { path = "crates/ruff" }
 ruff_cache = { path = "crates/ruff_cache" }
+ruff_db = { path = "crates/ruff_db" }
 ruff_diagnostics = { path = "crates/ruff_diagnostics" }
 ruff_formatter = { path = "crates/ruff_formatter" }
 ruff_index = { path = "crates/ruff_index" }
@@ -34,6 +35,8 @@ ruff_source_file = { path = "crates/ruff_source_file" }
 ruff_text_size = { path = "crates/ruff_text_size" }
 ruff_workspace = { path = "crates/ruff_workspace" }
 
+red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
+
 aho-corasick = { version = "1.1.3" }
 annotate-snippets = { version = "0.9.2", features = ["color"] }
 anyhow = { version = "1.0.80" }
@@ -42,6 +45,7 @@ bincode = { version = "1.3.3" }
 bitflags = { version = "2.5.0" }
 bstr = { version = "1.9.1" }
 cachedir = { version = "0.3.1" }
+camino = { version = "1.1.7" }
 chrono = { version = "0.4.35", default-features = false, features = ["clock"] }
 clap = { version = "4.5.3", features = ["derive"] }
 clap_complete_command = { version = "0.5.1" }
@@ -62,26 +66,26 @@ filetime = { version = "0.2.23" }
 glob = { version = "0.3.1" }
 globset = { version = "0.4.14" }
 hashbrown = "0.14.3"
 hexf-parse = { version = "0.2.1" }
 ignore = { version = "0.4.22" }
 imara-diff = { version = "0.1.5" }
 imperative = { version = "1.0.4" }
 indexmap = { version = "2.2.6" }
 indicatif = { version = "0.17.8" }
 indoc = { version = "2.0.4" }
-insta = { version = "1.35.1", feature = ["filters", "glob"] }
+insta = { version = "1.35.1" }
 insta-cmd = { version = "0.6.0" }
 is-macro = { version = "0.3.5" }
 is-wsl = { version = "0.4.0" }
-itertools = { version = "0.12.1" }
+itertools = { version = "0.13.0" }
 js-sys = { version = "0.3.69" }
 jod-thread = { version = "0.1.2" }
 lexical-parse-float = { version = "0.8.0", features = ["format"] }
 libc = { version = "0.2.153" }
 libcst = { version = "1.1.0", default-features = false }
 log = { version = "0.4.17" }
 lsp-server = { version = "0.7.6" }
-lsp-types = { version = "0.95.0", features = ["proposed"] }
+lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
+    "proposed",
+] }
 matchit = { version = "0.8.1" }
 memchr = { version = "2.7.1" }
 mimalloc = { version = "0.1.39" }
@@ -101,18 +105,21 @@ quote = { version = "1.0.23" }
 rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 result-like = { version = "0.5.0" }
-rustc-hash = { version = "1.1.0" }
+rustc-hash = { version = "2.0.0" }
 salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f706aa2d32d473ee633a77c1af01d180c85da308" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
 serde-wasm-bindgen = { version = "0.6.4" }
 serde_json = { version = "1.0.113" }
 serde_test = { version = "1.0.152" }
-serde_with = { version = "3.6.0", default-features = false, features = ["macros"] }
+serde_with = { version = "3.6.0", default-features = false, features = [
+    "macros",
+] }
 shellexpand = { version = "3.0.0" }
 similar = { version = "2.4.0", features = ["inline"] }
 smallvec = { version = "1.13.2" }
 smol_str = { version = "0.2.2" }
 static_assertions = "1.1.0"
 strum = { version = "0.26.0", features = ["strum_macros"] }
 strum_macros = { version = "0.26.0" }
@@ -134,11 +141,17 @@ unicode_names2 = { version = "1.2.2" }
 unicode-normalization = { version = "0.1.23" }
 ureq = { version = "2.9.6" }
 url = { version = "2.5.0" }
-uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
+uuid = { version = "1.6.1", features = [
+    "v4",
+    "fast-rng",
+    "macro-diagnostics",
+    "js",
+] }
 walkdir = { version = "2.3.2" }
 wasm-bindgen = { version = "0.2.92" }
 wasm-bindgen-test = { version = "0.3.42" }
 wild = { version = "2" }
 zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
 
 [workspace.lints.rust]
 unsafe_code = "warn"
@@ -28,7 +28,7 @@ An extremely fast Python linter and code formatter, written in Rust.
 - ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
 - 🐍 Installable via `pip`
 - 🛠️ `pyproject.toml` support
-- 🤝 Python 3.12 compatibility
+- 🤝 Python 3.13 compatibility
 - ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
 - 📦 Built-in caching, to avoid re-analyzing unchanged files
 - 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
@@ -152,7 +152,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.4.4
+  rev: v0.4.10
   hooks:
     # Run the linter.
     - id: ruff
@@ -408,6 +408,7 @@ Ruff is used by a number of major open-source projects and companies, including:
 - [Dagster](https://github.com/dagster-io/dagster)
 - Databricks ([MLflow](https://github.com/mlflow/mlflow))
 - [FastAPI](https://github.com/tiangolo/fastapi)
+- [Godot](https://github.com/godotengine/godot)
 - [Gradio](https://github.com/gradio-app/gradio)
 - [Great Expectations](https://github.com/great-expectations/great_expectations)
 - [HTTPX](https://github.com/encode/httpx)
@@ -433,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:
 - Modern Treasury ([Python SDK](https://github.com/Modern-Treasury/modern-treasury-python))
 - Mozilla ([Firefox](https://github.com/mozilla/gecko-dev))
 - [Mypy](https://github.com/python/mypy)
+- [Nautobot](https://github.com/nautobot/nautobot)
 - Netflix ([Dispatch](https://github.com/Netflix/dispatch))
 - [Neon](https://github.com/neondatabase/neon)
 - [Nokia](https://nokia.com/)
@@ -440,6 +442,7 @@ Ruff is used by a number of major open-source projects and companies, including:
 - [NumPyro](https://github.com/pyro-ppl/numpyro)
 - [ONNX](https://github.com/onnx/onnx)
 - [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
+- [Open Wine Components](https://github.com/Open-Wine-Components/umu-launcher)
 - [PDM](https://github.com/pdm-project/pdm)
 - [PaddlePaddle](https://github.com/PaddlePaddle/Paddle)
 - [Pandas](https://github.com/pandas-dev/pandas)
@@ -467,6 +470,7 @@ Ruff is used by a number of major open-source projects and companies, including:
 - [Sphinx](https://github.com/sphinx-doc/sphinx)
 - [Stable Baselines3](https://github.com/DLR-RM/stable-baselines3)
 - [Starlette](https://github.com/encode/starlette)
+- [Streamlit](https://github.com/streamlit/streamlit)
 - [The Algorithms](https://github.com/TheAlgorithms/Python)
 - [Vega-Altair](https://github.com/altair-viz/altair)
 - WordPress ([Openverse](https://github.com/WordPress/openverse))
@@ -1,6 +1,6 @@
 [files]
 # https://github.com/crate-ci/typos/issues/868
-extend-exclude = ["crates/red_knot/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
+extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
 
 [default.extend-words]
 "arange" = "arange" # e.g. `numpy.arange`
 
@@ -12,6 +12,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+red_knot_module_resolver = { workspace = true }
+
 ruff_python_parser = { workspace = true }
 ruff_python_ast = { workspace = true }
 ruff_text_size = { workspace = true }
@@ -25,6 +27,7 @@ ctrlc = { version = "3.4.4" }
 dashmap = { workspace = true }
 hashbrown = { workspace = true }
 indexmap = { workspace = true }
 is-macro = { workspace = true }
 notify = { workspace = true }
 parking_lot = { workspace = true }
 rayon = { workspace = true }
@@ -35,7 +38,6 @@ tracing-subscriber = { workspace = true }
 tracing-tree = { workspace = true }
 
 [dev-dependencies]
-textwrap = { version = "0.16.1" }
 tempfile = { workspace = true }
 
 [lints]
@@ -1,9 +0,0 @@
-# Red Knot
-
-The Red Knot crate contains code working towards multifile analysis, type inference and, ultimately, type-checking. It's very much a work in progress for now.
-
-## Vendored types for the stdlib
-
-Red Knot vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot/vendor/typeshed`. The file `crates/red_knot/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
-
-The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -6,8 +6,8 @@ use std::marker::PhantomData;
 use rustc_hash::FxHashMap;
 
 use ruff_index::{Idx, IndexVec};
-use ruff_python_ast::visitor::preorder;
-use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal};
+use ruff_python_ast::visitor::source_order;
+use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal};
 use ruff_python_ast::{
     AnyNodeRef, AstNode, ExceptHandler, ExceptHandlerExceptHandler, Expr, MatchCase, ModModule,
     NodeKind, Parameter, Stmt, StmtAnnAssign, StmtAssign, StmtAugAssign, StmtClassDef,
@@ -91,9 +91,9 @@ impl AstIds {
         while let Some(deferred) = visitor.deferred.pop() {
             match deferred {
                 DeferredNode::FunctionDefinition(def) => {
-                    def.visit_preorder(&mut visitor);
+                    def.visit_source_order(&mut visitor);
                 }
-                DeferredNode::ClassDefinition(def) => def.visit_preorder(&mut visitor),
+                DeferredNode::ClassDefinition(def) => def.visit_source_order(&mut visitor),
             }
         }
 
@@ -182,7 +182,7 @@ impl<'a> AstIdsVisitor<'a> {
     }
 }
 
-impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
+impl<'a> SourceOrderVisitor<'a> for AstIdsVisitor<'a> {
     fn visit_stmt(&mut self, stmt: &'a Stmt) {
         match stmt {
             Stmt::FunctionDef(def) => {
@@ -226,14 +226,14 @@ impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
             Stmt::IpyEscapeCommand(_) => {}
         }
 
-        preorder::walk_stmt(self, stmt);
+        source_order::walk_stmt(self, stmt);
     }
 
     fn visit_expr(&mut self, _expr: &'a Expr) {}
 
     fn visit_parameter(&mut self, parameter: &'a Parameter) {
         self.create_id(parameter);
-        preorder::walk_parameter(self, parameter);
+        source_order::walk_parameter(self, parameter);
     }
 
     fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) {
@@ -243,17 +243,17 @@ impl<'a> PreorderVisitor<'a> for AstIdsVisitor<'a> {
             }
         }
 
-        preorder::walk_except_handler(self, except_handler);
+        source_order::walk_except_handler(self, except_handler);
     }
 
     fn visit_with_item(&mut self, with_item: &'a WithItem) {
         self.create_id(with_item);
-        preorder::walk_with_item(self, with_item);
+        source_order::walk_with_item(self, with_item);
     }
 
     fn visit_match_case(&mut self, match_case: &'a MatchCase) {
         self.create_id(match_case);
-        preorder::walk_match_case(self, match_case);
+        source_order::walk_match_case(self, match_case);
     }
 
     fn visit_type_param(&mut self, type_param: &'a TypeParam) {
@@ -275,10 +275,7 @@ pub struct TypedNodeKey<N: AstNode> {
 
 impl<N: AstNode> TypedNodeKey<N> {
     pub fn from_node(node: &N) -> Self {
-        let inner = NodeKey {
-            kind: node.as_any_node_ref().kind(),
-            range: node.range(),
-        };
+        let inner = NodeKey::from_node(node.as_any_node_ref());
         Self {
             inner,
             _marker: PhantomData,
@@ -312,7 +309,7 @@ struct FindNodeKeyVisitor<'a> {
     result: Option<AnyNodeRef<'a>>,
 }
 
-impl<'a> PreorderVisitor<'a> for FindNodeKeyVisitor<'a> {
+impl<'a> SourceOrderVisitor<'a> for FindNodeKeyVisitor<'a> {
     fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
         if self.result.is_some() {
             return TraversalSignal::Skip;
@@ -352,6 +349,12 @@ pub struct NodeKey {
 }
 
 impl NodeKey {
+    pub fn from_node(node: AnyNodeRef) -> Self {
+        NodeKey {
+            kind: node.kind(),
+            range: node.range(),
+        }
+    }
+
     pub fn resolve<'a>(&self, root: AnyNodeRef<'a>) -> Option<AnyNodeRef<'a>> {
         // We need to do a binary search here. Only traverse into a node if the range is within the node
         let mut visitor = FindNodeKeyVisitor {
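
The hunks above are a mechanical rename: `PreorderVisitor` becomes `SourceOrderVisitor`, and the `preorder` module becomes `source_order`. A minimal sketch of what an implementation looks like against the renamed trait, using only the trait, module, and `walk_*` helper names that appear in this diff; the `StmtCounter` type itself is hypothetical:

```rust
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor};
use ruff_python_ast::Stmt;

/// Hypothetical visitor that counts statements in source order.
struct StmtCounter {
    count: usize,
}

impl<'a> SourceOrderVisitor<'a> for StmtCounter {
    fn visit_stmt(&mut self, stmt: &'a Stmt) {
        self.count += 1;
        // Recurse into nested statements, mirroring the
        // `source_order::walk_stmt(self, stmt)` calls in the diff above.
        source_order::walk_stmt(self, stmt);
    }
}
```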
@@ -9,9 +9,9 @@ use crate::files::FileId;
 use crate::lint::{LintSemanticStorage, LintSyntaxStorage};
 use crate::module::ModuleResolver;
 use crate::parse::ParsedStorage;
+use crate::semantic::SemanticIndexStorage;
+use crate::semantic::TypeStore;
 use crate::source::SourceStorage;
-use crate::symbols::SymbolTablesStorage;
-use crate::types::TypeStore;
 
 mod jars;
 mod query;
@@ -125,7 +125,7 @@ pub struct SourceJar {
 #[derive(Debug, Default)]
 pub struct SemanticJar {
     pub module_resolver: ModuleResolver,
-    pub symbol_tables: SymbolTablesStorage,
+    pub semantic_indices: SemanticIndexStorage,
     pub type_store: TypeStore,
 }
 
@@ -17,9 +17,8 @@ pub mod lint;
 pub mod module;
 mod parse;
 pub mod program;
+mod semantic;
 pub mod source;
-mod symbols;
-mod types;
 pub mod watch;
 
 pub(crate) type FxDashMap<K, V> = dashmap::DashMap<K, V, BuildHasherDefault<FxHasher>>;
@@ -5,17 +5,18 @@ use std::time::Duration;
 
 use ruff_python_ast::visitor::Visitor;
 use ruff_python_ast::{ModModule, StringLiteral};
+use ruff_python_parser::Parsed;
 
 use crate::cache::KeyValueCache;
 use crate::db::{LintDb, LintJar, QueryResult};
 use crate::files::FileId;
-use crate::module::ModuleName;
-use crate::parse::{parse, Parsed};
-use crate::source::{source_text, Source};
-use crate::symbols::{
-    resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, SymbolId, SymbolTable,
+use crate::module::{resolve_module, ModuleName};
+use crate::parse::parse;
+use crate::semantic::{infer_definition_type, infer_symbol_public_type, Type};
+use crate::semantic::{
+    resolve_global_symbol, semantic_index, Definition, GlobalSymbolId, SemanticIndex, SymbolId,
 };
-use crate::types::{infer_definition_type, infer_symbol_type, Type};
+use crate::source::{source_text, Source};
 
 #[tracing::instrument(level = "debug", skip(db))]
 pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagnostics> {
@@ -40,7 +41,7 @@ pub(crate) fn lint_syntax(db: &dyn LintDb, file_id: FileId) -> QueryResult<Diagn
         let parsed = parse(db.upcast(), *file_id)?;
 
         if parsed.errors().is_empty() {
-            let ast = parsed.ast();
+            let ast = parsed.syntax();
 
             let mut visitor = SyntaxLintVisitor {
                 diagnostics,
@@ -81,13 +82,13 @@ pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Dia
     storage.get(&file_id, |file_id| {
         let source = source_text(db.upcast(), *file_id)?;
         let parsed = parse(db.upcast(), *file_id)?;
-        let symbols = symbol_table(db.upcast(), *file_id)?;
+        let semantic_index = semantic_index(db.upcast(), *file_id)?;
 
         let context = SemanticLintContext {
            file_id: *file_id,
            source,
-           parsed,
-           symbols,
+           parsed: &parsed,
+           semantic_index,
            db,
            diagnostics: RefCell::new(Vec::new()),
        };
@@ -101,17 +102,17 @@ pub(crate) fn lint_semantic(db: &dyn LintDb, file_id: FileId) -> QueryResult<Dia
 
 fn lint_unresolved_imports(context: &SemanticLintContext) -> QueryResult<()> {
     // TODO: Consider iterating over the dependencies (imports) only instead of all definitions.
-    for (symbol, definition) in context.symbols().all_definitions() {
+    for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
         match definition {
             Definition::Import(import) => {
-                let ty = context.infer_symbol_type(symbol)?;
+                let ty = context.infer_symbol_public_type(symbol)?;
 
                 if ty.is_unknown() {
                     context.push_diagnostic(format!("Unresolved module {}", import.module));
                 }
             }
             Definition::ImportFrom(import) => {
-                let ty = context.infer_symbol_type(symbol)?;
+                let ty = context.infer_symbol_public_type(symbol)?;
 
                 if ty.is_unknown() {
                     let module_name = import.module().map(Deref::deref).unwrap_or_default();
@@ -144,16 +145,14 @@ fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
     // TODO we should have a special marker on the real typing module (from typeshed) so if you
     // have your own "typing" module in your project, we don't consider it THE typing module (and
     // same for other stdlib modules that our lint rules care about)
-    let Some(typing_override) =
-        resolve_global_symbol(context.db.upcast(), ModuleName::new("typing"), "override")?
-    else {
+    let Some(typing_override) = context.resolve_global_symbol("typing", "override")? else {
         // TODO once we bundle typeshed, this should be unreachable!()
         return Ok(());
     };
 
     // TODO we should maybe index definitions by type instead of iterating all, or else iterate all
     // just once, match, and branch to all lint rules that care about a type of definition
-    for (symbol, definition) in context.symbols().all_definitions() {
+    for (symbol, definition) in context.semantic_index().symbol_table().all_definitions() {
         if !matches!(definition, Definition::FunctionDef(_)) {
             continue;
         }
@@ -194,8 +193,8 @@ fn lint_bad_overrides(context: &SemanticLintContext) -> QueryResult<()> {
 pub struct SemanticLintContext<'a> {
     file_id: FileId,
     source: Source,
-    parsed: Parsed,
-    symbols: Arc<SymbolTable>,
+    parsed: &'a Parsed<ModModule>,
+    semantic_index: Arc<SemanticIndex>,
     db: &'a dyn LintDb,
     diagnostics: RefCell<Vec<String>>,
 }
@@ -209,16 +208,16 @@ impl<'a> SemanticLintContext<'a> {
         self.file_id
     }
 
-    pub fn ast(&self) -> &ModModule {
-        self.parsed.ast()
+    pub fn ast(&self) -> &'a ModModule {
+        self.parsed.syntax()
     }
 
-    pub fn symbols(&self) -> &SymbolTable {
-        &self.symbols
+    pub fn semantic_index(&self) -> &SemanticIndex {
+        &self.semantic_index
     }
 
-    pub fn infer_symbol_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
-        infer_symbol_type(
+    pub fn infer_symbol_public_type(&self, symbol_id: SymbolId) -> QueryResult<Type> {
+        infer_symbol_public_type(
             self.db.upcast(),
             GlobalSymbolId {
                 file_id: self.file_id,
@@ -234,6 +233,18 @@ impl<'a> SemanticLintContext<'a> {
     pub fn extend_diagnostics(&mut self, diagnostics: impl IntoIterator<Item = String>) {
         self.diagnostics.get_mut().extend(diagnostics);
     }
+
+    pub fn resolve_global_symbol(
+        &self,
+        module: &str,
+        symbol_name: &str,
+    ) -> QueryResult<Option<GlobalSymbolId>> {
+        let Some(module) = resolve_module(self.db.upcast(), ModuleName::new(module))? else {
+            return Ok(None);
+        };
+
+        resolve_global_symbol(self.db.upcast(), module, symbol_name)
+    }
 }
 
 #[derive(Debug)]
@@ -12,7 +12,7 @@ use tracing_subscriber::{Layer, Registry};
 use tracing_tree::time::Uptime;
 
 use red_knot::db::{HasJar, ParallelDatabase, QueryError, SourceDb, SourceJar};
-use red_knot::module::{set_module_search_paths, ModuleSearchPath, ModuleSearchPathKind};
+use red_knot::module::{set_module_search_paths, ModuleResolutionInputs};
 use red_knot::program::check::ExecutionMode;
 use red_knot::program::{FileWatcherChange, Program};
 use red_knot::watch::FileWatcher;
@@ -44,12 +44,17 @@ fn main() -> anyhow::Result<()> {
     let workspace_folder = entry_point.parent().unwrap();
     let workspace = Workspace::new(workspace_folder.to_path_buf());
 
-    let workspace_search_path = ModuleSearchPath::new(
-        workspace.root().to_path_buf(),
-        ModuleSearchPathKind::FirstParty,
-    );
+    let workspace_search_path = workspace.root().to_path_buf();
+
+    let search_paths = ModuleResolutionInputs {
+        extra_paths: vec![],
+        workspace_root: workspace_search_path,
+        site_packages: None,
+        custom_typeshed: None,
+    };
 
     let mut program = Program::new(workspace);
-    set_module_search_paths(&mut program, vec![workspace_search_path]);
+    set_module_search_paths(&mut program, search_paths);
 
     let entry_id = program.file_id(entry_point);
     program.workspace_mut().open_file(entry_id);
@@ -7,16 +7,22 @@ use std::sync::Arc;
 use dashmap::mapref::entry::Entry;
 use smol_str::SmolStr;
 
+use red_knot_module_resolver::ModuleKind;
+
 use crate::db::{QueryResult, SemanticDb, SemanticJar};
 use crate::files::FileId;
-use crate::symbols::Dependency;
+use crate::semantic::Dependency;
 use crate::FxDashMap;
 
-/// ID uniquely identifying a module.
+/// Representation of a Python module.
+///
+/// The inner type wrapped by this struct is a unique identifier for the module
+/// that is used by the struct's methods to lazily query information about the module.
 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
 pub struct Module(u32);
 
 impl Module {
+    /// Return the absolute name of the module (e.g. `foo.bar`)
     pub fn name(&self, db: &dyn SemanticDb) -> QueryResult<ModuleName> {
         let jar: &SemanticJar = db.jar()?;
         let modules = &jar.module_resolver;
|
||||
@@ -24,6 +30,7 @@ impl Module {
|
||||
Ok(modules.modules.get(self).unwrap().name.clone())
|
||||
}
|
||||
|
||||
/// Return the path to the source code that defines this module
|
||||
pub fn path(&self, db: &dyn SemanticDb) -> QueryResult<ModulePath> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let modules = &jar.module_resolver;
|
||||
@@ -31,6 +38,7 @@ impl Module {
|
||||
Ok(modules.modules.get(self).unwrap().path.clone())
|
||||
}
|
||||
|
||||
/// Determine whether this module is a single-file module or a package
|
||||
pub fn kind(&self, db: &dyn SemanticDb) -> QueryResult<ModuleKind> {
|
||||
let jar: &SemanticJar = db.jar()?;
|
||||
let modules = &jar.module_resolver;
|
||||
@@ -38,6 +46,16 @@ impl Module {
|
||||
Ok(modules.modules.get(self).unwrap().kind)
|
||||
}
|
||||
|
||||
/// Attempt to resolve a dependency of this module to an absolute [`ModuleName`].
|
||||
///
|
||||
/// A dependency could be either absolute (e.g. the `foo` dependency implied by `from foo import bar`)
|
||||
/// or relative to this module (e.g. the `.foo` dependency implied by `from .foo import bar`)
|
||||
///
|
||||
/// - Returns an error if the query failed.
|
||||
/// - Returns `Ok(None)` if the query succeeded,
|
||||
/// but the dependency refers to a module that does not exist.
|
||||
/// - Returns `Ok(Some(ModuleName))` if the query succeeded,
|
||||
/// and the dependency refers to a module that exists.
|
||||
pub fn resolve_dependency(
|
||||
&self,
|
||||
db: &dyn SemanticDb,
|
||||
@@ -87,7 +105,8 @@ impl Module {
|
||||
|
||||
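A sketch of that contract from a hypothetical caller (illustrative only; it assumes `module` is the `Module` for `foo.bar` and that the parameter cut off above is the `Dependency` being resolved):

// `import baz` in `foo/bar.py`: an absolute dependency.
let absolute = Dependency::Module(ModuleName::new("baz"));
assert_eq!(module.resolve_dependency(db, &absolute)?, Some(ModuleName::new("baz")));

// `from . import x` in `foo/bar.py`: one level up with no explicit module,
// so it resolves to the enclosing package `foo`.
let relative = Dependency::Relative {
    level: NonZeroU32::new(1).unwrap(),
    module: None,
};
assert_eq!(module.resolve_dependency(db, &relative)?, Some(ModuleName::new("foo")));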
/// A module name, e.g. `foo.bar`.
///
-/// Always normalized to the absolute form (never a relative module name).
+/// Always normalized to the absolute form
+/// (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct ModuleName(smol_str::SmolStr);

@@ -124,10 +143,13 @@ impl ModuleName {
        Some(Self(name))
    }

+    /// An iterator over the components of the module name:
+    /// `foo.bar.baz` -> `foo`, `bar`, `baz`
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

+    /// The name of this module's immediate parent, if it has a parent
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;

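Behavior sketch for the two accessors above (hypothetical assertions, not in the diff):

let name = ModuleName::new("foo.bar.baz");
assert_eq!(name.components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
assert_eq!(name.parent(), Some(ModuleName::new("foo.bar")));
assert_eq!(ModuleName::new("foo").parent(), None);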
@@ -157,14 +179,6 @@ impl std::fmt::Display for ModuleName {
    }
}

-#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
-pub enum ModuleKind {
-    Module,
-
-    /// A python package (a `__init__.py` or `__init__.pyi` file)
-    Package,
-}
-
/// A search path in which to search for modules.
/// Corresponds to a path in [`sys.path`](https://docs.python.org/3/library/sys_path_init.html) at runtime.
///
@@ -181,10 +195,12 @@ impl ModuleSearchPath {
        }
    }

+    /// Determine whether this is a first-party, third-party or standard-library search path
    pub fn kind(&self) -> ModuleSearchPathKind {
        self.inner.kind
    }

+    /// Return the location of the search path on the file system
    pub fn path(&self) -> &Path {
        &self.inner.path
    }
@@ -202,22 +218,31 @@ struct ModuleSearchPathInner {
    kind: ModuleSearchPathKind,
}

-#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
+/// Enumeration of the different kinds of search paths type checkers are expected to support.
+///
+/// N.B. Although we don't implement `Ord` for this enum, they are ordered in terms of the
+/// priority that we want to give these modules when resolving them.
+/// This is roughly [the order given in the typing spec], but typeshed's stubs
+/// for the standard library are moved higher up to match Python's semantics at runtime.
+///
+/// [the order given in the typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, is_macro::Is)]
pub enum ModuleSearchPathKind {
-    // Project dependency
+    /// "Extra" paths provided by the user in a config file, env var or CLI flag.
+    /// E.g. mypy's `MYPYPATH` env var, or pyright's `stubPath` configuration setting
+    Extra,
+
+    /// Files in the project we're directly being invoked on
    FirstParty,

-    // e.g. site packages
-    ThirdParty,
-
-    // e.g. built-in modules, typeshed
+    /// The `stdlib` directory of typeshed (either vendored or custom)
    StandardLibrary,
-}
-
-impl ModuleSearchPathKind {
-    pub const fn is_first_party(self) -> bool {
-        matches!(self, Self::FirstParty)
-    }
+
+    /// Stubs or runtime modules installed in site-packages
+    SitePackagesThirdParty,
+
+    /// Vendored third-party stubs from typeshed
+    VendoredThirdParty,
}

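With the `is_macro::Is` derive replacing the hand-written `is_first_party`, each variant now gets a generated predicate (illustrative, assuming the usual `is_macro` naming):

assert!(ModuleSearchPathKind::Extra.is_extra());
assert!(ModuleSearchPathKind::FirstParty.is_first_party());
assert!(ModuleSearchPathKind::StandardLibrary.is_standard_library());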
#[derive(Debug, Eq, PartialEq)]
@@ -231,9 +256,11 @@ pub struct ModuleData {
// Queries
//////////////////////////////////////////////////////

-/// Resolves a module name to a module id
-/// TODO: This would not work with Salsa because `ModuleName` isn't an ingredient and, therefore, cannot be used as part of a query.
-/// For this to work with salsa, it would be necessary to intern all `ModuleName`s.
+/// Resolves a module name to a module.
+///
+/// TODO: This would not work with Salsa because `ModuleName` isn't an ingredient
+/// and, therefore, cannot be used as part of a query.
+/// For this to work with salsa, it would be necessary to intern all `ModuleName`s.
#[tracing::instrument(level = "debug", skip(db))]
pub fn resolve_module(db: &dyn SemanticDb, name: ModuleName) -> QueryResult<Option<Module>> {
    let jar: &SemanticJar = db.jar()?;
@@ -255,7 +282,7 @@ pub fn resolve_module(db: &dyn SemanticDb, name: ModuleName) -> QueryResult<Option<Module>> {
            let file_id = db.file_id(&normalized);
            let path = ModulePath::new(root_path.clone(), file_id);

-            let id = Module(
+            let module = Module(
                modules
                    .next_module_id
                    .fetch_add(1, std::sync::atomic::Ordering::Relaxed),
@@ -263,7 +290,7 @@ pub fn resolve_module(db: &dyn SemanticDb, name: ModuleName) -> QueryResult<Option<Module>> {

            modules
                .modules
-                .insert(id, Arc::from(ModuleData { name, path, kind }));
+                .insert(module, Arc::from(ModuleData { name, path, kind }));

            // A path can map to multiple modules because of symlinks:
            // ```
@@ -272,33 +299,33 @@ pub fn resolve_module(db: &dyn SemanticDb, name: ModuleName) -> QueryResult<Option<Module>> {
            // ```
            // Here, both `foo` and `bar` resolve to the same module but through different paths.
            // That's why we need to insert the absolute path and not the normalized path here.
-            let absolute_id = if absolute_path == normalized {
+            let absolute_file_id = if absolute_path == normalized {
                file_id
            } else {
                db.file_id(&absolute_path)
            };

-            modules.by_file.insert(absolute_id, id);
+            modules.by_file.insert(absolute_file_id, module);

-            entry.insert_entry(id);
+            entry.insert_entry(module);

-            Ok(Some(id))
+            Ok(Some(module))
        }
    }
}

-/// Resolves the module id for the given path.
+/// Resolves the module for the given path.
///
-/// Returns `None` if the path is not a module in `sys.path`.
+/// Returns `None` if the path is not a module locatable via `sys.path`.
#[tracing::instrument(level = "debug", skip(db))]
pub fn path_to_module(db: &dyn SemanticDb, path: &Path) -> QueryResult<Option<Module>> {
    let file = db.file_id(path);
    file_to_module(db, file)
}

-/// Resolves the module id for the file with the given id.
+/// Resolves the module for the file with the given id.
///
-/// Returns `None` if the file is not a module in `sys.path`.
+/// Returns `None` if the file is not a module locatable via `sys.path`.
#[tracing::instrument(level = "debug", skip(db))]
pub fn file_to_module(db: &dyn SemanticDb, file: FileId) -> QueryResult<Option<Module>> {
    let jar: &SemanticJar = db.jar()?;
@@ -325,12 +352,12 @@ pub fn file_to_module(db: &dyn SemanticDb, file: FileId) -> QueryResult<Option<Module>> {

        // Resolve the module name to see if Python would resolve the name to the same path.
        // If it doesn't, then that means that multiple modules have the same name in different
-        // root paths, but that the module corresponding to the passed path is in a lower priority path,
+        // root paths, but that the module corresponding to the passed path is in a lower priority search path,
        // in which case we ignore it.
-        let Some(module_id) = resolve_module(db, module_name)? else {
+        let Some(module) = resolve_module(db, module_name)? else {
            return Ok(None);
        };
-        let module_path = module_id.path(db)?;
+        let module_path = module.path(db)?;

        if module_path.root() == &root_path {
            let Ok(normalized) = path.canonicalize() else {
@@ -350,7 +377,7 @@ pub fn file_to_module(db: &dyn SemanticDb, file: FileId) -> QueryResult<Option<Module>> {
            }

            // Path has been inserted by `resolved`
-            Ok(Some(module_id))
+            Ok(Some(module))
        } else {
            // This path is for a module with the same name but in a module search path with a lower priority.
            // Ignore it.
@@ -363,25 +390,100 @@ pub fn file_to_module(db: &dyn SemanticDb, file: FileId) -> QueryResult<Option<Module>> {
//////////////////////////////////////////////////////

/// Changes the module search paths to `search_paths`.
-pub fn set_module_search_paths(db: &mut dyn SemanticDb, search_paths: Vec<ModuleSearchPath>) {
+pub fn set_module_search_paths(db: &mut dyn SemanticDb, search_paths: ModuleResolutionInputs) {
    let jar: &mut SemanticJar = db.jar_mut();

-    jar.module_resolver = ModuleResolver::new(search_paths);
+    jar.module_resolver = ModuleResolver::new(search_paths.into_ordered_search_paths());
}

-/// Adds a module to the resolver.
+/// Struct for holding the various paths that are put together
+/// to create an `OrderedSearchPaths` instance
+///
+/// - `extra_paths` is a list of user-provided paths
+///   that should take first priority in the module resolution.
+///   Examples in other type checkers are mypy's MYPYPATH environment variable,
+///   or pyright's stubPath configuration setting.
+/// - `workspace_root` is the root of the workspace,
+///   used for finding first-party modules
+/// - `site_packages` is the path to the user's `site-packages` directory,
+///   where third-party packages from ``PyPI`` are installed
+/// - `custom_typeshed` is a path to standard-library typeshed stubs.
+///   Currently this has to be a directory that exists on disk.
+///   (TODO: fall back to vendored stubs if no custom directory is provided.)
+#[derive(Debug)]
+pub struct ModuleResolutionInputs {
+    pub extra_paths: Vec<PathBuf>,
+    pub workspace_root: PathBuf,
+    pub site_packages: Option<PathBuf>,
+    pub custom_typeshed: Option<PathBuf>,
+}
+
+impl ModuleResolutionInputs {
+    /// Implementation of PEP 561's module resolution order
+    /// (with some small, deliberate, differences)
+    fn into_ordered_search_paths(self) -> OrderedSearchPaths {
+        let ModuleResolutionInputs {
+            extra_paths,
+            workspace_root,
+            site_packages,
+            custom_typeshed,
+        } = self;
+
+        OrderedSearchPaths(
+            extra_paths
+                .into_iter()
+                .map(|path| ModuleSearchPath::new(path, ModuleSearchPathKind::Extra))
+                .chain(std::iter::once(ModuleSearchPath::new(
+                    workspace_root,
+                    ModuleSearchPathKind::FirstParty,
+                )))
+                // TODO fallback to vendored typeshed stubs if no custom typeshed directory is provided by the user
+                .chain(custom_typeshed.into_iter().map(|path| {
+                    ModuleSearchPath::new(
+                        path.join(TYPESHED_STDLIB_DIRECTORY),
+                        ModuleSearchPathKind::StandardLibrary,
+                    )
+                }))
+                .chain(site_packages.into_iter().map(|path| {
+                    ModuleSearchPath::new(path, ModuleSearchPathKind::SitePackagesThirdParty)
+                }))
+                // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
+                .collect(),
+        )
+    }
+}
+
+const TYPESHED_STDLIB_DIRECTORY: &str = "stdlib";
+
+/// A resolved module resolution order, implementing PEP 561
+/// (with some small, deliberate differences)
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+struct OrderedSearchPaths(Vec<ModuleSearchPath>);
+
+impl Deref for OrderedSearchPaths {
+    type Target = [ModuleSearchPath];
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}

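A usage sketch of the ordering this produces (hypothetical paths; `into_ordered_search_paths` is private, so callers observe the order via `set_module_search_paths`):

let inputs = ModuleResolutionInputs {
    extra_paths: vec![PathBuf::from("/stubs")],
    workspace_root: PathBuf::from("/repo"),
    site_packages: Some(PathBuf::from("/venv/lib/site-packages")),
    custom_typeshed: Some(PathBuf::from("/typeshed")),
};
// Resulting search order:
//   /stubs                   (Extra)
//   /repo                    (FirstParty)
//   /typeshed/stdlib         (StandardLibrary)
//   /venv/lib/site-packages  (SitePackagesThirdParty)
set_module_search_paths(&mut db, inputs);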
+/// Adds a module located at `path` to the resolver.
///
/// Returns `None` if the path doesn't resolve to a module.
///
-/// Returns `Some` with the id of the module and the ids of the modules that need re-resolving
-/// because they were part of a namespace package and might now resolve differently.
+/// Returns `Some(module, other_modules)`, where `module` is the resolved module
+/// with file location `path`, and `other_modules` is a `Vec` of `ModuleData` instances.
+/// Each element in `other_modules` provides information regarding a single module that needs
+/// re-resolving because it was part of a namespace package and might now resolve differently.
///
/// Note: This won't work with salsa because `Path` is not an ingredient.
pub fn add_module(db: &mut dyn SemanticDb, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {
    // No locking is required because we're holding a mutable reference to `modules`.

    // TODO This needs tests

-    // Note: Intentionally by-pass caching here. Module should not be in the cache yet.
+    // Note: Intentionally bypass caching here. Module should not be in the cache yet.
    let module = path_to_module(db, path).ok()??;

    // The code below is to handle the addition of `__init__.py` files.
@@ -405,15 +507,15 @@ pub fn add_module(db: &mut dyn SemanticDb, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {
    let jar: &mut SemanticJar = db.jar_mut();
    let modules = &mut jar.module_resolver;

-    modules.by_file.retain(|_, id| {
+    modules.by_file.retain(|_, module| {
        if modules
            .modules
-            .get(id)
+            .get(module)
            .unwrap()
            .name
            .starts_with(&parent_name)
        {
-            to_remove.push(*id);
+            to_remove.push(*module);
            false
        } else {
            true
@@ -422,8 +524,8 @@ pub fn add_module(db: &mut dyn SemanticDb, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {

    // TODO remove need for this vec
    let mut removed = Vec::with_capacity(to_remove.len());
-    for id in &to_remove {
-        removed.push(modules.remove_module_by_id(*id));
+    for module in &to_remove {
+        removed.push(modules.remove_module(*module));
    }

    Some((module, removed))
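A sketch of the documented contract (hypothetical paths, not in the diff): adding a new `__init__.py` can turn a namespace package into a regular package, so previously resolved sibling modules are evicted and handed back for re-resolution.

// Suppose `foo` was a namespace package and `foo/__init__.py` was just created:
let (_module, invalidated) = add_module(&mut db, Path::new("/workspace/foo/__init__.py")).unwrap();
for stale in &invalidated {
    // Each `ModuleData` here describes a `foo.*` module whose resolution may have changed.
}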
@@ -432,14 +534,14 @@ pub fn add_module(db: &mut dyn SemanticDb, path: &Path) -> Option<(Module, Vec<Arc<ModuleData>>)> {
#[derive(Default)]
pub struct ModuleResolver {
    /// The search paths where modules are located (and searched). Corresponds to `sys.path` at runtime.
-    search_paths: Vec<ModuleSearchPath>,
+    search_paths: OrderedSearchPaths,

    // Locking: Locking is done by acquiring a (write) lock on `by_name`. This is because `by_name` is the primary
    // lookup method. Acquiring locks in any other ordering can result in deadlocks.
-    /// Resolves a module name to it's module id.
+    /// Looks up a module by name
    by_name: FxDashMap<ModuleName, Module>,

-    /// All known modules, indexed by the module id.
+    /// A map of all known modules to data about those modules
    modules: FxDashMap<Module, Arc<ModuleData>>,

    /// Lookup from absolute path to module.
@@ -449,7 +551,7 @@ pub struct ModuleResolver {
}

impl ModuleResolver {
-    pub fn new(search_paths: Vec<ModuleSearchPath>) -> Self {
+    fn new(search_paths: OrderedSearchPaths) -> Self {
        Self {
            search_paths,
            modules: FxDashMap::default(),
@@ -459,24 +561,27 @@ impl ModuleResolver {
        }
    }

-    pub(crate) fn remove_module(&mut self, file_id: FileId) {
+    /// Remove a module from the inner cache
+    pub(crate) fn remove_module_by_file(&mut self, file_id: FileId) {
        // No locking is required because we're holding a mutable reference to `self`.
-        let Some((_, id)) = self.by_file.remove(&file_id) else {
+        let Some((_, module)) = self.by_file.remove(&file_id) else {
            return;
        };

-        self.remove_module_by_id(id);
+        self.remove_module(module);
    }

-    fn remove_module_by_id(&mut self, id: Module) -> Arc<ModuleData> {
-        let (_, module) = self.modules.remove(&id).unwrap();
+    fn remove_module(&mut self, module: Module) -> Arc<ModuleData> {
+        let (_, module_data) = self.modules.remove(&module).unwrap();

-        self.by_name.remove(&module.name).unwrap();
+        self.by_name.remove(&module_data.name).unwrap();

-        // It's possible that multiple paths map to the same id. Search all other paths referencing the same module id.
-        self.by_file.retain(|_, current_id| *current_id != id);
+        // It's possible that multiple paths map to the same module.
+        // Search all other paths referencing the same module.
+        self.by_file
+            .retain(|_, current_module| *current_module != module);

-        module
+        module_data
    }
}

@@ -505,15 +610,19 @@ impl ModulePath {
        Self { root, file_id }
    }

+    /// The search path that was used to locate the module
    pub fn root(&self) -> &ModuleSearchPath {
        &self.root
    }

+    /// The file containing the source code for the module
    pub fn file(&self) -> FileId {
        self.file_id
    }
}

+/// Given a module name and a list of search paths in which to look up modules,
+/// attempt to resolve the module name
fn resolve_name(
    name: &ModuleName,
    search_paths: &[ModuleSearchPath],
@@ -635,7 +744,9 @@ enum PackageKind {
    /// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
    Root,

-    /// A regular sub-package where the parent contains an `__init__.py`. For example `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
+    /// A regular sub-package where the parent contains an `__init__.py`.
+    ///
+    /// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
    Regular,

    /// A sub-package in a namespace package. A namespace package is a package without an `__init__.py`.
@@ -653,21 +764,23 @@ impl PackageKind {
#[cfg(test)]
mod tests {
    use std::num::NonZeroU32;
+    use std::path::PathBuf;

    use crate::db::tests::TestDb;
    use crate::db::SourceDb;
    use crate::module::{
        path_to_module, resolve_module, set_module_search_paths, ModuleKind, ModuleName,
-        ModuleSearchPath, ModuleSearchPathKind,
+        ModuleResolutionInputs, TYPESHED_STDLIB_DIRECTORY,
    };
-    use crate::symbols::Dependency;
+    use crate::semantic::Dependency;

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

-        src: ModuleSearchPath,
-        site_packages: ModuleSearchPath,
+        src: PathBuf,
+        custom_typeshed: PathBuf,
+        site_packages: PathBuf,
    }

    fn create_resolver() -> std::io::Result<TestCase> {
@@ -675,25 +788,31 @@ mod tests {

        let src = temp_dir.path().join("src");
        let site_packages = temp_dir.path().join("site_packages");
+        let custom_typeshed = temp_dir.path().join("typeshed");

        std::fs::create_dir(&src)?;
        std::fs::create_dir(&site_packages)?;
+        std::fs::create_dir(&custom_typeshed)?;

-        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);
-        let site_packages = ModuleSearchPath::new(
-            site_packages.canonicalize()?,
-            ModuleSearchPathKind::ThirdParty,
-        );
+        let src = src.canonicalize()?;
+        let site_packages = site_packages.canonicalize()?;
+        let custom_typeshed = custom_typeshed.canonicalize()?;

-        let roots = vec![src.clone(), site_packages.clone()];
+        let search_paths = ModuleResolutionInputs {
+            extra_paths: vec![],
+            workspace_root: src.clone(),
+            site_packages: Some(site_packages.clone()),
+            custom_typeshed: Some(custom_typeshed.clone()),
+        };

        let mut db = TestDb::default();
-        set_module_search_paths(&mut db, roots);
+        set_module_search_paths(&mut db, search_paths);

        Ok(TestCase {
            temp_dir,
            db,
            src,
+            custom_typeshed,
            site_packages,
        })
    }
@@ -707,7 +826,7 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo_path = src.path().join("foo.py");
+        let foo_path = src.join("foo.py");
        std::fs::write(&foo_path, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new("foo"))?.unwrap();
@@ -718,7 +837,7 @@ mod tests {
        );

        assert_eq!(ModuleName::new("foo"), foo_module.name(&db)?);
-        assert_eq!(&src, foo_module.path(&db)?.root());
+        assert_eq!(&src, foo_module.path(&db)?.root().path());
        assert_eq!(ModuleKind::Module, foo_module.kind(&db)?);
        assert_eq!(&foo_path, &*db.file_path(foo_module.path(&db)?.file()));

@@ -727,6 +846,76 @@ mod tests {
        Ok(())
    }

+    #[test]
+    fn stdlib() -> anyhow::Result<()> {
+        let TestCase {
+            db,
+            custom_typeshed,
+            ..
+        } = create_resolver()?;
+        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
+        std::fs::create_dir_all(&stdlib_dir).unwrap();
+        let functools_path = stdlib_dir.join("functools.py");
+        std::fs::write(&functools_path, "def update_wrapper(): ...").unwrap();
+        let functools_module = resolve_module(&db, ModuleName::new("functools"))?.unwrap();
+
+        assert_eq!(
+            Some(functools_module),
+            resolve_module(&db, ModuleName::new("functools"))?
+        );
+        assert_eq!(&stdlib_dir, functools_module.path(&db)?.root().path());
+        assert_eq!(ModuleKind::Module, functools_module.kind(&db)?);
+        assert_eq!(
+            &functools_path,
+            &*db.file_path(functools_module.path(&db)?.file())
+        );
+
+        assert_eq!(
+            Some(functools_module),
+            path_to_module(&db, &functools_path)?
+        );
+
+        Ok(())
+    }
+
+    #[test]
+    fn first_party_precedence_over_stdlib() -> anyhow::Result<()> {
+        let TestCase {
+            db,
+            src,
+            custom_typeshed,
+            ..
+        } = create_resolver()?;
+
+        let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
+        std::fs::create_dir_all(&stdlib_dir).unwrap();
+        std::fs::create_dir_all(&src).unwrap();
+
+        let stdlib_functools_path = stdlib_dir.join("functools.py");
+        let first_party_functools_path = src.join("functools.py");
+        std::fs::write(stdlib_functools_path, "def update_wrapper(): ...").unwrap();
+        std::fs::write(&first_party_functools_path, "def update_wrapper(): ...").unwrap();
+        let functools_module = resolve_module(&db, ModuleName::new("functools"))?.unwrap();
+
+        assert_eq!(
+            Some(functools_module),
+            resolve_module(&db, ModuleName::new("functools"))?
+        );
+        assert_eq!(&src, functools_module.path(&db).unwrap().root().path());
+        assert_eq!(ModuleKind::Module, functools_module.kind(&db)?);
+        assert_eq!(
+            &first_party_functools_path,
+            &*db.file_path(functools_module.path(&db)?.file())
+        );
+
+        assert_eq!(
+            Some(functools_module),
+            path_to_module(&db, &first_party_functools_path)?
+        );
+
+        Ok(())
+    }
+
    #[test]
    fn resolve_package() -> anyhow::Result<()> {
        let TestCase {
@@ -736,7 +925,7 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo_dir = src.path().join("foo");
+        let foo_dir = src.join("foo");
        let foo_path = foo_dir.join("__init__.py");
        std::fs::create_dir(&foo_dir)?;
        std::fs::write(&foo_path, "print('Hello, world!')")?;
@@ -744,7 +933,7 @@ mod tests {
        let foo_module = resolve_module(&db, ModuleName::new("foo"))?.unwrap();

        assert_eq!(ModuleName::new("foo"), foo_module.name(&db)?);
-        assert_eq!(&src, foo_module.path(&db)?.root());
+        assert_eq!(&src, foo_module.path(&db)?.root().path());
        assert_eq!(&foo_path, &*db.file_path(foo_module.path(&db)?.file()));

        assert_eq!(Some(foo_module), path_to_module(&db, &foo_path)?);
@@ -764,17 +953,17 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo_dir = src.path().join("foo");
+        let foo_dir = src.join("foo");
        let foo_init = foo_dir.join("__init__.py");
        std::fs::create_dir(&foo_dir)?;
        std::fs::write(&foo_init, "print('Hello, world!')")?;

-        let foo_py = src.path().join("foo.py");
+        let foo_py = src.join("foo.py");
        std::fs::write(&foo_py, "print('Hello, world!')")?;

        let foo_module = resolve_module(&db, ModuleName::new("foo"))?.unwrap();

-        assert_eq!(&src, foo_module.path(&db)?.root());
+        assert_eq!(&src, foo_module.path(&db)?.root().path());
        assert_eq!(&foo_init, &*db.file_path(foo_module.path(&db)?.file()));
        assert_eq!(ModuleKind::Package, foo_module.kind(&db)?);

@@ -793,14 +982,14 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo_stub = src.path().join("foo.pyi");
-        let foo_py = src.path().join("foo.py");
+        let foo_stub = src.join("foo.pyi");
+        let foo_py = src.join("foo.py");
        std::fs::write(&foo_stub, "x: int")?;
        std::fs::write(&foo_py, "print('Hello, world!')")?;

        let foo = resolve_module(&db, ModuleName::new("foo"))?.unwrap();

-        assert_eq!(&src, foo.path(&db)?.root());
+        assert_eq!(&src, foo.path(&db)?.root().path());
        assert_eq!(&foo_stub, &*db.file_path(foo.path(&db)?.file()));

        assert_eq!(Some(foo), path_to_module(&db, &foo_stub)?);
@@ -818,7 +1007,7 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo = src.path().join("foo");
+        let foo = src.join("foo");
        let bar = foo.join("bar");
        let baz = bar.join("baz.py");

@@ -829,7 +1018,7 @@ mod tests {

        let baz_module = resolve_module(&db, ModuleName::new("foo.bar.baz"))?.unwrap();

-        assert_eq!(&src, baz_module.path(&db)?.root());
+        assert_eq!(&src, baz_module.path(&db)?.root().path());
        assert_eq!(&baz, &*db.file_path(baz_module.path(&db)?.file()));

        assert_eq!(Some(baz_module), path_to_module(&db, &baz)?);
@@ -844,6 +1033,7 @@ mod tests {
            temp_dir: _,
            src,
            site_packages,
+            ..
        } = create_resolver()?;

        // From [PEP420](https://peps.python.org/pep-0420/#nested-namespace-packages).
@@ -859,14 +1049,14 @@ mod tests {
        //     two.py
        // ```

-        let parent1 = src.path().join("parent");
+        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

        std::fs::create_dir_all(child1)?;
        std::fs::write(&one, "print('Hello, world!')")?;

-        let parent2 = site_packages.path().join("parent");
+        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

@@ -890,6 +1080,7 @@ mod tests {
            temp_dir: _,
            src,
            site_packages,
+            ..
        } = create_resolver()?;

        // Adopted test case from the [PEP420 examples](https://peps.python.org/pep-0420/#nested-namespace-packages).
@@ -905,7 +1096,7 @@ mod tests {
        //     two.py
        // ```

-        let parent1 = src.path().join("parent");
+        let parent1 = src.join("parent");
        let child1 = parent1.join("child");
        let one = child1.join("one.py");

@@ -913,7 +1104,7 @@ mod tests {
        std::fs::write(child1.join("__init__.py"), "print('Hello, world!')")?;
        std::fs::write(&one, "print('Hello, world!')")?;

-        let parent2 = site_packages.path().join("parent");
+        let parent2 = site_packages.join("parent");
        let child2 = parent2.join("child");
        let two = child2.join("two.py");

@@ -938,17 +1129,18 @@ mod tests {
            src,
            site_packages,
            temp_dir: _temp_dir,
+            ..
        } = create_resolver()?;

-        let foo_src = src.path().join("foo.py");
-        let foo_site_packages = site_packages.path().join("foo.py");
+        let foo_src = src.join("foo.py");
+        let foo_site_packages = site_packages.join("foo.py");

        std::fs::write(&foo_src, "")?;
        std::fs::write(&foo_site_packages, "")?;

        let foo_module = resolve_module(&db, ModuleName::new("foo"))?.unwrap();

-        assert_eq!(&src, foo_module.path(&db)?.root());
+        assert_eq!(&src, foo_module.path(&db)?.root().path());
        assert_eq!(&foo_src, &*db.file_path(foo_module.path(&db)?.file()));

        assert_eq!(Some(foo_module), path_to_module(&db, &foo_src)?);
@@ -967,8 +1159,8 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo = src.path().join("foo.py");
-        let bar = src.path().join("bar.py");
+        let foo = src.join("foo.py");
+        let bar = src.join("bar.py");

        std::fs::write(&foo, "")?;
        std::os::unix::fs::symlink(&foo, &bar)?;
@@ -978,12 +1170,12 @@ mod tests {

        assert_ne!(foo_module, bar_module);

-        assert_eq!(&src, foo_module.path(&db)?.root());
+        assert_eq!(&src, foo_module.path(&db)?.root().path());
        assert_eq!(&foo, &*db.file_path(foo_module.path(&db)?.file()));

        // Bar has a different name but it should point to the same file.

-        assert_eq!(&src, bar_module.path(&db)?.root());
+        assert_eq!(&src, bar_module.path(&db)?.root().path());
        assert_eq!(foo_module.path(&db)?.file(), bar_module.path(&db)?.file());
        assert_eq!(&foo, &*db.file_path(bar_module.path(&db)?.file()));

@@ -1002,7 +1194,7 @@ mod tests {
            ..
        } = create_resolver()?;

-        let foo_dir = src.path().join("foo");
+        let foo_dir = src.join("foo");
        let foo_path = foo_dir.join("__init__.py");
        let bar_path = foo_dir.join("bar.py");

@@ -1,85 +1,33 @@
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

-use ruff_python_ast as ast;
-use ruff_python_parser::{Mode, ParseError};
-use ruff_text_size::{Ranged, TextRange};
+use ruff_python_ast::ModModule;
+use ruff_python_parser::Parsed;

use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SourceDb};
use crate::files::FileId;
use crate::source::source_text;

-#[derive(Debug, Clone, PartialEq)]
-pub struct Parsed {
-    inner: Arc<ParsedInner>,
-}
-
-#[derive(Debug, PartialEq)]
-struct ParsedInner {
-    ast: ast::ModModule,
-    errors: Vec<ParseError>,
-}
-
-impl Parsed {
-    fn new(ast: ast::ModModule, errors: Vec<ParseError>) -> Self {
-        Self {
-            inner: Arc::new(ParsedInner { ast, errors }),
-        }
-    }
-
-    pub(crate) fn from_text(text: &str) -> Self {
-        let result = ruff_python_parser::parse(text, Mode::Module);
-
-        let (module, errors) = match result {
-            Ok(ast::Mod::Module(module)) => (module, vec![]),
-            Ok(ast::Mod::Expression(expression)) => (
-                ast::ModModule {
-                    range: expression.range(),
-                    body: vec![ast::Stmt::Expr(ast::StmtExpr {
-                        range: expression.range(),
-                        value: expression.body,
-                    })],
-                },
-                vec![],
-            ),
-            Err(errors) => (
-                ast::ModModule {
-                    range: TextRange::default(),
-                    body: Vec::new(),
-                },
-                vec![errors],
-            ),
-        };
-
-        Parsed::new(module, errors)
-    }
-
-    pub fn ast(&self) -> &ast::ModModule {
-        &self.inner.ast
-    }
-
-    pub fn errors(&self) -> &[ParseError] {
-        &self.inner.errors
-    }
-}
-
#[tracing::instrument(level = "debug", skip(db))]
-pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Parsed> {
+pub(crate) fn parse(db: &dyn SourceDb, file_id: FileId) -> QueryResult<Arc<Parsed<ModModule>>> {
    let jar = db.jar()?;

    jar.parsed.get(&file_id, |file_id| {
        let source = source_text(db, *file_id)?;

-        Ok(Parsed::from_text(source.text()))
+        Ok(Arc::new(ruff_python_parser::parse_unchecked_source(
+            source.text(),
+            source.kind().into(),
+        )))
    })
}

#[derive(Debug, Default)]
-pub struct ParsedStorage(KeyValueCache<FileId, Parsed>);
+pub struct ParsedStorage(KeyValueCache<FileId, Arc<Parsed<ModModule>>>);

impl Deref for ParsedStorage {
-    type Target = KeyValueCache<FileId, Parsed>;
+    type Target = KeyValueCache<FileId, Arc<Parsed<ModModule>>>;

    fn deref(&self) -> &Self::Target {
        &self.0

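A sketch of what callers see after this change (hypothetical caller, but consistent with the `ruff_python_parser::Parsed` API used elsewhere in this diff):

let parsed: Arc<Parsed<ModModule>> = parse(db, file_id)?;
let module: &ModModule = parsed.syntax(); // replaces the old hand-rolled `Parsed::ast()`
let errors = parsed.errors();             // parse errors still travel with the AST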
@@ -6,7 +6,7 @@ use crate::files::FileId;
use crate::lint::{lint_semantic, lint_syntax, Diagnostics};
use crate::module::{file_to_module, resolve_module};
use crate::program::Program;
-use crate::symbols::{symbol_table, Dependency};
+use crate::semantic::{semantic_index, Dependency};

impl Program {
    /// Checks all open files in the workspace and its dependencies.
@@ -28,8 +28,8 @@ impl Program {
    fn check_file(&self, file: FileId, context: &CheckFileContext) -> QueryResult<Diagnostics> {
        self.cancelled()?;

-        let symbol_table = symbol_table(self, file)?;
-        let dependencies = symbol_table.dependencies();
+        let index = semantic_index(self, file)?;
+        let dependencies = index.symbol_table().dependencies();

        if !dependencies.is_empty() {
            let module = file_to_module(self, file)?;

@@ -42,8 +42,8 @@ impl Program {

        let (source, semantic, lint) = self.jars_mut();
        for change in aggregated_changes.iter() {
-            semantic.module_resolver.remove_module(change.id);
-            semantic.symbol_tables.remove(&change.id);
+            semantic.module_resolver.remove_module_by_file(change.id);
+            semantic.semantic_indices.remove(&change.id);
            source.sources.remove(&change.id);
            source.parsed.remove(&change.id);
            // TODO: remove all dependent modules as well

crates/red_knot/src/semantic.rs (new file, 882 lines)
@@ -0,0 +1,882 @@
use std::num::NonZeroU32;

use ruff_python_ast as ast;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::AstNode;

use crate::ast_ids::{NodeKey, TypedNodeKey};
use crate::cache::KeyValueCache;
use crate::db::{QueryResult, SemanticDb, SemanticJar};
use crate::files::FileId;
use crate::module::Module;
use crate::module::ModuleName;
use crate::parse::parse;
use crate::Name;
pub(crate) use definitions::Definition;
use definitions::{ImportDefinition, ImportFromDefinition};
pub(crate) use flow_graph::ConstrainedDefinition;
use flow_graph::{FlowGraph, FlowGraphBuilder, FlowNodeId, ReachableDefinitionsIterator};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::FxHashMap;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
pub(crate) use symbol_table::{Dependency, SymbolId};
use symbol_table::{ScopeId, ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};
pub(crate) use types::{infer_definition_type, infer_symbol_public_type, Type, TypeStore};

mod definitions;
mod flow_graph;
mod symbol_table;
mod types;

#[tracing::instrument(level = "debug", skip(db))]
pub fn semantic_index(db: &dyn SemanticDb, file_id: FileId) -> QueryResult<Arc<SemanticIndex>> {
    let jar: &SemanticJar = db.jar()?;

    jar.semantic_indices.get(&file_id, |_| {
        let parsed = parse(db.upcast(), file_id)?;
        Ok(Arc::from(SemanticIndex::from_ast(parsed.syntax())))
    })
}

#[tracing::instrument(level = "debug", skip(db))]
pub fn resolve_global_symbol(
    db: &dyn SemanticDb,
    module: Module,
    name: &str,
) -> QueryResult<Option<GlobalSymbolId>> {
    let file_id = module.path(db)?.file();
    let symbol_table = &semantic_index(db, file_id)?.symbol_table;
    let Some(symbol_id) = symbol_table.root_symbol_id_by_name(name) else {
        return Ok(None);
    };
    Ok(Some(GlobalSymbolId { file_id, symbol_id }))
}

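A minimal sketch of consuming the `semantic_index` query (hypothetical caller; `db` is any `SemanticDb` implementation):

let index = semantic_index(db, file_id)?;
// The index bundles the per-file symbol table with the flow graph,
// so one cached query serves both name lookups and reachability questions.
let root_symbol_count = index.symbol_table().root_symbols().count();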
#[newtype_index]
pub struct ExpressionId;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct GlobalSymbolId {
    pub(crate) file_id: FileId,
    pub(crate) symbol_id: SymbolId,
}

#[derive(Debug)]
pub struct SemanticIndex {
    symbol_table: SymbolTable,
    flow_graph: FlowGraph,
    expressions: FxHashMap<NodeKey, ExpressionId>,
    expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}

impl SemanticIndex {
    pub fn from_ast(module: &ast::ModModule) -> Self {
        let root_scope_id = SymbolTable::root_scope_id();
        let mut indexer = SemanticIndexer {
            symbol_table_builder: SymbolTableBuilder::new(),
            flow_graph_builder: FlowGraphBuilder::new(),
            scopes: vec![ScopeState {
                scope_id: root_scope_id,
                current_flow_node_id: FlowGraph::start(),
            }],
            expressions: FxHashMap::default(),
            expressions_by_id: IndexVec::default(),
            current_definition: None,
        };
        indexer.visit_body(&module.body);
        indexer.finish()
    }

    fn resolve_expression_id<'a>(
        &self,
        ast: &'a ast::ModModule,
        expression_id: ExpressionId,
    ) -> ast::AnyNodeRef<'a> {
        let node_key = self.expressions_by_id[expression_id];
        node_key
            .resolve(ast.as_any_node_ref())
            .expect("node to resolve")
    }

    /// Return an iterator over all definitions of `symbol_id` reachable from `use_expr`. The value
    /// of `symbol_id` in `use_expr` must originate from one of the iterated definitions (or from
    /// an external reassignment of the name outside of this scope).
    pub fn reachable_definitions(
        &self,
        symbol_id: SymbolId,
        use_expr: &ast::Expr,
    ) -> ReachableDefinitionsIterator {
        let expression_id = self.expression_id(use_expr);
        ReachableDefinitionsIterator::new(
            &self.flow_graph,
            symbol_id,
            self.flow_graph.for_expr(expression_id),
        )
    }

    pub fn expression_id(&self, expression: &ast::Expr) -> ExpressionId {
        self.expressions[&NodeKey::from_node(expression.into())]
    }

    pub fn symbol_table(&self) -> &SymbolTable {
        &self.symbol_table
    }
}

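Sketch of the reachability API in use (hypothetical driver code; `use_expr` is an `ast::Expr` naming `x` somewhere in the file):

let index = semantic_index(db, file_id)?;
let symbol_id = index.symbol_table().root_symbol_id_by_name("x").unwrap();
for definition in index.reachable_definitions(symbol_id, use_expr) {
    // Each item is a (possibly constrained) definition that can flow into `use_expr`.
}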
#[derive(Debug)]
struct ScopeState {
    scope_id: ScopeId,
    current_flow_node_id: FlowNodeId,
}

#[derive(Debug)]
struct SemanticIndexer {
    symbol_table_builder: SymbolTableBuilder,
    flow_graph_builder: FlowGraphBuilder,
    scopes: Vec<ScopeState>,
    /// the definition whose target(s) we are currently walking
    current_definition: Option<Definition>,
    expressions: FxHashMap<NodeKey, ExpressionId>,
    expressions_by_id: IndexVec<ExpressionId, NodeKey>,
}

impl SemanticIndexer {
    pub(crate) fn finish(mut self) -> SemanticIndex {
        self.expressions.shrink_to_fit();
        self.expressions_by_id.shrink_to_fit();
        let SemanticIndexer {
            flow_graph_builder,
            symbol_table_builder,
            expressions,
            expressions_by_id,
            ..
        } = self;
        SemanticIndex {
            flow_graph: flow_graph_builder.finish(),
            symbol_table: symbol_table_builder.finish(),
            expressions,
            expressions_by_id,
        }
    }

    fn set_current_flow_node(&mut self, new_flow_node_id: FlowNodeId) {
        let scope_state = self.scopes.last_mut().expect("scope stack is never empty");
        scope_state.current_flow_node_id = new_flow_node_id;
    }

    fn current_flow_node(&self) -> FlowNodeId {
        self.scopes
            .last()
            .expect("scope stack is never empty")
            .current_flow_node_id
    }

    fn add_or_update_symbol(&mut self, identifier: &str, flags: SymbolFlags) -> SymbolId {
        self.symbol_table_builder
            .add_or_update_symbol(self.cur_scope(), identifier, flags)
    }

    fn add_or_update_symbol_with_def(
        &mut self,
        identifier: &str,
        definition: Definition,
    ) -> SymbolId {
        let symbol_id = self.add_or_update_symbol(identifier, SymbolFlags::IS_DEFINED);
        self.symbol_table_builder
            .add_definition(symbol_id, definition.clone());
        let new_flow_node_id =
            self.flow_graph_builder
                .add_definition(symbol_id, definition, self.current_flow_node());
        self.set_current_flow_node(new_flow_node_id);
        symbol_id
    }

    fn push_scope(
        &mut self,
        name: &str,
        kind: ScopeKind,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
    ) -> ScopeId {
        let scope_id = self.symbol_table_builder.add_child_scope(
            self.cur_scope(),
            name,
            kind,
            definition,
            defining_symbol,
        );
        self.scopes.push(ScopeState {
            scope_id,
            current_flow_node_id: FlowGraph::start(),
        });
        scope_id
    }

    fn pop_scope(&mut self) -> ScopeId {
        self.scopes
            .pop()
            .expect("Scope stack should never be empty")
            .scope_id
    }

    fn cur_scope(&self) -> ScopeId {
        self.scopes
            .last()
            .expect("Scope stack should never be empty")
            .scope_id
    }

    fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
        self.symbol_table_builder
            .record_scope_for_node(node_key, scope_id);
    }

    fn insert_constraint(&mut self, expr: &ast::Expr) {
        let node_key = NodeKey::from_node(expr.into());
        let expression_id = self.expressions[&node_key];
        let constraint = self
            .flow_graph_builder
            .add_constraint(self.current_flow_node(), expression_id);
        self.set_current_flow_node(constraint);
    }

    fn with_type_params(
        &mut self,
        name: &str,
        params: &Option<Box<ast::TypeParams>>,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
        nested: impl FnOnce(&mut Self) -> ScopeId,
    ) -> ScopeId {
        if let Some(type_params) = params {
            self.push_scope(name, ScopeKind::Annotation, definition, defining_symbol);
            for type_param in &type_params.type_params {
                let name = match type_param {
                    ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
                    ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
                    ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
                };
                self.add_or_update_symbol(name, SymbolFlags::IS_DEFINED);
            }
        }
        let scope_id = nested(self);
        if params.is_some() {
            self.pop_scope();
        }
        scope_id
    }
}

impl SourceOrderVisitor<'_> for SemanticIndexer {
    fn visit_expr(&mut self, expr: &ast::Expr) {
        let node_key = NodeKey::from_node(expr.into());
        let expression_id = self.expressions_by_id.push(node_key);

        let flow_expression_id = self
            .flow_graph_builder
            .record_expr(self.current_flow_node());
        debug_assert_eq!(expression_id, flow_expression_id);

        let symbol_expression_id = self
            .symbol_table_builder
            .record_expression(self.cur_scope());

        debug_assert_eq!(expression_id, symbol_expression_id);

        self.expressions.insert(node_key, expression_id);

        match expr {
            ast::Expr::Name(ast::ExprName { id, ctx, .. }) => {
                let flags = match ctx {
                    ast::ExprContext::Load => SymbolFlags::IS_USED,
                    ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Invalid => SymbolFlags::empty(),
                };
                self.add_or_update_symbol(id, flags);
                if flags.contains(SymbolFlags::IS_DEFINED) {
                    if let Some(curdef) = self.current_definition.clone() {
                        self.add_or_update_symbol_with_def(id, curdef);
                    }
                }
                ast::visitor::source_order::walk_expr(self, expr);
            }
            ast::Expr::Named(node) => {
                debug_assert!(self.current_definition.is_none());
                self.current_definition =
                    Some(Definition::NamedExpr(TypedNodeKey::from_node(node)));
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.target);
                self.current_definition = None;
                self.visit_expr(&node.value);
            }
            ast::Expr::If(ast::ExprIf {
                body, test, orelse, ..
            }) => {
                // TODO detect statically known truthy or falsy test (via type inference, not naive
                // AST inspection, so we can't simplify here, need to record test expression in CFG
                // for later checking)

                self.visit_expr(test);

                let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());

                self.set_current_flow_node(if_branch);
                self.insert_constraint(test);
                self.visit_expr(body);

                let post_body = self.current_flow_node();

                self.set_current_flow_node(if_branch);
                self.visit_expr(orelse);

                let post_else = self
                    .flow_graph_builder
                    .add_phi(self.current_flow_node(), post_body);

                self.set_current_flow_node(post_else);
            }
            _ => {
                ast::visitor::source_order::walk_expr(self, expr);
            }
        }
    }

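// Worked example (illustrative only) of the `ExprIf` arm above, for the Python
// expression `y = 1 if cond else 2`:
//
//   visit(cond) -> branch --+-- constraint(cond) -> visit(1) --+
//                           +------------ visit(2) ------------+-> phi
//
// The phi node joins the two arms, so a later use of `y` sees definitions
// reachable through either side of the conditional.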
    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        // TODO need to capture more definition statements here
        match stmt {
            ast::Stmt::ClassDef(node) => {
                let node_key = TypedNodeKey::from_node(node);
                let def = Definition::ClassDef(node_key.clone());
                let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
                for decorator in &node.decorator_list {
                    self.visit_decorator(decorator);
                }
                let scope_id = self.with_type_params(
                    &node.name,
                    &node.type_params,
                    Some(def.clone()),
                    Some(symbol_id),
                    |indexer| {
                        if let Some(arguments) = &node.arguments {
                            indexer.visit_arguments(arguments);
                        }
                        let scope_id = indexer.push_scope(
                            &node.name,
                            ScopeKind::Class,
                            Some(def.clone()),
                            Some(symbol_id),
                        );
                        indexer.visit_body(&node.body);
                        indexer.pop_scope();
                        scope_id
                    },
                );
                self.record_scope_for_node(*node_key.erased(), scope_id);
            }
            ast::Stmt::FunctionDef(node) => {
                let node_key = TypedNodeKey::from_node(node);
                let def = Definition::FunctionDef(node_key.clone());
                let symbol_id = self.add_or_update_symbol_with_def(&node.name, def.clone());
                for decorator in &node.decorator_list {
                    self.visit_decorator(decorator);
                }
                let scope_id = self.with_type_params(
                    &node.name,
                    &node.type_params,
                    Some(def.clone()),
                    Some(symbol_id),
                    |indexer| {
                        indexer.visit_parameters(&node.parameters);
                        for expr in &node.returns {
                            indexer.visit_annotation(expr);
                        }
                        let scope_id = indexer.push_scope(
                            &node.name,
                            ScopeKind::Function,
                            Some(def.clone()),
                            Some(symbol_id),
                        );
                        indexer.visit_body(&node.body);
                        indexer.pop_scope();
                        scope_id
                    },
                );
                self.record_scope_for_node(*node_key.erased(), scope_id);
            }
            ast::Stmt::Import(ast::StmtImport { names, .. }) => {
                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.split('.').next().unwrap()
                    };

                    let module = ModuleName::new(&alias.name.id);

                    let def = Definition::Import(ImportDefinition {
                        module: module.clone(),
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                    self.symbol_table_builder
                        .add_dependency(Dependency::Module(module));
                }
            }
            ast::Stmt::ImportFrom(ast::StmtImportFrom {
                module,
                names,
                level,
                ..
            }) => {
                let module = module.as_ref().map(|m| ModuleName::new(&m.id));

                for alias in names {
                    let symbol_name = if let Some(asname) = &alias.asname {
                        asname.id.as_str()
                    } else {
                        alias.name.id.as_str()
                    };
                    let def = Definition::ImportFrom(ImportFromDefinition {
                        module: module.clone(),
                        name: Name::new(&alias.name.id),
                        level: *level,
                    });
                    self.add_or_update_symbol_with_def(symbol_name, def);
                }

                let dependency = if let Some(module) = module {
                    match NonZeroU32::new(*level) {
                        Some(level) => Dependency::Relative {
                            level,
                            module: Some(module),
                        },
                        None => Dependency::Module(module),
                    }
                } else {
                    Dependency::Relative {
                        level: NonZeroU32::new(*level)
                            .expect("Import without a module to have a level > 0"),
                        module,
                    }
                };

                self.symbol_table_builder.add_dependency(dependency);
            }
            ast::Stmt::Assign(node) => {
                debug_assert!(self.current_definition.is_none());
                self.visit_expr(&node.value);
                self.current_definition =
                    Some(Definition::Assignment(TypedNodeKey::from_node(node)));
                for expr in &node.targets {
                    self.visit_expr(expr);
                }

                self.current_definition = None;
            }
            ast::Stmt::If(node) => {
                // TODO detect statically known truthy or falsy test (via type inference, not naive
                // AST inspection, so we can't simplify here, need to record test expression in CFG
                // for later checking)

                // we visit the if "test" condition first regardless
                self.visit_expr(&node.test);

                // create branch node: does the if test pass or not?
                let if_branch = self.flow_graph_builder.add_branch(self.current_flow_node());

                // visit the body of the `if` clause
                self.set_current_flow_node(if_branch);
                self.insert_constraint(&node.test);
                self.visit_body(&node.body);

                // Flow node for the last if/elif condition branch; represents the "no branch
                // taken yet" possibility (where "taking a branch" means that the condition in an
                // if or elif evaluated to true and control flow went into that clause).
                let mut prior_branch = if_branch;

                // Flow node for the state after the prior if/elif/else clause; represents "we have
                // taken one of the branches up to this point." Initially set to the post-if-clause
                // state, later will be set to the phi node joining that possible path with the
                // possibility that we took a later if/elif/else clause instead.
                let mut post_prior_clause = self.current_flow_node();

                // Flag to mark if the final clause is an "else" -- if so, that means the "match no
                // clauses" path is not possible, we have to go through one of the clauses.
                let mut last_branch_is_else = false;

                for clause in &node.elif_else_clauses {
                    if let Some(test) = &clause.test {
                        self.visit_expr(test);
                        // This is an elif clause. Create a new branch node. Its predecessor is the
                        // previous branch node, because we can only take one branch in an entire
                        // if/elif/else chain, so if we take this branch, it can only be because we
                        // didn't take the previous one.
                        prior_branch = self.flow_graph_builder.add_branch(prior_branch);
                        self.set_current_flow_node(prior_branch);
                        self.insert_constraint(test);
                    } else {
                        // This is an else clause. No need to create a branch node; there's no
                        // branch here, if we haven't taken any previous branch, we definitely go
                        // into the "else" clause.
                        self.set_current_flow_node(prior_branch);
                        last_branch_is_else = true;
                    }
                    self.visit_elif_else_clause(clause);
                    // Update `post_prior_clause` to a new phi node joining the possibility that we
                    // took any of the previous branches with the possibility that we took the one
                    // just visited.
                    post_prior_clause = self
                        .flow_graph_builder
                        .add_phi(self.current_flow_node(), post_prior_clause);
                }

                if !last_branch_is_else {
                    // Final branch was not an "else", which means it's possible we took zero
                    // branches in the entire if/elif chain, so we need one more phi node to join
                    // the "no branches taken" possibility.
                    post_prior_clause = self
                        .flow_graph_builder
                        .add_phi(post_prior_clause, prior_branch);
                }

                // Onward, with current flow node set to our final Phi node.
                self.set_current_flow_node(post_prior_clause);
            }
            _ => {
                ast::visitor::source_order::walk_stmt(self, stmt);
            }
        }
    }
}

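// Worked example (illustrative only) of the statement-level `If` handling above:
//
//     if a:   x = 1
//     elif b: x = 2
//
// builds: branch(a) -> constraint(a) -> def(x=1), joined by a phi with the
// elif path; branch(b) hangs off branch(a), since the elif is only reached
// when the first test failed. Because there is no `else`,
// `last_branch_is_else` stays false, so a final phi also joins the
// "no branch taken" path, through which `x` may still be unbound.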
#[derive(Debug, Default)]
pub struct SemanticIndexStorage(KeyValueCache<FileId, Arc<SemanticIndex>>);

impl Deref for SemanticIndexStorage {
    type Target = KeyValueCache<FileId, Arc<SemanticIndex>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for SemanticIndexStorage {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

#[cfg(test)]
mod tests {
    use crate::semantic::symbol_table::{Symbol, SymbolIterator};
    use ruff_python_ast as ast;
    use ruff_python_ast::ModModule;
    use ruff_python_parser::{Mode, Parsed};

    use super::{Definition, ScopeKind, SemanticIndex, SymbolId};

    fn parse(code: &str) -> Parsed<ModModule> {
        ruff_python_parser::parse_unchecked(code, Mode::Module)
            .try_into_module()
            .unwrap()
    }

    fn names<I>(it: SymbolIterator<I>) -> Vec<&str>
    where
        I: Iterator<Item = SymbolId>,
    {
        let mut symbols: Vec<_> = it.map(Symbol::name).collect();
        symbols.sort_unstable();
        symbols
    }

    #[test]
    fn empty() {
        let parsed = parse("");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()).len(), 0);
    }

    #[test]
    fn simple() {
        let parsed = parse("x");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("x").unwrap())
                .len(),
            0
        );
    }

    #[test]
    fn annotation_only() {
        let parsed = parse("x: int");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["int", "x"]);
        // TODO record definition
    }

    #[test]
    fn import() {
        let parsed = parse("import foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("foo").unwrap())
                .len(),
            1
        );
    }

    #[test]
    fn import_sub() {
        let parsed = parse("import foo.bar");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
    }

    #[test]
    fn import_as() {
        let parsed = parse("import foo.bar as baz");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["baz"]);
    }

    #[test]
    fn import_from() {
        let parsed = parse("from bar import foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("foo").unwrap())
                .len(),
            1
        );
        assert!(
            table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                let s = sid.symbol(&table);
                s.is_defined() || !s.is_used()
            }),
            "symbols that are defined get the defined flag"
        );
    }

    #[test]
    fn assign() {
        let parsed = parse("x = foo");
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["foo", "x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("x").unwrap())
                .len(),
            1
        );
        assert!(
            table.root_symbol_id_by_name("foo").is_some_and(|sid| {
                let s = sid.symbol(&table);
                !s.is_defined() && s.is_used()
            }),
            "a symbol used but not defined in a scope should have only the used flag"
        );
    }

    #[test]
    fn class_scope() {
        let parsed = parse(
            "
class C:
    x = 1
y = 2
",
        );
        let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
        assert_eq!(names(table.root_symbols()), vec!["C", "y"]);
        let scopes = table.root_child_scope_ids();
        assert_eq!(scopes.len(), 1);
        let c_scope = scopes[0].scope(&table);
        assert_eq!(c_scope.kind(), ScopeKind::Class);
        assert_eq!(c_scope.name(), "C");
        assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
        assert_eq!(
            table
                .definitions(table.root_symbol_id_by_name("C").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn func_scope() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func():
|
||||
x = 1
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func", "y"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope = scopes[0].scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("func").unwrap())
|
||||
.len(),
|
||||
1
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dupes() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func():
|
||||
x = 1
|
||||
def func():
|
||||
y = 2
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 2);
|
||||
let func_scope_1 = scopes[0].scope(&table);
|
||||
let func_scope_2 = scopes[1].scope(&table);
|
||||
assert_eq!(func_scope_1.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope_1.name(), "func");
|
||||
assert_eq!(func_scope_2.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope_2.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[0])), vec!["x"]);
|
||||
assert_eq!(names(table.symbols_for_scope(scopes[1])), vec!["y"]);
|
||||
assert_eq!(
|
||||
table
|
||||
.definitions(table.root_symbol_id_by_name("func").unwrap())
|
||||
.len(),
|
||||
2
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_func() {
|
||||
let parsed = parse(
|
||||
"
|
||||
def func[T]():
|
||||
x = 1
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["func"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let ann_scope_id = scopes[0];
|
||||
let ann_scope = ann_scope_id.scope(&table);
|
||||
assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
|
||||
assert_eq!(ann_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
|
||||
let scopes = table.child_scope_ids_of(ann_scope_id);
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope_id = scopes[0];
|
||||
let func_scope = func_scope_id.scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Function);
|
||||
assert_eq!(func_scope.name(), "func");
|
||||
assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_class() {
|
||||
let parsed = parse(
|
||||
"
|
||||
class C[T]:
|
||||
x = 1
|
||||
",
|
||||
);
|
||||
let table = SemanticIndex::from_ast(parsed.syntax()).symbol_table;
|
||||
assert_eq!(names(table.root_symbols()), vec!["C"]);
|
||||
let scopes = table.root_child_scope_ids();
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let ann_scope_id = scopes[0];
|
||||
let ann_scope = ann_scope_id.scope(&table);
|
||||
assert_eq!(ann_scope.kind(), ScopeKind::Annotation);
|
||||
assert_eq!(ann_scope.name(), "C");
|
||||
assert_eq!(names(table.symbols_for_scope(ann_scope_id)), vec!["T"]);
|
||||
assert!(
|
||||
table
|
||||
.symbol_by_name(ann_scope_id, "T")
|
||||
.is_some_and(|s| s.is_defined() && !s.is_used()),
|
||||
"type parameters are defined by the scope that introduces them"
|
||||
);
|
||||
let scopes = table.child_scope_ids_of(ann_scope_id);
|
||||
assert_eq!(scopes.len(), 1);
|
||||
let func_scope_id = scopes[0];
|
||||
let func_scope = func_scope_id.scope(&table);
|
||||
assert_eq!(func_scope.kind(), ScopeKind::Class);
|
||||
assert_eq!(func_scope.name(), "C");
|
||||
assert_eq!(names(table.symbols_for_scope(func_scope_id)), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reachability_trivial() {
|
||||
let parsed = parse("x = 1; x");
|
||||
let ast = parsed.syntax();
|
||||
let index = SemanticIndex::from_ast(ast);
|
||||
let table = &index.symbol_table;
|
||||
let x_sym = table
|
||||
.root_symbol_id_by_name("x")
|
||||
.expect("x symbol should exist");
|
||||
let ast::Stmt::Expr(ast::StmtExpr { value: x_use, .. }) = &ast.body[1] else {
|
||||
panic!("should be an expr")
|
||||
};
|
||||
let x_defs: Vec<_> = index
|
||||
.reachable_definitions(x_sym, x_use)
|
||||
.map(|constrained_definition| constrained_definition.definition)
|
||||
.collect();
|
||||
assert_eq!(x_defs.len(), 1);
|
||||
let Definition::Assignment(node_key) = &x_defs[0] else {
|
||||
panic!("def should be an assignment")
|
||||
};
|
||||
let Some(def_node) = node_key.resolve(ast.into()) else {
|
||||
panic!("node key should resolve")
|
||||
};
|
||||
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: ast::Number::Int(num),
|
||||
..
|
||||
}) = &*def_node.value
|
||||
else {
|
||||
panic!("should be a number literal")
|
||||
};
|
||||
assert_eq!(*num, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expression_scope() {
|
||||
let parsed = parse("x = 1;\ndef test():\n y = 4");
|
||||
let ast = parsed.syntax();
|
||||
let index = SemanticIndex::from_ast(ast);
|
||||
let table = &index.symbol_table;
|
||||
|
||||
let x_sym = table
|
||||
.root_symbol_by_name("x")
|
||||
.expect("x symbol should exist");
|
||||
|
||||
let x_stmt = ast.body[0].as_assign_stmt().unwrap();
|
||||
|
||||
let x_id = index.expression_id(&x_stmt.targets[0]);
|
||||
|
||||
assert_eq!(table.scope_of_expression(x_id).kind(), ScopeKind::Module);
|
||||
assert_eq!(table.scope_id_of_expression(x_id), x_sym.scope_id());
|
||||
|
||||
let def = ast.body[1].as_function_def_stmt().unwrap();
|
||||
let y_stmt = def.body[0].as_assign_stmt().unwrap();
|
||||
let y_id = index.expression_id(&y_stmt.targets[0]);
|
||||
|
||||
assert_eq!(table.scope_of_expression(y_id).kind(), ScopeKind::Function);
|
||||
}
|
||||
}
|
||||

crates/red_knot/src/semantic/definitions.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
use crate::ast_ids::TypedNodeKey;
use crate::semantic::ModuleName;
use crate::Name;
use ruff_python_ast as ast;

// TODO storing TypedNodeKey for definitions means we have to search to find them again in the AST;
// this is at best O(log n). If looking up definitions is a bottleneck we should look for
// alternatives here.
// TODO intern Definitions in SymbolTable and reference using IDs?
#[derive(Clone, Debug)]
pub enum Definition {
    // For the import cases, we don't need a reference to any arbitrary AST subtrees (annotations,
    // RHS), and referencing just the import statement node is imprecise (a single import statement
    // can assign many symbols, so we'd have to re-search for the one we care about), so we just
    // copy the small amount of information we need from the AST.
    Import(ImportDefinition),
    ImportFrom(ImportFromDefinition),
    ClassDef(TypedNodeKey<ast::StmtClassDef>),
    FunctionDef(TypedNodeKey<ast::StmtFunctionDef>),
    Assignment(TypedNodeKey<ast::StmtAssign>),
    AnnotatedAssignment(TypedNodeKey<ast::StmtAnnAssign>),
    NamedExpr(TypedNodeKey<ast::ExprNamed>),
    /// represents the implicit initial definition of every name as "unbound"
    Unbound,
    // TODO with statements, except handlers, function args...
}

#[derive(Clone, Debug)]
pub struct ImportDefinition {
    pub module: ModuleName,
}

#[derive(Clone, Debug)]
pub struct ImportFromDefinition {
    pub module: Option<ModuleName>,
    pub name: Name,
    pub level: u32,
}

impl ImportFromDefinition {
    pub fn module(&self) -> Option<&ModuleName> {
        self.module.as_ref()
    }

    pub fn name(&self) -> &Name {
        &self.name
    }

    pub fn level(&self) -> u32 {
        self.level
    }
}
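
// Illustrative sketch (editorial addition, not part of the original diff): for the
// statement `from ..foo import bar`, the recorded definition would carry roughly the
// following (`ModuleName::new` is a hypothetical constructor, shown only for shape):
//
//     ImportFromDefinition {
//         module: Some(ModuleName::new("foo")),
//         name: Name::new("bar"),
//         level: 2, // two leading dots in the relative import
//     }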

crates/red_knot/src/semantic/flow_graph.rs (new file, 270 lines)
@@ -0,0 +1,270 @@
use super::symbol_table::SymbolId;
use crate::semantic::{Definition, ExpressionId};
use ruff_index::{newtype_index, IndexVec};
use std::iter::FusedIterator;
use std::ops::Range;

#[newtype_index]
pub struct FlowNodeId;

#[derive(Debug)]
pub(crate) enum FlowNode {
    Start,
    Definition(DefinitionFlowNode),
    Branch(BranchFlowNode),
    Phi(PhiFlowNode),
    Constraint(ConstraintFlowNode),
}

/// A point in control flow where a symbol is defined
#[derive(Debug)]
pub(crate) struct DefinitionFlowNode {
    symbol_id: SymbolId,
    definition: Definition,
    predecessor: FlowNodeId,
}

/// A branch in control flow
#[derive(Debug)]
pub(crate) struct BranchFlowNode {
    predecessor: FlowNodeId,
}

/// A join point where control flow paths come together
#[derive(Debug)]
pub(crate) struct PhiFlowNode {
    first_predecessor: FlowNodeId,
    second_predecessor: FlowNodeId,
}

/// A branch test which may apply constraints to a symbol's type
#[derive(Debug)]
pub(crate) struct ConstraintFlowNode {
    predecessor: FlowNodeId,
    test_expression: ExpressionId,
}

#[derive(Debug)]
pub struct FlowGraph {
    flow_nodes_by_id: IndexVec<FlowNodeId, FlowNode>,
    expression_map: IndexVec<ExpressionId, FlowNodeId>,
}

impl FlowGraph {
    pub fn start() -> FlowNodeId {
        FlowNodeId::from_usize(0)
    }

    pub fn for_expr(&self, expr: ExpressionId) -> FlowNodeId {
        self.expression_map[expr]
    }
}

#[derive(Debug)]
pub(crate) struct FlowGraphBuilder {
    flow_graph: FlowGraph,
}

impl FlowGraphBuilder {
    pub(crate) fn new() -> Self {
        let mut graph = FlowGraph {
            flow_nodes_by_id: IndexVec::default(),
            expression_map: IndexVec::default(),
        };
        graph.flow_nodes_by_id.push(FlowNode::Start);
        Self { flow_graph: graph }
    }

    pub(crate) fn add(&mut self, node: FlowNode) -> FlowNodeId {
        self.flow_graph.flow_nodes_by_id.push(node)
    }

    pub(crate) fn add_definition(
        &mut self,
        symbol_id: SymbolId,
        definition: Definition,
        predecessor: FlowNodeId,
    ) -> FlowNodeId {
        self.add(FlowNode::Definition(DefinitionFlowNode {
            symbol_id,
            definition,
            predecessor,
        }))
    }

    pub(crate) fn add_branch(&mut self, predecessor: FlowNodeId) -> FlowNodeId {
        self.add(FlowNode::Branch(BranchFlowNode { predecessor }))
    }

    pub(crate) fn add_phi(
        &mut self,
        first_predecessor: FlowNodeId,
        second_predecessor: FlowNodeId,
    ) -> FlowNodeId {
        self.add(FlowNode::Phi(PhiFlowNode {
            first_predecessor,
            second_predecessor,
        }))
    }

    pub(crate) fn add_constraint(
        &mut self,
        predecessor: FlowNodeId,
        test_expression: ExpressionId,
    ) -> FlowNodeId {
        self.add(FlowNode::Constraint(ConstraintFlowNode {
            predecessor,
            test_expression,
        }))
    }

    pub(super) fn record_expr(&mut self, node_id: FlowNodeId) -> ExpressionId {
        self.flow_graph.expression_map.push(node_id)
    }

    pub(super) fn finish(mut self) -> FlowGraph {
        self.flow_graph.flow_nodes_by_id.shrink_to_fit();
        self.flow_graph.expression_map.shrink_to_fit();
        self.flow_graph
    }
}

/// A definition, and the set of constraints between a use and the definition
#[derive(Debug, Clone)]
pub struct ConstrainedDefinition {
    pub definition: Definition,
    pub constraints: Vec<ExpressionId>,
}

/// A flow node and the constraints we passed through to reach it
#[derive(Debug)]
struct FlowState {
    node_id: FlowNodeId,
    constraints_range: Range<usize>,
}

#[derive(Debug)]
pub struct ReachableDefinitionsIterator<'a> {
    flow_graph: &'a FlowGraph,
    symbol_id: SymbolId,
    pending: Vec<FlowState>,
    constraints: Vec<ExpressionId>,
}

impl<'a> ReachableDefinitionsIterator<'a> {
    pub fn new(flow_graph: &'a FlowGraph, symbol_id: SymbolId, start_node_id: FlowNodeId) -> Self {
        Self {
            flow_graph,
            symbol_id,
            pending: vec![FlowState {
                node_id: start_node_id,
                constraints_range: 0..0,
            }],
            constraints: vec![],
        }
    }
}

impl<'a> Iterator for ReachableDefinitionsIterator<'a> {
    type Item = ConstrainedDefinition;

    fn next(&mut self) -> Option<Self::Item> {
        let FlowState {
            mut node_id,
            mut constraints_range,
        } = self.pending.pop()?;
        // Keep only the constraints gathered along the path we are about to walk;
        // anything past `constraints_range.end` was recorded while exploring a
        // different branch, so new constraints must be pushed directly after it.
        self.constraints.truncate(constraints_range.end);
        loop {
            match &self.flow_graph.flow_nodes_by_id[node_id] {
                FlowNode::Start => {
                    // constraints on unbound are irrelevant
                    return Some(ConstrainedDefinition {
                        definition: Definition::Unbound,
                        constraints: vec![],
                    });
                }
                FlowNode::Definition(def_node) => {
                    if def_node.symbol_id == self.symbol_id {
                        return Some(ConstrainedDefinition {
                            definition: def_node.definition.clone(),
                            constraints: self.constraints[constraints_range].to_vec(),
                        });
                    }
                    node_id = def_node.predecessor;
                }
                FlowNode::Branch(branch_node) => {
                    node_id = branch_node.predecessor;
                }
                FlowNode::Phi(phi_node) => {
                    self.pending.push(FlowState {
                        node_id: phi_node.first_predecessor,
                        constraints_range: constraints_range.clone(),
                    });
                    node_id = phi_node.second_predecessor;
                }
                FlowNode::Constraint(constraint_node) => {
                    node_id = constraint_node.predecessor;
                    self.constraints.push(constraint_node.test_expression);
                    constraints_range.end += 1;
                }
            }
        }
    }
}

impl<'a> FusedIterator for ReachableDefinitionsIterator<'a> {}
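
// Illustrative usage (editorial addition, not part of the original diff): given a built
// `FlowGraph`, a `SymbolId` `x_sym`, and the `ExpressionId` of a use of `x`, the
// reachable definitions can be collected like this:
//
//     let defs: Vec<ConstrainedDefinition> =
//         ReachableDefinitionsIterator::new(&flow_graph, x_sym, flow_graph.for_expr(use_id))
//             .collect();
//
// The iterator walks predecessor edges backwards from the use, forking at each Phi node
// (via the `pending` stack) and accumulating the Constraint nodes it passes on the way
// to each Definition.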

impl std::fmt::Display for FlowGraph {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        writeln!(f, "flowchart TD")?;
        for (id, node) in self.flow_nodes_by_id.iter_enumerated() {
            write!(f, " id{}", id.as_u32())?;
            match node {
                FlowNode::Start => writeln!(f, r"[\Start/]")?,
                FlowNode::Definition(def_node) => {
                    writeln!(f, r"(Define symbol {})", def_node.symbol_id.as_u32())?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        def_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Branch(branch_node) => {
                    writeln!(f, r"{{Branch}}")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        branch_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Phi(phi_node) => {
                    writeln!(f, r"((Phi))")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        phi_node.second_predecessor.as_u32(),
                        id.as_u32()
                    )?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        phi_node.first_predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
                FlowNode::Constraint(constraint_node) => {
                    writeln!(f, r"((Constraint))")?;
                    writeln!(
                        f,
                        r" id{}-->id{}",
                        constraint_node.predecessor.as_u32(),
                        id.as_u32()
                    )?;
                }
            }
        }
        Ok(())
    }
}
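
// Illustrative output (editorial addition, not part of the original diff): for a module
// containing just `x = 1`, the `Display` impl above emits roughly:
//
//     flowchart TD
//      id0[\Start/]
//      id1(Define symbol 0)
//      id0-->id1
//
// i.e. a Mermaid flowchart of the module's control flow graph.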

crates/red_knot/src/semantic/symbol_table.rs (new file, 560 lines)
@@ -0,0 +1,560 @@
#![allow(dead_code)]

use std::hash::{Hash, Hasher};
use std::iter::{Copied, DoubleEndedIterator, FusedIterator};
use std::num::NonZeroU32;

use bitflags::bitflags;
use hashbrown::hash_map::{Keys, RawEntryMut};
use rustc_hash::{FxHashMap, FxHasher};

use ruff_index::{newtype_index, IndexVec};

use crate::ast_ids::NodeKey;
use crate::module::ModuleName;
use crate::semantic::{Definition, ExpressionId};
use crate::Name;

type Map<K, V> = hashbrown::HashMap<K, V, ()>;
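
// Editorial note (not part of the original diff): the `()` third parameter means this
// hashbrown map has no built-in hasher, so it can only be used through the raw-entry
// API with externally computed hashes. `Scope::symbols_by_name` below uses
// `Map<SymbolId, ()>` as a set of symbol IDs hashed by the symbol's *name* (see
// `SymbolTable::hash_name`), so each name is stored once, in `symbols_by_id`, rather
// than duplicated as a map key.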

#[newtype_index]
pub struct ScopeId;

impl ScopeId {
    pub fn scope(self, table: &SymbolTable) -> &Scope {
        &table.scopes_by_id[self]
    }
}

#[newtype_index]
pub struct SymbolId;

impl SymbolId {
    pub fn symbol(self, table: &SymbolTable) -> &Symbol {
        &table.symbols_by_id[self]
    }
}

#[derive(Copy, Clone, Debug, PartialEq)]
pub enum ScopeKind {
    Module,
    Annotation,
    Class,
    Function,
}

#[derive(Debug)]
pub struct Scope {
    name: Name,
    kind: ScopeKind,
    parent: Option<ScopeId>,
    children: Vec<ScopeId>,
    /// the definition (e.g. class or function) that created this scope
    definition: Option<Definition>,
    /// the symbol (e.g. class or function) that owns this scope
    defining_symbol: Option<SymbolId>,
    /// symbol IDs, hashed by symbol name
    symbols_by_name: Map<SymbolId, ()>,
}

impl Scope {
    pub fn name(&self) -> &str {
        self.name.as_str()
    }

    pub fn kind(&self) -> ScopeKind {
        self.kind
    }

    pub fn definition(&self) -> Option<Definition> {
        self.definition.clone()
    }

    pub fn defining_symbol(&self) -> Option<SymbolId> {
        self.defining_symbol
    }
}

#[derive(Debug)]
pub(crate) enum Kind {
    FreeVar,
    CellVar,
    CellVarAssigned,
    ExplicitGlobal,
    ImplicitGlobal,
}

bitflags! {
    #[derive(Copy, Clone, Debug)]
    pub struct SymbolFlags: u8 {
        const IS_USED = 1 << 0;
        const IS_DEFINED = 1 << 1;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 2;
        /// TODO: This flag is not yet set by anything
        const MARKED_NONLOCAL = 1 << 3;
    }
}

#[derive(Debug)]
pub struct Symbol {
    name: Name,
    flags: SymbolFlags,
    scope_id: ScopeId,
    // kind: Kind,
}

impl Symbol {
    pub fn name(&self) -> &str {
        self.name.as_str()
    }

    pub fn scope_id(&self) -> ScopeId {
        self.scope_id
    }

    /// Is the symbol used in its containing scope?
    pub fn is_used(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_USED)
    }

    /// Is the symbol defined in its containing scope?
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    }

    // TODO: implement Symbol.kind 2-pass analysis to categorize as: free-var, cell-var,
    // explicit-global, implicit-global and implement Symbol.kind by modifying the preorder
    // traversal code
}

#[derive(Debug, Clone)]
pub enum Dependency {
    Module(ModuleName),
    Relative {
        level: NonZeroU32,
        module: Option<ModuleName>,
    },
}

/// Table of all symbols in all scopes for a module.
#[derive(Debug)]
pub struct SymbolTable {
    scopes_by_id: IndexVec<ScopeId, Scope>,
    symbols_by_id: IndexVec<SymbolId, Symbol>,
    /// the definitions for each symbol
    defs: FxHashMap<SymbolId, Vec<Definition>>,
    /// map of AST node (e.g. class/function def) to sub-scope it creates
    scopes_by_node: FxHashMap<NodeKey, ScopeId>,
    /// Maps expressions to their enclosing scope.
    expression_scopes: IndexVec<ExpressionId, ScopeId>,
    /// dependencies of this module
    dependencies: Vec<Dependency>,
}

impl SymbolTable {
    pub fn dependencies(&self) -> &[Dependency] {
        &self.dependencies
    }

    pub const fn root_scope_id() -> ScopeId {
        ScopeId::from_usize(0)
    }

    pub fn root_scope(&self) -> &Scope {
        &self.scopes_by_id[SymbolTable::root_scope_id()]
    }

    pub fn symbol_ids_for_scope(&self, scope_id: ScopeId) -> Copied<Keys<SymbolId, ()>> {
        self.scopes_by_id[scope_id].symbols_by_name.keys().copied()
    }

    pub fn symbols_for_scope(
        &self,
        scope_id: ScopeId,
    ) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        SymbolIterator {
            table: self,
            ids: self.symbol_ids_for_scope(scope_id),
        }
    }

    pub fn root_symbol_ids(&self) -> Copied<Keys<SymbolId, ()>> {
        self.symbol_ids_for_scope(SymbolTable::root_scope_id())
    }

    pub fn root_symbols(&self) -> SymbolIterator<Copied<Keys<SymbolId, ()>>> {
        self.symbols_for_scope(SymbolTable::root_scope_id())
    }

    pub fn child_scope_ids_of(&self, scope_id: ScopeId) -> &[ScopeId] {
        &self.scopes_by_id[scope_id].children
    }

    pub fn child_scopes_of(&self, scope_id: ScopeId) -> ScopeIterator<&[ScopeId]> {
        ScopeIterator {
            table: self,
            ids: self.child_scope_ids_of(scope_id),
        }
    }

    pub fn root_child_scope_ids(&self) -> &[ScopeId] {
        self.child_scope_ids_of(SymbolTable::root_scope_id())
    }

    pub fn root_child_scopes(&self) -> ScopeIterator<&[ScopeId]> {
        self.child_scopes_of(SymbolTable::root_scope_id())
    }

    pub fn symbol_id_by_name(&self, scope_id: ScopeId, name: &str) -> Option<SymbolId> {
        let scope = &self.scopes_by_id[scope_id];
        let hash = SymbolTable::hash_name(name);
        let name = Name::new(name);
        Some(
            *scope
                .symbols_by_name
                .raw_entry()
                .from_hash(hash, |symid| self.symbols_by_id[*symid].name == name)?
                .0,
        )
    }

    pub fn symbol_by_name(&self, scope_id: ScopeId, name: &str) -> Option<&Symbol> {
        Some(&self.symbols_by_id[self.symbol_id_by_name(scope_id, name)?])
    }

    pub fn root_symbol_id_by_name(&self, name: &str) -> Option<SymbolId> {
        self.symbol_id_by_name(SymbolTable::root_scope_id(), name)
    }

    pub fn root_symbol_by_name(&self, name: &str) -> Option<&Symbol> {
        self.symbol_by_name(SymbolTable::root_scope_id(), name)
    }

    pub fn scope_id_of_symbol(&self, symbol_id: SymbolId) -> ScopeId {
        self.symbols_by_id[symbol_id].scope_id
    }

    pub fn scope_of_symbol(&self, symbol_id: SymbolId) -> &Scope {
        &self.scopes_by_id[self.scope_id_of_symbol(symbol_id)]
    }

    pub fn scope_id_of_expression(&self, expression: ExpressionId) -> ScopeId {
        self.expression_scopes[expression]
    }

    pub fn scope_of_expression(&self, expr_id: ExpressionId) -> &Scope {
        &self.scopes_by_id[self.scope_id_of_expression(expr_id)]
    }

    pub fn parent_scopes(
        &self,
        scope_id: ScopeId,
    ) -> ScopeIterator<impl Iterator<Item = ScopeId> + '_> {
        ScopeIterator {
            table: self,
            ids: std::iter::successors(Some(scope_id), |scope| self.scopes_by_id[*scope].parent),
        }
    }

    pub fn parent_scope(&self, scope_id: ScopeId) -> Option<ScopeId> {
        self.scopes_by_id[scope_id].parent
    }

    pub fn scope_id_for_node(&self, node_key: &NodeKey) -> ScopeId {
        self.scopes_by_node[node_key]
    }

    pub fn definitions(&self, symbol_id: SymbolId) -> &[Definition] {
        self.defs
            .get(&symbol_id)
            .map(std::vec::Vec::as_slice)
            .unwrap_or_default()
    }

    pub fn all_definitions(&self) -> impl Iterator<Item = (SymbolId, &Definition)> + '_ {
        self.defs
            .iter()
            .flat_map(|(sym_id, defs)| defs.iter().map(move |def| (*sym_id, def)))
    }

    fn hash_name(name: &str) -> u64 {
        let mut hasher = FxHasher::default();
        name.hash(&mut hasher);
        hasher.finish()
    }
}

pub struct SymbolIterator<'a, I> {
    table: &'a SymbolTable,
    ids: I,
}

impl<'a, I> Iterator for SymbolIterator<'a, I>
where
    I: Iterator<Item = SymbolId>,
{
    type Item = &'a Symbol;

    fn next(&mut self) -> Option<Self::Item> {
        let id = self.ids.next()?;
        Some(&self.table.symbols_by_id[id])
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.ids.size_hint()
    }
}

impl<'a, I> FusedIterator for SymbolIterator<'a, I> where
    I: Iterator<Item = SymbolId> + FusedIterator
{
}

impl<'a, I> DoubleEndedIterator for SymbolIterator<'a, I>
where
    I: Iterator<Item = SymbolId> + DoubleEndedIterator,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        let id = self.ids.next_back()?;
        Some(&self.table.symbols_by_id[id])
    }
}

// TODO maybe get rid of this and just do all data access via methods on ScopeId?
pub struct ScopeIterator<'a, I> {
    table: &'a SymbolTable,
    ids: I,
}

/// iterate (`ScopeId`, `Scope`) pairs for given `ScopeId` iterator
impl<'a, I> Iterator for ScopeIterator<'a, I>
where
    I: Iterator<Item = ScopeId>,
{
    type Item = (ScopeId, &'a Scope);

    fn next(&mut self) -> Option<Self::Item> {
        let id = self.ids.next()?;
        Some((id, &self.table.scopes_by_id[id]))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.ids.size_hint()
    }
}

impl<'a, I> FusedIterator for ScopeIterator<'a, I> where I: Iterator<Item = ScopeId> + FusedIterator {}

impl<'a, I> DoubleEndedIterator for ScopeIterator<'a, I>
where
    I: Iterator<Item = ScopeId> + DoubleEndedIterator,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        let id = self.ids.next_back()?;
        Some((id, &self.table.scopes_by_id[id]))
    }
}

#[derive(Debug)]
pub(super) struct SymbolTableBuilder {
    symbol_table: SymbolTable,
}

impl SymbolTableBuilder {
    pub(super) fn new() -> Self {
        let mut table = SymbolTable {
            scopes_by_id: IndexVec::new(),
            symbols_by_id: IndexVec::new(),
            defs: FxHashMap::default(),
            scopes_by_node: FxHashMap::default(),
            expression_scopes: IndexVec::new(),
            dependencies: Vec::new(),
        };
        table.scopes_by_id.push(Scope {
            name: Name::new("<module>"),
            kind: ScopeKind::Module,
            parent: None,
            children: Vec::new(),
            definition: None,
            defining_symbol: None,
            symbols_by_name: Map::default(),
        });
        Self {
            symbol_table: table,
        }
    }

    pub(super) fn finish(self) -> SymbolTable {
        let mut symbol_table = self.symbol_table;
        symbol_table.scopes_by_id.shrink_to_fit();
        symbol_table.symbols_by_id.shrink_to_fit();
        symbol_table.defs.shrink_to_fit();
        symbol_table.scopes_by_node.shrink_to_fit();
        symbol_table.expression_scopes.shrink_to_fit();
        symbol_table.dependencies.shrink_to_fit();
        symbol_table
    }

    pub(super) fn add_or_update_symbol(
        &mut self,
        scope_id: ScopeId,
        name: &str,
        flags: SymbolFlags,
    ) -> SymbolId {
        let hash = SymbolTable::hash_name(name);
        let scope = &mut self.symbol_table.scopes_by_id[scope_id];
        let name = Name::new(name);

        let entry = scope
            .symbols_by_name
            .raw_entry_mut()
            .from_hash(hash, |existing| {
                self.symbol_table.symbols_by_id[*existing].name == name
            });

        match entry {
            RawEntryMut::Occupied(entry) => {
                if let Some(symbol) = self.symbol_table.symbols_by_id.get_mut(*entry.key()) {
                    symbol.flags.insert(flags);
                };
                *entry.key()
            }
            RawEntryMut::Vacant(entry) => {
                let id = self.symbol_table.symbols_by_id.push(Symbol {
                    name,
                    flags,
                    scope_id,
                });
                entry.insert_with_hasher(hash, id, (), |symid| {
                    SymbolTable::hash_name(&self.symbol_table.symbols_by_id[*symid].name)
                });
                id
            }
        }
    }

    pub(super) fn add_definition(&mut self, symbol_id: SymbolId, definition: Definition) {
        self.symbol_table
            .defs
            .entry(symbol_id)
            .or_default()
            .push(definition);
    }

    pub(super) fn add_child_scope(
        &mut self,
        parent_scope_id: ScopeId,
        name: &str,
        kind: ScopeKind,
        definition: Option<Definition>,
        defining_symbol: Option<SymbolId>,
    ) -> ScopeId {
        let new_scope_id = self.symbol_table.scopes_by_id.push(Scope {
            name: Name::new(name),
            kind,
            parent: Some(parent_scope_id),
            children: Vec::new(),
            definition,
            defining_symbol,
            symbols_by_name: Map::default(),
        });
        let parent_scope = &mut self.symbol_table.scopes_by_id[parent_scope_id];
        parent_scope.children.push(new_scope_id);
        new_scope_id
    }

    pub(super) fn record_scope_for_node(&mut self, node_key: NodeKey, scope_id: ScopeId) {
        self.symbol_table.scopes_by_node.insert(node_key, scope_id);
    }

    pub(super) fn add_dependency(&mut self, dependency: Dependency) {
        self.symbol_table.dependencies.push(dependency);
    }

    /// Records the scope for the current expression
    pub(super) fn record_expression(&mut self, scope: ScopeId) -> ExpressionId {
        self.symbol_table.expression_scopes.push(scope)
    }
}

#[cfg(test)]
mod tests {
    use super::{ScopeKind, SymbolFlags, SymbolTable, SymbolTableBuilder};

    #[test]
    fn insert_same_name_symbol_twice() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_DEFINED);
        let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::IS_USED);
        let table = builder.finish();

        assert_eq!(symbol_id_1, symbol_id_2);
        assert!(symbol_id_1.symbol(&table).is_used(), "flags must merge");
        assert!(symbol_id_1.symbol(&table).is_defined(), "flags must merge");
    }

    #[test]
    fn insert_different_named_symbols() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let symbol_id_1 = builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let symbol_id_2 = builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());

        assert_ne!(symbol_id_1, symbol_id_2);
    }

    #[test]
    fn add_child_scope_with_symbol() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_top =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let c_scope = builder.add_child_scope(root_scope_id, "C", ScopeKind::Class, None, None);
        let foo_symbol_inner = builder.add_or_update_symbol(c_scope, "foo", SymbolFlags::empty());

        assert_ne!(foo_symbol_top, foo_symbol_inner);
    }

    #[test]
    fn scope_from_id() {
        let table = SymbolTableBuilder::new().finish();
        let root_scope_id = SymbolTable::root_scope_id();
        let scope = root_scope_id.scope(&table);

        assert_eq!(scope.name.as_str(), "<module>");
        assert_eq!(scope.kind, ScopeKind::Module);
    }

    #[test]
    fn symbol_from_id() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_id =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        let table = builder.finish();
        let symbol = foo_symbol_id.symbol(&table);

        assert_eq!(symbol.name(), "foo");
    }

    #[test]
    fn bigger_symbol_table() {
        let mut builder = SymbolTableBuilder::new();
        let root_scope_id = SymbolTable::root_scope_id();
        let foo_symbol_id =
            builder.add_or_update_symbol(root_scope_id, "foo", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "bar", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "baz", SymbolFlags::empty());
        builder.add_or_update_symbol(root_scope_id, "qux", SymbolFlags::empty());
        let table = builder.finish();

        let foo_symbol_id_2 = table
            .root_symbol_id_by_name("foo")
            .expect("foo symbol to be found");

        assert_eq!(foo_symbol_id_2, foo_symbol_id);
    }
}

@@ -2,19 +2,23 @@
 use crate::ast_ids::NodeKey;
 use crate::db::{QueryResult, SemanticDb, SemanticJar};
 use crate::files::FileId;
-use crate::symbols::{symbol_table, GlobalSymbolId, ScopeId, ScopeKind, SymbolId};
+use crate::module::{Module, ModuleName};
+use crate::semantic::{
+    resolve_global_symbol, semantic_index, GlobalSymbolId, ScopeId, ScopeKind, SymbolId,
+};
 use crate::{FxDashMap, FxIndexSet, Name};
 use ruff_index::{newtype_index, IndexVec};
 use ruff_python_ast as ast;
 use rustc_hash::FxHashMap;
 
 pub(crate) mod infer;
 
-pub(crate) use infer::{infer_definition_type, infer_symbol_type};
+pub(crate) use infer::{infer_definition_type, infer_symbol_public_type};
 
 /// unique ID for a type
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub enum Type {
-    /// the dynamic or gradual type: a statically-unknown set of values
+    /// the dynamic type: a statically-unknown set of values
     Any,
     /// the empty set of values
     Never,
@@ -23,14 +27,19 @@ pub enum Type {
     Unknown,
     /// name is not bound to any value
     Unbound,
     /// the None object (TODO remove this in favor of Instance(types.NoneType))
     None,
     /// a specific function object
     Function(FunctionTypeId),
+    /// a specific module object
+    Module(ModuleTypeId),
     /// a specific class object
     Class(ClassTypeId),
     /// the set of Python objects with the given class in their __class__'s method resolution order
     Instance(ClassTypeId),
     Union(UnionTypeId),
     Intersection(IntersectionTypeId),
+    IntLiteral(i64),
     // TODO protocols, callable types, overloads, generics, type vars
 }

@@ -46,6 +55,90 @@ impl Type {
     pub const fn is_unknown(&self) -> bool {
         matches!(self, Type::Unknown)
     }
+
+    pub fn get_member(&self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
+        match self {
+            Type::Any => Ok(Some(Type::Any)),
+            Type::Never => todo!("attribute lookup on Never type"),
+            Type::Unknown => Ok(Some(Type::Unknown)),
+            Type::Unbound => todo!("attribute lookup on Unbound type"),
+            Type::None => todo!("attribute lookup on None type"),
+            Type::Function(_) => todo!("attribute lookup on Function type"),
+            Type::Module(module_id) => module_id.get_member(db, name),
+            Type::Class(class_id) => class_id.get_class_member(db, name),
+            Type::Instance(_) => {
+                // TODO MRO? get_own_instance_member, get_instance_member
+                todo!("attribute lookup on Instance type")
+            }
+            Type::Union(union_id) => {
+                let jar: &SemanticJar = db.jar()?;
+                let _todo_union_ref = jar.type_store.get_union(*union_id);
+                // TODO perform the get_member on each type in the union
+                // TODO return the union of those results
+                // TODO if any of those results is `None` then include Unknown in the result union
+                todo!("attribute lookup on Union type")
+            }
+            Type::Intersection(_) => {
+                // TODO perform the get_member on each type in the intersection
+                // TODO return the intersection of those results
+                todo!("attribute lookup on Intersection type")
+            }
+            Type::IntLiteral(_) => {
+                // TODO raise error
+                Ok(Some(Type::Unknown))
+            }
+        }
+    }
+
+    // when this is fully fleshed out, it will use the db arg and may return QueryError
+    #[allow(clippy::unnecessary_wraps)]
+    pub fn resolve_bin_op(
+        &self,
+        _db: &dyn SemanticDb,
+        op: ast::Operator,
+        right_ty: Type,
+    ) -> QueryResult<Type> {
+        match self {
+            Type::Any => Ok(Type::Any),
+            Type::Unknown => Ok(Type::Unknown),
+            Type::IntLiteral(n) => {
+                match right_ty {
+                    Type::IntLiteral(m) => {
+                        match op {
+                            ast::Operator::Add => Ok(n
+                                .checked_add(m)
+                                .map(Type::IntLiteral)
+                                // TODO builtins.int
+                                .unwrap_or(Type::Unknown)),
+                            ast::Operator::Sub => Ok(n
+                                .checked_sub(m)
+                                .map(Type::IntLiteral)
+                                // TODO builtins.int
+                                .unwrap_or(Type::Unknown)),
+                            ast::Operator::Mult => Ok(n
+                                .checked_mul(m)
+                                .map(Type::IntLiteral)
+                                // TODO builtins.int
+                                .unwrap_or(Type::Unknown)),
+                            ast::Operator::Div => Ok(n
+                                .checked_div(m)
+                                .map(Type::IntLiteral)
+                                // TODO builtins.int
+                                .unwrap_or(Type::Unknown)),
+                            ast::Operator::Mod => Ok(n
+                                .checked_rem(m)
+                                .map(Type::IntLiteral)
+                                // TODO division by zero error
+                                .unwrap_or(Type::Unknown)),
+                            _ => todo!("complete binop op support for IntLiteral"),
+                        }
+                    }
+                    _ => todo!("complete binop right_ty support for IntLiteral"),
+                }
+            }
+            _ => todo!("complete binop support"),
+        }
+    }
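// Illustrative behavior (editorial addition, not part of the original diff), assuming
// some `db: &dyn SemanticDb` is in scope:
//
//     let ty = Type::IntLiteral(1).resolve_bin_op(db, ast::Operator::Add, Type::IntLiteral(2))?;
//     assert_eq!(ty, Type::IntLiteral(3));
//
// Overflow falls back to `Type::Unknown` via the `checked_*` arithmetic rather than panicking.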
 }
 
 impl From<FunctionTypeId> for Type {
@@ -80,7 +173,7 @@ impl TypeStore {
         self.modules.remove(&file_id);
     }
 
-    pub fn cache_symbol_type(&self, symbol: GlobalSymbolId, ty: Type) {
+    pub fn cache_symbol_public_type(&self, symbol: GlobalSymbolId, ty: Type) {
         self.add_or_get_module(symbol.file_id)
             .symbol_types
             .insert(symbol.symbol_id, ty);
@@ -92,7 +185,7 @@ impl TypeStore {
             .insert(node_key, ty);
     }
 
-    pub fn get_cached_symbol_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
+    pub fn get_cached_symbol_public_type(&self, symbol: GlobalSymbolId) -> Option<Type> {
         self.try_get_module(symbol.file_id)?
             .symbol_types
             .get(&symbol.symbol_id)
@@ -120,7 +213,7 @@ impl TypeStore {
         self.modules.get(&file_id)
     }
 
-    fn add_function(
+    fn add_function_type(
         &self,
         file_id: FileId,
         name: &str,
@@ -132,7 +225,18 @@ impl TypeStore {
             .add_function(name, symbol_id, scope_id, decorators)
     }
 
-    fn add_class(
+    fn add_function(
         &self,
         file_id: FileId,
         name: &str,
+        symbol_id: SymbolId,
+        scope_id: ScopeId,
+        decorators: Vec<Type>,
+    ) -> Type {
+        Type::Function(self.add_function_type(file_id, name, symbol_id, scope_id, decorators))
+    }
+
+    fn add_class_type(
+        &self,
+        file_id: FileId,
+        name: &str,
@@ -143,12 +247,36 @@ impl TypeStore {
             .add_class(name, scope_id, bases)
     }
 
-    fn add_union(&mut self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
+    fn add_class(&self, file_id: FileId, name: &str, scope_id: ScopeId, bases: Vec<Type>) -> Type {
+        Type::Class(self.add_class_type(file_id, name, scope_id, bases))
+    }
+
+    /// add "raw" union type with exactly given elements
+    fn add_union_type(&self, file_id: FileId, elems: &[Type]) -> UnionTypeId {
         self.add_or_get_module(file_id).add_union(elems)
     }
 
-    fn add_intersection(
-        &mut self,
+    /// add union with normalization; may not return a `UnionType`
+    fn add_union(&self, file_id: FileId, elems: &[Type]) -> Type {
+        let mut flattened = Vec::with_capacity(elems.len());
+        for ty in elems {
+            match ty {
+                Type::Union(union_id) => flattened.extend(union_id.elements(self)),
+                _ => flattened.push(*ty),
+            }
+        }
+        // TODO don't add identical unions
+        // TODO de-duplicate union elements
+        match flattened[..] {
+            [] => Type::Never,
+            [ty] => ty,
+            _ => Type::Union(self.add_union_type(file_id, &flattened)),
+        }
+    }
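// Illustrative results of the normalization above (editorial addition, not part of the
// original diff); `a`, `b`, `c` stand for arbitrary non-union types and `union_of(a, b)`
// is shorthand for a previously built `Type::Union`:
//
//     add_union(f, &[])                    -> Type::Never
//     add_union(f, &[Type::None])          -> Type::None
//     add_union(f, &[union_of(a, b), c])   -> a union with elements [a, b, c] (flattened)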
+
+    /// add "raw" intersection type with exactly given elements
+    fn add_intersection_type(
+        &self,
         file_id: FileId,
         positive: &[Type],
         negative: &[Type],
@@ -157,6 +285,38 @@ impl TypeStore {
             .add_intersection(positive, negative)
     }
 
+    /// add intersection with normalization; may not return an `IntersectionType`
+    fn add_intersection(&self, file_id: FileId, positive: &[Type], negative: &[Type]) -> Type {
+        let mut pos_flattened = Vec::with_capacity(positive.len());
+        let mut neg_flattened = Vec::with_capacity(negative.len());
+        for ty in positive {
+            match ty {
+                Type::Intersection(intersection_id) => {
+                    pos_flattened.extend(intersection_id.positive(self));
+                    neg_flattened.extend(intersection_id.negative(self));
+                }
+                _ => pos_flattened.push(*ty),
+            }
+        }
+        for ty in negative {
+            match ty {
+                Type::Intersection(intersection_id) => {
+                    pos_flattened.extend(intersection_id.negative(self));
+                    neg_flattened.extend(intersection_id.positive(self));
+                }
+                _ => neg_flattened.push(*ty),
+            }
+        }
+        // TODO don't add identical intersections
+        // TODO deduplicate intersection elements
+        // TODO maintain DNF form (union of intersections)
+        match (&pos_flattened[..], &neg_flattened[..]) {
+            ([], []) => Type::Any, // TODO should be object
+            ([ty], []) => *ty,
+            (pos, neg) => Type::Intersection(self.add_intersection_type(file_id, pos, neg)),
+        }
+    }
 
     fn get_function(&self, id: FunctionTypeId) -> FunctionTypeRef {
         FunctionTypeRef {
             module_store: self.get_module(id.file_id),
@@ -292,10 +452,11 @@ impl FunctionTypeId {
         self,
         db: &dyn SemanticDb,
     ) -> QueryResult<Option<ClassTypeId>> {
-        let table = symbol_table(db, self.file_id)?;
+        let index = semantic_index(db, self.file_id)?;
+        let table = index.symbol_table();
         let FunctionType { symbol_id, .. } = *self.function(db)?;
-        let scope_id = symbol_id.symbol(&table).scope_id();
-        let scope = scope_id.scope(&table);
+        let scope_id = symbol_id.symbol(table).scope_id();
+        let scope = scope_id.scope(table);
         if !matches!(scope.kind(), ScopeKind::Class) {
             return Ok(None);
         };
@@ -336,6 +497,31 @@ impl FunctionTypeId {
     }
 }
 
+#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
+pub struct ModuleTypeId {
+    module: Module,
+    file_id: FileId,
+}
+
+impl ModuleTypeId {
+    fn module(self, db: &dyn SemanticDb) -> QueryResult<ModuleStoreRef> {
+        let jar: &SemanticJar = db.jar()?;
+        Ok(jar.type_store.add_or_get_module(self.file_id).downgrade())
+    }
+
+    pub(crate) fn name(self, db: &dyn SemanticDb) -> QueryResult<ModuleName> {
+        self.module.name(db)
+    }
+
+    fn get_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
+        if let Some(symbol_id) = resolve_global_symbol(db, self.module, name)? {
+            Ok(Some(infer_symbol_public_type(db, symbol_id)?))
+        } else {
+            Ok(None)
+        }
+    }
+}
+
 #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
 pub struct ClassTypeId {
     file_id: FileId,
@@ -375,9 +561,9 @@ impl ClassTypeId {
     fn get_own_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
         // TODO: this should distinguish instance-only members (e.g. `x: int`) and not return them
         let ClassType { scope_id, .. } = *self.class(db)?;
-        let table = symbol_table(db, self.file_id)?;
-        if let Some(symbol_id) = table.symbol_id_by_name(scope_id, name) {
-            Ok(Some(infer_symbol_type(
+        let index = semantic_index(db, self.file_id)?;
+        if let Some(symbol_id) = index.symbol_table().symbol_id_by_name(scope_id, name) {
+            Ok(Some(infer_symbol_public_type(
                 db,
                 GlobalSymbolId {
                     file_id: self.file_id,
@@ -389,7 +575,13 @@ impl ClassTypeId {
         }
     }
 
-    // TODO: get_own_instance_member, get_class_member, get_instance_member
+    /// Get own class member or fall back to super-class member.
+    fn get_class_member(self, db: &dyn SemanticDb, name: &Name) -> QueryResult<Option<Type>> {
+        // fall back to the superclass only when the member is not found on the class itself
+        if let Some(member) = self.get_own_class_member(db, name)? {
+            Ok(Some(member))
+        } else {
+            self.get_super_class_member(db, name)
+        }
+    }
+
+    // TODO: get_own_instance_member, get_instance_member
 }
 
 #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
@@ -398,12 +590,31 @@ pub struct UnionTypeId {
     union_id: ModuleUnionTypeId,
 }
 
+impl UnionTypeId {
+    pub fn elements(self, type_store: &TypeStore) -> Vec<Type> {
+        let union = type_store.get_union(self);
+        union.elements.iter().copied().collect()
+    }
+}
+
 #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
 pub struct IntersectionTypeId {
     file_id: FileId,
     intersection_id: ModuleIntersectionTypeId,
 }
 
+impl IntersectionTypeId {
+    pub fn positive(self, type_store: &TypeStore) -> Vec<Type> {
+        let intersection = type_store.get_intersection(self);
+        intersection.positive.iter().copied().collect()
+    }
+
+    pub fn negative(self, type_store: &TypeStore) -> Vec<Type> {
+        let intersection = type_store.get_intersection(self);
+        intersection.negative.iter().copied().collect()
+    }
+}
+
 #[newtype_index]
 struct ModuleFunctionTypeId;
 
@@ -427,7 +638,7 @@ struct ModuleTypeStore {
     unions: IndexVec<ModuleUnionTypeId, UnionType>,
     /// arena of all intersection types created in this module
     intersections: IndexVec<ModuleIntersectionTypeId, IntersectionType>,
-    /// cached types of symbols in this module
+    /// cached public types of symbols in this module
     symbol_types: FxHashMap<SymbolId, Type>,
     /// cached types of AST nodes in this module
     node_types: FxHashMap<NodeKey, Type>,
@@ -529,6 +740,11 @@ impl std::fmt::Display for DisplayType<'_> {
             Type::Never => f.write_str("Never"),
             Type::Unknown => f.write_str("Unknown"),
             Type::Unbound => f.write_str("Unbound"),
             Type::None => f.write_str("None"),
+            Type::Module(module_id) => {
+                // NOTE: something like this?: "<module 'module-name' from 'path-from-fileid'>"
+                todo!("{module_id:?}")
+            }
+            // TODO functions and classes should display using a fully qualified name
             Type::Class(class_id) => {
                 f.write_str("Literal[")?;
@@ -547,6 +763,7 @@ impl std::fmt::Display for DisplayType<'_> {
                 .get_module(int_id.file_id)
                 .get_intersection(int_id.intersection_id)
                 .display(f, self.store),
+            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
         }
     }
 }
 
@@ -605,16 +822,42 @@ pub(crate) struct UnionType {
 
 impl UnionType {
     fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
-        f.write_str("(")?;
+        let (int_literals, other_types): (Vec<Type>, Vec<Type>) = self
+            .elements
+            .iter()
+            .copied()
+            .partition(|ty| matches!(ty, Type::IntLiteral(_)));
         let mut first = true;
-        for ty in &self.elements {
+        if !int_literals.is_empty() {
+            f.write_str("Literal[")?;
+            let mut nums: Vec<i64> = int_literals
+                .into_iter()
+                .filter_map(|ty| {
+                    if let Type::IntLiteral(n) = ty {
+                        Some(n)
+                    } else {
+                        None
+                    }
+                })
+                .collect();
+            nums.sort_unstable();
+            for num in nums {
+                if !first {
+                    f.write_str(", ")?;
+                }
+                write!(f, "{num}")?;
+                first = false;
+            }
+            f.write_str("]")?;
+        }
+        for ty in other_types {
             if !first {
                 f.write_str(" | ")?;
             };
             first = false;
             write!(f, "{}", ty.display(store))?;
         }
-        f.write_str(")")
+        Ok(())
     }
 }
|
||||
|
||||
@@ -634,7 +877,6 @@ pub(crate) struct IntersectionType {
|
||||
|
||||
impl IntersectionType {
|
||||
fn display(&self, f: &mut std::fmt::Formatter<'_>, store: &TypeStore) -> std::fmt::Result {
|
||||
f.write_str("(")?;
|
||||
let mut first = true;
|
||||
for (neg, ty) in self
|
||||
.positive
|
||||
@@ -651,25 +893,79 @@ impl IntersectionType {
|
||||
};
|
||||
write!(f, "{}", ty.display(store))?;
|
||||
}
|
||||
f.write_str(")")
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::Type;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::files::Files;
|
||||
use crate::symbols::{SymbolFlags, SymbolTable};
|
||||
use crate::types::{Type, TypeStore};
|
||||
use crate::semantic::symbol_table::SymbolTableBuilder;
|
||||
use crate::semantic::{FileId, ScopeId, SymbolFlags, SymbolTable, TypeStore};
|
||||
use crate::FxIndexSet;
|
||||
|
||||
struct TestCase {
|
||||
store: TypeStore,
|
||||
files: Files,
|
||||
file_id: FileId,
|
||||
root_scope: ScopeId,
|
||||
}
|
||||
|
||||
fn create_test() -> TestCase {
|
||||
let files = Files::default();
|
||||
let file_id = files.intern(Path::new("/foo"));
|
||||
TestCase {
|
||||
store: TypeStore::default(),
|
||||
files,
|
||||
file_id,
|
||||
root_scope: SymbolTable::root_scope_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn assert_union_elements(store: &TypeStore, union: Type, elements: &[Type]) {
|
||||
let Type::Union(union_id) = union else {
|
||||
panic!("should be a union")
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
store.get_union(union_id).elements,
|
||||
elements.iter().copied().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
fn assert_intersection_elements(
|
||||
store: &TypeStore,
|
||||
intersection: Type,
|
||||
positive: &[Type],
|
||||
negative: &[Type],
|
||||
) {
|
||||
let Type::Intersection(intersection_id) = intersection else {
|
||||
panic!("should be a intersection")
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
store.get_intersection(intersection_id).positive,
|
||||
positive.iter().copied().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
assert_eq!(
|
||||
store.get_intersection(intersection_id).negative,
|
||||
negative.iter().copied().collect::<FxIndexSet<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
    #[test]
    fn add_class() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let id = store.add_class(file_id, "C", SymbolTable::root_scope_id(), Vec::new());
        let TestCase {
            store,
            file_id,
            root_scope,
            ..
        } = create_test();

        let id = store.add_class_type(file_id, "C", root_scope, Vec::new());
        assert_eq!(store.get_class(id).name(), "C");
        let inst = Type::Instance(id);
        assert_eq!(format!("{}", inst.display(&store)), "C");
@@ -677,21 +973,26 @@ mod tests {

    #[test]
    fn add_function() {
        let store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let mut table = SymbolTable::new();
        let func_symbol = table.add_or_update_symbol(
        let TestCase {
            store,
            file_id,
            root_scope,
            ..
        } = create_test();

        let mut builder = SymbolTableBuilder::new();
        let func_symbol = builder.add_or_update_symbol(
            SymbolTable::root_scope_id(),
            "func",
            SymbolFlags::IS_DEFINED,
        );
        builder.finish();

        let id = store.add_function(
        let id = store.add_function_type(
            file_id,
            "func",
            func_symbol,
            SymbolTable::root_scope_id(),
            root_scope,
            vec![Type::Unknown],
        );
        assert_eq!(store.get_function(id).name(), "func");
@@ -702,44 +1003,117 @@ mod tests {

    #[test]
    fn add_union() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let TestCase {
            store,
            file_id,
            root_scope,
            ..
        } = create_test();

        let c1 = store.add_class_type(file_id, "C1", root_scope, Vec::new());
        let c2 = store.add_class_type(file_id, "C2", root_scope, Vec::new());
        let elems = vec![Type::Instance(c1), Type::Instance(c2)];
        let id = store.add_union(file_id, &elems);
        assert_eq!(
            store.get_union(id).elements,
            elems.into_iter().collect::<FxIndexSet<_>>()
        );
        let id = store.add_union_type(file_id, &elems);
        let union = Type::Union(id);
        assert_eq!(format!("{}", union.display(&store)), "(C1 | C2)");

        assert_union_elements(&store, union, &elems);
        assert_eq!(format!("{}", union.display(&store)), "C1 | C2");
    }

    #[test]
    fn add_intersection() {
        let mut store = TypeStore::default();
        let files = Files::default();
        let file_id = files.intern(Path::new("/foo"));
        let c1 = store.add_class(file_id, "C1", SymbolTable::root_scope_id(), Vec::new());
        let c2 = store.add_class(file_id, "C2", SymbolTable::root_scope_id(), Vec::new());
        let c3 = store.add_class(file_id, "C3", SymbolTable::root_scope_id(), Vec::new());
        let TestCase {
            store,
            file_id,
            root_scope,
            ..
        } = create_test();

        let c1 = store.add_class_type(file_id, "C1", root_scope, Vec::new());
        let c2 = store.add_class_type(file_id, "C2", root_scope, Vec::new());
        let c3 = store.add_class_type(file_id, "C3", root_scope, Vec::new());
        let pos = vec![Type::Instance(c1), Type::Instance(c2)];
        let neg = vec![Type::Instance(c3)];
        let id = store.add_intersection(file_id, &pos, &neg);
        assert_eq!(
            store.get_intersection(id).positive,
            pos.into_iter().collect::<FxIndexSet<_>>()
        );
        assert_eq!(
            store.get_intersection(id).negative,
            neg.into_iter().collect::<FxIndexSet<_>>()
        );
        let id = store.add_intersection_type(file_id, &pos, &neg);
        let intersection = Type::Intersection(id);
        assert_eq!(
            format!("{}", intersection.display(&store)),
            "(C1 & C2 & ~C3)"
        );

        assert_intersection_elements(&store, intersection, &pos, &neg);
        assert_eq!(format!("{}", intersection.display(&store)), "C1 & C2 & ~C3");
    }

    #[test]
    fn flatten_union_zero_elements() {
        let TestCase { store, file_id, .. } = create_test();

        let ty = store.add_union(file_id, &[]);

        assert!(matches!(ty, Type::Never), "{ty:?} should be Never");
    }

    #[test]
    fn flatten_union_one_element() {
        let TestCase { store, file_id, .. } = create_test();

        let ty = store.add_union(file_id, &[Type::None]);

        assert!(matches!(ty, Type::None), "{ty:?} should be None");
    }

    #[test]
    fn flatten_nested_union() {
        let TestCase { store, file_id, .. } = create_test();

        let l1 = Type::IntLiteral(1);
        let l2 = Type::IntLiteral(2);
        let u1 = store.add_union(file_id, &[l1, l2]);
        let u2 = store.add_union(file_id, &[u1, Type::None]);

        assert_union_elements(&store, u2, &[l1, l2, Type::None]);
    }

    #[test]
    fn flatten_intersection_zero_elements() {
        let TestCase { store, file_id, .. } = create_test();

        let ty = store.add_intersection(file_id, &[], &[]);

        // TODO should be object, not Any
        assert!(matches!(ty, Type::Any), "{ty:?} should be object");
    }

    #[test]
    fn flatten_intersection_one_positive_element() {
        let TestCase { store, file_id, .. } = create_test();

        let ty = store.add_intersection(file_id, &[Type::None], &[]);

        assert!(matches!(ty, Type::None), "{ty:?} should be None");
    }

    #[test]
    fn flatten_intersection_one_negative_element() {
        let TestCase { store, file_id, .. } = create_test();

        let ty = store.add_intersection(file_id, &[], &[Type::None]);

        assert_intersection_elements(&store, ty, &[], &[Type::None]);
    }

    #[test]
    fn flatten_nested_intersection() {
        let TestCase {
            store,
            file_id,
            root_scope,
            ..
        } = create_test();

        let c1 = Type::Instance(store.add_class_type(file_id, "C1", root_scope, vec![]));
        let c2 = Type::Instance(store.add_class_type(file_id, "C2", root_scope, vec![]));
        let c1sub = Type::Instance(store.add_class_type(file_id, "C1sub", root_scope, vec![c1]));
        let i1 = store.add_intersection(file_id, &[c1, c2], &[c1sub]);
        let i2 = store.add_intersection(file_id, &[i1, Type::None], &[]);

        assert_intersection_elements(&store, i2, &[c1, c2, Type::None], &[c1sub]);
    }
}
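The flattening tests above pin down three rules: an empty union collapses to `Never`, a single-element union collapses to that element, and nested unions contribute their elements rather than themselves (intersections treat their positive and negative sets analogously). A minimal standalone sketch of just those rules, detached from the `TypeStore` machinery; `SimpleType` and `flatten_union` are illustrative names, not the crate's API:

// Illustrative only: a simplified model of the union-flattening rules the
// tests above exercise. `SimpleType` is a stand-in, not red_knot's `Type`.
#[derive(Debug, Clone, PartialEq)]
enum SimpleType {
    Never,
    None,
    IntLiteral(i64),
    Union(Vec<SimpleType>),
}

fn flatten_union(elements: Vec<SimpleType>) -> SimpleType {
    let mut flat = Vec::new();
    for element in elements {
        match element {
            // Nested unions contribute their elements, not themselves.
            SimpleType::Union(inner) => flat.extend(inner),
            other => flat.push(other),
        }
    }
    // Adjacent duplicates only; the real store dedupes via an index set.
    flat.dedup();
    match flat.len() {
        0 => SimpleType::Never,   // an empty union is Never
        1 => flat.pop().unwrap(), // a one-element union is that element
        _ => SimpleType::Union(flat),
    }
}

fn main() {
    assert_eq!(flatten_union(vec![]), SimpleType::Never);
    assert_eq!(flatten_union(vec![SimpleType::None]), SimpleType::None);
    let nested = SimpleType::Union(vec![
        SimpleType::IntLiteral(1),
        SimpleType::IntLiteral(2),
    ]);
    assert_eq!(
        flatten_union(vec![nested, SimpleType::None]),
        SimpleType::Union(vec![
            SimpleType::IntLiteral(1),
            SimpleType::IntLiteral(2),
            SimpleType::None,
        ])
    );
}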
crates/red_knot/src/semantic/types/infer.rs (new file, 762 lines)
@@ -0,0 +1,762 @@
#![allow(dead_code)]

use ruff_python_ast as ast;
use ruff_python_ast::AstNode;
use std::fmt::Debug;

use crate::db::{QueryResult, SemanticDb, SemanticJar};

use crate::module::{resolve_module, ModuleName};
use crate::parse::parse;
use crate::semantic::types::{ModuleTypeId, Type};
use crate::semantic::{
    resolve_global_symbol, semantic_index, ConstrainedDefinition, Definition, GlobalSymbolId,
    ImportDefinition, ImportFromDefinition,
};
use crate::{FileId, Name};

// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
/// Resolve the public-facing type for a symbol (the type seen by other scopes: other modules, or
/// nested functions). Because calls to nested functions and imports can occur anywhere in control
/// flow, this type must be conservative and consider all definitions of the symbol that could
/// possibly be seen by another scope. Currently we take the most conservative approach, which is
/// the union of all definitions. We may be able to narrow this in future to eliminate definitions
/// which can't possibly (or at least likely) be seen by any other scope, so that e.g. we could
/// infer `Literal["1"]` instead of `Literal[1] | Literal["1"]` for `x` in `x = x; x = str(x);`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_public_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
    let index = semantic_index(db, symbol.file_id)?;
    let defs = index.symbol_table().definitions(symbol.symbol_id).to_vec();
    let jar: &SemanticJar = db.jar()?;

    if let Some(ty) = jar.type_store.get_cached_symbol_public_type(symbol) {
        return Ok(ty);
    }

    let ty = infer_type_from_definitions(db, symbol, defs.iter().cloned())?;

    jar.type_store.cache_symbol_public_type(symbol, ty);

    // TODO record dependencies
    Ok(ty)
}
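One consequence of the conservative rule documented above, sketched in the style of the test module at the end of this file. This is a hypothetical test, not part of the diff; the expected string follows from the union-of-all-definitions rule together with how the tests below render unions of int literals:

// Hypothetical test (not in the diff): even though only `x = 2` is visible
// at the end of the module body, an importing module could observe either
// definition, so the public type unions both. Reuses the `create_test`,
// `write_to_path`, and `assert_public_type` helpers from the test module
// at the end of this file.
#[test]
fn public_type_unions_all_definitions() -> anyhow::Result<()> {
    let case = create_test()?;

    write_to_path(&case, "a.py", "x = 1\nx = 2")?;

    assert_public_type(&case, "a", "x", "Literal[1, 2]")
}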

/// Infer type of a symbol as union of the given `Definitions`.
fn infer_type_from_definitions<T>(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definitions: T,
) -> QueryResult<Type>
where
    T: Debug + IntoIterator<Item = Definition>,
{
    infer_type_from_constrained_definitions(
        db,
        symbol,
        definitions
            .into_iter()
            .map(|definition| ConstrainedDefinition {
                definition,
                constraints: vec![],
            }),
    )
}

/// Infer type of a symbol as union of the given `ConstrainedDefinitions`.
fn infer_type_from_constrained_definitions<T>(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    constrained_definitions: T,
) -> QueryResult<Type>
where
    T: IntoIterator<Item = ConstrainedDefinition>,
{
    let jar: &SemanticJar = db.jar()?;
    let mut tys = constrained_definitions
        .into_iter()
        .map(|def| infer_constrained_definition_type(db, symbol, def.clone()))
        .peekable();
    if let Some(first) = tys.next() {
        if tys.peek().is_some() {
            Ok(jar.type_store.add_union(
                symbol.file_id,
                &Iterator::chain(std::iter::once(first), tys).collect::<QueryResult<Vec<_>>>()?,
            ))
        } else {
            first
        }
    } else {
        Ok(Type::Unknown)
    }
}
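`infer_type_from_constrained_definitions` avoids allocating a union in the common single-definition case: it peeks ahead and only calls `add_union` when at least a second type exists. The same shortcut in isolation, over plain strings instead of interned `Type`s (illustrative only):

// Illustrative only: the peek-ahead shortcut, detached from the TypeStore.
// Zero elements yields a fallback, one element is returned as-is, and two
// or more are combined, mirroring the Unknown/first/add_union branches.
fn join<I: IntoIterator<Item = String>>(types: I) -> String {
    let mut tys = types.into_iter().peekable();
    match tys.next() {
        None => "Unknown".to_string(),
        Some(first) if tys.peek().is_none() => first,
        Some(first) => std::iter::once(first)
            .chain(tys)
            .collect::<Vec<_>>()
            .join(" | "),
    }
}

fn main() {
    assert_eq!(join(Vec::<String>::new()), "Unknown");
    assert_eq!(join(["Literal[1]".to_string()]), "Literal[1]");
    assert_eq!(
        join(["Literal[1]".to_string(), "Literal[2]".to_string()]),
        "Literal[1] | Literal[2]"
    );
}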

/// Infer type for a ConstrainedDefinition (intersection of the definition type and the
/// constraints)
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_constrained_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    constrained_definition: ConstrainedDefinition,
) -> QueryResult<Type> {
    let ConstrainedDefinition {
        definition,
        constraints,
    } = constrained_definition;
    let index = semantic_index(db, symbol.file_id)?;
    let parsed = parse(db.upcast(), symbol.file_id)?;
    let mut intersected_types = vec![infer_definition_type(db, symbol, definition)?];
    for constraint in constraints {
        if let Some(constraint_type) = infer_constraint_type(
            db,
            symbol,
            index.resolve_expression_id(parsed.syntax(), constraint),
        )? {
            intersected_types.push(constraint_type);
        }
    }
    let jar: &SemanticJar = db.jar()?;
    Ok(jar
        .type_store
        .add_intersection(symbol.file_id, &intersected_types, &[]))
}
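Concretely, in the `narrow_none` test at the end of this file, the definition `y = x` is guarded by `if x is not None:`. The definition type of `y = x` is `Literal[1] | None`, the constraint contributes `~None`, and this function therefore returns the intersection `(Literal[1] | None) & ~None`, which is exactly the un-normalized shape that test asserts.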

/// Infer a type for a Definition
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definition: Definition,
) -> QueryResult<Type> {
    let jar: &SemanticJar = db.jar()?;
    let type_store = &jar.type_store;
    let file_id = symbol.file_id;

    match definition {
        Definition::Unbound => Ok(Type::Unbound),
        Definition::Import(ImportDefinition {
            module: module_name,
        }) => {
            if let Some(module) = resolve_module(db, module_name.clone())? {
                Ok(Type::Module(ModuleTypeId { module, file_id }))
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            let Some(module) = resolve_module(db, module_name.clone())? else {
                return Ok(Type::Unknown);
            };

            if let Some(remote_symbol) = resolve_global_symbol(db, module, &name)? {
                infer_symbol_public_type(db, remote_symbol)
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ClassDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.syntax();
                let index = semantic_index(db, file_id)?;
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }
                let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
                let ty = type_store.add_class(file_id, &node.name.id, scope_id, bases);
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.syntax();
                let index = semantic_index(db, file_id)?;
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;
                let scope_id = index.symbol_table().scope_id_for_node(node_key.erased());
                let ty = type_store.add_function(
                    file_id,
                    &node.name.id,
                    symbol.symbol_id,
                    scope_id,
                    decorator_tys,
                );
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment
            infer_expr_type(db, file_id, &node.value)
        }
        Definition::AnnotatedAssignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO actually look at the annotation
            let Some(value) = &node.value else {
                return Ok(Type::Unknown);
            };
            // TODO handle unpacking assignment
            infer_expr_type(db, file_id, value)
        }
        Definition::NamedExpr(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.syntax();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            infer_expr_type(db, file_id, &node.value)
        }
    }
}
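Both the `ClassDef` and `FunctionDef` arms above share one query-caching shape: consult the per-node type cache, and only on a miss parse, infer, and write the result back. A hedged sketch of that shape in isolation; `NodeKey`, `Ty`, and `NodeTypeCache` are stand-ins, not the jar's `TypeStore`:

use std::collections::HashMap;

// Illustrative stand-ins for the jar's TypeStore and erased node keys.
type NodeKey = u64;
type Ty = &'static str;

struct NodeTypeCache {
    by_node: HashMap<NodeKey, Ty>,
}

impl NodeTypeCache {
    // The check-cache / compute / write-back shape used by the ClassDef
    // and FunctionDef arms above.
    fn get_or_infer(&mut self, key: NodeKey, infer: impl FnOnce() -> Ty) -> Ty {
        if let Some(ty) = self.by_node.get(&key).copied() {
            return ty;
        }
        let ty = infer(); // parse and infer only on a cache miss
        self.by_node.insert(key, ty);
        ty
    }
}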

/// Return the type that the given constraint (an expression from a control-flow test) requires the
/// given symbol to have. For example, returns the Type "~None" as the constraint type if given the
/// symbol ID for x and the expression ID for `x is not None`. Returns (Rust) None if the given
/// expression applies no constraints on the given symbol.
#[tracing::instrument(level = "trace", skip(db))]
fn infer_constraint_type(
    db: &dyn SemanticDb,
    symbol_id: GlobalSymbolId,
    // TODO this should preferably take an &ast::Expr instead of AnyNodeRef
    expression: ast::AnyNodeRef,
) -> QueryResult<Option<Type>> {
    let file_id = symbol_id.file_id;
    let index = semantic_index(db, file_id)?;
    let jar: &SemanticJar = db.jar()?;
    let symbol_name = symbol_id.symbol_id.symbol(&index.symbol_table).name();
    // TODO narrowing attributes
    // TODO narrowing dict keys
    // TODO isinstance, ==/!=, type(...), literals, bools...
    match expression {
        ast::AnyNodeRef::ExprCompare(ast::ExprCompare {
            left,
            ops,
            comparators,
            ..
        }) => {
            // TODO chained comparisons
            match left.as_ref() {
                ast::Expr::Name(ast::ExprName { id, .. }) if id == symbol_name => match ops[0] {
                    ast::CmpOp::Is | ast::CmpOp::IsNot => {
                        Ok(match infer_expr_type(db, file_id, &comparators[0])? {
                            Type::None => Some(Type::None),
                            _ => None,
                        }
                        .map(|ty| {
                            if matches!(ops[0], ast::CmpOp::IsNot) {
                                jar.type_store.add_intersection(file_id, &[], &[ty])
                            } else {
                                ty
                            }
                        }))
                    }
                    _ => Ok(None),
                },
                _ => Ok(None),
            }
        }
        _ => Ok(None),
    }
}
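Distilling the one narrowing case this function currently understands: comparing the symbol against `None` with `is` constrains it to `None`, with `is not` to the complement built via `add_intersection(file_id, &[], &[ty])`, and every other comparison yields no constraint. A toy sketch of that decision table (illustrative types, not the crate's):

// Illustrative stand-ins; `Neg` plays the role of the `~None` intersection
// built through `add_intersection(file_id, &[], &[ty])` above.
#[derive(Debug, PartialEq)]
enum ToyType {
    None,
    Neg(Box<ToyType>),
}

fn constraint_for(is_not: bool, comparator: ToyType) -> Option<ToyType> {
    match comparator {
        // only `is None` / `is not None` are understood so far
        ToyType::None if is_not => Some(ToyType::Neg(Box::new(ToyType::None))),
        ToyType::None => Some(ToyType::None),
        _ => None, // anything else derives no constraint
    }
}

fn main() {
    // `x is None` narrows x to None; `x is not None` narrows it to ~None.
    assert_eq!(constraint_for(false, ToyType::None), Some(ToyType::None));
    assert_eq!(
        constraint_for(true, ToyType::None),
        Some(ToyType::Neg(Box::new(ToyType::None)))
    );
}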

/// Infer type of the given expression.
fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
    // TODO cache the resolution of the type on the node
    let index = semantic_index(db, file_id)?;
    match expr {
        ast::Expr::NoneLiteral(_) => Ok(Type::None),
        ast::Expr::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => {
            match value {
                ast::Number::Int(n) => {
                    // TODO support big int literals
                    Ok(n.as_i64().map(Type::IntLiteral).unwrap_or(Type::Unknown))
                }
                // TODO builtins.float or builtins.complex
                _ => Ok(Type::Unknown),
            }
        }
        ast::Expr::Name(name) => {
            // TODO look up in the correct scope, don't assume global
            if let Some(symbol_id) = index.symbol_table().root_symbol_id_by_name(&name.id) {
                infer_type_from_constrained_definitions(
                    db,
                    GlobalSymbolId { file_id, symbol_id },
                    index.reachable_definitions(symbol_id, expr),
                )
            } else {
                Ok(Type::Unknown)
            }
        }
        ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
            let value_type = infer_expr_type(db, file_id, value)?;
            let attr_name = &Name::new(&attr.id);
            value_type
                .get_member(db, attr_name)
                .map(|ty| ty.unwrap_or(Type::Unknown))
        }
        ast::Expr::BinOp(ast::ExprBinOp {
            left, op, right, ..
        }) => {
            let left_ty = infer_expr_type(db, file_id, left)?;
            let right_ty = infer_expr_type(db, file_id, right)?;
            // TODO add reverse bin op support if right <: left
            left_ty.resolve_bin_op(db, *op, right_ty)
        }
        ast::Expr::Named(ast::ExprNamed { value, .. }) => infer_expr_type(db, file_id, value),
        ast::Expr::If(ast::ExprIf { body, orelse, .. }) => {
            // TODO detect statically known truthy or falsy test
            let body_ty = infer_expr_type(db, file_id, body)?;
            let else_ty = infer_expr_type(db, file_id, orelse)?;
            let jar: &SemanticJar = db.jar()?;
            Ok(jar.type_store.add_union(file_id, &[body_ty, else_ty]))
        }
        _ => todo!("expression type resolution for {:?}", expr),
    }
}
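The `BinOp` arm delegates to `Type::resolve_bin_op`, which, judging by the `literal_int_arithmetic` test below, constant-folds arithmetic over `IntLiteral` operands (e.g. `2 + 1` infers as `Literal[3]`). A hedged sketch of such folding for addition, assuming checked arithmetic that falls back to `Unknown`:

// Illustrative only: constant-folding of literal ints, matching what the
// literal_int_arithmetic test below observes.
#[derive(Debug, PartialEq)]
enum LitType {
    IntLiteral(i64),
    Unknown,
}

fn resolve_add(left: &LitType, right: &LitType) -> LitType {
    match (left, right) {
        (LitType::IntLiteral(l), LitType::IntLiteral(r)) => l
            .checked_add(*r)
            .map(LitType::IntLiteral)
            .unwrap_or(LitType::Unknown), // overflow: give up on the literal
        _ => LitType::Unknown, // non-literal operands are not folded here
    }
}

fn main() {
    assert_eq!(
        resolve_add(&LitType::IntLiteral(2), &LitType::IntLiteral(1)),
        LitType::IntLiteral(3)
    );
}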

#[cfg(test)]
mod tests {

    use std::path::PathBuf;

    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleResolutionInputs,
    };
    use crate::semantic::{infer_symbol_public_type, resolve_global_symbol, Type};
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: PathBuf,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = src.canonicalize()?;

        let search_paths = ModuleResolutionInputs {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: None,
            custom_typeshed: None,
        };

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, search_paths);

        Ok(TestCase { temp_dir, db, src })
    }

    fn write_to_path(case: &TestCase, relative_path: &str, contents: &str) -> anyhow::Result<()> {
        let path = case.src.join(relative_path);
        std::fs::write(path, contents)?;
        Ok(())
    }

    fn get_public_type(
        case: &TestCase,
        module_name: &str,
        variable_name: &str,
    ) -> anyhow::Result<Type> {
        let db = &case.db;
        let module = resolve_module(db, ModuleName::new(module_name))?.expect("Module to exist");
        let symbol = resolve_global_symbol(db, module, variable_name)?.expect("symbol to exist");

        Ok(infer_symbol_public_type(db, symbol)?)
    }

    fn assert_public_type(
        case: &TestCase,
        module_name: &str,
        variable_name: &str,
        type_name: &str,
    ) -> anyhow::Result<()> {
        let ty = get_public_type(case, module_name, variable_name)?;

        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        assert_eq!(format!("{}", ty.display(&jar.type_store)), type_name);
        Ok(())
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "from b import C as D; E = D")?;
        write_to_path(&case, "b.py", "class C: pass")?;

        assert_public_type(&case, "a", "E", "Literal[C]")
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "mod.py",
            "
class Base: pass
class Sub(Base): pass
",
        )?;

        let ty = get_public_type(&case, "mod", "Sub")?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "mod.py",
            "
class C:
    def f(self): pass
",
        )?;

        let ty = get_public_type(&case, "mod", "C")?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(&case.db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(&case.db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }

    #[test]
    fn resolve_module_member() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "import b; D = b.C")?;
        write_to_path(&case, "b.py", "class C: pass")?;

        assert_public_type(&case, "a", "D", "Literal[C]")
    }

    #[test]
    fn resolve_literal() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(&case, "a.py", "x = 1")?;

        assert_public_type(&case, "a", "x", "Literal[1]")
    }

    #[test]
    fn resolve_union() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
if flag:
    x = 1
else:
    x = 2
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")
    }

    #[test]
    fn resolve_visible_def() -> anyhow::Result<()> {
        let case = create_test()?;
        write_to_path(&case, "a.py", "y = 1; y = 2; x = y")?;
        assert_public_type(&case, "a", "x", "Literal[2]")
    }

    #[test]
    fn join_paths() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2, 3]")
    }

    #[test]
    fn maybe_unbound() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
if flag:
    y = 1
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1] | Unbound")
    }

    #[test]
    fn if_elif_else() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
elif flag2:
    y = 4
else:
    r = y
    y = 5
    s = y
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[3, 4, 5]")?;
        assert_public_type(&case, "a", "r", "Literal[2]")?;
        assert_public_type(&case, "a", "s", "Literal[5]")
    }

    #[test]
    fn if_elif() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 1
y = 2
if flag:
    y = 3
elif flag2:
    y = 4
x = y
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2, 3, 4]")
    }

    #[test]
    fn literal_int_arithmetic() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
a = 2 + 1
b = a - 4
c = a * b
d = c / 3
e = 5 % 3
",
        )?;

        assert_public_type(&case, "a", "a", "Literal[3]")?;
        assert_public_type(&case, "a", "b", "Literal[-1]")?;
        assert_public_type(&case, "a", "c", "Literal[-3]")?;
        assert_public_type(&case, "a", "d", "Literal[-1]")?;
        assert_public_type(&case, "a", "e", "Literal[2]")
    }

    #[test]
    fn walrus() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = (y := 1) + 1
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[2]")?;
        assert_public_type(&case, "a", "y", "Literal[1]")
    }

    #[test]
    fn ifexpr() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else 2
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")
    }

    #[test]
    fn ifexpr_walrus() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = z = 0
x = (y := 1) if flag else (z := 2)
a = y
b = z
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2]")?;
        assert_public_type(&case, "a", "a", "Literal[0, 1]")?;
        assert_public_type(&case, "a", "b", "Literal[0, 2]")
    }

    #[test]
    fn ifexpr_walrus_2() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
y = 0
(y := 1) if flag else (y := 2)
a = y
",
        )?;

        assert_public_type(&case, "a", "a", "Literal[1, 2]")
    }

    #[test]
    fn ifexpr_nested() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else 2 if flag2 else 3
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1, 2, 3]")
    }

    #[test]
    fn none() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else None
",
        )?;

        assert_public_type(&case, "a", "x", "Literal[1] | None")
    }

    #[test]
    fn narrow_none() -> anyhow::Result<()> {
        let case = create_test()?;

        write_to_path(
            &case,
            "a.py",
            "
x = 1 if flag else None
y = 0
if x is not None:
    y = x
z = y
",
        )?;

        // TODO normalization of unions and intersections: this type is technically correct but
        // begging for normalization
        assert_public_type(&case, "a", "z", "Literal[0] | Literal[1] | None & ~None")
    }
}
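The normalization TODO in `narrow_none` is about simplification rather than correctness. In `Literal[0] | Literal[1] | None & ~None`, the trailing intersection distributes over the union it narrows: `(Literal[1] | None) & ~None = (Literal[1] & ~None) | (None & ~None) = Literal[1] | Never`. A normalization pass would therefore reduce the asserted type to `Literal[0] | Literal[1]`, i.e. `Literal[0, 1]`.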
@@ -53,6 +53,16 @@ pub enum SourceKind {
    IpyNotebook(Arc<Notebook>),
}

impl<'a> From<&'a SourceKind> for PySourceType {
    fn from(value: &'a SourceKind) -> Self {
        match value {
            SourceKind::Python(_) => PySourceType::Python,
            SourceKind::Stub(_) => PySourceType::Stub,
            SourceKind::IpyNotebook(_) => PySourceType::Ipynb,
        }
    }
}
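A usage sketch for the conversion added above (hypothetical call site, not from the diff):

// Hypothetical caller: with the From impl above in scope, a &SourceKind
// converts wherever a PySourceType is expected.
fn parser_mode(kind: &SourceKind) -> PySourceType {
    kind.into()
}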

#[derive(Debug, Clone, PartialEq)]
pub struct Source {
    kind: SourceKind,

File diff suppressed because it is too large
@@ -1,292 +0,0 @@
#![allow(dead_code)]

use ruff_python_ast as ast;
use ruff_python_ast::AstNode;

use crate::db::{QueryResult, SemanticDb, SemanticJar};

use crate::module::ModuleName;
use crate::parse::parse;
use crate::symbols::{
    resolve_global_symbol, symbol_table, Definition, GlobalSymbolId, ImportFromDefinition,
};
use crate::types::Type;
use crate::FileId;

// FIXME: Figure out proper dead-lock free synchronisation now that this takes `&db` instead of `&mut db`.
#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_symbol_type(db: &dyn SemanticDb, symbol: GlobalSymbolId) -> QueryResult<Type> {
    let symbols = symbol_table(db, symbol.file_id)?;
    let defs = symbols.definitions(symbol.symbol_id);
    let jar: &SemanticJar = db.jar()?;

    if let Some(ty) = jar.type_store.get_cached_symbol_type(symbol) {
        return Ok(ty);
    }

    // TODO handle multiple defs, conditional defs...
    assert_eq!(defs.len(), 1);

    let ty = infer_definition_type(db, symbol, defs[0].clone())?;

    jar.type_store.cache_symbol_type(symbol, ty);

    // TODO record dependencies
    Ok(ty)
}

#[tracing::instrument(level = "trace", skip(db))]
pub fn infer_definition_type(
    db: &dyn SemanticDb,
    symbol: GlobalSymbolId,
    definition: Definition,
) -> QueryResult<Type> {
    let jar: &SemanticJar = db.jar()?;
    let type_store = &jar.type_store;
    let file_id = symbol.file_id;

    match definition {
        Definition::ImportFrom(ImportFromDefinition {
            module,
            name,
            level,
        }) => {
            // TODO relative imports
            assert!(matches!(level, 0));
            let module_name = ModuleName::new(module.as_ref().expect("TODO relative imports"));
            if let Some(remote_symbol) = resolve_global_symbol(db, module_name, &name)? {
                infer_symbol_type(db, remote_symbol)
            } else {
                Ok(Type::Unknown)
            }
        }
        Definition::ClassDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key.resolve_unwrap(ast.as_any_node_ref());

                let mut bases = Vec::with_capacity(node.bases().len());

                for base in node.bases() {
                    bases.push(infer_expr_type(db, file_id, base)?);
                }
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = Type::Class(type_store.add_class(file_id, &node.name.id, scope_id, bases));
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::FunctionDef(node_key) => {
            if let Some(ty) = type_store.get_cached_node_type(file_id, node_key.erased()) {
                Ok(ty)
            } else {
                let parsed = parse(db.upcast(), file_id)?;
                let ast = parsed.ast();
                let table = symbol_table(db, file_id)?;
                let node = node_key
                    .resolve(ast.as_any_node_ref())
                    .expect("node key should resolve");

                let decorator_tys = node
                    .decorator_list
                    .iter()
                    .map(|decorator| infer_expr_type(db, file_id, &decorator.expression))
                    .collect::<QueryResult<_>>()?;
                let scope_id = table.scope_id_for_node(node_key.erased());
                let ty = type_store
                    .add_function(
                        file_id,
                        &node.name.id,
                        symbol.symbol_id,
                        scope_id,
                        decorator_tys,
                    )
                    .into();
                type_store.cache_node_type(file_id, *node_key.erased(), ty);
                Ok(ty)
            }
        }
        Definition::Assignment(node_key) => {
            let parsed = parse(db.upcast(), file_id)?;
            let ast = parsed.ast();
            let node = node_key.resolve_unwrap(ast.as_any_node_ref());
            // TODO handle unpacking assignment correctly
            infer_expr_type(db, file_id, &node.value)
        }
        _ => todo!("other kinds of definitions"),
    }
}

fn infer_expr_type(db: &dyn SemanticDb, file_id: FileId, expr: &ast::Expr) -> QueryResult<Type> {
    // TODO cache the resolution of the type on the node
    let symbols = symbol_table(db, file_id)?;
    match expr {
        ast::Expr::Name(name) => {
            // TODO look up in the correct scope, don't assume global
            if let Some(symbol_id) = symbols.root_symbol_id_by_name(&name.id) {
                infer_symbol_type(db, GlobalSymbolId { file_id, symbol_id })
            } else {
                Ok(Type::Unknown)
            }
        }
        _ => todo!("full expression type resolution"),
    }
}

#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::db::{HasJar, SemanticJar};
    use crate::module::{
        resolve_module, set_module_search_paths, ModuleName, ModuleSearchPath, ModuleSearchPathKind,
    };
    use crate::symbols::{symbol_table, GlobalSymbolId};
    use crate::types::{infer_symbol_type, Type};
    use crate::Name;

    // TODO with virtual filesystem we shouldn't have to write files to disk for these
    // tests

    struct TestCase {
        temp_dir: tempfile::TempDir,
        db: TestDb,

        src: ModuleSearchPath,
    }

    fn create_test() -> std::io::Result<TestCase> {
        let temp_dir = tempfile::tempdir()?;

        let src = temp_dir.path().join("src");
        std::fs::create_dir(&src)?;
        let src = ModuleSearchPath::new(src.canonicalize()?, ModuleSearchPathKind::FirstParty);

        let roots = vec![src.clone()];

        let mut db = TestDb::default();
        set_module_search_paths(&mut db, roots);

        Ok(TestCase { temp_dir, db, src })
    }

    #[test]
    fn follow_import_to_class() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let a_path = case.src.path().join("a.py");
        let b_path = case.src.path().join("b.py");
        std::fs::write(a_path, "from b import C as D; E = D")?;
        std::fs::write(b_path, "class C: pass")?;
        let a_file = resolve_module(db, ModuleName::new("a"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let a_syms = symbol_table(db, a_file)?;
        let e_sym = a_syms
            .root_symbol_id_by_name("E")
            .expect("E symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: a_file,
                symbol_id: e_sym,
            },
        )?;

        let jar = HasJar::<SemanticJar>::jar(db)?;
        assert!(matches!(ty, Type::Class(_)));
        assert_eq!(format!("{}", ty.display(&jar.type_store)), "Literal[C]");

        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class Base: pass\nclass Sub(Base): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("Sub")
            .expect("Sub symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("Sub is not a Class")
        };
        let jar = HasJar::<SemanticJar>::jar(db)?;
        let base_names: Vec<_> = jar
            .type_store
            .get_class(class_id)
            .bases()
            .iter()
            .map(|base_ty| format!("{}", base_ty.display(&jar.type_store)))
            .collect();

        assert_eq!(base_names, vec!["Literal[Base]"]);

        Ok(())
    }

    #[test]
    fn resolve_method() -> anyhow::Result<()> {
        let case = create_test()?;
        let db = &case.db;

        let path = case.src.path().join("mod.py");
        std::fs::write(path, "class C:\n def f(self): pass")?;
        let file = resolve_module(db, ModuleName::new("mod"))?
            .expect("module should be found")
            .path(db)?
            .file();
        let syms = symbol_table(db, file)?;
        let sym = syms
            .root_symbol_id_by_name("C")
            .expect("C symbol should be found");

        let ty = infer_symbol_type(
            db,
            GlobalSymbolId {
                file_id: file,
                symbol_id: sym,
            },
        )?;

        let Type::Class(class_id) = ty else {
            panic!("C is not a Class");
        };

        let member_ty = class_id
            .get_own_class_member(db, &Name::new("f"))
            .expect("C.f to resolve");

        let Some(Type::Function(func_id)) = member_ty else {
            panic!("C.f is not a Function");
        };

        let jar = HasJar::<SemanticJar>::jar(db)?;
        let function = jar.type_store.get_function(func_id);
        assert_eq!(function.name(), "f");

        Ok(())
    }
}
@@ -1 +0,0 @@
a9d7e861f7a46ae7acd56569326adef302e10f29
crates/red_knot/vendor/typeshed/stdlib/_ast.pyi (vendored, 591 lines)
@@ -1,591 +0,0 @@
|
||||
import sys
|
||||
import typing_extensions
|
||||
from typing import Any, ClassVar, Literal
|
||||
|
||||
PyCF_ONLY_AST: Literal[1024]
|
||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
||||
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
|
||||
|
||||
# Alias used for fields that must always be valid identifiers
|
||||
# A string `x` counts as a valid identifier if both the following are True
|
||||
# (1) `x.isidentifier()` evaluates to `True`
|
||||
# (2) `keyword.iskeyword(x)` evaluates to `False`
|
||||
_Identifier: typing_extensions.TypeAlias = str
|
||||
|
||||
class AST:
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ()
|
||||
_attributes: ClassVar[tuple[str, ...]]
|
||||
_fields: ClassVar[tuple[str, ...]]
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
|
||||
# TODO: Not all nodes have all of the following attributes
|
||||
lineno: int
|
||||
col_offset: int
|
||||
end_lineno: int | None
|
||||
end_col_offset: int | None
|
||||
type_comment: str | None
|
||||
|
||||
class mod(AST): ...
|
||||
class type_ignore(AST): ...
|
||||
|
||||
class TypeIgnore(type_ignore):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lineno", "tag")
|
||||
tag: str
|
||||
|
||||
class FunctionType(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("argtypes", "returns")
|
||||
argtypes: list[expr]
|
||||
returns: expr
|
||||
|
||||
class Module(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "type_ignores")
|
||||
body: list[stmt]
|
||||
type_ignores: list[TypeIgnore]
|
||||
|
||||
class Interactive(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: list[stmt]
|
||||
|
||||
class Expression(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: expr
|
||||
|
||||
class stmt(AST): ...
|
||||
|
||||
class FunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class AsyncFunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
|
||||
name: _Identifier
|
||||
args: arguments
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
returns: expr | None
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class ClassDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
||||
elif sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
|
||||
name: _Identifier
|
||||
bases: list[expr]
|
||||
keywords: list[keyword]
|
||||
body: list[stmt]
|
||||
decorator_list: list[expr]
|
||||
if sys.version_info >= (3, 12):
|
||||
type_params: list[type_param]
|
||||
|
||||
class Return(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class Delete(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets",)
|
||||
targets: list[expr]
|
||||
|
||||
class Assign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("targets", "value", "type_comment")
|
||||
targets: list[expr]
|
||||
value: expr
|
||||
|
||||
class AugAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "op", "value")
|
||||
target: Name | Attribute | Subscript
|
||||
op: operator
|
||||
value: expr
|
||||
|
||||
class AnnAssign(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "annotation", "value", "simple")
|
||||
target: Name | Attribute | Subscript
|
||||
annotation: expr
|
||||
value: expr | None
|
||||
simple: int
|
||||
|
||||
class For(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class AsyncFor(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
|
||||
target: expr
|
||||
iter: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class While(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class If(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: list[stmt]
|
||||
orelse: list[stmt]
|
||||
|
||||
class With(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class AsyncWith(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("items", "body", "type_comment")
|
||||
items: list[withitem]
|
||||
body: list[stmt]
|
||||
|
||||
class Raise(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("exc", "cause")
|
||||
exc: expr | None
|
||||
cause: expr | None
|
||||
|
||||
class Try(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
class TryStar(stmt):
|
||||
__match_args__ = ("body", "handlers", "orelse", "finalbody")
|
||||
body: list[stmt]
|
||||
handlers: list[ExceptHandler]
|
||||
orelse: list[stmt]
|
||||
finalbody: list[stmt]
|
||||
|
||||
class Assert(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "msg")
|
||||
test: expr
|
||||
msg: expr | None
|
||||
|
||||
class Import(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[alias]
|
||||
|
||||
class ImportFrom(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("module", "names", "level")
|
||||
module: str | None
|
||||
names: list[alias]
|
||||
level: int
|
||||
|
||||
class Global(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Nonlocal(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("names",)
|
||||
names: list[_Identifier]
|
||||
|
||||
class Expr(stmt):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Pass(stmt): ...
|
||||
class Break(stmt): ...
|
||||
class Continue(stmt): ...
|
||||
class expr(AST): ...
|
||||
|
||||
class BoolOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "values")
|
||||
op: boolop
|
||||
values: list[expr]
|
||||
|
||||
class BinOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "op", "right")
|
||||
left: expr
|
||||
op: operator
|
||||
right: expr
|
||||
|
||||
class UnaryOp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("op", "operand")
|
||||
op: unaryop
|
||||
operand: expr
|
||||
|
||||
class Lambda(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("args", "body")
|
||||
args: arguments
|
||||
body: expr
|
||||
|
||||
class IfExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("test", "body", "orelse")
|
||||
test: expr
|
||||
body: expr
|
||||
orelse: expr
|
||||
|
||||
class Dict(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("keys", "values")
|
||||
keys: list[expr | None]
|
||||
values: list[expr]
|
||||
|
||||
class Set(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts",)
|
||||
elts: list[expr]
|
||||
|
||||
class ListComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class SetComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class DictComp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("key", "value", "generators")
|
||||
key: expr
|
||||
value: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class GeneratorExp(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
elt: expr
|
||||
generators: list[comprehension]
|
||||
|
||||
class Await(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Yield(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
|
||||
class YieldFrom(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class Compare(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "ops", "comparators")
|
||||
left: expr
|
||||
ops: list[cmpop]
|
||||
comparators: list[expr]
|
||||
|
||||
class Call(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("func", "args", "keywords")
|
||||
func: expr
|
||||
args: list[expr]
|
||||
keywords: list[keyword]
|
||||
|
||||
class FormattedValue(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "conversion", "format_spec")
|
||||
value: expr
|
||||
conversion: int
|
||||
format_spec: expr | None
|
||||
|
||||
class JoinedStr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("values",)
|
||||
values: list[expr]
|
||||
|
||||
class Constant(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "kind")
|
||||
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
|
||||
kind: str | None
|
||||
# Aliases for value, for backwards compatibility
|
||||
s: Any
|
||||
n: int | float | complex
|
||||
|
||||
class NamedExpr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "value")
|
||||
target: Name
|
||||
value: expr
|
||||
|
||||
class Attribute(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "attr", "ctx")
|
||||
value: expr
|
||||
attr: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
_Slice: typing_extensions.TypeAlias = expr
|
||||
else:
|
||||
class slice(AST): ...
|
||||
_Slice: typing_extensions.TypeAlias = slice
|
||||
|
||||
class Slice(_Slice):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("lower", "upper", "step")
|
||||
lower: expr | None
|
||||
upper: expr | None
|
||||
step: expr | None
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class ExtSlice(slice):
|
||||
dims: list[slice]
|
||||
|
||||
class Index(slice):
|
||||
value: expr
|
||||
|
||||
class Subscript(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "slice", "ctx")
|
||||
value: expr
|
||||
slice: _Slice
|
||||
ctx: expr_context
|
||||
|
||||
class Starred(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "ctx")
|
||||
value: expr
|
||||
ctx: expr_context
|
||||
|
||||
class Name(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("id", "ctx")
|
||||
id: _Identifier
|
||||
ctx: expr_context
|
||||
|
||||
class List(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
|
||||
class Tuple(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
elts: list[expr]
|
||||
ctx: expr_context
|
||||
if sys.version_info >= (3, 9):
|
||||
dims: list[expr]
|
||||
|
||||
class expr_context(AST): ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class AugLoad(expr_context): ...
|
||||
class AugStore(expr_context): ...
|
||||
class Param(expr_context): ...
|
||||
|
||||
class Suite(mod):
|
||||
body: list[stmt]
|
||||
|
||||
class Del(expr_context): ...
|
||||
class Load(expr_context): ...
|
||||
class Store(expr_context): ...
|
||||
class boolop(AST): ...
|
||||
class And(boolop): ...
|
||||
class Or(boolop): ...
|
||||
class operator(AST): ...
|
||||
class Add(operator): ...
|
||||
class BitAnd(operator): ...
|
||||
class BitOr(operator): ...
|
||||
class BitXor(operator): ...
|
||||
class Div(operator): ...
|
||||
class FloorDiv(operator): ...
|
||||
class LShift(operator): ...
|
||||
class Mod(operator): ...
|
||||
class Mult(operator): ...
|
||||
class MatMult(operator): ...
|
||||
class Pow(operator): ...
|
||||
class RShift(operator): ...
|
||||
class Sub(operator): ...
|
||||
class unaryop(AST): ...
|
||||
class Invert(unaryop): ...
|
||||
class Not(unaryop): ...
|
||||
class UAdd(unaryop): ...
|
||||
class USub(unaryop): ...
|
||||
class cmpop(AST): ...
|
||||
class Eq(cmpop): ...
|
||||
class Gt(cmpop): ...
|
||||
class GtE(cmpop): ...
|
||||
class In(cmpop): ...
|
||||
class Is(cmpop): ...
|
||||
class IsNot(cmpop): ...
|
||||
class Lt(cmpop): ...
|
||||
class LtE(cmpop): ...
|
||||
class NotEq(cmpop): ...
|
||||
class NotIn(cmpop): ...
|
||||
|
||||
class comprehension(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "iter", "ifs", "is_async")
|
||||
target: expr
|
||||
iter: expr
|
||||
ifs: list[expr]
|
||||
is_async: int
|
||||
|
||||
class excepthandler(AST): ...
|
||||
|
||||
class ExceptHandler(excepthandler):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("type", "name", "body")
|
||||
type: expr | None
|
||||
name: _Identifier | None
|
||||
body: list[stmt]
|
||||
|
||||
class arguments(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
|
||||
posonlyargs: list[arg]
|
||||
args: list[arg]
|
||||
vararg: arg | None
|
||||
kwonlyargs: list[arg]
|
||||
kw_defaults: list[expr | None]
|
||||
kwarg: arg | None
|
||||
defaults: list[expr]
|
||||
|
||||
class arg(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "annotation", "type_comment")
|
||||
arg: _Identifier
|
||||
annotation: expr | None
|
||||
|
||||
class keyword(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("arg", "value")
|
||||
arg: _Identifier | None
|
||||
value: expr
|
||||
|
||||
class alias(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("name", "asname")
|
||||
name: str
|
||||
asname: _Identifier | None
|
||||
|
||||
class withitem(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("context_expr", "optional_vars")
|
||||
context_expr: expr
|
||||
optional_vars: expr | None
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
class Match(stmt):
|
||||
__match_args__ = ("subject", "cases")
|
||||
subject: expr
|
||||
cases: list[match_case]
|
||||
|
||||
class pattern(AST): ...
|
||||
# Without the alias, Pyright complains variables named pattern are recursively defined
|
||||
_Pattern: typing_extensions.TypeAlias = pattern
|
||||
|
||||
class match_case(AST):
|
||||
__match_args__ = ("pattern", "guard", "body")
|
||||
pattern: _Pattern
|
||||
guard: expr | None
|
||||
body: list[stmt]
|
||||
|
||||
class MatchValue(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
|
||||
class MatchSingleton(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: Literal[True, False] | None
|
||||
|
||||
class MatchSequence(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
class MatchStar(pattern):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchMapping(pattern):
|
||||
__match_args__ = ("keys", "patterns", "rest")
|
||||
keys: list[expr]
|
||||
patterns: list[pattern]
|
||||
rest: _Identifier | None
|
||||
|
||||
class MatchClass(pattern):
|
||||
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
|
||||
cls: expr
|
||||
patterns: list[pattern]
|
||||
kwd_attrs: list[_Identifier]
|
||||
kwd_patterns: list[pattern]
|
||||
|
||||
class MatchAs(pattern):
|
||||
__match_args__ = ("pattern", "name")
|
||||
pattern: _Pattern | None
|
||||
name: _Identifier | None
|
||||
|
||||
class MatchOr(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
class type_param(AST):
|
||||
end_lineno: int
|
||||
end_col_offset: int
|
||||
|
||||
class TypeVar(type_param):
|
||||
__match_args__ = ("name", "bound")
|
||||
name: _Identifier
|
||||
bound: expr | None
|
||||
|
||||
class ParamSpec(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeVarTuple(type_param):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier
|
||||
|
||||
class TypeAlias(stmt):
|
||||
__match_args__ = ("name", "type_params", "value")
|
||||
name: Name
|
||||
type_params: list[type_param]
|
||||
value: expr
|
||||
@@ -1,20 +0,0 @@
def make_archive(
    base_name: str,
    format: str,
    root_dir: str | None = None,
    base_dir: str | None = None,
    verbose: int = 0,
    dry_run: int = 0,
    owner: str | None = None,
    group: str | None = None,
) -> str: ...
def make_tarball(
    base_name: str,
    base_dir: str,
    compress: str | None = "gzip",
    verbose: int = 0,
    dry_run: int = 0,
    owner: str | None = None,
    group: str | None = None,
) -> str: ...
def make_zipfile(base_name: str, base_dir: str, verbose: int = 0, dry_run: int = 0) -> str: ...
@@ -1,66 +0,0 @@
from _typeshed import Incomplete
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.dist import Distribution
from typing import Any

class Command:
    distribution: Distribution
    sub_commands: list[tuple[str, Callable[[Command], bool] | None]]
    def __init__(self, dist: Distribution) -> None: ...
    @abstractmethod
    def initialize_options(self) -> None: ...
    @abstractmethod
    def finalize_options(self) -> None: ...
    @abstractmethod
    def run(self) -> None: ...
    def announce(self, msg: str, level: int = 1) -> None: ...
    def debug_print(self, msg: str) -> None: ...
    def ensure_string(self, option: str, default: str | None = None) -> None: ...
    def ensure_string_list(self, option: str | list[str]) -> None: ...
    def ensure_filename(self, option: str) -> None: ...
    def ensure_dirname(self, option: str) -> None: ...
    def get_command_name(self) -> str: ...
    def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
    def get_finalized_command(self, command: str, create: int = 1) -> Command: ...
    def reinitialize_command(self, command: Command | str, reinit_subcommands: int = 0) -> Command: ...
    def run_command(self, command: str) -> None: ...
    def get_sub_commands(self) -> list[str]: ...
    def warn(self, msg: str) -> None: ...
    def execute(self, func: Callable[..., object], args: Iterable[Any], msg: str | None = None, level: int = 1) -> None: ...
    def mkpath(self, name: str, mode: int = 0o777) -> None: ...
    def copy_file(
        self, infile: str, outfile: str, preserve_mode: int = 1, preserve_times: int = 1, link: str | None = None, level: Any = 1
    ) -> tuple[str, bool]: ...  # level is not used
    def copy_tree(
        self,
        infile: str,
        outfile: str,
        preserve_mode: int = 1,
        preserve_times: int = 1,
        preserve_symlinks: int = 0,
        level: Any = 1,
    ) -> list[str]: ...  # level is not used
    def move_file(self, src: str, dst: str, level: Any = 1) -> str: ...  # level is not used
    def spawn(self, cmd: Iterable[str], search_path: int = 1, level: Any = 1) -> None: ...  # level is not used
    def make_archive(
        self,
        base_name: str,
        format: str,
        root_dir: str | None = None,
        base_dir: str | None = None,
        owner: str | None = None,
        group: str | None = None,
    ) -> str: ...
    def make_file(
        self,
        infiles: str | list[str] | tuple[str, ...],
        outfile: str,
        func: Callable[..., object],
        args: list[Any],
        exec_msg: str | None = None,
        skip_msg: str | None = None,
        level: Any = 1,
    ) -> None: ...  # level is not used
    def ensure_finalized(self) -> None: ...
    def dump_options(self, header: Incomplete | None = None, indent: str = "") -> None: ...
@@ -1,3 +0,0 @@
def newer(source: str, target: str) -> bool: ...
def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ...
def newer_group(sources: list[str], target: str, missing: str = "error") -> bool: ...
@@ -1,13 +0,0 @@
def mkpath(name: str, mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> list[str]: ...
def create_tree(base_dir: str, files: list[str], mode: int = 0o777, verbose: int = 1, dry_run: int = 0) -> None: ...
def copy_tree(
    src: str,
    dst: str,
    preserve_mode: int = 1,
    preserve_times: int = 1,
    preserve_symlinks: int = 0,
    update: int = 0,
    verbose: int = 1,
    dry_run: int = 0,
) -> list[str]: ...
def remove_tree(directory: str, verbose: int = 1, dry_run: int = 0) -> None: ...
@@ -1,14 +0,0 @@
from collections.abc import Sequence

def copy_file(
    src: str,
    dst: str,
    preserve_mode: bool = ...,
    preserve_times: bool = ...,
    update: bool = ...,
    link: str | None = None,
    verbose: bool = ...,
    dry_run: bool = ...,
) -> tuple[str, str]: ...
def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ...
def write_file(filename: str, contents: Sequence[str]) -> None: ...
@@ -1,2 +0,0 @@
def spawn(cmd: list[str], search_path: bool = ..., verbose: bool = ..., dry_run: bool = ...) -> None: ...
def find_executable(executable: str, path: str | None = None) -> str | None: ...
@@ -1,33 +0,0 @@
import builtins
import types
from _typeshed import ReadableBuffer, SupportsRead, SupportsWrite
from typing import Any
from typing_extensions import TypeAlias

version: int

_Marshallable: TypeAlias = (
    # handled in w_object() in marshal.c
    None
    | type[StopIteration]
    | builtins.ellipsis
    | bool
    # handled in w_complex_object() in marshal.c
    | int
    | float
    | complex
    | bytes
    | str
    | tuple[_Marshallable, ...]
    | list[Any]
    | dict[Any, Any]
    | set[Any]
    | frozenset[_Marshallable]
    | types.CodeType
    | ReadableBuffer
)

def dump(value: _Marshallable, file: SupportsWrite[bytes], version: int = 4, /) -> None: ...
def load(file: SupportsRead[bytes], /) -> Any: ...
def dumps(value: _Marshallable, version: int = 4, /) -> bytes: ...
def loads(bytes: ReadableBuffer, /) -> Any: ...
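The `_Marshallable` alias above spells out exactly which values `marshal` accepts. A quick round-trip over a hypothetical payload that stays inside that union:

import marshal

payload = {"answer": 42, "items": (1, 2.5, b"raw"), "flags": frozenset({True})}
blob = marshal.dumps(payload)  # version defaults to 4, matching the stub
assert marshal.loads(blob) == payload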
@@ -1 +0,0 @@
from _stat import *
crates/red_knot_module_resolver/Cargo.toml (new file, 35 lines)
@@ -0,0 +1,35 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

rustc-hash = { workspace = true }
salsa = { workspace = true }
smol_str = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
crates/red_knot_module_resolver/README.md (new file, 9 lines)
@@ -0,0 +1,9 @@
# Red Knot

A work-in-progress multifile module resolver for Ruff.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` records the typeshed commit that the vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
crates/red_knot_module_resolver/build.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
//! Build script to package our vendored typeshed files
//! into a zip archive that can be included in the Ruff binary.
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;

use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;

const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";

/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    let options = FileOptions::default()
        .compression_method(CompressionMethod::Zstd)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
        let dir_entry = entry.unwrap();
        let absolute_path = dir_entry.path();
        let normalized_relative_path = absolute_path
            .strip_prefix(Path::new(directory_path))
            .unwrap()
            .to_slash()
            .expect("Unexpected non-utf8 typeshed path!");

        // Write file or directory explicitly
        // Some unzip tools unzip files with directory paths correctly, some do not!
        if absolute_path.is_file() {
            println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.start_file(normalized_relative_path, options)?;
            let mut f = File::open(absolute_path)?;
            std::io::copy(&mut f, &mut zip).unwrap();
        } else if !normalized_relative_path.is_empty() {
            // Only if not root! Avoids path spec / warning
            // and mapname conversion failed error on unzip
            println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.add_directory(normalized_relative_path, options)?;
        }
    }
    zip.finish()
}

fn main() {
    println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
    assert!(
        Path::new(TYPESHED_SOURCE_DIR).is_dir(),
        "Where is typeshed?"
    );
    let out_dir = std::env::var("OUT_DIR").unwrap();

    // N.B. Deliberately using `format!()` instead of `Path::join()` here,
    // so that we use `/` as a path separator on all platforms.
    // That enables us to load the typeshed zip at compile time in `module.rs`
    // (otherwise we'd have to dynamically determine the exact path to the typeshed zip
    // based on the default path separator for the specific platform we're on,
    // which can't be done at compile time.)
    let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");

    let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
    zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}
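To double-check what the build script emits, the archive can be listed with any zip tool. A small Python sketch, assuming you point it at the `zipped_typeshed.zip` written into Cargo's `OUT_DIR` (the exact path under `target/` varies per build):

import zipfile

# Listing the central directory works on any Python version; reading entry
# bytes would additionally need Zstd support in zipfile, since the build
# script compresses entries with CompressionMethod::Zstd.
with zipfile.ZipFile("zipped_typeshed.zip") as zf:
    assert "stdlib/functools.pyi" in zf.namelist()
    print(len(zf.namelist()), "entries vendored")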
crates/red_knot_module_resolver/src/db.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
use ruff_db::Upcast;

use crate::resolver::{
    file_to_module,
    internal::{ModuleNameIngredient, ModuleResolverSearchPaths},
    resolve_module_query,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    ModuleResolverSearchPaths,
    resolve_module_query,
    file_to_module,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::file_system::{FileSystem, MemoryFileSystem, OsFileSystem};
    use ruff_db::vfs::Vfs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        file_system: TestFileSystem,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
        vfs: Vfs,
    }

    impl TestDb {
        #[allow(unused)]
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                file_system: TestFileSystem::Memory(MemoryFileSystem::default()),
                events: sync::Arc::default(),
                vfs: Vfs::with_stubbed_vendored(),
            }
        }

        /// Returns the memory file system.
        ///
        /// ## Panics
        /// If this test db isn't using a memory file system.
        #[allow(unused)]
        pub(crate) fn memory_file_system(&self) -> &MemoryFileSystem {
            if let TestFileSystem::Memory(fs) = &self.file_system {
                fs
            } else {
                panic!("The test db is not using a memory file system");
            }
        }

        /// Uses the real file system instead of the memory file system.
        ///
        /// This is useful for testing advanced file system features like permissions, symlinks, etc.
        ///
        /// Note that any files written to the memory file system won't be copied over.
        #[allow(unused)]
        pub(crate) fn with_os_file_system(&mut self) {
            self.file_system = TestFileSystem::Os(OsFileSystem);
        }

        #[allow(unused)]
        pub(crate) fn vfs_mut(&mut self) -> &mut Vfs {
            &mut self.vfs
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        #[allow(unused)]
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn file_system(&self) -> &dyn ruff_db::file_system::FileSystem {
            self.file_system.inner()
        }

        fn vfs(&self) -> &ruff_db::vfs::Vfs {
            &self.vfs
        }
    }

    impl Db for TestDb {}

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                file_system: self.file_system.snapshot(),
                events: self.events.clone(),
                vfs: self.vfs.snapshot(),
            })
        }
    }

    enum TestFileSystem {
        Memory(MemoryFileSystem),
        #[allow(unused)]
        Os(OsFileSystem),
    }

    impl TestFileSystem {
        fn inner(&self) -> &dyn FileSystem {
            match self {
                Self::Memory(inner) => inner,
                Self::Os(inner) => inner,
            }
        }

        fn snapshot(&self) -> Self {
            match self {
                Self::Memory(inner) => Self::Memory(inner.snapshot()),
                Self::Os(inner) => Self::Os(inner.snapshot()),
            }
        }
    }
}
crates/red_knot_module_resolver/src/lib.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
mod db;
mod module;
mod resolver;
mod typeshed;

pub use db::{Db, Jar};
pub use module::{ModuleKind, ModuleName};
pub use resolver::{resolve_module, set_module_resolution_settings, ModuleResolutionSettings};
pub use typeshed::versions::TypeshedVersions;
crates/red_knot_module_resolver/src/module.rs (new file, 346 lines)
@@ -0,0 +1,346 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

use ruff_db::file_system::FileSystemPath;
use ruff_db::vfs::{VfsFile, VfsPath};
use ruff_python_stdlib::identifiers::is_identifier;

use crate::Db;

/// A module name, e.g. `foo.bar`.
///
/// Always normalized to the absolute form (never a relative module name, i.e., never `.foo`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ModuleName(smol_str::SmolStr);

impl ModuleName {
    /// Creates a new module name for `name`. Returns `Some` if `name` is a valid, absolute
    /// module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    #[inline]
    pub fn new(name: &str) -> Option<Self> {
        Self::new_from_smol(smol_str::SmolStr::new(name))
    }

    /// Creates a new module name for `name` where `name` is a static string.
    /// Returns `Some` if `name` is a valid, absolute module name and `None` otherwise.
    ///
    /// The module name is invalid if:
    ///
    /// * The name is empty
    /// * The name is relative
    /// * The name ends with a `.`
    /// * The name contains a sequence of multiple dots
    /// * A component of a name (the part between two dots) isn't a valid python identifier.
    ///
    /// ## Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
    /// assert_eq!(ModuleName::new_static("..foo"), None);
    /// assert_eq!(ModuleName::new_static(".foo"), None);
    /// assert_eq!(ModuleName::new_static("foo."), None);
    /// assert_eq!(ModuleName::new_static("foo..bar"), None);
    /// assert_eq!(ModuleName::new_static("2000"), None);
    /// ```
    #[inline]
    pub fn new_static(name: &'static str) -> Option<Self> {
        Self::new_from_smol(smol_str::SmolStr::new_static(name))
    }

    fn new_from_smol(name: smol_str::SmolStr) -> Option<Self> {
        if name.is_empty() {
            return None;
        }

        if name.split('.').all(is_identifier) {
            Some(Self(name))
        } else {
            None
        }
    }
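The validation rule is compact: split on dots and require every component to be an identifier; the empty string is not an identifier, so empty names, leading or trailing dots, and `..` runs all fail the same check. A close Python analogue (note that `str.isidentifier` also accepts keywords, which the crate's `is_identifier` may treat differently):

def is_valid_absolute_module_name(name: str) -> bool:
    # "" is not an identifier, so ".foo", "foo.", and "foo..bar" are rejected.
    return bool(name) and all(part.isidentifier() for part in name.split("."))

assert is_valid_absolute_module_name("foo.bar")
assert not is_valid_absolute_module_name(".foo")
assert not is_valid_absolute_module_name("2000")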

    /// An iterator over the components of the module name:
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
    pub fn components(&self) -> impl DoubleEndedIterator<Item = &str> {
        self.0.split('.')
    }

    /// The name of this module's immediate parent, if it has a parent.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
    /// assert_eq!(ModuleName::new_static("root").unwrap().parent(), None);
    /// ```
    pub fn parent(&self) -> Option<ModuleName> {
        let (parent, _) = self.0.rsplit_once('.')?;

        Some(Self(smol_str::SmolStr::new(parent)))
    }

    /// Returns `true` if the name starts with `other`.
    ///
    /// This is equivalent to checking if `self` is a sub-module of `other`.
    ///
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
    /// assert!(!ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("bar").unwrap()));
    /// assert!(!ModuleName::new_static("foo_bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    /// ```
    pub fn starts_with(&self, other: &ModuleName) -> bool {
        let mut self_components = self.components();
        let other_components = other.components();

        for other_component in other_components {
            if self_components.next() != Some(other_component) {
                return false;
            }
        }

        true
    }

    #[inline]
    pub fn as_str(&self) -> &str {
        &self.0
    }

    pub(crate) fn from_relative_path(path: &FileSystemPath) -> Option<Self> {
        let path = if path.ends_with("__init__.py") || path.ends_with("__init__.pyi") {
            path.parent()?
        } else {
            path
        };

        let name = if let Some(parent) = path.parent() {
            let mut name = String::with_capacity(path.as_str().len());

            for component in parent.components() {
                name.push_str(component.as_os_str().to_str()?);
                name.push('.');
            }

            // SAFETY: Unwrap is safe here or `parent` would have returned `None`.
            name.push_str(path.file_stem().unwrap());

            smol_str::SmolStr::from(name)
        } else {
            smol_str::SmolStr::new(path.file_stem()?)
        };

        Some(Self(name))
    }
}
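`from_relative_path` is the path-to-dotted-name mapping: an `__init__.py` or `__init__.pyi` names its parent directory, and any other file names itself minus the extension. A minimal Python sketch of the same mapping, assuming POSIX-style relative paths:

from pathlib import PurePosixPath

def module_name_from_relative_path(path: str) -> str:
    p = PurePosixPath(path)
    if p.name in ("__init__.py", "__init__.pyi"):
        p = p.parent  # the package directory names the module
    else:
        p = p.with_suffix("")  # strip .py / .pyi
    return ".".join(p.parts)

assert module_name_from_relative_path("foo/bar/__init__.py") == "foo.bar"
assert module_name_from_relative_path("foo/bar.pyi") == "foo.bar"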

impl Deref for ModuleName {
    type Target = str;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

impl PartialEq<str> for ModuleName {
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}

impl PartialEq<ModuleName> for str {
    fn eq(&self, other: &ModuleName) -> bool {
        self == other.as_str()
    }
}

impl std::fmt::Display for ModuleName {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.0)
    }
}

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
pub struct Module {
    inner: Arc<ModuleInner>,
}

impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: ModuleSearchPath,
        file: VfsFile,
    ) -> Self {
        Self {
            inner: Arc::new(ModuleInner {
                name,
                kind,
                search_path,
                file,
            }),
        }
    }

    /// The absolute name of the module (e.g. `foo.bar`)
    pub fn name(&self) -> &ModuleName {
        &self.inner.name
    }

    /// The file containing the source code that defines this module
    pub fn file(&self) -> VfsFile {
        self.inner.file
    }

    /// The search path from which the module was resolved.
    pub fn search_path(&self) -> &ModuleSearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
    pub fn kind(&self) -> ModuleKind {
        self.inner.kind
    }
}

impl std::fmt::Debug for Module {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file())
            .field("search_path", &self.search_path())
            .finish()
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: ModuleSearchPath,
    file: VfsFile,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleKind {
    /// A single-file module (e.g. `foo.py` or `foo.pyi`)
    Module,

    /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
    Package,
}

/// A search path in which to look up modules.
/// Corresponds to a path in [`sys.path`](https://docs.python.org/3/library/sys_path_init.html) at runtime.
///
/// Cloning a search path is cheap because it's an `Arc`.
#[derive(Clone, PartialEq, Eq)]
pub struct ModuleSearchPath {
    inner: Arc<ModuleSearchPathInner>,
}

impl ModuleSearchPath {
    pub fn new<P>(path: P, kind: ModuleSearchPathKind) -> Self
    where
        P: Into<VfsPath>,
    {
        Self {
            inner: Arc::new(ModuleSearchPathInner {
                path: path.into(),
                kind,
            }),
        }
    }

    /// Determine whether this is a first-party, third-party or standard-library search path
    pub fn kind(&self) -> ModuleSearchPathKind {
        self.inner.kind
    }

    /// Return the location of the search path on the file system
    pub fn path(&self) -> &VfsPath {
        &self.inner.path
    }
}

impl std::fmt::Debug for ModuleSearchPath {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ModuleSearchPath")
            .field("path", &self.inner.path)
            .field("kind", &self.kind())
            .finish()
    }
}

#[derive(Eq, PartialEq)]
struct ModuleSearchPathInner {
    path: VfsPath,
    kind: ModuleSearchPathKind,
}

/// Enumeration of the different kinds of search paths type checkers are expected to support.
///
/// N.B. Although we don't implement `Ord` for this enum, they are ordered in terms of the
/// priority that we want to give these modules when resolving them.
/// This is roughly [the order given in the typing spec], but typeshed's stubs
/// for the standard library are moved higher up to match Python's semantics at runtime.
///
/// [the order given in the typing spec]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum ModuleSearchPathKind {
    /// "Extra" paths provided by the user in a config file, env var or CLI flag.
    /// E.g. mypy's `MYPYPATH` env var, or pyright's `stubPath` configuration setting
    Extra,

    /// Files in the project we're directly being invoked on
    FirstParty,

    /// The `stdlib` directory of typeshed (either vendored or custom)
    StandardLibrary,

    /// Stubs or runtime modules installed in site-packages
    SitePackagesThirdParty,

    /// Vendored third-party stubs from typeshed
    VendoredThirdParty,
}
crates/red_knot_module_resolver/src/resolver.rs (new file, 938 lines)
@@ -0,0 +1,938 @@
use salsa::DebugWithDb;
use std::ops::Deref;

use ruff_db::file_system::{FileSystem, FileSystemPath, FileSystemPathBuf};
use ruff_db::vfs::{system_path_to_file, vfs_path_to_file, VfsFile, VfsPath};

use crate::module::{Module, ModuleKind, ModuleName, ModuleSearchPath, ModuleSearchPathKind};
use crate::resolver::internal::ModuleResolverSearchPaths;
use crate::Db;

const TYPESHED_STDLIB_DIRECTORY: &str = "stdlib";

/// Configures the module search paths for the module resolver.
///
/// Must be called before calling any other module resolution functions.
pub fn set_module_resolution_settings(db: &mut dyn Db, config: ModuleResolutionSettings) {
    // There's no concurrency issue here because we hold a `&mut dyn Db` reference. No other
    // thread can mutate the `Db` while we're in this call, so using `try_get` to test if
    // the settings have already been set is safe.
    if let Some(existing) = ModuleResolverSearchPaths::try_get(db) {
        existing
            .set_search_paths(db)
            .to(config.into_ordered_search_paths());
    } else {
        ModuleResolverSearchPaths::new(db, config.into_ordered_search_paths());
    }
}

/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
    let interned_name = internal::ModuleNameIngredient::new(db, module_name);

    resolve_module_query(db, interned_name)
}

/// Salsa query that resolves an interned [`ModuleNameIngredient`] to a module.
///
/// This query should not be called directly. Instead, use [`resolve_module`]. It only exists
/// because Salsa requires the module name to be an ingredient.
#[salsa::tracked]
pub(crate) fn resolve_module_query<'db>(
    db: &'db dyn Db,
    module_name: internal::ModuleNameIngredient<'db>,
) -> Option<Module> {
    let _ = tracing::trace_span!("resolve_module", module_name = ?module_name.debug(db)).enter();

    let name = module_name.name(db);

    let (search_path, module_file, kind) = resolve_name(db, name)?;

    let module = Module::new(name.clone(), kind, search_path, module_file);

    Some(module)
}

/// Resolves the module for the given path.
///
/// Returns `None` if the path is not a module locatable via `sys.path`.
#[tracing::instrument(level = "debug", skip(db))]
pub fn path_to_module(db: &dyn Db, path: &VfsPath) -> Option<Module> {
    // It's not entirely clear on first sight why this method calls `file_to_module` instead of
    // it being the other way round, considering that the first thing that `file_to_module` does
    // is to retrieve the file's path.
    //
    // The reason is that `file_to_module` is a tracked Salsa query and salsa queries require that
    // all arguments are Salsa ingredients (something stored in Salsa). `Path`s aren't salsa ingredients but
    // `VfsFile` is. So what we do here is to retrieve the `path`'s `VfsFile` so that we can make
    // use of Salsa's caching and invalidation.
    let file = vfs_path_to_file(db.upcast(), path)?;
    file_to_module(db, file)
}

/// Resolves the module for the file with the given id.
///
/// Returns `None` if the file is not a module locatable via `sys.path`.
#[salsa::tracked]
#[allow(unused)]
pub(crate) fn file_to_module(db: &dyn Db, file: VfsFile) -> Option<Module> {
    let _ = tracing::trace_span!("file_to_module", file = ?file.debug(db.upcast())).enter();

    let path = file.path(db.upcast());

    let search_paths = module_search_paths(db);

    let relative_path = search_paths
        .iter()
        .find_map(|root| match (root.path(), path) {
            (VfsPath::FileSystem(root_path), VfsPath::FileSystem(path)) => {
                let relative_path = path.strip_prefix(root_path).ok()?;
                Some(relative_path)
            }
            (VfsPath::Vendored(_), VfsPath::Vendored(_)) => {
                todo!("Add support for vendored modules")
            }
            (VfsPath::Vendored(_), VfsPath::FileSystem(_))
            | (VfsPath::FileSystem(_), VfsPath::Vendored(_)) => None,
        })?;

    let module_name = ModuleName::from_relative_path(relative_path)?;

    // Resolve the module name to see if Python would resolve the name to the same path.
    // If it doesn't, then that means that multiple modules have the same name in different
    // root paths, but that the module corresponding to `path` is in a lower priority search path,
    // in which case we ignore it.
    let module = resolve_module(db, module_name)?;

    if file == module.file() {
        Some(module)
    } else {
        // This path is for a module with the same name but with a different precedence. For example:
        // ```
        // src/foo.py
        // src/foo/__init__.py
        // ```
        // The module name of `src/foo.py` is `foo`, but the module loaded by Python is `src/foo/__init__.py`.
        // That means we need to ignore `src/foo.py` even though it resolves to the same module name.
        None
    }
}
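The `src/foo.py` vs `src/foo/__init__.py` tie-break mirrors CPython, where a package beats a same-named module in the same directory. A throwaway check of that runtime behaviour (all paths here are temporary and illustrative):

import importlib.util, os, sys, tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "foo"))
for rel in ("foo.py", os.path.join("foo", "__init__.py")):
    open(os.path.join(root, rel), "w").close()

sys.path.insert(0, root)
print(importlib.util.find_spec("foo").origin)  # .../foo/__init__.py wins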

/// Configures the search paths that are used to resolve modules.
#[derive(Eq, PartialEq, Debug)]
pub struct ModuleResolutionSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Vec<FileSystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub workspace_root: FileSystemPathBuf,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<FileSystemPathBuf>,

    /// Optional path to standard-library typeshed stubs.
    /// Currently this has to be a directory that exists on disk.
    ///
    /// (TODO: fall back to vendored stubs if no custom directory is provided.)
    pub custom_typeshed: Option<FileSystemPathBuf>,
}

impl ModuleResolutionSettings {
    /// Implementation of PEP 561's module resolution order
    /// (with some small, deliberate differences)
    fn into_ordered_search_paths(self) -> OrderedSearchPaths {
        let ModuleResolutionSettings {
            extra_paths,
            workspace_root,
            site_packages,
            custom_typeshed,
        } = self;

        let mut paths: Vec<_> = extra_paths
            .into_iter()
            .map(|path| ModuleSearchPath::new(path, ModuleSearchPathKind::Extra))
            .collect();

        paths.push(ModuleSearchPath::new(
            workspace_root,
            ModuleSearchPathKind::FirstParty,
        ));

        // TODO fall back to vendored typeshed stubs if no custom typeshed directory is provided by the user
        if let Some(custom_typeshed) = custom_typeshed {
            paths.push(ModuleSearchPath::new(
                custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY),
                ModuleSearchPathKind::StandardLibrary,
            ));
        }

        // TODO vendor typeshed's third-party stubs as well as the stdlib, and fall back to them as a final step
        if let Some(site_packages) = site_packages {
            paths.push(ModuleSearchPath::new(
                site_packages,
                ModuleSearchPathKind::SitePackagesThirdParty,
            ));
        }

        OrderedSearchPaths(paths)
    }
}

/// A resolved module resolution order, implementing PEP 561
/// (with some small, deliberate differences)
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub(crate) struct OrderedSearchPaths(Vec<ModuleSearchPath>);

impl Deref for OrderedSearchPaths {
    type Target = [ModuleSearchPath];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// The singleton methods generated by salsa are all `pub` instead of `pub(crate)`, which triggers
// `unreachable_pub`. Work around this by creating a module and allowing `unreachable_pub` for it.
// Salsa also generates uses of `_db` variables for `interned`, which triggers `clippy::used_underscore_binding`. Suppress that too.
// TODO(micha): Contribute a fix for this upstream where the singleton methods have the same visibility as the struct.
#[allow(unreachable_pub, clippy::used_underscore_binding)]
pub(crate) mod internal {
    use crate::module::ModuleName;
    use crate::resolver::OrderedSearchPaths;

    #[salsa::input(singleton)]
    pub(crate) struct ModuleResolverSearchPaths {
        #[return_ref]
        pub(super) search_paths: OrderedSearchPaths,
    }

    /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
    ///
    /// This is needed because Salsa requires that all query arguments are salsa ingredients.
    #[salsa::interned]
    pub(crate) struct ModuleNameIngredient<'db> {
        #[return_ref]
        pub(super) name: ModuleName,
    }
}

fn module_search_paths(db: &dyn Db) -> &[ModuleSearchPath] {
    ModuleResolverSearchPaths::get(db).search_paths(db)
}

/// Given a module name and a list of search paths in which to look up modules,
/// attempt to resolve the module name
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(ModuleSearchPath, VfsFile, ModuleKind)> {
    let search_paths = module_search_paths(db);

    for search_path in search_paths {
        let mut components = name.components();
        let module_name = components.next_back()?;

        let VfsPath::FileSystem(fs_search_path) = search_path.path() else {
            todo!("Vendored search paths are not yet supported");
        };

        match resolve_package(db.file_system(), fs_search_path, components) {
            Ok(resolved_package) => {
                let mut package_path = resolved_package.path;

                package_path.push(module_name);

                // Must be an `__init__.pyi` or `__init__.py` or it isn't a package.
                let kind = if db.file_system().is_directory(&package_path) {
                    package_path.push("__init__");
                    ModuleKind::Package
                } else {
                    ModuleKind::Module
                };

                // TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
                let stub = package_path.with_extension("pyi");

                if let Some(stub) = system_path_to_file(db.upcast(), &stub) {
                    return Some((search_path.clone(), stub, kind));
                }

                let module = package_path.with_extension("py");

                if let Some(module) = system_path_to_file(db.upcast(), &module) {
                    return Some((search_path.clone(), module, kind));
                }

                // For regular packages, don't search the next search path. All files of that
                // package must be in the same location
                if resolved_package.kind.is_regular_package() {
                    return None;
                }
            }
            Err(parent_kind) => {
                if parent_kind.is_regular_package() {
                    // For regular packages, don't search the next search path.
                    return None;
                }
            }
        }
    }

    None
}

fn resolve_package<'a, I>(
    fs: &dyn FileSystem,
    module_search_path: &FileSystemPath,
    components: I,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
{
    let mut package_path = module_search_path.to_path_buf();

    // `true` if inside a folder that is a namespace package (has no `__init__.py`).
    // Namespace packages are special because they can be spread across multiple search paths.
    // https://peps.python.org/pep-0420/
    let mut in_namespace_package = false;

    // `true` if resolving a sub-package. For example, `true` when resolving `bar` of `foo.bar`.
    let mut in_sub_package = false;

    // For `foo.bar.baz`, test that `foo` and `bar` both contain an `__init__.py`
    // (`baz` itself was already split off as the module name).
    for folder in components {
        package_path.push(folder);

        let has_init_py = fs.is_file(&package_path.join("__init__.py"))
            || fs.is_file(&package_path.join("__init__.pyi"));

        if has_init_py {
            in_namespace_package = false;
        } else if fs.is_directory(&package_path) {
            // A directory without an `__init__.py` is a namespace package, continue with the next folder.
            in_namespace_package = true;
        } else if in_namespace_package {
            // Package not found but it is part of a namespace package.
            return Err(PackageKind::Namespace);
        } else if in_sub_package {
            // A regular sub package wasn't found.
            return Err(PackageKind::Regular);
        } else {
            // We couldn't find `foo` for `foo.bar.baz`, search the next search path.
            return Err(PackageKind::Root);
        }

        in_sub_package = true;
    }

    let kind = if in_namespace_package {
        PackageKind::Namespace
    } else if in_sub_package {
        PackageKind::Regular
    } else {
        PackageKind::Root
    };

    Ok(ResolvedPackage {
        kind,
        path: package_path,
    })
}
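The namespace handling in `resolve_package` follows PEP 420: a directory with no `__init__.py` doesn't pin the package to one search path, so later search paths keep being consulted. CPython behaves the same way, which this throwaway sketch (temporary directories only) demonstrates:

import os, sys, tempfile

# Two sys.path entries each contribute half of the `parent.child` namespace.
root = tempfile.mkdtemp()
for entry, mod in (("a", "one"), ("b", "two")):
    pkg = os.path.join(root, entry, "parent", "child")
    os.makedirs(pkg)
    open(os.path.join(pkg, mod + ".py"), "w").close()
    sys.path.insert(0, os.path.join(root, entry))

import parent.child.one  # found under <root>/a
import parent.child.two  # found under <root>/b, same namespace package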
|
||||
#[derive(Debug)]
|
||||
struct ResolvedPackage {
|
||||
path: FileSystemPathBuf,
|
||||
kind: PackageKind,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
|
||||
enum PackageKind {
|
||||
/// A root package or module. E.g. `foo` in `foo.bar.baz` or just `foo`.
|
||||
Root,
|
||||
|
||||
/// A regular sub-package where the parent contains an `__init__.py`.
|
||||
///
|
||||
/// For example, `bar` in `foo.bar` when the `foo` directory contains an `__init__.py`.
|
||||
Regular,
|
||||
|
||||
/// A sub-package in a namespace package. A namespace package is a package without an `__init__.py`.
|
||||
///
|
||||
/// For example, `bar` in `foo.bar` if the `foo` directory contains no `__init__.py`.
|
||||
Namespace,
|
||||
}
|
||||
|
||||
impl PackageKind {
|
||||
const fn is_regular_package(self) -> bool {
|
||||
matches!(self, PackageKind::Regular)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use ruff_db::file_system::{FileSystemPath, FileSystemPathBuf};
|
||||
use ruff_db::vfs::{system_path_to_file, VfsFile, VfsPath};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::module::{ModuleKind, ModuleName};
|
||||
|
||||
use super::{
|
||||
path_to_module, resolve_module, set_module_resolution_settings, ModuleResolutionSettings,
|
||||
TYPESHED_STDLIB_DIRECTORY,
|
||||
};
|
||||
|
||||
struct TestCase {
|
||||
db: TestDb,
|
||||
|
||||
src: FileSystemPathBuf,
|
||||
custom_typeshed: FileSystemPathBuf,
|
||||
site_packages: FileSystemPathBuf,
|
||||
}
|
||||
|
||||
fn create_resolver() -> std::io::Result<TestCase> {
|
||||
let mut db = TestDb::new();
|
||||
|
||||
let src = FileSystemPath::new("src").to_path_buf();
|
||||
let site_packages = FileSystemPath::new("site_packages").to_path_buf();
|
||||
let custom_typeshed = FileSystemPath::new("typeshed").to_path_buf();
|
||||
|
||||
let fs = db.memory_file_system();
|
||||
|
||||
fs.create_directory_all(&src)?;
|
||||
fs.create_directory_all(&site_packages)?;
|
||||
fs.create_directory_all(&custom_typeshed)?;
|
||||
|
||||
let settings = ModuleResolutionSettings {
|
||||
extra_paths: vec![],
|
||||
workspace_root: src.clone(),
|
||||
site_packages: Some(site_packages.clone()),
|
||||
custom_typeshed: Some(custom_typeshed.clone()),
|
||||
};
|
||||
|
||||
set_module_resolution_settings(&mut db, settings);
|
||||
|
||||
Ok(TestCase {
|
||||
db,
|
||||
src,
|
||||
custom_typeshed,
|
||||
site_packages,
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn first_party_module() -> anyhow::Result<()> {
|
||||
let TestCase { db, src, .. } = create_resolver()?;
|
||||
|
||||
let foo_module_name = ModuleName::new_static("foo").unwrap();
|
||||
let foo_path = src.join("foo.py");
|
||||
db.memory_file_system()
|
||||
.write_file(&foo_path, "print('Hello, world!')")?;
|
||||
|
||||
let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(&foo_module),
|
||||
resolve_module(&db, foo_module_name.clone()).as_ref()
|
||||
);
|
||||
|
||||
assert_eq!("foo", foo_module.name());
|
||||
assert_eq!(&src, foo_module.search_path().path());
|
||||
assert_eq!(ModuleKind::Module, foo_module.kind());
|
||||
assert_eq!(&foo_path, foo_module.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(foo_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_path))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn stdlib() -> anyhow::Result<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
custom_typeshed,
|
||||
..
|
||||
} = create_resolver()?;
|
||||
|
||||
let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
|
||||
let functools_path = stdlib_dir.join("functools.py");
|
||||
db.memory_file_system()
|
||||
.write_file(&functools_path, "def update_wrapper(): ...")?;
|
||||
|
||||
let functools_module_name = ModuleName::new_static("functools").unwrap();
|
||||
let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(&functools_module),
|
||||
resolve_module(&db, functools_module_name).as_ref()
|
||||
);
|
||||
|
||||
assert_eq!(&stdlib_dir, functools_module.search_path().path());
|
||||
assert_eq!(ModuleKind::Module, functools_module.kind());
|
||||
assert_eq!(&functools_path.clone(), functools_module.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(functools_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(functools_path))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn first_party_precedence_over_stdlib() -> anyhow::Result<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
src,
|
||||
custom_typeshed,
|
||||
..
|
||||
} = create_resolver()?;
|
||||
|
||||
let stdlib_dir = custom_typeshed.join(TYPESHED_STDLIB_DIRECTORY);
|
||||
let stdlib_functools_path = stdlib_dir.join("functools.py");
|
||||
let first_party_functools_path = src.join("functools.py");
|
||||
|
||||
db.memory_file_system().write_files([
|
||||
(&stdlib_functools_path, "def update_wrapper(): ..."),
|
||||
(&first_party_functools_path, "def update_wrapper(): ..."),
|
||||
])?;
|
||||
|
||||
let functools_module_name = ModuleName::new_static("functools").unwrap();
|
||||
let functools_module = resolve_module(&db, functools_module_name.clone()).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(&functools_module),
|
||||
resolve_module(&db, functools_module_name).as_ref()
|
||||
);
|
||||
assert_eq!(&src, functools_module.search_path().path());
|
||||
assert_eq!(ModuleKind::Module, functools_module.kind());
|
||||
assert_eq!(
|
||||
&first_party_functools_path.clone(),
|
||||
functools_module.file().path(&db)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
Some(functools_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(first_party_functools_path))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: Port typeshed test case. Porting isn't possible at the moment because the vendored zip
|
||||
// is part of the red knot crate
|
||||
// #[test]
|
||||
// fn typeshed_zip_created_at_build_time() -> anyhow::Result<()> {
|
||||
// // The file path here is hardcoded in this crate's `build.rs` script.
|
||||
// // Luckily this crate will fail to build if this file isn't available at build time.
|
||||
// const TYPESHED_ZIP_BYTES: &[u8] =
|
||||
// include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
|
||||
// assert!(!TYPESHED_ZIP_BYTES.is_empty());
|
||||
// let mut typeshed_zip_archive = ZipArchive::new(Cursor::new(TYPESHED_ZIP_BYTES))?;
|
||||
//
|
||||
// let path_to_functools = Path::new("stdlib").join("functools.pyi");
|
||||
// let mut functools_module_stub = typeshed_zip_archive
|
||||
// .by_name(path_to_functools.to_str().unwrap())
|
||||
// .unwrap();
|
||||
// assert!(functools_module_stub.is_file());
|
||||
//
|
||||
// let mut functools_module_stub_source = String::new();
|
||||
// functools_module_stub.read_to_string(&mut functools_module_stub_source)?;
|
||||
//
|
||||
// assert!(functools_module_stub_source.contains("def update_wrapper("));
|
||||
// Ok(())
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn resolve_package() -> anyhow::Result<()> {
|
||||
let TestCase { src, db, .. } = create_resolver()?;
|
||||
|
||||
let foo_dir = src.join("foo");
|
||||
let foo_path = foo_dir.join("__init__.py");
|
||||
|
||||
db.memory_file_system()
|
||||
.write_file(&foo_path, "print('Hello, world!')")?;
|
||||
|
||||
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
|
||||
assert_eq!("foo", foo_module.name());
|
||||
assert_eq!(&src, foo_module.search_path().path());
|
||||
assert_eq!(&foo_path, foo_module.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(&foo_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_path)).as_ref()
|
||||
);
|
||||
|
||||
// Resolving by directory doesn't resolve to the init file.
|
||||
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_dir)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn package_priority_over_module() -> anyhow::Result<()> {
|
||||
let TestCase { db, src, .. } = create_resolver()?;
|
||||
|
||||
let foo_dir = src.join("foo");
|
||||
let foo_init = foo_dir.join("__init__.py");
|
||||
|
||||
db.memory_file_system()
|
||||
.write_file(&foo_init, "print('Hello, world!')")?;
|
||||
|
||||
let foo_py = src.join("foo.py");
|
||||
db.memory_file_system()
|
||||
.write_file(&foo_py, "print('Hello, world!')")?;
|
||||
|
||||
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(&src, foo_module.search_path().path());
|
||||
assert_eq!(&foo_init, foo_module.file().path(&db));
|
||||
assert_eq!(ModuleKind::Package, foo_module.kind());
|
||||
|
||||
assert_eq!(
|
||||
Some(foo_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_init))
|
||||
);
|
||||
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn typing_stub_over_module() -> anyhow::Result<()> {
|
||||
let TestCase { db, src, .. } = create_resolver()?;
|
||||
|
||||
let foo_stub = src.join("foo.pyi");
|
||||
let foo_py = src.join("foo.py");
|
||||
db.memory_file_system()
|
||||
.write_files([(&foo_stub, "x: int"), (&foo_py, "print('Hello, world!')")])?;
|
||||
|
||||
let foo = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(&src, foo.search_path().path());
|
||||
assert_eq!(&foo_stub, foo.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(foo),
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_stub))
|
||||
);
|
||||
assert_eq!(None, path_to_module(&db, &VfsPath::FileSystem(foo_py)));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sub_packages() -> anyhow::Result<()> {
|
||||
let TestCase { db, src, .. } = create_resolver()?;
|
||||
|
||||
let foo = src.join("foo");
|
||||
let bar = foo.join("bar");
|
||||
let baz = bar.join("baz.py");
|
||||
|
||||
db.memory_file_system().write_files([
|
||||
(&foo.join("__init__.py"), ""),
|
||||
(&bar.join("__init__.py"), ""),
|
||||
(&baz, "print('Hello, world!')"),
|
||||
])?;
|
||||
|
||||
let baz_module =
|
||||
resolve_module(&db, ModuleName::new_static("foo.bar.baz").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(&src, baz_module.search_path().path());
|
||||
assert_eq!(&baz, baz_module.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(baz_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(baz))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn namespace_package() -> anyhow::Result<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
src,
|
||||
site_packages,
|
||||
..
|
||||
} = create_resolver()?;
|
||||
|
||||
// From [PEP420](https://peps.python.org/pep-0420/#nested-namespace-packages).
|
||||
// But uses `src` for `project1` and `site_packages2` for `project2`.
|
||||
// ```
|
||||
// src
|
||||
// parent
|
||||
// child
|
||||
// one.py
|
||||
// site_packages
|
||||
// parent
|
||||
// child
|
||||
// two.py
|
||||
// ```
|
||||
|
||||
let parent1 = src.join("parent");
|
||||
let child1 = parent1.join("child");
|
||||
let one = child1.join("one.py");
|
||||
|
||||
let parent2 = site_packages.join("parent");
|
||||
let child2 = parent2.join("child");
|
||||
let two = child2.join("two.py");
|
||||
|
||||
db.memory_file_system().write_files([
|
||||
(&one, "print('Hello, world!')"),
|
||||
(&two, "print('Hello, world!')"),
|
||||
])?;
|
||||
|
||||
let one_module =
|
||||
resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(one_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(one))
|
||||
);
|
||||
|
||||
let two_module =
|
||||
resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap()).unwrap();
|
||||
assert_eq!(
|
||||
Some(two_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(two))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn regular_package_in_namespace_package() -> anyhow::Result<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
src,
|
||||
site_packages,
|
||||
..
|
||||
} = create_resolver()?;
|
||||
|
||||
// Adopted test case from the [PEP420 examples](https://peps.python.org/pep-0420/#nested-namespace-packages).
|
||||
// The `src/parent/child` package is a regular package. Therefore, `site_packages/parent/child/two.py` should not be resolved.
|
||||
// ```
|
||||
// src
|
||||
// parent
|
||||
// child
|
||||
// one.py
|
||||
// site_packages
|
||||
// parent
|
||||
// child
|
||||
// two.py
|
||||
// ```
|
||||
|
||||
let parent1 = src.join("parent");
|
||||
let child1 = parent1.join("child");
|
||||
let one = child1.join("one.py");
|
||||
|
||||
let parent2 = site_packages.join("parent");
|
||||
let child2 = parent2.join("child");
|
||||
let two = child2.join("two.py");
|
||||
|
||||
db.memory_file_system().write_files([
|
||||
(&child1.join("__init__.py"), "print('Hello, world!')"),
|
||||
(&one, "print('Hello, world!')"),
|
||||
(&two, "print('Hello, world!')"),
|
||||
])?;
|
||||
|
||||
let one_module =
|
||||
resolve_module(&db, ModuleName::new_static("parent.child.one").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
Some(one_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(one))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
None,
|
||||
resolve_module(&db, ModuleName::new_static("parent.child.two").unwrap())
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn module_search_path_priority() -> anyhow::Result<()> {
|
||||
let TestCase {
|
||||
db,
|
||||
src,
|
||||
site_packages,
|
||||
..
|
||||
} = create_resolver()?;
|
||||
|
||||
let foo_src = src.join("foo.py");
|
||||
let foo_site_packages = site_packages.join("foo.py");
|
||||
|
||||
db.memory_file_system()
|
||||
.write_files([(&foo_src, ""), (&foo_site_packages, "")])?;
|
||||
|
||||
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
|
||||
assert_eq!(&src, foo_module.search_path().path());
|
||||
assert_eq!(&foo_src, foo_module.file().path(&db));
|
||||
|
||||
assert_eq!(
|
||||
Some(foo_module),
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_src))
|
||||
);
|
||||
assert_eq!(
|
||||
None,
|
||||
path_to_module(&db, &VfsPath::FileSystem(foo_site_packages))
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}

    #[test]
    #[cfg(target_family = "unix")]
    fn symlink() -> anyhow::Result<()> {
        let TestCase {
            mut db,
            src,
            site_packages,
            custom_typeshed,
        } = create_resolver()?;

        db.with_os_file_system();

        let temp_dir = tempfile::tempdir()?;
        let root = FileSystemPath::from_std_path(temp_dir.path()).unwrap();

        let src = root.join(src);
        let site_packages = root.join(site_packages);
        let custom_typeshed = root.join(custom_typeshed);

        let foo = src.join("foo.py");
        let bar = src.join("bar.py");

        std::fs::create_dir_all(src.as_std_path())?;
        std::fs::create_dir_all(site_packages.as_std_path())?;
        std::fs::create_dir_all(custom_typeshed.as_std_path())?;

        std::fs::write(foo.as_std_path(), "")?;
        std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

        let settings = ModuleResolutionSettings {
            extra_paths: vec![],
            workspace_root: src.clone(),
            site_packages: Some(site_packages),
            custom_typeshed: Some(custom_typeshed),
        };

        set_module_resolution_settings(&mut db, settings);

        let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
        let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();

        assert_ne!(foo_module, bar_module);

        assert_eq!(&src, foo_module.search_path().path());
        assert_eq!(&foo, foo_module.file().path(&db));

        // `foo` and `bar` shouldn't resolve to the same file
        assert_eq!(&src, bar_module.search_path().path());
        assert_eq!(&bar, bar_module.file().path(&db));
        assert_eq!(&foo, foo_module.file().path(&db));

        assert_ne!(&foo_module, &bar_module);

        assert_eq!(
            Some(foo_module),
            path_to_module(&db, &VfsPath::FileSystem(foo))
        );
        assert_eq!(
            Some(bar_module),
            path_to_module(&db, &VfsPath::FileSystem(bar))
        );

        Ok(())
    }

    #[test]
    fn deleting_an_unrelated_file_doesnt_change_module_resolution() -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;

        let foo_path = src.join("foo.py");
        let bar_path = src.join("bar.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&bar_path, "y = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();

        let bar = system_path_to_file(&db, &bar_path).expect("bar.py to exist");

        db.clear_salsa_events();

        // Delete `bar.py`
        db.memory_file_system().remove_file(&bar_path)?;
        bar.touch(&mut db);

        // Re-query the foo module. The foo module should still be cached because `bar.py`
        // isn't relevant for resolving `foo`.
        let foo_module2 = resolve_module(&db, foo_module_name);

        assert!(!db
            .take_salsa_events()
            .iter()
            .any(|event| { matches!(event.kind, salsa::EventKind::WillExecute { .. }) }));

        assert_eq!(Some(foo_module), foo_module2);

        Ok(())
    }

    #[test]
    fn adding_a_file_on_which_the_module_resolution_depends_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        assert_eq!(resolve_module(&db, foo_module_name.clone()), None);

        // Now write the foo file
        db.memory_file_system().write_file(&foo_path, "x = 1")?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_path.clone()));
        let foo_file = system_path_to_file(&db, &foo_path).expect("foo.py to exist");

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(foo_file, foo_module.file());

        Ok(())
    }

    #[test]
    fn removing_a_file_that_the_module_resolution_depends_on_invalidates_the_query(
    ) -> anyhow::Result<()> {
        let TestCase { mut db, src, .. } = create_resolver()?;
        let foo_path = src.join("foo.py");
        let foo_init_path = src.join("foo/__init__.py");

        db.memory_file_system()
            .write_files([(&foo_path, "x = 1"), (&foo_init_path, "x = 2")])?;

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_module = resolve_module(&db, foo_module_name.clone()).expect("foo module to exist");

        assert_eq!(&foo_init_path, foo_module.file().path(&db));

        // Delete `foo/__init__.py` and the `foo` folder. `foo` should now resolve to `foo.py`.
        db.memory_file_system().remove_file(&foo_init_path)?;
        db.memory_file_system()
            .remove_directory(foo_init_path.parent().unwrap())?;
        VfsFile::touch_path(&mut db, &VfsPath::FileSystem(foo_init_path.clone()));

        let foo_module = resolve_module(&db, foo_module_name).expect("Foo module to resolve");
        assert_eq!(&foo_path, foo_module.file().path(&db));

        Ok(())
    }
}

crates/red_knot_module_resolver/src/typeshed.rs (new file, 91 lines)
@@ -0,0 +1,91 @@
pub(crate) mod versions;

#[cfg(test)]
mod tests {
    use std::io::{self, Read};
    use std::path::Path;

    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::vfs::VendoredPath;

    // The file path here is hardcoded in this crate's `build.rs` script.
    // Luckily this crate will fail to build if this file isn't available at build time.
    const TYPESHED_ZIP_BYTES: &[u8] =
        include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));

    #[test]
    fn typeshed_zip_created_at_build_time() {
        let mut typeshed_zip_archive =
            zip::ZipArchive::new(io::Cursor::new(TYPESHED_ZIP_BYTES)).unwrap();

        let mut functools_module_stub = typeshed_zip_archive
            .by_name("stdlib/functools.pyi")
            .unwrap();
        assert!(functools_module_stub.is_file());

        let mut functools_module_stub_source = String::new();
        functools_module_stub
            .read_to_string(&mut functools_module_stub_source)
            .unwrap();

        assert!(functools_module_stub_source.contains("def update_wrapper("));
    }
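
For orientation, here is a hedged sketch of what a `build.rs` producing such an archive could look like. This is an illustration only, not the crate's actual script (which is not shown in this diff); it assumes the zip 0.x-style `FileOptions` API and reuses the `zip` and `walkdir` crates the tests above already depend on. Directory entries, which the consistency test below checks for, are omitted for brevity.

// build.rs (hypothetical sketch; not the crate's actual build script)
use std::io::Write;

fn main() {
    let out_dir = std::env::var("OUT_DIR").unwrap();
    let file = std::fs::File::create(format!("{out_dir}/zipped_typeshed.zip")).unwrap();
    let mut writer = zip::ZipWriter::new(file);
    let options = zip::write::FileOptions::default();

    for entry in walkdir::WalkDir::new("vendor/typeshed").min_depth(1) {
        let entry = entry.unwrap();
        if !entry.file_type().is_file() {
            continue;
        }
        // Store each stub under its path relative to the vendored directory,
        // e.g. `stdlib/functools.pyi`, matching the lookup in the test above.
        let relative = entry.path().strip_prefix("vendor/typeshed").unwrap();
        writer
            .start_file(relative.to_str().unwrap(), options)
            .unwrap();
        writer
            .write_all(&std::fs::read(entry.path()).unwrap())
            .unwrap();
    }
    writer.finish().unwrap();
}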

    #[test]
    fn typeshed_vfs_consistent_with_vendored_stubs() {
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_stubs = VendoredFileSystem::new(TYPESHED_ZIP_BYTES).unwrap();

        let mut empty_iterator = true;
        for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
            empty_iterator = false;
            let entry = entry.unwrap();
            let absolute_path = entry.path();
            let file_type = entry.file_type();

            let relative_path = absolute_path
                .strip_prefix(&vendored_typeshed_dir)
                .unwrap_or_else(|_| {
                    panic!("Expected {absolute_path:?} to be a child of {vendored_typeshed_dir:?}")
                });

            let vendored_path = <&VendoredPath>::try_from(relative_path)
                .unwrap_or_else(|_| panic!("Expected {relative_path:?} to be valid UTF-8"));

            assert!(
                vendored_typeshed_stubs.exists(vendored_path),
                "Expected {vendored_path:?} to exist in the `VendoredFileSystem`!

Vendored file system:

{vendored_typeshed_stubs:#?}
"
            );

            let vendored_path_kind = vendored_typeshed_stubs
                .metadata(vendored_path)
                .unwrap_or_else(|| {
                    panic!(
                        "Expected metadata for {vendored_path:?} to be retrievable from the `VendoredFileSystem`!

Vendored file system:

{vendored_typeshed_stubs:#?}
"
                    )
                })
                .kind();

            assert_eq!(
                vendored_path_kind.is_directory(),
                file_type.is_dir(),
                "{vendored_path:?} had type {vendored_path_kind:?}, inconsistent with fs path {relative_path:?}: {file_type:?}"
            );
        }

        assert!(
            !empty_iterator,
            "Expected there to be at least one file or directory in the vendored typeshed stubs!"
        );
    }
}

crates/red_knot_module_resolver/src/typeshed/versions.rs (new file, 590 lines)
@@ -0,0 +1,590 @@
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use rustc_hash::FxHashMap;

use crate::module::ModuleName;

#[derive(Debug, PartialEq, Eq)]
pub struct TypeshedVersionsParseError {
    line_number: NonZeroU16,
    reason: TypeshedVersionsParseErrorKind,
}

impl fmt::Display for TypeshedVersionsParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let TypeshedVersionsParseError {
            line_number,
            reason,
        } = self;
        write!(
            f,
            "Error while parsing line {line_number} of typeshed's VERSIONS file: {reason}"
        )
    }
}

impl std::error::Error for TypeshedVersionsParseError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if let TypeshedVersionsParseErrorKind::IntegerParsingFailure { err, .. } = &self.reason {
            Some(err)
        } else {
            None
        }
    }
}

#[derive(Debug, PartialEq, Eq)]
pub enum TypeshedVersionsParseErrorKind {
    TooManyLines(NonZeroUsize),
    UnexpectedNumberOfColons,
    InvalidModuleName(String),
    UnexpectedNumberOfHyphens,
    UnexpectedNumberOfPeriods(String),
    IntegerParsingFailure {
        version: String,
        err: std::num::ParseIntError,
    },
}

impl fmt::Display for TypeshedVersionsParseErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::TooManyLines(num_lines) => write!(
                f,
                "File has too many lines ({num_lines}); maximum allowed is {}",
                NonZeroU16::MAX
            ),
            Self::UnexpectedNumberOfColons => {
                f.write_str("Expected every non-comment line to have exactly one colon")
            }
            Self::InvalidModuleName(name) => write!(
                f,
                "Expected all components of '{name}' to be valid Python identifiers"
            ),
            Self::UnexpectedNumberOfHyphens => {
                f.write_str("Expected every non-comment line to have exactly one '-' character")
            }
            Self::UnexpectedNumberOfPeriods(format) => write!(
                f,
                "Expected all versions to be in the form {{MAJOR}}.{{MINOR}}; got '{format}'"
            ),
            Self::IntegerParsingFailure { version, err } => write!(
                f,
                "Failed to convert '{version}' to a pair of integers due to {err}",
            ),
        }
    }
}
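
As a quick illustration of how these two `Display` impls compose (a sketch added here for exposition, not part of the original file; it uses the `FromStr` parser defined further down): a malformed version range on line 1 renders with both the line number and the reason.

// Illustrative example; not part of the original versions.rs.
#[cfg(test)]
mod parse_error_display_example {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn line_number_and_reason_compose() {
        // "bar" contains no '-', so range parsing fails on line 1.
        let err = TypeshedVersions::from_str("foo: bar").unwrap_err();
        assert_eq!(
            err.to_string(),
            "Error while parsing line 1 of typeshed's VERSIONS file: \
             Expected every non-comment line to have exactly one '-' character"
        );
    }
}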

#[derive(Debug, PartialEq, Eq)]
pub struct TypeshedVersions(FxHashMap<ModuleName, PyVersionRange>);

impl TypeshedVersions {
    pub fn len(&self) -> usize {
        self.0.len()
    }

    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    pub fn contains_module(&self, module_name: &ModuleName) -> bool {
        self.0.contains_key(module_name)
    }

    pub fn module_exists_on_version(
        &self,
        module: ModuleName,
        version: impl Into<PyVersion>,
    ) -> bool {
        let version = version.into();
        let mut module: Option<ModuleName> = Some(module);
        while let Some(module_to_try) = module {
            if let Some(range) = self.0.get(&module_to_try) {
                return range.contains(version);
            }
            module = module_to_try.parent();
        }
        false
    }
}
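
The parent-module walk above means a submodule without its own VERSIONS entry inherits the range of its closest ancestor that has one. A small sketch of that behaviour (illustrative only, not part of the original file; it uses the `FromStr` parser defined below, and the module names are made-up input data):

// Illustrative example; not part of the original versions.rs.
#[cfg(test)]
mod parent_fallback_example {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn submodule_without_entry_falls_back_to_parent() {
        let versions =
            TypeshedVersions::from_str("asyncio: 3.4-\nasyncio.staggered: 3.8-").unwrap();

        // `asyncio.tasks` has no entry of its own, so the lookup walks up to `asyncio`.
        let tasks = ModuleName::new_static("asyncio.tasks").unwrap();
        assert!(versions.module_exists_on_version(tasks, SupportedPyVersion::Py37));

        // `asyncio.staggered` has its own entry, which takes precedence over `asyncio`'s.
        let staggered = ModuleName::new_static("asyncio.staggered").unwrap();
        assert!(!versions.module_exists_on_version(staggered, SupportedPyVersion::Py37));
    }
}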

impl FromStr for TypeshedVersions {
    type Err = TypeshedVersionsParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut map = FxHashMap::default();

        for (line_index, line) in s.lines().enumerate() {
            // humans expect line numbers to be 1-indexed
            let line_number = NonZeroUsize::new(line_index.saturating_add(1)).unwrap();

            let Ok(line_number) = NonZeroU16::try_from(line_number) else {
                return Err(TypeshedVersionsParseError {
                    line_number: NonZeroU16::MAX,
                    reason: TypeshedVersionsParseErrorKind::TooManyLines(line_number),
                });
            };

            let Some(content) = line.split('#').map(str::trim).next() else {
                continue;
            };
            if content.is_empty() {
                continue;
            }

            let mut parts = content.split(':').map(str::trim);
            let (Some(module_name), Some(rest), None) = (parts.next(), parts.next(), parts.next())
            else {
                return Err(TypeshedVersionsParseError {
                    line_number,
                    reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons,
                });
            };

            let Some(module_name) = ModuleName::new(module_name) else {
                return Err(TypeshedVersionsParseError {
                    line_number,
                    reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                        module_name.to_string(),
                    ),
                });
            };

            match PyVersionRange::from_str(rest) {
                Ok(version) => map.insert(module_name, version),
                Err(reason) => {
                    return Err(TypeshedVersionsParseError {
                        line_number,
                        reason,
                    })
                }
            };
        }

        Ok(Self(map))
    }
}

impl fmt::Display for TypeshedVersions {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let sorted_items: BTreeMap<&ModuleName, &PyVersionRange> = self.0.iter().collect();
        for (module_name, range) in sorted_items {
            writeln!(f, "{module_name}: {range}")?;
        }
        Ok(())
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
enum PyVersionRange {
    AvailableFrom(RangeFrom<PyVersion>),
    AvailableWithin(RangeInclusive<PyVersion>),
}

impl PyVersionRange {
    fn contains(&self, version: PyVersion) -> bool {
        match self {
            Self::AvailableFrom(inner) => inner.contains(&version),
            Self::AvailableWithin(inner) => inner.contains(&version),
        }
    }
}

impl FromStr for PyVersionRange {
    type Err = TypeshedVersionsParseErrorKind;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('-').map(str::trim);
        match (parts.next(), parts.next(), parts.next()) {
            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
            (Some(lower), Some(upper), None) => {
                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
            }
            _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
        }
    }
}

impl fmt::Display for PyVersionRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::AvailableFrom(range_from) => write!(f, "{}-", range_from.start),
            Self::AvailableWithin(range_inclusive) => {
                write!(f, "{}-{}", range_inclusive.start(), range_inclusive.end())
            }
        }
    }
}
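
Only these two range shapes are accepted, and the `Display` impl round-trips them exactly; a brief sketch (illustrative only, not part of the original file):

// Illustrative example; not part of the original versions.rs.
#[cfg(test)]
mod range_roundtrip_example {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn both_range_forms_roundtrip_through_display() {
        // Open-ended: available from 3.8 onwards.
        assert_eq!(PyVersionRange::from_str("3.8-").unwrap().to_string(), "3.8-");
        // Bounded: available from 2.7 through 3.10 inclusive.
        assert_eq!(
            PyVersionRange::from_str("2.7-3.10").unwrap().to_string(),
            "2.7-3.10"
        );
    }
}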

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PyVersion {
    major: u8,
    minor: u8,
}

impl FromStr for PyVersion {
    type Err = TypeshedVersionsParseErrorKind;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('.').map(str::trim);
        let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
            return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                s.to_string(),
            ));
        };
        let major = match u8::from_str(major) {
            Ok(major) => major,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        };
        let minor = match u8::from_str(minor) {
            Ok(minor) => minor,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        };
        Ok(Self { major, minor })
    }
}

impl fmt::Display for PyVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PyVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}
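
Since the grammar is strictly {MAJOR}.{MINOR}, a three-component version is rejected outright (with the whole original string reported) rather than truncated; a sketch (illustrative only, not part of the original file):

// Illustrative example; not part of the original versions.rs.
#[cfg(test)]
mod pyversion_parse_example {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn three_component_versions_are_rejected() {
        assert_eq!(
            PyVersion::from_str("3.8.1"),
            Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                "3.8.1".to_string()
            ))
        );
    }
}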

// TODO: unify with the PythonVersion enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default)]
pub enum SupportedPyVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl From<SupportedPyVersion> for PyVersion {
    fn from(value: SupportedPyVersion) -> Self {
        match value {
            SupportedPyVersion::Py37 => PyVersion { major: 3, minor: 7 },
            SupportedPyVersion::Py38 => PyVersion { major: 3, minor: 8 },
            SupportedPyVersion::Py39 => PyVersion { major: 3, minor: 9 },
            SupportedPyVersion::Py310 => PyVersion {
                major: 3,
                minor: 10,
            },
            SupportedPyVersion::Py311 => PyVersion {
                major: 3,
                minor: 11,
            },
            SupportedPyVersion::Py312 => PyVersion {
                major: 3,
                minor: 12,
            },
            SupportedPyVersion::Py313 => PyVersion {
                major: 3,
                minor: 13,
            },
        }
    }
}
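
Because `PyVersion` derives `Ord` over its numeric `(major, minor)` fields, comparisons come out right where naive string comparison would not (lexicographically, "3.9" > "3.10"). A sketch (illustrative only, not part of the original file):

// Illustrative example; not part of the original versions.rs.
#[cfg(test)]
mod version_ordering_example {
    use super::*;

    #[test]
    fn numeric_ordering_handles_two_digit_minor_versions() {
        // 3.9 < 3.10 numerically, even though "3.9" > "3.10" as strings.
        assert!(
            PyVersion::from(SupportedPyVersion::Py39) < PyVersion::from(SupportedPyVersion::Py310)
        );
    }
}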

#[cfg(test)]
mod tests {
    use std::num::{IntErrorKind, NonZeroU16};
    use std::path::Path;

    use super::*;

    use insta::assert_snapshot;

    const TYPESHED_STDLIB_DIR: &str = "stdlib";

    #[allow(unsafe_code)]
    const ONE: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(1) };

    #[test]
    fn can_parse_vendored_versions_file() {
        let versions_data = include_str!(concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/vendor/typeshed/stdlib/VERSIONS"
        ));

        let versions = TypeshedVersions::from_str(versions_data).unwrap();
        assert!(versions.len() > 100);
        assert!(versions.len() < 1000);

        let asyncio = ModuleName::new_static("asyncio").unwrap();
        let asyncio_staggered = ModuleName::new_static("asyncio.staggered").unwrap();
        let audioop = ModuleName::new_static("audioop").unwrap();

        assert!(versions.contains_module(&asyncio));
        assert!(versions.module_exists_on_version(asyncio, SupportedPyVersion::Py310));

        assert!(versions.contains_module(&asyncio_staggered));
        assert!(
            versions.module_exists_on_version(asyncio_staggered.clone(), SupportedPyVersion::Py38)
        );
        assert!(!versions.module_exists_on_version(asyncio_staggered, SupportedPyVersion::Py37));

        assert!(versions.contains_module(&audioop));
        assert!(versions.module_exists_on_version(audioop.clone(), SupportedPyVersion::Py312));
        assert!(!versions.module_exists_on_version(audioop, SupportedPyVersion::Py313));
    }

    #[test]
    fn typeshed_versions_consistent_with_vendored_stubs() {
        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();

        let mut empty_iterator = true;

        let stdlib_stubs_path = vendored_typeshed_dir.join(TYPESHED_STDLIB_DIR);

        for entry in std::fs::read_dir(&stdlib_stubs_path).unwrap() {
            empty_iterator = false;
            let entry = entry.unwrap();
            let absolute_path = entry.path();

            let relative_path = absolute_path
                .strip_prefix(&stdlib_stubs_path)
                .unwrap_or_else(|_| panic!("Expected path to be a child of {stdlib_stubs_path:?} but found {absolute_path:?}"));

            let relative_path_str = relative_path.as_os_str().to_str().unwrap_or_else(|| {
                panic!("Expected all typeshed paths to be valid UTF-8; got {relative_path:?}")
            });
            if relative_path_str == "VERSIONS" {
                continue;
            }

            let top_level_module = if let Some(extension) = relative_path.extension() {
                // It was a file; strip off the file extension to get the module name:
                let extension = extension
                    .to_str()
                    .unwrap_or_else(|| panic!("Expected all file extensions to be UTF-8; was not true for {relative_path:?}"));

                relative_path_str
                    .strip_suffix(extension)
                    .and_then(|string| string.strip_suffix('.'))
                    .unwrap_or_else(|| {
                        panic!("Expected path {relative_path_str:?} to end with computed extension {extension:?}")
                    })
            } else {
                // It was a directory; no need to do anything to get the module name
                relative_path_str
            };

            let top_level_module = ModuleName::new(top_level_module)
                .unwrap_or_else(|| panic!("{top_level_module:?} was not a valid module name!"));

            assert!(vendored_typeshed_versions.contains_module(&top_level_module));
        }

        assert!(
            !empty_iterator,
            "Expected there to be at least one file or directory in the vendored typeshed stubs"
        );
    }

    #[test]
    fn can_parse_mock_versions_file() {
        const VERSIONS: &str = "\
# a comment
# some more comment
# yet more comment


# and some more comment

bar: 2.7-3.10

# more comment
bar.baz: 3.1-3.9
foo: 3.8- # trailing comment
";
        let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
        assert_eq!(parsed_versions.len(), 3);
        assert_snapshot!(parsed_versions.to_string(), @r###"
        bar: 2.7-3.10
        bar.baz: 3.1-3.9
        foo: 3.8-
        "###
        );

        let foo = ModuleName::new_static("foo").unwrap();
        let bar = ModuleName::new_static("bar").unwrap();
        let bar_baz = ModuleName::new_static("bar.baz").unwrap();
        let spam = ModuleName::new_static("spam").unwrap();

        assert!(parsed_versions.contains_module(&foo));
        assert!(!parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py37));
        assert!(parsed_versions.module_exists_on_version(foo.clone(), SupportedPyVersion::Py38));
        assert!(parsed_versions.module_exists_on_version(foo, SupportedPyVersion::Py311));

        assert!(parsed_versions.contains_module(&bar));
        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py37));
        assert!(parsed_versions.module_exists_on_version(bar.clone(), SupportedPyVersion::Py310));
        assert!(!parsed_versions.module_exists_on_version(bar, SupportedPyVersion::Py311));

        assert!(parsed_versions.contains_module(&bar_baz));
        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py37));
        assert!(parsed_versions.module_exists_on_version(bar_baz.clone(), SupportedPyVersion::Py39));
        assert!(!parsed_versions.module_exists_on_version(bar_baz, SupportedPyVersion::Py310));

        assert!(!parsed_versions.contains_module(&spam));
        assert!(!parsed_versions.module_exists_on_version(spam.clone(), SupportedPyVersion::Py37));
        assert!(!parsed_versions.module_exists_on_version(spam, SupportedPyVersion::Py313));
    }

    #[test]
    fn invalid_huge_versions_file() {
        let offset = 100;
        let too_many = u16::MAX as usize + offset;

        let mut massive_versions_file = String::new();
        for i in 0..too_many {
            massive_versions_file.push_str(&format!("x{i}: 3.8-\n"));
        }

        assert_eq!(
            TypeshedVersions::from_str(&massive_versions_file),
            Err(TypeshedVersionsParseError {
                line_number: NonZeroU16::MAX,
                reason: TypeshedVersionsParseErrorKind::TooManyLines(
                    NonZeroUsize::new(too_many + 1 - offset).unwrap()
                )
            })
        );
    }

    #[test]
    fn invalid_typeshed_versions_bad_colon_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo:: 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfColons
            })
        );
    }

    #[test]
    fn invalid_typeshed_versions_non_identifier_modules() {
        assert_eq!(
            TypeshedVersions::from_str("not!an!identifier!: 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                    "not!an!identifier!".to_string()
                )
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("(also_not).(an_identifier): 3.7"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::InvalidModuleName(
                    "(also_not).(an_identifier)".to_string()
                )
            })
        );
    }

    #[test]
    fn invalid_typeshed_versions_bad_hyphen_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8--"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8--3.9"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens
            })
        );
    }

    #[test]
    fn invalid_typeshed_versions_bad_period_number() {
        assert_eq!(
            TypeshedVersions::from_str("foo: 38-"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods("38".to_string())
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3..8-"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                    "3..8".to_string()
                )
            })
        );
        assert_eq!(
            TypeshedVersions::from_str("foo: 3.8-3..11"),
            Err(TypeshedVersionsParseError {
                line_number: ONE,
                reason: TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                    "3..11".to_string()
                )
            })
        );
    }

    #[test]
    fn invalid_typeshed_versions_non_digits() {
        let err = TypeshedVersions::from_str("foo: 1.two-").unwrap_err();
        assert_eq!(err.line_number, ONE);
        let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
        else {
            panic!()
        };
        assert_eq!(version, "1.two".to_string());
        assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);

        let err = TypeshedVersions::from_str("foo: 3.8-four.9").unwrap_err();
        assert_eq!(err.line_number, ONE);
        let TypeshedVersionsParseErrorKind::IntegerParsingFailure { version, err } = err.reason
        else {
            panic!()
        };
        assert_eq!(version, "four.9".to_string());
        assert_eq!(*err.kind(), IntErrorKind::InvalidDigit);
    }
}

crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt (vendored, new file, 1 line)
@@ -0,0 +1 @@
114409d49b43ba62a179ebb856fa70a5161f751e

crates/red_knot_module_resolver/vendor/typeshed/stdlib/VERSIONS (vendored)
@@ -65,9 +65,9 @@ array: 3.0-
 ast: 3.0-
 asynchat: 3.0-3.11
 asyncio: 3.4-
-asyncio.mixins: 3.10-
 asyncio.exceptions: 3.8-
 asyncio.format_helpers: 3.7-
+asyncio.mixins: 3.10-
 asyncio.runners: 3.7-
 asyncio.staggered: 3.8-
 asyncio.taskgroups: 3.11-
@@ -166,7 +166,7 @@ ipaddress: 3.3-
 itertools: 3.0-
 json: 3.0-
 keyword: 3.0-
-lib2to3: 3.0-
+lib2to3: 3.0-3.12
 linecache: 3.0-
 locale: 3.0-
 logging: 3.0-
@@ -270,6 +270,7 @@ threading: 3.0-
 time: 3.0-
 timeit: 3.0-
 tkinter: 3.0-
+tkinter.tix: 3.0-3.12
 token: 3.0-
 tokenize: 3.0-
 tomllib: 3.11-

crates/red_knot_module_resolver/vendor/typeshed/stdlib/_ast.pyi (vendored, new file, 1233 lines)
File diff suppressed because it is too large

@@ -201,7 +201,7 @@ class Array(_CData, Generic[_CT]):
     # Sized and _CData prevents using _CDataMeta.
     def __len__(self) -> int: ...
     if sys.version_info >= (3, 9):
-        def __class_getitem__(cls, item: Any) -> GenericAlias: ...
+        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...

 def addressof(obj: _CData) -> int: ...
 def alignment(obj_or_type: _CData | type[_CData]) -> int: ...

@@ -63,8 +63,7 @@ A_COLOR: int
 A_DIM: int
 A_HORIZONTAL: int
 A_INVIS: int
-if sys.platform != "darwin":
-    A_ITALIC: int
+A_ITALIC: int
 A_LEFT: int
 A_LOW: int
 A_NORMAL: int

@@ -45,5 +45,5 @@ class make_scanner:
     def __init__(self, context: make_scanner) -> None: ...
     def __call__(self, string: str, index: int) -> tuple[Any, int]: ...

-def encode_basestring_ascii(s: str) -> str: ...
+def encode_basestring_ascii(s: str, /) -> str: ...
 def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ...

@@ -783,7 +783,7 @@ def ntohl(x: int, /) -> int: ... # param & ret val are 32-bit ints
 def ntohs(x: int, /) -> int: ... # param & ret val are 16-bit ints
 def htonl(x: int, /) -> int: ... # param & ret val are 32-bit ints
 def htons(x: int, /) -> int: ... # param & ret val are 16-bit ints
-def inet_aton(ip_string: str, /) -> bytes: ... # ret val 4 bytes in length
+def inet_aton(ip_addr: str, /) -> bytes: ... # ret val 4 bytes in length
 def inet_ntoa(packed_ip: ReadableBuffer, /) -> str: ...
 def inet_pton(address_family: int, ip_string: str, /) -> bytes: ...
 def inet_ntop(address_family: int, packed_ip: ReadableBuffer, /) -> str: ...

@@ -797,7 +797,7 @@ if sys.platform != "win32":
     def socketpair(family: int = ..., type: int = ..., proto: int = ..., /) -> tuple[socket, socket]: ...

 def if_nameindex() -> list[tuple[int, str]]: ...
-def if_nametoindex(name: str, /) -> int: ...
+def if_nametoindex(oname: str, /) -> int: ...
 def if_indextoname(index: int, /) -> str: ...

 CAPI: object

@@ -64,19 +64,19 @@ UF_NODUMP: Literal[0x00000001]
 UF_NOUNLINK: Literal[0x00000010]
 UF_OPAQUE: Literal[0x00000008]

-def S_IMODE(mode: int) -> int: ...
-def S_IFMT(mode: int) -> int: ...
-def S_ISBLK(mode: int) -> bool: ...
-def S_ISCHR(mode: int) -> bool: ...
-def S_ISDIR(mode: int) -> bool: ...
-def S_ISDOOR(mode: int) -> bool: ...
-def S_ISFIFO(mode: int) -> bool: ...
-def S_ISLNK(mode: int) -> bool: ...
-def S_ISPORT(mode: int) -> bool: ...
-def S_ISREG(mode: int) -> bool: ...
-def S_ISSOCK(mode: int) -> bool: ...
-def S_ISWHT(mode: int) -> bool: ...
-def filemode(mode: int) -> str: ...
+def S_IMODE(mode: int, /) -> int: ...
+def S_IFMT(mode: int, /) -> int: ...
+def S_ISBLK(mode: int, /) -> bool: ...
+def S_ISCHR(mode: int, /) -> bool: ...
+def S_ISDIR(mode: int, /) -> bool: ...
+def S_ISDOOR(mode: int, /) -> bool: ...
+def S_ISFIFO(mode: int, /) -> bool: ...
+def S_ISLNK(mode: int, /) -> bool: ...
+def S_ISPORT(mode: int, /) -> bool: ...
+def S_ISREG(mode: int, /) -> bool: ...
+def S_ISSOCK(mode: int, /) -> bool: ...
+def S_ISWHT(mode: int, /) -> bool: ...
+def filemode(mode: int, /) -> str: ...

 if sys.platform == "win32":
     IO_REPARSE_TAG_SYMLINK: int

@@ -101,3 +101,17 @@ if sys.platform == "win32":
     FILE_ATTRIBUTE_SYSTEM: Literal[4]
     FILE_ATTRIBUTE_TEMPORARY: Literal[256]
     FILE_ATTRIBUTE_VIRTUAL: Literal[65536]
+
+if sys.version_info >= (3, 13):
+    SF_SETTABLE: Literal[0x3FFF0000]
+    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
+    # SF_RESTRICTED: Literal[0x00080000]
+    SF_FIRMLINK: Literal[0x00800000]
+    SF_DATALESS: Literal[0x40000000]
+
+    SF_SUPPORTED: Literal[0x9F0000]
+    SF_SYNTHETIC: Literal[0xC0000000]
+
+    UF_TRACKED: Literal[0x00000040]
+    UF_DATAVAULT: Literal[0x00000080]
+    UF_SETTABLE: Literal[0x0000FFFF]

@@ -1,5 +1,7 @@
 import sys
+from collections.abc import Callable
 from typing import Any, ClassVar, Literal, final
+from typing_extensions import TypeAlias

 # _tkinter is meant to be only used internally by tkinter, but some tkinter
 # functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl
@@ -30,6 +32,8 @@ class Tcl_Obj:

 class TclError(Exception): ...

+_TkinterTraceFunc: TypeAlias = Callable[[tuple[str, ...]], object]
+
 # This class allows running Tcl code. Tkinter uses it internally a lot, and
 # it's often handy to drop a piece of Tcl code into a tkinter program. Example:
 #
@@ -86,6 +90,9 @@ class TkappType:
     def unsetvar(self, *args, **kwargs): ...
     def wantobjects(self, *args, **kwargs): ...
     def willdispatch(self): ...
+    if sys.version_info >= (3, 12):
+        def gettrace(self, /) -> _TkinterTraceFunc | None: ...
+        def settrace(self, func: _TkinterTraceFunc | None, /) -> None: ...

 # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS
 ALL_EVENTS: Literal[-3]

@@ -326,6 +326,8 @@ class structseq(Generic[_T_co]):
     # but only has any meaning if you supply it a dict where the keys are strings.
     # https://github.com/python/typeshed/pull/6560#discussion_r767149830
     def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ...
+    if sys.version_info >= (3, 13):
+        def __replace__(self: Self, **kwargs: Any) -> Self: ...

 # Superset of typing.AnyStr that also includes LiteralString
 AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001
Some files were not shown because too many files have changed in this diff.