Compare commits
261 Commits
editables-
...
cjm/phis
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bccc33c6fe | ||
|
|
73160dc8b6 | ||
|
|
15aa5a6d57 | ||
|
|
33512a4249 | ||
|
|
d8ebb03591 | ||
|
|
2e211c5c22 | ||
|
|
9fd8aaaf29 | ||
|
|
d110bd4e60 | ||
|
|
eb9c7ae869 | ||
|
|
7defc0d136 | ||
|
|
45f459bafd | ||
|
|
99e946a005 | ||
|
|
78a7ac0722 | ||
|
|
fa2f3f9f2f | ||
|
|
3898d737d8 | ||
|
|
c487149b7d | ||
|
|
bebed67bf1 | ||
|
|
3ddcad64f5 | ||
|
|
05c35b6975 | ||
|
|
7fc39ad624 | ||
|
|
2520ebb145 | ||
|
|
89c8b49027 | ||
|
|
e05953a991 | ||
|
|
d0ac38f9d3 | ||
|
|
ff53db3d99 | ||
|
|
899a52390b | ||
|
|
82a3e69b8a | ||
|
|
7027344dfc | ||
|
|
fb9f0c448f | ||
|
|
75131c6f4a | ||
|
|
4b9ddc4a06 | ||
|
|
99dc208b00 | ||
|
|
540023262e | ||
|
|
2ea79572ae | ||
|
|
aa0db338d9 | ||
|
|
a99a45868c | ||
|
|
fabf19fdc9 | ||
|
|
59f712a566 | ||
|
|
1d080465de | ||
|
|
3481e16cdf | ||
|
|
d7e9280e1e | ||
|
|
f237d36d2f | ||
|
|
12f22b1fdd | ||
|
|
47d05ee9ea | ||
|
|
9caec36b59 | ||
|
|
cb364780b3 | ||
|
|
71b8bf211f | ||
|
|
109b9cc4f9 | ||
|
|
5d02627794 | ||
|
|
65444bb00e | ||
|
|
8822a79b4d | ||
|
|
2df4d23113 | ||
|
|
603b62607a | ||
|
|
2b71fc4510 | ||
|
|
1b78d872ec | ||
|
|
feba5031dc | ||
|
|
0c2b88f224 | ||
|
|
cf1a57df5a | ||
|
|
597c5f9124 | ||
|
|
69e1c567d4 | ||
|
|
37b9bac403 | ||
|
|
83db48d316 | ||
|
|
c4e651921b | ||
|
|
b595346213 | ||
|
|
253474b312 | ||
|
|
a176679b24 | ||
|
|
1f51048fa4 | ||
|
|
2abfab0f9b | ||
|
|
64f1f3468d | ||
|
|
ffaa35eafe | ||
|
|
c906b0183b | ||
|
|
bc5b9b81dd | ||
|
|
221ea662e0 | ||
|
|
d28c5afd14 | ||
|
|
f1de08c2a0 | ||
|
|
33e9a6a54e | ||
|
|
f577e03021 | ||
|
|
f53733525c | ||
|
|
2daa914334 | ||
|
|
6d9205e346 | ||
|
|
df7345e118 | ||
|
|
dc6aafecc2 | ||
|
|
5107a50ae7 | ||
|
|
a631d600ac | ||
|
|
f34b9a77f0 | ||
|
|
7997da47f5 | ||
|
|
d380b37a09 | ||
|
|
b14fee9320 | ||
|
|
037e817450 | ||
|
|
7fcfedd430 | ||
|
|
50ff5c7544 | ||
|
|
90e5bc2bd9 | ||
|
|
aae9619d3d | ||
|
|
7fa76a2b2b | ||
|
|
14dd6d980e | ||
|
|
846f57fd15 | ||
|
|
8e6aa78796 | ||
|
|
e91a0fe94a | ||
|
|
d2c627efb3 | ||
|
|
10e977d5f5 | ||
|
|
f0318ff889 | ||
|
|
5cc3fed9a8 | ||
|
|
39dd732e27 | ||
|
|
52630a1d55 | ||
|
|
7b5fd63ce8 | ||
|
|
5499821c67 | ||
|
|
7ee7c68f36 | ||
|
|
2393d19f91 | ||
|
|
a8e2ba508e | ||
|
|
0b4d3ce39b | ||
|
|
0a345dc627 | ||
|
|
ff2aa3ea00 | ||
|
|
0d3bad877d | ||
|
|
756060d676 | ||
|
|
b647f3fba8 | ||
|
|
82e69ebf23 | ||
|
|
2c79045342 | ||
|
|
3497f5257b | ||
|
|
25aabec814 | ||
|
|
0e71485ea9 | ||
|
|
43a9d282f7 | ||
|
|
6f357b8b45 | ||
|
|
73d9f11a9c | ||
|
|
d6c6db5a44 | ||
|
|
56d985a972 | ||
|
|
b3e0655cc9 | ||
|
|
06baffec9e | ||
|
|
67a2ae800a | ||
|
|
7a2c75e2fc | ||
|
|
9ee44637ca | ||
|
|
733341ab39 | ||
|
|
341a25eec1 | ||
|
|
38e178e914 | ||
|
|
daccb3f4f3 | ||
|
|
c858afe03a | ||
|
|
3c1c3199d0 | ||
|
|
fbfe2cb2f5 | ||
|
|
1c311e4fdb | ||
|
|
12177a42e3 | ||
|
|
dfb08856eb | ||
|
|
94d817e1a5 | ||
|
|
9296bd4e3f | ||
|
|
da824ba316 | ||
|
|
012198a1b0 | ||
|
|
fbab04fbe1 | ||
|
|
9aa43d5f91 | ||
|
|
966563c79b | ||
|
|
27edadec29 | ||
|
|
2e2b1b460f | ||
|
|
a3e67abf4c | ||
|
|
ee0518e8f7 | ||
|
|
d774a3bd48 | ||
|
|
7e6b19048e | ||
|
|
8e383b9587 | ||
|
|
3f49ab126f | ||
|
|
c1bc7f4dee | ||
|
|
a44d579f21 | ||
|
|
a3900d2b0b | ||
|
|
83b1c48a93 | ||
|
|
138e70bd5c | ||
|
|
ee103ffb25 | ||
|
|
18f87b9497 | ||
|
|
adc8d4e1e7 | ||
|
|
90db361199 | ||
|
|
4738135801 | ||
|
|
264cd750e9 | ||
|
|
7a4419a2a5 | ||
|
|
ac1666d6e2 | ||
|
|
459c85ba27 | ||
|
|
aaa56eb0bd | ||
|
|
f3c14a4276 | ||
|
|
3169d408fa | ||
|
|
a2286c8e47 | ||
|
|
fb9f566f56 | ||
|
|
381bd1ff4a | ||
|
|
2f54d05d97 | ||
|
|
e18b4e42d3 | ||
|
|
9495331a5f | ||
|
|
e1076db7d0 | ||
|
|
1986c9e8e2 | ||
|
|
d7e80dc955 | ||
|
|
87d09f77cd | ||
|
|
bd37ef13b8 | ||
|
|
ec23c974db | ||
|
|
122e5ab428 | ||
|
|
2f2149aca8 | ||
|
|
9d5c31e7da | ||
|
|
25f3ad6238 | ||
|
|
79926329a4 | ||
|
|
9cdc578dd9 | ||
|
|
665c75f7ab | ||
|
|
f37b39d6cc | ||
|
|
e18c45c310 | ||
|
|
d930052de8 | ||
|
|
7ad4df9e9f | ||
|
|
425761e960 | ||
|
|
4b69271809 | ||
|
|
bf23d38a21 | ||
|
|
49f51583fa | ||
|
|
1fe4a5faed | ||
|
|
998bfe0847 | ||
|
|
6f4db8675b | ||
|
|
71f7aa4971 | ||
|
|
9f72f474e6 | ||
|
|
10c993e21a | ||
|
|
2d3914296d | ||
|
|
7571da8778 | ||
|
|
2ceac5f868 | ||
|
|
5ce80827d2 | ||
|
|
e047b9685a | ||
|
|
fc16d8d04d | ||
|
|
175e5d7b88 | ||
|
|
c03f257ed7 | ||
|
|
6bbb4a28c2 | ||
|
|
2ce3e3ae60 | ||
|
|
2a64cccb61 | ||
|
|
928ffd6650 | ||
|
|
e52be0951a | ||
|
|
889073578e | ||
|
|
eac965ecaf | ||
|
|
8659f2f4ea | ||
|
|
c1b292a0dc | ||
|
|
3af6ccb720 | ||
|
|
f0fc6a95fe | ||
|
|
f96a3c71ff | ||
|
|
b9b7deff17 | ||
|
|
40d9324f5a | ||
|
|
a9f8bd59b2 | ||
|
|
143e172431 | ||
|
|
b2d3a05ee4 | ||
|
|
ef1ca0dd38 | ||
|
|
c7b13bb8fc | ||
|
|
dbbe3526ef | ||
|
|
f22c8ab811 | ||
|
|
2a8f95c437 | ||
|
|
ea2d51c2bb | ||
|
|
ed238e0c76 | ||
|
|
3ace12943e | ||
|
|
978909fcf4 | ||
|
|
f8735e1ee8 | ||
|
|
d70ceb6a56 | ||
|
|
fc7d9e95b8 | ||
|
|
b578fca9cb | ||
|
|
8d3146c2b2 | ||
|
|
fa5c841154 | ||
|
|
f8fcbc19d9 | ||
|
|
97fdd48208 | ||
|
|
731ed2e40b | ||
|
|
3a742c17f8 | ||
|
|
053243635c | ||
|
|
82355712c3 | ||
|
|
4bc73dd87e | ||
|
|
53b84ab054 | ||
|
|
3664f85f45 | ||
|
|
2c1926beeb | ||
|
|
4bcc96ae51 | ||
|
|
c0a2b49bac | ||
|
|
ca22248628 | ||
|
|
d8cf8ac2ef | ||
|
|
1c7b84059e | ||
|
|
f82bb67555 |
27
.github/renovate.json5
vendored
27
.github/renovate.json5
vendored
@@ -8,15 +8,32 @@
|
||||
semanticCommits: "disabled",
|
||||
separateMajorMinor: false,
|
||||
prHourlyLimit: 10,
|
||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "npm"],
|
||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
|
||||
cargo: {
|
||||
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
||||
rangeStrategy: "update-lockfile",
|
||||
},
|
||||
pep621: {
|
||||
// The default for this package manager is to only search for `pyproject.toml` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
|
||||
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
|
||||
},
|
||||
pip_requirements: {
|
||||
// The default for this package manager is to run on all requirements.txt files:
|
||||
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
|
||||
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
|
||||
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
|
||||
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
|
||||
// a "negative glob pattern".
|
||||
// See:
|
||||
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
|
||||
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
|
||||
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
|
||||
ignorePaths: ["!docs/requirements*.txt"]
|
||||
},
|
||||
npm: {
|
||||
// The default for this package manager is to only search for `package.json` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
|
||||
fileMatch: ["^playground/.*package\\.json$"],
|
||||
},
|
||||
"pre-commit": {
|
||||
@@ -48,6 +65,14 @@
|
||||
matchManagers: ["cargo"],
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
// `mkdocs-material` requires a manual update to keep the version in sync
|
||||
// with `mkdocs-material-insider`.
|
||||
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
|
||||
matchManagers: ["pip_requirements"],
|
||||
matchPackagePatterns: ["mkdocs-material"],
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
groupName: "pre-commit dependencies",
|
||||
matchManagers: ["pre-commit"],
|
||||
|
||||
17
.github/workflows/ci.yaml
vendored
17
.github/workflows/ci.yaml
vendored
@@ -111,7 +111,7 @@ jobs:
|
||||
- name: "Clippy"
|
||||
run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
|
||||
- name: "Clippy (wasm)"
|
||||
run: cargo clippy -p ruff_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
run: cargo clippy -p ruff_wasm -p red_knot_wasm --target wasm32-unknown-unknown --all-features --locked -- -D warnings
|
||||
|
||||
cargo-test-linux:
|
||||
name: "cargo test (linux)"
|
||||
@@ -142,6 +142,13 @@ jobs:
|
||||
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
# Use --document-private-items so that all our doc comments are kept in
|
||||
# sync, not just public items. Eventually we should do this for all
|
||||
# crates; for now add crates here as they are warning-clean to prevent
|
||||
# regression.
|
||||
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
@@ -191,10 +198,14 @@ jobs:
|
||||
cache-dependency-path: playground/package-lock.json
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: "Run wasm-pack"
|
||||
- name: "Test ruff_wasm"
|
||||
run: |
|
||||
cd crates/ruff_wasm
|
||||
wasm-pack test --node
|
||||
- name: "Test red_knot_wasm"
|
||||
run: |
|
||||
cd crates/red_knot_wasm
|
||||
wasm-pack test --node
|
||||
|
||||
cargo-build-release:
|
||||
name: "cargo build (release)"
|
||||
@@ -619,7 +630,7 @@ jobs:
|
||||
run: cargo codspeed build --features codspeed -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@v2
|
||||
uses: CodSpeedHQ/action@v3
|
||||
with:
|
||||
run: cargo codspeed run
|
||||
token: ${{ secrets.CODSPEED_TOKEN }}
|
||||
|
||||
2
.github/workflows/pr-comment.yaml
vendored
2
.github/workflows/pr-comment.yaml
vendored
@@ -23,6 +23,7 @@ jobs:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
@@ -43,6 +44,7 @@ jobs:
|
||||
path: pr/ecosystem
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
|
||||
4
.github/workflows/publish-docs.yml
vendored
4
.github/workflows/publish-docs.yml
vendored
@@ -104,8 +104,8 @@ jobs:
|
||||
run: |
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
git config user.name "$GITHUB_ACTOR"
|
||||
git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b $branch_name
|
||||
git add site/ruff
|
||||
|
||||
14
.github/workflows/sync_typeshed.yaml
vendored
14
.github/workflows/sync_typeshed.yaml
vendored
@@ -37,13 +37,13 @@ jobs:
|
||||
- name: Sync typeshed
|
||||
id: sync
|
||||
run: |
|
||||
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot_module_resolver/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot_module_resolver/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot_module_resolver/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot_module_resolver/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
|
||||
cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
|
||||
rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
|
||||
git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
|
||||
- name: Commit the changes
|
||||
id: commit
|
||||
if: ${{ steps.sync.outcome == 'success' }}
|
||||
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -21,6 +21,14 @@ flamegraph.svg
|
||||
# `CARGO_TARGET_DIR=target-llvm-lines RUSTFLAGS="-Csymbol-mangling-version=v0" cargo llvm-lines -p ruff --lib`
|
||||
/target*
|
||||
|
||||
# samply profiles
|
||||
profile.json
|
||||
|
||||
# tracing-flame traces
|
||||
tracing.folded
|
||||
tracing-flamechart.svg
|
||||
tracing-flamegraph.svg
|
||||
|
||||
###
|
||||
# Rust.gitignore
|
||||
###
|
||||
|
||||
@@ -14,6 +14,9 @@ MD041: false
|
||||
# MD013/line-length
|
||||
MD013: false
|
||||
|
||||
# MD014/commands-show-output
|
||||
MD014: false
|
||||
|
||||
# MD024/no-duplicate-heading
|
||||
MD024:
|
||||
# Allow when nested under different parents e.g. CHANGELOG.md
|
||||
|
||||
@@ -2,7 +2,8 @@ fail_fast: true
|
||||
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/red_knot_module_resolver/vendor/.*|
|
||||
crates/red_knot_python_semantic/vendor/.*|
|
||||
crates/red_knot_workspace/resources/.*|
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_linter/src/rules/.*/snapshots/.*|
|
||||
crates/ruff/resources/.*|
|
||||
@@ -42,7 +43,7 @@ repos:
|
||||
)$
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.23.2
|
||||
rev: v1.23.6
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -56,18 +57,13 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.5.2
|
||||
rev: v0.5.7
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
types_or: [python, pyi]
|
||||
require_serial: true
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff_linter/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
|
||||
140
CHANGELOG.md
140
CHANGELOG.md
@@ -1,5 +1,145 @@
|
||||
# Changelog
|
||||
|
||||
## 0.5.7
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-comprehensions`\] Account for list and set comprehensions in `unnecessary-literal-within-tuple-call` (`C409`) ([#12657](https://github.com/astral-sh/ruff/pull/12657))
|
||||
- \[`flake8-pyi`\] Add autofix for `future-annotations-in-stub` (`PYI044`) ([#12676](https://github.com/astral-sh/ruff/pull/12676))
|
||||
- \[`flake8-return`\] Avoid syntax error when auto-fixing `RET505` with mixed indentation (space and tabs) ([#12740](https://github.com/astral-sh/ruff/pull/12740))
|
||||
- \[`pydoclint`\] Add `docstring-missing-yields` (`DOC402`) and `docstring-extraneous-yields` (`DOC403`) ([#12538](https://github.com/astral-sh/ruff/pull/12538))
|
||||
- \[`pydoclint`\] Avoid `DOC201` if docstring begins with "Return", "Returns", "Yield", or "Yields" ([#12675](https://github.com/astral-sh/ruff/pull/12675))
|
||||
- \[`pydoclint`\] Deduplicate collected exceptions after traversing function bodies (`DOC501`) ([#12642](https://github.com/astral-sh/ruff/pull/12642))
|
||||
- \[`pydoclint`\] Ignore `DOC` errors for stub functions ([#12651](https://github.com/astral-sh/ruff/pull/12651))
|
||||
- \[`pydoclint`\] Teach rules to understand reraised exceptions as being explicitly raised (`DOC501`, `DOC502`) ([#12639](https://github.com/astral-sh/ruff/pull/12639))
|
||||
- \[`ruff`\] Implement `incorrectly-parenthesized-tuple-in-subscript` (`RUF031`) ([#12480](https://github.com/astral-sh/ruff/pull/12480))
|
||||
- \[`ruff`\] Mark `RUF023` fix as unsafe if `__slots__` is not a set and the binding is used elsewhere ([#12692](https://github.com/astral-sh/ruff/pull/12692))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`refurb`\] Add autofix for `implicit-cwd` (`FURB177`) ([#12708](https://github.com/astral-sh/ruff/pull/12708))
|
||||
- \[`ruff`\] Add autofix for `zip-instead-of-pairwise` (`RUF007`) ([#12663](https://github.com/astral-sh/ruff/pull/12663))
|
||||
- \[`tryceratops`\] Add `BaseException` to `raise-vanilla-class` rule (`TRY002`) ([#12620](https://github.com/astral-sh/ruff/pull/12620))
|
||||
|
||||
### Server
|
||||
|
||||
- Ignore non-file workspace URL; Ruff will display a warning notification in this case ([#12725](https://github.com/astral-sh/ruff/pull/12725))
|
||||
|
||||
### CLI
|
||||
|
||||
- Fix cache invalidation for nested `pyproject.toml` files ([#12727](https://github.com/astral-sh/ruff/pull/12727))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Fix false positives with multiple `async with` items (`ASYNC100`) ([#12643](https://github.com/astral-sh/ruff/pull/12643))
|
||||
- \[`flake8-bandit`\] Avoid false-positives for list concatenations in SQL construction (`S608`) ([#12720](https://github.com/astral-sh/ruff/pull/12720))
|
||||
- \[`flake8-bugbear`\] Treat `return` as equivalent to `break` (`B909`) ([#12646](https://github.com/astral-sh/ruff/pull/12646))
|
||||
- \[`flake8-comprehensions`\] Set comprehensions not a violation for `sum` in `unnecessary-comprehension-in-call` (`C419`) ([#12691](https://github.com/astral-sh/ruff/pull/12691))
|
||||
- \[`flake8-simplify`\] Parenthesize conditions based on precedence when merging if arms (`SIM114`) ([#12737](https://github.com/astral-sh/ruff/pull/12737))
|
||||
- \[`pydoclint`\] Try both 'Raises' section styles when convention is unspecified (`DOC501`) ([#12649](https://github.com/astral-sh/ruff/pull/12649))
|
||||
|
||||
## 0.5.6
|
||||
|
||||
Ruff 0.5.6 automatically enables linting and formatting of notebooks in *preview mode*.
|
||||
You can opt-out of this behavior by adding `*.ipynb` to the `extend-exclude` setting.
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
extend-exclude = ["*.ipynb"]
|
||||
```
|
||||
|
||||
### Preview features
|
||||
|
||||
- Enable notebooks by default in preview mode ([#12621](https://github.com/astral-sh/ruff/pull/12621))
|
||||
- \[`flake8-builtins`\] Implement import, lambda, and module shadowing ([#12546](https://github.com/astral-sh/ruff/pull/12546))
|
||||
- \[`pydoclint`\] Add `docstring-missing-returns` (`DOC201`) and `docstring-extraneous-returns` (`DOC202`) ([#12485](https://github.com/astral-sh/ruff/pull/12485))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`flake8-return`\] Exempt cached properties and other property-like decorators from explicit return rule (`RET501`) ([#12563](https://github.com/astral-sh/ruff/pull/12563))
|
||||
|
||||
### Server
|
||||
|
||||
- Make server panic hook more error resilient ([#12610](https://github.com/astral-sh/ruff/pull/12610))
|
||||
- Use `$/logTrace` for server trace logs in Zed and VS Code ([#12564](https://github.com/astral-sh/ruff/pull/12564))
|
||||
- Keep track of deleted cells for reorder change request ([#12575](https://github.com/astral-sh/ruff/pull/12575))
|
||||
|
||||
### Configuration
|
||||
|
||||
- \[`flake8-implicit-str-concat`\] Always allow explicit multi-line concatenations when implicit concatenations are banned ([#12532](https://github.com/astral-sh/ruff/pull/12532))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-async`\] Avoid flagging `asyncio.timeout`s as unused when the context manager includes `asyncio.TaskGroup` ([#12605](https://github.com/astral-sh/ruff/pull/12605))
|
||||
- \[`flake8-slots`\] Avoid recommending `__slots__` for classes that inherit from more than `namedtuple` ([#12531](https://github.com/astral-sh/ruff/pull/12531))
|
||||
- \[`isort`\] Avoid marking required imports as unused ([#12537](https://github.com/astral-sh/ruff/pull/12537))
|
||||
- \[`isort`\] Preserve trailing inline comments on import-from statements ([#12498](https://github.com/astral-sh/ruff/pull/12498))
|
||||
- \[`pycodestyle`\] Add newlines before comments (`E305`) ([#12606](https://github.com/astral-sh/ruff/pull/12606))
|
||||
- \[`pycodestyle`\] Don't attach comments with mismatched indents ([#12604](https://github.com/astral-sh/ruff/pull/12604))
|
||||
- \[`pyflakes`\] Fix preview-mode bugs in `F401` when attempting to autofix unused first-party submodule imports in an `__init__.py` file ([#12569](https://github.com/astral-sh/ruff/pull/12569))
|
||||
- \[`pylint`\] Respect start index in `unnecessary-list-index-lookup` ([#12603](https://github.com/astral-sh/ruff/pull/12603))
|
||||
- \[`pyupgrade`\] Avoid recommending no-argument super in `slots=True` dataclasses ([#12530](https://github.com/astral-sh/ruff/pull/12530))
|
||||
- \[`pyupgrade`\] Use colon rather than dot formatting for integer-only types ([#12534](https://github.com/astral-sh/ruff/pull/12534))
|
||||
- Fix NFKC normalization bug when removing unused imports ([#12571](https://github.com/astral-sh/ruff/pull/12571))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Consider more stdlib decorators to be property-like ([#12583](https://github.com/astral-sh/ruff/pull/12583))
|
||||
- Improve handling of metaclasses in various linter rules ([#12579](https://github.com/astral-sh/ruff/pull/12579))
|
||||
- Improve consistency between linter rules in determining whether a function is property ([#12581](https://github.com/astral-sh/ruff/pull/12581))
|
||||
|
||||
## 0.5.5
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`fastapi`\] Implement `fastapi-redundant-response-model` (`FAST001`) and `fastapi-non-annotated-dependency`(`FAST002`) ([#11579](https://github.com/astral-sh/ruff/pull/11579))
|
||||
- \[`pydoclint`\] Implement `docstring-missing-exception` (`DOC501`) and `docstring-extraneous-exception` (`DOC502`) ([#11471](https://github.com/astral-sh/ruff/pull/11471))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`numpy`\] Fix NumPy 2.0 rule for `np.alltrue` and `np.sometrue` ([#12473](https://github.com/astral-sh/ruff/pull/12473))
|
||||
- \[`numpy`\] Ignore `NPY201` inside `except` blocks for compatibility with older numpy versions ([#12490](https://github.com/astral-sh/ruff/pull/12490))
|
||||
- \[`pep8-naming`\] Avoid applying `ignore-names` to `self` and `cls` function names (`N804`, `N805`) ([#12497](https://github.com/astral-sh/ruff/pull/12497))
|
||||
|
||||
### Formatter
|
||||
|
||||
- Fix incorrect placement of leading function comment with type params ([#12447](https://github.com/astral-sh/ruff/pull/12447))
|
||||
|
||||
### Server
|
||||
|
||||
- Do not bail code action resolution when a quick fix is requested ([#12462](https://github.com/astral-sh/ruff/pull/12462))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `Ord` implementation of `cmp_fix` ([#12471](https://github.com/astral-sh/ruff/pull/12471))
|
||||
- Raise syntax error for unparenthesized generator expression in multi-argument call ([#12445](https://github.com/astral-sh/ruff/pull/12445))
|
||||
- \[`pydoclint`\] Fix panic in `DOC501` reported in [#12428](https://github.com/astral-sh/ruff/pull/12428) ([#12435](https://github.com/astral-sh/ruff/pull/12435))
|
||||
- \[`flake8-bugbear`\] Allow singleton tuples with starred expressions in `B013` ([#12484](https://github.com/astral-sh/ruff/pull/12484))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add Eglot setup guide for Emacs editor ([#12426](https://github.com/astral-sh/ruff/pull/12426))
|
||||
- Add note about the breaking change in `nvim-lspconfig` ([#12507](https://github.com/astral-sh/ruff/pull/12507))
|
||||
- Add note to include notebook files for native server ([#12449](https://github.com/astral-sh/ruff/pull/12449))
|
||||
- Add setup docs for Zed editor ([#12501](https://github.com/astral-sh/ruff/pull/12501))
|
||||
|
||||
## 0.5.4
|
||||
|
||||
### Rule changes
|
||||
|
||||
- \[`ruff`\] Rename `RUF007` to `zip-instead-of-pairwise` ([#12399](https://github.com/astral-sh/ruff/pull/12399))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- \[`flake8-builtins`\] Avoid shadowing diagnostics for `@override` methods ([#12415](https://github.com/astral-sh/ruff/pull/12415))
|
||||
- \[`flake8-comprehensions`\] Insert parentheses for multi-argument generators ([#12422](https://github.com/astral-sh/ruff/pull/12422))
|
||||
- \[`pydocstyle`\] Handle escaped docstrings within docstring (`D301`) ([#12192](https://github.com/astral-sh/ruff/pull/12192))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix GitHub link to Neovim setup ([#12410](https://github.com/astral-sh/ruff/pull/12410))
|
||||
- Fix `output-format` default in settings reference ([#12409](https://github.com/astral-sh/ruff/pull/12409))
|
||||
|
||||
## 0.5.3
|
||||
|
||||
**Ruff 0.5.3 marks the stable release of the Ruff language server and introduces revamped
|
||||
|
||||
@@ -905,15 +905,11 @@ There are three ways in which an import can be categorized as "first-party":
|
||||
package (e.g., `from foo import bar` or `import foo.bar`), they'll be classified as first-party
|
||||
automatically. This check is as simple as comparing the first segment of the current file's
|
||||
module path to the first segment of the import.
|
||||
1. **Source roots**: Ruff supports a `[src](https://docs.astral.sh/ruff/settings/#src)` setting, which
|
||||
1. **Source roots**: Ruff supports a [`src`](https://docs.astral.sh/ruff/settings/#src) setting, which
|
||||
sets the directories to scan when identifying first-party imports. The algorithm is
|
||||
straightforward: given an import, like `import foo`, iterate over the directories enumerated in
|
||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||
file `foo.py`.
|
||||
|
||||
By default, `src` is set to the project root. In the above example, we'd want to set
|
||||
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
|
||||
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
|
||||
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
|
||||
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
|
||||
explicitly.
|
||||
By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
|
||||
This ensures that Ruff supports both flat and "src" layouts out of the box.
|
||||
|
||||
413
Cargo.lock
generated
413
Cargo.lock
generated
@@ -95,9 +95,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.6"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
|
||||
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
@@ -133,6 +133,12 @@ version = "1.0.86"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
|
||||
|
||||
[[package]]
|
||||
name = "append-only-vec"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74d9f7083455f1a474276ccd32374958d2cb591024aac45101c7623b10271347"
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.7.1"
|
||||
@@ -141,9 +147,9 @@ checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
|
||||
|
||||
[[package]]
|
||||
name = "argfile"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7c5c8e418080ef8aa932039d12eda7b6f5043baf48f1523c166fbc32d004534"
|
||||
checksum = "0a1cc0ba69de57db40674c66f7cf2caee3981ddef084388482c95c0e2133e5e8"
|
||||
dependencies = [
|
||||
"fs-err",
|
||||
"os_str_bytes",
|
||||
@@ -190,9 +196,9 @@ checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.9.1"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
|
||||
checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"regex-automata 0.4.6",
|
||||
@@ -282,7 +288,7 @@ dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"num-traits",
|
||||
"windows-targets 0.52.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -314,9 +320,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.9"
|
||||
version = "4.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
|
||||
checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -324,9 +330,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.9"
|
||||
version = "4.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
|
||||
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -367,9 +373,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.8"
|
||||
version = "4.5.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
|
||||
checksum = "501d359d5f3dcaf6ecdeee48833ae73ec6e42723a1e52419c79abf9507eec0a0"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
@@ -759,9 +765,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.11.3"
|
||||
version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9"
|
||||
checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -814,14 +820,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.23"
|
||||
version = "0.2.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
|
||||
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"windows-sys 0.52.0",
|
||||
"libredox",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -930,9 +936,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
version = "0.8.4"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7"
|
||||
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
|
||||
dependencies = [
|
||||
"hashbrown",
|
||||
]
|
||||
@@ -1021,9 +1027,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "imara-diff"
|
||||
version = "0.1.6"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af13c8ceb376860ff0c6a66d83a8cdd4ecd9e464da24621bbffcd02b49619434"
|
||||
checksum = "fc9da1a252bd44cd341657203722352efc9bc0c847d06ea6d2dc1cd1135e0a01"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"hashbrown",
|
||||
@@ -1031,9 +1037,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "imperative"
|
||||
version = "1.0.5"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b70798296d538cdaa6d652941fcc795963f8b9878b9e300c9fab7a522bd2fc0"
|
||||
checksum = "29a1f6526af721f9aec9ceed7ab8ebfca47f3399d08b80056c2acca3fcb694a9"
|
||||
dependencies = [
|
||||
"phf",
|
||||
"rust-stemmers",
|
||||
@@ -1041,9 +1047,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.2.6"
|
||||
version = "2.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
|
||||
checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
@@ -1137,9 +1143,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "is-macro"
|
||||
version = "0.3.5"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "59a85abdc13717906baccb5a1e435556ce0df215f242892f721dff62bf25288f"
|
||||
checksum = "2069faacbe981460232f880d26bf3c7634e322d49053aa48c27e3ae642f728f1"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"proc-macro2",
|
||||
@@ -1194,9 +1200,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
|
||||
|
||||
[[package]]
|
||||
name = "jobserver"
|
||||
version = "0.1.31"
|
||||
version = "0.1.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e"
|
||||
checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
@@ -1291,6 +1297,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"libc",
|
||||
"redox_syscall 0.5.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1476,11 +1483,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "nu-ansi-term"
|
||||
version = "0.50.0"
|
||||
version = "0.50.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14"
|
||||
checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1518,18 +1525,18 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
|
||||
|
||||
[[package]]
|
||||
name = "ordermap"
|
||||
version = "0.5.0"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ab5a8e22be64dfa1123429350872e7be33594dbf5ae5212c90c5890e71966d1d"
|
||||
checksum = "8c81974681ab4f0cc9fe49cad56f821d1cc67a08cd2caa9b5d58b0adaa5dd36d"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "os_str_bytes"
|
||||
version = "6.6.1"
|
||||
version = "7.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1"
|
||||
checksum = "7ac44c994af577c799b1b4bd80dc214701e349873ad894d6cdf96f4f7526e0b9"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
@@ -1558,7 +1565,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"redox_syscall 0.4.1",
|
||||
"smallvec",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
@@ -1640,9 +1647,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pep440_rs"
|
||||
version = "0.6.0"
|
||||
version = "0.6.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca0a570e7ec9171250cac57614e901f62408094b54b3798bb920d3cf0d4a0e09"
|
||||
checksum = "466eada3179c2e069ca897b99006cbb33f816290eaeec62464eea907e22ae385"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"serde",
|
||||
@@ -1854,41 +1861,24 @@ name = "red_knot"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
"clap",
|
||||
"colored",
|
||||
"countme",
|
||||
"crossbeam",
|
||||
"ctrlc",
|
||||
"notify",
|
||||
"filetime",
|
||||
"rayon",
|
||||
"red_knot_module_resolver",
|
||||
"red_knot_python_semantic",
|
||||
"red_knot_server",
|
||||
"red_knot_workspace",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "red_knot_module_resolver"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"camino",
|
||||
"compact_str",
|
||||
"insta",
|
||||
"once_cell",
|
||||
"path-slash",
|
||||
"ruff_db",
|
||||
"ruff_python_stdlib",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"tracing",
|
||||
"walkdir",
|
||||
"zip",
|
||||
"tracing-flame",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1897,17 +1887,87 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.6.0",
|
||||
"camino",
|
||||
"compact_str",
|
||||
"countme",
|
||||
"hashbrown",
|
||||
"insta",
|
||||
"once_cell",
|
||||
"ordermap",
|
||||
"red_knot_module_resolver",
|
||||
"path-slash",
|
||||
"ruff_db",
|
||||
"ruff_index",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_trivia",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"smallvec",
|
||||
"tempfile",
|
||||
"tracing",
|
||||
"walkdir",
|
||||
"zip",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "red_knot_server"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"jod-thread",
|
||||
"libc",
|
||||
"lsp-server",
|
||||
"lsp-types",
|
||||
"red_knot_python_semantic",
|
||||
"red_knot_workspace",
|
||||
"ruff_db",
|
||||
"ruff_linter",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "red_knot_wasm"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"js-sys",
|
||||
"log",
|
||||
"red_knot_python_semantic",
|
||||
"red_knot_workspace",
|
||||
"ruff_db",
|
||||
"ruff_notebook",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-test",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "red_knot_workspace"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"notify",
|
||||
"red_knot_python_semantic",
|
||||
"ruff_cache",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"thiserror",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -1920,6 +1980,15 @@ dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.5"
|
||||
@@ -1933,9 +2002,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.10.5"
|
||||
version = "1.10.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f"
|
||||
checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
@@ -1992,7 +2061,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.5.3"
|
||||
version = "0.5.7"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2050,7 +2119,8 @@ dependencies = [
|
||||
"criterion",
|
||||
"mimalloc",
|
||||
"once_cell",
|
||||
"red_knot",
|
||||
"red_knot_python_semantic",
|
||||
"red_knot_workspace",
|
||||
"ruff_db",
|
||||
"ruff_linter",
|
||||
"ruff_python_ast",
|
||||
@@ -2087,16 +2157,23 @@ dependencies = [
|
||||
"filetime",
|
||||
"ignore",
|
||||
"insta",
|
||||
"matchit",
|
||||
"path-slash",
|
||||
"ruff_cache",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
"web-time",
|
||||
"zip",
|
||||
]
|
||||
|
||||
@@ -2176,7 +2253,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.5.3"
|
||||
version = "0.5.7"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2200,7 +2277,7 @@ dependencies = [
|
||||
"once_cell",
|
||||
"path-absolutize",
|
||||
"pathdiff",
|
||||
"pep440_rs 0.6.0",
|
||||
"pep440_rs 0.6.6",
|
||||
"pyproject-toml",
|
||||
"quick-junit",
|
||||
"regex",
|
||||
@@ -2230,6 +2307,7 @@ dependencies = [
|
||||
"thiserror",
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-normalization",
|
||||
"unicode-width",
|
||||
"unicode_names2",
|
||||
"url",
|
||||
@@ -2399,13 +2477,17 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"is-macro",
|
||||
"ruff_cache",
|
||||
"ruff_index",
|
||||
"ruff_macros",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2491,7 +2573,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.5.3"
|
||||
version = "0.5.7"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -2530,7 +2612,7 @@ dependencies = [
|
||||
"matchit",
|
||||
"path-absolutize",
|
||||
"path-slash",
|
||||
"pep440_rs 0.6.0",
|
||||
"pep440_rs 0.6.6",
|
||||
"regex",
|
||||
"ruff_cache",
|
||||
"ruff_formatter",
|
||||
@@ -2538,6 +2620,7 @@ dependencies = [
|
||||
"ruff_macros",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_semantic",
|
||||
"ruff_source_file",
|
||||
"rustc-hash 2.0.0",
|
||||
"schemars",
|
||||
@@ -2630,25 +2713,34 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
[[package]]
|
||||
name = "salsa"
|
||||
version = "0.18.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
|
||||
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
|
||||
dependencies = [
|
||||
"append-only-vec",
|
||||
"arc-swap",
|
||||
"crossbeam",
|
||||
"dashmap 5.5.3",
|
||||
"dashmap 6.0.1",
|
||||
"hashlink",
|
||||
"indexmap",
|
||||
"log",
|
||||
"lazy_static",
|
||||
"parking_lot",
|
||||
"rustc-hash 1.1.0",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa-macro-rules",
|
||||
"salsa-macros",
|
||||
"smallvec",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macro-rules"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macros"
|
||||
version = "0.18.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=a1bf3a613f451af7fc0a59411c56abc47fe8e8e1#a1bf3a613f451af7fc0a59411c56abc47fe8e8e1"
|
||||
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -2708,9 +2800,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.204"
|
||||
version = "1.0.206"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
|
||||
checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -2728,9 +2820,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.204"
|
||||
version = "1.0.206"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
|
||||
checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2750,11 +2842,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.120"
|
||||
version = "1.0.124"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5"
|
||||
checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
"ryu",
|
||||
"serde",
|
||||
]
|
||||
@@ -2772,18 +2865,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.6"
|
||||
version = "0.6.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
|
||||
checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_test"
|
||||
version = "1.0.176"
|
||||
version = "1.0.177"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab"
|
||||
checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -2910,9 +3003,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.71"
|
||||
version = "2.0.74"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b146dcf730474b4bcd16c311627b31ede9ab149045db4d6088b3becaea046462"
|
||||
checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2932,14 +3025,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.10.1"
|
||||
version = "3.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
|
||||
checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3000,18 +3094,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.62"
|
||||
version = "1.0.63"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2675633b1499176c2dff06b0856a27976a8f9d436737b4cf4f312d4d91d8bbb"
|
||||
checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "1.0.62"
|
||||
version = "1.0.63"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d20468752b09f49e909e55a5d338caa8bedf615594e9d80bc4c565d30faf798c"
|
||||
checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3075,9 +3169,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.8.14"
|
||||
version = "0.8.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335"
|
||||
checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
@@ -3087,18 +3181,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "toml_datetime"
|
||||
version = "0.6.6"
|
||||
version = "0.6.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
|
||||
checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.22.14"
|
||||
version = "0.22.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38"
|
||||
checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"serde",
|
||||
@@ -3140,6 +3234,17 @@ dependencies = [
|
||||
"valuable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-flame"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0bae117ee14789185e129aaee5d93750abe67fdc5a9a62650452bfe4e122a3a9"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-indicatif"
|
||||
version = "0.3.6"
|
||||
@@ -3183,11 +3288,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-tree"
|
||||
version = "0.3.1"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
|
||||
checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
|
||||
dependencies = [
|
||||
"nu-ansi-term 0.50.0",
|
||||
"nu-ansi-term 0.50.1",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
"tracing-subscriber",
|
||||
@@ -3304,9 +3409,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "2.10.0"
|
||||
version = "2.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72139d247e5f97a3eff96229a7ae85ead5328a39efe76f8bf5a06313d505b6ea"
|
||||
checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"flate2",
|
||||
@@ -3338,9 +3443,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.9.1"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439"
|
||||
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"rand",
|
||||
@@ -3350,9 +3455,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "uuid-macro-internal"
|
||||
version = "1.9.1"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a3ff64d5cde1e2cb5268bdb497235b6bd255ba8244f910dbc3574e59593de68c"
|
||||
checksum = "ee1cd046f83ea2c4e920d6ee9f7c3537ef928d75dce5d84a87c2c5d6b3999a3a"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3521,6 +3626,16 @@ dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "web-time"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "webpki-roots"
|
||||
version = "0.26.1"
|
||||
@@ -3588,7 +3703,7 @@ version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3606,7 +3721,16 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
 "windows-targets 0.52.5",
 "windows-targets 0.52.6",
]

[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
 "windows-targets 0.52.6",
]

[[package]]
@@ -3626,18 +3750,18 @@ dependencies = [

[[package]]
name = "windows-targets"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
 "windows_aarch64_gnullvm 0.52.5",
 "windows_aarch64_msvc 0.52.5",
 "windows_i686_gnu 0.52.5",
 "windows_aarch64_gnullvm 0.52.6",
 "windows_aarch64_msvc 0.52.6",
 "windows_i686_gnu 0.52.6",
 "windows_i686_gnullvm",
 "windows_i686_msvc 0.52.5",
 "windows_x86_64_gnu 0.52.5",
 "windows_x86_64_gnullvm 0.52.5",
 "windows_x86_64_msvc 0.52.5",
 "windows_i686_msvc 0.52.6",
 "windows_x86_64_gnu 0.52.6",
 "windows_x86_64_gnullvm 0.52.6",
 "windows_x86_64_msvc 0.52.6",
]

[[package]]
@@ -3648,9 +3772,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"

[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"

[[package]]
name = "windows_aarch64_msvc"
@@ -3660,9 +3784,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"

[[package]]
name = "windows_aarch64_msvc"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"

[[package]]
name = "windows_i686_gnu"
@@ -3672,15 +3796,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"

[[package]]
name = "windows_i686_gnu"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"

[[package]]
name = "windows_i686_gnullvm"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"

[[package]]
name = "windows_i686_msvc"
@@ -3690,9 +3814,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"

[[package]]
name = "windows_i686_msvc"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"

[[package]]
name = "windows_x86_64_gnu"
@@ -3702,9 +3826,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"

[[package]]
name = "windows_x86_64_gnu"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"

[[package]]
name = "windows_x86_64_gnullvm"
@@ -3714,9 +3838,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"

[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"

[[package]]
name = "windows_x86_64_msvc"
@@ -3726,15 +3850,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

[[package]]
name = "windows_x86_64_msvc"
version = "0.52.5"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

[[package]]
name = "winnow"
version = "0.6.6"
version = "0.6.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352"
checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
dependencies = [
 "memchr",
]
@@ -3795,6 +3919,7 @@ dependencies = [
 "byteorder",
 "crc32fast",
 "crossbeam-utils",
 "flate2",
 "zstd",
]

16 Cargo.toml
@@ -4,7 +4,7 @@ resolver = "2"

[workspace.package]
edition = "2021"
rust-version = "1.75"
rust-version = "1.76"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@@ -35,9 +35,9 @@ ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot = { path = "crates/red_knot" }
red_knot_module_resolver = { path = "crates/red_knot_module_resolver" }
red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
red_knot_workspace = { path = "crates/red_knot_workspace" }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "a1bf3a613f451af7fc0a59411c56abc47fe8e8e1" }
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -131,9 +131,10 @@ thiserror = { version = "1.0.58" }
tikv-jemallocator = { version = "0.6.0" }
toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
unicode-ident = { version = "1.0.12" }
@@ -152,11 +153,12 @@ walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
wasm-bindgen-test = { version = "0.3.42" }
wild = { version = "2" }
zip = { version = "0.6.6", default-features = false, features = ["zstd"] }
zip = { version = "0.6.6", default-features = false }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
25 LICENSE
@@ -1371,3 +1371,28 @@ are:
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

- pydoclint, licensed as follows:
"""
MIT License

Copyright (c) 2023 jsh9

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
17 README.md
@@ -29,14 +29,14 @@ An extremely fast Python linter and code formatter, written in Rust.
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruff-compare-to-flake8), isort, and Black
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [800 built-in rules](https://docs.astral.sh/ruff/rules/), with native re-implementations
  of popular Flake8 plugins, like flake8-bugbear
- ⌨️ First-party [editor integrations](https://docs.astral.sh/ruff/integrations/) for
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://github.com/astral-sh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#pyprojecttoml-discovery)
  [VS Code](https://github.com/astral-sh/ruff-vscode) and [more](https://docs.astral.sh/ruff/editors/setup)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://docs.astral.sh/ruff/configuration/#config-file-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.5.3/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.3/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
  rev: v0.5.3
  rev: v0.5.7
  hooks:
    # Run the linter.
    - id: ruff
@@ -179,8 +179,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
    - id: ruff-format
```

Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/astral-sh/ruff-lsp).
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
[`ruff-action`](https://github.com/chartboost/ruff-action):
@@ -424,6 +423,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- [Dagger](https://github.com/dagger/dagger)
- [Dagster](https://github.com/dagster-io/dagster)
- Databricks ([MLflow](https://github.com/mlflow/mlflow))
- [Dify](https://github.com/langgenius/dify)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Godot](https://github.com/godotengine/godot)
- [Gradio](https://github.com/gradio-app/gradio)
@@ -434,6 +434,7 @@ Ruff is used by a number of major open-source projects and companies, including:
- Hugging Face ([Transformers](https://github.com/huggingface/transformers),
  [Datasets](https://github.com/huggingface/datasets),
  [Diffusers](https://github.com/huggingface/diffusers))
- IBM ([Qiskit](https://github.com/Qiskit/qiskit))
- ING Bank ([popmon](https://github.com/ing-bank/popmon), [probatus](https://github.com/ing-bank/probatus))
- [Ibis](https://github.com/ibis-project/ibis)
- [ivy](https://github.com/unifyai/ivy)
@@ -1,6 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_module_resolver/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

@@ -10,4 +10,12 @@ doc-valid-idents = [
"SCREAMING_SNAKE_CASE",
"SQLAlchemy",
"StackOverflow",
"PyCharm",
]

ignore-interior-mutability = [
# Interned is read-only. The wrapped `Rc` never gets updated.
"ruff_formatter::format_element::Interned",
# The expression is read-only.
"ruff_python_ast::hashable::HashableExpr",
]
@@ -12,25 +12,29 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
red_knot_module_resolver = { workspace = true }
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
red_knot_server = { workspace = true }

ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }

anyhow = { workspace = true }
chrono = { workspace = true }
clap = { workspace = true, features = ["wrap_help"] }
colored = { workspace = true }
countme = { workspace = true, features = ["enable"] }
crossbeam = { workspace = true }
ctrlc = { version = "3.4.4" }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true, features = ["release_max_level_debug"] }
tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true
BIN crates/red_knot/docs/tracing-flamegraph.png (new binary file, 40 KiB; not shown)
123 crates/red_knot/docs/tracing.md (new file)
@@ -0,0 +1,123 @@
# Tracing

Traces are a useful tool to narrow down the location of a bug or, at least, to understand why the compiler is doing a particular thing.
Note that tracing messages with severity `debug` or greater are user-facing; they should be phrased accordingly.
Tracing spans are only shown when using `-vvv`.

## Verbosity levels

The CLI supports different verbosity levels; example invocations follow the list below.

- default: Only show errors and warnings.
- `-v` activates `info!`: Shows generally useful information such as paths of configuration files and the detected platform. The message volume is modest, so this is a level you might enable in CI by default (much like `cargo build` reporting which packages are fresh).
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When processing many packages or files you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, everything is logged. Most of the output is noise and it's really slow (we dump, e.g., the entire resolution graph). It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
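For illustration, here is how the flags might look on the command line (a minimal sketch; `my-project` is a placeholder path):

```bash
red_knot --current-directory=my-project        # errors and warnings only
red_knot --current-directory=my-project -v     # plus info! messages
red_knot --current-directory=my-project -vv    # plus debug! messages and timestamps
red_knot --current-directory=my-project -vvv   # plus trace! messages and tracing spans
```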

## `RED_KNOT_LOG`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

### Examples

#### Show all debug messages

Shows debug messages from all crates.

```bash
RED_KNOT_LOG=debug
```

#### Show salsa query execution messages

Shows the salsa `execute: my_query` messages in addition to all Red Knot messages.

```bash
RED_KNOT_LOG=ruff=trace,red_knot=trace,salsa=info
```

#### Show typing traces

Only show traces for the `red_knot_python_semantic::types` module.

```bash
RED_KNOT_LOG="red_knot_python_semantic::types"
```

Note: Ensure that you use `-vvv` to see tracing spans.

#### Show messages for a single file

Shows all messages that are inside a span for a specific file.

```bash
RED_KNOT_LOG=red_knot[{file=/home/micha/astral/test/x.py}]=trace
```

**Note**: Tracing still shows all spans because tracing can't know, at the time a span is entered,
whether one of its children has the file `x.py`.

**Note**: Salsa currently logs entire memoized values (in our case, the source text and the parsed AST),
which very quickly leads to extremely long output.

## Tracing and Salsa

Be mindful about using `tracing` in Salsa queries, especially with `warn` or `error`, because it isn't guaranteed
that the query will re-execute after restoring from a persistent cache, in which case the user won't see the message.

For example, don't use `tracing` to show the user a message when generating a lint violation fails:
the message would only be shown when the file is linted for the first time, not on subsequent analysis
runs or when restoring from a persistent cache. This can be confusing for users because they
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result, or use a Salsa accumulator.
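A minimal sketch of the first pattern, assuming a hypothetical tracked query (the names `lint_file`, `Db`, `run_lint_rule`, and `LintResult` are illustrative, not Red Knot's actual API):

```rust
/// Hypothetical memoized lint result: the failure is part of the value,
/// so it is reported even when the result is reused from a cache instead
/// of the query re-executing (where a `tracing::warn!` would be skipped).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct LintResult {
    pub diagnostics: Vec<String>,
    /// Rules that could not run, e.g. because of an IO error.
    pub failed_rules: Vec<String>,
}

#[salsa::tracked]
fn lint_file(db: &dyn Db, file: File) -> LintResult {
    let mut result = LintResult {
        diagnostics: Vec::new(),
        failed_rules: Vec::new(),
    };
    match run_lint_rule(db, file) {
        Ok(diagnostics) => result.diagnostics.extend(diagnostics),
        // Recorded in the result instead of logged, so every consumer of
        // the query sees the failure, whether the value is fresh or cached.
        Err(rule_name) => result.failed_rules.push(rule_name),
    }
    result
}
```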

## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be
called **once**.

## Release builds

`trace!` events are removed in release builds.
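This matches the `release_max_level_debug` feature that this diff enables on the `tracing` dependency in `crates/red_knot/Cargo.toml`, which statically caps the log level at `debug` in release builds so that `trace!` call sites compile away:

```toml
# From crates/red_knot/Cargo.toml in this diff: trace! call sites are
# compiled out of release builds, so they cost nothing at runtime.
tracing = { workspace = true, features = ["release_max_level_debug"] }
```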

## Profiling

Red Knot writes a folded stack trace to a file named `tracing.folded` in the current directory when the environment variable `RED_KNOT_LOG_PROFILE` is set to `1` or `true`.

```bash
RED_KNOT_LOG_PROFILE=1 red_knot -- --current-directory=../test -vvv
```

You can convert the textual representation into a visual one using `inferno`.

```shell
cargo install inferno
```

```shell
# flamegraph
cat tracing.folded | inferno-flamegraph > tracing-flamegraph.svg

# flamechart
cat tracing.folded | inferno-flamegraph --flamechart > tracing-flamechart.svg
```

![Example flamegraph](./tracing-flamegraph.png)

See [`tracing-flame`](https://crates.io/crates/tracing-flame) for more details.
@@ -1,2 +0,0 @@
pub(crate) mod target_version;
pub(crate) mod verbosity;
@@ -1,34 +0,0 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        ruff_db::program::TargetVersion::from(*self).fmt(f)
    }
}

impl From<TargetVersion> for ruff_db::program::TargetVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::Py37,
            TargetVersion::Py38 => Self::Py38,
            TargetVersion::Py39 => Self::Py39,
            TargetVersion::Py310 => Self::Py310,
            TargetVersion::Py311 => Self::Py311,
            TargetVersion::Py312 => Self::Py312,
            TargetVersion::Py313 => Self::Py313,
        }
    }
}
@@ -1,34 +0,0 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub(crate) enum VerbosityLevel {
    Info,
    Debug,
    Trace,
}

/// Logging flags to `#[command(flatten)]` into your CLI
#[derive(clap::Args, Debug, Clone, Default)]
#[command(about = None, long_about = None)]
pub(crate) struct Verbosity {
    #[arg(
        long,
        short = 'v',
        help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
        action = clap::ArgAction::Count,
        global = true,
    )]
    verbose: u8,
}

impl Verbosity {
    /// Returns the verbosity level based on the number of `-v` flags.
    ///
    /// Returns `None` if the user did not specify any verbosity flags.
    pub(crate) fn level(&self) -> Option<VerbosityLevel> {
        match self.verbose {
            0 => None,
            1 => Some(VerbosityLevel::Info),
            2 => Some(VerbosityLevel::Debug),
            _ => Some(VerbosityLevel::Trace),
        }
    }
}
@@ -1,200 +0,0 @@
|
||||
use std::panic::{AssertUnwindSafe, RefUnwindSafe};
|
||||
use std::sync::Arc;
|
||||
|
||||
use salsa::{Cancelled, Database, DbWithJar};
|
||||
|
||||
use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
|
||||
use red_knot_python_semantic::{Db as SemanticDb, Jar as SemanticJar};
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::program::{Program, ProgramSettings};
|
||||
use ruff_db::system::System;
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
|
||||
|
||||
use crate::lint::{lint_semantic, lint_syntax, unwind_if_cancelled, Diagnostics};
|
||||
use crate::watch::{FileChangeKind, FileWatcherChange};
|
||||
use crate::workspace::{check_file, Package, Workspace, WorkspaceMetadata};
|
||||
|
||||
pub trait Db: DbWithJar<Jar> + SemanticDb + Upcast<dyn SemanticDb> {}
|
||||
|
||||
#[salsa::jar(db=Db)]
|
||||
pub struct Jar(
|
||||
Workspace,
|
||||
Package,
|
||||
lint_syntax,
|
||||
lint_semantic,
|
||||
unwind_if_cancelled,
|
||||
);
|
||||
|
||||
#[salsa::db(SourceJar, ResolverJar, SemanticJar, Jar)]
|
||||
pub struct RootDatabase {
|
||||
workspace: Option<Workspace>,
|
||||
storage: salsa::Storage<RootDatabase>,
|
||||
files: Files,
|
||||
system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
|
||||
}
|
||||
|
||||
impl RootDatabase {
|
||||
pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
|
||||
where
|
||||
S: System + 'static + Send + Sync + RefUnwindSafe,
|
||||
{
|
||||
let mut db = Self {
|
||||
workspace: None,
|
||||
storage: salsa::Storage::default(),
|
||||
files: Files::default(),
|
||||
system: Arc::new(system),
|
||||
};
|
||||
|
||||
let workspace = Workspace::from_metadata(&db, workspace);
|
||||
// Initialize the `Program` singleton
|
||||
Program::from_settings(&db, settings);
|
||||
|
||||
db.workspace = Some(workspace);
|
||||
db
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> Workspace {
|
||||
// SAFETY: The workspace is always initialized in `new`.
|
||||
self.workspace.unwrap()
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, changes))]
|
||||
pub fn apply_changes(&mut self, changes: Vec<FileWatcherChange>) {
|
||||
let workspace = self.workspace();
|
||||
let workspace_path = workspace.root(self).to_path_buf();
|
||||
|
||||
// TODO: Optimize change tracking by only reloading a package if a file that is part of the package was changed.
|
||||
let mut structural_change = false;
|
||||
for change in changes {
|
||||
if matches!(
|
||||
change.path.file_name(),
|
||||
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
|
||||
) {
|
||||
// Changes to ignore files or settings can change the workspace structure or add/remove files
|
||||
// from packages.
|
||||
structural_change = true;
|
||||
} else {
|
||||
match change.kind {
|
||||
FileChangeKind::Created => {
|
||||
// Reload the package when a new file was added. This is necessary because the file might be excluded
|
||||
// by a gitignore.
|
||||
if workspace.package(self, &change.path).is_some() {
|
||||
structural_change = true;
|
||||
}
|
||||
}
|
||||
FileChangeKind::Modified => {}
|
||||
FileChangeKind::Deleted => {
|
||||
if let Some(package) = workspace.package(self, &change.path) {
|
||||
if let Some(file) = system_path_to_file(self, &change.path) {
|
||||
package.remove_file(self, file);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
File::touch_path(self, &change.path);
|
||||
}
|
||||
|
||||
if structural_change {
|
||||
match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
|
||||
Ok(metadata) => {
|
||||
tracing::debug!("Reload workspace after structural change.");
|
||||
// TODO: Handle changes in the program settings.
|
||||
workspace.reload(self, metadata);
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::error!("Failed to load workspace, keep old workspace: {error}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks all open files in the workspace and its dependencies.
|
||||
pub fn check(&self) -> Result<Vec<String>, Cancelled> {
|
||||
self.with_db(|db| db.workspace().check(db))
|
||||
}
|
||||
|
||||
pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
|
||||
self.with_db(|db| check_file(db, file))
|
||||
}
|
||||
|
||||
pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
|
||||
where
|
||||
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
|
||||
{
|
||||
// The `AssertUnwindSafe` here looks scary, but is a consequence of Salsa's design.
|
||||
// Salsa uses panics to implement cancellation and to recover from cycles. However, the Salsa
|
||||
// storage isn't `UnwindSafe` or `RefUnwindSafe` because its dependencies `DashMap` and `parking_lot::*` aren't
|
||||
// unwind safe.
|
||||
//
|
||||
// Having to use `AssertUnwindSafe` isn't as big a deal as it might seem because
|
||||
// the `UnwindSafe` and `RefUnwindSafe` traits are designed to catch logical bugs.
|
||||
// They don't protect against [UB](https://internals.rust-lang.org/t/pre-rfc-deprecating-unwindsafe/15974).
|
||||
// On top of that, `Cancelled` only catches specific Salsa-panics and propagates all other panics.
|
||||
//
|
||||
// That still leaves us with possible logical bugs in two sources:
|
||||
// * In Salsa itself: This must be considered a bug in Salsa and needs fixing upstream.
|
||||
// Reviewing Salsa code specifically around unwind safety seems doable.
|
||||
// * Our code: This is the main concern. Luckily, it only involves code that uses internal mutability
|
||||
// and calls into Salsa queries when mutating the internal state. Using `AssertUnwindSafe`
|
||||
// certainly makes it harder to catch these issues in our user code.
|
||||
//
|
||||
// For now, this is the only solution at hand unless Salsa decides to change its design.
|
||||
// [Zulip support thread](https://salsa.zulipchat.com/#narrow/stream/145099-general/topic/How.20to.20use.20.60Cancelled.3A.3Acatch.60)
|
||||
let db = &AssertUnwindSafe(self);
|
||||
Cancelled::catch(|| f(db))
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SemanticDb> for RootDatabase {
|
||||
fn upcast(&self) -> &(dyn SemanticDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDb> for RootDatabase {
|
||||
fn upcast(&self) -> &(dyn SourceDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn ResolverDb> for RootDatabase {
|
||||
fn upcast(&self) -> &(dyn ResolverDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl ResolverDb for RootDatabase {}
|
||||
|
||||
impl SemanticDb for RootDatabase {}
|
||||
|
||||
impl SourceDb for RootDatabase {
|
||||
fn vendored(&self) -> &VendoredFileSystem {
|
||||
vendored_typeshed_stubs()
|
||||
}
|
||||
|
||||
fn system(&self) -> &dyn System {
|
||||
&*self.system
|
||||
}
|
||||
|
||||
fn files(&self) -> &Files {
|
||||
&self.files
|
||||
}
|
||||
}
|
||||
|
||||
impl Database for RootDatabase {}
|
||||
|
||||
impl Db for RootDatabase {}
|
||||
|
||||
impl salsa::ParallelDatabase for RootDatabase {
|
||||
fn snapshot(&self) -> salsa::Snapshot<Self> {
|
||||
salsa::Snapshot::new(Self {
|
||||
workspace: self.workspace,
|
||||
storage: self.storage.snapshot(),
|
||||
files: self.files.snapshot(),
|
||||
system: self.system.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
254 crates/red_knot/src/logging.rs (new file)
@@ -0,0 +1,254 @@
|
||||
//! Sets up logging for Red Knot
|
||||
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use std::fmt;
|
||||
use std::fs::File;
|
||||
use std::io::BufWriter;
|
||||
use tracing::log::LevelFilter;
|
||||
use tracing::{Event, Subscriber};
|
||||
use tracing_subscriber::fmt::format::Writer;
|
||||
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
|
||||
use tracing_subscriber::registry::LookupSpan;
|
||||
use tracing_subscriber::EnvFilter;
|
||||
|
||||
/// Logging flags to `#[command(flatten)]` into your CLI
|
||||
#[derive(clap::Args, Debug, Clone, Default)]
|
||||
#[command(about = None, long_about = None)]
|
||||
pub(crate) struct Verbosity {
|
||||
#[arg(
|
||||
long,
|
||||
short = 'v',
|
||||
help = "Use verbose output (or `-vv` and `-vvv` for more verbose output)",
|
||||
action = clap::ArgAction::Count,
|
||||
global = true,
|
||||
)]
|
||||
verbose: u8,
|
||||
}
|
||||
|
||||
impl Verbosity {
|
||||
/// Returns the verbosity level based on the number of `-v` flags.
|
||||
///
|
||||
/// Returns [`VerbosityLevel::Default`] if the user did not specify any verbosity flags.
|
||||
pub(crate) fn level(&self) -> VerbosityLevel {
|
||||
match self.verbose {
|
||||
0 => VerbosityLevel::Default,
|
||||
1 => VerbosityLevel::Verbose,
|
||||
2 => VerbosityLevel::ExtraVerbose,
|
||||
_ => VerbosityLevel::Trace,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub(crate) enum VerbosityLevel {
|
||||
/// Default output level. Only shows Ruff and Red Knot events up to the [`WARN`](tracing::Level::WARN) level.
|
||||
Default,
|
||||
|
||||
/// Enables verbose output. Emits Ruff and Red Knot events up to the [`INFO`](tracing::Level::INFO) level.
|
||||
/// Corresponds to `-v`.
|
||||
Verbose,
|
||||
|
||||
/// Enables a more verbose tracing format and emits Ruff and Red Knot events up to the [`DEBUG`](tracing::Level::DEBUG) level.
/// Corresponds to `-vv`.
|
||||
ExtraVerbose,
|
||||
|
||||
/// Enables all tracing events and uses a tree-like output format. Corresponds to `-vvv`.
|
||||
Trace,
|
||||
}
|
||||
|
||||
impl VerbosityLevel {
|
||||
const fn level_filter(self) -> LevelFilter {
|
||||
match self {
|
||||
VerbosityLevel::Default => LevelFilter::Warn,
|
||||
VerbosityLevel::Verbose => LevelFilter::Info,
|
||||
VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
|
||||
VerbosityLevel::Trace => LevelFilter::Trace,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) const fn is_trace(self) -> bool {
|
||||
matches!(self, VerbosityLevel::Trace)
|
||||
}
|
||||
|
||||
pub(crate) const fn is_extra_verbose(self) -> bool {
|
||||
matches!(self, VerbosityLevel::ExtraVerbose)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuard> {
|
||||
use tracing_subscriber::prelude::*;
|
||||
|
||||
// The `RED_KNOT_LOG` environment variable overrides the default log level.
|
||||
let filter = if let Ok(log_env_variable) = std::env::var("RED_KNOT_LOG") {
|
||||
EnvFilter::builder()
|
||||
.parse(log_env_variable)
|
||||
.context("Failed to parse directives specified in RED_KNOT_LOG environment variable.")?
|
||||
} else {
|
||||
match level {
|
||||
VerbosityLevel::Default => {
|
||||
// Show warning traces
|
||||
EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
|
||||
}
|
||||
level => {
|
||||
let level_filter = level.level_filter();
|
||||
|
||||
// Show info|debug|trace events, but allow `RED_KNOT_LOG` to override
|
||||
let filter = EnvFilter::default().add_directive(
|
||||
format!("red_knot={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
);
|
||||
|
||||
filter.add_directive(
|
||||
format!("ruff={level_filter}")
|
||||
.parse()
|
||||
.expect("Hardcoded directive to be valid"),
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let (profiling_layer, guard) = setup_profile();
|
||||
|
||||
let registry = tracing_subscriber::registry()
|
||||
.with(filter)
|
||||
.with(profiling_layer);
|
||||
|
||||
if level.is_trace() {
|
||||
let subscriber = registry.with(
|
||||
tracing_tree::HierarchicalLayer::default()
|
||||
.with_indent_lines(true)
|
||||
.with_indent_amount(2)
|
||||
.with_bracketed_fields(true)
|
||||
.with_thread_ids(true)
|
||||
.with_targets(true)
|
||||
.with_writer(std::io::stderr)
|
||||
.with_timer(tracing_tree::time::Uptime::default()),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
} else {
|
||||
let subscriber = registry.with(
|
||||
tracing_subscriber::fmt::layer()
|
||||
.event_format(RedKnotFormat {
|
||||
display_level: true,
|
||||
display_timestamp: level.is_extra_verbose(),
|
||||
show_spans: false,
|
||||
})
|
||||
.with_writer(std::io::stderr),
|
||||
);
|
||||
|
||||
subscriber.init();
|
||||
}
|
||||
|
||||
Ok(TracingGuard {
|
||||
_flame_guard: guard,
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn setup_profile<S>() -> (
|
||||
Option<tracing_flame::FlameLayer<S, BufWriter<File>>>,
|
||||
Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
)
|
||||
where
|
||||
S: Subscriber + for<'span> LookupSpan<'span>,
|
||||
{
|
||||
if let Ok("1" | "true") = std::env::var("RED_KNOT_LOG_PROFILE").as_deref() {
|
||||
let (layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded")
|
||||
.expect("Flame layer to be created");
|
||||
(Some(layer), Some(guard))
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TracingGuard {
|
||||
_flame_guard: Option<tracing_flame::FlushGuard<BufWriter<File>>>,
|
||||
}
|
||||
|
||||
struct RedKnotFormat {
|
||||
display_timestamp: bool,
|
||||
display_level: bool,
|
||||
show_spans: bool,
|
||||
}
|
||||
|
||||
/// See <https://docs.rs/tracing-subscriber/0.3.18/src/tracing_subscriber/fmt/format/mod.rs.html#1026-1156>
|
||||
impl<S, N> FormatEvent<S, N> for RedKnotFormat
|
||||
where
|
||||
S: Subscriber + for<'a> LookupSpan<'a>,
|
||||
N: for<'a> FormatFields<'a> + 'static,
|
||||
{
|
||||
fn format_event(
|
||||
&self,
|
||||
ctx: &FmtContext<'_, S, N>,
|
||||
mut writer: Writer<'_>,
|
||||
event: &Event<'_>,
|
||||
) -> fmt::Result {
|
||||
let meta = event.metadata();
|
||||
let ansi = writer.has_ansi_escapes();
|
||||
|
||||
if self.display_timestamp {
|
||||
let timestamp = chrono::Local::now()
|
||||
.format("%Y-%m-%d %H:%M:%S.%f")
|
||||
.to_string();
|
||||
if ansi {
|
||||
write!(writer, "{} ", timestamp.dimmed())?;
|
||||
} else {
|
||||
write!(
|
||||
writer,
|
||||
"{} ",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S.%f")
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.display_level {
|
||||
let level = meta.level();
|
||||
// Same colors as tracing
|
||||
if ansi {
|
||||
let formatted_level = level.to_string();
|
||||
match *level {
|
||||
tracing::Level::TRACE => {
|
||||
write!(writer, "{} ", formatted_level.purple().bold())?;
|
||||
}
|
||||
tracing::Level::DEBUG => write!(writer, "{} ", formatted_level.blue().bold())?,
|
||||
tracing::Level::INFO => write!(writer, "{} ", formatted_level.green().bold())?,
|
||||
tracing::Level::WARN => write!(writer, "{} ", formatted_level.yellow().bold())?,
|
||||
tracing::Level::ERROR => write!(writer, "{} ", level.to_string().red().bold())?,
|
||||
}
|
||||
} else {
|
||||
write!(writer, "{level} ")?;
|
||||
}
|
||||
}
|
||||
|
||||
if self.show_spans {
|
||||
let span = event.parent();
|
||||
let mut seen = false;
|
||||
|
||||
let span = span
|
||||
.and_then(|id| ctx.span(id))
|
||||
.or_else(|| ctx.lookup_current());
|
||||
|
||||
let scope = span.into_iter().flat_map(|span| span.scope().from_root());
|
||||
|
||||
for span in scope {
|
||||
seen = true;
|
||||
if ansi {
|
||||
write!(writer, "{}:", span.metadata().name().bold())?;
|
||||
} else {
|
||||
write!(writer, "{}:", span.metadata().name())?;
|
||||
}
|
||||
}
|
||||
|
||||
if seen {
|
||||
writer.write_char(' ')?;
|
||||
}
|
||||
}
|
||||
|
||||
ctx.field_format().format_fields(writer.by_ref(), event)?;
|
||||
|
||||
writeln!(writer)
|
||||
}
|
||||
}
|
||||
@@ -1,35 +1,39 @@
|
||||
use std::process::{ExitCode, Termination};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use clap::Parser;
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use salsa::ParallelDatabase;
|
||||
use tracing::subscriber::Interest;
|
||||
use tracing::{Level, Metadata};
|
||||
use tracing_subscriber::filter::LevelFilter;
|
||||
use tracing_subscriber::layer::{Context, Filter, SubscriberExt};
|
||||
use tracing_subscriber::{Layer, Registry};
|
||||
use tracing_tree::time::Uptime;
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
|
||||
use red_knot::db::RootDatabase;
|
||||
use red_knot::watch::FileWatcher;
|
||||
use red_knot::watch::FileWatcherChange;
|
||||
use red_knot::workspace::WorkspaceMetadata;
|
||||
use ruff_db::program::{ProgramSettings, SearchPathSettings};
|
||||
use ruff_db::system::{OsSystem, System, SystemPathBuf};
|
||||
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
|
||||
use red_knot_server::run_server;
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::site_packages::VirtualEnvironment;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::WorkspaceWatcher;
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
|
||||
use target_version::TargetVersion;
|
||||
|
||||
use cli::target_version::TargetVersion;
|
||||
use cli::verbosity::{Verbosity, VerbosityLevel};
|
||||
use crate::logging::{setup_tracing, Verbosity};
|
||||
|
||||
mod cli;
|
||||
mod logging;
|
||||
mod target_version;
|
||||
mod verbosity;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(
|
||||
author,
|
||||
name = "red-knot",
|
||||
about = "An experimental multifile analysis backend for Ruff"
|
||||
about = "An extremely fast Python type checker."
|
||||
)]
|
||||
#[command(version)]
|
||||
struct Args {
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Option<Command>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Changes the current working directory.",
|
||||
@@ -38,6 +42,17 @@ struct Args {
|
||||
)]
|
||||
current_directory: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Path to the virtual environment the project uses",
|
||||
long_help = "\
|
||||
Path to the virtual environment the project uses. \
|
||||
If provided, red-knot will use the `site-packages` directory of this virtual environment \
|
||||
to resolve type information for the project's third-party dependencies.",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
venv_path: Option<SystemPathBuf>,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "DIRECTORY",
|
||||
@@ -52,60 +67,126 @@ struct Args {
|
||||
)]
|
||||
extra_search_path: Vec<SystemPathBuf>,
|
||||
|
||||
#[arg(long, help = "Python version to assume when resolving types", default_value_t = TargetVersion::default(), value_name="VERSION")]
|
||||
#[arg(
|
||||
long,
|
||||
help = "Python version to assume when resolving types",
|
||||
default_value_t = TargetVersion::default(),
|
||||
value_name="VERSION")
|
||||
]
|
||||
target_version: TargetVersion,
|
||||
|
||||
#[clap(flatten)]
|
||||
verbosity: Verbosity,
|
||||
|
||||
#[arg(
|
||||
long,
|
||||
help = "Run in watch mode by re-running whenever files change",
|
||||
short = 'W'
|
||||
)]
|
||||
watch: bool,
|
||||
}
|
||||
|
||||
#[allow(
|
||||
clippy::print_stdout,
|
||||
clippy::unnecessary_wraps,
|
||||
clippy::print_stderr,
|
||||
clippy::dbg_macro
|
||||
)]
|
||||
pub fn main() -> anyhow::Result<()> {
|
||||
#[derive(Debug, clap::Subcommand)]
|
||||
pub enum Command {
|
||||
/// Start the language server
|
||||
Server,
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stdout, clippy::unnecessary_wraps, clippy::print_stderr)]
|
||||
pub fn main() -> ExitStatus {
|
||||
run().unwrap_or_else(|error| {
|
||||
use std::io::Write;
|
||||
|
||||
// Use `writeln` instead of `eprintln` to avoid panicking when the stderr pipe is broken.
|
||||
let mut stderr = std::io::stderr().lock();
|
||||
|
||||
// This communicates that this isn't a linter error but Red Knot itself hard-errored for
|
||||
// some reason (e.g. failed to resolve the configuration)
|
||||
writeln!(stderr, "{}", "Red Knot failed".red().bold()).ok();
|
||||
// Currently we generally only see one error, but e.g. with io errors when resolving
|
||||
// the configuration it is helpful to chain errors ("resolving configuration failed" ->
|
||||
// "failed to read file: subdir/pyproject.toml")
|
||||
for cause in error.chain() {
|
||||
writeln!(stderr, " {} {cause}", "Cause:".bold()).ok();
|
||||
}
|
||||
|
||||
ExitStatus::Error
|
||||
})
|
||||
}
|
||||
|
||||
fn run() -> anyhow::Result<ExitStatus> {
|
||||
let Args {
|
||||
command,
|
||||
current_directory,
|
||||
custom_typeshed_dir,
|
||||
extra_search_path: extra_paths,
|
||||
venv_path,
|
||||
target_version,
|
||||
verbosity,
|
||||
watch,
|
||||
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
|
||||
|
||||
let verbosity = verbosity.level();
|
||||
countme::enable(verbosity == Some(VerbosityLevel::Trace));
|
||||
setup_tracing(verbosity);
|
||||
if matches!(command, Some(Command::Server)) {
|
||||
return run_server().map(|()| ExitStatus::Success);
|
||||
}
|
||||
|
||||
let cwd = if let Some(cwd) = current_directory {
|
||||
let canonicalized = cwd.as_utf8_path().canonicalize_utf8().unwrap();
|
||||
SystemPathBuf::from_utf8_path_buf(canonicalized)
|
||||
} else {
|
||||
let cwd = std::env::current_dir().unwrap();
|
||||
SystemPathBuf::from_path_buf(cwd).unwrap()
|
||||
let verbosity = verbosity.level();
|
||||
countme::enable(verbosity.is_trace());
|
||||
let _guard = setup_tracing(verbosity)?;
|
||||
|
||||
// The base path that all CLI arguments are resolved relative to.
|
||||
let cli_base_path = {
|
||||
let cwd = std::env::current_dir().context("Failed to get the current working directory")?;
|
||||
SystemPathBuf::from_path_buf(cwd)
|
||||
.map_err(|path| {
|
||||
anyhow!(
|
||||
"The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
|
||||
path.display()
|
||||
)
|
||||
})?
|
||||
};
|
||||
|
||||
let cwd = current_directory
|
||||
.map(|cwd| {
|
||||
if cwd.as_std_path().is_dir() {
|
||||
Ok(SystemPath::absolute(&cwd, &cli_base_path))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Provided current-directory path '{cwd}' is not a directory."
|
||||
))
|
||||
}
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cli_base_path.clone());
|
||||
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let workspace_metadata =
|
||||
WorkspaceMetadata::from_path(system.current_directory(), &system).unwrap();
|
||||
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
|
||||
|
||||
// TODO: Verify the remaining search path settings eagerly.
|
||||
let site_packages = venv_path
|
||||
.map(|path| {
|
||||
VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
|
||||
.and_then(|venv| venv.site_packages_directories(&system))
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or_default();
|
||||
|
||||
// TODO: Respect the settings from the workspace metadata when resolving the program settings.
|
||||
let program_settings = ProgramSettings {
|
||||
target_version: target_version.into(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths,
|
||||
workspace_root: workspace_metadata.root().to_path_buf(),
|
||||
src_root: workspace_metadata.root().to_path_buf(),
|
||||
custom_typeshed: custom_typeshed_dir,
|
||||
site_packages: None,
|
||||
site_packages,
|
||||
},
|
||||
};
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(verbosity);
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
@@ -117,125 +198,158 @@ pub fn main() -> anyhow::Result<()> {
|
||||
}
|
||||
})?;
|
||||
|
||||
let file_changes_notifier = main_loop.file_changes_notifier();
|
||||
let exit_status = if watch {
|
||||
main_loop.watch(&mut db)?
|
||||
} else {
|
||||
main_loop.run(&mut db)
|
||||
};
|
||||
|
||||
// Watch for file changes and re-trigger the analysis.
|
||||
let mut file_watcher = FileWatcher::new(move |changes| {
|
||||
file_changes_notifier.notify(changes);
|
||||
})?;
|
||||
tracing::trace!("Counts for entire CLI run:\n{}", countme::get_all());
|
||||
|
||||
file_watcher.watch_folder(db.workspace().root(&db).as_std_path())?;
|
||||
std::mem::forget(db);
|
||||
|
||||
main_loop.run(&mut db);
|
||||
Ok(exit_status)
|
||||
}
|
||||
|
||||
println!("{}", countme::get_all());
|
||||
#[derive(Copy, Clone)]
|
||||
pub enum ExitStatus {
|
||||
/// Checking was successful and there were no errors.
|
||||
Success = 0,
|
||||
|
||||
Ok(())
|
||||
/// Checking was successful but there were errors.
|
||||
Failure = 1,
|
||||
|
||||
/// Checking failed.
|
||||
Error = 2,
|
||||
}
|
||||
|
||||
impl Termination for ExitStatus {
|
||||
fn report(self) -> ExitCode {
|
||||
ExitCode::from(self as u8)
|
||||
}
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
verbosity: Option<VerbosityLevel>,
|
||||
orchestrator: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
/// Receiver for the messages sent **to** the main loop.
|
||||
receiver: crossbeam_channel::Receiver<MainLoopMessage>,
|
||||
|
||||
/// The file system watcher, if running in watch mode.
|
||||
watcher: Option<WorkspaceWatcher>,
|
||||
}
|
||||
|
||||
impl MainLoop {
|
||||
fn new(verbosity: Option<VerbosityLevel>) -> (Self, MainLoopCancellationToken) {
|
||||
let (orchestrator_sender, orchestrator_receiver) = crossbeam_channel::bounded(1);
|
||||
let (main_loop_sender, main_loop_receiver) = crossbeam_channel::bounded(1);
|
||||
|
||||
let mut orchestrator = Orchestrator {
|
||||
receiver: orchestrator_receiver,
|
||||
main_loop: main_loop_sender.clone(),
|
||||
revision: 0,
|
||||
};
|
||||
|
||||
std::thread::spawn(move || {
|
||||
orchestrator.run();
|
||||
});
|
||||
fn new() -> (Self, MainLoopCancellationToken) {
|
||||
let (sender, receiver) = crossbeam_channel::bounded(10);
|
||||
|
||||
(
|
||||
Self {
|
||||
verbosity,
|
||||
orchestrator: orchestrator_sender,
|
||||
receiver: main_loop_receiver,
|
||||
},
|
||||
MainLoopCancellationToken {
|
||||
sender: main_loop_sender,
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
},
|
||||
MainLoopCancellationToken { sender },
|
||||
)
|
||||
}
|
||||
|
||||
fn file_changes_notifier(&self) -> FileChangesNotifier {
|
||||
FileChangesNotifier {
|
||||
sender: self.orchestrator.clone(),
|
||||
}
|
||||
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> {
|
||||
tracing::debug!("Starting watch mode");
|
||||
let sender = self.sender.clone();
|
||||
let watcher = watch::directory_watcher(move |event| {
|
||||
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
|
||||
})?;
|
||||
|
||||
self.watcher = Some(WorkspaceWatcher::new(watcher, db));
|
||||
|
||||
self.run(db);
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
#[allow(clippy::print_stderr)]
|
||||
fn run(self, db: &mut RootDatabase) {
|
||||
self.orchestrator.send(OrchestratorMessage::Run).unwrap();
|
||||
fn run(mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
|
||||
for message in &self.receiver {
|
||||
tracing::trace!("Main Loop: Tick");
|
||||
let result = self.main_loop(db);
|
||||
|
||||
tracing::debug!("Exiting main loop");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus {
|
||||
// Schedule the first check.
|
||||
tracing::debug!("Starting main loop");
|
||||
|
||||
let mut revision = 0u64;
|
||||
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
MainLoopMessage::CheckWorkspace { revision } => {
|
||||
MainLoopMessage::CheckWorkspace => {
|
||||
let db = db.snapshot();
|
||||
let orchestrator = self.orchestrator.clone();
|
||||
let sender = self.sender.clone();
|
||||
|
||||
// Spawn a new task that checks the workspace. This needs to be done in a separate thread
|
||||
// to prevent blocking the main loop here.
|
||||
rayon::spawn(move || {
|
||||
if let Ok(result) = db.check() {
|
||||
orchestrator
|
||||
.send(OrchestratorMessage::CheckCompleted {
|
||||
diagnostics: result,
|
||||
revision,
|
||||
})
|
||||
// Send the result back to the main loop for printing.
|
||||
sender
|
||||
.send(MainLoopMessage::CheckCompleted { result, revision })
|
||||
.unwrap();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
MainLoopMessage::CheckCompleted {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let has_diagnostics = !result.is_empty();
|
||||
if check_revision == revision {
|
||||
for diagnostic in result {
|
||||
tracing::error!("{}", diagnostic);
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return if has_diagnostics {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
};
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
revision += 1;
|
||||
// Automatically cancels any pending queries and waits for them to complete.
|
||||
db.apply_changes(changes);
|
||||
}
|
||||
MainLoopMessage::CheckCompleted(diagnostics) => {
|
||||
eprintln!("{}", diagnostics.join("\n"));
|
||||
if self.verbosity == Some(VerbosityLevel::Trace) {
|
||||
eprintln!("{}", countme::get_all());
|
||||
if let Some(watcher) = self.watcher.as_mut() {
|
||||
watcher.update(db);
|
||||
}
|
||||
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
|
||||
}
|
||||
MainLoopMessage::Exit => {
|
||||
if self.verbosity == Some(VerbosityLevel::Trace) {
|
||||
eprintln!("{}", countme::get_all());
|
||||
}
|
||||
return;
|
||||
// Cancel any pending queries and wait for them to complete.
|
||||
// TODO: Don't use Salsa internal APIs
|
||||
// [Zulip-Thread](https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries)
|
||||
let _ = db.zalsa_mut();
|
||||
return ExitStatus::Success;
|
||||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Waiting for next main loop message.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for MainLoop {
|
||||
fn drop(&mut self) {
|
||||
self.orchestrator
|
||||
.send(OrchestratorMessage::Shutdown)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct FileChangesNotifier {
|
||||
sender: crossbeam_channel::Sender<OrchestratorMessage>,
|
||||
}
|
||||
|
||||
impl FileChangesNotifier {
|
||||
fn notify(&self, changes: Vec<FileWatcherChange>) {
|
||||
self.sender
|
||||
.send(OrchestratorMessage::FileChanges(changes))
|
||||
.unwrap();
|
||||
ExitStatus::Success
|
||||
}
|
||||
}
|
||||
|
||||
@@ -250,170 +364,11 @@ impl MainLoopCancellationToken {
|
||||
}
|
||||
}
|
||||
|
||||
struct Orchestrator {
|
||||
/// Sends messages to the main loop.
|
||||
main_loop: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
/// Receives messages from the main loop.
|
||||
receiver: crossbeam_channel::Receiver<OrchestratorMessage>,
|
||||
revision: usize,
|
||||
}
|
||||
|
||||
impl Orchestrator {
|
||||
#[allow(clippy::print_stderr)]
|
||||
fn run(&mut self) {
|
||||
while let Ok(message) = self.receiver.recv() {
|
||||
match message {
|
||||
OrchestratorMessage::Run => {
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckWorkspace {
|
||||
revision: self.revision,
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
OrchestratorMessage::CheckCompleted {
|
||||
diagnostics,
|
||||
revision,
|
||||
} => {
|
||||
// Only take the diagnostics if they are for the latest revision.
|
||||
if self.revision == revision {
|
||||
self.main_loop
|
||||
.send(MainLoopMessage::CheckCompleted(diagnostics))
|
||||
.unwrap();
|
||||
} else {
|
||||
tracing::debug!("Discarding diagnostics for outdated revision {revision} (current: {}).", self.revision);
|
||||
}
|
||||
}
|
||||
|
||||
OrchestratorMessage::FileChanges(changes) => {
|
||||
// Request cancellation, but wait until all analysis tasks have completed to
|
||||
// avoid stale messages in the next main loop.
|
||||
|
||||
self.revision += 1;
|
||||
self.debounce_changes(changes);
|
||||
}
|
||||
OrchestratorMessage::Shutdown => {
|
||||
return self.shutdown();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn debounce_changes(&self, mut changes: Vec<FileWatcherChange>) {
|
||||
loop {
|
||||
// Consume possibly incoming file change messages before running a new analysis, but don't wait for more than 10 ms after the last change.
|
||||
crossbeam_channel::select! {
|
||||
recv(self.receiver) -> message => {
|
||||
match message {
|
||||
Ok(OrchestratorMessage::Shutdown) => {
|
||||
return self.shutdown();
|
||||
}
|
||||
Ok(OrchestratorMessage::FileChanges(file_changes)) => {
|
||||
changes.extend(file_changes);
|
||||
}
|
||||
|
||||
Ok(OrchestratorMessage::CheckCompleted { .. })=> {
|
||||
// disregard any outdated completion message.
|
||||
}
|
||||
Ok(OrchestratorMessage::Run) => unreachable!("The orchestrator is already running."),
|
||||
|
||||
Err(_) => {
|
||||
// There are no more senders, no point in waiting for more messages
|
||||
return;
|
||||
}
|
||||
}
|
||||
},
|
||||
default(std::time::Duration::from_millis(10)) => {
|
||||
// No more file changes after 10 ms, send the changes and schedule a new analysis
|
||||
self.main_loop.send(MainLoopMessage::ApplyChanges(changes)).unwrap();
|
||||
self.main_loop.send(MainLoopMessage::CheckWorkspace { revision: self.revision}).unwrap();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn shutdown(&self) {
|
||||
tracing::trace!("Shutting down orchestrator.");
|
||||
}
|
||||
}
|
||||
|
||||
/// Message sent from the orchestrator to the main loop.
|
||||
#[derive(Debug)]
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace { revision: usize },
|
||||
CheckCompleted(Vec<String>),
|
||||
ApplyChanges(Vec<FileWatcherChange>),
|
||||
CheckWorkspace,
|
||||
CheckCompleted { result: Vec<String>, revision: u64 },
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
Exit,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum OrchestratorMessage {
|
||||
Run,
|
||||
Shutdown,
|
||||
|
||||
CheckCompleted {
|
||||
diagnostics: Vec<String>,
|
||||
revision: usize,
|
||||
},
|
||||
|
||||
FileChanges(Vec<FileWatcherChange>),
|
||||
}

fn setup_tracing(verbosity: Option<VerbosityLevel>) {
    let trace_level = match verbosity {
        None => Level::WARN,
        Some(VerbosityLevel::Info) => Level::INFO,
        Some(VerbosityLevel::Debug) => Level::DEBUG,
        Some(VerbosityLevel::Trace) => Level::TRACE,
    };

    let subscriber = Registry::default().with(
        tracing_tree::HierarchicalLayer::default()
            .with_indent_lines(true)
            .with_indent_amount(2)
            .with_bracketed_fields(true)
            .with_thread_ids(true)
            .with_targets(true)
            .with_writer(|| Box::new(std::io::stderr()))
            .with_timer(Uptime::default())
            .with_filter(LoggingFilter { trace_level }),
    );

    tracing::subscriber::set_global_default(subscriber).unwrap();
}

struct LoggingFilter {
    trace_level: Level,
}

impl LoggingFilter {
    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
        let filter = if meta.target().starts_with("red_knot") || meta.target().starts_with("ruff") {
            self.trace_level
        } else {
            Level::INFO
        };

        meta.level() <= &filter
    }
}

impl<S> Filter<S> for LoggingFilter {
    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
        self.is_enabled(meta)
    }

    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
        if self.is_enabled(meta) {
            Interest::always()
        } else {
            Interest::never()
        }
    }

    fn max_level_hint(&self) -> Option<LevelFilter> {
        Some(LevelFilter::from_level(self.trace_level))
    }
}
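The comparison `meta.level() <= &filter` in `is_enabled` works because `tracing::Level` orders levels by verbosity, with `ERROR` smallest and `TRACE` largest. A tiny self-contained check of that assumption:

use tracing::Level;

fn main() {
    // ERROR < WARN < INFO < DEBUG < TRACE in `tracing`'s ordering.
    assert!(Level::ERROR < Level::WARN);
    assert!(Level::INFO < Level::DEBUG);

    // With an INFO filter, WARN events pass and TRACE events are dropped,
    // mirroring `meta.level() <= &filter` above.
    let filter = Level::INFO;
    assert!(Level::WARN <= filter);
    assert!(!(Level::TRACE <= filter));
}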

crates/red_knot/src/target_version.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
/// Enumeration of all supported Python versions
///
/// TODO: unify with the `PythonVersion` enum in the linter/formatter crates?
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
pub enum TargetVersion {
    Py37,
    #[default]
    Py38,
    Py39,
    Py310,
    Py311,
    Py312,
    Py313,
}

impl TargetVersion {
    const fn as_str(self) -> &'static str {
        match self {
            Self::Py37 => "py37",
            Self::Py38 => "py38",
            Self::Py39 => "py39",
            Self::Py310 => "py310",
            Self::Py311 => "py311",
            Self::Py312 => "py312",
            Self::Py313 => "py313",
        }
    }
}

impl std::fmt::Display for TargetVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}

impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}
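Because the enum derives `clap::ValueEnum`, clap derives kebab-case value names that match `as_str` (`Py310` is accepted as `py310`). A hedged sketch of how a CLI built on it would parse, assuming clap 4 with the derive feature; the `Args` struct is hypothetical, not part of the crate:

use clap::Parser;

// Hypothetical CLI wrapper around the enum above, for illustration only.
#[derive(Parser)]
struct Args {
    // `clap::ValueEnum` derives kebab-case value names, so `Py310`
    // is accepted on the command line as `py310`.
    #[arg(long, value_enum, default_value_t = TargetVersion::default())]
    target_version: TargetVersion,
}

fn main() {
    let args = Args::try_parse_from(["red_knot", "--target-version", "py310"]).unwrap();
    assert_eq!(args.target_version, TargetVersion::Py310);
}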

crates/red_knot/src/verbosity.rs (new file, 1 line)
@@ -0,0 +1 @@
@@ -1,111 +0,0 @@
use std::path::Path;

use anyhow::Context;
use notify::event::{CreateKind, ModifyKind, RemoveKind};
use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher};

use ruff_db::system::{SystemPath, SystemPathBuf};

pub struct FileWatcher {
    watcher: RecommendedWatcher,
}

pub trait EventHandler: Send + 'static {
    fn handle(&self, changes: Vec<FileWatcherChange>);
}

impl<F> EventHandler for F
where
    F: Fn(Vec<FileWatcherChange>) + Send + 'static,
{
    fn handle(&self, changes: Vec<FileWatcherChange>) {
        let f = self;
        f(changes);
    }
}

impl FileWatcher {
    pub fn new<E>(handler: E) -> anyhow::Result<Self>
    where
        E: EventHandler,
    {
        Self::from_handler(Box::new(handler))
    }

    fn from_handler(handler: Box<dyn EventHandler>) -> anyhow::Result<Self> {
        let watcher = recommended_watcher(move |event: notify::Result<Event>| {
            match event {
                Ok(event) => {
                    // TODO verify that this handles all events correctly
                    let change_kind = match event.kind {
                        EventKind::Create(CreateKind::File) => FileChangeKind::Created,
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::From)) => {
                            FileChangeKind::Deleted
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::To)) => {
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Any)) => {
                            // TODO Introduce a better catch-all event for cases that we don't understand.
                            FileChangeKind::Created
                        }
                        EventKind::Modify(ModifyKind::Name(notify::event::RenameMode::Both)) => {
                            todo!("Handle both create and delete event.");
                        }
                        EventKind::Modify(_) => FileChangeKind::Modified,
                        EventKind::Remove(RemoveKind::File) => FileChangeKind::Deleted,
                        _ => {
                            return;
                        }
                    };

                    let mut changes = Vec::new();

                    for path in event.paths {
                        if let Some(fs_path) = SystemPath::from_std_path(&path) {
                            changes.push(FileWatcherChange::new(fs_path.to_path_buf(), change_kind));
                        }
                    }

                    if !changes.is_empty() {
                        handler.handle(changes);
                    }
                }
                // TODO proper error handling
                Err(err) => {
                    panic!("Error: {err}");
                }
            }
        })
        .context("Failed to create file watcher.")?;

        Ok(Self { watcher })
    }

    pub fn watch_folder(&mut self, path: &Path) -> anyhow::Result<()> {
        self.watcher.watch(path, RecursiveMode::Recursive)?;

        Ok(())
    }
}

#[derive(Clone, Debug)]
pub struct FileWatcherChange {
    pub path: SystemPathBuf,
    #[allow(unused)]
    pub kind: FileChangeKind,
}

impl FileWatcherChange {
    pub fn new(path: SystemPathBuf, kind: FileChangeKind) -> Self {
        Self { path, kind }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileChangeKind {
    Created,
    Modified,
    Deleted,
}
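Thanks to the blanket `EventHandler` impl, any sufficiently long-lived closure can serve as the handler. A hedged usage sketch of the API above (the watched path is hypothetical):

fn main() -> anyhow::Result<()> {
    // Any `Fn(Vec<FileWatcherChange>) + Send + 'static` closure satisfies
    // `EventHandler` via the blanket impl above.
    let mut watcher = FileWatcher::new(|changes: Vec<FileWatcherChange>| {
        for change in changes {
            println!("{:?}: {:?}", change.kind, change.path);
        }
    })?;

    // Hypothetical workspace root; watching is recursive.
    watcher.watch_folder(std::path::Path::new("/path/to/workspace"))?;

    // Keep the process alive so events can arrive.
    std::thread::park();
    Ok(())
}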

crates/red_knot/tests/file_watching.rs (new file, 1240 lines)
File diff suppressed because it is too large
@@ -1,39 +0,0 @@
[package]
name = "red_knot_module_resolver"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
ruff_python_stdlib = { workspace = true }

compact_str = { workspace = true }
camino = { workspace = true }
once_cell = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
tracing = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os"] }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }

[lints]
workspace = true
@@ -1,126 +0,0 @@
use ruff_db::Upcast;

use crate::resolver::{
    editable_install_resolution_paths, file_to_module, internal::ModuleNameIngredient,
    module_resolution_settings, resolve_module_query,
};
use crate::typeshed::parse_typeshed_versions;

#[salsa::jar(db=Db)]
pub struct Jar(
    ModuleNameIngredient<'_>,
    module_resolution_settings,
    editable_install_resolution_paths,
    resolve_module_query,
    file_to_module,
    parse_typeshed_versions,
);

pub trait Db: salsa::DbWithJar<Jar> + ruff_db::Db + Upcast<dyn ruff_db::Db> {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync;

    use salsa::DebugWithDb;

    use ruff_db::files::Files;
    use ruff_db::system::{DbWithTestSystem, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;

    use crate::vendored_typeshed_stubs;

    use super::*;

    #[salsa::db(Jar, ruff_db::Jar)]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        system: TestSystem,
        vendored: VendoredFileSystem,
        files: Files,
        events: sync::Arc<sync::Mutex<Vec<salsa::Event>>>,
    }

    impl TestDb {
        pub(crate) fn new() -> Self {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                events: sync::Arc::default(),
                files: Files::default(),
            }
        }

        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = sync::Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }

        /// Clears the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }
    }

    impl Upcast<dyn ruff_db::Db> for TestDb {
        fn upcast(&self) -> &(dyn ruff_db::Db + 'static) {
            self
        }
    }

    impl ruff_db::Db for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
        }

        fn system(&self) -> &dyn ruff_db::system::System {
            &self.system
        }

        fn files(&self) -> &Files {
            &self.files
        }
    }

    impl Db for TestDb {}

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
        }

        fn test_system_mut(&mut self) -> &mut TestSystem {
            &mut self.system
        }
    }

    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                files: self.files.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
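The `take_salsa_events` pattern above relies on `Arc::get_mut`, which only succeeds while no snapshot still holds a clone of the events list. A standalone sketch of that mechanism (the `String` events are stand-ins for `salsa::Event`):

use std::sync::{Arc, Mutex};

// Events are pushed behind an Arc<Mutex<..>> shared with snapshots; taking
// them requires that no snapshot (no other Arc clone) is still alive.
fn take_events(events: &mut Arc<Mutex<Vec<String>>>) -> Vec<String> {
    let inner = Arc::get_mut(events).expect("no pending snapshots");
    std::mem::take(inner.get_mut().unwrap())
}

fn main() {
    let mut events = Arc::new(Mutex::new(vec!["created".to_string()]));
    events.lock().unwrap().push("changed".to_string());

    let taken = take_events(&mut events);
    assert_eq!(taken, vec!["created".to_string(), "changed".to_string()]);
    assert!(events.lock().unwrap().is_empty());
}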
@@ -1,18 +0,0 @@
mod db;
mod module;
mod module_name;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

pub use db::{Db, Jar};
pub use module::{Module, ModuleKind};
pub use module_name::ModuleName;
pub use resolver::resolve_module;
pub use typeshed::{
    vendored_typeshed_stubs, TypeshedVersionsParseError, TypeshedVersionsParseErrorKind,
};

File diff suppressed because it is too large
@@ -1 +0,0 @@
f863db6bc5242348ceaa6a3bca4e59aa9e62faaa
@@ -1,117 +0,0 @@
import sys
from typing import Literal

SF_APPEND: Literal[0x00040000]
SF_ARCHIVED: Literal[0x00010000]
SF_IMMUTABLE: Literal[0x00020000]
SF_NOUNLINK: Literal[0x00100000]
SF_SNAPSHOT: Literal[0x00200000]

ST_MODE: Literal[0]
ST_INO: Literal[1]
ST_DEV: Literal[2]
ST_NLINK: Literal[3]
ST_UID: Literal[4]
ST_GID: Literal[5]
ST_SIZE: Literal[6]
ST_ATIME: Literal[7]
ST_MTIME: Literal[8]
ST_CTIME: Literal[9]

S_IFIFO: Literal[0o010000]
S_IFLNK: Literal[0o120000]
S_IFREG: Literal[0o100000]
S_IFSOCK: Literal[0o140000]
S_IFBLK: Literal[0o060000]
S_IFCHR: Literal[0o020000]
S_IFDIR: Literal[0o040000]

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on Linux.
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Literal[0o4000]
S_ISGID: Literal[0o2000]
S_ISVTX: Literal[0o1000]

S_IRWXU: Literal[0o0700]
S_IRUSR: Literal[0o0400]
S_IWUSR: Literal[0o0200]
S_IXUSR: Literal[0o0100]

S_IRWXG: Literal[0o0070]
S_IRGRP: Literal[0o0040]
S_IWGRP: Literal[0o0020]
S_IXGRP: Literal[0o0010]

S_IRWXO: Literal[0o0007]
S_IROTH: Literal[0o0004]
S_IWOTH: Literal[0o0002]
S_IXOTH: Literal[0o0001]

S_ENFMT: Literal[0o2000]
S_IREAD: Literal[0o0400]
S_IWRITE: Literal[0o0200]
S_IEXEC: Literal[0o0100]

UF_APPEND: Literal[0x00000004]
UF_COMPRESSED: Literal[0x00000020]  # OS X 10.6+ only
UF_HIDDEN: Literal[0x00008000]  # OS X 10.5+ only
UF_IMMUTABLE: Literal[0x00000002]
UF_NODUMP: Literal[0x00000001]
UF_NOUNLINK: Literal[0x00000010]
UF_OPAQUE: Literal[0x00000008]

def S_IMODE(mode: int, /) -> int: ...
def S_IFMT(mode: int, /) -> int: ...
def S_ISBLK(mode: int, /) -> bool: ...
def S_ISCHR(mode: int, /) -> bool: ...
def S_ISDIR(mode: int, /) -> bool: ...
def S_ISDOOR(mode: int, /) -> bool: ...
def S_ISFIFO(mode: int, /) -> bool: ...
def S_ISLNK(mode: int, /) -> bool: ...
def S_ISPORT(mode: int, /) -> bool: ...
def S_ISREG(mode: int, /) -> bool: ...
def S_ISSOCK(mode: int, /) -> bool: ...
def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
    IO_REPARSE_TAG_SYMLINK: int
    IO_REPARSE_TAG_MOUNT_POINT: int
    IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
    FILE_ATTRIBUTE_ARCHIVE: Literal[32]
    FILE_ATTRIBUTE_COMPRESSED: Literal[2048]
    FILE_ATTRIBUTE_DEVICE: Literal[64]
    FILE_ATTRIBUTE_DIRECTORY: Literal[16]
    FILE_ATTRIBUTE_ENCRYPTED: Literal[16384]
    FILE_ATTRIBUTE_HIDDEN: Literal[2]
    FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768]
    FILE_ATTRIBUTE_NORMAL: Literal[128]
    FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192]
    FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072]
    FILE_ATTRIBUTE_OFFLINE: Literal[4096]
    FILE_ATTRIBUTE_READONLY: Literal[1]
    FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024]
    FILE_ATTRIBUTE_SPARSE_FILE: Literal[512]
    FILE_ATTRIBUTE_SYSTEM: Literal[4]
    FILE_ATTRIBUTE_TEMPORARY: Literal[256]
    FILE_ATTRIBUTE_VIRTUAL: Literal[65536]

if sys.version_info >= (3, 13):
    SF_SETTABLE: Literal[0x3FFF0000]
    # https://github.com/python/cpython/issues/114081#issuecomment-2119017790
    # SF_RESTRICTED: Literal[0x00080000]
    SF_FIRMLINK: Literal[0x00800000]
    SF_DATALESS: Literal[0x40000000]

    SF_SUPPORTED: Literal[0x9F0000]
    SF_SYNTHETIC: Literal[0xC0000000]

    UF_TRACKED: Literal[0x00000040]
    UF_DATAVAULT: Literal[0x00000080]
    UF_SETTABLE: Literal[0x0000FFFF]
@@ -1,20 +0,0 @@
import enum
import sys
from typing import Literal

LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
ACCEPT_RETRY_DELAY: Literal[1]
DEBUG_STACK_DEPTH: Literal[10]
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
if sys.version_info >= (3, 11):
    SSL_SHUTDOWN_TIMEOUT: float
    FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
    FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
if sys.version_info >= (3, 12):
    THREAD_JOIN_TIMEOUT: Literal[300]

class _SendfileMode(enum.Enum):
    UNSUPPORTED = 1
    TRY_NATIVE = 2
    FALLBACK = 3
@@ -1,16 +0,0 @@
from typing import Any, TypeVar

__all__ = ["Error", "copy", "deepcopy"]

_T = TypeVar("_T")

# None in CPython but non-None in Jython
PyStringMap: Any

# Note: memo and _nil are internal kwargs.
def deepcopy(x: _T, memo: dict[int, Any] | None = None, _nil: Any = []) -> _T: ...
def copy(x: _T) -> _T: ...

class Error(Exception): ...

error = Error
@@ -1,25 +0,0 @@
from typing import Any

from ..cmd import Command

def show_formats() -> None: ...

class bdist(Command):
    description: str
    user_options: Any
    boolean_options: Any
    help_options: Any
    no_format_option: Any
    default_format: Any
    format_commands: Any
    format_command: Any
    bdist_base: Any
    plat_name: Any
    formats: Any
    dist_dir: Any
    skip_build: int
    group: Any
    owner: Any
    def initialize_options(self) -> None: ...
    def finalize_options(self) -> None: ...
    def run(self) -> None: ...
@@ -1,67 +0,0 @@
ENDMARKER: int
NAME: int
NUMBER: int
STRING: int
NEWLINE: int
INDENT: int
DEDENT: int
LPAR: int
RPAR: int
LSQB: int
RSQB: int
COLON: int
COMMA: int
SEMI: int
PLUS: int
MINUS: int
STAR: int
SLASH: int
VBAR: int
AMPER: int
LESS: int
GREATER: int
EQUAL: int
DOT: int
PERCENT: int
BACKQUOTE: int
LBRACE: int
RBRACE: int
EQEQUAL: int
NOTEQUAL: int
LESSEQUAL: int
GREATEREQUAL: int
TILDE: int
CIRCUMFLEX: int
LEFTSHIFT: int
RIGHTSHIFT: int
DOUBLESTAR: int
PLUSEQUAL: int
MINEQUAL: int
STAREQUAL: int
SLASHEQUAL: int
PERCENTEQUAL: int
AMPEREQUAL: int
VBAREQUAL: int
CIRCUMFLEXEQUAL: int
LEFTSHIFTEQUAL: int
RIGHTSHIFTEQUAL: int
DOUBLESTAREQUAL: int
DOUBLESLASH: int
DOUBLESLASHEQUAL: int
OP: int
COMMENT: int
NL: int
RARROW: int
AT: int
ATEQUAL: int
AWAIT: int
ASYNC: int
ERRORTOKEN: int
COLONEQUAL: int
N_TOKENS: int
NT_OFFSET: int
tok_name: dict[int, str]

def ISTERMINAL(x: int) -> bool: ...
def ISNONTERMINAL(x: int) -> bool: ...
def ISEOF(x: int) -> bool: ...
@@ -1,55 +0,0 @@
import sys
from typing import Literal, overload

if sys.platform != "win32":
    LOG_ALERT: Literal[1]
    LOG_AUTH: Literal[32]
    LOG_AUTHPRIV: Literal[80]
    LOG_CONS: Literal[2]
    LOG_CRIT: Literal[2]
    LOG_CRON: Literal[72]
    LOG_DAEMON: Literal[24]
    LOG_DEBUG: Literal[7]
    LOG_EMERG: Literal[0]
    LOG_ERR: Literal[3]
    LOG_INFO: Literal[6]
    LOG_KERN: Literal[0]
    LOG_LOCAL0: Literal[128]
    LOG_LOCAL1: Literal[136]
    LOG_LOCAL2: Literal[144]
    LOG_LOCAL3: Literal[152]
    LOG_LOCAL4: Literal[160]
    LOG_LOCAL5: Literal[168]
    LOG_LOCAL6: Literal[176]
    LOG_LOCAL7: Literal[184]
    LOG_LPR: Literal[48]
    LOG_MAIL: Literal[16]
    LOG_NDELAY: Literal[8]
    LOG_NEWS: Literal[56]
    LOG_NOTICE: Literal[5]
    LOG_NOWAIT: Literal[16]
    LOG_ODELAY: Literal[4]
    LOG_PERROR: Literal[32]
    LOG_PID: Literal[1]
    LOG_SYSLOG: Literal[40]
    LOG_USER: Literal[8]
    LOG_UUCP: Literal[64]
    LOG_WARNING: Literal[4]

    if sys.version_info >= (3, 13):
        LOG_FTP: Literal[88]
        LOG_INSTALL: Literal[112]
        LOG_LAUNCHD: Literal[192]
        LOG_NETINFO: Literal[96]
        LOG_RAS: Literal[120]
        LOG_REMOTEAUTH: Literal[104]

    def LOG_MASK(pri: int, /) -> int: ...
    def LOG_UPTO(pri: int, /) -> int: ...
    def closelog() -> None: ...
    def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ...
    def setlogmask(maskpri: int, /) -> int: ...
    @overload
    def syslog(priority: int, message: str) -> None: ...
    @overload
    def syslog(message: str) -> None: ...
@@ -1,80 +0,0 @@
from typing import Literal

# These are not actually bools. See #4669
NO: bool
YES: bool
TRUE: bool
FALSE: bool
ON: bool
OFF: bool
N: Literal["n"]
S: Literal["s"]
W: Literal["w"]
E: Literal["e"]
NW: Literal["nw"]
SW: Literal["sw"]
NE: Literal["ne"]
SE: Literal["se"]
NS: Literal["ns"]
EW: Literal["ew"]
NSEW: Literal["nsew"]
CENTER: Literal["center"]
NONE: Literal["none"]
X: Literal["x"]
Y: Literal["y"]
BOTH: Literal["both"]
LEFT: Literal["left"]
TOP: Literal["top"]
RIGHT: Literal["right"]
BOTTOM: Literal["bottom"]
RAISED: Literal["raised"]
SUNKEN: Literal["sunken"]
FLAT: Literal["flat"]
RIDGE: Literal["ridge"]
GROOVE: Literal["groove"]
SOLID: Literal["solid"]
HORIZONTAL: Literal["horizontal"]
VERTICAL: Literal["vertical"]
NUMERIC: Literal["numeric"]
CHAR: Literal["char"]
WORD: Literal["word"]
BASELINE: Literal["baseline"]
INSIDE: Literal["inside"]
OUTSIDE: Literal["outside"]
SEL: Literal["sel"]
SEL_FIRST: Literal["sel.first"]
SEL_LAST: Literal["sel.last"]
END: Literal["end"]
INSERT: Literal["insert"]
CURRENT: Literal["current"]
ANCHOR: Literal["anchor"]
ALL: Literal["all"]
NORMAL: Literal["normal"]
DISABLED: Literal["disabled"]
ACTIVE: Literal["active"]
HIDDEN: Literal["hidden"]
CASCADE: Literal["cascade"]
CHECKBUTTON: Literal["checkbutton"]
COMMAND: Literal["command"]
RADIOBUTTON: Literal["radiobutton"]
SEPARATOR: Literal["separator"]
SINGLE: Literal["single"]
BROWSE: Literal["browse"]
MULTIPLE: Literal["multiple"]
EXTENDED: Literal["extended"]
DOTBOX: Literal["dotbox"]
UNDERLINE: Literal["underline"]
PIESLICE: Literal["pieslice"]
CHORD: Literal["chord"]
ARC: Literal["arc"]
FIRST: Literal["first"]
LAST: Literal["last"]
BUTT: Literal["butt"]
PROJECTING: Literal["projecting"]
ROUND: Literal["round"]
BEVEL: Literal["bevel"]
MITER: Literal["miter"]
MOVETO: Literal["moveto"]
SCROLL: Literal["scroll"]
UNITS: Literal["units"]
PAGES: Literal["pages"]
@@ -1,28 +0,0 @@
import sys
from _typeshed import ReadableBuffer
from typing import Literal, overload

if sys.platform == "win32":
    SND_APPLICATION: Literal[128]
    SND_FILENAME: Literal[131072]
    SND_ALIAS: Literal[65536]
    SND_LOOP: Literal[8]
    SND_MEMORY: Literal[4]
    SND_PURGE: Literal[64]
    SND_ASYNC: Literal[1]
    SND_NODEFAULT: Literal[2]
    SND_NOSTOP: Literal[16]
    SND_NOWAIT: Literal[8192]

    MB_ICONASTERISK: Literal[64]
    MB_ICONEXCLAMATION: Literal[48]
    MB_ICONHAND: Literal[16]
    MB_ICONQUESTION: Literal[32]
    MB_OK: Literal[0]
    def Beep(frequency: int, duration: int) -> None: ...
    # Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible
    @overload
    def PlaySound(sound: ReadableBuffer | None, flags: Literal[4]) -> None: ...
    @overload
    def PlaySound(sound: str | ReadableBuffer | None, flags: int) -> None: ...
    def MessageBeep(type: int = 0) -> None: ...
@@ -11,24 +11,41 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_module_resolver = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
smallvec = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
anyhow = { workspace = true }
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true }
tempfile = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true
@@ -1,9 +1,9 @@
# Red Knot

A work-in-progress multifile module resolver for Ruff.
Semantic analysis for the red-knot project.

## Vendored types for the stdlib

This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_module_resolver/vendor/typeshed`. The file `crates/red_knot_module_resolver/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
@@ -3,7 +3,7 @@
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_module_resolver/vendor/typeshed` change.
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;
@@ -23,8 +23,21 @@ const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(CompressionMethod::Zstd)
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
crates/red_knot_python_semantic/src/builtins.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db, builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
@@ -1,57 +1,22 @@
use salsa::DbWithJar;

use red_knot_module_resolver::Db as ResolverDb;
use ruff_db::{Db as SourceDb, Upcast};

use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopeId;
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::types::{
    infer_definition_types, infer_expression_types, infer_scope_types, ClassType, FunctionType,
    IntersectionType, UnionType,
};

#[salsa::jar(db=Db)]
pub struct Jar(
    ScopeId<'_>,
    Definition<'_>,
    Expression<'_>,
    FunctionType<'_>,
    ClassType<'_>,
    UnionType<'_>,
    IntersectionType<'_>,
    symbol_table,
    use_def_map,
    global_scope,
    semantic_index,
    infer_definition_types,
    infer_expression_types,
    infer_scope_types,
);

/// Database giving access to semantic information about a Python program.
pub trait Db:
    SourceDb + ResolverDb + DbWithJar<Jar> + Upcast<dyn SourceDb> + Upcast<dyn ResolverDb>
{
}
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync::Arc;

    use salsa::DebugWithDb;

    use red_knot_module_resolver::{vendored_typeshed_stubs, Db as ResolverDb, Jar as ResolverJar};
    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::Files;
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
    use ruff_db::{Db as SourceDb, Jar as SourceJar, Upcast};
    use ruff_python_trivia::textwrap;
    use ruff_db::{Db as SourceDb, Upcast};

    use super::{Db, Jar};
    use super::Db;

    #[salsa::db(Jar, ResolverJar, SourceJar)]
    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        files: Files,
@@ -65,7 +30,7 @@ pub(crate) mod tests {
            Self {
                storage: salsa::Storage::default(),
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().snapshot(),
                vendored: vendored_typeshed_stubs().clone(),
                events: std::sync::Arc::default(),
                files: Files::default(),
            }
@@ -89,12 +54,6 @@ pub(crate) mod tests {
        pub(crate) fn clear_salsa_events(&mut self) {
            self.take_salsa_events();
        }

        /// Write auto-dedented text to a file.
        pub(crate) fn write_dedented(&mut self, path: &str, content: &str) -> anyhow::Result<()> {
            self.write_file(path, textwrap::dedent(content))?;
            Ok(())
        }
    }

    impl DbWithTestSystem for TestDb {
@@ -107,6 +66,7 @@ pub(crate) mod tests {
        }
    }

    #[salsa::db]
    impl SourceDb for TestDb {
        fn vendored(&self) -> &VendoredFileSystem {
            &self.vendored
@@ -125,34 +85,21 @@ pub(crate) mod tests {
        fn upcast(&self) -> &(dyn SourceDb + 'static) {
            self
        }
    }

    impl Upcast<dyn ResolverDb> for TestDb {
        fn upcast(&self) -> &(dyn ResolverDb + 'static) {
        fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
            self
        }
    }

    impl red_knot_module_resolver::Db for TestDb {}
    #[salsa::db]
    impl Db for TestDb {}

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, event: salsa::Event) {
            tracing::trace!("event: {:?}", event.debug(self));
        fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
            let event = event();
            tracing::trace!("event: {event:?}");
            let mut events = self.events.lock().unwrap();
            events.push(event);
        }
    }

    impl salsa::ParallelDatabase for TestDb {
        fn snapshot(&self) -> salsa::Snapshot<Self> {
            salsa::Snapshot::new(Self {
                storage: self.storage.snapshot(),
                files: self.files.snapshot(),
                system: self.system.snapshot(),
                vendored: self.vendored.snapshot(),
                events: self.events.clone(),
            })
        }
    }
}
@@ -2,12 +2,21 @@ use std::hash::BuildHasherDefault;
use rustc_hash::FxHasher;

pub use db::{Db, Jar};
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
mod node_key;
mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub mod types;
@@ -42,7 +42,7 @@ impl ModuleName {
    /// ## Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").as_deref(), Some("foo.bar"));
    /// assert_eq!(ModuleName::new_static(""), None);
@@ -68,7 +68,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().components().collect::<Vec<_>>(), vec!["foo", "bar", "baz"]);
    /// ```
@@ -82,7 +82,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(ModuleName::new_static("foo.bar").unwrap().parent(), Some(ModuleName::new_static("foo").unwrap()));
    /// assert_eq!(ModuleName::new_static("foo.bar.baz").unwrap().parent(), Some(ModuleName::new_static("foo.bar").unwrap()));
@@ -101,7 +101,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert!(ModuleName::new_static("foo.bar").unwrap().starts_with(&ModuleName::new_static("foo").unwrap()));
    ///
@@ -133,7 +133,7 @@ impl ModuleName {
    /// # Examples
    ///
    /// ```
    /// use red_knot_module_resolver::ModuleName;
    /// use red_knot_python_semantic::ModuleName;
    ///
    /// assert_eq!(&*ModuleName::from_components(["a"]).unwrap(), "a");
    /// assert_eq!(&*ModuleName::from_components(["a", "b"]).unwrap(), "a.b");
crates/red_knot_python_semantic/src/module_resolver/mod.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use std::iter::FusedIterator;

pub(crate) use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::SearchPaths;
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;
use resolver::SearchPathIterator;

mod module;
mod path;
mod resolver;
mod state;
mod typeshed;

#[cfg(test)]
mod testing;

/// Returns an iterator over all search paths pointing to a system path
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
    SystemModuleSearchPathsIter {
        inner: search_paths(db),
    }
}

pub struct SystemModuleSearchPathsIter<'db> {
    inner: SearchPathIterator<'db>,
}

impl<'db> Iterator for SystemModuleSearchPathsIter<'db> {
    type Item = &'db SystemPath;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let next = self.inner.next()?;

            if let Some(system_path) = next.as_system_path() {
                return Some(system_path);
            }
        }
    }
}

impl FusedIterator for SystemModuleSearchPathsIter<'_> {}
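`SystemModuleSearchPathsIter` is a filtering adapter: `next` loops over the inner iterator, skips entries that are not system paths, and the type opts into `FusedIterator` because the inner iterator keeps returning `None` once exhausted. The same shape in a generic, self-contained sketch (the `Evens` adapter is illustrative only):

use std::iter::FusedIterator;

// Adapter that yields only even numbers from an inner iterator.
struct Evens<I: Iterator<Item = u32>> {
    inner: I,
}

impl<I: Iterator<Item = u32>> Iterator for Evens<I> {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        loop {
            let next = self.inner.next()?; // `?` propagates exhaustion
            if next % 2 == 0 {
                return Some(next);
            }
        }
    }
}

// Sound as long as the inner iterator keeps returning `None` once exhausted.
impl<I: Iterator<Item = u32>> FusedIterator for Evens<I> {}

fn main() {
    let evens: Vec<u32> = Evens { inner: 1..=6 }.collect();
    assert_eq!(evens, vec![2, 4, 6]);
}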
@@ -3,9 +3,8 @@ use std::sync::Arc;
use ruff_db::files::File;

use crate::db::Db;
use super::path::SearchPath;
use crate::module_name::ModuleName;
use crate::path::{ModuleResolutionPathBuf, ModuleResolutionPathRef};

/// Representation of a Python module.
#[derive(Clone, PartialEq, Eq)]
@@ -17,7 +16,7 @@ impl Module {
    pub(crate) fn new(
        name: ModuleName,
        kind: ModuleKind,
        search_path: Arc<ModuleResolutionPathBuf>,
        search_path: SearchPath,
        file: File,
    ) -> Self {
        Self {
@@ -41,8 +40,8 @@ impl Module {
    }

    /// The search path from which the module was resolved.
    pub(crate) fn search_path(&self) -> ModuleResolutionPathRef {
        ModuleResolutionPathRef::from(&*self.inner.search_path)
    pub(crate) fn search_path(&self) -> &SearchPath {
        &self.inner.search_path
    }

    /// Determine whether this module is a single-file module or a package
@@ -62,22 +61,11 @@ impl std::fmt::Debug for Module {
    }
}

impl salsa::DebugWithDb<dyn Db> for Module {
    fn fmt(&self, f: &mut Formatter<'_>, db: &dyn Db) -> std::fmt::Result {
        f.debug_struct("Module")
            .field("name", &self.name())
            .field("kind", &self.kind())
            .field("file", &self.file().debug(db.upcast()))
            .field("search_path", &self.search_path())
            .finish()
    }
}

#[derive(PartialEq, Eq)]
struct ModuleInner {
    name: ModuleName,
    kind: ModuleKind,
    search_path: Arc<ModuleResolutionPathBuf>,
    search_path: SearchPath,
    file: File,
}
crates/red_knot_python_semantic/src/module_resolver/path.rs (new file, 1098 lines)
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,18 +1,17 @@
use ruff_db::program::TargetVersion;
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;

use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::typeshed::LazyTypeshedVersions;
use crate::python_version::PythonVersion;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: TargetVersion,
    pub(crate) target_version: PythonVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
    pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
@@ -20,10 +19,6 @@ impl<'db> ResolverState<'db> {
        }
    }

    pub(crate) fn system(&self) -> &dyn System {
        self.db.system()
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
@@ -1,8 +1,10 @@
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPathBuf;

use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::ProgramSettings;

/// A test case for the module resolver.
///
@@ -12,8 +14,11 @@ pub(crate) struct TestCase<T> {
    pub(crate) db: TestDb,
    pub(crate) src: SystemPathBuf,
    pub(crate) stdlib: T,
    // Most test cases only ever need a single `site-packages` directory,
    // so this is a single directory instead of a `Vec` of directories,
    // like it is in `ruff_db::Program`.
    pub(crate) site_packages: SystemPathBuf,
    pub(crate) target_version: TargetVersion,
    pub(crate) target_version: PythonVersion,
}

/// A `(file_name, file_contents)` tuple
@@ -95,7 +100,7 @@ pub(crate) struct UnspecifiedTypeshed;
/// to `()`.
pub(crate) struct TestCaseBuilder<T> {
    typeshed_option: T,
    target_version: TargetVersion,
    target_version: PythonVersion,
    first_party_files: Vec<FileSpec>,
    site_packages_files: Vec<FileSpec>,
}
@@ -114,7 +119,7 @@ impl<T> TestCaseBuilder<T> {
    }

    /// Specify the target Python version the module resolver should assume
    pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
        self.target_version = target_version;
        self
    }
@@ -125,6 +130,8 @@ impl<T> TestCaseBuilder<T> {
        files: impl IntoIterator<Item = FileSpec>,
    ) -> SystemPathBuf {
        let root = location.as_ref().to_path_buf();
        // Make sure to create the directory even if the list of files is empty:
        db.memory_file_system().create_directory_all(&root).unwrap();
        db.write_files(
            files
                .into_iter()
@@ -139,7 +146,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
    pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
        Self {
            typeshed_option: UnspecifiedTypeshed,
            target_version: TargetVersion::default(),
            target_version: PythonVersion::default(),
            first_party_files: vec![],
            site_packages_files: vec![],
        }
@@ -214,16 +221,19 @@ impl TestCaseBuilder<MockedTypeshed> {
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
        let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

        Program::new(
        Program::from_settings(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: Some(typeshed.clone()),
                site_packages: Some(site_packages.clone()),
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: vec![site_packages.clone()],
                },
            },
        );
        )
        .expect("Valid program settings");

        TestCase {
            db,
@@ -267,16 +277,19 @@ impl TestCaseBuilder<VendoredTypeshed> {
        Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
        let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

        Program::new(
        Program::from_settings(
            &db,
            target_version,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: src.clone(),
                custom_typeshed: None,
                site_packages: Some(site_packages.clone()),
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: None,
                    site_packages: vec![site_packages.clone()],
                },
            },
        );
        )
        .expect("Valid search path settings");

        TestCase {
            db,
@@ -1,8 +1,8 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(crate) use self::versions::{
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsQueryResult,
pub(super) use self::versions::{
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};
pub use self::versions::{TypeshedVersionsParseError, TypeshedVersionsParseErrorKind};

mod vendored;
mod versions;
@@ -6,16 +6,15 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use ruff_db::program::TargetVersion;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

use ruff_db::files::{system_path_to_file, File};

use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;

use super::vendored::vendored_typeshed_stubs;
use crate::python_version::PythonVersion;

#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
@@ -44,7 +43,7 @@ impl<'db> LazyTypeshedVersions<'db> {
        db: &'db dyn Db,
        module: &ModuleName,
        stdlib_root: Option<&SystemPath>,
        target_version: TargetVersion,
        target_version: PythonVersion,
    ) -> TypeshedVersionsQueryResult {
        let versions = self.0.get_or_init(|| {
            let versions_path = if let Some(system_path) = stdlib_root {
@@ -52,7 +51,7 @@ impl<'db> LazyTypeshedVersions<'db> {
            } else {
                return &VENDORED_VERSIONS;
            };
            let Some(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
            let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
                todo!(
                    "Still need to figure out how to handle VERSIONS files being deleted \
                    from custom typeshed directories! Expected a file to exist at {versions_path}"
@@ -64,7 +63,7 @@ impl<'db> LazyTypeshedVersions<'db> {
            // Unwrapping here is not correct...
            parse_typeshed_versions(db, versions_file).as_ref().unwrap()
        });
        versions.query_module(module, PyVersion::from(target_version))
        versions.query_module(module, target_version)
    }
}

@@ -90,7 +89,7 @@ static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
});

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TypeshedVersionsParseError {
pub(crate) struct TypeshedVersionsParseError {
    line_number: Option<NonZeroU16>,
    reason: TypeshedVersionsParseErrorKind,
}
@@ -123,7 +122,7 @@ impl std::error::Error for TypeshedVersionsParseError {
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TypeshedVersionsParseErrorKind {
pub(super) enum TypeshedVersionsParseErrorKind {
    TooManyLines(NonZeroUsize),
    UnexpectedNumberOfColons,
    InvalidModuleName(String),
@@ -178,7 +177,7 @@ impl TypeshedVersions {
    fn query_module(
        &self,
        module: &ModuleName,
        target_version: PyVersion,
        target_version: PythonVersion,
    ) -> TypeshedVersionsQueryResult {
        if let Some(range) = self.exact(module) {
            if range.contains(target_version) {
@@ -323,13 +322,13 @@ impl fmt::Display for TypeshedVersions {

#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum PyVersionRange {
    AvailableFrom(RangeFrom<PyVersion>),
    AvailableWithin(RangeInclusive<PyVersion>),
    AvailableFrom(RangeFrom<PythonVersion>),
    AvailableWithin(RangeInclusive<PythonVersion>),
}

impl PyVersionRange {
    #[must_use]
    fn contains(&self, version: PyVersion) -> bool {
    fn contains(&self, version: PythonVersion) -> bool {
        match self {
            Self::AvailableFrom(inner) => inner.contains(&version),
            Self::AvailableWithin(inner) => inner.contains(&version),
@@ -343,9 +342,14 @@ impl FromStr for PyVersionRange {
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('-').map(str::trim);
        match (parts.next(), parts.next(), parts.next()) {
            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
            (Some(lower), Some(""), None) => {
                let lower = PythonVersion::from_versions_file_string(lower)?;
                Ok(Self::AvailableFrom(lower..))
            }
            (Some(lower), Some(upper), None) => {
                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
                let lower = PythonVersion::from_versions_file_string(lower)?;
                let upper = PythonVersion::from_versions_file_string(upper)?;
                Ok(Self::AvailableWithin(lower..=upper))
            }
            _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
        }
@@ -363,74 +367,20 @@ impl fmt::Display for PyVersionRange {
    }
}

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
struct PyVersion {
    major: u8,
    minor: u8,
}

impl FromStr for PyVersion {
    type Err = TypeshedVersionsParseErrorKind;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
impl PythonVersion {
    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
        let mut parts = s.split('.').map(str::trim);
        let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
            return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                s.to_string(),
            ));
        };
        let major = match u8::from_str(major) {
            Ok(major) => major,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                version: s.to_string(),
                err: int_parse_error,
            }
        };
        let minor = match u8::from_str(minor) {
            Ok(minor) => minor,
            Err(err) => {
                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
                    version: s.to_string(),
                    err,
                })
            }
        };
        Ok(Self { major, minor })
    }
}

impl fmt::Display for PyVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PyVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}

impl From<TargetVersion> for PyVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
            TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
            TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
            TargetVersion::Py310 => PyVersion {
                major: 3,
                minor: 10,
            },
            TargetVersion::Py311 => PyVersion {
                major: 3,
                minor: 11,
            },
            TargetVersion::Py312 => PyVersion {
                major: 3,
                minor: 12,
            },
            TargetVersion::Py313 => PyVersion {
                major: 3,
                minor: 13,
            },
        }
        })
    }
}
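For context, typeshed's VERSIONS file lists entries like `foo: 3.8-` (available from 3.8) or `foo: 3.8-3.10` (available within that range), and the parser above splits on `-` and then each bound on `.`. A standalone sketch of the same two-stage parse, using plain tuples instead of the crate's `PythonVersion` (all names here are hypothetical):

// "3.8" -> (3, 8); errors on anything without exactly one period.
fn parse_version(s: &str) -> Result<(u8, u8), String> {
    let mut parts = s.split('.').map(str::trim);
    match (parts.next(), parts.next(), parts.next()) {
        (Some(major), Some(minor), None) => Ok((
            major.parse().map_err(|e| format!("{s}: {e}"))?,
            minor.parse().map_err(|e| format!("{s}: {e}"))?,
        )),
        _ => Err(format!("unexpected number of periods in {s}")),
    }
}

// "3.8-" -> open range; "3.8-3.12" -> inclusive range.
fn parse_range(s: &str) -> Result<((u8, u8), Option<(u8, u8)>), String> {
    let mut parts = s.split('-').map(str::trim);
    match (parts.next(), parts.next(), parts.next()) {
        (Some(lower), Some(""), None) => Ok((parse_version(lower)?, None)),
        (Some(lower), Some(upper), None) => {
            Ok((parse_version(lower)?, Some(parse_version(upper)?)))
        }
        _ => Err(format!("unexpected number of hyphens in {s}")),
    }
}

fn main() {
    assert_eq!(parse_range("3.8-").unwrap(), ((3, 8), None));
    assert_eq!(parse_range("3.8-3.12").unwrap(), ((3, 8), Some((3, 12))));
    assert!(parse_range("3").is_err());
}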
@@ -440,7 +390,6 @@ mod tests {
    use std::path::Path;

    use insta::assert_snapshot;
    use ruff_db::program::TargetVersion;

    use super::*;

@@ -478,34 +427,34 @@ mod tests {

        assert!(versions.contains_exact(&asyncio));
        assert_eq!(
            versions.query_module(&asyncio, TargetVersion::Py310.into()),
            versions.query_module(&asyncio, PythonVersion::PY310),
            TypeshedVersionsQueryResult::Exists
        );

        assert!(versions.contains_exact(&asyncio_staggered));
        assert_eq!(
            versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
            versions.query_module(&asyncio_staggered, PythonVersion::PY38),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
            versions.query_module(&asyncio_staggered, PythonVersion::PY37),
            TypeshedVersionsQueryResult::DoesNotExist
        );

        assert!(versions.contains_exact(&audioop));
        assert_eq!(
            versions.query_module(&audioop, TargetVersion::Py312.into()),
            versions.query_module(&audioop, PythonVersion::PY312),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            versions.query_module(&audioop, TargetVersion::Py313.into()),
            versions.query_module(&audioop, PythonVersion::PY313),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }

    #[test]
    fn typeshed_versions_consistent_with_vendored_stubs() {
        const VERSIONS_DATA: &str = include_str!("../../vendor/typeshed/stdlib/VERSIONS");
        const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();

@@ -590,15 +539,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&bar));
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
            parsed_versions.query_module(&bar, PythonVersion::PY37),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
            parsed_versions.query_module(&bar, PythonVersion::PY310),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
            parsed_versions.query_module(&bar, PythonVersion::PY311),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -610,15 +559,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&foo));
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
            parsed_versions.query_module(&foo, PythonVersion::PY37),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
            parsed_versions.query_module(&foo, PythonVersion::PY38),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
            parsed_versions.query_module(&foo, PythonVersion::PY311),
            TypeshedVersionsQueryResult::Exists
        );
    }
@@ -630,15 +579,15 @@ foo: 3.8- # trailing comment

        assert!(parsed_versions.contains_exact(&bar_baz));
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
            parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
            parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
            TypeshedVersionsQueryResult::Exists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
            parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -650,15 +599,15 @@ foo: 3.8- # trailing comment

        assert!(!parsed_versions.contains_exact(&bar_eggs));
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
            TypeshedVersionsQueryResult::MaybeExists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
            TypeshedVersionsQueryResult::MaybeExists
        );
        assert_eq!(
            parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
            parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
@@ -670,11 +619,11 @@ foo: 3.8- # trailing comment

        assert!(!parsed_versions.contains_exact(&spam));
        assert_eq!(
            parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
            parsed_versions.query_module(&spam, PythonVersion::PY37),
            TypeshedVersionsQueryResult::DoesNotExist
        );
        assert_eq!(
            parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
            parsed_versions.query_module(&spam, PythonVersion::PY313),
            TypeshedVersionsQueryResult::DoesNotExist
        );
    }
crates/red_knot_python_semantic/src/program.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::SystemPathBuf;

use crate::module_resolver::SearchPaths;
use crate::Db;

#[salsa::input(singleton)]
pub struct Program {
    pub target_version: PythonVersion,

    #[default]
    #[return_ref]
    pub(crate) search_paths: SearchPaths,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
        let ProgramSettings {
            target_version,
            search_paths,
        } = settings;

        tracing::info!("Target version: {target_version}");

        let search_paths = SearchPaths::from_settings(db, search_paths)
            .with_context(|| "Invalid search path settings")?;

        // Note: `settings` was destructured above, so we use the destructured
        // `target_version` binding here rather than `settings.target_version`.
        Ok(Program::builder(target_version)
            .durability(Durability::HIGH)
            .search_paths(search_paths)
            .new(db))
    }

    pub fn update_search_paths(
        &self,
        db: &mut dyn Db,
        search_path_settings: SearchPathSettings,
    ) -> anyhow::Result<()> {
        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

        if self.search_paths(db) != &search_paths {
            tracing::debug!("Update search paths");
            self.set_search_paths(db).to(search_paths);
        }

        Ok(())
    }
}

#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's `MYPYPATH` environment variable,
    /// or pyright's `stubPath` configuration setting.
    pub extra_paths: Vec<SystemPathBuf>,

    /// The root of the workspace, used for finding first-party modules.
    pub src_root: SystemPathBuf,

    /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary.
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from `PyPI` are installed.
    pub site_packages: Vec<SystemPathBuf>,
}
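For orientation, here is a minimal sketch of how the new `Program` input might be wired up from these settings. The caller function and the concrete paths are illustrative assumptions, not part of the diff:

    use ruff_db::system::SystemPathBuf;

    // Hypothetical caller; any handle implementing the crate's `Db` trait works.
    fn configure(db: &dyn Db) -> anyhow::Result<Program> {
        let settings = ProgramSettings {
            target_version: PythonVersion::PY312,
            search_paths: SearchPathSettings {
                extra_paths: vec![],
                src_root: SystemPathBuf::from("/workspace/src"),
                custom_typeshed: None, // fall back to the vendored typeshed stubs
                site_packages: vec![SystemPathBuf::from(
                    "/workspace/.venv/lib/python3.12/site-packages",
                )],
            },
        };
        // Validates the search paths and creates the salsa singleton input.
        Program::from_settings(db, settings)
    }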
crates/red_knot_python_semantic/src/python_version.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use std::fmt;

/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
    pub major: u8,
    pub minor: u8,
}

impl PythonVersion {
    pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
    pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
    pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
    pub const PY310: PythonVersion = PythonVersion { major: 3, minor: 10 };
    pub const PY311: PythonVersion = PythonVersion { major: 3, minor: 11 };
    pub const PY312: PythonVersion = PythonVersion { major: 3, minor: 12 };
    pub const PY313: PythonVersion = PythonVersion { major: 3, minor: 13 };

    pub fn free_threaded_build_available(self) -> bool {
        self >= PythonVersion::PY313
    }
}

impl Default for PythonVersion {
    fn default() -> Self {
        Self::PY38
    }
}

impl TryFrom<(&str, &str)> for PythonVersion {
    type Error = std::num::ParseIntError;

    fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
        let (major, minor) = value;
        Ok(Self {
            major: major.parse()?,
            minor: minor.parse()?,
        })
    }
}

impl fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PythonVersion { major, minor } = self;
        write!(f, "{major}.{minor}")
    }
}
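A short usage sketch (not part of the diff): ordering comes from the derived `Ord`, and `TryFrom<(&str, &str)>` parses the two components of a version string. The `parse` helper is hypothetical:

    // Hypothetical helper: split "3.13" and parse both halves.
    fn parse(raw: &str) -> Option<PythonVersion> {
        let (major, minor) = raw.split_once('.')?;
        PythonVersion::try_from((major, minor)).ok()
    }

    fn demo() {
        let version = parse("3.13").unwrap();
        assert!(version >= PythonVersion::PY310);         // derived Ord: field-order comparison
        assert!(version.free_threaded_build_available()); // 3.13 or newer
        assert_eq!(version.to_string(), "3.13");          // Display impl above
    }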
@@ -2,6 +2,7 @@ use std::iter::FusedIterator;
 use std::sync::Arc;
 
 use rustc_hash::FxHashMap;
+use salsa::plumbing::AsId;
 
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
@@ -17,6 +18,8 @@ use crate::semantic_index::symbol::{
 };
 use crate::Db;
 
+pub(crate) use self::use_def::UseDefMap;
+
 pub mod ast_ids;
 mod builder;
 pub mod definition;
@@ -24,8 +27,6 @@ pub mod expression;
 pub mod symbol;
 mod use_def;
 
-pub(crate) use self::use_def::UseDefMap;
-
 type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
 
 /// Returns the semantic index for `file`.
@@ -33,7 +34,7 @@ type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
 /// Prefer using [`symbol_table`] when working with symbols from a single scope.
 #[salsa::tracked(return_ref, no_eq)]
 pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
-    let _span = tracing::trace_span!("semantic_index", ?file).entered();
+    let _span = tracing::trace_span!("semantic_index", file = %file.path(db)).entered();
 
     let parsed = parsed_module(db.upcast(), file);
 
@@ -47,8 +48,10 @@ pub(crate) fn semantic_index(db: &dyn Db, file: File) -> SemanticIndex<'_> {
 /// is unchanged.
 #[salsa::tracked]
 pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<SymbolTable> {
-    let _span = tracing::trace_span!("symbol_table", ?scope).entered();
-    let index = semantic_index(db, scope.file(db));
+    let file = scope.file(db);
+    let _span =
+        tracing::trace_span!("symbol_table", scope=?scope.as_id(), file=%file.path(db)).entered();
+    let index = semantic_index(db, file);
 
     index.symbol_table(scope.file_scope_id(db))
 }
@@ -60,8 +63,10 @@ pub(crate) fn symbol_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<Sym
 /// is unchanged.
 #[salsa::tracked]
 pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseDefMap<'db>> {
-    let _span = tracing::trace_span!("use_def_map", ?scope).entered();
-    let index = semantic_index(db, scope.file(db));
+    let file = scope.file(db);
+    let _span =
+        tracing::trace_span!("use_def_map", scope=?scope.as_id(), file=%file.path(db)).entered();
+    let index = semantic_index(db, file);
 
     index.use_def_map(scope.file_scope_id(db))
 }
@@ -69,7 +74,7 @@ pub(crate) fn use_def_map<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc<UseD
 /// Returns the module global scope of `file`.
 #[salsa::tracked]
 pub(crate) fn global_scope(db: &dyn Db, file: File) -> ScopeId<'_> {
-    let _span = tracing::trace_span!("global_scope", ?file).entered();
+    let _span = tracing::trace_span!("global_scope", file = %file.path(db)).entered();
 
     FileScopeId::global().to_scope_id(db, file)
 }
@@ -84,8 +89,6 @@ pub(crate) struct SemanticIndex<'db> {
     scopes: IndexVec<FileScopeId, Scope>,
 
     /// Map expressions to their corresponding scope.
-    /// We can't use [`ExpressionId`] here, because the challenge is how to get from
-    /// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
     scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
 
     /// Map from a node creating a definition to its definition.
@@ -113,7 +116,7 @@ pub(crate) struct SemanticIndex<'db> {
 impl<'db> SemanticIndex<'db> {
     /// Returns the symbol table for a specific scope.
     ///
-    /// Use the Salsa cached [`symbol_table`] query if you only need the
+    /// Use the Salsa cached [`symbol_table()`] query if you only need the
     /// symbol table for a single scope.
     pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
         self.symbol_tables[scope_id].clone()
@@ -121,9 +124,9 @@ impl<'db> SemanticIndex<'db> {
 
     /// Returns the use-def map for a specific scope.
     ///
-    /// Use the Salsa cached [`use_def_map`] query if you only need the
+    /// Use the Salsa cached [`use_def_map()`] query if you only need the
     /// use-def map for a single scope.
-    pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
+    pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap<'db>> {
         self.use_def_maps[scope_id].clone()
     }
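The span changes above all follow the same `tracing` field convention: `?value` records the field via its `Debug` output (an opaque salsa ID), while `%value` records `Display` output (the human-readable file path). A standalone sketch of the difference, with an invented stand-in ID:

    use tracing::trace_span;

    fn demo(path: &str) {
        let file_id = 42_u32; // stand-in for an opaque salsa `File` ID
        // Debug capture: the span field renders as `file = 42`.
        let _debug_span = trace_span!("semantic_index", file = ?file_id).entered();
        // Display capture: the span field renders as the path itself.
        let _display_span = trace_span!("semantic_index", file = %path).entered();
    }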
@@ -304,10 +307,11 @@ mod tests {
     use ruff_db::parsed::parsed_module;
     use ruff_db::system::DbWithTestSystem;
     use ruff_python_ast as ast;
+    use ruff_text_size::{Ranged, TextRange};
 
     use crate::db::tests::TestDb;
     use crate::semantic_index::ast_ids::HasScopedUseId;
-    use crate::semantic_index::definition::DefinitionKind;
+    use crate::semantic_index::definition::{DefinitionKind, DefinitionNode};
     use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
     use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
     use crate::Db;
@@ -370,10 +374,11 @@ mod tests {
         let foo = global_table.symbol_id_by_name("foo").unwrap();
 
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.public_definitions(foo) else {
-            panic!("expected one definition");
-        };
-        assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
+        let definition = use_def.public_definition(foo).unwrap();
+        assert!(matches!(
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::Import(_))
+        ));
     }
 
     #[test]
@@ -407,16 +412,16 @@ mod tests {
         );
 
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.public_definitions(
-            global_table
-                .symbol_id_by_name("foo")
-                .expect("symbol to exist"),
-        ) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .public_definition(
+                global_table
+                    .symbol_id_by_name("foo")
+                    .expect("symbol to exist"),
+            )
+            .unwrap();
         assert!(matches!(
-            definition.node(&db),
-            DefinitionKind::ImportFrom(_)
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::ImportFrom(_))
         ));
     }
 
@@ -434,14 +439,12 @@ mod tests {
             "a symbol used but not defined in a scope should have only the used flag"
         );
         let use_def = use_def_map(&db, scope);
-        let [definition] =
-            use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
-        else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
+            .unwrap();
         assert!(matches!(
-            definition.node(&db),
-            DefinitionKind::Assignment(_)
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::Assignment(_))
         ));
     }
 
@@ -473,14 +476,12 @@ y = 2
         assert_eq!(names(&class_table), vec!["x"]);
 
         let use_def = index.use_def_map(class_scope_id);
-        let [definition] =
-            use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
-        else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
+            .unwrap();
         assert!(matches!(
-            definition.node(&db),
-            DefinitionKind::Assignment(_)
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::Assignment(_))
        ));
     }
 
@@ -511,19 +512,151 @@ y = 2
         assert_eq!(names(&function_table), vec!["x"]);
 
         let use_def = index.use_def_map(function_scope_id);
-        let [definition] = use_def.public_definitions(
-            function_table
-                .symbol_id_by_name("x")
-                .expect("symbol exists"),
-        ) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .public_definition(
+                function_table
+                    .symbol_id_by_name("x")
+                    .expect("symbol exists"),
+            )
+            .unwrap();
         assert!(matches!(
-            definition.node(&db),
-            DefinitionKind::Assignment(_)
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::Assignment(_))
         ));
     }
 
+    /// Test case to validate that the comprehension scope is correctly identified and that the target
+    /// variable is defined only in the comprehension scope and not in the global scope.
+    #[test]
+    fn comprehension_scope() {
+        let TestCase { db, file } = test_case(
+            "
+[x for x in iter1]
+",
+        );
+
+        let index = semantic_index(&db, file);
+        let global_table = index.symbol_table(FileScopeId::global());
+
+        assert_eq!(names(&global_table), vec!["iter1"]);
+
+        let [(comprehension_scope_id, comprehension_scope)] = index
+            .child_scopes(FileScopeId::global())
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("expected one child scope")
+        };
+
+        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
+        assert_eq!(
+            comprehension_scope_id.to_scope_id(&db, file).name(&db),
+            "<listcomp>"
+        );
+
+        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
+
+        assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
+    }
+
+    /// Test case to validate that the `x` variable used in the comprehension is referencing the
+    /// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
+    #[test]
+    fn multiple_generators() {
+        let TestCase { db, file } = test_case(
+            "
+[x for x in iter1 for x in iter2]
+",
+        );
+
+        let index = semantic_index(&db, file);
+        let [(comprehension_scope_id, _)] = index
+            .child_scopes(FileScopeId::global())
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("expected one child scope")
+        };
+
+        let use_def = index.use_def_map(comprehension_scope_id);
+
+        let module = parsed_module(&db, file).syntax();
+        let element = module.body[0]
+            .as_expr_stmt()
+            .unwrap()
+            .value
+            .as_list_comp_expr()
+            .unwrap()
+            .elt
+            .as_name_expr()
+            .unwrap();
+        let element_use_id =
+            element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
+
+        let definition = use_def.definition_for_use(element_use_id).unwrap();
+        let DefinitionKind::Node(DefinitionNode::Comprehension(comprehension)) =
+            definition.kind(&db)
+        else {
+            panic!("expected generator definition")
+        };
+        let ast::Comprehension { target, .. } = comprehension.node();
+        let name = target.as_name_expr().unwrap().id().as_str();
+
+        assert_eq!(name, "x");
+        assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
+    }
+
+    /// Test case to validate that the nested comprehension creates a new scope which is a child of
+    /// the outer comprehension scope and the variables are correctly defined in the respective
+    /// scopes.
+    #[test]
+    fn nested_comprehensions() {
+        let TestCase { db, file } = test_case(
+            "
+[{x for x in iter2} for y in iter1]
+",
+        );
+
+        let index = semantic_index(&db, file);
+        let global_table = index.symbol_table(FileScopeId::global());
+
+        assert_eq!(names(&global_table), vec!["iter1"]);
+
+        let [(comprehension_scope_id, comprehension_scope)] = index
+            .child_scopes(FileScopeId::global())
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("expected one child scope")
+        };
+
+        assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
+        assert_eq!(
+            comprehension_scope_id.to_scope_id(&db, file).name(&db),
+            "<listcomp>"
+        );
+
+        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
+
+        assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);
+
+        let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
+            .child_scopes(comprehension_scope_id)
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("expected one inner comprehension scope")
+        };
+
+        assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
+        assert_eq!(
+            inner_comprehension_scope_id
+                .to_scope_id(&db, file)
+                .name(&db),
+            "<setcomp>"
+        );
+
+        let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);
+
+        assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
+    }
+
     #[test]
     fn dupes() {
         let TestCase { db, file } = test_case(
@@ -557,14 +690,17 @@ def func():
         assert_eq!(names(&func2_table), vec!["y"]);
 
         let use_def = index.use_def_map(FileScopeId::global());
-        let [definition] = use_def.public_definitions(
-            global_table
-                .symbol_id_by_name("func")
-                .expect("symbol exists"),
-        ) else {
-            panic!("expected one definition");
-        };
-        assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
+        let definition = use_def
+            .public_definition(
+                global_table
+                    .symbol_id_by_name("func")
+                    .expect("symbol exists"),
+            )
+            .unwrap();
+        assert!(matches!(
+            definition.kind(&db),
+            DefinitionKind::Node(DefinitionNode::Function(_))
+        ));
     }
 
     #[test]
@@ -664,10 +800,9 @@ class C[T]:
         };
         let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.use_definitions(x_use_id) else {
-            panic!("expected one definition");
-        };
-        let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
+        let definition = use_def.definition_for_use(x_use_id).unwrap();
+        let DefinitionKind::Node(DefinitionNode::Assignment(assignment)) = definition.kind(&db)
+        else {
            panic!("should be an assignment definition")
        };
        let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
@@ -26,9 +26,9 @@ use crate::Db;
 /// ```
 #[derive(Debug)]
 pub(crate) struct AstIds {
-    /// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
+    /// Maps expressions to their expression id.
     expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
-    /// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
+    /// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
     uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
 }
@@ -13,15 +13,15 @@ use crate::ast_node_ref::AstNodeRef;
 use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
 use crate::semantic_index::ast_ids::AstIdsBuilder;
 use crate::semantic_index::definition::{
-    AssignmentDefinitionNodeRef, Definition, DefinitionNodeKey, DefinitionNodeRef,
-    ImportFromDefinitionNodeRef,
+    AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionKind,
+    DefinitionNodeKey, DefinitionNodeRef, ImportFromDefinitionNodeRef,
 };
 use crate::semantic_index::expression::Expression;
 use crate::semantic_index::symbol::{
     FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
     SymbolTableBuilder,
 };
-use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
+use crate::semantic_index::use_def::{BasicBlockId, UseDefMapBuilder};
 use crate::semantic_index::SemanticIndex;
 use crate::Db;
 
@@ -31,8 +31,10 @@ pub(super) struct SemanticIndexBuilder<'db> {
     file: File,
     module: &'db ParsedModule,
     scope_stack: Vec<FileScopeId>,
-    /// the assignment we're currently visiting
+    /// The assignment we're currently visiting.
     current_assignment: Option<CurrentAssignment<'db>>,
+    /// Basic block ending at each `break` in the current loop.
+    loop_breaks: Vec<BasicBlockId>,
 
     // Semantic Index fields
     scopes: IndexVec<FileScopeId, Scope>,
@@ -54,6 +56,7 @@ impl<'db> SemanticIndexBuilder<'db> {
             module: parsed,
             scope_stack: Vec::new(),
             current_assignment: None,
+            loop_breaks: vec![],
 
             scopes: IndexVec::new(),
             symbol_tables: IndexVec::new(),
@@ -95,14 +98,19 @@ impl<'db> SemanticIndexBuilder<'db> {
 
         let file_scope_id = self.scopes.push(scope);
         self.symbol_tables.push(SymbolTableBuilder::new());
-        self.use_def_maps.push(UseDefMapBuilder::new());
+        self.use_def_maps
+            .push(UseDefMapBuilder::new(self.db, self.file, file_scope_id));
         let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
 
         #[allow(unsafe_code)]
         // SAFETY: `node` is guaranteed to be a child of `self.module`
-        let scope_id = ScopeId::new(self.db, self.file, file_scope_id, unsafe {
-            node.to_kind(self.module.clone())
-        });
+        let scope_id = ScopeId::new(
+            self.db,
+            self.file,
+            file_scope_id,
+            unsafe { node.to_kind(self.module.clone()) },
+            countme::Count::default(),
+        );
 
         self.scope_ids_by_scope.push(scope_id);
         self.scopes_by_node.insert(node.node_key(), file_scope_id);
@@ -135,26 +143,40 @@ impl<'db> SemanticIndexBuilder<'db> {
         &mut self.ast_ids[scope_id]
     }
 
-    fn flow_snapshot(&mut self) -> FlowSnapshot {
-        self.current_use_def_map().snapshot()
+    /// Start a new basic block and return the previous block's ID.
+    fn next_block(&mut self) -> BasicBlockId {
+        self.current_use_def_map().next_block(/* sealed */ true)
     }
 
-    fn flow_restore(&mut self, state: FlowSnapshot) {
-        self.current_use_def_map().restore(state);
+    /// Start a new unsealed basic block and return the previous block's ID.
+    fn next_block_unsealed(&mut self) -> BasicBlockId {
+        self.current_use_def_map().next_block(/* sealed */ false)
    }
 
-    fn flow_merge(&mut self, state: &FlowSnapshot) {
-        self.current_use_def_map().merge(state);
+    /// Seal an unsealed basic block.
+    fn seal_block(&mut self) {
+        self.current_use_def_map().seal_current_block();
+    }
+
+    /// Start a new basic block with the given block as predecessor.
+    fn new_block_from(&mut self, predecessor: BasicBlockId) {
+        self.current_use_def_map()
+            .new_block_from(predecessor, /* sealed */ true);
+    }
+
+    /// Add a predecessor to the current block.
+    fn merge_block(&mut self, predecessor: BasicBlockId) {
+        self.current_use_def_map().merge_block(predecessor);
+    }
+
+    /// Add predecessors to the current block.
+    fn merge_blocks(&mut self, predecessors: Vec<BasicBlockId>) {
+        self.current_use_def_map().merge_blocks(predecessors);
     }
 
     fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
         let symbol_table = self.current_symbol_table();
-        let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
-        if added {
-            let use_def_map = self.current_use_def_map();
-            use_def_map.add_symbol(symbol_id);
-        }
-        symbol_id
+        symbol_table.add_or_update_symbol(name, flags)
     }
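Taken together, the helpers above replace the old snapshot/restore/merge trio with explicit basic blocks. A condensed sketch of how `if`/`else` lowering composes them, mirroring the `Stmt::If` and `Expr::If` arms later in this diff; the free function itself is hypothetical:

    fn lower_if_else(builder: &mut SemanticIndexBuilder<'_>) {
        // ... the `test` expression has just been visited ...
        let pre_if = builder.next_block();    // block that ends at the test
        // ... visit the `if` body here ...
        let post_body = builder.next_block(); // block that ends the body
        builder.new_block_from(pre_if);       // `else` runs only if the body didn't
        // ... visit the `else` body here ...
        builder.next_block_unsealed();        // join block; predecessors still arriving
        builder.merge_block(post_body);       // the `if` body also reaches the join
        builder.seal_block();                 // all predecessors are now known
    }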
fn add_definition<'a>(
|
||||
@@ -162,16 +184,15 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
symbol: ScopedSymbolId,
|
||||
definition_node: impl Into<DefinitionNodeRef<'a>>,
|
||||
) -> Definition<'db> {
|
||||
let definition_node = definition_node.into();
|
||||
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
symbol,
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
definition_node.into_owned(self.module.clone())
|
||||
},
|
||||
DefinitionKind::Node(unsafe { definition_node.into_owned(self.module.clone()) }),
|
||||
countme::Count::default(),
|
||||
);
|
||||
|
||||
self.definitions_by_node
|
||||
@@ -193,6 +214,7 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
unsafe {
|
||||
AstNodeRef::new(self.module.clone(), expression_node)
|
||||
},
|
||||
countme::Count::default(),
|
||||
);
|
||||
self.expressions_by_node
|
||||
.insert(expression_node.into(), expression);
|
||||
@@ -200,30 +222,38 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
|
||||
fn with_type_params(
|
||||
&mut self,
|
||||
with_params: &WithTypeParams,
|
||||
with_scope: NodeWithScopeRef,
|
||||
type_params: Option<&'db ast::TypeParams>,
|
||||
nested: impl FnOnce(&mut Self) -> FileScopeId,
|
||||
) -> FileScopeId {
|
||||
let type_params = with_params.type_parameters();
|
||||
|
||||
if let Some(type_params) = type_params {
|
||||
let with_scope = match with_params {
|
||||
WithTypeParams::ClassDef { node, .. } => {
|
||||
NodeWithScopeRef::ClassTypeParameters(node)
|
||||
}
|
||||
WithTypeParams::FunctionDef { node, .. } => {
|
||||
NodeWithScopeRef::FunctionTypeParameters(node)
|
||||
}
|
||||
};
|
||||
|
||||
self.push_scope(with_scope);
|
||||
|
||||
for type_param in &type_params.type_params {
|
||||
let name = match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar { name, .. }) => name,
|
||||
ast::TypeParam::ParamSpec(ast::TypeParamParamSpec { name, .. }) => name,
|
||||
ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple { name, .. }) => name,
|
||||
let (name, bound, default) = match type_param {
|
||||
ast::TypeParam::TypeVar(ast::TypeParamTypeVar {
|
||||
range: _,
|
||||
name,
|
||||
bound,
|
||||
default,
|
||||
}) => (name, bound, default),
|
||||
ast::TypeParam::ParamSpec(ast::TypeParamParamSpec {
|
||||
name, default, ..
|
||||
}) => (name, &None, default),
|
||||
ast::TypeParam::TypeVarTuple(ast::TypeParamTypeVarTuple {
|
||||
name,
|
||||
default,
|
||||
..
|
||||
}) => (name, &None, default),
|
||||
};
|
||||
// TODO create Definition for typevars
|
||||
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
|
||||
if let Some(bound) = bound {
|
||||
self.visit_expr(bound);
|
||||
}
|
||||
if let Some(default) = default {
|
||||
self.visit_expr(default);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -236,6 +266,49 @@ impl<'db> SemanticIndexBuilder<'db> {
         nested_scope
     }
 
+    /// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
+    /// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`).
+    ///
+    /// [`Comprehension`]: ast::Comprehension
+    fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
+        let mut generators_iter = generators.iter();
+
+        let Some(generator) = generators_iter.next() else {
+            unreachable!("Expression must contain at least one generator");
+        };
+
+        // The `iter` of the first generator is evaluated in the outer scope, while all subsequent
+        // nodes are evaluated in the inner scope.
+        self.visit_expr(&generator.iter);
+        self.push_scope(scope);
+
+        self.current_assignment = Some(CurrentAssignment::Comprehension {
+            node: generator,
+            first: true,
+        });
+        self.visit_expr(&generator.target);
+        self.current_assignment = None;
+
+        for expr in &generator.ifs {
+            self.visit_expr(expr);
+        }
+
+        for generator in generators_iter {
+            self.visit_expr(&generator.iter);
+
+            self.current_assignment = Some(CurrentAssignment::Comprehension {
+                node: generator,
+                first: false,
+            });
+            self.visit_expr(&generator.target);
+            self.current_assignment = None;
+
+            for expr in &generator.ifs {
+                self.visit_expr(expr);
+            }
+        }
+    }
+
     pub(super) fn build(mut self) -> SemanticIndex<'db> {
         let module = self.module;
         self.visit_body(module.suite());
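The ordering in `visit_generators` encodes Python's scoping rule for comprehensions: only the first generator's `iter` is evaluated in the enclosing scope. A condensed, hypothetical walkthrough for a list comprehension, using only items that appear elsewhere in this diff:

    // For `[x for x in iter1]`: `iter1` resolves in the enclosing scope,
    // while the target `x` (and any later generators) bind in the new scope.
    fn visit_list_comp<'db>(builder: &mut SemanticIndexBuilder<'db>, comp: &'db ast::ExprListComp) {
        let first = &comp.generators[0];
        builder.visit_expr(&first.iter);                               // outer scope
        builder.push_scope(NodeWithScopeRef::ListComprehension(comp)); // enter inner scope
        builder.visit_expr(&first.target);                             // binds `x` inside
    }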
@@ -304,10 +377,11 @@ where
         self.add_definition(symbol, function_def);
 
         self.with_type_params(
-            &WithTypeParams::FunctionDef { node: function_def },
+            NodeWithScopeRef::FunctionTypeParameters(function_def),
+            function_def.type_params.as_deref(),
             |builder| {
                 builder.visit_parameters(&function_def.parameters);
-                for expr in &function_def.returns {
+                if let Some(expr) = &function_def.returns {
                     builder.visit_annotation(expr);
                 }
 
@@ -326,16 +400,20 @@ where
         self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
         self.add_definition(symbol, class);
 
-        self.with_type_params(&WithTypeParams::ClassDef { node: class }, |builder| {
-            if let Some(arguments) = &class.arguments {
-                builder.visit_arguments(arguments);
-            }
+        self.with_type_params(
+            NodeWithScopeRef::ClassTypeParameters(class),
+            class.type_params.as_deref(),
+            |builder| {
+                if let Some(arguments) = &class.arguments {
+                    builder.visit_arguments(arguments);
+                }
 
-            builder.push_scope(NodeWithScopeRef::Class(class));
-            builder.visit_body(&class.body);
+                builder.push_scope(NodeWithScopeRef::Class(class));
+                builder.visit_body(&class.body);
 
-            builder.pop_scope()
-        });
+                builder.pop_scope()
+            },
+        );
     }
     ast::Stmt::Import(node) => {
         for alias in &node.names {
@@ -376,36 +454,28 @@ where
         debug_assert!(self.current_assignment.is_none());
         // TODO deferred annotation visiting
         self.visit_expr(&node.annotation);
-        match &node.value {
-            Some(value) => {
-                self.visit_expr(value);
-                self.current_assignment = Some(node.into());
-                self.visit_expr(&node.target);
-                self.current_assignment = None;
-            }
-            None => {
-                // TODO annotation-only assignments
-                self.visit_expr(&node.target);
-            }
+        if let Some(value) = &node.value {
+            self.visit_expr(value);
         }
+        self.current_assignment = Some(node.into());
+        self.visit_expr(&node.target);
+        self.current_assignment = None;
     }
     ast::Stmt::If(node) => {
         self.visit_expr(&node.test);
-        let pre_if = self.flow_snapshot();
+        let pre_if = self.next_block();
         self.visit_body(&node.body);
-        let mut post_clauses: Vec<FlowSnapshot> = vec![];
+        let mut post_clauses: Vec<BasicBlockId> = vec![];
         for clause in &node.elif_else_clauses {
-            // snapshot after every block except the last; the last one will just become
-            // the state that we merge the other snapshots into
-            post_clauses.push(self.flow_snapshot());
+            post_clauses.push(self.next_block());
             // we can only take an elif/else branch if none of the previous ones were
             // taken, so the block entry state is always `pre_if`
-            self.flow_restore(pre_if.clone());
+            self.new_block_from(pre_if);
             self.visit_elif_else_clause(clause);
         }
-        for post_clause_state in post_clauses {
-            self.flow_merge(&post_clause_state);
-        }
+        self.next_block_unsealed();
         let has_else = node
             .elif_else_clauses
             .last()
@@ -413,8 +483,39 @@ where
         if !has_else {
             // if there's no else clause, then it's possible we took none of the branches,
             // and the pre_if state can reach here
-            self.flow_merge(&pre_if);
+            self.merge_block(pre_if);
         }
+        self.merge_blocks(post_clauses);
+        self.seal_block();
+    }
+    ast::Stmt::While(node) => {
+        self.visit_expr(&node.test);
+
+        let pre_loop = self.next_block();
+
+        // Save aside any break states from an outer loop
+        let saved_break_states = std::mem::take(&mut self.loop_breaks);
+        self.visit_body(&node.body);
+        // Get the break states from the body of this loop, and restore the saved outer
+        // ones.
+        let break_states = std::mem::replace(&mut self.loop_breaks, saved_break_states);
+
+        // We may execute the `else` clause without ever executing the body, so merge in
+        // the pre-loop state before visiting `else`.
+        self.next_block_unsealed();
+        self.merge_block(pre_loop);
+        self.seal_block();
+        self.visit_body(&node.orelse);
+
+        // Breaking out of a while loop bypasses the `else` clause, so merge in the break
+        // states after visiting `else`.
+        self.next_block_unsealed();
+        self.merge_blocks(break_states);
+        self.seal_block();
+    }
+    ast::Stmt::Break(_) => {
+        let block_id = self.next_block();
+        self.loop_breaks.push(block_id);
+    }
     _ => {
         walk_stmt(self, stmt);
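The `while` lowering above uses a small but easy-to-miss idiom for nested loops: `std::mem::take` stashes the outer loop's pending `break` blocks, and `std::mem::replace` restores them while capturing the inner loop's. Isolated as a self-contained sketch (the helper and its names are invented for illustration):

    /// Run `visit_body` with a temporarily empty stack, returning what it
    /// pushed and leaving the caller's original contents intact.
    fn with_fresh_stack<T>(stack: &mut Vec<T>, visit_body: impl FnOnce(&mut Vec<T>)) -> Vec<T> {
        let saved = std::mem::take(stack);   // stack is now empty for the inner loop
        visit_body(stack);                   // body pushes the inner loop's breaks
        std::mem::replace(stack, saved)      // restore outer state, return inner breaks
    }

    fn demo() {
        let mut breaks: Vec<u32> = vec![1]; // pretend an outer loop pushed block 1
        let inner = with_fresh_stack(&mut breaks, |s| s.push(2));
        assert_eq!(inner, vec![2]);
        assert_eq!(breaks, vec![1]);
    }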
@@ -428,8 +529,7 @@ where
         self.current_ast_ids().record_expression(expr);
 
         match expr {
-            ast::Expr::Name(name_node) => {
-                let ast::ExprName { id, ctx, .. } = name_node;
+            ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
                 let flags = match ctx {
                     ast::ExprContext::Load => SymbolFlags::IS_USED,
                     ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
@@ -452,8 +552,17 @@ where
                     self.add_definition(symbol, ann_assign);
                 }
                 Some(CurrentAssignment::Named(named)) => {
+                    // TODO(dhruvmanila): If the current scope is a comprehension, then the
+                    // named expression is implicitly nonlocal. This is yet to be
+                    // implemented.
                     self.add_definition(symbol, named);
                 }
+                Some(CurrentAssignment::Comprehension { node, first }) => {
+                    self.add_definition(
+                        symbol,
+                        ComprehensionDefinitionNodeRef { node, first },
+                    );
+                }
                 None => {}
             }
         }
@@ -473,6 +582,13 @@ where
                 self.current_assignment = None;
                 self.visit_expr(&node.value);
             }
+            ast::Expr::Lambda(lambda) => {
+                if let Some(parameters) = &lambda.parameters {
+                    self.visit_parameters(parameters);
+                }
+                self.push_scope(NodeWithScopeRef::Lambda(lambda));
+                self.visit_expr(lambda.body.as_ref());
+            }
             ast::Expr::If(ast::ExprIf {
                 body, test, orelse, ..
             }) => {
@@ -480,30 +596,74 @@ where
                 // AST inspection, so we can't simplify here, need to record test expression for
                 // later checking)
                 self.visit_expr(test);
-                let pre_if = self.flow_snapshot();
+                let pre_if = self.next_block();
                 self.visit_expr(body);
-                let post_body = self.flow_snapshot();
-                self.flow_restore(pre_if);
+                let post_body = self.next_block();
+                self.new_block_from(pre_if);
                 self.visit_expr(orelse);
-                self.flow_merge(&post_body);
+                self.next_block_unsealed();
+                self.merge_block(post_body);
+                self.seal_block();
             }
+            ast::Expr::ListComp(
+                list_comprehension @ ast::ExprListComp {
+                    elt, generators, ..
+                },
+            ) => {
+                self.visit_generators(
+                    NodeWithScopeRef::ListComprehension(list_comprehension),
+                    generators,
+                );
+                self.visit_expr(elt);
+            }
+            ast::Expr::SetComp(
+                set_comprehension @ ast::ExprSetComp {
+                    elt, generators, ..
+                },
+            ) => {
+                self.visit_generators(
+                    NodeWithScopeRef::SetComprehension(set_comprehension),
+                    generators,
+                );
+                self.visit_expr(elt);
+            }
+            ast::Expr::Generator(
+                generator @ ast::ExprGenerator {
+                    elt, generators, ..
+                },
+            ) => {
+                self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
+                self.visit_expr(elt);
+            }
+            ast::Expr::DictComp(
+                dict_comprehension @ ast::ExprDictComp {
+                    key,
+                    value,
+                    generators,
+                    ..
+                },
+            ) => {
+                self.visit_generators(
+                    NodeWithScopeRef::DictComprehension(dict_comprehension),
+                    generators,
+                );
+                self.visit_expr(key);
+                self.visit_expr(value);
+            }
             _ => {
                 walk_expr(self, expr);
             }
         }
+
+        if matches!(
+            expr,
+            ast::Expr::Lambda(_)
+                | ast::Expr::ListComp(_)
+                | ast::Expr::SetComp(_)
+                | ast::Expr::Generator(_)
+                | ast::Expr::DictComp(_)
+        ) {
+            self.pop_scope();
+        }
     }
 }
 
-enum WithTypeParams<'node> {
-    ClassDef { node: &'node ast::StmtClassDef },
-    FunctionDef { node: &'node ast::StmtFunctionDef },
-}
-
-impl<'node> WithTypeParams<'node> {
-    fn type_parameters(&self) -> Option<&'node ast::TypeParams> {
-        match self {
-            WithTypeParams::ClassDef { node, .. } => node.type_params.as_deref(),
-            WithTypeParams::FunctionDef { node, .. } => node.type_params.as_deref(),
-        }
-    }
-}
@@ -513,6 +673,10 @@ enum CurrentAssignment<'a> {
     Assign(&'a ast::StmtAssign),
     AnnAssign(&'a ast::StmtAnnAssign),
     Named(&'a ast::ExprNamed),
+    Comprehension {
+        node: &'a ast::Comprehension,
+        first: bool,
+    },
 }
 
 impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
@@ -1,5 +1,6 @@
 use ruff_db::files::File;
 use ruff_db::parsed::ParsedModule;
+use ruff_index::newtype_index;
 use ruff_python_ast as ast;
 
 use crate::ast_node_ref::AstNodeRef;
@@ -8,7 +9,7 @@ use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
 use crate::Db;
 
 #[salsa::tracked]
-pub struct Definition<'db> {
+pub(crate) struct Definition<'db> {
     /// The file in which the definition occurs.
     #[id]
     pub(crate) file: File,
@@ -23,7 +24,10 @@ pub struct Definition<'db> {
 
     #[no_eq]
     #[return_ref]
-    pub(crate) node: DefinitionKind,
+    pub(crate) kind: DefinitionKind,
+
+    #[no_eq]
+    count: countme::Count<Definition<'static>>,
 }
 
 impl<'db> Definition<'db> {
@@ -32,6 +36,22 @@ impl<'db> Definition<'db> {
     }
 }
 
+#[derive(Clone, Debug)]
+pub(crate) enum DefinitionKind {
+    /// Inserted at control-flow merge points, if multiple definitions can reach the merge point.
+    ///
+    /// Operands are not kept inline, since it's not possible to construct cyclically-referential
+    /// Salsa tracked structs; they are kept instead in the
+    /// [`UseDefMap`](super::use_def::UseDefMap).
+    Phi(ScopedPhiId),
+
+    /// An assignment to the symbol.
+    Node(DefinitionNode),
+}
+
+#[newtype_index]
+pub(crate) struct ScopedPhiId;
+
 #[derive(Copy, Clone, Debug)]
 pub(crate) enum DefinitionNodeRef<'a> {
     Import(&'a ast::Alias),
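For readers new to SSA-style terminology: a phi definition is a placeholder meaning "one of several definitions, depending on which predecessor block ran". A conceptual sketch (simplified and invented for illustration; not the crate's real API) of what the side table keyed by `ScopedPhiId` holds:

    // After `if flag: x = 1` / `else: x = 2`, the join block sees a single phi
    // definition for `x` whose operands are the two branch definitions. Keeping
    // operands in a side table avoids cyclic references between salsa tracked
    // structs.
    struct PhiOperands {
        operands: Vec<u32>, // stand-ins for the `Definition` IDs that reach the merge
    }

    fn demo() {
        let phi_for_x = PhiOperands { operands: vec![7, 9] }; // defs from the two branches
        assert_eq!(phi_for_x.operands.len(), 2);
    }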
@@ -41,6 +61,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
     NamedExpression(&'a ast::ExprNamed),
     Assignment(AssignmentDefinitionNodeRef<'a>),
     AnnotatedAssignment(&'a ast::StmtAnnAssign),
+    Comprehension(ComprehensionDefinitionNodeRef<'a>),
 }
 
 impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -85,6 +106,12 @@ impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
     }
 }
 
+impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
+    fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
+        Self::Comprehension(node)
+    }
+}
+
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct ImportFromDefinitionNodeRef<'a> {
     pub(crate) node: &'a ast::StmtImportFrom,
@@ -97,36 +124,48 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
     pub(crate) target: &'a ast::ExprName,
 }
 
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
+    pub(crate) node: &'a ast::Comprehension,
+    pub(crate) first: bool,
+}
+
 impl DefinitionNodeRef<'_> {
     #[allow(unsafe_code)]
-    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
+    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionNode {
         match self {
             DefinitionNodeRef::Import(alias) => {
-                DefinitionKind::Import(AstNodeRef::new(parsed, alias))
+                DefinitionNode::Import(AstNodeRef::new(parsed, alias))
             }
             DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
-                DefinitionKind::ImportFrom(ImportFromDefinitionKind {
+                DefinitionNode::ImportFrom(ImportFromDefinitionNode {
                     node: AstNodeRef::new(parsed, node),
                     alias_index,
                 })
             }
             DefinitionNodeRef::Function(function) => {
-                DefinitionKind::Function(AstNodeRef::new(parsed, function))
+                DefinitionNode::Function(AstNodeRef::new(parsed, function))
             }
             DefinitionNodeRef::Class(class) => {
-                DefinitionKind::Class(AstNodeRef::new(parsed, class))
+                DefinitionNode::Class(AstNodeRef::new(parsed, class))
             }
             DefinitionNodeRef::NamedExpression(named) => {
-                DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
+                DefinitionNode::NamedExpression(AstNodeRef::new(parsed, named))
             }
             DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
-                DefinitionKind::Assignment(AssignmentDefinitionKind {
+                DefinitionNode::Assignment(AssignmentDefinitionNode {
                     assignment: AstNodeRef::new(parsed.clone(), assignment),
                     target: AstNodeRef::new(parsed, target),
                 })
             }
             DefinitionNodeRef::AnnotatedAssignment(assign) => {
-                DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
+                DefinitionNode::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
             }
+            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
+                DefinitionNode::Comprehension(ComprehensionDefinitionNode {
+                    node: AstNodeRef::new(parsed, node),
+                    first,
+                })
+            }
         }
     }
@@ -145,28 +184,46 @@ impl DefinitionNodeRef<'_> {
                 target,
             }) => target.into(),
             Self::AnnotatedAssignment(node) => node.into(),
+            Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
         }
     }
 }
 
 #[derive(Clone, Debug)]
-pub enum DefinitionKind {
+pub enum DefinitionNode {
     Import(AstNodeRef<ast::Alias>),
-    ImportFrom(ImportFromDefinitionKind),
+    ImportFrom(ImportFromDefinitionNode),
     Function(AstNodeRef<ast::StmtFunctionDef>),
     Class(AstNodeRef<ast::StmtClassDef>),
     NamedExpression(AstNodeRef<ast::ExprNamed>),
-    Assignment(AssignmentDefinitionKind),
+    Assignment(AssignmentDefinitionNode),
     AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
+    Comprehension(ComprehensionDefinitionNode),
 }
 
+#[derive(Clone, Debug)]
+pub struct ComprehensionDefinitionNode {
+    node: AstNodeRef<ast::Comprehension>,
+    first: bool,
+}
+
+impl ComprehensionDefinitionNode {
+    pub(crate) fn node(&self) -> &ast::Comprehension {
+        self.node.node()
+    }
+
+    pub(crate) fn is_first(&self) -> bool {
+        self.first
+    }
+}
+
 #[derive(Clone, Debug)]
-pub struct ImportFromDefinitionKind {
+pub struct ImportFromDefinitionNode {
     node: AstNodeRef<ast::StmtImportFrom>,
     alias_index: usize,
 }
 
-impl ImportFromDefinitionKind {
+impl ImportFromDefinitionNode {
     pub(crate) fn import(&self) -> &ast::StmtImportFrom {
         self.node.node()
     }
@@ -178,12 +235,12 @@ impl ImportFromDefinitionKind {
 
 #[derive(Clone, Debug)]
 #[allow(dead_code)]
-pub struct AssignmentDefinitionKind {
+pub struct AssignmentDefinitionNode {
     assignment: AstNodeRef<ast::StmtAssign>,
     target: AstNodeRef<ast::ExprName>,
 }
 
-impl AssignmentDefinitionKind {
+impl AssignmentDefinitionNode {
     pub(crate) fn assignment(&self) -> &ast::StmtAssign {
         self.assignment.node()
     }
@@ -227,3 +284,9 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
         Self(NodeKey::from_node(node))
     }
 }
+
+impl From<&ast::Comprehension> for DefinitionNodeKey {
+    fn from(node: &ast::Comprehension) -> Self {
+        Self(NodeKey::from_node(node))
+    }
+}
@@ -22,6 +22,9 @@ pub(crate) struct Expression<'db> {
     #[no_eq]
     #[return_ref]
     pub(crate) node: AstNodeRef<ast::Expr>,
+
+    #[no_eq]
+    count: countme::Count<Expression<'static>>,
 }
 
 impl<'db> Expression<'db> {
 
@@ -100,6 +100,9 @@ pub struct ScopeId<'db> {
     #[no_eq]
     #[return_ref]
     pub node: NodeWithScopeKind,
+
+    #[no_eq]
+    count: countme::Count<ScopeId<'static>>,
 }
 
 impl<'db> ScopeId<'db> {
@@ -111,6 +114,10 @@ impl<'db> ScopeId<'db> {
             NodeWithScopeKind::ClassTypeParameters(_)
                 | NodeWithScopeKind::FunctionTypeParameters(_)
                 | NodeWithScopeKind::Function(_)
+                | NodeWithScopeKind::ListComprehension(_)
+                | NodeWithScopeKind::SetComprehension(_)
+                | NodeWithScopeKind::DictComprehension(_)
+                | NodeWithScopeKind::GeneratorExpression(_)
         )
     }
 
@@ -123,6 +130,11 @@ impl<'db> ScopeId<'db> {
             }
             NodeWithScopeKind::Function(function)
             | NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
+            NodeWithScopeKind::Lambda(_) => "<lambda>",
+            NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
+            NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
+            NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
+            NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
         }
     }
 }
@@ -166,6 +178,13 @@ pub enum ScopeKind {
     Annotation,
     Class,
     Function,
+    Comprehension,
+}
+
+impl ScopeKind {
+    pub const fn is_comprehension(self) -> bool {
+        matches!(self, ScopeKind::Comprehension)
+    }
 }
 
 /// Symbol table for a specific [`Scope`].
@@ -253,7 +272,7 @@ impl SymbolTableBuilder {
         &mut self,
         name: Name,
         flags: SymbolFlags,
-    ) -> (ScopedSymbolId, bool) {
+    ) -> ScopedSymbolId {
         let hash = SymbolTable::hash_name(&name);
         let entry = self
             .table
@@ -266,7 +285,7 @@ impl SymbolTableBuilder {
                 let symbol = &mut self.table.symbols[*entry.key()];
                 symbol.insert_flags(flags);
 
-                (*entry.key(), false)
+                *entry.key()
             }
             RawEntryMut::Vacant(entry) => {
                 let mut symbol = Symbol::new(name);
@@ -276,7 +295,7 @@ impl SymbolTableBuilder {
                 entry.insert_with_hasher(hash, id, (), |id| {
                     SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
                 });
-                (id, true)
+                id
             }
         }
     }
@@ -293,8 +312,13 @@ pub(crate) enum NodeWithScopeRef<'a> {
     Module,
     Class(&'a ast::StmtClassDef),
     Function(&'a ast::StmtFunctionDef),
+    Lambda(&'a ast::ExprLambda),
     FunctionTypeParameters(&'a ast::StmtFunctionDef),
     ClassTypeParameters(&'a ast::StmtClassDef),
+    ListComprehension(&'a ast::ExprListComp),
+    SetComprehension(&'a ast::ExprSetComp),
+    DictComprehension(&'a ast::ExprDictComp),
+    GeneratorExpression(&'a ast::ExprGenerator),
 }
 
 impl NodeWithScopeRef<'_> {
@@ -312,11 +336,26 @@ impl NodeWithScopeRef<'_> {
             NodeWithScopeRef::Function(function) => {
                 NodeWithScopeKind::Function(AstNodeRef::new(module, function))
             }
+            NodeWithScopeRef::Lambda(lambda) => {
+                NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
+            }
             NodeWithScopeRef::FunctionTypeParameters(function) => {
                 NodeWithScopeKind::FunctionTypeParameters(AstNodeRef::new(module, function))
             }
             NodeWithScopeRef::ClassTypeParameters(class) => {
-                NodeWithScopeKind::Class(AstNodeRef::new(module, class))
+                NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
             }
+            NodeWithScopeRef::ListComprehension(comprehension) => {
+                NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
+            }
+            NodeWithScopeRef::SetComprehension(comprehension) => {
+                NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
+            }
+            NodeWithScopeRef::DictComprehension(comprehension) => {
+                NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
+            }
+            NodeWithScopeRef::GeneratorExpression(generator) => {
+                NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
+            }
         }
     }
@@ -326,8 +365,13 @@ impl NodeWithScopeRef<'_> {
             NodeWithScopeRef::Module => ScopeKind::Module,
             NodeWithScopeRef::Class(_) => ScopeKind::Class,
             NodeWithScopeRef::Function(_) => ScopeKind::Function,
+            NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
             NodeWithScopeRef::FunctionTypeParameters(_)
             | NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
+            NodeWithScopeRef::ListComprehension(_)
+            | NodeWithScopeRef::SetComprehension(_)
+            | NodeWithScopeRef::DictComprehension(_)
+            | NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
         }
     }
 
@@ -338,12 +382,27 @@ impl NodeWithScopeRef<'_> {
             NodeWithScopeRef::Function(function) => {
                 NodeWithScopeKey::Function(NodeKey::from_node(function))
             }
+            NodeWithScopeRef::Lambda(lambda) => {
+                NodeWithScopeKey::Lambda(NodeKey::from_node(lambda))
+            }
             NodeWithScopeRef::FunctionTypeParameters(function) => {
                 NodeWithScopeKey::FunctionTypeParameters(NodeKey::from_node(function))
             }
             NodeWithScopeRef::ClassTypeParameters(class) => {
                 NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
             }
+            NodeWithScopeRef::ListComprehension(comprehension) => {
+                NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
+            }
+            NodeWithScopeRef::SetComprehension(comprehension) => {
+                NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
+            }
+            NodeWithScopeRef::DictComprehension(comprehension) => {
+                NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
+            }
+            NodeWithScopeRef::GeneratorExpression(generator) => {
+                NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
+            }
         }
     }
 }
@@ -356,6 +415,11 @@ pub enum NodeWithScopeKind {
     ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
     Function(AstNodeRef<ast::StmtFunctionDef>),
     FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
+    Lambda(AstNodeRef<ast::ExprLambda>),
+    ListComprehension(AstNodeRef<ast::ExprListComp>),
+    SetComprehension(AstNodeRef<ast::ExprSetComp>),
+    DictComprehension(AstNodeRef<ast::ExprDictComp>),
+    GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -365,4 +429,9 @@ pub(crate) enum NodeWithScopeKey {
     ClassTypeParameters(NodeKey),
     Function(NodeKey),
     FunctionTypeParameters(NodeKey),
+    Lambda(NodeKey),
+    ListComprehension(NodeKey),
+    SetComprehension(NodeKey),
+    DictComprehension(NodeKey),
+    GeneratorExpression(NodeKey),
 }
@@ -56,298 +56,323 @@
 //! visible at the end of the scope.
 //!
 //! The data structure we build to answer these two questions is the `UseDefMap`. It has a
-//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
-//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
-//! visible definitions at that use, or at the end of the scope for that symbol.
+//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` map
+//! indexed by [`ScopedSymbolId`]. The values in each are the visible definition of a symbol at
+//! that use, or at the end of the scope.
 //!
 //! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
 //! actually store these "lists of visible definitions" as a vector of [`Definition`] IDs. Instead,
 //! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
 //! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
 //! this representation is that it requires that the definitions visible at any given use of a
 //! symbol are stored sequentially in `all_definitions`.
 //!
 //! There is another special kind of possible "definition" for a symbol: it might be unbound in the
 //! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
 //! that has a definition for the symbol, leaving us with one visible definition, but still also
 //! the "unbound" possibility, since we might not have taken the `if` branch.)
 //!
 //! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
 //! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
 //! dramatically increase the number of [`Definition`]s that Salsa must track. Since "unbound" is a
 //! special definition in that all symbols share it, and it doesn't have any additional per-symbol
 //! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
 //! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
 //! additional visible "definition", which is the unbound state. If this flag is `false`, it means
 //! we've eliminated the possibility of unbound: every path we've followed includes a definition
 //! for this symbol.
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
|
||||
//! as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
|
||||
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
|
||||
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
|
||||
//! definitions for each symbol as the public definitions of that symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
|
||||
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
|
||||
//! visible definitions for that use of `x` are just the `x = 2` definition.
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
|
||||
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
|
||||
//! snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
|
||||
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
|
||||
//! sole visible definition of `x`.
|
||||
//!
|
||||
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
|
||||
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
|
||||
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
|
||||
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
|
||||
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
|
||||
//!
|
||||
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
|
||||
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
|
||||
//! visits a `StmtIf` node.
|
||||
//!
|
||||
//! (In the future we may have some other questions we want to answer as well, such as "is this
|
||||
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
|
||||
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
|
||||
//! Rather than have multiple definitions, we use a Phi definition at control flow join points to
|
||||
//! merge the visible definition in each path. This means at any given point we always have exactly
|
||||
//! one definition for a symbol. (This is analogous to static-single-assignment, or SSA, form, and
|
||||
//! in fact we use the algorithm from [Simple and efficient construction of static single
|
||||
//! assignment form](https://dl.acm.org/doi/10.1007/978-3-642-37051-9_6) here.)
|
||||
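To make the join-point idea concrete, here is a minimal standalone sketch of the phi-merging step (the `Def` enum and `join` helper are hypothetical simplifications for illustration, not this crate's types):

    // Hypothetical, simplified model: one reaching definition per path;
    // a join point merges them with a Phi when the paths disagree.
    #[derive(Debug, Clone, PartialEq)]
    enum Def {
        Assign(&'static str),  // a concrete assignment, e.g. `x = 3`
        Phi(Vec<Option<Def>>), // one operand per predecessor; None means unbound
    }

    fn join(preds: Vec<Option<Def>>) -> Option<Def> {
        // If every predecessor agrees, reuse that definition; otherwise insert a Phi.
        if preds.windows(2).all(|w| w[0] == w[1]) {
            preds.into_iter().next().flatten()
        } else {
            Some(Def::Phi(preds))
        }
    }

    fn main() {
        // if flag: x = 3
        // else:    x = 4
        let after_if = Some(Def::Assign("x = 3"));
        let after_else = Some(Def::Assign("x = 4"));
        // After the if/else, `x` has exactly one definition: Phi(x = 3, x = 4).
        println!("{:?}", join(vec![after_if, after_else]));
    }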
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use std::ops::Range;
use crate::semantic_index::definition::{Definition, DefinitionKind, ScopedPhiId};
use crate::semantic_index::symbol::{FileScopeId, ScopedSymbolId};
use crate::Db;
use ruff_db::files::File;
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};

/// All definitions that can reach a given use of a name.
/// Number of basic block predecessors we store inline.
const PREDECESSORS: usize = 2;

/// Input operands (definitions) for a Phi definition. None means not defined.
// TODO would like to use SmallVec here but can't due to lifetime invariance issue.
type PhiOperands<'db> = Vec<Option<Definition<'db>>>;

/// Definition for each use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    // TODO store constraints with definitions for type narrowing
    /// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
    all_definitions: Vec<Definition<'db>>,
    /// Definition that reaches each [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,

    /// Definitions that can reach a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    /// Definition of each symbol visible at end of scope.
    ///
    /// Sparse, because it only includes symbols defined in the scope.
    public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>>,

    /// Definitions of each symbol visible at end of scope.
    public_definitions: IndexVec<ScopedSymbolId, Definitions>,
    /// Operands for each Phi definition in this scope.
    phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
        &self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
    /// Return the dominating definition for a given use of a name; None means not-defined.
    pub(crate) fn definition_for_use(&self, use_id: ScopedUseId) -> Option<Definition<'db>> {
        self.definitions_by_use[use_id]
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound
    /// Return the definition visible at end of scope for a symbol.
    ///
    /// Return None if the symbol is never defined in the scope.
    pub(crate) fn public_definition(&self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
        self.public_definitions.get(&symbol_id).copied()
    }

    pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
        &self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound
    /// Return the operands for a Phi in this scope; a None means not-defined.
    pub(crate) fn phi_operands<'s>(&'s self, phi_id: ScopedPhiId) -> &'s [Option<Definition<'db>>] {
        self.phi_operands[phi_id].as_slice()
    }
}
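A consumer-side sketch of how these accessors compose (hypothetical glue code: `db`, `use_def`, `use_id`, and `symbol_id` are assumed to be in scope, and `definition_ty` is the types-module function that this same diff changes to accept an `Option<Definition>`):

    // Type visible at a particular use of a name; None (never defined) becomes Unbound.
    let use_ty = definition_ty(db, use_def.definition_for_use(use_id));
    // Type of the symbol as seen from outside the scope.
    let public_ty = definition_ty(db, use_def.public_definition(symbol_id));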

/// Definitions visible for a symbol at a particular use (or end-of-scope).
#[derive(Clone, Debug, PartialEq, Eq)]
struct Definitions {
    /// [`Range`] in `all_definitions` of the visible definition IDs.
    definitions_range: Range<usize>,
    /// Is the symbol possibly unbound at this point?
    may_be_unbound: bool,
}
type PredecessorBlocks = SmallVec<[BasicBlockId; PREDECESSORS]>;

impl Definitions {
    /// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
    fn unbound() -> Self {
        Self {
            definitions_range: Range::default(),
            may_be_unbound: true,
        }
    }
}

impl Default for Definitions {
    fn default() -> Self {
        Definitions::unbound()
    }
}

/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
}
/// A basic block is a linear region of code (no branches.)
#[newtype_index]
pub(super) struct BasicBlockId;

pub(super) struct UseDefMapBuilder<'db> {
    /// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
    all_definitions: Vec<Definition<'db>>,
    db: &'db dyn Db,
    file: File,
    file_scope: FileScopeId,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    /// Predecessor blocks for each basic block.
    ///
    /// Entry block has none, all other blocks have at least one, blocks that join control flow can
    /// have two or more.
    predecessors: IndexVec<BasicBlockId, PredecessorBlocks>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
    /// The definition of each symbol which dominates each basic block.
    ///
    /// No entry means "lazily unfilled"; we haven't had to query for it yet, and we may never have
    /// to, if the symbol isn't used in this block or any successor block.
    ///
    /// Each block has an [`FxHashMap`] of symbols instead of an [`IndexVec`] because it is lazy
    /// and potentially sparse; it will only include a definition for a symbol that is actually
    /// used in that block or a successor. An [`IndexVec`] would have to be eagerly filled with
    /// placeholders.
    definitions_per_block:
        IndexVec<BasicBlockId, FxHashMap<ScopedSymbolId, Option<Definition<'db>>>>,

    /// Incomplete Phi definitions in each block.
    ///
    /// An incomplete Phi is used when we don't know, while processing a block's body, what new
    /// predecessors it may later gain (that is, backward jumps.)
    ///
    /// Sparse, because relatively few blocks (just loop headers) will have any incomplete Phis.
    incomplete_phis: FxHashMap<BasicBlockId, Vec<Definition<'db>>>,

    /// Operands for each Phi definition in this scope.
    phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,

    /// Are this block's predecessors fully populated?
    ///
    /// If not, it isn't safe to recurse to predecessors yet; we might miss a predecessor block.
    sealed_blocks: IndexVec<BasicBlockId, bool>,

    /// Definition for each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,

    /// All symbols defined in this scope.
    defined_symbols: FxHashSet<ScopedSymbolId>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self {
            all_definitions: Vec::new(),
    pub(super) fn new(db: &'db dyn Db, file: File, file_scope: FileScopeId) -> Self {
        let mut new = Self {
            db,
            file,
            file_scope,
            predecessors: IndexVec::new(),
            definitions_per_block: IndexVec::new(),
            incomplete_phis: FxHashMap::default(),
            sealed_blocks: IndexVec::new(),
            definitions_by_use: IndexVec::new(),
            definitions_by_symbol: IndexVec::new(),
        }
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
        debug_assert_eq!(symbol, new_symbol);
            phi_operands: IndexVec::new(),
            defined_symbols: FxHashSet::default(),
        };

        // create the entry basic block
        new.predecessors.push(PredecessorBlocks::default());
        new.definitions_per_block.push(FxHashMap::default());
        new.sealed_blocks.push(true);

        new
    }

    /// Record a definition for a symbol.
    pub(super) fn record_definition(
        &mut self,
        symbol: ScopedSymbolId,
        symbol_id: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_idx = self.all_definitions.len();
        self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = Definitions {
            #[allow(clippy::range_plus_one)]
            definitions_range: def_idx..(def_idx + 1),
            may_be_unbound: false,
        };
        self.memoize(self.current_block_id(), symbol_id, Some(definition));
        self.defined_symbols.insert(symbol_id);
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
    /// Record a use of a symbol.
    pub(super) fn record_use(&mut self, symbol_id: ScopedSymbolId, use_id: ScopedUseId) {
        let definition_id = self.lookup(symbol_id);
        let new_use = self.definitions_by_use.push(definition_id);
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
    /// Get the id of the current basic block.
    pub(super) fn current_block_id(&self) -> BasicBlockId {
        BasicBlockId::from(self.definitions_per_block.len() - 1)
    }

    /// Push a new basic block, with given block as predecessor.
    pub(super) fn new_block_from(&mut self, block_id: BasicBlockId, sealed: bool) {
        self.new_block_with_predecessors(smallvec![block_id], sealed);
    }

    /// Push a new basic block, with current block as predecessor; return the current block's ID.
    pub(super) fn next_block(&mut self, sealed: bool) -> BasicBlockId {
        let current_block_id = self.current_block_id();
        self.new_block_from(current_block_id, sealed);
        current_block_id
    }

    /// Add a predecessor to the current block.
    pub(super) fn merge_block(&mut self, new_predecessor: BasicBlockId) {
        let block_id = self.current_block_id();
        debug_assert!(!self.sealed_blocks[block_id]);
        self.predecessors[block_id].push(new_predecessor);
    }

    /// Add predecessors to the current block.
    pub(super) fn merge_blocks(&mut self, new_predecessors: Vec<BasicBlockId>) {
        let block_id = self.current_block_id();
        debug_assert!(!self.sealed_blocks[block_id]);
        self.predecessors[block_id].extend(new_predecessors);
    }

    /// Mark the current block as sealed; it cannot have any more predecessors added.
    pub(super) fn seal_current_block(&mut self) {
        self.seal_block(self.current_block_id());
    }

    /// Mark a block as sealed; it cannot have any more predecessors added.
    pub(super) fn seal_block(&mut self, block_id: BasicBlockId) {
        debug_assert!(!self.sealed_blocks[block_id]);
        if let Some(phis) = self.incomplete_phis.get(&block_id) {
            for phi in phis.clone() {
                self.add_phi_operands(block_id, phi);
            }
            self.incomplete_phis.remove(&block_id);
        }
        self.sealed_blocks[block_id] = true;
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        debug_assert!(self.incomplete_phis.is_empty());
        debug_assert!(self.sealed_blocks.iter().all(|&b| b));
        self.definitions_by_use.shrink_to_fit();
        self.phi_operands.shrink_to_fit();

        let mut public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>> =
            FxHashMap::default();

        for symbol_id in self.defined_symbols.clone() {
            // SAFETY: We are only looking up defined symbols here, can't get None.
            public_definitions.insert(symbol_id, self.lookup(symbol_id).unwrap());
        }

        UseDefMap {
            definitions_by_use: self.definitions_by_use,
            public_definitions,
            phi_operands: self.phi_operands,
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());
    /// Push a new basic block (with given predecessors) and return its ID.
    fn new_block_with_predecessors(
        &mut self,
        predecessors: PredecessorBlocks,
        sealed: bool,
    ) -> BasicBlockId {
        let new_block_id = self.predecessors.push(predecessors);
        self.definitions_per_block.push(FxHashMap::default());
        self.sealed_blocks.push(sealed);

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound", the default.
        self.definitions_by_symbol
            .resize(num_symbols, Definitions::unbound());
        new_block_id
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.
    /// Look up the dominating definition for a symbol in the current block.
    ///
    /// If there isn't a local definition, recursively look up the symbol in predecessor blocks,
    /// memoizing the found definition in each block.
    fn lookup(&mut self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
        self.lookup_impl(self.current_block_id(), symbol_id)
    }

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());

        for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
            let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.may_be_unbound = true;
                continue;
            };

            // If the symbol can be unbound in either predecessor, it can be unbound post-merge.
            current.may_be_unbound |= snapshot.may_be_unbound;

            // Merge the definition ranges.
            let current = &mut current.definitions_range;
            let snapshot = &snapshot.definitions_range;

            // We never create reversed ranges.
            debug_assert!(current.end >= current.start);
            debug_assert!(snapshot.end >= snapshot.start);

            if current == snapshot {
                // Ranges already identical, nothing to do.
            } else if snapshot.is_empty() {
                // Merging from an empty range; nothing to do.
            } else if (*current).is_empty() {
                // Merging to an empty range; just use the incoming range.
                *current = snapshot.clone();
            } else if snapshot.end >= current.start && snapshot.start <= current.end {
                // Ranges are adjacent or overlapping, merge them in-place.
                *current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
            } else if current.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `current` is at the end of
                // `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
                // and can be merged into one range.
                self.all_definitions.extend_from_within(snapshot.clone());
                current.end = self.all_definitions.len();
            } else if snapshot.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `snapshot` is at the end of
                // `all_definitions`, we need to copy `current` to the end so they are adjacent and
                // can be merged into one range.
                self.all_definitions.extend_from_within(current.clone());
                current.start = snapshot.start;
                current.end = self.all_definitions.len();
            } else {
                // Ranges are not adjacent and neither one is at the end of `all_definitions`, we
                // have to copy both to the end so they are adjacent and we can merge them.
                let start = self.all_definitions.len();
                self.all_definitions.extend_from_within(current.clone());
                self.all_definitions.extend_from_within(snapshot.clone());
                current.start = start;
                current.end = self.all_definitions.len();
    fn lookup_impl(
        &mut self,
        block_id: BasicBlockId,
        symbol_id: ScopedSymbolId,
    ) -> Option<Definition<'db>> {
        if let Some(local) = self.definitions_per_block[block_id].get(&symbol_id) {
            return *local;
        }
        if !self.sealed_blocks[block_id] {
            // we may still be missing predecessors; insert an incomplete Phi.
            let definition = self.create_incomplete_phi(block_id, symbol_id);
            self.incomplete_phis
                .entry(block_id)
                .or_default()
                .push(definition);
            return Some(definition);
        }
        match self.predecessors[block_id].as_slice() {
            // entry block, no definition found: return None
            [] => None,
            // single predecessor, recurse
            &[single_predecessor_id] => {
                let definition = self.lookup_impl(single_predecessor_id, symbol_id);
                self.memoize(block_id, symbol_id, definition);
                definition
            }
            // multiple predecessors: create and memoize an incomplete Phi to break cycles, then
            // recurse into predecessors and fill the Phi operands.
            _ => {
                let phi = self.create_incomplete_phi(block_id, symbol_id);
                self.add_phi_operands(block_id, phi);
                Some(phi)
            }
        }
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();
    /// Recurse into predecessors to add operands for an incomplete Phi.
    fn add_phi_operands(&mut self, block_id: BasicBlockId, phi: Definition<'db>) {
        let predecessors: PredecessorBlocks = self.predecessors[block_id].clone();
        let operands: PhiOperands = predecessors
            .iter()
            .map(|pred_id| self.lookup_impl(*pred_id, phi.symbol(self.db)))
            .collect();
        let DefinitionKind::Phi(phi_id) = phi.kind(self.db) else {
            unreachable!("add_phi_operands called with non-Phi");
        };
        self.phi_operands[*phi_id] = operands;
    }

        UseDefMap {
            all_definitions: self.all_definitions,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
        }
    /// Remember a given definition for a given symbol in the given block.
    fn memoize(
        &mut self,
        block_id: BasicBlockId,
        symbol_id: ScopedSymbolId,
        definition_id: Option<Definition<'db>>,
    ) {
        self.definitions_per_block[block_id].insert(symbol_id, definition_id);
    }

    /// Create an incomplete Phi for the given block and symbol, memoize it, and return its ID.
    fn create_incomplete_phi(
        &mut self,
        block_id: BasicBlockId,
        symbol_id: ScopedSymbolId,
    ) -> Definition<'db> {
        let phi_id = self.phi_operands.push(vec![]);
        let definition = Definition::new(
            self.db,
            self.file,
            self.file_scope,
            symbol_id,
            DefinitionKind::Phi(phi_id),
            countme::Count::default(),
        );
        self.memoize(block_id, symbol_id, Some(definition));
        definition
    }
}
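A sketch of how the SemanticIndexBuilder might drive this block API when visiting an `if`/`else` (hypothetical driver code; the actual visitor is not part of this diff):

    // Assumes `builder: &mut UseDefMapBuilder` and that the `test` expression
    // was already visited in the current block.
    let pre_if = builder.next_block(true); // if-body block; sole predecessor known, so sealed
    // ... visit the `if` body; `record_definition` writes into the current block ...
    let end_of_if_body = builder.current_block_id();
    builder.new_block_from(pre_if, true); // the `else` branch also flows from the pre-if block
    // ... visit the `else` body ...
    builder.next_block(false); // join block; more predecessors still to come
    builder.merge_block(end_of_if_body); // the `if` body flows into the join too
    builder.seal_current_block(); // all predecessors known; complete any incomplete Phis

A later `lookup` of a symbol defined differently in the two branches would then recurse from the join block into both predecessors and produce a two-operand Phi.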

@@ -1,8 +1,11 @@
use red_knot_module_resolver::{resolve_module, Module, ModuleName};
use ruff_db::files::File;
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
use ruff_source_file::LineIndex;

use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::semantic_index;
use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
@@ -24,8 +27,16 @@ impl<'db> SemanticModel<'db> {
        self.db
    }

    pub fn file_path(&self) -> &FilePath {
        self.file.path(self.db)
    }

    pub fn line_index(&self) -> LineIndex {
        line_index(self.db.upcast(), self.file)
    }

    pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
        resolve_module(self.db.upcast(), module_name)
        resolve_module(self.db, module_name)
    }

    pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
@@ -140,7 +151,7 @@ impl HasTy for ast::StmtFunctionDef {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
        definition_ty(model.db, Some(definition))
    }
}

@@ -148,7 +159,7 @@ impl HasTy for StmtClassDef {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
        definition_ty(model.db, Some(definition))
    }
}

@@ -156,7 +167,7 @@ impl HasTy for ast::Alias {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
        definition_ty(model.db, Some(definition))
    }
}

@@ -164,34 +175,38 @@ impl HasTy for ast::Alias {
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::parsed::parsed_module;
    use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::program::{Program, SearchPathSettings};
    use crate::python_version::PythonVersion;
    use crate::types::Type;
    use crate::{HasTy, SemanticModel};
    use crate::{HasTy, ProgramSettings, SemanticModel};

    fn setup_db() -> TestDb {
        let db = TestDb::new();
        Program::new(
    fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
        let mut db = TestDb::new();
        db.write_files(files)?;

        Program::from_settings(
            &db,
            TargetVersion::Py38,
            SearchPathSettings {
                extra_paths: vec![],
                workspace_root: SystemPathBuf::from("/src"),
                site_packages: None,
                custom_typeshed: None,
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: SystemPathBuf::from("/src"),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        );
        )?;

        db
        Ok(db)
    }

    #[test]
    fn function_ty() -> anyhow::Result<()> {
        let mut db = setup_db();
        let db = setup_db([("/src/foo.py", "def test(): pass")])?;

        db.write_file("/src/foo.py", "def test(): pass")?;
        let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

        let ast = parsed_module(&db, foo);
@@ -207,9 +222,8 @@ mod tests {

    #[test]
    fn class_ty() -> anyhow::Result<()> {
        let mut db = setup_db();
        let db = setup_db([("/src/foo.py", "class Test: pass")])?;

        db.write_file("/src/foo.py", "class Test: pass")?;
        let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

        let ast = parsed_module(&db, foo);
@@ -225,12 +239,11 @@ mod tests {

    #[test]
    fn alias_ty() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
        let db = setup_db([
            ("/src/foo.py", "class Test: pass"),
            ("/src/bar.py", "from foo import Test"),
        ])?;

        let bar = system_path_to_file(&db, "/src/bar.py").unwrap();

        let ast = parsed_module(&db, bar);

@@ -1,15 +1,18 @@
use ruff_db::files::File;
use ruff_python_ast::name::Name;

use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
use crate::{Db, FxOrderSet};

mod builder;
mod display;
mod infer;

pub(crate) use self::infer::{infer_definition_types, infer_expression_types, infer_scope_types};
pub(crate) use self::builder::UnionBuilder;
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};

/// Infer the public type of a symbol (its type as seen from outside its scope).
pub(crate) fn symbol_ty<'db>(
@@ -20,13 +23,7 @@ pub(crate) fn symbol_ty<'db>(
    let _span = tracing::trace_span!("symbol_ty", ?symbol).entered();

    let use_def = use_def_map(db, scope);
    definitions_ty(
        db,
        use_def.public_definitions(symbol),
        use_def
            .public_may_be_unbound(symbol)
            .then_some(Type::Unbound),
    )
    definition_ty(db, use_def.public_definition(symbol))
}

/// Shorthand for `symbol_ty` that takes a symbol name instead of an ID.
@@ -42,55 +39,31 @@ pub(crate) fn symbol_ty_by_name<'db>(
        .unwrap_or(Type::Unbound)
}

/// Shorthand for `symbol_ty` that looks up a module-global symbol in a file.
/// Shorthand for `symbol_ty` that looks up a module-global symbol by name in a file.
pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
    symbol_ty_by_name(db, global_scope(db, file), name)
}

/// Infer the type of a [`Definition`].
pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
    let inference = infer_definition_types(db, definition);
    inference.definition_ty(definition)
/// Shorthand for `symbol_ty` that looks up a symbol in the builtins.
///
/// Returns `Unbound` if the builtins module isn't available for some reason.
pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Type<'db> {
    builtins_scope(db)
        .map(|builtins| symbol_ty_by_name(db, builtins, name))
        .unwrap_or(Type::Unbound)
}

/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
///
/// Will return a union if there is more than one definition, or at least one plus an unbound
/// type.
///
/// The "unbound type" represents the type in case control flow may not have passed through any
/// definitions in this scope. If this isn't possible, then it will be `None`. If it is possible,
/// and the result in that case should be Unbound (e.g. an unbound function local), then it will be
/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
/// implicit global lookup), then `unbound_type` will be `Some(the_global_symbol_type)`.
///
/// # Panics
/// Will panic if called with zero definitions and no `unbound_ty`. This is a logic error,
/// as any symbol with zero visible definitions clearly may be unbound, and the caller should
/// provide an `unbound_ty`.
pub(crate) fn definitions_ty<'db>(
/// Infer the type of a [`Definition`].
pub(crate) fn definition_ty<'db>(
    db: &'db dyn Db,
    definitions: &[Definition<'db>],
    unbound_ty: Option<Type<'db>>,
    definition: Option<Definition<'db>>,
) -> Type<'db> {
    let def_types = definitions.iter().map(|def| definition_ty(db, *def));
    let mut all_types = unbound_ty.into_iter().chain(def_types);

    let Some(first) = all_types.next() else {
        panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
    };

    if let Some(second) = all_types.next() {
        let mut builder = UnionTypeBuilder::new(db);
        builder = builder.add(first).add(second);

        for variant in all_types {
            builder = builder.add(variant);
    match definition {
        Some(definition) => {
            let inference = infer_definition_types(db, definition);
            inference.definition_ty(definition)
        }

        Type::Union(builder.build())
    } else {
        first
        None => Type::Unbound,
    }
}

@@ -107,7 +80,7 @@ pub enum Type<'db> {
    /// name does not exist or is not bound to any value (this represents an error, but with some
    /// leniency options it could be silently resolved to Unknown in some cases)
    Unbound,
    /// the None object (TODO remove this in favor of Instance(types.NoneType)
    /// the None object -- TODO remove this in favor of Instance(types.NoneType)
    None,
    /// a specific function object
    Function(FunctionType<'db>),
@@ -117,9 +90,14 @@ pub enum Type<'db> {
    Class(ClassType<'db>),
    /// the set of Python objects with the given class in their __class__'s method resolution order
    Instance(ClassType<'db>),
    /// the set of objects in any of the types in the union
    Union(UnionType<'db>),
    /// the set of objects in all of the types in the intersection
    Intersection(IntersectionType<'db>),
    /// An integer literal
    IntLiteral(i64),
    /// A boolean literal, either `True` or `False`.
    BooleanLiteral(bool),
    // TODO protocols, callable types, overloads, generics, type vars
}

@@ -128,8 +106,27 @@ impl<'db> Type<'db> {
        matches!(self, Type::Unbound)
    }

    pub const fn is_unknown(&self) -> bool {
        matches!(self, Type::Unknown)
    pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
        match self {
            Type::Unbound => true,
            Type::Union(union) => union.contains(db, Type::Unbound),
            _ => false,
        }
    }

    #[must_use]
    pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
        match self {
            Type::Unbound => replacement,
            Type::Union(union) => union
                .elements(db)
                .into_iter()
                .fold(UnionBuilder::new(db), |builder, ty| {
                    builder.add(ty.replace_unbound_with(db, replacement))
                })
                .build(),
            ty => *ty,
        }
    }
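These two helpers combine naturally for fallback lookups; a hedged sketch (hypothetical caller; `db`, `local_ty`, and `global_ty` are assumed to be in scope):

    // If control flow may bypass every local definition, substitute the fallback
    // type for the Unbound portion of the (possibly union) type.
    let resolved = if local_ty.may_be_unbound(db) {
        local_ty.replace_unbound_with(db, global_ty)
    } else {
        local_ty
    };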

    #[must_use]
@@ -147,12 +144,13 @@ impl<'db> Type<'db> {
                // TODO MRO? get_own_instance_member, get_instance_member
                todo!("attribute lookup on Instance type")
            }
            Type::Union(_) => {
                // TODO perform the get_member on each type in the union
                // TODO return the union of those results
                // TODO if any of those results is `None` then include Unknown in the result union
                todo!("attribute lookup on Union type")
            }
            Type::Union(union) => union
                .elements(db)
                .iter()
                .fold(UnionBuilder::new(db), |builder, element_ty| {
                    builder.add(element_ty.member(db, name))
                })
                .build(),
            Type::Intersection(_) => {
                // TODO perform the get_member on each type in the intersection
                // TODO return the intersection of those results
@@ -162,6 +160,17 @@ impl<'db> Type<'db> {
                // TODO raise error
                Type::Unknown
            }
            Type::BooleanLiteral(_) => Type::Unknown,
        }
    }

    #[must_use]
    pub fn instance(&self) -> Type<'db> {
        match self {
            Type::Any => Type::Any,
            Type::Unknown => Type::Unknown,
            Type::Class(class) => Type::Instance(*class),
            _ => Type::Unknown, // TODO type errors
        }
    }
}
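The union arm of `member` above distributes attribute lookup over the union's elements; informally, for a union `A | B`:

    (A | B).member(db, name)  ==  A.member(db, name) | B.member(db, name)

with the result rebuilt through `UnionBuilder`, so the single-element and flattening invariants still hold.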
@@ -225,52 +234,25 @@ impl<'db> ClassType<'db> {

#[salsa::interned]
pub struct UnionType<'db> {
    /// the union type includes values in any of these types
    /// The union type includes values in any of these types.
    elements: FxOrderSet<Type<'db>>,
}

struct UnionTypeBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}

impl<'db> UnionTypeBuilder<'db> {
    fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
        }
    }

    /// Adds a type to this union.
    fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(&union.elements(self.db));
            }
            _ => {
                self.elements.insert(ty);
            }
        }

        self
    }

    fn build(self) -> UnionType<'db> {
        UnionType::new(self.db, self.elements)
impl<'db> UnionType<'db> {
    pub fn contains(&self, db: &'db dyn Db, ty: Type<'db>) -> bool {
        self.elements(db).contains(&ty)
    }
}

// Negation types aren't expressible in annotations, and are most likely to arise from type
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
// have to represent it as a single-element intersection if it did) in exchange for better
// efficiency in the within-intersection case.
#[salsa::interned]
pub struct IntersectionType<'db> {
    // the intersection type includes only values in all of these types
    /// The intersection type includes only values in all of these types.
    positive: FxOrderSet<Type<'db>>,
    // the intersection type does not include any value in any of these types

    /// The intersection type does not include any value in any of these types.
    ///
    /// Negation types aren't expressible in annotations, and are most likely to arise from type
    /// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
    /// directly in intersections rather than as a separate type.
    negative: FxOrderSet<Type<'db>>,
}

crates/red_knot_python_semantic/src/types/builder.rs (new file, 429 lines)
@@ -0,0 +1,429 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
//!   have a standalone negation type so there's no other representation for this.
//! * The same type should never appear more than once in a union or intersection. (This should
//!   be expanded to cover subtyping -- see below -- but for now we only implement it for type
//!   identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//!   than a union-of-intersections. Unions cannot contain other unions (the inner union just
//!   flattens into the outer one), intersections cannot contain other intersections (also
//!   flattens), and intersections cannot contain unions (the intersection distributes over the
//!   union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//!   subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//!   eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
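As a concrete instance of the DNF invariant, adding a union to an intersection distributes rather than nests:

    (T1 & T2) & (T3 | T4)  ==  (T1 & T2 & T3) | (T1 & T2 & T4)

which is why `IntersectionBuilder::build` below can return a `Type::Union` of intersections.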
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}

impl<'db> UnionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
        }
    }

    /// Adds a type to this union.
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(&union.elements(self.db));
            }
            Type::Never => {}
            _ => {
                self.elements.insert(ty);
            }
        }

        self
    }

    pub(crate) fn build(self) -> Type<'db> {
        match self.elements.len() {
            0 => Type::Never,
            1 => self.elements[0],
            _ => Type::Union(UnionType::new(self.db, self.elements)),
        }
    }
}

#[allow(unused)]
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    db: &'db dyn Db,
}

impl<'db> IntersectionBuilder<'db> {
    #[allow(dead_code)]
    fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![InnerIntersectionBuilder::new()],
        }
    }

    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![],
        }
    }

    #[allow(dead_code)]
    fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF), we distribute the union and get `(T1 & T3) | (T2 & T3) | (T1 & T4) |
            // (T2 & T4)`. If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
            // and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
            // T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }

    #[allow(dead_code)]
    fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    #[allow(dead_code)]
    fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            let mut builder = UnionBuilder::new(self.db);
            for inner in self.intersections {
                builder = builder.add(inner.build(self.db));
            }
            builder.build()
        }
    }
}

#[allow(unused)]
#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    positive: FxOrderSet<Type<'db>>,
    negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            Type::Never => {}
            _ => {
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }

    fn simplify(&mut self) {
        // TODO this should be generalized based on subtyping, for now we just handle a few cases

        // Never is a subtype of all types
        if self.positive.contains(&Type::Never) {
            self.positive.clear();
            self.negative.clear();
            self.positive.insert(Type::Never);
        }
    }

    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
|
||||
use crate::db::tests::TestDb;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
TestDb::new()
|
||||
}
|
||||
|
||||
impl<'db> UnionType<'db> {
|
||||
fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
|
||||
self.elements(db).into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
|
||||
panic!("expected a union");
|
||||
};
|
||||
|
||||
assert_eq!(union.elements_vec(&db), &[t0, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_single() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ty = UnionBuilder::new(&db).add(t0).build();
|
||||
|
||||
assert_eq!(ty, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_empty() {
|
||||
let db = setup_db();
|
||||
let ty = UnionBuilder::new(&db).build();
|
||||
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_never() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();
|
||||
|
||||
assert_eq!(ty, t0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_union_flatten() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::IntLiteral(2);
|
||||
let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
|
||||
let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
|
||||
panic!("expected a union");
|
||||
};
|
||||
|
||||
assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
|
||||
}
|
||||
|
||||
impl<'db> IntersectionType<'db> {
|
||||
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
|
||||
self.positive(db).into_iter().collect()
|
||||
}
|
||||
|
||||
fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
|
||||
self.negative(db).into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let ta = Type::Any;
|
||||
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t0)
|
||||
.build()
|
||||
else {
|
||||
panic!("expected to be an intersection");
|
||||
};
|
||||
|
||||
assert_eq!(inter.pos_vec(&db), &[ta]);
|
||||
assert_eq!(inter.neg_vec(&db), &[t0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_flatten_positive() {
|
||||
let db = setup_db();
|
||||
let ta = Type::Any;
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::IntLiteral(2);
|
||||
let i0 = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t1)
|
||||
.build();
|
||||
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
|
||||
.add_positive(t2)
|
||||
.add_positive(i0)
|
||||
.build()
|
||||
else {
|
||||
panic!("expected to be an intersection");
|
||||
};
|
||||
|
||||
assert_eq!(inter.pos_vec(&db), &[t2, ta]);
|
||||
assert_eq!(inter.neg_vec(&db), &[t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_flatten_negative() {
|
||||
let db = setup_db();
|
||||
let ta = Type::Any;
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let t2 = Type::IntLiteral(2);
|
||||
let i0 = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_negative(t1)
|
||||
.build();
|
||||
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
|
||||
.add_positive(t2)
|
||||
.add_negative(i0)
|
||||
.build()
|
||||
else {
|
||||
panic!("expected to be an intersection");
|
||||
};
|
||||
|
||||
assert_eq!(inter.pos_vec(&db), &[t2, t1]);
|
||||
assert_eq!(inter.neg_vec(&db), &[ta]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersection_distributes_over_union() {
|
||||
let db = setup_db();
|
||||
let t0 = Type::IntLiteral(0);
|
||||
let t1 = Type::IntLiteral(1);
|
||||
let ta = Type::Any;
|
||||
let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();
|
||||
|
||||
let Type::Union(union) = IntersectionBuilder::new(&db)
|
||||
.add_positive(ta)
|
||||
.add_positive(u0)
|
||||
.build()
|
||||
else {
|
||||
panic!("expected a union");
|
||||
};
|
||||
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
|
||||
panic!("expected a union of two intersections");
|
||||
};
|
||||
assert_eq!(i0.pos_vec(&db), &[ta, t0]);
|
||||
assert_eq!(i1.pos_vec(&db), &[ta, t1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_self_negation() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::None)
|
||||
.add_negative(Type::None)
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::Never);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_intersection_simplify_negative_never() {
|
||||
let db = setup_db();
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(Type::None)
|
||||
.add_negative(Type::Never)
|
||||
.build();
|
||||
|
||||
assert_eq!(ty, Type::None);
|
||||
}
|
||||
|
||||
#[test]
fn build_intersection_simplify_positive_never() {
    let db = setup_db();
    // Never is uninhabited, so any intersection containing it positively
    // is itself Never.
    let ty = IntersectionBuilder::new(&db)
        .add_positive(Type::None)
        .add_positive(Type::Never)
        .build();

    assert_eq!(ty, Type::Never);
}
}
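Read together, the tests above pin down the builders' simplification algebra. Below is a condensed sketch restating the same invariants; it assumes only the APIs exercised in this module (`setup_db`, `Type`, `UnionBuilder`, `IntersectionBuilder`) and introduces nothing new:

// Sketch only: restates behavior asserted by the tests above.
let db = setup_db();
let (zero, one) = (Type::IntLiteral(0), Type::IntLiteral(1));

// T & ~T collapses to Never.
let contradiction = IntersectionBuilder::new(&db)
    .add_positive(Type::None)
    .add_negative(Type::None)
    .build();
assert_eq!(contradiction, Type::Never);

// Intersecting with a positive union distributes over its elements:
// Any & (0 | 1) = (Any & 0) | (Any & 1).
let zero_or_one = UnionBuilder::new(&db).add(zero).add(one).build();
let distributed = IntersectionBuilder::new(&db)
    .add_positive(Type::Any)
    .add_positive(zero_or_one)
    .build();
assert!(matches!(distributed, Type::Union(_)));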
@@ -26,19 +26,18 @@ impl Display for DisplayType<'_> {
             Type::Unbound => f.write_str("Unbound"),
             Type::None => f.write_str("None"),
             Type::Module(file) => {
-                write!(f, "<module '{:?}'>", file.path(self.db.upcast()))
+                write!(f, "<module '{:?}'>", file.path(self.db))
             }
             // TODO functions and classes should display using a fully qualified name
-            Type::Class(class) => {
-                f.write_str("Literal[")?;
-                f.write_str(&class.name(self.db))?;
-                f.write_str("]")
-            }
+            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
             Type::Instance(class) => f.write_str(&class.name(self.db)),
-            Type::Function(function) => f.write_str(&function.name(self.db)),
+            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
             Type::Union(union) => union.display(self.db).fmt(f),
             Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
             Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
             Type::BooleanLiteral(boolean) => {
                 write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
             }
         }
     }
 }
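For reference, a minimal sketch of what the new arms render. It assumes a `Type::display(db)` adapter returning the `DisplayType` this impl backs (the adapter name is an assumption; only the output strings follow from the hunk above):

// Illustrative expectations, not assertions taken from the diff.
// Int and bool literals render as Literal[...]:
assert_eq!(Type::IntLiteral(1).display(db).to_string(), "Literal[1]");
assert_eq!(Type::BooleanLiteral(true).display(db).to_string(), "Literal[True]");
// Classes and functions now also render as Literal[<name>], e.g.
// "Literal[C]" for a class C and "Literal[f]" for a function f,
// pending the TODO above about fully qualified names.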
File diff suppressed because it is too large

crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt (vendored, new file)
@@ -0,0 +1 @@
+4ef2d66663fc080fefa379e6ae5fc45d4f8b54eb
@@ -1,18 +1,18 @@
 import sys
 from _typeshed import SupportsWrite
 from collections.abc import Iterable, Iterator
-from typing import Any, Final, Literal
+from typing import Any, Final
 from typing_extensions import TypeAlias

 __version__: Final[str]

-QUOTE_ALL: Literal[1]
-QUOTE_MINIMAL: Literal[0]
-QUOTE_NONE: Literal[3]
-QUOTE_NONNUMERIC: Literal[2]
+QUOTE_ALL: Final = 1
+QUOTE_MINIMAL: Final = 0
+QUOTE_NONE: Final = 3
+QUOTE_NONNUMERIC: Final = 2
 if sys.version_info >= (3, 12):
-    QUOTE_STRINGS: Literal[4]
-    QUOTE_NOTNULL: Literal[5]
+    QUOTE_STRINGS: Final = 4
+    QUOTE_NOTNULL: Final = 5

 # Ideally this would be `QUOTE_ALL | QUOTE_MINIMAL | QUOTE_NONE | QUOTE_NONNUMERIC`
 # However, using literals in situations like these can cause false-positives (see #7258)
@@ -71,7 +71,7 @@ class _CData(metaclass=_CDataMeta):
     @classmethod
     def from_address(cls, address: int) -> Self: ...
     @classmethod
-    def from_param(cls, obj: Any) -> Self | _CArgObject: ...
+    def from_param(cls, value: Any, /) -> Self | _CArgObject: ...
     @classmethod
     def in_dll(cls, library: CDLL, name: str) -> Self: ...
     def __buffer__(self, flags: int, /) -> memoryview: ...
@@ -368,11 +368,7 @@ def tparm(
 ) -> bytes: ...
 def typeahead(fd: int, /) -> None: ...
 def unctrl(ch: _ChType, /) -> bytes: ...
-
-if sys.version_info < (3, 12) or sys.platform != "darwin":
-    # The support for macos was dropped in 3.12
-    def unget_wch(ch: int | str, /) -> None: ...
-
+def unget_wch(ch: int | str, /) -> None: ...
 def ungetch(ch: _ChType, /) -> None: ...
 def ungetmouse(id: int, x: int, y: int, z: int, bstate: int, /) -> None: ...
 def update_lines_cols() -> None: ...
@@ -447,13 +443,10 @@ class _CursesWindow:
     def getch(self) -> int: ...
     @overload
     def getch(self, y: int, x: int) -> int: ...
-    if sys.version_info < (3, 12) or sys.platform != "darwin":
-        # The support for macos was dropped in 3.12
-        @overload
-        def get_wch(self) -> int | str: ...
-        @overload
-        def get_wch(self, y: int, x: int) -> int | str: ...
-
+    @overload
+    def get_wch(self) -> int | str: ...
+    @overload
+    def get_wch(self, y: int, x: int) -> int | str: ...
     @overload
     def getkey(self) -> str: ...
     @overload
@@ -17,20 +17,20 @@ class DecimalTuple(NamedTuple):
     digits: tuple[int, ...]
     exponent: int | Literal["n", "N", "F"]

-ROUND_DOWN: str
-ROUND_HALF_UP: str
-ROUND_HALF_EVEN: str
-ROUND_CEILING: str
-ROUND_FLOOR: str
-ROUND_UP: str
-ROUND_HALF_DOWN: str
-ROUND_05UP: str
-HAVE_CONTEXTVAR: bool
-HAVE_THREADS: bool
-MAX_EMAX: int
-MAX_PREC: int
-MIN_EMIN: int
-MIN_ETINY: int
+ROUND_DOWN: Final[str]
+ROUND_HALF_UP: Final[str]
+ROUND_HALF_EVEN: Final[str]
+ROUND_CEILING: Final[str]
+ROUND_FLOOR: Final[str]
+ROUND_UP: Final[str]
+ROUND_HALF_DOWN: Final[str]
+ROUND_05UP: Final[str]
+HAVE_CONTEXTVAR: Final[bool]
+HAVE_THREADS: Final[bool]
+MAX_EMAX: Final[int]
+MAX_PREC: Final[int]
+MIN_EMIN: Final[int]
+MIN_ETINY: Final[int]

 class DecimalException(ArithmeticError): ...
 class Clamped(DecimalException): ...
@@ -1,5 +1,5 @@
 from _typeshed import structseq
-from typing import Final, Literal, SupportsIndex, final
+from typing import Any, Final, Literal, SupportsIndex, final
 from typing_extensions import Buffer, Self

 class ChannelError(RuntimeError): ...
@@ -72,13 +72,15 @@ class ChannelInfo(structseq[int], tuple[bool, bool, bool, int, int, int, int, in
     @property
     def send_released(self) -> bool: ...

-def create() -> ChannelID: ...
+def create(unboundop: Literal[1, 2, 3]) -> ChannelID: ...
 def destroy(cid: SupportsIndex) -> None: ...
 def list_all() -> list[ChannelID]: ...
 def list_interpreters(cid: SupportsIndex, *, send: bool) -> list[int]: ...
 def send(cid: SupportsIndex, obj: object, *, blocking: bool = True, timeout: float | None = None) -> None: ...
 def send_buffer(cid: SupportsIndex, obj: Buffer, *, blocking: bool = True, timeout: float | None = None) -> None: ...
-def recv(cid: SupportsIndex, default: object = ...) -> object: ...
+def recv(cid: SupportsIndex, default: object = ...) -> tuple[Any, Literal[1, 2, 3]]: ...
 def close(cid: SupportsIndex, *, send: bool = False, recv: bool = False) -> None: ...
 def get_count(cid: SupportsIndex) -> int: ...
 def get_info(cid: SupportsIndex) -> ChannelInfo: ...
+def get_channel_defaults(cid: SupportsIndex) -> Literal[1, 2, 3]: ...
 def release(cid: SupportsIndex, *, send: bool = False, recv: bool = False, force: bool = False) -> None: ...
Some files were not shown because too many files have changed in this diff