Compare commits: 0.6.2...david/more (358 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | d440cad340 |  |
|  | 9169e35175 |  |
|  | 79f809cd13 |  |
|  | b219c6fc2c |  |
|  | edfaf896e4 |  |
|  | da842bbc38 |  |
|  | b425e6a34a |  |
|  | a95b4d57c1 |  |
|  | e069381a23 |  |
|  | 749f176ae5 |  |
|  | e241c42212 |  |
|  | 09eeee99d4 |  |
|  | 04b636cba2 |  |
|  | 6048f331d9 |  |
|  | 93097f1c53 |  |
|  | 9bb4722ebf |  |
|  | 5caabe54b6 |  |
|  | 814ab47582 |  |
|  | c3a3622e30 |  |
|  | 4ef422d3b4 |  |
|  | 58bc981677 |  |
|  | dd5018ac55 |  |
|  | 63df94b521 |  |
|  | e4c0dd6f96 |  |
|  | 3111dce5b4 |  |
|  | 8445e4725c |  |
|  | defdc4dd8e |  |
|  | 46bc69d1d4 |  |
|  | 3209953276 |  |
|  | 6ae833e0c7 |  |
|  | a3dc5c0529 |  |
|  | d6b24b690a |  |
|  | 5b4afd30ca |  |
|  | b9827a4122 |  |
|  | 93eff7f174 |  |
|  | fc661e193a |  |
|  | 42fcbef876 |  |
|  | 71b52b83e4 |  |
|  | fb90f5a13d |  |
|  | d7484e6942 |  |
|  | 14ee5dbfde |  |
|  | 27ac34d683 |  |
|  | 31ca1c3064 |  |
|  | 646e4136d7 |  |
|  | 58a11b33da |  |
|  | 7856e90a2c |  |
|  | 98878c9bf2 |  |
|  | 73aa6ea417 |  |
|  | 38d872ea4c |  |
|  | 824def2194 |  |
|  | 2ab78dd6a5 |  |
|  | 03fa7f64dd |  |
|  | 43330225be |  |
|  | 383d9d9f6e |  |
|  | 8108f83810 |  |
|  | f1205177fd |  |
|  | 1c2cafc101 |  |
|  | 7c5a7d909c |  |
|  | 2a365bb278 |  |
|  | 020f4d4a54 |  |
|  | 888930b7d3 |  |
|  | d726f09cf0 |  |
|  | 975be9c1c6 |  |
|  | 99e4566fce |  |
|  | 7ad07c2c5d |  |
|  | 4aefe52393 |  |
|  | cc1f766622 |  |
|  | fdd0a22c03 |  |
|  | 3728d5b3a2 |  |
|  | 7e3894f5b3 |  |
|  | c3b40da0d2 |  |
|  | ef45185dbc |  |
|  | 961fc98344 |  |
|  | 0a6dc8e1b8 |  |
|  | 8d54996ffb |  |
|  | 73e884b232 |  |
|  | edba60106b |  |
|  | 043fba7a57 |  |
|  | 20d997784d |  |
|  | 82324678cf |  |
|  | cfd5d63917 |  |
|  | 2a36b47f13 |  |
|  | 6322639aca |  |
|  | 360af1bc32 |  |
|  | 3af3f74c66 |  |
|  | 45f01e7872 |  |
|  | 6cdf996af6 |  |
|  | 9d8a4c0057 |  |
|  | c9c748a79e |  |
|  | 32c746bd82 |  |
|  | e76f77d711 |  |
|  | d86b73eb3d |  |
|  | 5f4b282327 |  |
|  | d9267132d6 |  |
|  | 5118166d21 |  |
|  | 6fb1d6037a |  |
|  | 9237813e0c |  |
|  | 3bebde3ccc |  |
|  | 6c5cbad533 |  |
|  | 7a2f8d4463 |  |
|  | ad87ea948d |  |
|  | acfc34d615 |  |
|  | 668730cc28 |  |
|  | bee498d635 |  |
|  | ec72e675d9 |  |
|  | 1639488082 |  |
|  | f3e464ea4c |  |
|  | 253f5f269a |  |
|  | c046101b79 |  |
|  | 7706f561a9 |  |
|  | f5e3662446 |  |
|  | a354d9ead6 |  |
|  | 58a8e9c511 |  |
|  | e83388dcea |  |
|  | ae39ce56c0 |  |
|  | ff2d214e11 |  |
|  | 9442cd8fae |  |
|  | 8012707348 |  |
|  | d7ffe46054 |  |
|  | 7c83af419c |  |
|  | bbb044ebda |  |
|  | 481065238b |  |
|  | 11f06e0d55 |  |
|  | f27a8b8c7a |  |
|  | ca0ae0a484 |  |
|  | be1d5e3368 |  |
|  | 03503f7f56 |  |
|  | ff4b6d11fa |  |
|  | 96e7f3f96f |  |
|  | 90dc7438ee |  |
|  | 3e99ab141c |  |
|  | 115745a8ac |  |
|  | 8bb59d7216 |  |
|  | 47aac060de |  |
|  | 7c55330534 |  |
|  | 047d77c60b |  |
|  | 18fddd458a |  |
|  | db76000521 |  |
|  | a2ed1e1cd1 |  |
|  | 7457679582 |  |
|  | 1d352872ba |  |
|  | c8b905bc96 |  |
|  | 5b593d0397 |  |
|  | c5c5acda23 |  |
|  | 26747aae75 |  |
|  | 85b825a2a1 |  |
|  | 9e764ef6d0 |  |
|  | 0e325a53ef |  |
|  | 2a136cfb57 |  |
|  | 7749164d4a |  |
|  | da50e14524 |  |
|  | 1886b731a5 |  |
|  | 364eddc95a |  |
|  | 48fb340e3b |  |
|  | 71bb4d3bdc |  |
|  | 5c20f570d0 |  |
|  | 7441da287f |  |
|  | c2a5179d75 |  |
|  | 17c4690b5e |  |
|  | f06d44e6e5 |  |
|  | 653c09001a |  |
|  | 8921fbb54c |  |
|  | 3018303c87 |  |
|  | 6c303b2445 |  |
|  | 7579a792c7 |  |
|  | 0bbc138037 |  |
|  | ff11db61b4 |  |
|  | 2823487bf8 |  |
|  | 910fac781d |  |
|  | 149fb2090e |  |
|  | 40c65dcfa7 |  |
|  | 03f3a4e855 |  |
|  | 531ebf6dff |  |
|  | 7c2011599f |  |
|  | 17e90823da |  |
|  | d01cbf7f8f |  |
|  | 770b276c21 |  |
|  | 4e935f7d7d |  |
|  | 260c2ecd15 |  |
|  | f110d80279 |  |
|  | a6d3d2fccd |  |
|  | afdb659111 |  |
|  | a8d9104fa3 |  |
|  | d3530ab997 |  |
|  | cf1e91bb59 |  |
|  | 125eaafae0 |  |
|  | 7aae80903c |  |
|  | 4aca9b91ba |  |
|  | 8b3da1867e |  |
|  | c173ec5bc7 |  |
|  | 44d916fb4e |  |
|  | 4eb849aed3 |  |
|  | 6ac61d7b89 |  |
|  | c7b2e336f0 |  |
|  | 70748950ae |  |
|  | dcfebaa4a8 |  |
|  | d86e5ad031 |  |
|  | bb12fe9d0c |  |
|  | 3b57faf19b |  |
|  | c9f7c3d652 |  |
|  | 489dbbaadc |  |
|  | 47e9ea2d5d |  |
|  | 7919a7122a |  |
|  | a70d693b1c |  |
|  | 1365b0806d |  |
|  | f4de49ab37 |  |
|  | 8b49845537 |  |
|  | d988204b1b |  |
|  | 8558126df1 |  |
|  | 9bd9981e70 |  |
|  | 21bfab9b69 |  |
|  | 43a5922f6f |  |
|  | 175d067250 |  |
|  | 4dc2c257ef |  |
|  | b72d49be16 |  |
|  | eded78a39b |  |
|  | a7b8cc08f0 |  |
|  | b93d0ab57c |  |
|  | e6b927a583 |  |
|  | acab1f4fd8 |  |
|  | 2ca78721e6 |  |
|  | a528edad35 |  |
|  | 1d5bd89987 |  |
|  | b7cef6c999 |  |
|  | 110193af57 |  |
|  | d6bd841512 |  |
|  | 210a9e6068 |  |
|  | 7c872e639b |  |
|  | 5ef6979d9a |  |
|  | 62c7d8f6ba |  |
|  | 6f53aaf931 |  |
|  | ac720cd705 |  |
|  | 312bd86e48 |  |
|  | b04948fb72 |  |
|  | a98dbcee78 |  |
|  | 1eb3e4057f |  |
|  | 346dbf45b5 |  |
|  | f427a7a5a3 |  |
|  | 955dc8804a |  |
|  | e1603e3dca |  |
|  | 35d45c1e4b |  |
|  | e4aa479515 |  |
|  | a7c936878d |  |
|  | c3bcd5c842 |  |
|  | 594dee1b0b |  |
|  | a4ebe7d344 |  |
|  | 2a3775e525 |  |
|  | 65cc6ec41d |  |
|  | 66fe226608 |  |
|  | e965f9cc0e |  |
|  | 0512428a6f |  |
|  | 46a457318d |  |
|  | 57289099bb |  |
|  | 9d1bd7a8a7 |  |
|  | e37bde458e |  |
|  | 862bd0c429 |  |
|  | e1e9143c47 |  |
|  | 3c4ec82aee |  |
|  | 29c36a56b2 |  |
|  | dfee65882b |  |
|  | 50c8ee5175 |  |
|  | c2aac5f826 |  |
|  | 387af831f9 |  |
|  | 9d517061f2 |  |
|  | facf6febf0 |  |
|  | 46e687e8d1 |  |
|  | 599103c933 |  |
|  | 54df960a4a |  |
|  | 3463683632 |  |
|  | 6b973b2556 |  |
|  | c0e2c13d0d |  |
|  | 591a7a152c |  |
|  | b7c7b4b387 |  |
|  | ea0246c51a |  |
|  | 0f85769976 |  |
|  | 47f0b45be3 |  |
|  | f4bed22b05 |  |
|  | 17eb65b26f |  |
|  | 9986397d56 |  |
|  | 58c641c92f |  |
|  | 227fa4e035 |  |
|  | 2b21b77ee6 |  |
|  | ba272b093c |  |
|  | 8972e5d175 |  |
|  | 9ac2e61bad |  |
|  | 6deb056117 |  |
|  | c4aad4b161 |  |
|  | 3abd5c08a5 |  |
|  | 2014cba87f |  |
|  | 5661353334 |  |
|  | dd5d0d523c |  |
|  | 1be8c2e340 |  |
|  | 52d8847b60 |  |
|  | d3b6e8f58b |  |
|  | bf620dcb38 |  |
|  | fae0573817 |  |
|  | 0c23b868dc |  |
|  | 3ceedf76b8 |  |
|  | 828871dc5c |  |
|  | ee21fc7fd8 |  |
|  | 28ab5f4065 |  |
|  | a73bebcf15 |  |
|  | 34dafb67a2 |  |
|  | f8656ff35e |  |
|  | 34b4732c46 |  |
|  | ce68f1cc1b |  |
|  | 281e6d9791 |  |
|  | ee258caed7 |  |
|  | b4d9d26020 |  |
|  | a99832088a |  |
|  | 770ef2ab27 |  |
|  | c6023c03a2 |  |
|  | df694ca1c1 |  |
|  | 2e75cfbfe7 |  |
|  | cfafaa7637 |  |
|  | 3e9c7adeee |  |
|  | 81cd438d88 |  |
|  | 483748c188 |  |
|  | eb3dc37faa |  |
|  | aba1802828 |  |
|  | 96b42b0c8f |  |
|  | e6d0c4a65d |  |
|  | 4e1b289a67 |  |
|  | a5ef124201 |  |
|  | 390bb43276 |  |
|  | fe8b15291f |  |
|  | c8e01d7c53 |  |
|  | c4d628cc4c |  |
|  | ab3648c4c5 |  |
|  | a822fd6642 |  |
|  | f8f2e2a442 |  |
|  | 0b5828a1e8 |  |
|  | 5af48337a5 |  |
|  | 39ad6b9472 |  |
|  | 41dec93cd2 |  |
|  | aee2caa733 |  |
|  | fe5544e137 |  |
|  | 14c014a48b |  |
|  | ecd0597d6b |  |
|  | 202271fba6 |  |
|  | 4bdb0b4f86 |  |
|  | 2286f916c1 |  |
|  | 1e4c944251 |  |
|  | f50f8732e9 |  |
|  | ecab04e338 |  |
|  | 8c09496b07 |  |
|  | d19fd1b91c |  |
|  | 99df859e20 |  |
|  | 2d5fe9a6d3 |  |
|  | 1f2cb09853 |  |
|  | cfe25ab465 |  |
|  | 551ed2706b |  |
|  | 21c5606793 |  |
|  | c73a7bb929 |  |
|  | 4f6accb5c6 |  |
|  | 1ca14e4335 |  |
|  | b9c8113a8a |  |
|  | 2edd32aa31 |  |
@@ -20,7 +20,7 @@
   "extensions": [
     "ms-python.python",
     "rust-lang.rust-analyzer",
-    "serayuzgur.crates",
+    "fill-labs.dependi",
     "tamasfe.even-better-toml",
     "Swellaby.vscode-rust-test-adapter",
     "charliermarsh.ruff"
.github/workflows/ci.yaml (vendored, 14 changes)
@@ -37,7 +37,7 @@ jobs:
        with:
          fetch-depth: 0

-      - uses: tj-actions/changed-files@v44
+      - uses: tj-actions/changed-files@v45
        id: changed
        with:
          files_yaml: |
@@ -148,7 +148,7 @@ jobs:
      # sync, not just public items. Eventually we should do this for all
      # crates; for now add crates here as they are warning-clean to prevent
      # regression.
-      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
+      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db --document-private-items
        env:
          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
          RUSTDOCFLAGS: "-D warnings"
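The same documentation check can be reproduced locally. This is a sketch assembled from the hunk above: the crate list mirrors the new CI line, and setting `RUSTDOCFLAGS` is the workaround for the missing `cargo doc --check` named in the comment.

```shell
# Fail the build on any rustdoc warning, mirroring the CI step above.
RUSTDOCFLAGS="-D warnings" cargo doc --no-deps \
    -p red_knot_python_semantic -p red_knot -p red_knot_test -p ruff_db \
    --document-private-items
```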
@@ -518,6 +518,8 @@ jobs:
    steps:
      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.13"
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
@@ -525,13 +527,15 @@ jobs:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
        run: rustup show
+      - name: Install uv
+        uses: astral-sh/setup-uv@v3
      - uses: Swatinem/rust-cache@v2
      - name: "Install Insiders dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: pip install -r docs/requirements-insiders.txt
+        run: uv pip install -r docs/requirements-insiders.txt --system
      - name: "Install dependencies"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
-        run: pip install -r docs/requirements.txt
+        run: uv pip install -r docs/requirements.txt --system
      - name: "Update README File"
        run: python scripts/transform_readme.py --target mkdocs
      - name: "Generate docs"
@@ -608,7 +612,7 @@ jobs:
          just test

  benchmarks:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    needs: determine_changes
    if: ${{ github.repository == 'astral-sh/ruff' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
    timeout-minutes: 20
.github/workflows/publish-docs.yml (vendored, 9 changes)
@@ -34,10 +34,10 @@ jobs:
      - name: "Set docs version"
        run: |
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
-          # if version is missing, exit with error
-          if [[ -z "$version" ]]; then
-            echo "Can't build docs without a version."
-            exit 1
+          # if version is missing, use 'latest'
+          if [ -z "$version" ]; then
+            echo "Using 'latest' as version"
+            version="latest"
          fi

          # Use version as display name for now
@@ -145,6 +145,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          branch_name="${{ env.branch_name }}"

          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
+          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10
.github/workflows/release.yml (vendored, 4 changes)
@@ -1,3 +1,5 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
#
+# Copyright 2022-2024, axodotdev
+# SPDX-License-Identifier: MIT or Apache-2.0
#
@@ -64,7 +66,7 @@ jobs:
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.18.0/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
      - name: Cache cargo-dist
        uses: actions/upload-artifact@v4
        with:
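The `pipefail` comment in the hunk above is worth unpacking: in plain `sh`, a pipeline's exit status is that of its last command, so a failed `curl` piped into `sh` still reports success. A minimal illustration (the URL is hypothetical; `set -o pipefail` is standard bash, and GitHub Actions' `shell: bash` enables it for `run:` steps):

```shell
# Plain sh: the pipeline "succeeds" even though curl fails.
sh -c 'curl --fail -Ls https://example.invalid/installer.sh | sh'; echo "sh exit: $?"

# Bash with pipefail: curl's failure propagates to the pipeline status.
bash -c 'set -o pipefail; curl --fail -Ls https://example.invalid/installer.sh | sh'; echo "bash exit: $?"
```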
.github/workflows/sync_typeshed.yaml (vendored, 14 changes)
@@ -37,13 +37,13 @@ jobs:
      - name: Sync typeshed
        id: sync
        run: |
-          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed
-          mkdir ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp typeshed/README.md ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp typeshed/LICENSE ruff/crates/red_knot_python_semantic/vendor/typeshed
-          cp -r typeshed/stdlib ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib
-          rm -rf ruff/crates/red_knot_python_semantic/vendor/typeshed/stdlib/@tests
-          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt
+          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed
+          mkdir ruff/crates/red_knot_vendored/vendor/typeshed
+          cp typeshed/README.md ruff/crates/red_knot_vendored/vendor/typeshed
+          cp typeshed/LICENSE ruff/crates/red_knot_vendored/vendor/typeshed
+          cp -r typeshed/stdlib ruff/crates/red_knot_vendored/vendor/typeshed/stdlib
+          rm -rf ruff/crates/red_knot_vendored/vendor/typeshed/stdlib/@tests
+          git -C typeshed rev-parse HEAD > ruff/crates/red_knot_vendored/vendor/typeshed/source_commit.txt
      - name: Commit the changes
        id: commit
        if: ${{ steps.sync.outcome == 'success' }}
@@ -2,7 +2,7 @@ fail_fast: true

exclude: |
  (?x)^(
-    crates/red_knot_python_semantic/vendor/.*|
+    crates/red_knot_vendored/vendor/.*|
    crates/red_knot_workspace/resources/.*|
    crates/ruff_linter/resources/.*|
    crates/ruff_linter/src/rules/.*/snapshots/.*|
@@ -17,7 +17,7 @@ exclude: |

repos:
  - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.19
+    rev: v0.20.2
    hooks:
      - id: validate-pyproject
@@ -28,6 +28,7 @@ repos:
      additional_dependencies:
        - mdformat-mkdocs
        - mdformat-admon
+        - mdformat-footnote
      exclude: |
        (?x)^(
          docs/formatter/black\.md
@@ -35,7 +36,7 @@ repos:
        )$

  - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.41.0
+    rev: v0.42.0
    hooks:
      - id: markdownlint-fix
        exclude: |
@@ -45,7 +46,7 @@ repos:
        )$

  - repo: https://github.com/crate-ci/typos
-    rev: v1.23.6
+    rev: v1.25.0
    hooks:
      - id: typos
@@ -59,7 +60,7 @@ repos:
      pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.1
+    rev: v0.6.9
    hooks:
      - id: ruff-format
      - id: ruff
@@ -68,8 +69,8 @@ repos:
      require_serial: true

  # Prettier
-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v3.1.0
+  - repo: https://github.com/rbubley/mirrors-prettier
+    rev: v3.3.3
    hooks:
      - id: prettier
        types: [yaml]
CHANGELOG.md (181 changes)
@@ -1,5 +1,186 @@
# Changelog

## 0.6.9

### Preview features

- Fix codeblock dynamic line length calculation for indented docstring examples ([#13523](https://github.com/astral-sh/ruff/pull/13523))
- \[`refurb`\] Mark `FURB118` fix as unsafe ([#13613](https://github.com/astral-sh/ruff/pull/13613))

### Rule changes

- \[`pydocstyle`\] Don't raise `D208` when last line is non-empty ([#13372](https://github.com/astral-sh/ruff/pull/13372))
- \[`pylint`\] Preserve trivia (i.e. comments) in `PLR5501` autofix ([#13573](https://github.com/astral-sh/ruff/pull/13573))

### Configuration

- \[`pyflakes`\] Add `allow-unused-imports` setting for `unused-import` rule (`F401`) ([#13601](https://github.com/astral-sh/ruff/pull/13601))

### Bug fixes

- Support ruff discovery in pip build environments ([#13591](https://github.com/astral-sh/ruff/pull/13591))
- \[`flake8-bugbear`\] Avoid short circuiting `B017` for multiple context managers ([#13609](https://github.com/astral-sh/ruff/pull/13609))
- \[`pylint`\] Do not offer an invalid fix for `PLR1716` when the comparisons contain parentheses ([#13527](https://github.com/astral-sh/ruff/pull/13527))
- \[`pyupgrade`\] Fix `UP043` to apply to `collections.abc.Generator` and `collections.abc.AsyncGenerator` ([#13611](https://github.com/astral-sh/ruff/pull/13611))
- \[`refurb`\] Fix handling of slices in tuples for `FURB118`, e.g., `x[:, 1]` ([#13518](https://github.com/astral-sh/ruff/pull/13518))

### Documentation

- Update GitHub Action link to `astral-sh/ruff-action` ([#13551](https://github.com/astral-sh/ruff/pull/13551))

## 0.6.8

### Preview features

- Remove unnecessary parentheses around `match case` clauses ([#13510](https://github.com/astral-sh/ruff/pull/13510))
- Parenthesize overlong `if` guards in `match..case` clauses ([#13513](https://github.com/astral-sh/ruff/pull/13513))
- Detect basic wildcard imports in `ruff analyze graph` ([#13486](https://github.com/astral-sh/ruff/pull/13486))
- \[`pylint`\] Implement `boolean-chained-comparison` (`R1716`) ([#13435](https://github.com/astral-sh/ruff/pull/13435))

### Rule changes

- \[`flake8-simplify`\] Detect `SIM910` when using variadic keyword arguments, i.e., `**kwargs` ([#13503](https://github.com/astral-sh/ruff/pull/13503))
- \[`pyupgrade`\] Avoid false negatives with non-reference shadowed bindings of loop variables (`UP028`) ([#13504](https://github.com/astral-sh/ruff/pull/13504))

### Bug fixes

- Detect tuples bound to variadic positional arguments i.e. `*args` ([#13512](https://github.com/astral-sh/ruff/pull/13512))
- Exit gracefully on broken pipe errors ([#13485](https://github.com/astral-sh/ruff/pull/13485))
- Avoid panic when analyze graph hits broken pipe ([#13484](https://github.com/astral-sh/ruff/pull/13484))

### Performance

- Reuse `BTreeSets` in module resolver ([#13440](https://github.com/astral-sh/ruff/pull/13440))
- Skip traversal for non-compound statements ([#13441](https://github.com/astral-sh/ruff/pull/13441))

## 0.6.7

### Preview features

- Add Python version support to ruff analyze CLI ([#13426](https://github.com/astral-sh/ruff/pull/13426))
- Add `exclude` support to `ruff analyze` ([#13425](https://github.com/astral-sh/ruff/pull/13425))
- Fix parentheses around return type annotations ([#13381](https://github.com/astral-sh/ruff/pull/13381))

### Rule changes

- \[`pycodestyle`\] Fix: Don't autofix if the first line ends in a question mark? (D400) ([#13399](https://github.com/astral-sh/ruff/pull/13399))

### Bug fixes

- Respect `lint.exclude` in ruff check `--add-noqa` ([#13427](https://github.com/astral-sh/ruff/pull/13427))

### Performance

- Avoid tracking module resolver files in Salsa ([#13437](https://github.com/astral-sh/ruff/pull/13437))
- Use `forget` for module resolver database ([#13438](https://github.com/astral-sh/ruff/pull/13438))

## 0.6.6

### Preview features

- \[`refurb`\] Skip `slice-to-remove-prefix-or-suffix` (`FURB188`) when non-trivial slice steps are present ([#13405](https://github.com/astral-sh/ruff/pull/13405))
- Add a subcommand to generate dependency graphs ([#13402](https://github.com/astral-sh/ruff/pull/13402))

### Formatter

- Fix placement of inline parameter comments ([#13379](https://github.com/astral-sh/ruff/pull/13379))

### Server

- Fix off-by one error in the `LineIndex::offset` calculation ([#13407](https://github.com/astral-sh/ruff/pull/13407))

### Bug fixes

- \[`fastapi`\] Respect FastAPI aliases in route definitions ([#13394](https://github.com/astral-sh/ruff/pull/13394))
- \[`pydocstyle`\] Respect word boundaries when detecting function signature in docs ([#13388](https://github.com/astral-sh/ruff/pull/13388))

### Documentation

- Add backlinks to rule overview linter ([#13368](https://github.com/astral-sh/ruff/pull/13368))
- Fix documentation for editor vim plugin ALE ([#13348](https://github.com/astral-sh/ruff/pull/13348))
- Fix rendering of `FURB188` docs ([#13406](https://github.com/astral-sh/ruff/pull/13406))

## 0.6.5

### Preview features

- \[`pydoclint`\] Ignore `DOC201` when function name is `__new__` ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))

### Rule changes

- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))

### Server

- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))

### Bug fixes

- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))

### CLI

- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))

### Playground

- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))

## 0.6.4

### Preview features

- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))

### Rule changes

- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))

### CLI

- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))

### Bug fixes

- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))

## 0.6.3

### Preview features

- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))

### Rule changes

- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))

### Bug fixes

- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))

## 0.6.2

### Preview features
@@ -29,16 +29,14 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```

-And you'll need pre-commit to run some validation checks:
-
-```shell
-pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
-```
+You'll need [uv](https://docs.astral.sh/uv/getting-started/installation/) (or `pipx` and `pip`) to
+run Python utility commands.

You can optionally install pre-commit hooks to automatically run the validation checks
when making a commit:

```shell
+uv tool install pre-commit
pre-commit install
```
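Since the hunk's context line mentions Insta for snapshot tests, the typical update loop looks like this (a sketch: `cargo insta review` is cargo-insta's interactive subcommand; the `-p ruff_linter` package filter is just an example):

```shell
# Run the tests; failed snapshot assertions write `.snap.new` files next to the old snapshots.
cargo test -p ruff_linter
# Interactively accept, reject, or skip each pending snapshot.
cargo insta review
```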
@@ -66,7 +64,7 @@ and that it passes both the lint and test validation checks:
```shell
cargo clippy --workspace --all-targets --all-features -- -D warnings # Rust linting
RUFF_UPDATE_SCHEMA=1 cargo test # Rust testing and updating ruff.schema.json
-pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
+uvx pre-commit run --all-files --show-diff-on-failure # Rust and Python formatting, Markdown and Python linting, etc.
```

These checks will run on GitHub Actions when you open your pull request, but running them locally
@@ -267,26 +265,20 @@ To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

-1. Install MkDocs and Material for MkDocs with:
-
-    ```shell
-    pip install -r docs/requirements.txt
-    ```
-
1. Generate the MkDocs site with:

    ```shell
-    python scripts/generate_mkdocs.py
+    uv run --no-project --isolated --with-requirements docs/requirements.txt scripts/generate_mkdocs.py
    ```

1. Run the development server with:

    ```shell
    # For contributors.
-    mkdocs serve -f mkdocs.public.yml
+    uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml

    # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
-    mkdocs serve -f mkdocs.insiders.yml
+    uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
    ```

The documentation should then be available locally at
@@ -368,9 +360,8 @@ GitHub Actions will run your changes against a number of real-world projects from the ecosystem and
report on any linter or formatter differences. You can also run those checks locally via:

```shell
-pip install -e ./python/ruff-ecosystem
-ruff-ecosystem check ruff "./target/debug/ruff"
-ruff-ecosystem format ruff "./target/debug/ruff"
+uvx --from ./python/ruff-ecosystem ruff-ecosystem check ruff "./target/debug/ruff"
+uvx --from ./python/ruff-ecosystem ruff-ecosystem format ruff "./target/debug/ruff"
```

See the [ruff-ecosystem package](https://github.com/astral-sh/ruff/tree/main/python/ruff-ecosystem) for more details.
@@ -530,6 +521,8 @@ You can run the benchmarks with
cargo benchmark
```

+`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
+
#### Benchmark-driven Development

Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -568,7 +561,7 @@ cargo install critcmp

#### Tips

-- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
+- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
  to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
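The Tips hunk above sits under a `cargo install critcmp` context line; the usual Criterion baseline-comparison flow looks like this (a sketch; the baseline names `main` and `pr` are arbitrary labels):

```shell
# On the main branch: record a baseline.
cargo bench -p ruff_benchmark -- --save-baseline=main
# On your feature branch: record a second baseline, then compare the two.
cargo bench -p ruff_benchmark -- --save-baseline=pr
critcmp main pr
```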
Cargo.lock (generated, 606 changes): file diff suppressed because it is too large.
Cargo.toml (93 changes)
@@ -14,9 +14,10 @@ license = "MIT"
[workspace.dependencies]
ruff = { path = "crates/ruff" }
ruff_cache = { path = "crates/ruff_cache" }
-ruff_db = { path = "crates/ruff_db" }
+ruff_db = { path = "crates/ruff_db", default-features = false }
ruff_diagnostics = { path = "crates/ruff_diagnostics" }
ruff_formatter = { path = "crates/ruff_formatter" }
+ruff_graph = { path = "crates/ruff_graph" }
ruff_index = { path = "crates/ruff_index" }
ruff_linter = { path = "crates/ruff_linter" }
ruff_macros = { path = "crates/ruff_macros" }
@@ -33,15 +34,18 @@ ruff_python_trivia = { path = "crates/ruff_python_trivia" }
ruff_server = { path = "crates/ruff_server" }
ruff_source_file = { path = "crates/ruff_source_file" }
ruff_text_size = { path = "crates/ruff_text_size" }
+red_knot_vendored = { path = "crates/red_knot_vendored" }
ruff_workspace = { path = "crates/ruff_workspace" }

red_knot_python_semantic = { path = "crates/red_knot_python_semantic" }
red_knot_server = { path = "crates/red_knot_server" }
-red_knot_workspace = { path = "crates/red_knot_workspace" }
+red_knot_test = { path = "crates/red_knot_test" }
+red_knot_workspace = { path = "crates/red_knot_workspace", default-features = false }

aho-corasick = { version = "1.1.3" }
annotate-snippets = { version = "0.9.2", features = ["color"] }
anyhow = { version = "1.0.80" }
+assert_fs = { version = "1.1.0" }
argfile = { version = "0.2.0" }
bincode = { version = "1.3.3" }
bitflags = { version = "2.5.0" }
@@ -68,7 +72,11 @@ fern = { version = "0.6.1" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
-hashbrown = "0.14.3"
+globwalk = { version = "0.9.1" }
+hashbrown = { version = "0.15.0", default-features = false, features = [
+    "raw-entry",
+    "inline-more",
+] }
ignore = { version = "0.4.22" }
imara-diff = { version = "0.1.5" }
imperative = { version = "1.0.4" }
@@ -86,7 +94,7 @@ libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
lsp-server = { version = "0.7.6" }
lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [
    "proposed",
] }
matchit = { version = "0.8.1" }
memchr = { version = "2.7.1" }
@@ -102,13 +110,14 @@ pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
-quick-junit = { version = "0.4.0" }
+quick-junit = { version = "0.5.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
+rstest = { version = "0.22.0", default-features = false }
rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "4a7c955255e707e64e43f3ce5eabb771ae067768" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
@@ -116,7 +125,7 @@ serde-wasm-bindgen = { version = "0.6.4" }
serde_json = { version = "1.0.113" }
serde_test = { version = "1.0.152" }
serde_with = { version = "3.6.0", default-features = false, features = [
    "macros",
] }
shellexpand = { version = "3.0.0" }
similar = { version = "2.4.0", features = ["inline"] }
@@ -133,7 +142,10 @@ toml = { version = "0.8.11" }
tracing = { version = "0.1.40" }
tracing-flame = { version = "0.2.0" }
tracing-indicatif = { version = "0.3.6" }
-tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt"] }
+tracing-subscriber = { version = "0.3.18", default-features = false, features = [
+    "env-filter",
+    "fmt",
+] }
tracing-tree = { version = "0.4.0" }
typed-arena = { version = "2.0.2" }
unic-ucd-category = { version = "0.9" }
@@ -144,10 +156,10 @@ unicode-normalization = { version = "0.1.23" }
ureq = { version = "2.9.6" }
url = { version = "2.5.0" }
uuid = { version = "1.6.1", features = [
    "v4",
    "fast-rng",
    "macro-diagnostics",
    "js",
] }
walkdir = { version = "2.3.2" }
wasm-bindgen = { version = "0.2.92" }
@@ -158,7 +170,10 @@ zip = { version = "0.6.6", default-features = false }
[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
-unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)", "cfg(codspeed)"] }
+unexpected_cfgs = { level = "warn", check-cfg = [
+    "cfg(fuzzing)",
+    "cfg(codspeed)",
+] }

[workspace.lints.clippy]
pedantic = { level = "warn", priority = -2 }
@@ -230,9 +245,9 @@ inherits = "release"
# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
-cargo-dist-version = "0.18.0"
+cargo-dist-version = "0.22.1"
# CI backends to support
-ci = ["github"]
+ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
@@ -241,33 +256,33 @@ windows-archive = ".zip"
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a GitHub Release or use an existing draft
create-release = true
-# Publish jobs to run in CI
+# Which actions to run on pull requests
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
-# The stage during which the GitHub Release should be created
+# Which phase cargo-dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
@@ -275,9 +290,15 @@ build-local-artifacts = false
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi", "./publish-wasm"]
-# Announcement jobs to run in CI
-post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
+# Post-announce jobs to run in CI
+post-announce-jobs = [
+    "./notify-dependents",
+    "./publish-docs",
+    "./publish-playground",
+]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = "CARGO_HOME"
README.md (26 changes)
@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)

-## Getting Started
+## Getting Started<a id="getting-started"></a>

For more, see the [documentation](https://docs.astral.sh/ruff/).
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
-curl -LsSf https://astral.sh/ruff/0.6.2/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.6.2/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.6.9/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.6.9/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
  # Ruff version.
-  rev: v0.6.2
+  rev: v0.6.9
  hooks:
    # Run the linter.
    - id: ruff
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
Ruff can also be used as a [VS Code extension](https://github.com/astral-sh/ruff-vscode) or with [various other editors](https://docs.astral.sh/ruff/editors/setup).

Ruff can also be used as a [GitHub Action](https://github.com/features/actions) via
-[`ruff-action`](https://github.com/chartboost/ruff-action):
+[`ruff-action`](https://github.com/astral-sh/ruff-action):

```yaml
name: Ruff
@@ -192,10 +192,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-      - uses: chartboost/ruff-action@v1
+      - uses: astral-sh/ruff-action@v1
```

-### Configuration
+### Configuration<a id="configuration"></a>

Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -291,7 +291,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.

-## Rules
+## Rules<a id="rules"></a>

<!-- Begin section: Rules -->
@@ -367,21 +367,21 @@ quality tools, including:

For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

-## Contributing
+## Contributing<a id="contributing"></a>

Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

-## Support
+## Support<a id="support"></a>

Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

-## Acknowledgements
+## Acknowledgements<a id="acknowledgements"></a>

Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/astral-sh/ruff/graphs/contributors).

Ruff is released under the MIT license.

-## Who's Using Ruff?
+## Who's Using Ruff?<a id="whos-using-ruff"></a>

Ruff is used by a number of major open-source projects and companies, including:
@@ -524,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.md`:
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```

-## License
+## License<a id="license"></a>

This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
@@ -1,6 +1,6 @@
[files]
# https://github.com/crate-ci/typos/issues/868
-extend-exclude = ["crates/red_knot_python_semantic/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
+extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
@@ -8,7 +8,7 @@ hel = "hel"
whos = "whos"
spawnve = "spawnve"
ned = "ned"
-pn = "pn" # `import panel as pd` is a thing
+pn = "pn" # `import panel as pn` is a thing
poit = "poit"
BA = "BA" # acronym for "Bad Allowed", used in testing.
jod = "jod" # e.g., `jod-thread`
@@ -13,9 +13,8 @@ license.workspace = true

[dependencies]
red_knot_python_semantic = { workspace = true }
-red_knot_workspace = { workspace = true }
+red_knot_workspace = { workspace = true, features = ["zstd"] }
red_knot_server = { workspace = true }

ruff_db = { workspace = true, features = ["os", "cache"] }

anyhow = { workspace = true }
@@ -13,12 +13,17 @@ The CLI supports different verbosity levels.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing spans: At this level, you're logging everything. Most of it is wasted, it's really slow, and we dump e.g. the entire resolution graph. Only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.

-## `RED_KNOT_LOG`
+## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`

By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

+The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
+By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
+but this can result in a confused logging output where messages from different threads are intertwined.
+To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
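Putting the two variables together might look like this (a sketch: the directive syntax is tracing-subscriber's `EnvFilter`, but the target names and the exact CLI invocation are illustrative, not taken from the diff):

```shell
# Check single-threaded so log lines aren't interleaved, with debug messages
# from the red_knot crates and trace messages from one module (names illustrative).
RAYON_NUM_THREADS=1 RED_KNOT_LOG="red_knot=debug,red_knot_python_semantic=trace" red_knot -vv
```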
### Examples

#### Show all debug messages
@@ -160,7 +160,7 @@ fn run() -> anyhow::Result<ExitStatus> {
        SystemPathBuf::from_path_buf(cwd)
            .map_err(|path| {
                anyhow!(
-                    "The current working directory '{}' contains non-unicode characters. Red Knot only supports unicode paths.",
+                    "The current working directory `{}` contains non-Unicode characters. Red Knot only supports Unicode paths.",
                    path.display()
                )
            })?
@@ -174,7 +174,7 @@ fn run() -> anyhow::Result<ExitStatus> {
            Ok(SystemPath::absolute(cwd, &cli_base_path))
        } else {
            Err(anyhow!(
-                "Provided current-directory path '{cwd}' is not a directory."
+                "Provided current-directory path `{cwd}` is not a directory"
            ))
        }
    })
@@ -42,14 +42,14 @@ impl TestCase {
    fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
        self.try_stop_watch(Duration::from_secs(10))
-            .expect("Expected watch changes but observed none.")
+            .expect("Expected watch changes but observed none")
    }

    fn try_stop_watch(&mut self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
        let watcher = self
            .watcher
            .take()
-            .expect("Cannot call `stop_watch` more than once.");
+            .expect("Cannot call `stop_watch` more than once");

        let mut all_events = self
            .changes_receiver
@@ -72,7 +72,7 @@ impl TestCase {
    #[cfg(unix)]
    fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
        self.try_take_watch_changes(Duration::from_secs(10))
-            .expect("Expected watch changes but observed none.")
+            .expect("Expected watch changes but observed none")
    }

    fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
@@ -150,14 +150,14 @@ where
        let absolute_path = workspace_path.join(relative_path);
        if let Some(parent) = absolute_path.parent() {
            std::fs::create_dir_all(parent).with_context(|| {
-                format!("Failed to create parent directory for file '{relative_path}'.",)
+                format!("Failed to create parent directory for file `{relative_path}`")
            })?;
        }

        let mut file = std::fs::File::create(absolute_path.as_std_path())
-            .with_context(|| format!("Failed to open file '{relative_path}'"))?;
+            .with_context(|| format!("Failed to open file `{relative_path}`"))?;
        file.write_all(content.as_bytes())
-            .with_context(|| format!("Failed to write to file '{relative_path}'"))?;
+            .with_context(|| format!("Failed to write to file `{relative_path}`"))?;
        file.sync_data()?;
    }
@@ -194,7 +194,7 @@ where
    let root_path = SystemPath::from_std_path(temp_dir.path()).ok_or_else(|| {
        anyhow!(
-            "Temp directory '{}' is not a valid UTF-8 path.",
+            "Temporary directory `{}` is not a valid UTF-8 path.",
            temp_dir.path().display()
        )
    })?;
@@ -209,7 +209,7 @@ where
    let workspace_path = root_path.join("workspace");

    std::fs::create_dir_all(workspace_path.as_std_path())
-        .with_context(|| format!("Failed to create workspace directory '{workspace_path}'",))?;
+        .with_context(|| format!("Failed to create workspace directory `{workspace_path}`"))?;

    setup_files
        .setup(&root_path, &workspace_path)
@@ -233,7 +233,7 @@ where
    }))
    {
        std::fs::create_dir_all(path.as_std_path())
-            .with_context(|| format!("Failed to create search path '{path}'"))?;
+            .with_context(|| format!("Failed to create search path `{path}`"))?;
    }

    let configuration = Configuration {
@@ -665,7 +665,7 @@ fn directory_deleted() -> anyhow::Result<()> {
    let bar = case.system_file(case.workspace_path("bar.py")).unwrap();

-    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some(),);
+    assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_some());

    let sub_path = case.workspace_path("sub");
@@ -17,13 +17,14 @@ ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
+ruff_python_literal = { workspace = true }

anyhow = { workspace = true }
bitflags = { workspace = true }
camino = { workspace = true }
compact_str = { workspace = true }
countme = { workspace = true }
-once_cell = { workspace = true }
+itertools = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
@@ -32,21 +33,19 @@ rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }

-[build-dependencies]
-path-slash = { workspace = true }
-walkdir = { workspace = true }
-zip = { workspace = true, features = ["zstd", "deflate"] }
+test-case = { workspace = true }
+memchr = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["os", "testing"] }
ruff_python_parser = { workspace = true }
+red_knot_test = { workspace = true }
+red_knot_vendored = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true }
rstest = { workspace = true }
tempfile = { workspace = true }
-walkdir = { workspace = true }
-zip = { workspace = true }

[lints]
workspace = true
@@ -1,87 +1,4 @@
|
||||
//! Build script to package our vendored typeshed files
|
||||
//! into a zip archive that can be included in the Ruff binary.
|
||||
//!
|
||||
//! This script should be automatically run at build time
|
||||
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_python_semantic/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;

use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;

const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";

/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // ([source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM)), which complicates
    // the build considerably. Deflated compression is slower, but that shouldn't matter much
    // for the WASM use case (WASM itself is already slower than a native build for a specific
    // platform). We can't use `#[cfg(...)]` here because the target arch in a build script is
    // the architecture of the system running the build script, not the architecture of the
    // build target. That's why we use the `TARGET` environment variable instead.
    let method = if std::env::var("TARGET").unwrap().contains("wasm32") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Zstd
    };

    let options = FileOptions::default()
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
        let dir_entry = entry.unwrap();
        let absolute_path = dir_entry.path();
        let normalized_relative_path = absolute_path
            .strip_prefix(Path::new(directory_path))
            .unwrap()
            .to_slash()
            .expect("Unexpected non-UTF-8 typeshed path!");

        // Write files and directories explicitly:
        // some unzip tools unzip files with directory paths correctly, some do not!
        if absolute_path.is_file() {
            println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.start_file(normalized_relative_path, options)?;
            let mut f = File::open(absolute_path)?;
            std::io::copy(&mut f, &mut zip).unwrap();
        } else if !normalized_relative_path.is_empty() {
            // Only if not root! Avoids a "path spec" warning
            // and a "mapname conversion failed" error on unzip.
            println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.add_directory(normalized_relative_path, options)?;
        }
    }
    zip.finish()
}

fn main() {
    println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
    assert!(
        Path::new(TYPESHED_SOURCE_DIR).is_dir(),
        "Where is typeshed?"
    );
    let out_dir = std::env::var("OUT_DIR").unwrap();

    // N.B. Deliberately using `format!()` instead of `Path::join()` here,
    // so that we use `/` as a path separator on all platforms.
    // That enables us to load the typeshed zip at compile time in `module.rs`
    // (otherwise we'd have to dynamically determine the exact path to the typeshed zip
    // based on the default path separator for the specific platform we're on,
    // which can't be done at compile time.)
    let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");

    let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
    zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}

/// Rebuild the crate if a test file is added or removed from `resources/mdtest`.
pub fn main() {
    println!("cargo:rerun-if-changed=resources/mdtest");
}
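
For context, the comment in `main()` above says the zip is loaded at compile time in `module.rs`; a minimal sketch of what such a consumer might look like (hypothetical, not part of this diff):

```rust
// Embed the zip produced by the build script at compile time.
// `concat!` works here only because the build script joins the path with `/`
// on every platform, so the full path is a compile-time constant.
static ZIPPED_TYPESHED: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));
```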
crates/red_knot_python_semantic/resources/README.md (new file, 4 lines)
@@ -0,0 +1,4 @@
Markdown files within the `mdtest/` subdirectory are tests of type inference and type checking,
executed by the `tests/mdtest.rs` integration test.

See `crates/red_knot_test/README.md` for documentation of this test format.
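
As a sketch of the format (the files below are real instances): each test is a Markdown section containing fenced `py` blocks, where `# revealed:` comments assert the type that should be inferred for the given expression.

```py
reveal_type(1)  # revealed: Literal[1]
```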
@@ -0,0 +1,43 @@
### Comparison: Byte literals

These tests assert that we infer precise `Literal` types for comparisons between objects
inferred as having `Literal` bytes types:

```py
reveal_type(b"abc" == b"abc") # revealed: Literal[True]
reveal_type(b"abc" == b"ab") # revealed: Literal[False]

reveal_type(b"abc" != b"abc") # revealed: Literal[False]
reveal_type(b"abc" != b"ab") # revealed: Literal[True]

reveal_type(b"abc" < b"abd") # revealed: Literal[True]
reveal_type(b"abc" < b"abb") # revealed: Literal[False]

reveal_type(b"abc" <= b"abc") # revealed: Literal[True]
reveal_type(b"abc" <= b"abb") # revealed: Literal[False]

reveal_type(b"abc" > b"abd") # revealed: Literal[False]
reveal_type(b"abc" > b"abb") # revealed: Literal[True]

reveal_type(b"abc" >= b"abc") # revealed: Literal[True]
reveal_type(b"abc" >= b"abd") # revealed: Literal[False]

reveal_type(b"" in b"") # revealed: Literal[True]
reveal_type(b"" in b"abc") # revealed: Literal[True]
reveal_type(b"abc" in b"") # revealed: Literal[False]
reveal_type(b"ab" in b"abc") # revealed: Literal[True]
reveal_type(b"abc" in b"abc") # revealed: Literal[True]
reveal_type(b"d" in b"abc") # revealed: Literal[False]
reveal_type(b"ac" in b"abc") # revealed: Literal[False]
reveal_type(b"\x81\x82" in b"\x80\x81\x82") # revealed: Literal[True]
reveal_type(b"\x82\x83" in b"\x80\x81\x82") # revealed: Literal[False]

reveal_type(b"ab" not in b"abc") # revealed: Literal[False]
reveal_type(b"ac" not in b"abc") # revealed: Literal[True]

reveal_type(b"abc" is b"abc") # revealed: bool
reveal_type(b"abc" is b"ab") # revealed: Literal[False]

reveal_type(b"abc" is not b"abc") # revealed: bool
reveal_type(b"abc" is not b"ab") # revealed: Literal[True]
```
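
Note that `b"abc" is b"abc"` reveals `bool` rather than `Literal[True]`: two equal bytes literals may or may not be the same object at runtime, whereas *unequal* literals can never be identical, so only those cases narrow to a precise literal.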
@@ -0,0 +1,29 @@
# Narrowing for `is` conditionals

## `is None`

```py
x = None if flag else 1

if x is None:
    # TODO the following should be simplified to 'None'
    reveal_type(x) # revealed: None | Literal[1] & None

reveal_type(x) # revealed: None | Literal[1]
```

## `is` for other types

```py
class A:
    ...

x = A()
y = x if flag else None

if y is x:
    # TODO the following should be simplified to 'A'
    reveal_type(y) # revealed: A | None & A

reveal_type(y) # revealed: A | None
```
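
(In the revealed types above, `&` denotes an intersection type; the TODO comments track simplifying away impossible intersections such as `Literal[1] & None`.)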
@@ -0,0 +1,40 @@
# Narrowing for `is not` conditionals

## `is not None`

The type guard removes `None` from the union type:

```py
x = None if flag else 1

if x is not None:
    reveal_type(x) # revealed: Literal[1]

reveal_type(x) # revealed: None | Literal[1]
```

## `is not` for other singleton types

```py
x = True if flag else False
reveal_type(x) # revealed: bool

if x is not False:
    # TODO the following should be `Literal[True]`
    reveal_type(x) # revealed: bool & ~Literal[False]
```

## `is not` for non-singleton types

Non-singleton types should *not* narrow the type: two instances of a
non-singleton class may occupy different addresses in memory even if
they compare equal.

```py
x = [1]
y = [1]

if x is not y:
    # TODO: should include type parameter: list[int]
    reveal_type(x) # revealed: list
```
@@ -0,0 +1,17 @@
# Narrowing for `match` statements

## Single `match` pattern

```py
x = None if flag else 1
reveal_type(x) # revealed: None | Literal[1]

y = 0

match x:
    case None:
        y = x

# TODO intersection simplification: should be just Literal[0] | None
reveal_type(y) # revealed: Literal[0] | None | Literal[1] & None
```
crates/red_knot_python_semantic/resources/mdtest/numbers.md (new file, 35 lines)
@@ -0,0 +1,35 @@
# Numbers

## Integers

### Literals

We can infer an integer literal type:

```py
reveal_type(1) # revealed: Literal[1]
```

### Overflow

We only track integer literals within the range of an i64:

```py
reveal_type(9223372036854775808) # revealed: int
```

## Floats

There are no literal float types, but we infer the general `float` type:

```py
reveal_type(1.0) # revealed: float
```

## Complex

The same goes for complex numbers:

```py
reveal_type(2j) # revealed: complex
```
@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
    /// which the `AstNodeRef` belongs.
    ///
    /// ## Safety
    ///
    /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
    /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
    /// the invariant `node belongs to parsed` is upheld.
    pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
        Self {
            _parsed: parsed,
@@ -1,16 +0,0 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;

/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    let builtins_name =
        ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
    let builtins_file = resolve_module(db, builtins_name)?.file();
    Some(global_scope(db, builtins_file))
}
@@ -11,7 +11,6 @@ pub trait Db: SourceDb + Upcast<dyn SourceDb> {
pub(crate) mod tests {
    use std::sync::Arc;

    use crate::module_resolver::vendored_typeshed_stubs;
    use ruff_db::files::{File, Files};
    use ruff_db::system::{DbWithTestSystem, System, TestSystem};
    use ruff_db::vendored::VendoredFileSystem;
@@ -33,7 +32,7 @@ pub(crate) mod tests {
        Self {
            storage: salsa::Storage::default(),
            system: TestSystem::default(),
            vendored: vendored_typeshed_stubs().clone(),
            vendored: red_knot_vendored::file_system().clone(),
            events: std::sync::Arc::default(),
            files: Files::default(),
        }

@@ -4,13 +4,12 @@ use rustc_hash::FxHasher;

pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use module_resolver::{resolve_module, system_module_search_paths, Module};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};

pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
@@ -20,6 +19,7 @@ mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;

type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

@@ -1,10 +1,9 @@
use std::iter::FusedIterator;

pub(crate) use module::Module;
pub use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;

use crate::module_resolver::resolver::search_paths;
use crate::Db;

@@ -59,6 +59,10 @@ impl ModulePath {
        self.relative_path.push(component);
    }

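    /// Remove the last component of the relative path, returning `true` if there was
    /// a component to remove (assuming `PathBuf::pop`-style semantics on `relative_path`).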
    pub(crate) fn pop(&mut self) -> bool {
        self.relative_path.pop()
    }

    #[must_use]
    pub(super) fn is_directory(&self, resolver: &ResolverContext) -> bool {
        let ModulePath {
@@ -1,20 +1,21 @@
use rustc_hash::{FxBuildHasher, FxHashSet};
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::ops::Deref;

use rustc_hash::{FxBuildHasher, FxHashSet};

use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};

use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::typeshed::{vendored_typeshed_versions, TypeshedVersions};
use crate::site_packages::VirtualEnvironment;
use crate::{Program, PythonVersion, SearchPathSettings, SitePackages};

use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};

/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
    let interned_name = ModuleNameIngredient::new(db, module_name);
@@ -35,14 +36,14 @@ pub(crate) fn resolve_module_query<'db>(
    let _span = tracing::trace_span!("resolve_module", %name).entered();

    let Some((search_path, module_file, kind)) = resolve_name(db, name) else {
        tracing::debug!("Module '{name}' not found in the search paths.");
        tracing::debug!("Module `{name}` not found in search paths");
        return None;
    };

    let module = Module::new(name.clone(), kind, search_path, module_file);

    tracing::trace!(
        "Resolved module '{name}' to '{path}'.",
        "Resolved module `{name}` to `{path}`",
        path = module_file.path(db)
    );

@@ -136,7 +137,7 @@ pub(crate) struct SearchPaths {
    /// for the first `site-packages` path
    site_packages: Vec<SearchPath>,

    typeshed_versions: ResolvedTypeshedVersions,
    typeshed_versions: TypeshedVersions,
}

impl SearchPaths {
@@ -202,11 +203,11 @@ impl SearchPaths {

            let search_path = SearchPath::custom_stdlib(db, &custom_typeshed)?;

            (ResolvedTypeshedVersions::Custom(parsed), search_path)
            (parsed, search_path)
        } else {
            tracing::debug!("Using vendored stdlib");
            (
                ResolvedTypeshedVersions::Vendored(vendored_typeshed_versions()),
                vendored_typeshed_versions(db),
                SearchPath::vendored_stdlib(),
            )
        };
@@ -279,23 +280,6 @@ impl SearchPaths {
    }
}

#[derive(Debug, PartialEq, Eq)]
enum ResolvedTypeshedVersions {
    Vendored(&'static TypeshedVersions),
    Custom(TypeshedVersions),
}

impl Deref for ResolvedTypeshedVersions {
    type Target = TypeshedVersions;

    fn deref(&self) -> &Self::Target {
        match self {
            ResolvedTypeshedVersions::Vendored(versions) => versions,
            ResolvedTypeshedVersions::Custom(versions) => versions,
        }
    }
}

/// Collect all dynamic search paths. For each `site-packages` path:
/// - Collect that `site-packages` path
/// - Collect any search paths listed in `.pth` files in that `site-packages` directory
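///   (each non-comment line of a `.pth` file names an extra directory to add as a
///   search path, mirroring how the standard `site` module extends `sys.path`)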
@@ -340,7 +324,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {

    let site_packages_root = files
        .root(db.upcast(), site_packages_dir)
        .expect("Site-package root to have been created.");
        .expect("Site-package root to have been created");

    // This query needs to be re-executed each time a `.pth` file
    // is added, modified or removed from the `site-packages` directory.
@@ -569,24 +553,16 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {

        package_path.push(module_name);

        // Must be a `__init__.pyi` or `__init__.py` or it isn't a package.
        let kind = if package_path.is_directory(&resolver_state) {
            package_path.push("__init__");
            ModuleKind::Package
        } else {
            ModuleKind::Module
        };

        // TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
        if let Some(stub) = package_path.with_pyi_extension().to_file(&resolver_state) {
            return Some((search_path.clone(), stub, kind));
        // Check for a regular package first (highest priority)
        package_path.push("__init__");
        if let Some(regular_package) = resolve_file_module(&package_path, &resolver_state) {
            return Some((search_path.clone(), regular_package, ModuleKind::Package));
        }

        if let Some(module) = package_path
            .with_py_extension()
            .and_then(|path| path.to_file(&resolver_state))
        {
            return Some((search_path.clone(), module, kind));
        // Check for a file module next
        package_path.pop();
        if let Some(file_module) = resolve_file_module(&package_path, &resolver_state) {
            return Some((search_path.clone(), file_module, ModuleKind::Module));
        }

        // For regular packages, don't search the next search path. All files of that
@@ -607,6 +583,23 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
    None
}

/// If `module` exists on disk with either a `.pyi` or `.py` extension,
/// return the [`File`] corresponding to that path.
///
/// `.pyi` files take priority, as they always have priority when
/// resolving modules.
fn resolve_file_module(module: &ModulePath, resolver_state: &ResolverContext) -> Option<File> {
    // Stubs have precedence over source files
    module
        .with_pyi_extension()
        .to_file(resolver_state)
        .or_else(|| {
            module
                .with_py_extension()
                .and_then(|path| path.to_file(resolver_state))
        })
}

fn resolve_package<'a, 'db, I>(
    module_search_path: &SearchPath,
    components: I,
@@ -633,7 +626,10 @@ where

        if is_regular_package {
            in_namespace_package = false;
        } else if package_path.is_directory(resolver_state) {
        } else if package_path.is_directory(resolver_state)
            // Pure modules hide namespace packages with the same name
            && resolve_file_module(&package_path, resolver_state).is_none()
        {
            // A directory without an `__init__.py` is a namespace package; continue with the next folder.
            in_namespace_package = true;
        } else if in_namespace_package {
@@ -1091,6 +1087,25 @@ mod tests {
        );
    }

    #[test]
    fn single_file_takes_priority_over_namespace_package() {
        //const SRC: &[FileSpec] = &[("foo.py", "x = 1")];
        const SRC: &[FileSpec] = &[("foo.py", "x = 1"), ("foo/bar.py", "x = 2")];

        let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();

        let foo_module_name = ModuleName::new_static("foo").unwrap();
        let foo_bar_module_name = ModuleName::new_static("foo.bar").unwrap();

        // `foo.py` takes priority over the `foo` namespace package
        let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
        assert_eq!(foo_module.file().path(&db), &src.join("foo.py"));

        // `foo.bar` isn't recognised as a module
        let foo_bar_module = resolve_module(&db, foo_bar_module_name.clone());
        assert_eq!(foo_bar_module, None);
    }

    #[test]
    fn typing_stub_over_module() {
        const SRC: &[FileSpec] = &[("foo.py", "print('Hello, world!')"), ("foo.pyi", "x: int")];

@@ -4,25 +4,19 @@ use std::num::{NonZeroU16, NonZeroUsize};
use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use rustc_hash::FxHashMap;

use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};

static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
pub(in crate::module_resolver) fn vendored_typeshed_versions(db: &dyn Db) -> TypeshedVersions {
    TypeshedVersions::from_str(
        &vendored_typeshed_stubs()
        &db.vendored()
            .read_to_string("stdlib/VERSIONS")
            .unwrap(),
            .expect("The vendored typeshed stubs should contain a VERSIONS file"),
    )
    .unwrap()
});

pub(crate) fn vendored_typeshed_versions() -> &'static TypeshedVersions {
    &VENDORED_VERSIONS
    .expect("The VERSIONS file in the vendored typeshed stubs should be well-formed")
}

pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
@@ -332,6 +326,8 @@ mod tests {

    use insta::assert_snapshot;

    use crate::db::tests::TestDb;

    use super::*;

    const TYPESHED_STDLIB_DIR: &str = "stdlib";
@@ -353,12 +349,9 @@ mod tests {

    #[test]
    fn can_parse_vendored_versions_file() {
        let versions_data = include_str!(concat!(
            env!("CARGO_MANIFEST_DIR"),
            "/vendor/typeshed/stdlib/VERSIONS"
        ));
        let db = TestDb::new();

        let versions = TypeshedVersions::from_str(versions_data).unwrap();
        let versions = vendored_typeshed_versions(&db);
        assert!(versions.len() > 100);
        assert!(versions.len() < 1000);

@@ -395,9 +388,10 @@ mod tests {

    #[test]
    fn typeshed_versions_consistent_with_vendored_stubs() {
        const VERSIONS_DATA: &str = include_str!("../../../vendor/typeshed/stdlib/VERSIONS");
        let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
        let vendored_typeshed_versions = TypeshedVersions::from_str(VERSIONS_DATA).unwrap();
        let db = TestDb::new();
        let vendored_typeshed_versions = vendored_typeshed_versions(&db);
        let vendored_typeshed_dir =
            Path::new(env!("CARGO_MANIFEST_DIR")).join("../red_knot_vendored/vendor/typeshed");

        let mut empty_iterator = true;

@@ -1,8 +0,0 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
    typeshed_versions, vendored_typeshed_versions, TypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};

mod vendored;
mod versions;
@@ -54,6 +54,13 @@ impl TryFrom<(&str, &str)> for PythonVersion {
    }
}

impl From<(u8, u8)> for PythonVersion {
    fn from(value: (u8, u8)) -> Self {
        let (major, minor) = value;
        Self { major, minor }
    }
}

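// e.g. `PythonVersion::from((3, 12))` yields `PythonVersion { major: 3, minor: 12 }`.
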
impl fmt::Display for PythonVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let PythonVersion { major, minor } = self;
@@ -1,7 +1,7 @@
use std::iter::FusedIterator;
use std::sync::Arc;

use rustc_hash::FxHashMap;
use rustc_hash::{FxBuildHasher, FxHashMap};
use salsa::plumbing::AsId;

use ruff_db::files::File;
@@ -21,14 +21,17 @@ use crate::Db;

pub mod ast_ids;
mod builder;
pub(crate) mod constraint;
pub mod definition;
pub mod expression;
pub mod symbol;
mod use_def;

pub(crate) use self::use_def::{DefinitionWithConstraints, DefinitionWithConstraintsIterator};
pub(crate) use self::use_def::{
    BindingWithConstraints, BindingWithConstraintsIterator, DeclarationsIterator,
};

type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), FxBuildHasher>;

/// Returns the semantic index for `file`.
///
@@ -112,6 +115,9 @@ pub(crate) struct SemanticIndex<'db> {
    /// Note: We should not depend on this map when analysing other files or
    /// changing a file invalidates all dependents.
    ast_ids: IndexVec<FileScopeId, AstIds>,

    /// Flags about the global scope (code usage impacting inference)
    has_future_annotations: bool,
}

impl<'db> SemanticIndex<'db> {
@@ -212,6 +218,12 @@ impl<'db> SemanticIndex<'db> {
    pub(crate) fn node_scope(&self, node: NodeWithScopeRef) -> FileScopeId {
        self.scopes_by_node[&node.node_key()]
    }

    /// Checks if there is an import of `__future__.annotations` in the global scope, which affects
    /// the logic for type inference.
    pub(super) fn has_future_annotations(&self) -> bool {
        self.has_future_annotations
    }
}

pub struct AncestorsIter<'a> {
@@ -325,16 +337,16 @@ mod tests {
    use crate::Db;

    impl UseDefMap<'_> {
        fn first_public_definition(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
            self.public_definitions(symbol)
        fn first_public_binding(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
            self.public_bindings(symbol)
                .next()
                .map(|constrained_definition| constrained_definition.definition)
                .map(|constrained_binding| constrained_binding.binding)
        }

        fn first_use_definition(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
            self.use_definitions(use_id)
        fn first_binding_at_use(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
            self.bindings_at_use(use_id)
                .next()
                .map(|constrained_definition| constrained_definition.definition)
                .map(|constrained_binding| constrained_binding.binding)
        }
    }

@@ -396,8 +408,8 @@ mod tests {
        let foo = global_table.symbol_id_by_name("foo").unwrap();

        let use_def = use_def_map(&db, scope);
        let definition = use_def.first_public_definition(foo).unwrap();
        assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
        let binding = use_def.first_public_binding(foo).unwrap();
        assert!(matches!(binding.kind(&db), DefinitionKind::Import(_)));
    }

    #[test]
@@ -426,22 +438,19 @@ mod tests {
        assert!(
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
                .is_some_and(|symbol| { symbol.is_bound() && !symbol.is_used() }),
            "symbols that are defined get the defined flag"
        );

        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(
        let binding = use_def
            .first_public_binding(
                global_table
                    .symbol_id_by_name("foo")
                    .expect("symbol to exist"),
            )
            .unwrap();
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::ImportFrom(_)
        ));
        assert!(matches!(binding.kind(&db), DefinitionKind::ImportFrom(_)));
    }

    #[test]
@@ -454,17 +463,14 @@ mod tests {
        assert!(
            global_table
                .symbol_by_name("foo")
                .is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
            "a symbol used but not defined in a scope should have only the used flag"
                .is_some_and(|symbol| { !symbol.is_bound() && symbol.is_used() }),
            "a symbol used but not bound in a scope should have only the used flag"
        );
        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").expect("symbol exists"))
            .unwrap();
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
@@ -476,12 +482,12 @@ mod tests {
        assert_eq!(names(&global_table), vec!["x"]);

        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("x").unwrap())
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(
            definition.node(&db),
            binding.kind(&db),
            DefinitionKind::AugmentedAssignment(_)
        ));
    }
@@ -514,13 +520,10 @@ y = 2
        assert_eq!(names(&class_table), vec!["x"]);

        let use_def = index.use_def_map(class_scope_id);
        let definition = use_def
            .first_public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
        let binding = use_def
            .first_public_binding(class_table.symbol_id_by_name("x").expect("symbol exists"))
            .unwrap();
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
@@ -550,17 +553,14 @@ y = 2
        assert_eq!(names(&function_table), vec!["x"]);

        let use_def = index.use_def_map(function_scope_id);
        let definition = use_def
            .first_public_definition(
        let binding = use_def
            .first_public_binding(
                function_table
                    .symbol_id_by_name("x")
                    .expect("symbol exists"),
            )
            .unwrap();
        assert!(matches!(
            definition.node(&db),
            DefinitionKind::Assignment(_)
        ));
        assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
    }

    #[test]
@@ -575,7 +575,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
        let index = semantic_index(&db, file);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert_eq!(names(&global_table), vec!["f", "str", "int"]);
        assert_eq!(names(&global_table), vec!["str", "int", "f"]);

        let [(function_scope_id, _function_scope)] = index
            .child_scopes(FileScopeId::global())
@@ -592,27 +592,27 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):

        let use_def = index.use_def_map(function_scope_id);
        for name in ["a", "b", "c", "d"] {
            let definition = use_def
                .first_public_definition(
            let binding = use_def
                .first_public_binding(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                definition.node(&db),
                binding.kind(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let definition = use_def
                .first_public_definition(
            let binding = use_def
                .first_public_binding(
                    function_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
            assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
        }
    }

@@ -640,23 +640,19 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):

        let use_def = index.use_def_map(lambda_scope_id);
        for name in ["a", "b", "c", "d"] {
            let definition = use_def
                .first_public_definition(
                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
                )
            let binding = use_def
                .first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
                .unwrap();
            assert!(matches!(
                definition.node(&db),
                binding.kind(&db),
                DefinitionKind::ParameterWithDefault(_)
            ));
        }
        for name in ["args", "kwargs"] {
            let definition = use_def
                .first_public_definition(
                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
                )
            let binding = use_def
                .first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
                .unwrap();
            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
            assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
        }
    }

@@ -666,7 +662,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
    fn comprehension_scope() {
        let TestCase { db, file } = test_case(
            "
[x for x in iter1]
[x for x, y in iter1]
",
        );

@@ -690,7 +686,22 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
        assert_eq!(names(&comprehension_symbol_table), vec!["x", "y"]);

        let use_def = index.use_def_map(comprehension_scope_id);
        for name in ["x", "y"] {
            let binding = use_def
                .first_public_binding(
                    comprehension_symbol_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                binding.kind(&db),
                DefinitionKind::Comprehension(_)
            ));
        }
    }

    /// Test case to validate that the `x` variable used in the comprehension is referencing the
@@ -726,12 +737,12 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
        let element_use_id =
            element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));

        let definition = use_def.first_use_definition(element_use_id).unwrap();
        let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
        let binding = use_def.first_binding_at_use(element_use_id).unwrap();
        let DefinitionKind::Comprehension(comprehension) = binding.kind(&db) else {
            panic!("expected generator definition")
        };
        let ast::Comprehension { target, .. } = comprehension.node();
        let name = target.as_name_expr().unwrap().id().as_str();
        let target = comprehension.target();
        let name = target.id().as_str();

        assert_eq!(name, "x");
        assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
@@ -806,12 +817,10 @@ with item1 as x, item2 as y:

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let Some(definition) = use_def.first_public_definition(
                global_table.symbol_id_by_name(name).expect("symbol exists"),
            ) else {
                panic!("Expected with item definition for {name}");
            };
            assert!(matches!(definition.node(&db), DefinitionKind::WithItem(_)));
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
        }
    }

@@ -831,12 +840,10 @@ with context() as (x, y):

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let Some(definition) = use_def.first_public_definition(
                global_table.symbol_id_by_name(name).expect("symbol exists"),
            ) else {
                panic!("Expected with item definition for {name}");
            };
            assert!(matches!(definition.node(&db), DefinitionKind::WithItem(_)));
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
        }
    }

@@ -873,14 +880,14 @@ def func():
        assert_eq!(names(&func2_table), vec!["y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        let definition = use_def
            .first_public_definition(
        let binding = use_def
            .first_public_binding(
                global_table
                    .symbol_id_by_name("func")
                    .expect("symbol exists"),
            )
            .unwrap();
        assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
        assert!(matches!(binding.kind(&db), DefinitionKind::Function(_)));
    }

    #[test]

@@ -948,7 +955,7 @@ class C[T]:
        assert!(
            ann_table
                .symbol_by_name("T")
                .is_some_and(|s| s.is_defined() && !s.is_used()),
                .is_some_and(|s| s.is_bound() && !s.is_used()),
            "type parameters are defined by the scope that introduces them"
        );

@@ -980,8 +987,8 @@ class C[T]:
        };
        let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
        let use_def = use_def_map(&db, scope);
        let definition = use_def.first_use_definition(x_use_id).unwrap();
        let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
        let binding = use_def.first_binding_at_use(x_use_id).unwrap();
        let DefinitionKind::Assignment(assignment) = binding.kind(&db) else {
            panic!("should be an assignment definition")
        };
        let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
@@ -1029,7 +1036,7 @@ class C[T]:
        }

        let TestCase { db, file } = test_case(
            r#"
            r"
class Test:
    def foo():
        def bar():
@@ -1038,7 +1045,7 @@ class Test:
        pass

def x():
    pass"#,
    pass",
        );

        let index = semantic_index(&db, file);

@@ -1073,7 +1080,7 @@ def x():
    }

    #[test]
    fn match_stmt_symbols() {
    fn match_stmt() {
        let TestCase { db, file } = test_case(
            "
match subject:
@@ -1087,12 +1094,120 @@ match subject:
",
        );

        let global_table = symbol_table(&db, global_scope(&db, file));
        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);

        assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
        assert_eq!(
            names(&global_table),
            vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
            vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
        );

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [
            ("a", 0),
            ("b", 0),
            ("c", 1),
            ("d", 2),
            ("e", 0),
            ("f", 1),
            ("g", 0),
            ("h", 1),
            ("i", 0),
            ("j", 1),
            ("k", 0),
            ("l", 1),
        ] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn nested_match_case() {
        let TestCase { db, file } = test_case(
            "
match 1:
    case first:
        match 2:
            case second:
                pass
",
        );

        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);

        assert_eq!(names(&global_table), vec!["first", "second"]);

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [("first", 0), ("second", 0)] {
            let binding = use_def
                .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn for_loops_single_assignment() {
        let TestCase { db, file } = test_case("for x in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x"]);

        let use_def = use_def_map(&db, scope);
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_simple_unpacking() {
        let TestCase { db, file } = test_case("for (x, y) in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x", "y"]);

        let use_def = use_def_map(&db, scope);
        let x_binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();
        let y_binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("y").unwrap())
            .unwrap();

        assert!(matches!(x_binding.kind(&db), DefinitionKind::For(_)));
        assert!(matches!(y_binding.kind(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_complex_unpacking() {
        let TestCase { db, file } = test_case("for [((a,) b), (c, d)] in e: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);

        let use_def = use_def_map(&db, scope);
        let binding = use_def
            .first_public_binding(global_table.symbol_id_by_name("a").unwrap())
            .unwrap();

        assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
    }
}

@@ -15,18 +15,22 @@ use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
    AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
    DefinitionNodeRef, ImportFromDefinitionNodeRef,
    DefinitionNodeRef, ForStmtDefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
    FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId,
    SymbolTableBuilder,
};
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;

use super::definition::WithItemDefinitionNodeRef;
use super::constraint::{Constraint, PatternConstraint};
use super::definition::{
    DefinitionCategory, ExceptHandlerDefinitionNodeRef, MatchPatternDefinitionNodeRef,
    WithItemDefinitionNodeRef,
};

pub(super) struct SemanticIndexBuilder<'db> {
    // Builder state
@@ -36,9 +40,14 @@ pub(super) struct SemanticIndexBuilder<'db> {
    scope_stack: Vec<FileScopeId>,
    /// The assignment we're currently visiting.
    current_assignment: Option<CurrentAssignment<'db>>,
    /// The match case we're currently visiting.
    current_match_case: Option<CurrentMatchCase<'db>>,
    /// Flow states at each `break` in the current loop.
    loop_break_states: Vec<FlowSnapshot>,

    /// Flags about the file's global scope
    has_future_annotations: bool,

    // Semantic Index fields
    scopes: IndexVec<FileScopeId, Scope>,
    scope_ids_by_scope: IndexVec<FileScopeId, ScopeId<'db>>,
@@ -59,8 +68,11 @@ impl<'db> SemanticIndexBuilder<'db> {
            module: parsed,
            scope_stack: Vec::new(),
            current_assignment: None,
            current_match_case: None,
            loop_break_states: vec![],

            has_future_annotations: false,

            scopes: IndexVec::new(),
            symbol_tables: IndexVec::new(),
            ast_ids: IndexVec::new(),
@@ -162,49 +174,95 @@ impl<'db> SemanticIndexBuilder<'db> {
        self.current_use_def_map_mut().merge(state);
    }

    fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
        let symbol_table = self.current_symbol_table();
        let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
    fn add_symbol(&mut self, name: Name) -> ScopedSymbolId {
        let (symbol_id, added) = self.current_symbol_table().add_symbol(name);
        if added {
            let use_def_map = self.current_use_def_map_mut();
            use_def_map.add_symbol(symbol_id);
            self.current_use_def_map_mut().add_symbol(symbol_id);
        }
        symbol_id
    }

    fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
        self.current_symbol_table().mark_symbol_bound(id);
    }

    fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
        self.current_symbol_table().mark_symbol_used(id);
    }

    fn add_definition<'a>(
        &mut self,
        symbol: ScopedSymbolId,
        definition_node: impl Into<DefinitionNodeRef<'a>>,
    ) -> Definition<'db> {
        let definition_node: DefinitionNodeRef<'_> = definition_node.into();
        #[allow(unsafe_code)]
        // SAFETY: `definition_node` is guaranteed to be a child of `self.module`
        let kind = unsafe { definition_node.into_owned(self.module.clone()) };
        let category = kind.category();
        let definition = Definition::new(
            self.db,
            self.file,
            self.current_scope(),
            symbol,
            #[allow(unsafe_code)]
            unsafe {
                definition_node.into_owned(self.module.clone())
            },
            kind,
            countme::Count::default(),
        );

        self.definitions_by_node
        let existing_definition = self
            .definitions_by_node
            .insert(definition_node.key(), definition);
        self.current_use_def_map_mut()
            .record_definition(symbol, definition);
        debug_assert_eq!(existing_definition, None);

        if category.is_binding() {
            self.mark_symbol_bound(symbol);
        }

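        // A definition can declare a type (`x: int`), bind a value (`x = 1`), or do
        // both at once (`x: int = 1`); record it under the matching category.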
        let use_def = self.current_use_def_map_mut();
        match category {
            DefinitionCategory::DeclarationAndBinding => {
                use_def.record_declaration_and_binding(symbol, definition);
            }
            DefinitionCategory::Declaration => use_def.record_declaration(symbol, definition),
            DefinitionCategory::Binding => use_def.record_binding(symbol, definition),
        }

        definition
    }

    fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
    fn add_expression_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
        let expression = self.add_standalone_expression(constraint_node);
        self.current_use_def_map_mut().record_constraint(expression);
        self.current_use_def_map_mut()
            .record_constraint(Constraint::Expression(expression));

        expression
    }

    fn add_pattern_constraint(
        &mut self,
        subject: &ast::Expr,
        pattern: &ast::Pattern,
    ) -> PatternConstraint<'db> {
        #[allow(unsafe_code)]
        let (subject, pattern) = unsafe {
            (
                AstNodeRef::new(self.module.clone(), subject),
                AstNodeRef::new(self.module.clone(), pattern),
            )
        };
        let pattern_constraint = PatternConstraint::new(
            self.db,
            self.file,
            self.current_scope(),
            subject,
            pattern,
            countme::Count::default(),
        );
        self.current_use_def_map_mut()
            .record_constraint(Constraint::Pattern(pattern_constraint));
        pattern_constraint
    }

    /// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
    /// standalone (type narrowing tests, RHS of an assignment.)
    fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
@@ -249,10 +307,13 @@ impl<'db> SemanticIndexBuilder<'db> {
                ..
            }) => (name, &None, default),
        };
        // TODO create Definition for typevars
        self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
        if let Some(bound) = bound {
            self.visit_expr(bound);
        let symbol = self.add_symbol(name.id.clone());
        // TODO create Definition for PEP 695 typevars
        // note that the "bound" on the typevar is a totally different thing than whether
        // or not a name is "bound" by a typevar declaration; the latter is always true.
        self.mark_symbol_bound(symbol);
        if let Some(bounds) = bound {
            self.visit_expr(bounds);
        }
        if let Some(default) = default {
            self.visit_expr(default);
@@ -269,11 +330,23 @@ impl<'db> SemanticIndexBuilder<'db> {
        nested_scope
    }

    /// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
    /// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
    /// This method does several things:
    /// - It pushes a new scope onto the stack for visiting
    ///   a list/dict/set comprehension or generator expression
    /// - Inside that scope, it visits a list of [`Comprehension`] nodes,
    ///   assumed to be the "generators" that compose a comprehension
    ///   (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`).
    /// - Inside that scope, it also calls a closure for visiting the outer `elt`
    ///   of a list/dict/set comprehension or generator expression
    /// - It then pops the new scope off the stack
    ///
    /// [`Comprehension`]: ast::Comprehension
    fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
    fn with_generators_scope(
        &mut self,
        scope: NodeWithScopeRef,
        generators: &'db [ast::Comprehension],
        visit_outer_elt: impl FnOnce(&mut Self),
    ) {
        let mut generators_iter = generators.iter();

        let Some(generator) = generators_iter.next() else {
@@ -282,6 +355,7 @@ impl<'db> SemanticIndexBuilder<'db> {

        // The `iter` of the first generator is evaluated in the outer scope, while all subsequent
        // nodes are evaluated in the inner scope.
        self.add_standalone_expression(&generator.iter);
        self.visit_expr(&generator.iter);
        self.push_scope(scope);

@@ -297,6 +371,7 @@ impl<'db> SemanticIndexBuilder<'db> {
        }

        for generator in generators_iter {
            self.add_standalone_expression(&generator.iter);
            self.visit_expr(&generator.iter);

            self.current_assignment = Some(CurrentAssignment::Comprehension {
@@ -310,11 +385,13 @@ impl<'db> SemanticIndexBuilder<'db> {
                self.visit_expr(expr);
            }
        }

        visit_outer_elt(self);
        self.pop_scope();
    }

    fn declare_parameter(&mut self, parameter: AnyParameterRef) {
        let symbol =
            self.add_or_update_symbol(parameter.name().id().clone(), SymbolFlags::IS_DEFINED);
        let symbol = self.add_symbol(parameter.name().id().clone());

        let definition = self.add_definition(symbol, parameter);

@@ -322,10 +399,11 @@ impl<'db> SemanticIndexBuilder<'db> {
            // Insert a mapping from the parameter to the same definition.
            // This ensures that calling `HasTy::ty` on the inner parameter returns
            // a valid type (and doesn't panic)
            self.definitions_by_node.insert(
            let existing_definition = self.definitions_by_node.insert(
                DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
                definition,
            );
            debug_assert_eq!(existing_definition, None);
        }
    }

@@ -377,6 +455,7 @@ impl<'db> SemanticIndexBuilder<'db> {
            scopes_by_expression: self.scopes_by_expression,
            scopes_by_node: self.scopes_by_node,
            use_def_maps,
            has_future_annotations: self.has_future_annotations,
        }
    }
}
@@ -392,20 +471,6 @@ where
                    self.visit_decorator(decorator);
                }

                let symbol = self
                    .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
                self.add_definition(symbol, function_def);

                // The default value of the parameters needs to be evaluated in the
                // enclosing scope.
                for default in function_def
                    .parameters
                    .iter_non_variadic_params()
                    .filter_map(|param| param.default.as_deref())
                {
                    self.visit_expr(default);
                }

                self.with_type_params(
                    NodeWithScopeRef::FunctionTypeParameters(function_def),
                    function_def.type_params.as_deref(),
@@ -426,14 +491,27 @@ where
                        builder.pop_scope()
                    },
                );
                // The default value of the parameters needs to be evaluated in the
                // enclosing scope.
                for default in function_def
                    .parameters
                    .iter_non_variadic_params()
                    .filter_map(|param| param.default.as_deref())
                {
                    self.visit_expr(default);
                }
                // The symbol for the function name itself has to be evaluated
                // at the end to match the runtime evaluation of parameter defaults
                // and return-type annotations.
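                // (At runtime, Python evaluates parameter defaults and annotations first
                // and only then binds the function name: in `def f(x=len(f))`, the default
                // sees any previous binding of `f`, not the new function.)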
                let symbol = self.add_symbol(function_def.name.id.clone());
                self.add_definition(symbol, function_def);
            }
            ast::Stmt::ClassDef(class) => {
                for decorator in &class.decorator_list {
                    self.visit_decorator(decorator);
                }

                let symbol =
                    self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
                let symbol = self.add_symbol(class.name.id.clone());
                self.add_definition(symbol, class);

                self.with_type_params(
@@ -459,7 +537,7 @@ where
                        Name::new(alias.name.id.split('.').next().unwrap())
                    };

                    let symbol = self.add_or_update_symbol(symbol_name, SymbolFlags::IS_DEFINED);
                    let symbol = self.add_symbol(symbol_name);
                    self.add_definition(symbol, alias);
                }
            }
@@ -471,8 +549,16 @@ where
                        &alias.name.id
                    };

                    let symbol =
                        self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
                    // Look for `from __future__ import annotations` imports, ignoring any `as ...`
                    // renames. We intentionally don't enforce the rules about the location of
                    // `__future__` imports here: we assume the user's intent was to apply the
                    // `__future__` import, so we still honor it (and will also emit a diagnostic
                    // about a misplaced `__future__` import.)
                    self.has_future_annotations |= alias.name.id == "annotations"
                        && node.module.as_deref() == Some("__future__");

                    let symbol = self.add_symbol(symbol_name.clone());

                    self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
                }
            }
@@ -488,7 +574,6 @@ where
            }
            ast::Stmt::AnnAssign(node) => {
                debug_assert!(self.current_assignment.is_none());
                // TODO deferred annotation visiting
                self.visit_expr(&node.annotation);
                if let Some(value) = &node.value {
                    self.visit_expr(value);
@@ -514,7 +599,7 @@ where
            ast::Stmt::If(node) => {
                self.visit_expr(&node.test);
                let pre_if = self.flow_snapshot();
                self.add_constraint(&node.test);
                self.add_expression_constraint(&node.test);
                self.visit_body(&node.body);
                let mut post_clauses: Vec<FlowSnapshot> = vec![];
                for clause in &node.elif_else_clauses {
@@ -539,14 +624,23 @@ where
                    self.flow_merge(pre_if);
                }
            }
            ast::Stmt::While(node) => {
                self.visit_expr(&node.test);
            ast::Stmt::While(ast::StmtWhile {
                test,
                body,
                orelse,
                range: _,
            }) => {
                self.visit_expr(test);

                let pre_loop = self.flow_snapshot();

                // Save aside any break states from an outer loop
                let saved_break_states = std::mem::take(&mut self.loop_break_states);
                self.visit_body(&node.body);

                // TODO: definitions created inside the body should be fully visible
                // to other statements/expressions inside the body --Alex/Carl
                self.visit_body(body);

                // Get the break states from the body of this loop, and restore the saved outer
                // ones.
                let break_states =
@@ -555,7 +649,7 @@ where
                // We may execute the `else` clause without ever executing the body, so merge in
                // the pre-loop state before visiting `else`.
                self.flow_merge(pre_loop);
                self.visit_body(&node.orelse);
                self.visit_body(orelse);

                // Breaking out of a while loop bypasses the `else` clause, so merge in the break
                // states after visiting `else`.
@@ -578,6 +672,123 @@ where
            ast::Stmt::Break(_) => {
                self.loop_break_states.push(self.flow_snapshot());
            }

            ast::Stmt::For(
                for_stmt @ ast::StmtFor {
                    range: _,
                    is_async: _,
                    target,
                    iter,
                    body,
                    orelse,
                },
            ) => {
                self.add_standalone_expression(iter);
                self.visit_expr(iter);

                let pre_loop = self.flow_snapshot();
                let saved_break_states = std::mem::take(&mut self.loop_break_states);

                debug_assert!(self.current_assignment.is_none());
                self.current_assignment = Some(for_stmt.into());
                self.visit_expr(target);
                self.current_assignment = None;

                // TODO: Definitions created by loop variables
                // (and definitions created inside the body)
                // are fully visible to other statements/expressions inside the body --Alex/Carl
                self.visit_body(body);

                let break_states =
                    std::mem::replace(&mut self.loop_break_states, saved_break_states);

                // We may execute the `else` clause without ever executing the body, so merge in
                // the pre-loop state before visiting `else`.
                self.flow_merge(pre_loop);
                self.visit_body(orelse);

                // Breaking out of a `for` loop bypasses the `else` clause, so merge in the break
                // states after visiting `else`.
                for break_state in break_states {
                    self.flow_merge(break_state);
                }
            }
            ast::Stmt::Match(ast::StmtMatch {
                subject,
                cases,
                range: _,
            }) => {
                self.add_standalone_expression(subject);
                self.visit_expr(subject);

                let after_subject = self.flow_snapshot();
                let Some((first, remaining)) = cases.split_first() else {
                    return;
                };
                self.add_pattern_constraint(subject, &first.pattern);
                self.visit_match_case(first);

                let mut post_case_snapshots = vec![];
                for case in remaining {
                    post_case_snapshots.push(self.flow_snapshot());
                    self.flow_restore(after_subject.clone());
                    self.add_pattern_constraint(subject, &case.pattern);
                    self.visit_match_case(case);
                }
                for post_clause_state in post_case_snapshots {
                    self.flow_merge(post_clause_state);
                }
if !cases
|
||||
.last()
|
||||
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard())
|
||||
{
|
||||
self.flow_merge(after_subject);
|
||||
}
|
||||
}
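The final `flow_merge(after_subject)` keeps the pre-match state live unless the last case is an unguarded wildcard, since only a wildcard guarantees that some case body executed. For example, in plain Python:

```python
def describe(subject):
    match subject:
        case int():
            kind = "int"
        case str():
            kind = "str"
    # With no trailing `case _:`, execution can fall through the whole
    # `match` without binding `kind`.
    return kind

assert describe(1) == "int"
# describe(None) would raise UnboundLocalError: no case matched.
```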
            ast::Stmt::Try(ast::StmtTry {
                body,
                handlers,
                orelse,
                finalbody,
                is_star,
                range: _,
            }) => {
                self.visit_body(body);

                for except_handler in handlers {
                    let ast::ExceptHandler::ExceptHandler(except_handler) = except_handler;
                    let ast::ExceptHandlerExceptHandler {
                        name: symbol_name,
                        type_: handled_exceptions,
                        body: handler_body,
                        range: _,
                    } = except_handler;

                    if let Some(handled_exceptions) = handled_exceptions {
                        self.visit_expr(handled_exceptions);
                    }

                    // If `handled_exceptions` above was `None`, it's something like `except as e:`,
                    // which is invalid syntax. However, it's still pretty obvious here that the user
                    // *wanted* `e` to be bound, so we should still create a definition here nonetheless.
                    if let Some(symbol_name) = symbol_name {
                        let symbol = self.add_symbol(symbol_name.id.clone());

                        self.add_definition(
                            symbol,
                            DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
                                handler: except_handler,
                                is_star: *is_star,
                            }),
                        );
                    }

                    self.visit_body(handler_body);
                }

                self.visit_body(orelse);
                self.visit_body(finalbody);
            }
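The handler binding modeled here follows `except ... as name:` semantics; note that Python unbinds the name again when the handler exits. A small, self-contained reminder:

```python
try:
    1 / 0
except ZeroDivisionError as exc:
    # `exc` is bound by the except clause itself.
    message = f"caught: {exc}"
# CPython deletes `exc` at the end of the handler, so it is unbound here.
print(message)
```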
            _ => {
                walk_stmt(self, stmt);
            }
@@ -591,23 +802,18 @@ where

        match expr {
            ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
                let mut flags = match ctx {
                    ast::ExprContext::Load => SymbolFlags::IS_USED,
                    ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                    ast::ExprContext::Invalid => SymbolFlags::empty(),
                let (is_use, is_definition) = match (ctx, self.current_assignment) {
                    (ast::ExprContext::Store, Some(CurrentAssignment::AugAssign(_))) => {
                        // For augmented assignment, the target expression is also used.
                        (true, true)
                    }
                    (ast::ExprContext::Load, _) => (true, false),
                    (ast::ExprContext::Store, _) => (false, true),
                    (ast::ExprContext::Del, _) => (false, true),
                    (ast::ExprContext::Invalid, _) => (false, false),
                };
                if matches!(
                    self.current_assignment,
                    Some(CurrentAssignment::AugAssign(_))
                ) && !ctx.is_invalid()
                {
                    // For augmented assignment, the target expression is also used, so we should
                    // record that as a use.
                    flags |= SymbolFlags::IS_USED;
                }
                let symbol = self.add_or_update_symbol(id.clone(), flags);
                if flags.contains(SymbolFlags::IS_DEFINED) {
                let symbol = self.add_symbol(id.clone());
                if is_definition {
                    match self.current_assignment {
                        Some(CurrentAssignment::Assign(assignment)) => {
                            self.add_definition(
@@ -624,6 +830,16 @@ where
                        Some(CurrentAssignment::AugAssign(aug_assign)) => {
                            self.add_definition(symbol, aug_assign);
                        }
                        Some(CurrentAssignment::For(node)) => {
                            self.add_definition(
                                symbol,
                                ForStmtDefinitionNodeRef {
                                    iterable: &node.iter,
                                    target: name_node,
                                    is_async: node.is_async,
                                },
                            );
                        }
                        Some(CurrentAssignment::Named(named)) => {
                            // TODO(dhruvmanila): If the current scope is a comprehension, then the
                            // named expression is implicitly nonlocal. This is yet to be
@@ -633,7 +849,12 @@ where
                        Some(CurrentAssignment::Comprehension { node, first }) => {
                            self.add_definition(
                                symbol,
                                ComprehensionDefinitionNodeRef { node, first },
                                ComprehensionDefinitionNodeRef {
                                    iterable: &node.iter,
                                    target: name_node,
                                    first,
                                    is_async: node.is_async,
                                },
                            );
                        }
                        Some(CurrentAssignment::WithItem(with_item)) => {
@@ -649,7 +870,8 @@ where
                    }
                }

                if flags.contains(SymbolFlags::IS_USED) {
                if is_use {
                    self.mark_symbol_used(symbol);
                    let use_id = self.current_ast_ids().record_use(expr);
                    self.current_use_def_map_mut().record_use(symbol, use_id);
                }
@@ -658,11 +880,11 @@ where
            }
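The `(is_use, is_definition)` table encodes that an augmented assignment target is simultaneously read and rebound. In Python terms:

```python
x = 1
x += 1  # the left-hand `x` is both a use (reads 1) and a binding (writes 2)

try:
    y += 1  # the "use" half fails: `y` was never bound
except NameError as err:
    print(err)
```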
            ast::Expr::Named(node) => {
                debug_assert!(self.current_assignment.is_none());
                self.current_assignment = Some(node.into());
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.value);
                self.current_assignment = Some(node.into());
                self.visit_expr(&node.target);
                self.current_assignment = None;
                self.visit_expr(&node.value);
            }
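Visiting `value` before `target` matches Python's evaluation order for named expressions: the right-hand value is computed first, then bound to the target. For instance:

```python
values = [1, 2, 3]
if (n := len(values)) > 2:
    # `n` was bound from the already-evaluated value before the comparison ran.
    print(n)
```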
            ast::Expr::Lambda(lambda) => {
                if let Some(parameters) = &lambda.parameters {
@@ -686,6 +908,7 @@ where
                }

                self.visit_expr(lambda.body.as_ref());
                self.pop_scope();
            }
            ast::Expr::If(ast::ExprIf {
                body, test, orelse, ..
@@ -706,30 +929,33 @@ where
                    elt, generators, ..
                },
            ) => {
                self.visit_generators(
                self.with_generators_scope(
                    NodeWithScopeRef::ListComprehension(list_comprehension),
                    generators,
                    |builder| builder.visit_expr(elt),
                );
                self.visit_expr(elt);
            }
            ast::Expr::SetComp(
                set_comprehension @ ast::ExprSetComp {
                    elt, generators, ..
                },
            ) => {
                self.visit_generators(
                self.with_generators_scope(
                    NodeWithScopeRef::SetComprehension(set_comprehension),
                    generators,
                    |builder| builder.visit_expr(elt),
                );
                self.visit_expr(elt);
            }
            ast::Expr::Generator(
                generator @ ast::ExprGenerator {
                    elt, generators, ..
                },
            ) => {
                self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
                self.visit_expr(elt);
                self.with_generators_scope(
                    NodeWithScopeRef::GeneratorExpression(generator),
                    generators,
                    |builder| builder.visit_expr(elt),
                );
            }
            ast::Expr::DictComp(
                dict_comprehension @ ast::ExprDictComp {
@@ -739,31 +965,22 @@ where
                    ..
                },
            ) => {
                self.visit_generators(
                self.with_generators_scope(
                    NodeWithScopeRef::DictComprehension(dict_comprehension),
                    generators,
                    |builder| {
                        builder.visit_expr(key);
                        builder.visit_expr(value);
                    },
                );
                self.visit_expr(key);
                self.visit_expr(value);
            }
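`with_generators_scope` reflects Python's comprehension scoping: only the outermost iterable is evaluated in the enclosing scope, while the element expression and the remaining generators run in the comprehension's own scope. A rough illustration:

```python
def flatten():
    rows = [[1, 2], [3, 4]]
    # `rows` (the first iterable) is evaluated in this enclosing scope;
    # `row`, `x`, and the inner iterable live in the comprehension's scope.
    return [x for row in rows for x in row]

assert flatten() == [1, 2, 3, 4]
```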
            _ => {
                walk_expr(self, expr);
            }
        }

        if matches!(
            expr,
            ast::Expr::Lambda(_)
                | ast::Expr::ListComp(_)
                | ast::Expr::SetComp(_)
                | ast::Expr::Generator(_)
                | ast::Expr::DictComp(_)
        ) {
            self.pop_scope();
        }
    }

    fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
    fn visit_parameters(&mut self, parameters: &'ast ast::Parameters) {
        // Intentionally avoid walking default expressions, as we handle them in the enclosing
        // scope.
        for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
@@ -771,23 +988,58 @@ where
        }
    }

    fn visit_match_case(&mut self, match_case: &'ast ast::MatchCase) {
        debug_assert!(self.current_match_case.is_none());
        self.current_match_case = Some(CurrentMatchCase::new(&match_case.pattern));
        self.visit_pattern(&match_case.pattern);
        self.current_match_case = None;

        if let Some(expr) = &match_case.guard {
            self.visit_expr(expr);
        }
        self.visit_body(&match_case.body);
    }

    fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
        if let ast::Pattern::MatchAs(ast::PatternMatchAs {
            name: Some(name), ..
        })
        | ast::Pattern::MatchStar(ast::PatternMatchStar {
        if let ast::Pattern::MatchStar(ast::PatternMatchStar {
            name: Some(name),
            range: _,
        }) = pattern
        {
            let symbol = self.add_symbol(name.id().clone());
            let state = self.current_match_case.as_ref().unwrap();
            self.add_definition(
                symbol,
                MatchPatternDefinitionNodeRef {
                    pattern: state.pattern,
                    identifier: name,
                    index: state.index,
                },
            );
        }

        walk_pattern(self, pattern);

        if let ast::Pattern::MatchAs(ast::PatternMatchAs {
            name: Some(name), ..
        })
        | ast::Pattern::MatchMapping(ast::PatternMatchMapping {
            rest: Some(name), ..
        }) = pattern
        {
            // TODO(dhruvmanila): Add definition
            self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
            let symbol = self.add_symbol(name.id().clone());
            let state = self.current_match_case.as_ref().unwrap();
            self.add_definition(
                symbol,
                MatchPatternDefinitionNodeRef {
                    pattern: state.pattern,
                    identifier: name,
                    index: state.index,
                },
            );
        }

        walk_pattern(self, pattern);
        self.current_match_case.as_mut().unwrap().index += 1;
    }
}
@@ -796,6 +1048,7 @@ enum CurrentAssignment<'a> {
    Assign(&'a ast::StmtAssign),
    AnnAssign(&'a ast::StmtAnnAssign),
    AugAssign(&'a ast::StmtAugAssign),
    For(&'a ast::StmtFor),
    Named(&'a ast::ExprNamed),
    Comprehension {
        node: &'a ast::Comprehension,
@@ -822,6 +1075,12 @@ impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
    }
}

impl<'a> From<&'a ast::StmtFor> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtFor) -> Self {
        Self::For(value)
    }
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
    fn from(value: &'a ast::ExprNamed) -> Self {
        Self::Named(value)
@@ -833,3 +1092,27 @@ impl<'a> From<&'a ast::WithItem> for CurrentAssignment<'a> {
        Self::WithItem(value)
    }
}

struct CurrentMatchCase<'a> {
    /// The pattern that's part of the current match case.
    pattern: &'a ast::Pattern,

    /// The index of the sub-pattern that's being currently visited within the pattern.
    ///
    /// For example:
    /// ```py
    /// match subject:
    ///     case a as b: ...
    ///     case [a, b]: ...
    ///     case a | b: ...
    /// ```
    ///
    /// In all of the above cases, the index would be 0 for `a` and 1 for `b`.
    index: u32,
}

impl<'a> CurrentMatchCase<'a> {
    fn new(pattern: &'a ast::Pattern) -> Self {
        Self { pattern, index: 0 }
    }
}

@@ -0,0 +1,39 @@
use ruff_db::files::File;
use ruff_python_ast as ast;

use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum Constraint<'db> {
    Expression(Expression<'db>),
    Pattern(PatternConstraint<'db>),
}

#[salsa::tracked]
pub(crate) struct PatternConstraint<'db> {
    #[id]
    pub(crate) file: File,

    #[id]
    pub(crate) file_scope: FileScopeId,

    #[no_eq]
    #[return_ref]
    pub(crate) subject: AstNodeRef<ast::Expr>,

    #[no_eq]
    #[return_ref]
    pub(crate) pattern: AstNodeRef<ast::Pattern>,

    #[no_eq]
    count: countme::Count<PatternConstraint<'static>>,
}

impl<'db> PatternConstraint<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }
}
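A `PatternConstraint` records the subject and the pattern so that type inference can later narrow the subject within a case body, in the same spirit as expression constraints. A sketch of the Python behavior being modeled (hypothetical function):

```python
def handle(subject: int | str | None):
    match subject:
        case int():
            return subject + 1      # `subject` narrowed to `int` here
        case str():
            return subject.upper()  # and to `str` here
        case _:
            return None             # only `None` remains

assert handle(1) == 2
assert handle("a") == "A"
```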
@@ -3,6 +3,7 @@ use ruff_db::parsed::ParsedModule;
use ruff_python_ast as ast;

use crate::ast_node_ref::AstNodeRef;
use crate::module_resolver::file_to_module;
use crate::node_key::NodeKey;
use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
use crate::Db;
@@ -23,7 +24,7 @@ pub struct Definition<'db> {

    #[no_eq]
    #[return_ref]
    pub(crate) node: DefinitionKind,
    pub(crate) kind: DefinitionKind,

    #[no_eq]
    count: countme::Count<Definition<'static>>,
@@ -33,12 +34,33 @@ impl<'db> Definition<'db> {
    pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
        self.file_scope(db).to_scope_id(db, self.file(db))
    }

    pub(crate) fn category(self, db: &'db dyn Db) -> DefinitionCategory {
        self.kind(db).category()
    }

    pub(crate) fn is_declaration(self, db: &'db dyn Db) -> bool {
        self.kind(db).category().is_declaration()
    }

    pub(crate) fn is_binding(self, db: &'db dyn Db) -> bool {
        self.kind(db).category().is_binding()
    }

    /// Return true if this symbol was defined in the `typing` or `typing_extensions` modules
    pub(crate) fn is_typing_definition(self, db: &'db dyn Db) -> bool {
        file_to_module(db, self.file(db)).is_some_and(|module| {
            module.search_path().is_standard_library()
                && matches!(&**module.name(), "typing" | "typing_extensions")
        })
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
    Import(&'a ast::Alias),
    ImportFrom(ImportFromDefinitionNodeRef<'a>),
    For(ForStmtDefinitionNodeRef<'a>),
    Function(&'a ast::StmtFunctionDef),
    Class(&'a ast::StmtClassDef),
    NamedExpression(&'a ast::ExprNamed),
@@ -48,6 +70,8 @@ pub(crate) enum DefinitionNodeRef<'a> {
    Comprehension(ComprehensionDefinitionNodeRef<'a>),
    Parameter(ast::AnyParameterRef<'a>),
    WithItem(WithItemDefinitionNodeRef<'a>),
    MatchPattern(MatchPatternDefinitionNodeRef<'a>),
    ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -92,6 +116,12 @@ impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
        Self::For(value)
    }
}

impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
        Self::Assignment(node_ref)
@@ -116,6 +146,12 @@ impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
        Self::MatchPattern(node)
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::StmtImportFrom,
@@ -134,10 +170,36 @@ pub(crate) struct WithItemDefinitionNodeRef<'a> {
    pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
    pub(crate) iterable: &'a ast::Expr,
    pub(crate) target: &'a ast::ExprName,
    pub(crate) is_async: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
    pub(crate) handler: &'a ast::ExceptHandlerExceptHandler,
    pub(crate) is_star: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::Comprehension,
    pub(crate) iterable: &'a ast::Expr,
    pub(crate) target: &'a ast::ExprName,
    pub(crate) first: bool,
    pub(crate) is_async: bool,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
    /// The outermost pattern node in which the identifier being defined occurs.
    pub(crate) pattern: &'a ast::Pattern,
    /// The identifier being defined.
    pub(crate) identifier: &'a ast::Identifier,
    /// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
    /// order.
    pub(crate) index: u32,
}

impl DefinitionNodeRef<'_> {
@@ -174,12 +236,26 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
                DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
            }
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
                DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    first,
                })
            }
            DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
                iterable,
                target,
                is_async,
            }) => DefinitionKind::For(ForStmtDefinitionKind {
                iterable: AstNodeRef::new(parsed.clone(), iterable),
                target: AstNodeRef::new(parsed, target),
                is_async,
            }),
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
                iterable,
                target,
                first,
                is_async,
            }) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                iterable: AstNodeRef::new(parsed.clone(), iterable),
                target: AstNodeRef::new(parsed, target),
                first,
                is_async,
            }),
            DefinitionNodeRef::Parameter(parameter) => match parameter {
                ast::AnyParameterRef::Variadic(parameter) => {
                    DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
@@ -194,6 +270,22 @@ impl DefinitionNodeRef<'_> {
                    target: AstNodeRef::new(parsed, target),
                })
            }
            DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
                pattern,
                identifier,
                index,
            }) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
                pattern: AstNodeRef::new(parsed.clone(), pattern),
                identifier: AstNodeRef::new(parsed, identifier),
                index,
            }),
            DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
                handler,
                is_star,
            }) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
                handler: AstNodeRef::new(parsed.clone(), handler),
                is_star,
            }),
        }
    }

@@ -212,16 +304,60 @@ impl DefinitionNodeRef<'_> {
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
            Self::AugmentedAssignment(node) => node.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
            Self::For(ForStmtDefinitionNodeRef {
                iterable: _,
                target,
                is_async: _,
            }) => target.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
            Self::Parameter(node) => match node {
                ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
                ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
            },
            Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
            Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
                identifier.into()
            }
            Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
        }
    }
}

#[derive(Clone, Copy, Debug)]
pub(crate) enum DefinitionCategory {
    /// A Definition which binds a value to a name (e.g. `x = 1`).
    Binding,
    /// A Definition which declares the upper-bound of acceptable types for this name (`x: int`).
    Declaration,
    /// A Definition which both declares a type and binds a value (e.g. `x: int = 1`).
    DeclarationAndBinding,
}

impl DefinitionCategory {
    /// True if this definition establishes a "declared type" for the symbol.
    ///
    /// If so, any assignments reached by this definition are in error if they assign a value of a
    /// type not assignable to the declared type.
    ///
    /// Annotations establish a declared type. So do function and class definitions, and imports.
    pub(crate) fn is_declaration(self) -> bool {
        matches!(
            self,
            DefinitionCategory::Declaration | DefinitionCategory::DeclarationAndBinding
        )
    }

    /// True if this definition assigns a value to the symbol.
    ///
    /// False only for annotated assignments without a RHS.
    pub(crate) fn is_binding(self) -> bool {
        matches!(
            self,
            DefinitionCategory::Binding | DefinitionCategory::DeclarationAndBinding
        )
    }
}
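The three categories correspond directly to Python statement forms; a short example of each:

```python
x = 1       # Binding: assigns a value, declares no type
y: int      # Declaration: declares `int`, assigns nothing (`y` stays unbound)
z: int = 1  # DeclarationAndBinding: declares `int` and binds 1

try:
    print(y)
except NameError as err:
    print(err)  # declared but never bound
```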
#[derive(Clone, Debug)]
pub enum DefinitionKind {
    Import(AstNodeRef<ast::Alias>),
@@ -232,26 +368,102 @@ pub enum DefinitionKind {
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
    AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
    For(ForStmtDefinitionKind),
    Comprehension(ComprehensionDefinitionKind),
    Parameter(AstNodeRef<ast::Parameter>),
    ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
    WithItem(WithItemDefinitionKind),
    MatchPattern(MatchPatternDefinitionKind),
    ExceptHandler(ExceptHandlerDefinitionKind),
}

impl DefinitionKind {
    pub(crate) fn category(&self) -> DefinitionCategory {
        match self {
            // functions, classes, and imports always bind, and we consider them declarations
            DefinitionKind::Function(_)
            | DefinitionKind::Class(_)
            | DefinitionKind::Import(_)
            | DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
            // a parameter always binds a value, but is only a declaration if annotated
            DefinitionKind::Parameter(parameter) => {
                if parameter.annotation.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Binding
                }
            }
            // presence of a default is irrelevant, same logic as for a no-default parameter
            DefinitionKind::ParameterWithDefault(parameter_with_default) => {
                if parameter_with_default.parameter.annotation.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Binding
                }
            }
            // annotated assignment is always a declaration, only a binding if there is a RHS
            DefinitionKind::AnnotatedAssignment(ann_assign) => {
                if ann_assign.value.is_some() {
                    DefinitionCategory::DeclarationAndBinding
                } else {
                    DefinitionCategory::Declaration
                }
            }
            // all of these bind values without declaring a type
            DefinitionKind::NamedExpression(_)
            | DefinitionKind::Assignment(_)
            | DefinitionKind::AugmentedAssignment(_)
            | DefinitionKind::For(_)
            | DefinitionKind::Comprehension(_)
            | DefinitionKind::WithItem(_)
            | DefinitionKind::MatchPattern(_)
            | DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding,
        }
    }
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
    pattern: AstNodeRef<ast::Pattern>,
    identifier: AstNodeRef<ast::Identifier>,
    index: u32,
}

impl MatchPatternDefinitionKind {
    pub(crate) fn pattern(&self) -> &ast::Pattern {
        self.pattern.node()
    }

    pub(crate) fn index(&self) -> u32 {
        self.index
    }
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
    node: AstNodeRef<ast::Comprehension>,
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
    first: bool,
    is_async: bool,
}

impl ComprehensionDefinitionKind {
    pub(crate) fn node(&self) -> &ast::Comprehension {
        self.node.node()
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }

    pub(crate) fn is_first(&self) -> bool {
        self.first
    }

    pub(crate) fn is_async(&self) -> bool {
        self.is_async
    }
}

#[derive(Clone, Debug)]
@@ -302,6 +514,47 @@ impl WithItemDefinitionKind {
    }
}

#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
    is_async: bool,
}

impl ForStmtDefinitionKind {
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }

    pub(crate) fn is_async(&self) -> bool {
        self.is_async
    }
}

#[derive(Clone, Debug)]
pub struct ExceptHandlerDefinitionKind {
    handler: AstNodeRef<ast::ExceptHandlerExceptHandler>,
    is_star: bool,
}

impl ExceptHandlerDefinitionKind {
    pub(crate) fn node(&self) -> &ast::ExceptHandlerExceptHandler {
        self.handler.node()
    }

    pub(crate) fn handled_exceptions(&self) -> Option<&ast::Expr> {
        self.node().type_.as_deref()
    }

    pub(crate) fn is_star(&self) -> bool {
        self.is_star
    }
}
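The `is_star` flag distinguishes `except*` handlers, which bind the name to an `ExceptionGroup` of the matching exceptions rather than a single exception (Python 3.11+):

```python
try:
    raise ExceptionGroup("batch", [ValueError("a"), TypeError("b")])
except* ValueError as group:
    # `group` is an ExceptionGroup containing only the ValueErrors.
    print([type(e).__name__ for e in group.exceptions])
except* TypeError as group:
    print([type(e).__name__ for e in group.exceptions])
```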
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct DefinitionNodeKey(NodeKey);

@@ -347,9 +600,9 @@ impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
    }
}

impl From<&ast::Comprehension> for DefinitionNodeKey {
    fn from(node: &ast::Comprehension) -> Self {
        Self(NodeKey::from_node(node))
impl From<&ast::StmtFor> for DefinitionNodeKey {
    fn from(value: &ast::StmtFor) -> Self {
        Self(NodeKey::from_node(value))
    }
}

@@ -364,3 +617,15 @@ impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Identifier> for DefinitionNodeKey {
    fn from(identifier: &ast::Identifier) -> Self {
        Self(NodeKey::from_node(identifier))
    }
}

impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
    fn from(handler: &ast::ExceptHandlerExceptHandler) -> Self {
        Self(NodeKey::from_node(handler))
    }
}

@@ -44,16 +44,16 @@ impl Symbol {
    }

    /// Is the symbol defined in its containing scope?
    pub fn is_defined(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_DEFINED)
    pub fn is_bound(&self) -> bool {
        self.flags.contains(SymbolFlags::IS_BOUND)
    }
}

bitflags! {
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub(super) struct SymbolFlags: u8 {
    struct SymbolFlags: u8 {
        const IS_USED = 1 << 0;
        const IS_DEFINED = 1 << 1;
        const IS_BOUND = 1 << 1;
        /// TODO: This flag is not yet set by anything
        const MARKED_GLOBAL = 1 << 2;
        /// TODO: This flag is not yet set by anything
@@ -149,6 +149,10 @@ impl FileScopeId {
        FileScopeId::from_u32(0)
    }

    pub fn is_global(self) -> bool {
        self == FileScopeId::global()
    }

    pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
        let index = semantic_index(db, file);
        index.scope_ids_by_scope[self]
@@ -268,11 +272,7 @@ impl SymbolTableBuilder {
        }
    }

    pub(super) fn add_or_update_symbol(
        &mut self,
        name: Name,
        flags: SymbolFlags,
    ) -> (ScopedSymbolId, bool) {
    pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
        let hash = SymbolTable::hash_name(&name);
        let entry = self
            .table
@@ -281,15 +281,9 @@ impl SymbolTableBuilder {
            .from_hash(hash, |id| self.table.symbols[*id].name() == &name);

        match entry {
            RawEntryMut::Occupied(entry) => {
                let symbol = &mut self.table.symbols[*entry.key()];
                symbol.insert_flags(flags);

                (*entry.key(), false)
            }
            RawEntryMut::Occupied(entry) => (*entry.key(), false),
            RawEntryMut::Vacant(entry) => {
                let mut symbol = Symbol::new(name);
                symbol.insert_flags(flags);
                let symbol = Symbol::new(name);

                let id = self.table.symbols.push(symbol);
                entry.insert_with_hasher(hash, id, (), |id| {
@@ -300,6 +294,14 @@ impl SymbolTableBuilder {
        }
    }

    pub(super) fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
        self.table.symbols[id].insert_flags(SymbolFlags::IS_BOUND);
    }

    pub(super) fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
        self.table.symbols[id].insert_flags(SymbolFlags::IS_USED);
    }

    pub(super) fn finish(mut self) -> SymbolTable {
        self.table.shrink_to_fit();
        self.table
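The `IS_BOUND` rename matches the terminology introduced below: a symbol is bound once some control flow path assigns it, but a given use can still be reached on a path with no binding. For example:

```python
def maybe(flag: bool):
    if flag:
        x = 1  # the only binding of `x`
    # On the `flag == False` path, `x` is still unbound here, so this use
    # has one live binding *and* may be unbound.
    return x

maybe(True)   # returns 1
# maybe(False) would raise UnboundLocalError
```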
@@ -1,5 +1,79 @@
//! Build a map from each use of a symbol to the definitions visible from that use, and the
//! type-narrowing constraints that apply to each definition.
//! First, some terminology:
//!
//! * A "binding" gives a new value to a variable. This includes many different Python statements
//!   (assignment statements of course, but also imports, `def` and `class` statements, `as`
//!   clauses in `with` and `except` statements, match patterns, and others) and even one
//!   expression kind (named expressions). It notably does not include annotated assignment
//!   statements without a right-hand side value; these do not assign any new value to the
//!   variable. We consider function parameters to be bindings as well, since (from the perspective
//!   of the function's internal scope), a function parameter begins the scope bound to a value.
//!
//! * A "declaration" establishes an upper bound type for the values that a variable may be
//!   permitted to take on. Annotated assignment statements (with or without an RHS value) are
//!   declarations; annotated function parameters are also declarations. We consider `def` and
//!   `class` statements to also be declarations, so as to prohibit accidentally shadowing them.
//!
//! Annotated assignments with a right-hand side, and annotated function parameters, are both
//! bindings and declarations.
//!
//! We use [`Definition`] as the universal term (and Salsa tracked struct) encompassing both
//! bindings and declarations. (This sacrifices a bit of type safety in exchange for improved
//! performance via fewer Salsa tracked structs and queries, since most declarations -- typed
//! parameters and annotated assignments with RHS -- are both bindings and declarations.)
//!
//! At any given use of a variable, we can ask about both its "declared type" and its "inferred
//! type". These may be different, but the inferred type must always be assignable to the declared
//! type; that is, the declared type is always wider, and the inferred type may be more precise. If
//! we see an invalid assignment, we emit a diagnostic and abandon our inferred type, deferring to
//! the declared type (this allows an explicit annotation to override bad inference, without a
//! cast), maintaining the invariant.
//!
//! The **inferred type** represents the most precise type we believe encompasses all possible
//! values for the variable at a given use. It is based on a union of the bindings which can reach
//! that use through some control flow path, and the narrowing constraints that control flow must
//! have passed through between the binding and the use. For example, in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     use(x)
//! ```
//!
//! For the use of `x` on the third line, the inferred type should be `Literal[1]`. This is based
//! on the binding on the first line, which assigns the type `Literal[1] | None`, and the narrowing
//! constraint on the second line, which rules out the type `None`, since control flow must pass
//! through this constraint to reach the use in question.
//!
//! The **declared type** represents the code author's declaration (usually through a type
//! annotation) that a given variable should not be assigned any type outside the declared type. In
//! our model, declared types are also control-flow-sensitive; we allow the code author to
//! explicitly re-declare the same variable with a different type. So for a given binding of a
//! variable, we will want to ask which declarations of that variable can reach that binding, in
//! order to determine whether the binding is permitted, or should be a type error. For example:
//!
//! ```python
//! from pathlib import Path
//! def f(path: str):
//!     path: Path = Path(path)
//! ```
//!
//! In this function, the initial declared type of `path` is `str`, meaning that the assignment
//! `path = Path(path)` would be a type error, since it assigns to `path` a value whose type is not
//! assignable to `str`. This is the purpose of declared types: they prevent accidental assignment
//! of the wrong type to a variable.
//!
//! But in some cases it is useful to "shadow" or "re-declare" a variable with a new type, and we
//! permit this, as long as it is done with an explicit re-annotation. So `path: Path =
//! Path(path)`, with the explicit `: Path` annotation, is permitted.
//!
//! The general rule is that whatever declaration(s) can reach a given binding determine the
//! validity of that binding. If there is a path in which the symbol is not declared, that is a
//! declaration of `Unknown`. If multiple declarations can reach a binding, we union them, but by
//! default we also issue a type error, since this implicit union of declared types may hide an
//! error.
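//!
//! For example, both branches below declare `x`, so both declarations reach the
//! final binding (a minimal sketch):
//!
//! ```python
//! def f(flag: bool):
//!     if flag:
//!         x: int = 0
//!     else:
//!         x: str = ""
//!     # Both declarations reach this binding; the declared type is the implicit
//!     # union `int | str`, and by default the union itself is also flagged.
//!     x = 1
//! ```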
//!
//! To support type inference, we build a map from each use of a symbol to the bindings live at
//! that use, and the type narrowing constraints that apply to each binding.
//!
//! Let's take this code sample:
//!
@@ -7,148 +81,157 @@
|
||||
//! x = 1
|
||||
//! x = 2
|
||||
//! y = x
|
||||
//! if y is not None:
|
||||
//! if flag:
|
||||
//! x = 3
|
||||
//! else:
|
||||
//! x = 4
|
||||
//! z = x
|
||||
//! ```
|
||||
//!
|
||||
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
|
||||
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
|
||||
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
|
||||
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
|
||||
//! as likely superfluous.)
|
||||
//! In this snippet, we have four bindings of `x` (the statements assigning `1`, `2`, `3`, and `4`
|
||||
//! to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first binding of `x`
|
||||
//! does not reach any use, because it's immediately replaced by the second binding, before any use
|
||||
//! happens. (A linter could thus flag the statement `x = 1` as likely superfluous.)
|
||||
//!
|
||||
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
|
||||
//! The first use of `x` has one live binding: the assignment `x = 2`.
|
||||
//!
|
||||
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
|
||||
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
|
||||
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
|
||||
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
|
||||
//! must consider both definitions as visible, which means eventually we would (in type inference)
|
||||
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
|
||||
//! and `Literal[4]` -- for the second use of `x`.
|
||||
//! the `else` branch. Thus, the second use of `x` has two live bindings: `x = 3` and `x = 4`. The
|
||||
//! `x = 2` assignment is no longer visible, because it must be replaced by either `x = 3` or `x =
|
||||
//! 4`, no matter which branch was taken. We don't know which branch was taken, so we must consider
|
||||
//! both bindings as live, which means eventually we would (in type inference) look at these two
|
||||
//! bindings and infer a type of `Literal[3, 4]` -- the union of `Literal[3]` and `Literal[4]` --
|
||||
//! for the second use of `x`.
|
||||
//!
|
||||
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
|
||||
//! definition(s) is/are visible from that use. In
|
||||
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
|
||||
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
|
||||
//! binding(s) can reach that use. In [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number
|
||||
//! all uses (that means a `Name` node with `Load` context) so we have a `ScopedUseId` to
|
||||
//! efficiently represent each use.
|
||||
//!
|
||||
//! Another case we need to handle is when a symbol is referenced from a different scope (the most
|
||||
//! obvious example of this is an import). We call this "public" use of a symbol. So the other
|
||||
//! question we need to be able to answer is, what are the publicly-visible definitions of each
|
||||
//! symbol?
|
||||
//!
|
||||
//! Technically, public use of a symbol could also occur from any point in control flow of the
|
||||
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
|
||||
//! import, or via a function call partway through the local scope that ends up using a symbol from
|
||||
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
|
||||
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
|
||||
//! means a given symbol could have a different type every place it's referenced throughout the
|
||||
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
|
||||
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
|
||||
//! execution before its symbols are made visible to other scopes; for instance, most imports will
|
||||
//! import from a complete module, not a partially-executed module. (We may want to get a little
|
||||
//! smarter than this in the future, in particular for closures, but for now this is where we
|
||||
//! start.)
|
||||
//!
|
||||
//! So this means that the publicly-visible definitions of a symbol are the definitions still
|
||||
//! visible at the end of the scope; effectively we have an implicit "use" of every symbol at the
|
||||
//! end of the scope.
|
||||
//!
|
||||
//! We also need to know, for a given definition of a symbol, what type-narrowing constraints apply
|
||||
//! We also need to know, for a given definition of a symbol, what type narrowing constraints apply
|
||||
//! to it. For instance, in this code sample:
|
||||
//!
|
||||
//! ```python
|
||||
//! x = 1 if flag else None
|
||||
//! if x is not None:
|
||||
//! y = x
|
||||
//! use(x)
|
||||
//! ```
|
||||
//!
|
||||
//! At the use of `x` in `y = x`, the visible definition of `x` is `1 if flag else None`, which
|
||||
//! would infer as the type `Literal[1] | None`. But the constraint `x is not None` dominates this
|
||||
//! use, which means we can rule out the possibility that `x` is `None` here, which should give us
|
||||
//! the type `Literal[1]` for this use.
|
||||
//! At the use of `x`, the live binding of `x` is `1 if flag else None`, which would infer as the
|
||||
//! type `Literal[1] | None`. But the constraint `x is not None` dominates this use, which means we
|
||||
//! can rule out the possibility that `x` is `None` here, which should give us the type
|
||||
//! `Literal[1]` for this use.
|
||||
//!
|
||||
//! For declared types, we need to be able to answer the question "given a binding to a symbol,
|
||||
//! which declarations of that symbol can reach the binding?" This allows us to emit a diagnostic
|
||||
//! if the binding is attempting to bind a value of a type that is not assignable to the declared
|
||||
//! type for that symbol, at that point in control flow.
|
||||
//!
|
||||
//! We also need to know, given a declaration of a symbol, what the inferred type of that symbol is
|
||||
//! at that point. This allows us to emit a diagnostic in a case like `x = "foo"; x: int`. The
|
||||
//! binding `x = "foo"` occurs before the declaration `x: int`, so according to our
|
||||
//! control-flow-sensitive interpretation of declarations, the assignment is not an error. But the
|
||||
//! declaration is an error, since it would violate the "inferred type must be assignable to
|
||||
//! declared type" rule.
|
||||
//!
|
||||
//! Another case we need to handle is when a symbol is referenced from a different scope (for
|
||||
//! example, an import or a nonlocal reference). We call this "public" use of a symbol. For public
|
||||
//! use of a symbol, we prefer the declared type, if there are any declarations of that symbol; if
|
||||
//! not, we fall back to the inferred type. So we also need to know which declarations and bindings
|
||||
//! can reach the end of the scope.
|
||||
//!
|
||||
//! Technically, public use of a symbol could occur from any point in control flow of the scope
|
||||
//! where the symbol is defined (via inline imports and import cycles, in the case of an import, or
|
||||
//! via a function call partway through the local scope that ends up using a symbol from the scope
|
||||
//! via a global or nonlocal reference.) But modeling this fully accurately requires whole-program
|
||||
//! analysis that isn't tractable for an efficient analysis, since it means a given symbol could
|
||||
//! have a different type every place it's referenced throughout the program, depending on the
|
||||
//! shape of arbitrarily-sized call/import graphs. So we follow other Python type checkers in
|
||||
//! making the simplifying assumption that usually the scope will finish execution before its
|
||||
//! symbols are made visible to other scopes; for instance, most imports will import from a
|
||||
//! complete module, not a partially-executed module. (We may want to get a little smarter than
|
||||
//! this in the future for some closures, but for now this is where we start.)
|
||||
//!
|
||||
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
|
||||
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
|
||||
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
|
||||
//! visible definitions at that use, or at the end of the scope for that symbol, with a list of the
|
||||
//! dominating constraints for each of those definitions.
|
||||
//! `bindings_by_use` vector of [`SymbolBindings`] indexed by [`ScopedUseId`], a
|
||||
//! `declarations_by_binding` vector of [`SymbolDeclarations`] indexed by [`ScopedDefinitionId`], a
|
||||
//! `bindings_by_declaration` vector of [`SymbolBindings`] indexed by [`ScopedDefinitionId`], and
|
||||
//! `public_bindings` and `public_definitions` vectors indexed by [`ScopedSymbolId`]. The values in
|
||||
//! each of these vectors are (in principle) a list of live bindings at that use/definition, or at
|
||||
//! the end of the scope for that symbol, with a list of the dominating constraints for each
|
||||
//! binding.
|
||||
//!
|
||||
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
|
||||
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
|
||||
//! Instead, the values in `definitions_by_use` and `public_definitions` are a [`SymbolState`]
|
||||
//! struct which uses bit-sets to track definitions and constraints in terms of
|
||||
//! [`ScopedDefinitionId`] and [`ScopedConstraintId`], which are indices into the `all_definitions`
|
||||
//! and `all_constraints` indexvecs in the [`UseDefMap`].
|
||||
//! Instead, [`SymbolBindings`] and [`SymbolDeclarations`] are structs which use bit-sets to track
|
||||
//! definitions (and constraints, in the case of bindings) in terms of [`ScopedDefinitionId`] and
|
||||
//! [`ScopedConstraintId`], which are indices into the `all_definitions` and `all_constraints`
|
||||
//! indexvecs in the [`UseDefMap`].
|
||||
//!
|
||||
//! There is another special kind of possible "definition" for a symbol: there might be a path from
|
||||
//! the scope entry to a given use in which the symbol is never bound.
|
||||
//!
|
||||
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
|
||||
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
|
||||
//! unnecessarily increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
|
||||
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
|
||||
//! state, and constraints are irrelevant to it, we can represent it more efficiently: we use the
|
||||
//! `may_be_unbound` boolean on the [`SymbolState`] struct. If this flag is `true`, it means the
|
||||
//! symbol/use really has one additional visible "definition", which is the unbound state. If this
|
||||
//! flag is `false`, it means we've eliminated the possibility of unbound: every path we've
|
||||
//! followed includes a definition for this symbol.
|
||||
//! The simplest way to model "unbound" would be as a "binding" itself: the initial "binding" for
|
||||
//! each symbol in a scope. But actually modeling it this way would unnecessarily increase the
|
||||
//! number of [`Definition`]s that Salsa must track. Since "unbound" is special in that all symbols
|
||||
//! share it, and it doesn't have any additional per-symbol state, and constraints are irrelevant
|
||||
//! to it, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
|
||||
//! [`SymbolBindings`] struct. If this flag is `true` for a use of a symbol, it means the symbol
|
||||
//! has a path to the use in which it is never bound. If this flag is `false`, it means we've
|
||||
//! eliminated the possibility of unbound: every control flow path to the use includes a binding
|
||||
//! for this symbol.
|
||||
//!
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
|
||||
//! constraint as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the `SymbolState` for that symbol. When we hit a use of a
|
||||
//! symbol, it records the current state for that symbol for that use. When we reach the end of the
|
||||
//! scope, it records the state for each symbol as the public definitions of that symbol.
|
||||
//! each symbol, the builder tracks the `SymbolState` (`SymbolBindings` and `SymbolDeclarations`)
|
||||
//! for that symbol. When we hit a use or definition of a symbol, we record the necessary parts of
|
||||
//! the current state for that symbol that we need for that use or definition. When we reach the
|
||||
//! end of the scope, it records the state for each symbol as the public definitions of that
|
||||
//! symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
|
||||
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
|
||||
//! visible definitions for that use of `x` are just the `x = 2` definition.
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no bindings, and
|
||||
//! may be unbound. When we see `x = 1`, we record that as the sole live binding of `x`, and flip
|
||||
//! `may_be_unbound` to `false`. Then we see `x = 2`, and we replace `x = 1` as the sole live
|
||||
//! binding of `x`. When we get to `y = x`, we record that the live bindings for that use of `x`
|
||||
//! are just the `x = 2` definition.
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we record `flag` as a possible constraint on the
|
||||
//! currently visible definition (`x = 2`), and go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` (constrained by `flag`) as the sole visible definition of `x`. At the
|
||||
//! end of the `if` body, we take another snapshot of the currently-visible definitions; we'll call
|
||||
//! this the post-if-body snapshot.
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the current state for all symbols,
|
||||
//! which we'll need later. Then we record `flag` as a possible constraint on the current binding
|
||||
//! (`x = 2`), and go ahead and visit the `if` body. When we see `x = 3`, it replaces `x = 2`
|
||||
//! (constrained by `flag`) as the sole live binding of `x`. At the end of the `if` body, we take
|
||||
//! another snapshot of the current symbol state; we'll call this the post-if-body snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
|
||||
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
|
||||
//! sole visible definition of `x`.
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole binding for `x`
|
||||
//! again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the sole live binding
|
||||
//! of `x`.
|
||||
//!
|
||||
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
|
||||
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
|
||||
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
|
||||
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
|
||||
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only live binding) with our post-if-body
|
||||
//! snapshot (which has `x = 3` as the only live binding). The result of this merge is that we now
|
||||
//! have two live bindings of `x`: `x = 3` and `x = 4`.
|
||||
//!
|
||||
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
|
||||
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
|
||||
//! visits a `StmtIf` node.
|
||||
//!
|
||||
//! (In the future we may have some other questions we want to answer as well, such as "is this
|
||||
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
|
||||
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
|
||||
use self::symbol_state::{
|
||||
ConstraintIdIterator, DefinitionIdWithConstraintsIterator, ScopedConstraintId,
|
||||
ScopedDefinitionId, SymbolState,
|
||||
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
|
||||
ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
|
||||
};
|
||||
use crate::semantic_index::ast_ids::ScopedUseId;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::ScopedSymbolId;
|
||||
use ruff_index::IndexVec;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use super::constraint::Constraint;
|
||||
|
||||
mod bitset;
|
||||
mod symbol_state;
|
||||
@@ -159,63 +242,135 @@ pub(crate) struct UseDefMap<'db> {
|
||||
/// Array of [`Definition`] in this scope.
|
||||
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
|
||||
|
||||
/// Array of constraints (as [`Expression`]) in this scope.
|
||||
all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,
|
||||
/// Array of [`Constraint`] in this scope.
|
||||
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
|
||||
|
||||
/// [`SymbolState`] visible at a [`ScopedUseId`].
|
||||
definitions_by_use: IndexVec<ScopedUseId, SymbolState>,
|
||||
/// [`SymbolBindings`] reaching a [`ScopedUseId`].
|
||||
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
|
||||
|
||||
/// [`SymbolBindings`] or [`SymbolDeclarations`] reaching a given [`Definition`].
|
||||
///
|
||||
/// If the definition is a binding (only) -- `x = 1` for example -- then we need
|
||||
/// [`SymbolDeclarations`] to know whether this binding is permitted by the live declarations.
|
||||
///
|
||||
/// If the definition is a declaration (only) -- `x: int` for example -- then we need
|
||||
/// [`SymbolBindings`] to know whether this declaration is consistent with the previously
|
||||
/// inferred type.
|
||||
///
|
||||
/// If the definition is both a declaration and a binding -- `x: int = 1` for example -- then
|
||||
/// we don't actually need anything here, all we'll need to validate is that our own RHS is a
|
||||
/// valid assignment to our own annotation.
    definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,

    /// [`SymbolState`] visible at end of scope for each symbol.
    public_definitions: IndexVec<ScopedSymbolId, SymbolState>,
    public_symbols: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(
    pub(crate) fn bindings_at_use(
        &self,
        use_id: ScopedUseId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.definitions_by_use[use_id].visible_definitions(),
        }
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        self.bindings_iterator(&self.bindings_by_use[use_id])
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound()
        self.bindings_by_use[use_id].may_be_unbound()
    }

    pub(crate) fn public_definitions(
    pub(crate) fn public_bindings(
        &self,
        symbol: ScopedSymbolId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.public_definitions[symbol].visible_definitions(),
        }
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        self.bindings_iterator(self.public_symbols[symbol].bindings())
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound()
        self.public_symbols[symbol].may_be_unbound()
    }

    pub(crate) fn bindings_at_declaration(
        &self,
        declaration: Definition<'db>,
    ) -> BindingWithConstraintsIterator<'_, 'db> {
        if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
        {
            self.bindings_iterator(bindings)
        } else {
            unreachable!("Declaration has non-Bindings in definitions_by_definition");
        }
    }

    pub(crate) fn declarations_at_binding(
        &self,
        binding: Definition<'db>,
    ) -> DeclarationsIterator<'_, 'db> {
        if let SymbolDefinitions::Declarations(declarations) =
            &self.definitions_by_definition[&binding]
        {
            self.declarations_iterator(declarations)
        } else {
            unreachable!("Binding has non-Declarations in definitions_by_definition");
        }
    }

    pub(crate) fn public_declarations(
        &self,
        symbol: ScopedSymbolId,
    ) -> DeclarationsIterator<'_, 'db> {
        let declarations = self.public_symbols[symbol].declarations();
        self.declarations_iterator(declarations)
    }

    pub(crate) fn has_public_declarations(&self, symbol: ScopedSymbolId) -> bool {
        !self.public_symbols[symbol].declarations().is_empty()
    }

    fn bindings_iterator<'a>(
        &'a self,
        bindings: &'a SymbolBindings,
    ) -> BindingWithConstraintsIterator<'a, 'db> {
        BindingWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: bindings.iter(),
        }
    }

    fn declarations_iterator<'a>(
        &'a self,
        declarations: &'a SymbolDeclarations,
    ) -> DeclarationsIterator<'a, 'db> {
        DeclarationsIterator {
            all_definitions: &self.all_definitions,
            inner: declarations.iter(),
            may_be_undeclared: declarations.may_be_undeclared(),
        }
    }
}

#[derive(Debug)]
pub(crate) struct DefinitionWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    inner: DefinitionIdWithConstraintsIterator<'map>,
/// Either live bindings or live declarations for a symbol.
#[derive(Debug, PartialEq, Eq)]
enum SymbolDefinitions {
    Bindings(SymbolBindings),
    Declarations(SymbolDeclarations),
}

impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
    type Item = DefinitionWithConstraints<'map, 'db>;
#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
    inner: BindingIdWithConstraintsIterator<'map>,
}

impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
    type Item = BindingWithConstraints<'map, 'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            .map(|def_id_with_constraints| DefinitionWithConstraints {
                definition: self.all_definitions[def_id_with_constraints.definition],
            .map(|def_id_with_constraints| BindingWithConstraints {
                binding: self.all_definitions[def_id_with_constraints.definition],
                constraints: ConstraintsIterator {
                    all_constraints: self.all_constraints,
                    constraint_ids: def_id_with_constraints.constraint_ids,
@@ -224,20 +379,20 @@ impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
    }
}

impl std::iter::FusedIterator for DefinitionWithConstraintsIterator<'_, '_> {}
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}

pub(crate) struct DefinitionWithConstraints<'map, 'db> {
    pub(crate) definition: Definition<'db>,
pub(crate) struct BindingWithConstraints<'map, 'db> {
    pub(crate) binding: Definition<'db>,
    pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}

pub(crate) struct ConstraintsIterator<'map, 'db> {
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Expression<'db>;
    type Item = Constraint<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.constraint_ids
@@ -248,25 +403,50 @@ impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {

impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

pub(crate) struct DeclarationsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    inner: DeclarationIdIterator<'map>,
    may_be_undeclared: bool,
}

impl DeclarationsIterator<'_, '_> {
    pub(crate) fn may_be_undeclared(&self) -> bool {
        self.may_be_undeclared
    }
}

impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
    type Item = Definition<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|def_id| self.all_definitions[def_id])
    }
}

impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}

/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
    symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Append-only array of [`Definition`]; None is unbound.
    /// Append-only array of [`Definition`].
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Append-only array of constraints (as [`Expression`]).
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,
    /// Append-only array of [`Constraint`].
    all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,
    /// Live bindings at each so-far-recorded use.
    bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
    /// Live bindings or declarations for each so-far-recorded definition.
    definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,

    /// Currently live bindings and declarations for each symbol.
    symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMapBuilder<'db> {
@@ -275,86 +455,104 @@ impl<'db> UseDefMapBuilder<'db> {
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(SymbolState::unbound());
        let new_symbol = self.symbol_states.push(SymbolState::undefined());
        debug_assert_eq!(symbol, new_symbol);
    }

    pub(super) fn record_definition(
    pub(super) fn record_binding(&mut self, symbol: ScopedSymbolId, binding: Definition<'db>) {
        let def_id = self.all_definitions.push(binding);
        let symbol_state = &mut self.symbol_states[symbol];
        self.definitions_by_definition.insert(
            binding,
            SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
        );
        symbol_state.record_binding(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for state in &mut self.symbol_states {
            state.record_constraint(constraint_id);
        }
    }

    pub(super) fn record_declaration(
        &mut self,
        symbol: ScopedSymbolId,
        declaration: Definition<'db>,
    ) {
        let def_id = self.all_definitions.push(declaration);
        let symbol_state = &mut self.symbol_states[symbol];
        self.definitions_by_definition.insert(
            declaration,
            SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
        );
        symbol_state.record_declaration(def_id);
    }

    pub(super) fn record_declaration_and_binding(
        &mut self,
        symbol: ScopedSymbolId,
        definition: Definition<'db>,
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        // We don't need to store anything in self.definitions_by_definition.
        let def_id = self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = SymbolState::with(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Expression<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for definitions in &mut self.definitions_by_symbol {
            definitions.add_constraint(constraint_id);
        }
        let symbol_state = &mut self.symbol_states[symbol];
        symbol_state.record_declaration(def_id);
        symbol_state.record_binding(def_id);
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
        // We have a use of a symbol; clone the currently visible definitions for that symbol, and
        // record them as the visible definitions for this use.
        // We have a use of a symbol; clone the current bindings for that symbol, and record them
        // as the live bindings for this use.
        let new_use = self
            .definitions_by_use
            .push(self.definitions_by_symbol[symbol].clone());
            .bindings_by_use
            .push(self.symbol_states[symbol].bindings().clone());
        debug_assert_eq!(use_id, new_use);
    }

    /// Take a snapshot of the current visible-symbols state.
    pub(super) fn snapshot(&self) -> FlowSnapshot {
        FlowSnapshot {
            definitions_by_symbol: self.definitions_by_symbol.clone(),
            symbol_states: self.symbol_states.clone(),
        }
    }

    /// Restore the current builder visible-definitions state to the given snapshot.
    /// Restore the current builder symbols state to the given snapshot.
    pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        let num_symbols = self.definitions_by_symbol.len();
        debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());
        let num_symbols = self.symbol_states.len();
        debug_assert!(num_symbols >= snapshot.symbol_states.len());

        // Restore the current visible-definitions state to the given snapshot.
        self.definitions_by_symbol = snapshot.definitions_by_symbol;
        self.symbol_states = snapshot.symbol_states;

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound".
        self.definitions_by_symbol
            .resize(num_symbols, SymbolState::unbound());
        // snapshot, the correct state to fill them in with is "undefined".
        self.symbol_states
            .resize(num_symbols, SymbolState::undefined());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    /// path to get here. The new state for each symbol should include definitions from both the
    /// prior state and the snapshot.
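    ///
    /// A sketch of the intended calling pattern (hypothetical caller code, following the
    /// `StmtIf` handling described in the module docs):
    ///
    /// ```ignore
    /// let pre_if = builder.snapshot();   // state before either branch
    /// // ... visit the `if` body, recording its bindings ...
    /// let post_if = builder.snapshot();  // state at end of the `if` body
    /// builder.restore(pre_if);           // rewind to visit the `else` branch
    /// // ... visit the `else` body ...
    /// builder.merge(post_if);            // either branch may have executed
    /// ```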
    pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them
        // adjacent. We can't ever move things around in `all_definitions` because previously
        // recorded uses may still have ranges pointing to any part of it; all we can do is append.
        // It's possible we may end up with some old entries in `all_definitions` that nobody is
        // pointing to, but that's OK.

        // We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
        // We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
        // IDs must line up), so the current number of known symbols must always be equal to or
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());
        debug_assert!(self.symbol_states.len() >= snapshot.symbol_states.len());

        let mut snapshot_definitions_iter = snapshot.definitions_by_symbol.into_iter();
        for current in &mut self.definitions_by_symbol {
        let mut snapshot_definitions_iter = snapshot.symbol_states.into_iter();
        for current in &mut self.symbol_states {
            if let Some(snapshot) = snapshot_definitions_iter.next() {
                current.merge(snapshot);
            } else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.add_unbound();
                // Symbol not present in snapshot, so it's unbound/undeclared from that path.
                current.set_may_be_unbound();
                current.set_may_be_undeclared();
            }
        }
    }
@@ -362,14 +560,16 @@ impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.all_constraints.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();
        self.symbol_states.shrink_to_fit();
        self.bindings_by_use.shrink_to_fit();
        self.definitions_by_definition.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            all_constraints: self.all_constraints,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
            bindings_by_use: self.bindings_by_use,
            public_symbols: self.symbol_states,
            definitions_by_definition: self.definitions_by_definition,
        }
    }
}

@@ -32,17 +32,25 @@ impl<const B: usize> BitSet<B> {
        bitset
    }

    pub(super) fn is_empty(&self) -> bool {
        self.blocks().iter().all(|&b| b == 0)
    }

    /// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
    fn resize(&mut self, value: u32) {
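        // Worked example: `value == 89` lives in block 1 (bits 64..=127), so we need
        // `(89 / 64) + 1 == 2` blocks in total.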
        let num_blocks_needed = (value / 64) + 1;
        self.resize_blocks(num_blocks_needed as usize);
    }

    fn resize_blocks(&mut self, num_blocks_needed: usize) {
        match self {
            Self::Inline(blocks) => {
                let mut vec = blocks.to_vec();
                vec.resize(num_blocks_needed as usize, 0);
                vec.resize(num_blocks_needed, 0);
                *self = Self::Heap(vec);
            }
            Self::Heap(vec) => {
                vec.resize(num_blocks_needed as usize, 0);
                vec.resize(num_blocks_needed, 0);
            }
        }
    }
@@ -89,6 +97,19 @@ impl<const B: usize> BitSet<B> {
        }
    }

    /// Union in-place with another [`BitSet`].
    pub(super) fn union(&mut self, other: &BitSet<B>) {
        let mut max_len = self.blocks().len();
        let other_len = other.blocks().len();
        if other_len > max_len {
            max_len = other_len;
            self.resize_blocks(max_len);
        }
        for (my_block, other_block) in self.blocks_mut().iter_mut().zip(other.blocks()) {
            *my_block |= other_block;
        }
    }

    /// Return an iterator over the values (in ascending order) in this [`BitSet`].
    pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
        let blocks = self.blocks();
@@ -218,6 +239,59 @@ mod tests {
        assert_bitset(&b1, &[89]);
    }

    #[test]
    fn union() {
        let mut b1 = BitSet::<1>::with(2);
        let b2 = BitSet::<1>::with(4);

        b1.union(&b2);
        assert_bitset(&b1, &[2, 4]);
    }

    #[test]
    fn union_mixed_1() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(5);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 5, 89]);
    }

    #[test]
    fn union_mixed_2() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(89);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 23, 89]);
    }

    #[test]
    fn union_heap() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(90);

        b1.union(&b2);
        assert_bitset(&b1, &[4, 89, 90]);
    }

    #[test]
    fn union_heap_2() {
        let mut b1 = BitSet::<1>::with(89);
        let mut b2 = BitSet::<1>::with(89);
        b1.insert(91);
        b2.insert(90);

        b1.union(&b2);
        assert_bitset(&b1, &[89, 90, 91]);
    }

    #[test]
    fn multiple_blocks() {
        let mut b = BitSet::<2>::with(120);
@@ -225,4 +299,11 @@ mod tests {
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[45, 120]);
    }

    #[test]
    fn empty() {
        let b = BitSet::<1>::default();

        assert!(b.is_empty());
    }
}

@@ -1,13 +1,13 @@
//! Track visible definitions of a symbol, and applicable constraints per definition.
//! Track live bindings per symbol, applicable constraints per binding, and live declarations.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between definitions and constraints, not just a single
//! set of currently dominating constraints (where "dominating" means "control flow must have
//! passed through it to reach this point"), because we can have dominating constraints that apply
//! to some definitions but not others, as in this code:
//! We need to track arbitrary associations between bindings and constraints, not just a single set
//! of currently dominating constraints (where "dominating" means "control flow must have passed
//! through it to reach this point"), because we can have dominating constraints that apply to some
//! bindings but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
@@ -18,11 +18,11 @@
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! definition of `x`, not the second, so `None` is a possible value for `x`.
//! binding of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each definition, an index into a list of dominating constraints,
//! either, because we can have definitions which are still visible, but subject to constraints
//! that are no longer dominating, as in this code:
//! And we can't just track, for each binding, an index into a list of dominating constraints,
//! either, because we can have bindings which are still visible, but subject to constraints that
//! are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
@@ -33,13 +33,16 @@
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` definition, so we have to keep
//! dominates, but it does dominate the `x = 1 if flag2 else None` binding, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of definitions and constraints when
//! needed.
//! to heap allocation to be able to scale to arbitrary numbers of live bindings and constraints
//! when needed.
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;
@@ -53,93 +56,200 @@ pub(super) struct ScopedDefinitionId;
pub(super) struct ScopedConstraintId;

/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_DEFINITION_BLOCKS: usize = 3;
const INLINE_BINDING_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing visible definitions of a symbol in a scope.
type Definitions = BitSet<INLINE_DEFINITION_BLOCKS>;
type DefinitionsIterator<'a> = BitSetIterator<'a, INLINE_DEFINITION_BLOCKS>;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;

/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;

/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;

/// Can keep inline this many visible definitions per symbol at a given time; more will go to heap.
const INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL: usize = 4;
/// Can keep inline this many live bindings per symbol at a given time; more will go to heap.
const INLINE_BINDINGS_PER_SYMBOL: usize = 4;

/// One [`BitSet`] of applicable [`ScopedConstraintId`] per visible definition.
type InlineConstraintArray =
    [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL];
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per live binding.
type InlineConstraintArray = [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_BINDINGS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;

/// Visible definitions and narrowing constraints for a single symbol at some point in control flow.
/// Live declarations for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
    /// [`BitSet`]: which [`ScopedDefinitionId`] are visible for this symbol?
    visible_definitions: Definitions,
pub(super) struct SymbolDeclarations {
    /// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
    live_declarations: Declarations,

    /// For each definition, which [`ScopedConstraintId`] apply?
    /// Could the symbol be un-declared at this point?
    may_be_undeclared: bool,
}

impl SymbolDeclarations {
    fn undeclared() -> Self {
        Self {
            live_declarations: Declarations::default(),
            may_be_undeclared: true,
        }
    }

    /// Record a newly-encountered declaration for this symbol.
    fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
        self.live_declarations = Declarations::with(declaration_id.into());
        self.may_be_undeclared = false;
    }

    /// Add undeclared as a possibility for this symbol.
    fn set_may_be_undeclared(&mut self) {
        self.may_be_undeclared = true;
    }

    /// Return an iterator over live declarations for this symbol.
    pub(super) fn iter(&self) -> DeclarationIdIterator {
        DeclarationIdIterator {
            inner: self.live_declarations.iter(),
        }
    }

    pub(super) fn is_empty(&self) -> bool {
        self.live_declarations.is_empty()
    }

    pub(super) fn may_be_undeclared(&self) -> bool {
        self.may_be_undeclared
    }
}

/// Live bindings and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolBindings {
    /// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
    live_bindings: Bindings,

    /// For each live binding, which [`ScopedConstraintId`] apply?
    ///
    /// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
    /// definition in `visible_definitions`.
    /// binding in `live_bindings`.
    constraints: Constraints,

    /// Could the symbol be unbound at this point?
    may_be_unbound: bool,
}

/// A single [`ScopedDefinitionId`] with an iterator of its applicable [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraints<'a> {
    pub(super) definition: ScopedDefinitionId,
    pub(super) constraint_ids: ConstraintIdIterator<'a>,
}

impl SymbolState {
    /// Return a new [`SymbolState`] representing an unbound symbol.
    pub(super) fn unbound() -> Self {
impl SymbolBindings {
    fn unbound() -> Self {
        Self {
            visible_definitions: Definitions::default(),
            live_bindings: Bindings::default(),
            constraints: Constraints::default(),
            may_be_unbound: true,
        }
    }

    /// Return a new [`SymbolState`] representing a symbol with a single visible definition.
    pub(super) fn with(definition_id: ScopedDefinitionId) -> Self {
        let mut constraints = Constraints::with_capacity(1);
        constraints.push(BitSet::default());
        Self {
            visible_definitions: Definitions::with(definition_id.into()),
            constraints,
            may_be_unbound: false,
        }
    }

    /// Add Unbound as a possibility for this symbol.
    pub(super) fn add_unbound(&mut self) {
    fn set_may_be_unbound(&mut self) {
        self.may_be_unbound = true;
    }

    /// Add given constraint to all currently-visible definitions.
    pub(super) fn add_constraint(&mut self, constraint_id: ScopedConstraintId) {
    /// Record a newly-encountered binding for this symbol.
    pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
        // The new binding replaces all previous live bindings in this path, and has no
        // constraints.
        self.live_bindings = Bindings::with(binding_id.into());
        self.constraints = Constraints::with_capacity(1);
        self.constraints.push(BitSet::default());
        self.may_be_unbound = false;
    }

    /// Add given constraint to all live bindings.
    pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
        for bitset in &mut self.constraints {
            bitset.insert(constraint_id.into());
        }
    }

    /// Iterate over currently live bindings for this symbol.
    pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
        BindingIdWithConstraintsIterator {
            definitions: self.live_bindings.iter(),
            constraints: self.constraints.iter(),
        }
    }

    pub(super) fn may_be_unbound(&self) -> bool {
        self.may_be_unbound
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
    declarations: SymbolDeclarations,
    bindings: SymbolBindings,
}

impl SymbolState {
    /// Return a new [`SymbolState`] representing an unbound, undeclared symbol.
    pub(super) fn undefined() -> Self {
        Self {
            declarations: SymbolDeclarations::undeclared(),
            bindings: SymbolBindings::unbound(),
        }
    }

    /// Add Unbound as a possibility for this symbol.
    pub(super) fn set_may_be_unbound(&mut self) {
        self.bindings.set_may_be_unbound();
    }

    /// Record a newly-encountered binding for this symbol.
    pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
        self.bindings.record_binding(binding_id);
    }

    /// Add given constraint to all live bindings.
    pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
        self.bindings.record_constraint(constraint_id);
    }

    /// Add undeclared as a possibility for this symbol.
    pub(super) fn set_may_be_undeclared(&mut self) {
        self.declarations.set_may_be_undeclared();
    }

    /// Record a newly-encountered declaration of this symbol.
    pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
        self.declarations.record_declaration(declaration_id);
    }

    /// Merge another [`SymbolState`] into this one.
    pub(super) fn merge(&mut self, b: SymbolState) {
        let mut a = Self {
            visible_definitions: Definitions::default(),
            constraints: Constraints::default(),
            may_be_unbound: self.may_be_unbound || b.may_be_unbound,
            bindings: SymbolBindings {
                live_bindings: Bindings::default(),
                constraints: Constraints::default(),
                may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
            },
            declarations: SymbolDeclarations {
                live_declarations: self.declarations.live_declarations.clone(),
                may_be_undeclared: self.declarations.may_be_undeclared
                    || b.declarations.may_be_undeclared,
            },
        };

        std::mem::swap(&mut a, self);
        let mut a_defs_iter = a.visible_definitions.iter();
        let mut b_defs_iter = b.visible_definitions.iter();
        let mut a_constraints_iter = a.constraints.into_iter();
        let mut b_constraints_iter = b.constraints.into_iter();
        self.declarations
            .live_declarations
            .union(&b.declarations.live_declarations);

        let mut a_defs_iter = a.bindings.live_bindings.iter();
        let mut b_defs_iter = b.bindings.live_bindings.iter();
        let mut a_constraints_iter = a.bindings.constraints.into_iter();
        let mut b_constraints_iter = b.bindings.constraints.into_iter();

        let mut opt_a_def: Option<u32> = a_defs_iter.next();
        let mut opt_b_def: Option<u32> = b_defs_iter.next();
@@ -152,7 +262,7 @@ impl SymbolState {

        // Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
        let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
            merged.visible_definitions.insert(def);
            merged.bindings.live_bindings.insert(def);
            // SAFETY: we only ever create SymbolState with either no definitions and no constraint
            // bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
            // `::merge` always pushes one definition and one constraint bitset together (just
@@ -161,7 +271,7 @@ impl SymbolState {
            let constraints = constraints_iter
                .next()
                .expect("definitions and constraints length mismatch");
            merged.constraints.push(constraints);
            merged.bindings.constraints.push(constraints);
        };

        loop {
@@ -191,7 +301,8 @@ impl SymbolState {
                        // If the same definition is visible through both paths, any constraint
                        // that applies on only one path is irrelevant to the resulting type from
                        // unioning the two paths, so we intersect the constraints.
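                        // For example (see the `merge` test below): if binding 1 carries
                        // constraint {1} on one path and {2} on the other, the merged state
                        // keeps binding 1 with no constraints ("1<>").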
                        self.constraints
                        self.bindings
                            .constraints
                            .last_mut()
                            .unwrap()
                            .intersect(&a_constraints);
@@ -214,40 +325,49 @@ impl SymbolState {
        }
    }

    /// Get iterator over visible definitions with constraints.
    pub(super) fn visible_definitions(&self) -> DefinitionIdWithConstraintsIterator {
        DefinitionIdWithConstraintsIterator {
            definitions: self.visible_definitions.iter(),
            constraints: self.constraints.iter(),
        }
    pub(super) fn bindings(&self) -> &SymbolBindings {
        &self.bindings
    }

    pub(super) fn declarations(&self) -> &SymbolDeclarations {
        &self.declarations
    }

    /// Could the symbol be unbound?
    pub(super) fn may_be_unbound(&self) -> bool {
        self.may_be_unbound
        self.bindings.may_be_unbound()
    }
}

/// The default state of a symbol (if we've seen no definitions of it) is unbound.
/// The default state of a symbol, if we've seen no definitions of it, is undefined (that is,
/// both unbound and undeclared).
impl Default for SymbolState {
    fn default() -> Self {
        SymbolState::unbound()
        SymbolState::undefined()
    }
}

/// A single binding (as [`ScopedDefinitionId`]) with an iterator of its applicable
/// [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct BindingIdWithConstraints<'a> {
    pub(super) definition: ScopedDefinitionId,
    pub(super) constraint_ids: ConstraintIdIterator<'a>,
}

#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraintsIterator<'a> {
    definitions: DefinitionsIterator<'a>,
pub(super) struct BindingIdWithConstraintsIterator<'a> {
    definitions: BindingsIterator<'a>,
    constraints: ConstraintsIterator<'a>,
}

impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
    type Item = DefinitionIdWithConstraints<'a>;
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
    type Item = BindingIdWithConstraints<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        match (self.definitions.next(), self.constraints.next()) {
            (None, None) => None,
            (Some(def), Some(constraints)) => Some(DefinitionIdWithConstraints {
            (Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
                definition: ScopedDefinitionId::from_u32(def),
                constraint_ids: ConstraintIdIterator {
                    wrapped: constraints.iter(),
@@ -259,7 +379,7 @@ impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
    }
}

impl std::iter::FusedIterator for DefinitionIdWithConstraintsIterator<'_> {}
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}

#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
@@ -276,99 +396,193 @@ impl Iterator for ConstraintIdIterator<'_> {

impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}

#[derive(Debug)]
pub(super) struct DeclarationIdIterator<'a> {
    inner: DeclarationsIterator<'a>,
}

impl<'a> Iterator for DeclarationIdIterator<'a> {
    type Item = ScopedDefinitionId;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(ScopedDefinitionId::from_u32)
    }
}

impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}

#[cfg(test)]
mod tests {
    use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};

    impl SymbolState {
        pub(crate) fn assert(&self, may_be_unbound: bool, expected: &[&str]) {
            assert_eq!(self.may_be_unbound(), may_be_unbound);
            let actual = self
                .visible_definitions()
                .map(|def_id_with_constraints| {
                    format!(
                        "{}<{}>",
                        def_id_with_constraints.definition.as_u32(),
                        def_id_with_constraints
                            .constraint_ids
                            .map(ScopedConstraintId::as_u32)
                            .map(|idx| idx.to_string())
                            .collect::<Vec<_>>()
                            .join(", ")
                    )
                })
                .collect::<Vec<_>>();
            assert_eq!(actual, expected);
        }
    fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
        assert_eq!(symbol.may_be_unbound(), may_be_unbound);
        let actual = symbol
            .bindings()
            .iter()
            .map(|def_id_with_constraints| {
                format!(
                    "{}<{}>",
                    def_id_with_constraints.definition.as_u32(),
                    def_id_with_constraints
                        .constraint_ids
                        .map(ScopedConstraintId::as_u32)
                        .map(|idx| idx.to_string())
                        .collect::<Vec<_>>()
                        .join(", ")
                )
            })
            .collect::<Vec<_>>();
        assert_eq!(actual, expected);
    }

    pub(crate) fn assert_declarations(
        symbol: &SymbolState,
        may_be_undeclared: bool,
        expected: &[u32],
    ) {
        assert_eq!(symbol.declarations.may_be_undeclared(), may_be_undeclared);
        let actual = symbol
            .declarations()
            .iter()
            .map(ScopedDefinitionId::as_u32)
            .collect::<Vec<_>>();
        assert_eq!(actual, expected);
    }

    #[test]
    fn unbound() {
        let cd = SymbolState::unbound();
        let sym = SymbolState::undefined();

        cd.assert(true, &[]);
        assert_bindings(&sym, true, &[]);
    }

    #[test]
    fn with() {
        let cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        let mut sym = SymbolState::undefined();
        sym.record_binding(ScopedDefinitionId::from_u32(0));

        cd.assert(false, &["0<>"]);
        assert_bindings(&sym, false, &["0<>"]);
    }

    #[test]
    fn add_unbound() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_unbound();
    fn set_may_be_unbound() {
        let mut sym = SymbolState::undefined();
        sym.record_binding(ScopedDefinitionId::from_u32(0));
        sym.set_may_be_unbound();

        cd.assert(true, &["0<>"]);
        assert_bindings(&sym, true, &["0<>"]);
    }

    #[test]
    fn add_constraint() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_constraint(ScopedConstraintId::from_u32(0));
    fn record_constraint() {
        let mut sym = SymbolState::undefined();
        sym.record_binding(ScopedDefinitionId::from_u32(0));
        sym.record_constraint(ScopedConstraintId::from_u32(0));

        cd.assert(false, &["0<0>"]);
        assert_bindings(&sym, false, &["0<0>"]);
    }

    #[test]
    fn merge() {
        // merging the same definition with the same constraint keeps the constraint
        let mut cd0a = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0a.add_constraint(ScopedConstraintId::from_u32(0));
        let mut sym0a = SymbolState::undefined();
        sym0a.record_binding(ScopedDefinitionId::from_u32(0));
        sym0a.record_constraint(ScopedConstraintId::from_u32(0));

        let mut cd0b = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0b.add_constraint(ScopedConstraintId::from_u32(0));
        let mut sym0b = SymbolState::undefined();
        sym0b.record_binding(ScopedDefinitionId::from_u32(0));
        sym0b.record_constraint(ScopedConstraintId::from_u32(0));

        cd0a.merge(cd0b);
        let mut cd0 = cd0a;
        cd0.assert(false, &["0<0>"]);
        sym0a.merge(sym0b);
        let mut sym0 = sym0a;
        assert_bindings(&sym0, false, &["0<0>"]);

        // merging the same definition with differing constraints drops all constraints
        let mut cd1a = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1a.add_constraint(ScopedConstraintId::from_u32(1));
        let mut sym1a = SymbolState::undefined();
        sym1a.record_binding(ScopedDefinitionId::from_u32(1));
        sym1a.record_constraint(ScopedConstraintId::from_u32(1));

        let mut cd1b = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1b.add_constraint(ScopedConstraintId::from_u32(2));
        let mut sym1b = SymbolState::undefined();
        sym1b.record_binding(ScopedDefinitionId::from_u32(1));
        sym1b.record_constraint(ScopedConstraintId::from_u32(2));

        cd1a.merge(cd1b);
        let cd1 = cd1a;
        cd1.assert(false, &["1<>"]);
        sym1a.merge(sym1b);
        let sym1 = sym1a;
        assert_bindings(&sym1, false, &["1<>"]);

        // merging a constrained definition with unbound keeps both
        let mut cd2a = SymbolState::with(ScopedDefinitionId::from_u32(2));
        cd2a.add_constraint(ScopedConstraintId::from_u32(3));
        let mut sym2a = SymbolState::undefined();
        sym2a.record_binding(ScopedDefinitionId::from_u32(2));
        sym2a.record_constraint(ScopedConstraintId::from_u32(3));

        let cd2b = SymbolState::unbound();
        let sym2b = SymbolState::undefined();

        cd2a.merge(cd2b);
        let cd2 = cd2a;
        cd2.assert(true, &["2<3>"]);
        sym2a.merge(sym2b);
        let sym2 = sym2a;
        assert_bindings(&sym2, true, &["2<3>"]);

        // merging different definitions keeps them each with their existing constraints
        cd0.merge(cd2);
        let cd = cd0;
        cd.assert(true, &["0<0>", "2<3>"]);
        sym0.merge(sym2);
        let sym = sym0;
        assert_bindings(&sym, true, &["0<0>", "2<3>"]);
    }

    #[test]
    fn no_declaration() {
        let sym = SymbolState::undefined();

        assert_declarations(&sym, true, &[]);
    }

    #[test]
    fn record_declaration() {
        let mut sym = SymbolState::undefined();
        sym.record_declaration(ScopedDefinitionId::from_u32(1));

        assert_declarations(&sym, false, &[1]);
    }

    #[test]
    fn record_declaration_override() {
        let mut sym = SymbolState::undefined();
        sym.record_declaration(ScopedDefinitionId::from_u32(1));
        sym.record_declaration(ScopedDefinitionId::from_u32(2));

        assert_declarations(&sym, false, &[2]);
    }

    #[test]
    fn record_declaration_merge() {
        let mut sym = SymbolState::undefined();
        sym.record_declaration(ScopedDefinitionId::from_u32(1));

        let mut sym2 = SymbolState::undefined();
        sym2.record_declaration(ScopedDefinitionId::from_u32(2));

        sym.merge(sym2);

        assert_declarations(&sym, false, &[1, 2]);
    }

    #[test]
    fn record_declaration_merge_partial_undeclared() {
        let mut sym = SymbolState::undefined();
        sym.record_declaration(ScopedDefinitionId::from_u32(1));

        let sym2 = SymbolState::undefined();

        sym.merge(sym2);

        assert_declarations(&sym, true, &[1]);
    }

    #[test]
    fn set_may_be_undeclared() {
        let mut sym = SymbolState::undefined();
        sym.record_declaration(ScopedDefinitionId::from_u32(0));
        sym.set_may_be_undeclared();

        assert_declarations(&sym, true, &[0]);
    }
}

@@ -8,7 +8,7 @@ use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::semantic_index;
use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
use crate::types::{binding_ty, global_symbol_ty, infer_scope_types, Type};
use crate::Db;

pub struct SemanticModel<'db> {
@@ -40,7 +40,7 @@ impl<'db> SemanticModel<'db> {
    }

    pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
        global_symbol_ty_by_name(self.db, module.file(), symbol_name)
        global_symbol_ty(self.db, module.file(), symbol_name)
    }
}

@@ -147,24 +147,24 @@ impl HasTy for ast::Expr {
    }
}

macro_rules! impl_definition_has_ty {
macro_rules! impl_binding_has_ty {
    ($ty: ty) => {
        impl HasTy for $ty {
            #[inline]
            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
                let index = semantic_index(model.db, model.file);
                let definition = index.definition(self);
                definition_ty(model.db, definition)
                let binding = index.definition(self);
                binding_ty(model.db, binding)
            }
        }
    };
}

impl_definition_has_ty!(ast::StmtFunctionDef);
impl_definition_has_ty!(ast::StmtClassDef);
impl_definition_has_ty!(ast::Alias);
impl_definition_has_ty!(ast::Parameter);
impl_definition_has_ty!(ast::ParameterWithDefault);
impl_binding_has_ty!(ast::StmtFunctionDef);
impl_binding_has_ty!(ast::StmtClassDef);
impl_binding_has_ty!(ast::Alias);
impl_binding_has_ty!(ast::Parameter);
impl_binding_has_ty!(ast::ParameterWithDefault);

#[cfg(test)]
mod tests {

@@ -192,7 +192,7 @@ impl VirtualEnvironment {
        } else {
            tracing::warn!(
                "Failed to resolve `sys.prefix` of the system Python installation \
                from the `home` value in the `pyvenv.cfg` file at '{}'. \
                from the `home` value in the `pyvenv.cfg` file at `{}`. \
                System site-packages will not be used for module resolution.",
                venv_path.join("pyvenv.cfg")
            );
@@ -426,7 +426,7 @@ impl Deref for SysPrefixPath {

impl fmt::Display for SysPrefixPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "`sys.prefix` path '{}'", self.0)
        write!(f, "`sys.prefix` path `{}`", self.0)
    }
}

@@ -483,7 +483,7 @@ impl Deref for PythonHomePath {

impl fmt::Display for PythonHomePath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "`home` location '{}'", self.0)
        write!(f, "`home` location `{}`", self.0)
    }
}

crates/red_knot_python_semantic/src/stdlib.rs (new file, 87 lines)
@@ -0,0 +1,87 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::types::{global_symbol_ty, Type};
use crate::Db;

/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CoreStdlibModule {
    Builtins,
    Types,
    Typeshed,
    TypingExtensions,
}

impl CoreStdlibModule {
    fn name(self) -> ModuleName {
        let module_name = match self {
            Self::Builtins => "builtins",
            Self::Types => "types",
            Self::Typeshed => "_typeshed",
            Self::TypingExtensions => "typing_extensions",
        };
        ModuleName::new_static(module_name)
            .unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
    }
}

/// Look up the type of `symbol` in a given core module.
///
/// Returns `Unbound` if the given core module cannot be resolved for some reason.
fn core_module_symbol_ty<'db>(
    db: &'db dyn Db,
    core_module: CoreStdlibModule,
    symbol: &str,
) -> Type<'db> {
    resolve_module(db, core_module.name())
        .map(|module| global_symbol_ty(db, module.file(), symbol))
        .unwrap_or(Type::Unbound)
}

/// Look up the type of `symbol` in the builtins namespace.
///
/// Returns `Unbound` if the `builtins` module isn't available for some reason.
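///
/// For example (hypothetical call site): `builtins_symbol_ty(db, "int")` resolves the
/// `builtins` module and looks up `int` in its global scope, yielding `Type::Unbound`
/// only if the module itself cannot be resolved.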
#[inline]
pub(crate) fn builtins_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Builtins, symbol)
}

/// Look up the type of `symbol` in the `types` module namespace.
///
/// Returns `Unbound` if the `types` module isn't available for some reason.
#[inline]
pub(crate) fn types_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Types, symbol)
}

/// Look up the type of `symbol` in the `_typeshed` module namespace.
///
/// Returns `Unbound` if the `_typeshed` module isn't available for some reason.
#[inline]
pub(crate) fn typeshed_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::Typeshed, symbol)
}

/// Look up the type of `symbol` in the `typing_extensions` module namespace.
///
/// Returns `Unbound` if the `typing_extensions` module isn't available for some reason.
#[inline]
pub(crate) fn typing_extensions_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
    core_module_symbol_ty(db, CoreStdlibModule::TypingExtensions, symbol)
}

/// Get the scope of a core stdlib module.
///
/// Can return `None` if a custom typeshed is used that is missing the core module in question.
fn core_module_scope(db: &dyn Db, core_module: CoreStdlibModule) -> Option<ScopeId<'_>> {
    resolve_module(db, core_module.name()).map(|module| global_scope(db, module.file()))
}

/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
    core_module_scope(db, CoreStdlibModule::Builtins)
}

File diff suppressed because it is too large
@@ -27,9 +27,12 @@
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use smallvec::SmallVec;

use super::KnownClass;

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
    elements: Vec<Type<'db>>,
    db: &'db dyn Db,
}

@@ -37,7 +40,7 @@ impl<'db> UnionBuilder<'db> {
    pub(crate) fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            elements: FxOrderSet::default(),
            elements: vec![],
        }
    }

@@ -45,11 +48,59 @@ impl<'db> UnionBuilder<'db> {
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(&union.elements(self.db));
                let new_elements = union.elements(self.db);
                self.elements.reserve(new_elements.len());
                for element in new_elements {
                    self = self.add(*element);
                }
            }
            Type::Never => {}
            _ => {
                self.elements.insert(ty);
                let bool_pair = if let Type::BooleanLiteral(b) = ty {
                    Some(Type::BooleanLiteral(!b))
                } else {
                    None
                };

                let mut to_add = ty;
                let mut to_remove = SmallVec::<[usize; 2]>::new();
                for (index, element) in self.elements.iter().enumerate() {
                    if Some(*element) == bool_pair {
                        to_add = KnownClass::Bool.to_instance(self.db);
                        to_remove.push(index);
                        // The type we are adding is a BooleanLiteral, which doesn't have any
                        // subtypes. And we just found that the union already contained our
                        // mirror-image BooleanLiteral, so it can't also contain bool or any
                        // supertype of bool. Therefore, we are done.
                        break;
                    }
                    if ty.is_subtype_of(self.db, *element) {
                        return self;
                    } else if element.is_subtype_of(self.db, ty) {
                        to_remove.push(index);
                    }
                }

                match to_remove[..] {
                    [] => self.elements.push(to_add),
                    [index] => self.elements[index] = to_add,
                    _ => {
                        let mut current_index = 0;
                        let mut to_remove = to_remove.into_iter();
                        let mut next_to_remove_index = to_remove.next();
                        self.elements.retain(|_| {
                            let retain = if Some(current_index) == next_to_remove_index {
                                next_to_remove_index = to_remove.next();
                                false
                            } else {
                                true
                            };
                            current_index += 1;
                            retain
                        });
                        self.elements.push(to_add);
                    }
                }
            }
        }
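        // Worked example (assuming `Literal[1]` is a subtype of `int` here): adding
        // `Literal[True]` to a union already containing `Literal[False]` collapses the
        // pair to `bool`, while adding `Literal[1]` to a union already containing `int`
        // is a no-op.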
|
||||
|
||||
@@ -60,7 +111,7 @@ impl<'db> UnionBuilder<'db> {
|
||||
match self.elements.len() {
|
||||
0 => Type::Never,
|
||||
1 => self.elements[0],
|
||||
_ => Type::Union(UnionType::new(self.db, self.elements)),
|
||||
_ => Type::Union(UnionType::new(self.db, self.elements.into())),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -143,11 +194,12 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
if self.intersections.len() == 1 {
|
||||
self.intersections.pop().unwrap().build(self.db)
|
||||
} else {
|
||||
let mut builder = UnionBuilder::new(self.db);
|
||||
for inner in self.intersections {
|
||||
builder = builder.add(inner.build(self.db));
|
||||
}
|
||||
builder.build()
|
||||
UnionType::from_elements(
|
||||
self.db,
|
||||
self.intersections
|
||||
.into_iter()
|
||||
.map(|inner| inner.build(self.db)),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
@@ -165,6 +217,7 @@ impl<'db> InnerIntersectionBuilder<'db> {

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO `Any`/`Unknown`/`Todo` actually should not self-cancel
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
@@ -184,7 +237,7 @@ impl<'db> InnerIntersectionBuilder<'db> {

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        // TODO `Any`/`Unknown`/`Todo` actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                let pos = intersection.negative(db);
@@ -245,17 +298,32 @@ impl<'db> InnerIntersectionBuilder<'db> {

#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
    use crate::db::tests::TestDb;
    use crate::program::{Program, SearchPathSettings};
    use crate::python_version::PythonVersion;
    use crate::types::{KnownClass, UnionBuilder};
    use crate::ProgramSettings;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    fn setup_db() -> TestDb {
        TestDb::new()
    }
        let db = TestDb::new();

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().collect()
        }
        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
@@ -263,19 +331,16 @@ mod tests {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };
        let union = UnionType::from_elements(&db, [t0, t1]).expect_union();

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
        assert_eq!(union.elements(&db), &[t0, t1]);
    }

    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).build();

        let ty = UnionType::from_elements(&db, [t0]);
        assert_eq!(ty, t0);
    }

@@ -283,7 +348,6 @@ mod tests {
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();

        assert_eq!(ty, Type::Never);
    }

@@ -291,32 +355,83 @@ mod tests {
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

        let ty = UnionType::from_elements(&db, [t0, Type::Never]);
        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_bool() {
        let db = setup_db();
        let bool_instance_ty = KnownClass::Bool.to_instance(&db);

        let t0 = Type::BooleanLiteral(true);
        let t1 = Type::BooleanLiteral(true);
        let t2 = Type::BooleanLiteral(false);
        let t3 = Type::IntLiteral(17);

        let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
        assert_eq!(union.elements(&db), &[t0, t3]);

        let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
        assert_eq!(union.elements(&db), &[bool_instance_ty, t3]);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };
        let u1 = UnionType::from_elements(&db, [t0, t1]);
        let union = UnionType::from_elements(&db, [u1, t2]).expect_union();

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
        assert_eq!(union.elements(&db), &[t0, t1, t2]);
    }

    #[test]
    fn build_union_simplify_subtype() {
        let db = setup_db();
        let t0 = KnownClass::Str.to_instance(&db);
        let t1 = Type::LiteralString;
        let u0 = UnionType::from_elements(&db, [t0, t1]);
        let u1 = UnionType::from_elements(&db, [t1, t0]);

        assert_eq!(u0, t0);
        assert_eq!(u1, t0);
    }

    #[test]
    fn build_union_no_simplify_unknown() {
        let db = setup_db();
        let t0 = KnownClass::Str.to_instance(&db);
        let t1 = Type::Unknown;
        let u0 = UnionType::from_elements(&db, [t0, t1]);
        let u1 = UnionType::from_elements(&db, [t1, t0]);

        assert_eq!(u0.expect_union().elements(&db), &[t0, t1]);
        assert_eq!(u1.expect_union().elements(&db), &[t1, t0]);
    }

    #[test]
    fn build_union_subsume_multiple() {
        let db = setup_db();
        let str_ty = KnownClass::Str.to_instance(&db);
        let int_ty = KnownClass::Int.to_instance(&db);
        let object_ty = KnownClass::Object.to_instance(&db);
        let unknown_ty = Type::Unknown;

        let u0 = UnionType::from_elements(&db, [str_ty, unknown_ty, int_ty, object_ty]);

        assert_eq!(u0.expect_union().elements(&db), &[unknown_ty, object_ty]);
    }

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().collect()
            self.positive(db).into_iter().copied().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().collect()
            self.negative(db).into_iter().copied().collect()
        }
    }

@@ -325,16 +440,14 @@ mod tests {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
        else {
            panic!("expected to be an intersection");
        };
            .expect_intersection();

        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
        assert_eq!(intersection.pos_vec(&db), &[ta]);
        assert_eq!(intersection.neg_vec(&db), &[t0]);
    }

    #[test]
@@ -347,16 +460,14 @@ mod tests {
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };
            .expect_intersection();

        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
        assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
        assert_eq!(intersection.neg_vec(&db), &[t1]);
    }

    #[test]
@@ -369,16 +480,14 @@ mod tests {
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
        let intersection = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };
            .expect_intersection();

        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
        assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
        assert_eq!(intersection.neg_vec(&db), &[ta]);
    }

    #[test]
@@ -387,16 +496,14 @@ mod tests {
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let u0 = UnionType::from_elements(&db, [t0, t1]);

        let Type::Union(union) = IntersectionBuilder::new(&db)
        let union = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            .expect_union();
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);

@@ -1,14 +1,22 @@
//! Display implementations for types.

use std::fmt::{Display, Formatter};
use std::fmt::{self, Display, Formatter};

use ruff_db::display::FormatterJoinExtension;
use ruff_python_ast::str::Quote;
use ruff_python_literal::escape::AsciiEscape;

use crate::types::{IntersectionType, Type, UnionType};
use crate::Db;
use rustc_hash::FxHashMap;

impl<'db> Type<'db> {
    pub fn display(&'db self, db: &'db dyn Db) -> DisplayType<'db> {
    pub fn display(&self, db: &'db dyn Db) -> DisplayType {
        DisplayType { ty: self, db }
    }
    fn representation(self, db: &'db dyn Db) -> DisplayRepresentation<'db> {
        DisplayRepresentation { db, ty: self }
    }
}

#[derive(Copy, Clone)]
@@ -18,36 +26,84 @@ pub struct DisplayType<'db> {
}

impl Display for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let representation = self.ty.representation(self.db);
        if matches!(
            self.ty,
            Type::IntLiteral(_)
                | Type::BooleanLiteral(_)
                | Type::StringLiteral(_)
                | Type::BytesLiteral(_)
                | Type::Class(_)
                | Type::Function(_)
        ) {
            write!(f, "Literal[{representation}]")
        } else {
            representation.fmt(f)
        }
    }
}

impl fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

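The `Debug` impl above simply forwards to `Display`, so `{:?}` (as used in assertion failures) and `{}` render the same text. A self-contained illustration of that pattern:

```rust
use std::fmt;

struct Ty(&'static str);

impl fmt::Display for Ty {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.0)
    }
}

// Debug delegates to Display: one rendering, two format traits.
impl fmt::Debug for Ty {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

fn main() {
    assert_eq!(format!("{:?}", Ty("int")), format!("{}", Ty("int")));
}
```
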
/// Writes the string representation of a type, which is the value displayed either as
/// `Literal[<repr>]` or `Literal[<repr1>, <repr2>]` for literal types or as `<repr>` for
/// non-literals.
struct DisplayRepresentation<'db> {
    ty: Type<'db>,
    db: &'db dyn Db,
}

impl Display for DisplayRepresentation<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
            Type::Never => f.write_str("Never"),
            Type::Unknown => f.write_str("Unknown"),
            Type::Unbound => f.write_str("Unbound"),
            Type::None => f.write_str("None"),
            // `[Type::Todo]`'s display should be explicit that it is not a valid display of
            // any other type
            Type::Todo => f.write_str("@Todo"),
            Type::Module(file) => {
                write!(f, "<module '{:?}'>", file.path(self.db))
            }
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
            Type::Instance(class) => f.write_str(&class.name(self.db)),
            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
            Type::Class(class) => f.write_str(class.name(self.db)),
            Type::Instance(class) => f.write_str(class.name(self.db)),
            Type::Function(function) => f.write_str(function.name(self.db)),
            Type::Union(union) => union.display(self.db).fmt(f),
            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
            Type::BooleanLiteral(boolean) => {
                write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
            Type::IntLiteral(n) => n.fmt(f),
            Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
            Type::StringLiteral(string) => {
                write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
            }
            Type::LiteralString => f.write_str("LiteralString"),
            Type::BytesLiteral(bytes) => {
                let escape =
                    AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);

                escape.bytes_repr().write(f)
            }
            Type::Tuple(tuple) => {
                f.write_str("tuple[")?;
                let elements = tuple.elements(self.db);
                if elements.is_empty() {
                    f.write_str("()")?;
                } else {
                    elements.display(self.db).fmt(f)?;
                }
                f.write_str("]")
            }
        }
    }
}

impl std::fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl<'db> UnionType<'db> {
    fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
        DisplayUnionType { db, ty: self }
@@ -60,54 +116,90 @@ struct DisplayUnionType<'db> {
}

impl Display for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let union = self.ty;
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let elements = self.ty.elements(self.db);

        let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
            .elements(self.db)
            .iter()
            .copied()
            .partition(|ty| matches!(ty, Type::IntLiteral(_)));
        // Group literal types by kind.
        let mut grouped_literals = FxHashMap::default();

        let mut first = true;
        if !int_literals.is_empty() {
            f.write_str("Literal[")?;
            let mut nums: Vec<_> = int_literals
                .into_iter()
                .filter_map(|ty| {
                    if let Type::IntLiteral(n) = ty {
                        Some(n)
                    } else {
                        None
                    }
                })
                .collect();
            nums.sort_unstable();
            for num in nums {
                if !first {
                    f.write_str(", ")?;
                }
                write!(f, "{num}")?;
                first = false;
        for element in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
                grouped_literals
                    .entry(literal_kind)
                    .or_insert_with(Vec::new)
                    .push(*element);
            }
            f.write_str("]")?;
        }

        for ty in other_types {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(self.db))?;
        let mut join = f.join(" | ");

        for element in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
                let Some(mut literals) = grouped_literals.remove(&literal_kind) else {
                    continue;
                };
                if literal_kind == LiteralTypeKind::IntLiteral {
                    literals.sort_unstable_by_key(|ty| ty.expect_int_literal());
                }
                join.entry(&DisplayLiteralGroup {
                    literals,
                    db: self.db,
                });
            } else {
                join.entry(&element.display(self.db));
            }
        }

        join.finish()?;

        debug_assert!(grouped_literals.is_empty());

        Ok(())
    }
}

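The two-pass scheme above buckets literal elements by kind in a first pass, then emits each group at the position of its first member, so overall element order is preserved while same-kind literals condense into one `Literal[...]`. A std-only sketch of the strategy (the real code uses `FxHashMap` and the `join` formatter extension):

```rust
use std::collections::HashMap;

// Toy model: "kind" is the first character; same-kind items are grouped at
// the position where the first of them appeared.
fn render(elements: &[&str]) -> String {
    let kind = |s: &str| s.chars().next().unwrap();

    // Pass 1: bucket by kind.
    let mut groups: HashMap<char, Vec<&str>> = HashMap::new();
    for e in elements {
        groups.entry(kind(e)).or_default().push(e);
    }

    // Pass 2: emit each group once, at its first member's position.
    let mut out = Vec::new();
    for e in elements {
        if let Some(group) = groups.remove(&kind(e)) {
            out.push(format!("[{}]", group.join(", ")));
        }
    }
    out.join(" | ")
}

fn main() {
    assert_eq!(render(&["a1", "b1", "a2"]), "[a1, a2] | [b1]");
}
```
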
impl std::fmt::Debug for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

struct DisplayLiteralGroup<'db> {
    literals: Vec<Type<'db>>,
    db: &'db dyn Db,
}

impl Display for DisplayLiteralGroup<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str("Literal[")?;
        f.join(", ")
            .entries(self.literals.iter().map(|ty| ty.representation(self.db)))
            .finish()?;
        f.write_str("]")
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
enum LiteralTypeKind {
    Class,
    Function,
    IntLiteral,
    StringLiteral,
    BytesLiteral,
}

impl TryFrom<Type<'_>> for LiteralTypeKind {
    type Error = ();

    fn try_from(value: Type<'_>) -> Result<Self, Self::Error> {
        match value {
            Type::Class(_) => Ok(Self::Class),
            Type::Function(_) => Ok(Self::Function),
            Type::IntLiteral(_) => Ok(Self::IntLiteral),
            Type::StringLiteral(_) => Ok(Self::StringLiteral),
            Type::BytesLiteral(_) => Ok(Self::BytesLiteral),
            _ => Err(()),
        }
    }
}

@@ -123,30 +215,159 @@ struct DisplayIntersectionType<'db> {
}

impl Display for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut first = true;
        for (neg, ty) in self
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let tys = self
            .ty
            .positive(self.db)
            .iter()
            .map(|ty| (false, ty))
            .chain(self.ty.negative(self.db).iter().map(|ty| (true, ty)))
        {
            if !first {
                f.write_str(" & ")?;
            };
            first = false;
            if neg {
                f.write_str("~")?;
            };
            write!(f, "{}", ty.display(self.db))?;
        }
        Ok(())
            .map(|&ty| DisplayMaybeNegatedType {
                ty,
                db: self.db,
                negated: false,
            })
            .chain(
                self.ty
                    .negative(self.db)
                    .iter()
                    .map(|&ty| DisplayMaybeNegatedType {
                        ty,
                        db: self.db,
                        negated: true,
                    }),
            );
        f.join(" & ").entries(tys).finish()
    }
}

impl std::fmt::Debug for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
impl fmt::Debug for DisplayIntersectionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}

struct DisplayMaybeNegatedType<'db> {
    ty: Type<'db>,
    db: &'db dyn Db,
    negated: bool,
}

impl<'db> Display for DisplayMaybeNegatedType<'db> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        if self.negated {
            f.write_str("~")?;
        }
        self.ty.display(self.db).fmt(f)
    }
}

pub(crate) trait TypeArrayDisplay<'db> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray;
}

impl<'db> TypeArrayDisplay<'db> for Box<[Type<'db>]> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
        DisplayTypeArray { types: self, db }
    }
}

impl<'db> TypeArrayDisplay<'db> for Vec<Type<'db>> {
    fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
        DisplayTypeArray { types: self, db }
    }
}

pub(crate) struct DisplayTypeArray<'b, 'db> {
    types: &'b [Type<'db>],
    db: &'db dyn Db,
}

impl<'db> Display for DisplayTypeArray<'_, 'db> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.join(", ")
            .entries(self.types.iter().map(|ty| ty.display(self.db)))
            .finish()
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::types::{global_symbol_ty, BytesLiteralType, StringLiteralType, Type, UnionType};
    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};

    fn setup_db() -> TestDb {
        let db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
    fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "src/main.py",
            "
            def foo(x: int) -> int:
                return x + 1

            def bar(s: str) -> str:
                return s

            class A: ...
            class B: ...
            ",
        )?;
        let mod_file = system_path_to_file(&db, "src/main.py").expect("file to exist");

        let union_elements = &[
            Type::Unknown,
            Type::IntLiteral(-1),
            global_symbol_ty(&db, mod_file, "A"),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("A"))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([0]))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([7]))),
            Type::IntLiteral(0),
            Type::IntLiteral(1),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("B"))),
            global_symbol_ty(&db, mod_file, "foo"),
            global_symbol_ty(&db, mod_file, "bar"),
            global_symbol_ty(&db, mod_file, "B"),
            Type::BooleanLiteral(true),
            Type::None,
        ];
        let union = UnionType::from_elements(&db, union_elements).expect_union();
        let display = format!("{}", union.display(&db));
        assert_eq!(
            display,
            concat!(
                "Unknown | ",
                "Literal[-1, 0, 1] | ",
                "Literal[A, B] | ",
                "Literal[\"A\", \"B\"] | ",
                "Literal[b\"\\x00\", b\"\\x07\"] | ",
                "Literal[foo, bar] | ",
                "Literal[True] | ",
                "None"
            )
        );
        Ok(())
    }
}

(File diff suppressed because it is too large.)
@@ -1,9 +1,10 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::constraint::{Constraint, PatternConstraint};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type, TypeInference};
use crate::types::{infer_expression_types, IntersectionBuilder, Type};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
@@ -27,62 +28,114 @@ use std::sync::Arc;
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
    constraint: Constraint<'db>,
    definition: Definition<'db>,
) -> Option<Type<'db>> {
    all_narrowing_constraints(db, test)
        .get(&definition.symbol(db))
        .copied()
    match constraint {
        Constraint::Expression(expression) => {
            all_narrowing_constraints_for_expression(db, expression)
                .get(&definition.symbol(db))
                .copied()
        }
        Constraint::Pattern(pattern) => all_narrowing_constraints_for_pattern(db, pattern)
            .get(&definition.symbol(db))
            .copied(),
    }
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints<'db>(
fn all_narrowing_constraints_for_pattern<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
    pattern: PatternConstraint<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, test).finish()
    NarrowingConstraintsBuilder::new(db, Constraint::Pattern(pattern)).finish()
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_expression<'db>(
    db: &'db dyn Db,
    expression: Expression<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, Constraint::Expression(expression)).finish()
}

type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;

struct NarrowingConstraintsBuilder<'db> {
    db: &'db dyn Db,
    expression: Expression<'db>,
    constraint: Constraint<'db>,
    constraints: NarrowingConstraints<'db>,
}

impl<'db> NarrowingConstraintsBuilder<'db> {
    fn new(db: &'db dyn Db, expression: Expression<'db>) -> Self {
    fn new(db: &'db dyn Db, constraint: Constraint<'db>) -> Self {
        Self {
            db,
            expression,
            constraint,
            constraints: NarrowingConstraints::default(),
        }
    }

    fn finish(mut self) -> NarrowingConstraints<'db> {
        if let ast::Expr::Compare(expr_compare) = self.expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare);
        match self.constraint {
            Constraint::Expression(expression) => self.evaluate_expression_constraint(expression),
            Constraint::Pattern(pattern) => self.evaluate_pattern_constraint(pattern),
        }
        // TODO other test expression kinds

        self.constraints.shrink_to_fit();
        self.constraints
    }

    fn evaluate_expression_constraint(&mut self, expression: Expression<'db>) {
        if let ast::Expr::Compare(expr_compare) = expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare, expression);
        }
        // TODO other test expression kinds
    }

    fn evaluate_pattern_constraint(&mut self, pattern: PatternConstraint<'db>) {
        let subject = pattern.subject(self.db);

        match pattern.pattern(self.db).node() {
            ast::Pattern::MatchValue(_) => {
                // TODO
            }
            ast::Pattern::MatchSingleton(singleton_pattern) => {
                self.add_match_pattern_singleton(subject, singleton_pattern);
            }
            ast::Pattern::MatchSequence(_) => {
                // TODO
            }
            ast::Pattern::MatchMapping(_) => {
                // TODO
            }
            ast::Pattern::MatchClass(_) => {
                // TODO
            }
            ast::Pattern::MatchStar(_) => {
                // TODO
            }
            ast::Pattern::MatchAs(_) => {
                // TODO
            }
            ast::Pattern::MatchOr(_) => {
                // TODO
            }
        }
    }

    fn symbols(&self) -> Arc<SymbolTable> {
        symbol_table(self.db, self.scope())
    }

    fn scope(&self) -> ScopeId<'db> {
        self.expression.scope(self.db)
        match self.constraint {
            Constraint::Expression(expression) => expression.scope(self.db),
            Constraint::Pattern(pattern) => pattern.scope(self.db),
        }
    }

    fn inference(&self) -> &'db TypeInference<'db> {
        infer_expression_types(self.db, self.expression)
    }

    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare) {
    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare, expression: Expression<'db>) {
        let ast::ExprCompare {
            range: _,
            left,
@@ -99,17 +152,46 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();
            let scope = self.scope();
            let inference = self.inference();
            let inference = infer_expression_types(self.db, expression);
            for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
                let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
                if matches!(op, ast::CmpOp::IsNot) {
                    let ty = IntersectionBuilder::new(self.db)
                        .add_negative(comp_ty)
                        .build();
                    self.constraints.insert(symbol, ty);
                };
                // TODO other comparison types
                match op {
                    ast::CmpOp::IsNot => {
                        if comp_ty.is_singleton(self.db) {
                            let ty = IntersectionBuilder::new(self.db)
                                .add_negative(comp_ty)
                                .build();
                            self.constraints.insert(symbol, ty);
                        } else {
                            // Non-singletons cannot be safely narrowed using `is not`
                        }
                    }
                    ast::CmpOp::Is => {
                        self.constraints.insert(symbol, comp_ty);
                    }
                    _ => {
                        // TODO other comparison types
                    }
                }
            }
        }
    }

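The `is not` arm only narrows when the compared type is a singleton: identity tests are reliable type evidence only for types with exactly one inhabitant (`None`, `True`, `False`), while `x is not 1` proves nothing about `x: int`. A toy model of the rule (the types here are invented for illustration):

```rust
#[derive(Clone, Copy, PartialEq)]
enum Ty {
    None_,
    Bool(bool),
    Int,
}

// Singleton types have exactly one runtime inhabitant.
fn is_singleton(ty: Ty) -> bool {
    matches!(ty, Ty::None_ | Ty::Bool(_))
}

// Under `x is not C`, subtract `C` from `x`'s declared union only when `C`
// is a singleton; otherwise leave the type unchanged.
fn narrow_is_not(declared: Vec<Ty>, compared: Ty) -> Vec<Ty> {
    if is_singleton(compared) {
        declared.into_iter().filter(|t| *t != compared).collect()
    } else {
        declared
    }
}

fn main() {
    // `x: int | None` narrowed by `x is not None` becomes `int`...
    assert_eq!(narrow_is_not(vec![Ty::Int, Ty::None_], Ty::None_).len(), 1);
    // ...but `x is not 1` leaves `int | None` alone.
    assert_eq!(narrow_is_not(vec![Ty::Int, Ty::None_], Ty::Int).len(), 2);
}
```
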
    fn add_match_pattern_singleton(
        &mut self,
        subject: &ast::Expr,
        pattern: &ast::PatternMatchSingleton,
    ) {
        if let Some(ast::ExprName { id, .. }) = subject.as_name_expr() {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();

            let ty = match pattern.value {
                ast::Singleton::None => Type::None,
                ast::Singleton::True => Type::BooleanLiteral(true),
                ast::Singleton::False => Type::BooleanLiteral(false),
            };
            self.constraints.insert(symbol, ty);
        }
    }
}

crates/red_knot_python_semantic/tests/mdtest.rs: new file (14 lines)
@@ -0,0 +1,14 @@
use red_knot_test::run;
use std::path::PathBuf;

/// See `crates/red_knot_test/README.md` for documentation on these tests.
#[rstest::rstest]
fn mdtest(#[files("resources/mdtest/**/*.md")] path: PathBuf) {
    let crate_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("resources")
        .join("mdtest")
        .canonicalize()
        .unwrap();
    let title = path.strip_prefix(crate_dir).unwrap();
    run(&path, title.as_os_str().to_str().unwrap());
}
@@ -1 +0,0 @@
1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f
@@ -1,100 +0,0 @@
import sys
from _typeshed import StrPath
from collections.abc import Mapping

LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int

def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...

if sys.version_info >= (3, 11):
    def getencoding() -> str: ...

def strcoll(os1: str, os2: str, /) -> int: ...
def strxfrm(string: str, /) -> str: ...

# native gettext functions
# https://docs.python.org/3/library/locale.html#access-to-message-catalogs
# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626
if sys.platform != "win32":
    LC_MESSAGES: int

    ABDAY_1: int
    ABDAY_2: int
    ABDAY_3: int
    ABDAY_4: int
    ABDAY_5: int
    ABDAY_6: int
    ABDAY_7: int

    ABMON_1: int
    ABMON_2: int
    ABMON_3: int
    ABMON_4: int
    ABMON_5: int
    ABMON_6: int
    ABMON_7: int
    ABMON_8: int
    ABMON_9: int
    ABMON_10: int
    ABMON_11: int
    ABMON_12: int

    DAY_1: int
    DAY_2: int
    DAY_3: int
    DAY_4: int
    DAY_5: int
    DAY_6: int
    DAY_7: int

    ERA: int
    ERA_D_T_FMT: int
    ERA_D_FMT: int
    ERA_T_FMT: int

    MON_1: int
    MON_2: int
    MON_3: int
    MON_4: int
    MON_5: int
    MON_6: int
    MON_7: int
    MON_8: int
    MON_9: int
    MON_10: int
    MON_11: int
    MON_12: int

    CODESET: int
    D_T_FMT: int
    D_FMT: int
    T_FMT: int
    T_FMT_AMPM: int
    AM_STR: int
    PM_STR: int

    RADIXCHAR: int
    THOUSEP: int
    YESEXPR: int
    NOEXPR: int
    CRNCYSTR: int
    ALT_DIGITS: int

    def nl_langinfo(key: int, /) -> str: ...

    # This is dependent on `libintl.h` which is a part of `gettext`
    # system dependency. These functions might be missing.
    # But, we always say that they are present.
    def gettext(msg: str, /) -> str: ...
    def dgettext(domain: str | None, msg: str, /) -> str: ...
    def dcgettext(domain: str | None, msg: str, category: int, /) -> str: ...
    def textdomain(domain: str | None, /) -> str: ...
    def bindtextdomain(domain: str, dir: StrPath | None, /) -> str: ...
    def bind_textdomain_codeset(domain: str, codeset: str | None, /) -> str | None: ...
@@ -1 +0,0 @@
DEBUG: bool | None
@@ -1,46 +0,0 @@
import os
import sys
from collections.abc import Iterator
from contextlib import AbstractContextManager
from pathlib import Path
from types import ModuleType
from typing import Any, BinaryIO, TextIO
from typing_extensions import TypeAlias

if sys.version_info >= (3, 9):
    from importlib.abc import Traversable

__all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"]

if sys.version_info >= (3, 9):
    __all__ += ["as_file", "files"]

if sys.version_info >= (3, 10):
    __all__ += ["ResourceReader"]

Package: TypeAlias = str | ModuleType

if sys.version_info >= (3, 11):
    Resource: TypeAlias = str
else:
    Resource: TypeAlias = str | os.PathLike[Any]

def open_binary(package: Package, resource: Resource) -> BinaryIO: ...
def open_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> TextIO: ...
def read_binary(package: Package, resource: Resource) -> bytes: ...
def read_text(package: Package, resource: Resource, encoding: str = "utf-8", errors: str = "strict") -> str: ...
def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ...
def is_resource(package: Package, name: str) -> bool: ...
def contents(package: Package) -> Iterator[str]: ...

if sys.version_info >= (3, 9):
    def as_file(path: Traversable) -> AbstractContextManager[Path]: ...

if sys.version_info >= (3, 12):
    def files(anchor: Package | None = ...) -> Traversable: ...

elif sys.version_info >= (3, 9):
    def files(package: Package) -> Traversable: ...

if sys.version_info >= (3, 10):
    from importlib.abc import ResourceReader as ResourceReader
@@ -1,49 +0,0 @@
import sys

codes: dict[str, int]
messages: dict[int, str]

XML_ERROR_ABORTED: str
XML_ERROR_ASYNC_ENTITY: str
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str
XML_ERROR_BAD_CHAR_REF: str
XML_ERROR_BINARY_ENTITY_REF: str
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str
XML_ERROR_DUPLICATE_ATTRIBUTE: str
XML_ERROR_ENTITY_DECLARED_IN_PE: str
XML_ERROR_EXTERNAL_ENTITY_HANDLING: str
XML_ERROR_FEATURE_REQUIRES_XML_DTD: str
XML_ERROR_FINISHED: str
XML_ERROR_INCOMPLETE_PE: str
XML_ERROR_INCORRECT_ENCODING: str
XML_ERROR_INVALID_TOKEN: str
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str
XML_ERROR_MISPLACED_XML_PI: str
XML_ERROR_NOT_STANDALONE: str
XML_ERROR_NOT_SUSPENDED: str
XML_ERROR_NO_ELEMENTS: str
XML_ERROR_NO_MEMORY: str
XML_ERROR_PARAM_ENTITY_REF: str
XML_ERROR_PARTIAL_CHAR: str
XML_ERROR_PUBLICID: str
XML_ERROR_RECURSIVE_ENTITY_REF: str
XML_ERROR_SUSPENDED: str
XML_ERROR_SUSPEND_PE: str
XML_ERROR_SYNTAX: str
XML_ERROR_TAG_MISMATCH: str
XML_ERROR_TEXT_DECL: str
XML_ERROR_UNBOUND_PREFIX: str
XML_ERROR_UNCLOSED_CDATA_SECTION: str
XML_ERROR_UNCLOSED_TOKEN: str
XML_ERROR_UNDECLARING_PREFIX: str
XML_ERROR_UNDEFINED_ENTITY: str
XML_ERROR_UNEXPECTED_STATE: str
XML_ERROR_UNKNOWN_ENCODING: str
XML_ERROR_XML_DECL: str
if sys.version_info >= (3, 11):
    XML_ERROR_RESERVED_PREFIX_XML: str
    XML_ERROR_RESERVED_PREFIX_XMLNS: str
    XML_ERROR_RESERVED_NAMESPACE_URI: str
    XML_ERROR_INVALID_ARGUMENT: str
    XML_ERROR_NO_BUFFER: str
    XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str
@@ -1,11 +0,0 @@
XML_CTYPE_ANY: int
XML_CTYPE_CHOICE: int
XML_CTYPE_EMPTY: int
XML_CTYPE_MIXED: int
XML_CTYPE_NAME: int
XML_CTYPE_SEQ: int

XML_CQUANT_NONE: int
XML_CQUANT_OPT: int
XML_CQUANT_PLUS: int
XML_CQUANT_REP: int
@@ -6,7 +6,8 @@ use std::panic::PanicInfo;
use lsp_server::Message;
use lsp_types::{
    ClientCapabilities, DiagnosticOptions, DiagnosticServerCapabilities, MessageType,
    ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncOptions, Url,
    ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
    Url,
};

use self::connection::{Connection, ConnectionInitializer};
@@ -99,6 +100,11 @@ impl Server {
            anyhow::anyhow!("Failed to get the current working directory while creating a default workspace.")
        })?;

        if workspaces.len() > 1 {
            // TODO(dhruvmanila): Support multi-root workspaces
            anyhow::bail!("Multi-root workspaces are not supported yet");
        }

        Ok(Self {
            connection,
            worker_threads,
@@ -215,6 +221,7 @@ impl Server {
            text_document_sync: Some(TextDocumentSyncCapability::Options(
                TextDocumentSyncOptions {
                    open_close: Some(true),
                    change: Some(TextDocumentSyncKind::INCREMENTAL),
                    ..Default::default()
                },
            )),

@@ -1,13 +1,15 @@
use crate::{server::schedule::Task, session::Session, system::url_to_system_path};
use lsp_server as server;

use crate::server::schedule::Task;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};

mod diagnostics;
mod notifications;
mod requests;
mod traits;

use notifications as notification;
use red_knot_workspace::db::RootDatabase;
use requests as request;

use self::traits::{NotificationHandler, RequestHandler};
@@ -43,6 +45,7 @@ pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
    match notif.method.as_str() {
        notification::DidCloseTextDocumentHandler::METHOD => local_notification_task::<notification::DidCloseTextDocumentHandler>(notif),
        notification::DidOpenTextDocumentHandler::METHOD => local_notification_task::<notification::DidOpenTextDocumentHandler>(notif),
        notification::DidChangeTextDocumentHandler::METHOD => local_notification_task::<notification::DidChangeTextDocumentHandler>(notif),
        notification::DidOpenNotebookHandler::METHOD => {
            local_notification_task::<notification::DidOpenNotebookHandler>(notif)
        }
@@ -82,12 +85,18 @@ fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
    Ok(Task::background(schedule, move |session: &Session| {
        let url = R::document_url(&params).into_owned();

        let Ok(path) = url_to_system_path(&url) else {
        let Ok(path) = url_to_any_system_path(&url) else {
            return Box::new(|_, _| {});
        };
        let db = session
            .workspace_db_for_path(path.as_std_path())
            .map(RootDatabase::snapshot);
        let db = match path {
            AnySystemPath::System(path) => {
                match session.workspace_db_for_path(path.as_std_path()) {
                    Some(db) => db.snapshot(),
                    None => session.default_workspace_db().snapshot(),
                }
            }
            AnySystemPath::SystemVirtual(_) => session.default_workspace_db().snapshot(),
        };

        let Some(snapshot) = session.take_snapshot(url) else {
            return Box::new(|_, _| {});

@@ -1,9 +1,11 @@
mod did_change;
mod did_close;
mod did_close_notebook;
mod did_open;
mod did_open_notebook;
mod set_trace;

pub(super) use did_change::DidChangeTextDocumentHandler;
pub(super) use did_close::DidCloseTextDocumentHandler;
pub(super) use did_close_notebook::DidCloseNotebookHandler;
pub(super) use did_open::DidOpenTextDocumentHandler;

@@ -0,0 +1,55 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidChangeTextDocument;
use lsp_types::DidChangeTextDocumentParams;

use red_knot_workspace::watch::ChangeEvent;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};

pub(crate) struct DidChangeTextDocumentHandler;

impl NotificationHandler for DidChangeTextDocumentHandler {
    type NotificationType = DidChangeTextDocument;
}

impl SyncNotificationHandler for DidChangeTextDocumentHandler {
    fn run(
        session: &mut Session,
        _notifier: Notifier,
        _requester: &mut Requester,
        params: DidChangeTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
            return Ok(());
        };

        let key = session.key_from_url(params.text_document.uri);

        session
            .update_text_document(&key, params.content_changes, params.text_document.version)
            .with_failure_code(ErrorCode::InternalError)?;

        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::file_content_changed(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.apply_changes(vec![ChangeEvent::ChangedVirtual(virtual_path)], None);
            }
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

        Ok(())
    }
}
@@ -1,8 +1,7 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidCloseTextDocument;
use lsp_types::DidCloseTextDocumentParams;

use ruff_db::files::File;
use red_knot_workspace::watch::ChangeEvent;

use crate::server::api::diagnostics::clear_diagnostics;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -10,7 +9,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};

pub(crate) struct DidCloseTextDocumentHandler;

@@ -25,7 +24,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
        _requester: &mut Requester,
        params: DidCloseTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_system_path(&params.text_document.uri) else {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
            return Ok(());
        };

@@ -34,8 +33,9 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
            .close_document(&key)
            .with_failure_code(ErrorCode::InternalError)?;

        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            File::sync_path(db, &path);
        if let AnySystemPath::SystemVirtual(virtual_path) = path {
            let db = session.default_workspace_db_mut();
            db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
        }

        clear_diagnostics(key.url(), &notifier)?;

@@ -1,14 +1,14 @@
use lsp_types::notification::DidCloseNotebookDocument;
use lsp_types::DidCloseNotebookDocumentParams;

use ruff_db::files::File;
use red_knot_workspace::watch::ChangeEvent;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};

pub(crate) struct DidCloseNotebookHandler;

@@ -23,7 +23,7 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
        _requester: &mut Requester,
        params: DidCloseNotebookDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
        let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
            return Ok(());
        };

@@ -32,8 +32,9 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
            .close_document(&key)
            .with_failure_code(lsp_server::ErrorCode::InternalError)?;

        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            File::sync_path(db, &path);
        if let AnySystemPath::SystemVirtual(virtual_path) = path {
            let db = session.default_workspace_db_mut();
            db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
        }

        Ok(())

@@ -1,13 +1,14 @@
use lsp_types::notification::DidOpenTextDocument;
use lsp_types::DidOpenTextDocumentParams;

use ruff_db::files::system_path_to_file;
use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::TextDocument;

pub(crate) struct DidOpenTextDocumentHandler;
@@ -23,17 +24,25 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler {
        _requester: &mut Requester,
        params: DidOpenTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_system_path(&params.text_document.uri) else {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
            return Ok(());
        };

        let document = TextDocument::new(params.text_document.text, params.text_document.version);
        session.open_text_document(params.text_document.uri, document);

        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            // TODO(dhruvmanila): Store the `file` in `DocumentController`
            let file = system_path_to_file(db, &path).unwrap();
            file.sync(db);
        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::Opened(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.files().virtual_file(db, &virtual_path);
            }
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

@@ -2,7 +2,8 @@ use lsp_server::ErrorCode;
use lsp_types::notification::DidOpenNotebookDocument;
use lsp_types::DidOpenNotebookDocumentParams;

use ruff_db::files::system_path_to_file;
use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;

use crate::edit::NotebookDocument;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -10,7 +11,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::url_to_system_path;
use crate::system::{url_to_any_system_path, AnySystemPath};

pub(crate) struct DidOpenNotebookHandler;

@@ -25,7 +26,7 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
        _requester: &mut Requester,
        params: DidOpenNotebookDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
        let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
            return Ok(());
        };

@@ -38,10 +39,18 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
            .with_failure_code(ErrorCode::InternalError)?;
        session.open_notebook_document(params.notebook_document.uri.clone(), notebook);

        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            // TODO(dhruvmanila): Store the `file` in `DocumentController`
            let file = system_path_to_file(db, &path).unwrap();
            file.sync(db);
        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::Opened(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.files().virtual_file(db, &virtual_path);
            }
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

@@ -26,13 +26,11 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {

    fn run_with_snapshot(
        snapshot: DocumentSnapshot,
        db: Option<RootDatabase>,
        db: RootDatabase,
        _notifier: Notifier,
        _params: DocumentDiagnosticParams,
    ) -> Result<DocumentDiagnosticReportResult> {
        let diagnostics = db
            .map(|db| compute_diagnostics(&snapshot, &db))
            .unwrap_or_default();
        let diagnostics = compute_diagnostics(&snapshot, &db);

        Ok(DocumentDiagnosticReportResult::Report(
            DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
@@ -48,10 +46,19 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {

fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Diagnostic> {
    let Some(file) = snapshot.file(db) else {
        tracing::info!(
            "No file found for snapshot for `{}`",
            snapshot.query().file_url()
        );
        return vec![];
    };
    let Ok(diagnostics) = db.check_file(file) else {
        return vec![];

    let diagnostics = match db.check_file(file) {
        Ok(diagnostics) => diagnostics,
        Err(cancelled) => {
            tracing::info!("Diagnostics computation {cancelled}");
            return vec![];
        }
    };

    diagnostics
@@ -65,12 +72,12 @@ fn to_lsp_diagnostic(message: &str) -> Diagnostic {
    let words = message.split(':').collect::<Vec<_>>();

    let (range, message) = match words.as_slice() {
        [_filename, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default();
        [_, _, line, column, message] | [_, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default().saturating_sub(1);
            let column = column.parse::<u32>().unwrap_or_default();
            (
                Range::new(
                    Position::new(line.saturating_sub(1), column.saturating_sub(1)),
                    Position::new(line, column.saturating_sub(1)),
                    Position::new(line, column),
                ),
                message.trim(),

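The widened match above accepts an optional extra leading segment before `line`; my reading is that this tolerates paths that themselves contain a `:` (e.g. a Windows drive letter), though the diff does not say so explicitly. A std-only sketch of the parse:

```rust
// Parse "file:line:col: message" or "C:\path:line:col: message", where the
// drive letter contributes one extra ':'-separated segment.
fn parse(message: &str) -> Option<(u32, u32, &str)> {
    let words: Vec<&str> = message.split(':').collect();
    match words.as_slice() {
        [_, _, line, column, msg] | [_, line, column, msg] => {
            Some((line.parse().ok()?, column.parse().ok()?, msg.trim()))
        }
        _ => None,
    }
}

fn main() {
    assert_eq!(
        parse("main.py:3:10: unresolved import"),
        Some((3, 10, "unresolved import"))
    );
}
```
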
@@ -34,7 +34,7 @@ pub(super) trait BackgroundDocumentRequestHandler: RequestHandler {

    fn run_with_snapshot(
        snapshot: DocumentSnapshot,
        db: Option<RootDatabase>,
        db: RootDatabase,
        notifier: Notifier,
        params: <<Self as RequestHandler>::RequestType as Request>::Params,
    ) -> super::Result<<<Self as RequestHandler>::RequestType as Request>::Result>;

@@ -6,15 +6,16 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;

use anyhow::anyhow;
use lsp_types::{ClientCapabilities, Url};
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};

use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::SystemPath;
use ruff_db::Db;

use crate::edit::{DocumentKey, NotebookDocument};
use crate::system::{url_to_system_path, LSPSystem};
use crate::edit::{DocumentKey, DocumentVersion, NotebookDocument};
use crate::system::{url_to_any_system_path, AnySystemPath, LSPSystem};
use crate::{PositionEncoding, TextDocument};

pub(crate) use self::capabilities::ResolvedClientCapabilities;
@@ -82,6 +83,12 @@ impl Session {
        })
    }

    // TODO(dhruvmanila): Ideally, we should have a single method for `workspace_db_for_path_mut`
    // and `default_workspace_db_mut`, but the borrow checker doesn't allow that; see the sketch
    // after this impl block.
    // https://github.com/astral-sh/ruff/pull/13041#discussion_r1726725437

    /// Returns a reference to the workspace [`RootDatabase`] corresponding to the given path, if
    /// any.
    pub(crate) fn workspace_db_for_path(&self, path: impl AsRef<Path>) -> Option<&RootDatabase> {
        self.workspaces
            .range(..=path.as_ref().to_path_buf())
@@ -89,6 +96,8 @@ impl Session {
            .map(|(_, db)| db)
    }

    /// Returns a mutable reference to the workspace [`RootDatabase`] corresponding to the given
    /// path, if any.
    pub(crate) fn workspace_db_for_path_mut(
        &mut self,
        path: impl AsRef<Path>,
@@ -99,6 +108,19 @@ impl Session {
            .map(|(_, db)| db)
    }

    /// Returns a reference to the default workspace [`RootDatabase`]. The default workspace is the
    /// minimum root path in the workspace map.
    pub(crate) fn default_workspace_db(&self) -> &RootDatabase {
        // SAFETY: Currently, red knot only supports a single workspace.
        self.workspaces.values().next().unwrap()
    }

    /// Returns a mutable reference to the default workspace [`RootDatabase`].
    pub(crate) fn default_workspace_db_mut(&mut self) -> &mut RootDatabase {
        // SAFETY: Currently, red knot only supports a single workspace.
        self.workspaces.values_mut().next().unwrap()
    }

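The TODO above refers to a well-known borrow-checker limitation: a single lookup-or-fallback method cannot return the `Some` borrow from a failed mutable lookup and then re-borrow `self` in the `None` arm. A minimal illustration (not the real `Session` API):

```rust
use std::collections::BTreeMap;

struct Workspaces {
    map: BTreeMap<String, i32>,
}

impl Workspaces {
    fn for_key_mut(&mut self, key: &str) -> Option<&mut i32> {
        self.map.get_mut(key)
    }

    fn default_mut(&mut self) -> &mut i32 {
        self.map.values_mut().next().unwrap()
    }
}

fn db_for_key_mut<'a>(ws: &'a mut Workspaces, key: &str) -> &'a mut i32 {
    // `match ws.for_key_mut(key) { Some(db) => db, None => ws.default_mut() }`
    // is rejected: the mutable borrow from the failed lookup is still
    // considered live in the `None` arm. Probing immutably first compiles.
    if ws.map.contains_key(key) {
        ws.for_key_mut(key).unwrap()
    } else {
        ws.default_mut()
    }
}
```
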
    pub fn key_from_url(&self, url: Url) -> DocumentKey {
        self.index().key_from_url(url)
    }
@@ -125,6 +147,20 @@ impl Session {
        self.index_mut().open_text_document(url, document);
    }

    /// Updates a text document at the associated `key`.
    ///
    /// The document key must point to a text document, or this will throw an error.
    pub(crate) fn update_text_document(
        &mut self,
        key: &DocumentKey,
        content_changes: Vec<TextDocumentContentChangeEvent>,
        new_version: DocumentVersion,
    ) -> crate::Result<()> {
        let position_encoding = self.position_encoding;
        self.index_mut()
            .update_text_document(key, content_changes, new_version, position_encoding)
    }

    /// De-registers a document, specified by its key.
    /// Calling this multiple times for the same document is a logic error.
    pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> {
@@ -211,6 +247,7 @@ impl Drop for MutIndexGuard<'_> {

/// An immutable snapshot of `Session` that references
/// a specific document.
#[derive(Debug)]
pub struct DocumentSnapshot {
    resolved_client_capabilities: Arc<ResolvedClientCapabilities>,
    document_ref: index::DocumentQuery,
@@ -231,7 +268,12 @@ impl DocumentSnapshot {
    }

    pub(crate) fn file(&self, db: &RootDatabase) -> Option<File> {
        let path = url_to_system_path(self.document_ref.file_url()).ok()?;
        system_path_to_file(db, path).ok()
        match url_to_any_system_path(self.document_ref.file_url()).ok()? {
            AnySystemPath::System(path) => system_path_to_file(db, path).ok(),
            AnySystemPath::SystemVirtual(virtual_path) => db
                .files()
                .try_virtual_file(&virtual_path)
                .map(|virtual_file| virtual_file.file()),
        }
    }
}

@@ -8,27 +8,40 @@ use ruff_db::file_revision::FileRevision;
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
use ruff_db::system::{
    DirectoryEntry, FileType, Metadata, OsSystem, Result, System, SystemPath, SystemPathBuf,
    SystemVirtualPath,
    SystemVirtualPath, SystemVirtualPathBuf,
};
use ruff_notebook::{Notebook, NotebookError};

use crate::session::index::Index;
use crate::DocumentQuery;

/// Converts the given [`Url`] to a [`SystemPathBuf`].
/// Converts the given [`Url`] to an [`AnySystemPath`].
///
/// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`]. Otherwise, the
/// URL is converted to a [`SystemVirtualPathBuf`].
///
/// This fails in the following cases:
/// * The URL scheme is not `file`.
/// * The URL cannot be converted to a file path (refer to [`Url::to_file_path`]).
/// * If the URL is not a valid UTF-8 string.
pub(crate) fn url_to_system_path(url: &Url) -> std::result::Result<SystemPathBuf, ()> {
pub(crate) fn url_to_any_system_path(url: &Url) -> std::result::Result<AnySystemPath, ()> {
    if url.scheme() == "file" {
        Ok(SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?)
        Ok(AnySystemPath::System(
            SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?,
        ))
    } else {
        Err(())
        Ok(AnySystemPath::SystemVirtual(
            SystemVirtualPath::new(url.as_str()).to_path_buf(),
        ))
    }
}

||||
/// Represents either a [`SystemPath`] or a [`SystemVirtualPath`].
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum AnySystemPath {
|
||||
System(SystemPathBuf),
|
||||
SystemVirtual(SystemVirtualPathBuf),
|
||||
}
|
||||
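// Editor's note (illustrative only, not part of the diff): how the two variants of
// `AnySystemPath` are produced by `url_to_any_system_path`. The URLs below are example
// values, not taken from the changeset.
//
//     let file_url = Url::parse("file:///src/test.py").unwrap();
//     assert!(matches!(
//         url_to_any_system_path(&file_url),
//         Ok(AnySystemPath::System(_))
//     ));
//
//     // Any non-`file` scheme (e.g. an in-editor `untitled:` buffer) is treated as a
//     // virtual path keyed by the full URL string.
//     let untitled_url = Url::parse("untitled:Untitled-1").unwrap();
//     assert!(matches!(
//         url_to_any_system_path(&untitled_url),
//         Ok(AnySystemPath::SystemVirtual(_))
//     ));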

#[derive(Debug)]
pub(crate) struct LSPSystem {
    /// A read-only copy of the index where the server stores all the open documents and settings.
@@ -144,19 +157,6 @@ impl System for LSPSystem {
        }
    }

    fn virtual_path_metadata(&self, path: &SystemVirtualPath) -> Result<Metadata> {
        // Virtual paths only exist in the LSP system, so we don't need to check the OS system.
        let document = self
            .system_virtual_path_to_document_ref(path)?
            .ok_or_else(|| virtual_path_not_found(path))?;

        Ok(Metadata::new(
            document_revision(&document),
            None,
            FileType::File,
        ))
    }

    fn read_virtual_path_to_string(&self, path: &SystemVirtualPath) -> Result<String> {
        let document = self
            .system_virtual_path_to_document_ref(path)?
crates/red_knot_test/Cargo.toml (new file, 33 lines)
@@ -0,0 +1,33 @@
[package]
name = "red_knot_test"
version = "0.0.0"
publish = false
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_vendored = { workspace = true }
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_trivia = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }

anyhow = { workspace = true }
colored = { workspace = true }
once_cell = { workspace = true }
regex = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
smallvec = { workspace = true }

[dev-dependencies]

[lints]
workspace = true
crates/red_knot_test/README.md (new file, 470 lines)
@@ -0,0 +1,470 @@
# Writing type-checking / type-inference tests

Any Markdown file can be a test suite.

In order for it to be run as one, `red_knot_test::run` must be called with its path; see
`crates/red_knot_python_semantic/tests/mdtest.rs` for an example that treats all Markdown files
under a certain directory as test suites.

A Markdown test suite can contain any number of tests. A test consists of one or more embedded
"files", each defined by a triple-backticks fenced code block. The code block must have a tag string
specifying its language; currently only `py` (Python files) and `pyi` (type stub files) are
supported.

The simplest possible test suite consists of just a single test, with a single embedded file:

````markdown
```py
reveal_type(1)  # revealed: Literal[1]
```
````

When running this test, the mdtest framework will write a file with these contents to the default
file path (`/src/test.py`) in its in-memory file system, run a type check on that file, and then
match the resulting diagnostics with the assertions in the test. Assertions are in the form of
Python comments. If all diagnostics and all assertions are matched, the test passes; otherwise, it
fails.

<!---
(If you are reading this document in raw Markdown source rather than rendered Markdown, note that
the quadruple-backtick-fenced "markdown" language code block above is NOT itself part of the mdtest
syntax, it's just how this README embeds an example mdtest Markdown document.)
--->

See actual example mdtest suites in
[`crates/red_knot_python_semantic/resources/mdtest`](https://github.com/astral-sh/ruff/tree/main/crates/red_knot_python_semantic/resources/mdtest).

> ℹ️ Note: If you use `rstest` to generate a separate test for all Markdown files in a certain directory,
> as with the example in `crates/red_knot_python_semantic/tests/mdtest.rs`,
> you will likely want to also make sure that the crate the tests are in is rebuilt every time a
> Markdown file is added or removed from the directory. See
> [`crates/red_knot_python_semantic/build.rs`](https://github.com/astral-sh/ruff/tree/main/crates/red_knot_python_semantic/build.rs)
> for an example of how to do this.
>
> This is because `rstest` generates its tests at build time rather than at runtime.
> Without the `build.rs` file to force a rebuild when a Markdown file is added or removed,
> a new Markdown test suite might not be run unless some other change in the crate caused a rebuild
> following the addition of the new test file.
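
A minimal sketch of such an `rstest`-generated entry point (editor-added illustration; the glob and
the use of rstest's `#[files]` attribute are assumptions here — see the real `mdtest.rs` referenced
above for the canonical version):

```rust
use std::path::PathBuf;

use rstest::rstest;

// One Rust test is generated (at build time) per Markdown file matching the glob.
#[rstest]
fn mdtest(#[files("resources/mdtest/**/*.md")] path: PathBuf) {
    // Use the file name as the suite title, then run the whole suite.
    let title = path.file_name().unwrap().to_string_lossy().to_string();
    red_knot_test::run(&path, &title);
}
```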

## Assertions

Two kinds of assertions are supported: `# revealed:` (shown above) and `# error:`.

### Assertion kinds

#### revealed

A `# revealed:` assertion should always be paired with a call to the `reveal_type` utility, which
reveals (via a diagnostic) the inferred type of its argument (which can be any expression). The text
after `# revealed:` must match exactly with the displayed form of the revealed type of that
expression.

The `reveal_type` function can be imported from the `typing` standard library module (or, for older
Python versions, from the `typing_extensions` pseudo-standard-library module[^extensions]):

```py
from typing import reveal_type

reveal_type("foo")  # revealed: Literal["foo"]
```

For convenience, type checkers also pretend that `reveal_type` is a built-in, so that this import is
not required. Using `reveal_type` without importing it issues a warning diagnostic, in addition to
the diagnostic revealing the type of the expression.

The `# revealed:` assertion must always match a revealed-type diagnostic, and will also match the
undefined-reveal diagnostic, if present, so it's safe to use `reveal_type` in tests either with or
without importing it. (Style preference is to not import it in tests, unless specifically testing
something about the behavior of importing it.)
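
For example (an editor-added illustration), this test passes as-is: the single `# revealed:`
assertion matches both the revealed-type diagnostic and the undefined-reveal warning emitted for the
unimported `reveal_type`, so no extra assertion is needed for the warning:

```py
reveal_type(1)  # revealed: Literal[1]
```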

#### error

A comment beginning with `# error:` is an assertion that a type checker diagnostic will be emitted,
with its text span starting on that line. The matching can be narrowed in three ways:

- `# error: [invalid-assignment]` requires that the matched diagnostic have the rule code
  `invalid-assignment`. (The square brackets are required.)
- `# error: "Some text"` requires that the diagnostic's full message contain the text `Some text`.
  (The double quotes are required in the assertion comment; they are not part of the matched text.)
- `# error: 8 [rule-code]` or `# error: 8 "Some text"` additionally requires that the matched
  diagnostic's text span begins on column 8 (one-indexed) of this line.

Assertions must contain either a rule code or a contains-text, or both, and may optionally also
include a column number. They must come in order: first column, if present; then rule code, if
present; then contains-text, if present. For example, an assertion using all three would look like
`# error: 8 [invalid-assignment] "Some text"`; see the sketch below.

Error assertions in tests intended to test type checker semantics should primarily use rule-code
assertions, with occasional contains-text assertions where needed to disambiguate or validate some
details of the diagnostic message.

### Assertion locations

An assertion comment may be a line-trailing comment, in which case it applies to the line it is on:

```py
x: str = 1  # error: [invalid-assignment]
```

Or it may be a comment on its own line, in which case it applies to the next line that does not
contain an assertion comment:

```py
# error: [invalid-assignment]
x: str = 1
```

Multiple assertions applying to the same line may be stacked:

```py
# error: [invalid-assignment]
# revealed: Literal[1]
x: str = reveal_type(1)
```

Intervening empty lines or non-assertion comments are not allowed; an assertion stack must be one
assertion per line, immediately following each other, with the line immediately following the last
assertion as the line of source code on which the matched diagnostics are emitted.

## Multi-file tests

Some tests require multiple files, with imports from one file into another. Multiple fenced code
blocks represent multiple embedded files. Since files must have unique names, at most one file can
use the default name of `/src/test.py`. Other files must explicitly specify their file name:

````markdown
```py
from b import C
reveal_type(C)  # revealed: Literal[C]
```

```py path=b.py
class C: pass
```
````

Relative file names are always relative to the "workspace root", which is also an import root (that
is, the equivalent of a runtime entry on `sys.path`).

The default workspace root is `/src/`. Currently it is not possible to customize this in a test, but
this is a feature we will want to add in the future.

So the above test creates two files, `/src/test.py` and `/src/b.py`, and sets the workspace root to
`/src/`, allowing `test.py` to import from `b.py` using the module name `b`.

## Multi-test suites

A single test suite (Markdown file) can contain multiple tests, by demarcating them using Markdown
header lines:

````markdown
# Same-file invalid assignment

```py
x: int = "foo"  # error: [invalid-assignment]
```

# Cross-file invalid assignment

```py
from b import y
x: int = y  # error: [invalid-assignment]
```

```py path=b.py
y = "foo"
```
````

This test suite contains two tests, one named "Same-file invalid assignment" and the other named
"Cross-file invalid assignment". The first test involves only a single embedded file, and the second
test involves two embedded files.

The tests are run independently, in independent in-memory file systems and with new red-knot
[Salsa](https://github.com/salsa-rs/salsa) databases. This means that each is a from-scratch run of
the type checker, with no data persisting from any previous test.

Due to `cargo test` limitations, an entire test suite (Markdown file) is run as a single Rust test,
so it's not possible to select individual tests within it to run.

## Structured test suites

Markdown headers can also be used to group related tests within a suite:

````markdown
# Literals

## Numbers

### Integer

```py
reveal_type(1)  # revealed: Literal[1]
```

### Float

```py
reveal_type(1.0)  # revealed: float
```

## Strings

```py
reveal_type("foo")  # revealed: Literal["foo"]
```
````

This test suite contains three tests, named "Literals - Numbers - Integer", "Literals - Numbers -
Float", and "Literals - Strings".

A header-demarcated section must either be a test or a grouping header; it cannot be both. That is,
a header section can either contain embedded files (making it a test), or it can contain more
deeply-nested headers (headers with more `#`), but it cannot contain both.

## Documentation of tests

Arbitrary Markdown syntax (including, of course, normal prose paragraphs) is permitted (and ignored
by the test framework) between fenced code blocks. This permits natural documentation of why a test
exists, and what it intends to assert:

````markdown
Assigning a string to a variable annotated as `int` is not permitted:

```py
x: int = "foo"  # error: [invalid-assignment]
```
````

## Planned features

There are some features we have designed and intend for the test framework to have, but have not yet
implemented:

### Multi-line diagnostic assertions

We may want to be able to assert that a diagnostic spans multiple lines, and to assert the columns it
begins and/or ends on. The planned syntax for this will use `<<<` and `>>>` to mark the start and end
lines for an assertion:

```py
(3  # error: 2 [unsupported-operands] <<<
+
"foo")  # error: 6 >>>
```

The column assertion `6` on the ending line should be optional.

In cases of overlapping such assertions, resolve ambiguity using more angle brackets: `<<<<` begins
an assertion ended by `>>>>`, etc.

### Non-Python files

Some tests may need to specify non-Python embedded files: typeshed `stdlib/VERSIONS`, `pth` files,
`py.typed` files, `pyvenv.cfg` files...

We will allow specifying any of these using the `text` language in the code block tag string:

````markdown
```text path=/third-party/foo/py.typed
partial
```
````

We may want to also support testing Jupyter notebooks as embedded files; exact syntax for this is
yet to be determined.

Of course, red-knot is only run directly on `py` and `pyi` files, and assertion comments are only
possible in these files.

A fenced code block with no language will always be an error.

### Configuration

We will add the ability to specify non-default red-knot configurations to use in tests, by including
a TOML code block:

````markdown
```toml
[tool.knot]
warn-on-any = true
```

```py
from typing import Any

def f(x: Any):  # error: [use-of-any]
    pass
```
````

It should be possible to include a TOML code block in a single test (as shown), or in a grouping
section, in which case it applies to all nested tests within that grouping section. Configurations
at multiple levels are allowed and merged, with the most-nested (closest to the test) taking
precedence, as sketched below.
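
For instance (an editor-added sketch of the planned merging behavior; `warn-on-any` is the same
hypothetical option used above), a grouping-level config could be overridden by a more-nested one:

````markdown
# Group with shared config

```toml
[tool.knot]
warn-on-any = true
```

## Test that overrides the group config

```toml
[tool.knot]
warn-on-any = false
```

```py
from typing import Any

def f(x: Any):  # no error: `warn-on-any` is disabled for this test
    pass
```
````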

### Running just a single test from a suite

Having each test in a suite always run as a distinct Rust test would require writing our own test
runner or code-generating tests in a build script; neither of these is planned.

We could still allow running just a single test from a suite, for debugging purposes, either via
some "focus" syntax that could be easily temporarily added to a test, or via an environment
variable.

### Configuring search paths and kinds

The red-knot TOML configuration format hasn't been designed yet, and we may want to implement
support in the test framework for configuring search paths before it is designed. If so, we can
define some configuration options for now under the `[tool.knot.tests]` namespace. In the future,
perhaps some of these can be replaced by real red-knot configuration options; some or all may also
be kept long-term as test-specific options.

Some configuration options we will want to provide:

- We should be able to configure the default workspace root to something other than `/src/` using a
  `workspace-root` configuration option.

- We should be able to add a third-party root using the `third-party-root` configuration option.

- We may want to add additional configuration options for setting additional search path kinds.

Paths for `workspace-root` and `third-party-root` must be absolute.

Relative embedded-file paths are relative to the workspace root, even if it is explicitly set to a
non-default value using the `workspace-root` config.

### Specifying a custom typeshed

Some tests will need to override the default typeshed with custom files. The `[tool.knot.tests]`
configuration option `typeshed-root` should be usable for this:

````markdown
```toml
[tool.knot.tests]
typeshed-root = "/typeshed"
```

This file is importable as part of our custom typeshed, because it is within `/typeshed`, which we
configured above as our custom typeshed root:

```py path=/typeshed/stdlib/builtins.pyi
I_AM_THE_ONLY_BUILTIN = 1
```

This file is written to `/src/test.py`, because the default workspace root is `/src/` and the
default file path is `test.py`:

```py
reveal_type(I_AM_THE_ONLY_BUILTIN)  # revealed: Literal[1]
```
````

A fenced code block with language `text` can be used to provide a `stdlib/VERSIONS` file in the
custom typeshed root, as illustrated below. If no such file is created explicitly, one should be
created implicitly, including entries enabling all specified `<typeshed-root>/stdlib` files for all
supported Python versions.
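
An explicit `VERSIONS` file could look like this (an editor-added sketch; the entry follows
typeshed's `VERSIONS` format of `module: min-version-` ranges):

````markdown
```text path=/typeshed/stdlib/VERSIONS
builtins: 3.8-
```
````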

### I/O errors

We could use an `error=` configuration option in the tag string to make an embedded file cause an
I/O error on read.
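
One possible spelling (an editor-added sketch; this option is not designed yet, so the syntax is
hypothetical):

````markdown
```py path=b.py error=read
# The test framework would make any read of `b.py` fail with an I/O error.
x = 1
```
````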

### Asserting on full diagnostic output

The inline comment diagnostic assertions are useful for making quick, readable assertions about
diagnostics in a particular location. But sometimes we will want to assert on the full diagnostic
output of checking an embedded Python file. Or sometimes (see “incremental tests” below) we will
want to assert on diagnostics in a file, without impacting the contents of that file by changing a
comment in it. In these cases, a Python code block in a test could be followed by a fenced code
block with language `output`; this would contain the full diagnostic output for the preceding test
file:

````markdown
# full output

```py
x = 1
reveal_type(x)
```

This is just an example, not a proposal that red-knot would ever actually output diagnostics in
precisely this format:

```output
test.py, line 1, col 1: revealed type is 'Literal[1]'
```
````

We will want to build tooling to automatically capture and update these “full diagnostic output”
blocks, when tests are run in an update-output mode (probably specified by an environment variable).

By default, an `output` block will specify diagnostic output for the file `<workspace-root>/test.py`.
An `output` block can have a `path=` option, to explicitly specify the Python file for which it
asserts diagnostic output, and a `stage=` option, to specify which stage of an incremental test it
specifies diagnostic output at. (See “incremental tests” below.)

It is an error for an `output` block to exist if there is no `py` or `python` block in the same test
for the same file path.

### Incremental tests

Some tests should validate incremental checking, by initially creating some files, checking them,
and then modifying/adding/deleting files and checking again.

We should add the capability to create an incremental test by using the `stage=` option on some
fenced code blocks in the test:

````markdown
# Incremental

## modify a file

Initial version of `test.py` and `b.py`:

```py
from b import x
reveal_type(x)
```

```py path=b.py
x = 1
```

Initial expected output for `test.py`:

```output
/src/test.py, line 1, col 1: revealed type is 'Literal[1]'
```

Now in our first incremental stage, modify the contents of `b.py`:

```py path=b.py stage=1
# b.py
x = 2
```

And this is our updated expected output for `test.py` at stage 1:

```output stage=1
/src/test.py, line 1, col 1: revealed type is 'Literal[2]'
```

(One reason to use full-diagnostic-output blocks in this test is that updating
inline-comment diagnostic assertions for `test.py` would require specifying new
contents for `test.py` in stage 1, which we don't want to do in this test.)
````

It will be possible to provide any number of stages in an incremental test. If a stage re-specifies
a filename that was specified in a previous stage (or the initial stage), that file is modified. A
new filename appearing for the first time in a new stage will create a new file. To delete a
previously created file, specify that file with the tag `delete` in its tag string (in this case, it
is an error to provide non-empty contents). Any previously-created files that are not re-specified
in a later stage continue to exist with their previously-specified contents, and are not "touched".

All stages should be run in order, incrementally, and then the final state should also be re-checked
cold, to validate equivalence of cold and incremental check results.

[^extensions]: `typing-extensions` is a third-party module, but typeshed, and thus type checkers
    also, treat it as part of the standard library.
crates/red_knot_test/src/assertion.rs (new file, 621 lines)
@@ -0,0 +1,621 @@
//! Parse type and type-error assertions in Python comment form.
//!
//! Parses comments of the form `# revealed: SomeType` and `# error: 8 [rule-code] "message text"`.
//! In the latter case, the `8` is a column number, and `"message text"` asserts that the full
//! diagnostic message contains the text `"message text"`; all three are optional (`# error:` will
//! match any error.)
//!
//! Assertion comments may be placed at end-of-line:
//!
//! ```py
//! x: int = "foo"  # error: [invalid-assignment]
//! ```
//!
//! Or as a full-line comment on the preceding line:
//!
//! ```py
//! # error: [invalid-assignment]
//! x: int = "foo"
//! ```
//!
//! Multiple assertion comments may apply to the same line; in this case all (or all but the last)
//! must be full-line comments:
//!
//! ```py
//! # error: [unbound-name]
//! reveal_type(x)  # revealed: Unbound
//! ```
//!
//! or
//!
//! ```py
//! # error: [unbound-name]
//! # revealed: Unbound
//! reveal_type(x)
//! ```

use crate::db::Db;
use once_cell::sync::Lazy;
use regex::Regex;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_db::source::{line_index, source_text, SourceText};
use ruff_python_trivia::CommentRanges;
use ruff_source_file::{LineIndex, Locator, OneIndexed};
use ruff_text_size::{Ranged, TextRange};
use smallvec::SmallVec;
use std::ops::Deref;

/// Diagnostic assertion comments in a single embedded file.
#[derive(Debug)]
pub(crate) struct InlineFileAssertions {
    comment_ranges: CommentRanges,
    source: SourceText,
    lines: LineIndex,
}

impl InlineFileAssertions {
    pub(crate) fn from_file(db: &Db, file: File) -> Self {
        let source = source_text(db, file);
        let lines = line_index(db, file);
        let parsed = parsed_module(db, file);
        let comment_ranges = CommentRanges::from(parsed.tokens());
        Self {
            comment_ranges,
            source,
            lines,
        }
    }

    fn locator(&self) -> Locator {
        Locator::with_index(&self.source, self.lines.clone())
    }

    fn line_number(&self, range: &impl Ranged) -> OneIndexed {
        self.lines.line_index(range.start())
    }

    fn is_own_line_comment(&self, ranged_assertion: &AssertionWithRange) -> bool {
        CommentRanges::is_own_line(ranged_assertion.start(), &self.locator())
    }
}

impl<'a> IntoIterator for &'a InlineFileAssertions {
    type Item = LineAssertions<'a>;
    type IntoIter = LineAssertionsIterator<'a>;

    fn into_iter(self) -> Self::IntoIter {
        Self::IntoIter {
            file_assertions: self,
            inner: AssertionWithRangeIterator {
                file_assertions: self,
                inner: self.comment_ranges.into_iter(),
            }
            .peekable(),
        }
    }
}

/// An [`Assertion`] with the [`TextRange`] of its original inline comment.
#[derive(Debug)]
struct AssertionWithRange<'a>(Assertion<'a>, TextRange);

impl<'a> Deref for AssertionWithRange<'a> {
    type Target = Assertion<'a>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl Ranged for AssertionWithRange<'_> {
    fn range(&self) -> TextRange {
        self.1
    }
}

impl<'a> From<AssertionWithRange<'a>> for Assertion<'a> {
    fn from(value: AssertionWithRange<'a>) -> Self {
        value.0
    }
}

/// Iterator that yields all assertions within a single embedded Python file.
#[derive(Debug)]
struct AssertionWithRangeIterator<'a> {
    file_assertions: &'a InlineFileAssertions,
    inner: std::iter::Copied<std::slice::Iter<'a, TextRange>>,
}

impl<'a> Iterator for AssertionWithRangeIterator<'a> {
    type Item = AssertionWithRange<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let locator = self.file_assertions.locator();
        loop {
            let inner_next = self.inner.next()?;
            let comment = locator.slice(inner_next);
            if let Some(assertion) = Assertion::from_comment(comment) {
                return Some(AssertionWithRange(assertion, inner_next));
            };
        }
    }
}

impl std::iter::FusedIterator for AssertionWithRangeIterator<'_> {}

/// A vector of [`Assertion`]s belonging to a single line.
///
/// Most lines will have zero or one assertion, so we use a [`SmallVec`] optimized for a single
/// element to avoid most heap vector allocations.
type AssertionVec<'a> = SmallVec<[Assertion<'a>; 1]>;

#[derive(Debug)]
pub(crate) struct LineAssertionsIterator<'a> {
    file_assertions: &'a InlineFileAssertions,
    inner: std::iter::Peekable<AssertionWithRangeIterator<'a>>,
}

impl<'a> Iterator for LineAssertionsIterator<'a> {
    type Item = LineAssertions<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let file = self.file_assertions;
        let ranged_assertion = self.inner.next()?;
        let mut collector = AssertionVec::new();
        let mut line_number = file.line_number(&ranged_assertion);
        // Collect all own-line comments on consecutive lines; these all apply to the same line of
        // code. For example:
        //
        // ```py
        // # error: [unbound-name]
        // # revealed: Unbound
        // reveal_type(x)
        // ```
        //
        if file.is_own_line_comment(&ranged_assertion) {
            collector.push(ranged_assertion.into());
            let mut only_own_line = true;
            while let Some(ranged_assertion) = self.inner.peek() {
                let next_line_number = line_number.saturating_add(1);
                if file.line_number(ranged_assertion) == next_line_number {
                    if !file.is_own_line_comment(ranged_assertion) {
                        only_own_line = false;
                    }
                    line_number = next_line_number;
                    collector.push(self.inner.next().unwrap().into());
                    // If we see an end-of-line comment, it has to be the end of the stack,
                    // otherwise we'd botch this case, attributing all three errors to the `bar`
                    // line:
                    //
                    // ```py
                    // # error:
                    // foo  # error:
                    // bar  # error:
                    // ```
                    //
                    if !only_own_line {
                        break;
                    }
                } else {
                    break;
                }
            }
            if only_own_line {
                // The collected comments apply to the _next_ line in the code.
                line_number = line_number.saturating_add(1);
            }
        } else {
            // We have a line-trailing comment; it applies to its own line, and is not grouped.
            collector.push(ranged_assertion.into());
        }
        Some(LineAssertions {
            line_number,
            assertions: collector,
        })
    }
}

impl std::iter::FusedIterator for LineAssertionsIterator<'_> {}

/// One or more assertions referring to the same line of code.
#[derive(Debug)]
pub(crate) struct LineAssertions<'a> {
    /// The line these assertions refer to.
    ///
    /// Not necessarily the same line the assertion comment is located on; for an own-line comment,
    /// it's the next non-assertion line.
    pub(crate) line_number: OneIndexed,

    /// The assertions referring to this line.
    pub(crate) assertions: AssertionVec<'a>,
}

impl<'a> Deref for LineAssertions<'a> {
    type Target = [Assertion<'a>];

    fn deref(&self) -> &Self::Target {
        &self.assertions
    }
}

static TYPE_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^#\s*revealed:\s*(?<ty_display>.+?)\s*$").unwrap());

static ERROR_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(
        r#"^#\s*error:(\s*(?<column>\d+))?(\s*\[(?<rule>.+?)\])?(\s*"(?<message>.+?)")?\s*$"#,
    )
    .unwrap()
});

/// A single diagnostic assertion comment.
#[derive(Debug)]
pub(crate) enum Assertion<'a> {
    /// A `revealed: ` assertion.
    Revealed(&'a str),

    /// An `error: ` assertion.
    Error(ErrorAssertion<'a>),
}

impl<'a> Assertion<'a> {
    fn from_comment(comment: &'a str) -> Option<Self> {
        if let Some(caps) = TYPE_RE.captures(comment) {
            Some(Self::Revealed(caps.name("ty_display").unwrap().as_str()))
        } else {
            ERROR_RE.captures(comment).map(|caps| {
                Self::Error(ErrorAssertion {
                    rule: caps.name("rule").map(|m| m.as_str()),
                    column: caps.name("column").and_then(|m| m.as_str().parse().ok()),
                    message_contains: caps.name("message").map(|m| m.as_str()),
                })
            })
        }
    }
}

impl std::fmt::Display for Assertion<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Revealed(expected_type) => write!(f, "revealed: {expected_type}"),
            Self::Error(assertion) => assertion.fmt(f),
        }
    }
}

/// An `error: ` assertion comment.
#[derive(Debug)]
pub(crate) struct ErrorAssertion<'a> {
    /// The diagnostic rule code we expect.
    pub(crate) rule: Option<&'a str>,

    /// The column we expect the diagnostic range to start at.
    pub(crate) column: Option<OneIndexed>,

    /// A string we expect to be contained in the diagnostic message.
    pub(crate) message_contains: Option<&'a str>,
}

impl std::fmt::Display for ErrorAssertion<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("error:")?;
        if let Some(column) = self.column {
            write!(f, " {column}")?;
        }
        if let Some(rule) = self.rule {
            write!(f, " [{rule}]")?;
        }
        if let Some(message) = self.message_contains {
            write!(f, r#" "{message}""#)?;
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::{Assertion, InlineFileAssertions, LineAssertions};
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_python_trivia::textwrap::dedent;
    use ruff_source_file::OneIndexed;

    fn get_assertions(source: &str) -> InlineFileAssertions {
        let mut db = crate::db::Db::setup(SystemPathBuf::from("/src"));
        db.write_file("/src/test.py", source).unwrap();
        let file = system_path_to_file(&db, "/src/test.py").unwrap();
        InlineFileAssertions::from_file(&db, file)
    }

    fn as_vec(assertions: &InlineFileAssertions) -> Vec<LineAssertions> {
        assertions.into_iter().collect()
    }

    #[test]
    fn ty_display() {
        let assertions = get_assertions(&dedent(
            "
            reveal_type(1) # revealed: Literal[1]
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(1));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), "revealed: Literal[1]");
    }

    #[test]
    fn error() {
        let assertions = get_assertions(&dedent(
            "
            x # error:
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(1));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), "error:");
    }

    #[test]
    fn prior_line() {
        let assertions = get_assertions(&dedent(
            "
            # revealed: Literal[1]
            reveal_type(1)
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(2));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), "revealed: Literal[1]");
    }

    #[test]
    fn stacked_prior_line() {
        let assertions = get_assertions(&dedent(
            "
            # revealed: Unbound
            # error: [unbound-name]
            reveal_type(x)
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(3));

        let [assert1, assert2] = &line.assertions[..] else {
            panic!("expected two assertions");
        };

        assert_eq!(format!("{assert1}"), "revealed: Unbound");
        assert_eq!(format!("{assert2}"), "error: [unbound-name]");
    }

    #[test]
    fn stacked_mixed() {
        let assertions = get_assertions(&dedent(
            "
            # revealed: Unbound
            reveal_type(x) # error: [unbound-name]
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(2));

        let [assert1, assert2] = &line.assertions[..] else {
            panic!("expected two assertions");
        };

        assert_eq!(format!("{assert1}"), "revealed: Unbound");
        assert_eq!(format!("{assert2}"), "error: [unbound-name]");
    }

    #[test]
    fn multiple_lines() {
        let assertions = get_assertions(&dedent(
            r#"
            # error: [invalid-assignment]
            x: int = "foo"
            y # error: [unbound-name]
            "#,
        ));

        let [line1, line2] = &as_vec(&assertions)[..] else {
            panic!("expected two lines");
        };

        assert_eq!(line1.line_number, OneIndexed::from_zero_indexed(2));
        assert_eq!(line2.line_number, OneIndexed::from_zero_indexed(3));

        let [Assertion::Error(error1)] = &line1.assertions[..] else {
            panic!("expected one error assertion");
        };

        assert_eq!(error1.rule, Some("invalid-assignment"));

        let [Assertion::Error(error2)] = &line2.assertions[..] else {
            panic!("expected one error assertion");
        };

        assert_eq!(error2.rule, Some("unbound-name"));
    }

    #[test]
    fn multiple_lines_mixed_stack() {
        let assertions = get_assertions(&dedent(
            r#"
            # error: [invalid-assignment]
            x: int = reveal_type("foo") # revealed: str
            y # error: [unbound-name]
            "#,
        ));

        let [line1, line2] = &as_vec(&assertions)[..] else {
            panic!("expected two lines");
        };

        assert_eq!(line1.line_number, OneIndexed::from_zero_indexed(2));
        assert_eq!(line2.line_number, OneIndexed::from_zero_indexed(3));

        let [Assertion::Error(error1), Assertion::Revealed(expected_ty)] = &line1.assertions[..]
        else {
            panic!("expected one error assertion and one Revealed assertion");
        };

        assert_eq!(error1.rule, Some("invalid-assignment"));
        assert_eq!(*expected_ty, "str");

        let [Assertion::Error(error2)] = &line2.assertions[..] else {
            panic!("expected one error assertion");
        };

        assert_eq!(error2.rule, Some("unbound-name"));
    }

    #[test]
    fn error_with_rule() {
        let assertions = get_assertions(&dedent(
            "
            x # error: [unbound-name]
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(1));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), "error: [unbound-name]");
    }

    #[test]
    fn error_with_rule_and_column() {
        let assertions = get_assertions(&dedent(
            "
            x # error: 1 [unbound-name]
            ",
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(1));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), "error: 1 [unbound-name]");
    }

    #[test]
    fn error_with_rule_and_message() {
        let assertions = get_assertions(&dedent(
            r#"
            # error: [unbound-name] "`x` is unbound"
            x
            "#,
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(2));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(
            format!("{assert}"),
            r#"error: [unbound-name] "`x` is unbound""#
        );
    }

    #[test]
    fn error_with_message_and_column() {
        let assertions = get_assertions(&dedent(
            r#"
            # error: 1 "`x` is unbound"
            x
            "#,
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(2));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(format!("{assert}"), r#"error: 1 "`x` is unbound""#);
    }

    #[test]
    fn error_with_rule_and_message_and_column() {
        let assertions = get_assertions(&dedent(
            r#"
            # error: 1 [unbound-name] "`x` is unbound"
            x
            "#,
        ));

        let [line] = &as_vec(&assertions)[..] else {
            panic!("expected one line");
        };

        assert_eq!(line.line_number, OneIndexed::from_zero_indexed(2));

        let [assert] = &line.assertions[..] else {
            panic!("expected one assertion");
        };

        assert_eq!(
            format!("{assert}"),
            r#"error: 1 [unbound-name] "`x` is unbound""#
        );
    }
}
crates/red_knot_test/src/db.rs (new file, 88 lines)
@@ -0,0 +1,88 @@
use red_knot_python_semantic::{
    Db as SemanticDb, Program, ProgramSettings, PythonVersion, SearchPathSettings,
};
use ruff_db::files::{File, Files};
use ruff_db::system::SystemPathBuf;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};

#[salsa::db]
pub(crate) struct Db {
    storage: salsa::Storage<Self>,
    files: Files,
    system: TestSystem,
    vendored: VendoredFileSystem,
}

impl Db {
    pub(crate) fn setup(workspace_root: SystemPathBuf) -> Self {
        let db = Self {
            storage: salsa::Storage::default(),
            system: TestSystem::default(),
            vendored: red_knot_vendored::file_system().clone(),
            files: Files::default(),
        };

        db.memory_file_system()
            .create_directory_all(&workspace_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(workspace_root),
            },
        )
        .expect("Invalid search path settings");

        db
    }
}
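
// Editor's note (illustrative, not part of the diff): typical usage, as seen in this
// crate's unit tests:
//
//     let mut db = Db::setup(SystemPathBuf::from("/src"));
//     db.write_file("/src/test.py", "x = 1").unwrap();
//     // `db` now has a Program rooted at `/src` and an in-memory file to check.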

impl DbWithTestSystem for Db {
    fn test_system(&self) -> &TestSystem {
        &self.system
    }

    fn test_system_mut(&mut self) -> &mut TestSystem {
        &mut self.system
    }
}

#[salsa::db]
impl SourceDb for Db {
    fn vendored(&self) -> &VendoredFileSystem {
        &self.vendored
    }

    fn system(&self) -> &dyn System {
        &self.system
    }

    fn files(&self) -> &Files {
        &self.files
    }
}

impl Upcast<dyn SourceDb> for Db {
    fn upcast(&self) -> &(dyn SourceDb + 'static) {
        self
    }
    fn upcast_mut(&mut self) -> &mut (dyn SourceDb + 'static) {
        self
    }
}

#[salsa::db]
impl SemanticDb for Db {
    fn is_file_open(&self, file: File) -> bool {
        !file.path(self).is_vendored_path()
    }
}

#[salsa::db]
impl salsa::Database for Db {
    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
}
crates/red_knot_test/src/diagnostic.rs (new file, 173 lines)
@@ -0,0 +1,173 @@
//! Sort and group diagnostics by line number, so they can be correlated with assertions.
//!
//! We don't assume that we will get the diagnostics in source order.

use ruff_source_file::{LineIndex, OneIndexed};
use ruff_text_size::Ranged;
use std::ops::{Deref, Range};

/// All diagnostics for one embedded Python file, sorted and grouped by start line number.
///
/// The diagnostics are kept in a flat vector, sorted by line number. A separate vector of
/// [`LineDiagnosticRange`] has one entry for each contiguous slice of the diagnostics vector
/// containing diagnostics which all start on the same line.
#[derive(Debug)]
pub(crate) struct SortedDiagnostics<T> {
    diagnostics: Vec<T>,
    line_ranges: Vec<LineDiagnosticRange>,
}

impl<T> SortedDiagnostics<T>
where
    T: Ranged + Clone,
{
    pub(crate) fn new(diagnostics: impl IntoIterator<Item = T>, line_index: &LineIndex) -> Self {
        let mut diagnostics: Vec<_> = diagnostics
            .into_iter()
            .map(|diagnostic| DiagnosticWithLine {
                line_number: line_index.line_index(diagnostic.start()),
                diagnostic,
            })
            .collect();
        diagnostics.sort_unstable_by_key(|diagnostic_with_line| diagnostic_with_line.line_number);

        let mut diags = Self {
            diagnostics: Vec::with_capacity(diagnostics.len()),
            line_ranges: vec![],
        };

        let mut current_line_number = None;
        let mut start = 0;
        for DiagnosticWithLine {
            line_number,
            diagnostic,
        } in diagnostics
        {
            match current_line_number {
                None => {
                    current_line_number = Some(line_number);
                }
                Some(current) => {
                    if line_number != current {
                        let end = diags.diagnostics.len();
                        diags.line_ranges.push(LineDiagnosticRange {
                            line_number: current,
                            diagnostic_index_range: start..end,
                        });
                        start = end;
                        current_line_number = Some(line_number);
                    }
                }
            }
            diags.diagnostics.push(diagnostic);
        }
        if let Some(line_number) = current_line_number {
            diags.line_ranges.push(LineDiagnosticRange {
                line_number,
                diagnostic_index_range: start..diags.diagnostics.len(),
            });
        }

        diags
    }

    pub(crate) fn iter_lines(&self) -> LineDiagnosticsIterator<T> {
        LineDiagnosticsIterator {
            diagnostics: self.diagnostics.as_slice(),
            inner: self.line_ranges.iter(),
        }
    }
}

/// Range delineating diagnostics in [`SortedDiagnostics`] that begin on a single line.
#[derive(Debug)]
struct LineDiagnosticRange {
    line_number: OneIndexed,
    diagnostic_index_range: Range<usize>,
}

/// Iterator to group sorted diagnostics by line.
pub(crate) struct LineDiagnosticsIterator<'a, T> {
    diagnostics: &'a [T],
    inner: std::slice::Iter<'a, LineDiagnosticRange>,
}

impl<'a, T> Iterator for LineDiagnosticsIterator<'a, T>
where
    T: Ranged + Clone,
{
    type Item = LineDiagnostics<'a, T>;

    fn next(&mut self) -> Option<Self::Item> {
        let LineDiagnosticRange {
            line_number,
            diagnostic_index_range,
        } = self.inner.next()?;
        Some(LineDiagnostics {
            line_number: *line_number,
            diagnostics: &self.diagnostics[diagnostic_index_range.clone()],
        })
    }
}

impl<T> std::iter::FusedIterator for LineDiagnosticsIterator<'_, T> where T: Clone + Ranged {}

/// All diagnostics that start on a single line of source code in one embedded Python file.
#[derive(Debug)]
pub(crate) struct LineDiagnostics<'a, T> {
    /// Line number on which these diagnostics start.
    pub(crate) line_number: OneIndexed,

    /// Diagnostics starting on this line.
    pub(crate) diagnostics: &'a [T],
}

impl<T> Deref for LineDiagnostics<'_, T> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.diagnostics
    }
}

#[derive(Debug)]
struct DiagnosticWithLine<T> {
    line_number: OneIndexed,
    diagnostic: T,
}

#[cfg(test)]
mod tests {
    use crate::db::Db;
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::line_index;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_source_file::OneIndexed;
    use ruff_text_size::{TextRange, TextSize};

    #[test]
    fn sort_and_group() {
        let mut db = Db::setup(SystemPathBuf::from("/src"));
        db.write_file("/src/test.py", "one\ntwo\n").unwrap();
        let file = system_path_to_file(&db, "/src/test.py").unwrap();
        let lines = line_index(&db, file);

        let ranges = vec![
            TextRange::new(TextSize::new(0), TextSize::new(1)),
            TextRange::new(TextSize::new(5), TextSize::new(10)),
            TextRange::new(TextSize::new(1), TextSize::new(7)),
        ];

        let sorted = super::SortedDiagnostics::new(&ranges, &lines);
        let grouped = sorted.iter_lines().collect::<Vec<_>>();

        let [line1, line2] = &grouped[..] else {
            panic!("expected two lines");
        };

        assert_eq!(line1.line_number, OneIndexed::from_zero_indexed(0));
        assert_eq!(line1.diagnostics.len(), 2);
        assert_eq!(line2.line_number, OneIndexed::from_zero_indexed(1));
        assert_eq!(line2.diagnostics.len(), 1);
    }
}
crates/red_knot_test/src/lib.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
use colored::Colorize;
use parser as test_parser;
use red_knot_python_semantic::types::check_types;
use ruff_db::files::system_path_to_file;
use ruff_db::parsed::parsed_module;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use std::collections::BTreeMap;
use std::path::PathBuf;

type Failures = BTreeMap<SystemPathBuf, matcher::FailuresByLine>;

mod assertion;
mod db;
mod diagnostic;
mod matcher;
mod parser;

/// Run `path` as a markdown test suite with given `title`.
///
/// Panic on test failure, and print failure details.
#[allow(clippy::print_stdout)]
pub fn run(path: &PathBuf, title: &str) {
    let source = std::fs::read_to_string(path).unwrap();
    let suite = match test_parser::parse(title, &source) {
        Ok(suite) => suite,
        Err(err) => {
            panic!("Error parsing `{}`: {err}", path.to_str().unwrap())
        }
    };

    let mut any_failures = false;
    for test in suite.tests() {
        if let Err(failures) = run_test(&test) {
            any_failures = true;
            println!("\n{}\n", test.name().bold().underline());

            for (path, by_line) in failures {
                println!("{}", path.as_str().bold());
                for (line_number, failures) in by_line.iter() {
                    for failure in failures {
                        let line_info = format!("line {line_number}:").cyan();
                        println!("    {line_info} {failure}");
                    }
                }
                println!();
            }
        }
    }

    println!("{}\n", "-".repeat(50));

    assert!(!any_failures, "Some tests failed.");
}

fn run_test(test: &parser::MarkdownTest) -> Result<(), Failures> {
    let workspace_root = SystemPathBuf::from("/src");
    let mut db = db::Db::setup(workspace_root.clone());

    let mut system_paths = vec![];

    for file in test.files() {
        assert!(
            matches!(file.lang, "py" | "pyi"),
            "Non-Python files not supported yet."
        );
        let full_path = workspace_root.join(file.path);
        db.write_file(&full_path, file.code).unwrap();
        system_paths.push(full_path);
    }

    let mut failures = BTreeMap::default();

    for path in system_paths {
        let file = system_path_to_file(&db, path.clone()).unwrap();
        let parsed = parsed_module(&db, file);

        // TODO allow testing against code with syntax errors
        assert!(
            parsed.errors().is_empty(),
            "Python syntax errors in {}, {:?}: {:?}",
            test.name(),
            path,
            parsed.errors()
        );

        matcher::match_file(&db, file, check_types(&db, file)).unwrap_or_else(|line_failures| {
            failures.insert(path, line_failures);
        });
    }
    if failures.is_empty() {
        Ok(())
    } else {
        Err(failures)
    }
}
crates/red_knot_test/src/matcher.rs (new file, 929 lines)
@@ -0,0 +1,929 @@
//! Match [`TypeCheckDiagnostic`]s against [`Assertion`]s and produce test failure messages for any
|
||||
//! mismatches.
|
||||
use crate::assertion::{Assertion, ErrorAssertion, InlineFileAssertions};
|
||||
use crate::db::Db;
|
||||
use crate::diagnostic::SortedDiagnostics;
|
||||
use colored::Colorize;
|
||||
use red_knot_python_semantic::types::TypeCheckDiagnostic;
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::source::{line_index, source_text, SourceText};
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
use ruff_text_size::Ranged;
|
||||
use std::cmp::Ordering;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct FailuresByLine {
|
||||
failures: Vec<String>,
|
||||
lines: Vec<LineFailures>,
|
||||
}
|
||||
|
||||
impl FailuresByLine {
|
||||
pub(super) fn iter(&self) -> impl Iterator<Item = (OneIndexed, &[String])> {
|
||||
self.lines.iter().map(|line_failures| {
|
||||
(
|
||||
line_failures.line_number,
|
||||
&self.failures[line_failures.range.clone()],
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn push(&mut self, line_number: OneIndexed, messages: Vec<String>) {
|
||||
let start = self.failures.len();
|
||||
self.failures.extend(messages);
|
||||
self.lines.push(LineFailures {
|
||||
line_number,
|
||||
range: start..self.failures.len(),
|
||||
});
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.lines.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct LineFailures {
|
||||
line_number: OneIndexed,
|
||||
range: Range<usize>,
|
||||
}
|
||||
|
||||
pub(super) fn match_file<T>(
|
||||
db: &Db,
|
||||
file: File,
|
||||
diagnostics: impl IntoIterator<Item = T>,
|
||||
) -> Result<(), FailuresByLine>
|
||||
where
|
||||
T: Diagnostic + Clone,
|
||||
{
|
||||
// Parse assertions from comments in the file, and get diagnostics from the file; both
|
||||
// ordered by line number.
|
||||
let assertions = InlineFileAssertions::from_file(db, file);
|
||||
let diagnostics = SortedDiagnostics::new(diagnostics, &line_index(db, file));
|
||||
|
||||
// Get iterators over assertions and diagnostics grouped by line, in ascending line order.
|
||||
let mut line_assertions = assertions.into_iter();
|
||||
let mut line_diagnostics = diagnostics.iter_lines();
|
||||
|
||||
let mut current_assertions = line_assertions.next();
|
||||
let mut current_diagnostics = line_diagnostics.next();
|
||||
|
||||
let matcher = Matcher::from_file(db, file);
|
||||
let mut failures = FailuresByLine::default();
|
||||
|
||||
loop {
|
||||
match (¤t_assertions, ¤t_diagnostics) {
|
||||
(Some(assertions), Some(diagnostics)) => {
|
||||
match assertions.line_number.cmp(&diagnostics.line_number) {
|
||||
Ordering::Equal => {
|
||||
// We have assertions and diagnostics on the same line; check for
|
||||
// matches and error on any that don't match, then advance both
|
||||
// iterators.
|
||||
matcher
|
||||
.match_line(diagnostics, assertions)
|
||||
.unwrap_or_else(|messages| {
|
||||
failures.push(assertions.line_number, messages);
|
||||
});
|
||||
current_assertions = line_assertions.next();
|
||||
current_diagnostics = line_diagnostics.next();
|
||||
}
|
||||
Ordering::Less => {
|
||||
// We have assertions on an earlier line than diagnostics; report these
|
||||
// assertions as all unmatched, and advance the assertions iterator.
|
||||
failures.push(assertions.line_number, unmatched(assertions));
|
||||
current_assertions = line_assertions.next();
|
||||
}
|
||||
Ordering::Greater => {
|
||||
// We have diagnostics on an earlier line than assertions; report these
|
||||
// diagnostics as all unmatched, and advance the diagnostics iterator.
|
||||
failures.push(diagnostics.line_number, unmatched(diagnostics));
|
||||
current_diagnostics = line_diagnostics.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
(Some(assertions), None) => {
|
||||
// We've exhausted diagnostics but still have assertions; report these assertions
|
||||
// as unmatched and advance the assertions iterator.
|
||||
failures.push(assertions.line_number, unmatched(assertions));
|
||||
current_assertions = line_assertions.next();
|
||||
}
|
||||
(None, Some(diagnostics)) => {
|
||||
// We've exhausted assertions but still have diagnostics; report these
|
||||
// diagnostics as unmatched and advance the diagnostics iterator.
|
||||
failures.push(diagnostics.line_number, unmatched(diagnostics));
|
||||
current_diagnostics = line_diagnostics.next();
|
||||
}
|
||||
// When we've exhausted both diagnostics and assertions, break.
|
||||
(None, None) => break,
|
||||
}
|
||||
}
|
||||
|
||||
if failures.is_empty() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(failures)
|
||||
}
|
||||
}
|
||||
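The loop in `match_file` is a two-pointer merge over two streams already sorted by line number: when both sides have the same line, the entries are compared in detail; whichever side lags is reported as unmatched before advancing. The same control flow in isolation, as a runnable sketch over plain integers (not part of this diff):

    use std::cmp::Ordering;

    // Merge two ascending key streams, pairing equal keys and flagging keys
    // present on only one side; mirrors the control flow of `match_file`.
    fn merge(left: Vec<u32>, right: Vec<u32>) {
        let mut left = left.into_iter();
        let mut right = right.into_iter();
        let mut cur_left = left.next();
        let mut cur_right = right.next();
        loop {
            match (cur_left, cur_right) {
                (Some(l), Some(r)) => match l.cmp(&r) {
                    Ordering::Equal => {
                        println!("both sides have {l}: compare in detail");
                        cur_left = left.next();
                        cur_right = right.next();
                    }
                    Ordering::Less => {
                        println!("left-only {l}: unmatched");
                        cur_left = left.next();
                    }
                    Ordering::Greater => {
                        println!("right-only {r}: unmatched");
                        cur_right = right.next();
                    }
                },
                (Some(l), None) => {
                    println!("left-only {l}: unmatched");
                    cur_left = left.next();
                }
                (None, Some(r)) => {
                    println!("right-only {r}: unmatched");
                    cur_right = right.next();
                }
                (None, None) => break,
            }
        }
    }

    fn main() {
        merge(vec![1, 3, 4], vec![2, 3, 5]);
    }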
pub(super) trait Diagnostic: Ranged {
    fn rule(&self) -> &str;

    fn message(&self) -> &str;
}

impl Diagnostic for Arc<TypeCheckDiagnostic> {
    fn rule(&self) -> &str {
        self.as_ref().rule()
    }

    fn message(&self) -> &str {
        self.as_ref().message()
    }
}

trait Unmatched {
    fn unmatched(&self) -> String;
}

fn unmatched<'a, T: Unmatched + 'a>(unmatched: &'a [T]) -> Vec<String> {
    unmatched.iter().map(Unmatched::unmatched).collect()
}

trait UnmatchedWithColumn {
    fn unmatched_with_column(&self, column: OneIndexed) -> String;
}

impl Unmatched for Assertion<'_> {
    fn unmatched(&self) -> String {
        format!("{} {self}", "unmatched assertion:".red())
    }
}

fn maybe_add_undefined_reveal_clarification<T: Diagnostic>(
    diagnostic: &T,
    original: std::fmt::Arguments,
) -> String {
    if diagnostic.rule() == "undefined-reveal" {
        format!(
            "{} add a `# revealed` assertion on this line (original diagnostic: {original})",
            "used built-in `reveal_type`:".yellow()
        )
    } else {
        format!("{} {original}", "unexpected error:".red())
    }
}

impl<T> Unmatched for T
where
    T: Diagnostic,
{
    fn unmatched(&self) -> String {
        maybe_add_undefined_reveal_clarification(
            self,
            format_args!(r#"[{}] "{}""#, self.rule(), self.message()),
        )
    }
}

impl<T> UnmatchedWithColumn for T
where
    T: Diagnostic,
{
    fn unmatched_with_column(&self, column: OneIndexed) -> String {
        maybe_add_undefined_reveal_clarification(
            self,
            format_args!(r#"{column} [{}] "{}""#, self.rule(), self.message()),
        )
    }
}

struct Matcher {
    line_index: LineIndex,
    source: SourceText,
}

impl Matcher {
    fn from_file(db: &Db, file: File) -> Self {
        Self {
            line_index: line_index(db, file),
            source: source_text(db, file),
        }
    }

    /// Check a slice of [`Diagnostic`]s against a slice of [`Assertion`]s.
    ///
    /// Return vector of [`Unmatched`] for any unmatched diagnostics or assertions.
    fn match_line<'a, 'b, T: Diagnostic + 'a>(
        &self,
        diagnostics: &'a [T],
        assertions: &'a [Assertion<'b>],
    ) -> Result<(), Vec<String>>
    where
        'b: 'a,
    {
        let mut failures = vec![];
        let mut unmatched: Vec<_> = diagnostics.iter().collect();
        for assertion in assertions {
            if matches!(
                assertion,
                Assertion::Error(ErrorAssertion {
                    rule: None,
                    message_contains: None,
                    ..
                })
            ) {
                failures.push(format!(
                    "{} no rule or message text",
                    "invalid assertion:".red()
                ));
                continue;
            }
            if !self.matches(assertion, &mut unmatched) {
                failures.push(assertion.unmatched());
            }
        }
        for diagnostic in unmatched {
            failures.push(diagnostic.unmatched_with_column(self.column(diagnostic)));
        }
        if failures.is_empty() {
            Ok(())
        } else {
            Err(failures)
        }
    }

    fn column<T: Ranged>(&self, ranged: &T) -> OneIndexed {
        self.line_index
            .source_location(ranged.start(), &self.source)
            .column
    }

    /// Check if `assertion` matches any [`Diagnostic`]s in `unmatched`.
    ///
    /// If so, return `true` and remove the matched diagnostics from `unmatched`. Otherwise, return
    /// `false`.
    ///
    /// An `Error` assertion can only match one diagnostic; even if it could match more than one,
    /// we short-circuit after the first match.
    ///
    /// A `Revealed` assertion must match a revealed-type diagnostic, and may also match an
    /// undefined-reveal diagnostic, if present.
    fn matches<T: Diagnostic>(&self, assertion: &Assertion, unmatched: &mut Vec<&T>) -> bool {
        match assertion {
            Assertion::Error(error) => {
                let position = unmatched.iter().position(|diagnostic| {
                    !error.rule.is_some_and(|rule| rule != diagnostic.rule())
                        && !error
                            .column
                            .is_some_and(|col| col != self.column(*diagnostic))
                        && !error
                            .message_contains
                            .is_some_and(|needle| !diagnostic.message().contains(needle))
                });
                if let Some(position) = position {
                    unmatched.swap_remove(position);
                    true
                } else {
                    false
                }
            }
            Assertion::Revealed(expected_type) => {
                let mut matched_revealed_type = None;
                let mut matched_undefined_reveal = None;
                let expected_reveal_type_message = format!("Revealed type is `{expected_type}`");
                for (index, diagnostic) in unmatched.iter().enumerate() {
                    if matched_revealed_type.is_none()
                        && diagnostic.rule() == "revealed-type"
                        && diagnostic.message() == expected_reveal_type_message
                    {
                        matched_revealed_type = Some(index);
                    } else if matched_undefined_reveal.is_none()
                        && diagnostic.rule() == "undefined-reveal"
                    {
                        matched_undefined_reveal = Some(index);
                    }
                    if matched_revealed_type.is_some() && matched_undefined_reveal.is_some() {
                        break;
                    }
                }
                let mut idx = 0;
                unmatched.retain(|_| {
                    let retain =
                        Some(idx) != matched_revealed_type && Some(idx) != matched_undefined_reveal;
                    idx += 1;
                    retain
                });
                matched_revealed_type.is_some()
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::FailuresByLine;
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_python_trivia::textwrap::dedent;
    use ruff_source_file::OneIndexed;
    use ruff_text_size::{Ranged, TextRange};

    #[derive(Clone, Debug)]
    struct TestDiagnostic {
        rule: &'static str,
        message: &'static str,
        range: TextRange,
    }

    impl TestDiagnostic {
        fn new(rule: &'static str, message: &'static str, offset: usize) -> Self {
            let offset: u32 = offset.try_into().unwrap();
            Self {
                rule,
                message,
                range: TextRange::new(offset.into(), (offset + 1).into()),
            }
        }
    }

    impl super::Diagnostic for TestDiagnostic {
        fn rule(&self) -> &str {
            self.rule
        }

        fn message(&self) -> &str {
            self.message
        }
    }

    impl Ranged for TestDiagnostic {
        fn range(&self) -> ruff_text_size::TextRange {
            self.range
        }
    }

    fn get_result(source: &str, diagnostics: Vec<TestDiagnostic>) -> Result<(), FailuresByLine> {
        colored::control::set_override(false);

        let mut db = crate::db::Db::setup(SystemPathBuf::from("/src"));
        db.write_file("/src/test.py", source).unwrap();
        let file = system_path_to_file(&db, "/src/test.py").unwrap();

        super::match_file(&db, file, diagnostics)
    }

    fn assert_fail(result: Result<(), FailuresByLine>, messages: &[(usize, &[&str])]) {
        let Err(failures) = result else {
            panic!("expected a failure");
        };

        let expected: Vec<(OneIndexed, Vec<String>)> = messages
            .iter()
            .map(|(idx, msgs)| {
                (
                    OneIndexed::from_zero_indexed(*idx),
                    msgs.iter().map(ToString::to_string).collect(),
                )
            })
            .collect();
        let failures: Vec<(OneIndexed, Vec<String>)> = failures
            .iter()
            .map(|(idx, msgs)| (idx, msgs.to_vec()))
            .collect();

        assert_eq!(failures, expected);
    }

    fn assert_ok(result: &Result<(), FailuresByLine>) {
        assert!(result.is_ok(), "{result:?}");
    }

    #[test]
    fn revealed_match() {
        let result = get_result(
            "x # revealed: Foo",
            vec![TestDiagnostic::new(
                "revealed-type",
                "Revealed type is `Foo`",
                0,
            )],
        );

        assert_ok(&result);
    }

    #[test]
    fn revealed_wrong_rule() {
        let result = get_result(
            "x # revealed: Foo",
            vec![TestDiagnostic::new(
                "not-revealed-type",
                "Revealed type is `Foo`",
                0,
            )],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: revealed: Foo",
                    r#"unexpected error: 1 [not-revealed-type] "Revealed type is `Foo`""#,
                ],
            )],
        );
    }

    #[test]
    fn revealed_wrong_message() {
        let result = get_result(
            "x # revealed: Foo",
            vec![TestDiagnostic::new("revealed-type", "Something else", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: revealed: Foo",
                    r#"unexpected error: 1 [revealed-type] "Something else""#,
                ],
            )],
        );
    }

    #[test]
    fn revealed_unmatched() {
        let result = get_result("x # revealed: Foo", vec![]);

        assert_fail(result, &[(0, &["unmatched assertion: revealed: Foo"])]);
    }

    #[test]
    fn revealed_match_with_undefined() {
        let result = get_result(
            "x # revealed: Foo",
            vec![
                TestDiagnostic::new("revealed-type", "Revealed type is `Foo`", 0),
                TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
            ],
        );

        assert_ok(&result);
    }

    #[test]
    fn revealed_match_with_only_undefined() {
        let result = get_result(
            "x # revealed: Foo",
            vec![TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0)],
        );

        assert_fail(result, &[(0, &["unmatched assertion: revealed: Foo"])]);
    }

    #[test]
    fn revealed_mismatch_with_undefined() {
        let result = get_result(
            "x # revealed: Foo",
            vec![
                TestDiagnostic::new("revealed-type", "Revealed type is `Bar`", 0),
                TestDiagnostic::new("undefined-reveal", "Doesn't matter", 0),
            ],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: revealed: Foo",
                    r#"unexpected error: 1 [revealed-type] "Revealed type is `Bar`""#,
                ],
            )],
        );
    }

    #[test]
    fn undefined_reveal_type_unmatched() {
        let result = get_result(
            "reveal_type(1)",
            vec![
                TestDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
                TestDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
            ],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "used built-in `reveal_type`: add a `# revealed` assertion on this line (\
                     original diagnostic: [undefined-reveal] \"undefined reveal message\")",
                    r#"unexpected error: [revealed-type] "Revealed type is `Literal[1]`""#,
                ],
            )],
        );
    }

    #[test]
    fn undefined_reveal_type_mismatched() {
        let result = get_result(
            "reveal_type(1) # error: [something-else]",
            vec![
                TestDiagnostic::new("undefined-reveal", "undefined reveal message", 0),
                TestDiagnostic::new("revealed-type", "Revealed type is `Literal[1]`", 12),
            ],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: error: [something-else]",
                    "used built-in `reveal_type`: add a `# revealed` assertion on this line (\
                     original diagnostic: 1 [undefined-reveal] \"undefined reveal message\")",
                    r#"unexpected error: 13 [revealed-type] "Revealed type is `Literal[1]`""#,
                ],
            )],
        );
    }

    #[test]
    fn error_unmatched() {
        let result = get_result("x # error: [rule]", vec![]);

        assert_fail(result, &[(0, &["unmatched assertion: error: [rule]"])]);
    }

    #[test]
    fn error_match_rule() {
        let result = get_result(
            "x # error: [some-rule]",
            vec![TestDiagnostic::new("some-rule", "Any message", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_wrong_rule() {
        let result = get_result(
            "x # error: [some-rule]",
            vec![TestDiagnostic::new("anything", "Any message", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: error: [some-rule]",
                    r#"unexpected error: 1 [anything] "Any message""#,
                ],
            )],
        );
    }

    #[test]
    fn error_match_message() {
        let result = get_result(
            r#"x # error: "contains this""#,
            vec![TestDiagnostic::new("anything", "message contains this", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_wrong_message() {
        let result = get_result(
            r#"x # error: "contains this""#,
            vec![TestDiagnostic::new("anything", "Any message", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    r#"unmatched assertion: error: "contains this""#,
                    r#"unexpected error: 1 [anything] "Any message""#,
                ],
            )],
        );
    }

    #[test]
    fn error_match_column_and_rule() {
        let result = get_result(
            "x # error: 1 [some-rule]",
            vec![TestDiagnostic::new("some-rule", "Any message", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_wrong_column() {
        let result = get_result(
            "x # error: 2 [rule]",
            vec![TestDiagnostic::new("rule", "Any message", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "unmatched assertion: error: 2 [rule]",
                    r#"unexpected error: 1 [rule] "Any message""#,
                ],
            )],
        );
    }

    #[test]
    fn error_match_column_and_message() {
        let result = get_result(
            r#"x # error: 1 "contains this""#,
            vec![TestDiagnostic::new("anything", "message contains this", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_match_rule_and_message() {
        let result = get_result(
            r#"x # error: [a-rule] "contains this""#,
            vec![TestDiagnostic::new("a-rule", "message contains this", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_match_all() {
        let result = get_result(
            r#"x # error: 1 [a-rule] "contains this""#,
            vec![TestDiagnostic::new("a-rule", "message contains this", 0)],
        );

        assert_ok(&result);
    }

    #[test]
    fn error_match_all_wrong_column() {
        let result = get_result(
            r#"x # error: 2 [some-rule] "contains this""#,
            vec![TestDiagnostic::new("some-rule", "message contains this", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    r#"unmatched assertion: error: 2 [some-rule] "contains this""#,
                    r#"unexpected error: 1 [some-rule] "message contains this""#,
                ],
            )],
        );
    }

    #[test]
    fn error_match_all_wrong_rule() {
        let result = get_result(
            r#"x # error: 1 [some-rule] "contains this""#,
            vec![TestDiagnostic::new(
                "other-rule",
                "message contains this",
                0,
            )],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    r#"unmatched assertion: error: 1 [some-rule] "contains this""#,
                    r#"unexpected error: 1 [other-rule] "message contains this""#,
                ],
            )],
        );
    }

    #[test]
    fn error_match_all_wrong_message() {
        let result = get_result(
            r#"x # error: 1 [some-rule] "contains this""#,
            vec![TestDiagnostic::new("some-rule", "Any message", 0)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    r#"unmatched assertion: error: 1 [some-rule] "contains this""#,
                    r#"unexpected error: 1 [some-rule] "Any message""#,
                ],
            )],
        );
    }

    #[test]
    fn interspersed_matches_and_mismatches() {
        let source = dedent(
            r#"
            1 # error: [line-one]
            2
            3 # error: [line-three]
            4 # error: [line-four]
            5
            6: # error: [line-six]
            "#,
        );
        let two = source.find('2').unwrap();
        let three = source.find('3').unwrap();
        let five = source.find('5').unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("line-two", "msg", two),
                TestDiagnostic::new("line-three", "msg", three),
                TestDiagnostic::new("line-five", "msg", five),
            ],
        );

        assert_fail(
            result,
            &[
                (1, &["unmatched assertion: error: [line-one]"]),
                (2, &[r#"unexpected error: [line-two] "msg""#]),
                (4, &["unmatched assertion: error: [line-four]"]),
                (5, &[r#"unexpected error: [line-five] "msg""#]),
                (6, &["unmatched assertion: error: [line-six]"]),
            ],
        );
    }

    #[test]
    fn more_diagnostics_than_assertions() {
        let source = dedent(
            r#"
            1 # error: [line-one]
            2
            "#,
        );
        let one = source.find('1').unwrap();
        let two = source.find('2').unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("line-one", "msg", one),
                TestDiagnostic::new("line-two", "msg", two),
            ],
        );

        assert_fail(result, &[(2, &[r#"unexpected error: [line-two] "msg""#])]);
    }

    #[test]
    fn multiple_assertions_and_diagnostics_same_line() {
        let source = dedent(
            "
            # error: [one-rule]
            # error: [other-rule]
            x
            ",
        );
        let x = source.find('x').unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("one-rule", "msg", x),
                TestDiagnostic::new("other-rule", "msg", x),
            ],
        );

        assert_ok(&result);
    }

    #[test]
    fn multiple_assertions_and_diagnostics_same_line_all_same() {
        let source = dedent(
            "
            # error: [one-rule]
            # error: [one-rule]
            x
            ",
        );
        let x = source.find('x').unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("one-rule", "msg", x),
                TestDiagnostic::new("one-rule", "msg", x),
            ],
        );

        assert_ok(&result);
    }

    #[test]
    fn multiple_assertions_and_diagnostics_same_line_mismatch() {
        let source = dedent(
            "
            # error: [one-rule]
            # error: [other-rule]
            x
            ",
        );
        let x = source.find('x').unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("one-rule", "msg", x),
                TestDiagnostic::new("other-rule", "msg", x),
                TestDiagnostic::new("third-rule", "msg", x),
            ],
        );

        assert_fail(
            result,
            &[(3, &[r#"unexpected error: 1 [third-rule] "msg""#])],
        );
    }

    #[test]
    fn parenthesized_expression() {
        let source = dedent(
            "
            a = b + (
                # error: [undefined-reveal]
                reveal_type(5) # revealed: Literal[5]
            )
            ",
        );
        let reveal = source.find("reveal_type").unwrap();
        let result = get_result(
            &source,
            vec![
                TestDiagnostic::new("undefined-reveal", "msg", reveal),
                TestDiagnostic::new("revealed-type", "Revealed type is `Literal[5]`", reveal),
            ],
        );

        assert_ok(&result);
    }

    #[test]
    fn bare_error_assertion_not_allowed() {
        let source = "x # error:";
        let x = source.find('x').unwrap();
        let result = get_result(
            source,
            vec![TestDiagnostic::new("some-rule", "some message", x)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "invalid assertion: no rule or message text",
                    r#"unexpected error: 1 [some-rule] "some message""#,
                ],
            )],
        );
    }

    #[test]
    fn column_only_error_assertion_not_allowed() {
        let source = "x # error: 1";
        let x = source.find('x').unwrap();
        let result = get_result(
            source,
            vec![TestDiagnostic::new("some-rule", "some message", x)],
        );

        assert_fail(
            result,
            &[(
                0,
                &[
                    "invalid assertion: no rule or message text",
                    r#"unexpected error: 1 [some-rule] "some message""#,
                ],
            )],
        );
    }
}
576 crates/red_knot_test/src/parser.rs Normal file
@@ -0,0 +1,576 @@
use once_cell::sync::Lazy;
use regex::{Captures, Regex};
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::{FxHashMap, FxHashSet};

/// Parse the Markdown `source` as a test suite with given `title`.
pub(crate) fn parse<'s>(title: &'s str, source: &'s str) -> anyhow::Result<MarkdownTestSuite<'s>> {
    let parser = Parser::new(title, source);
    parser.parse()
}

/// A parsed markdown file containing tests.
///
/// Borrows from the source string and filepath it was created from.
#[derive(Debug)]
pub(crate) struct MarkdownTestSuite<'s> {
    /// Header sections.
    sections: IndexVec<SectionId, Section<'s>>,

    /// Test files embedded within the Markdown file.
    files: IndexVec<EmbeddedFileId, EmbeddedFile<'s>>,
}

impl<'s> MarkdownTestSuite<'s> {
    pub(crate) fn tests(&self) -> MarkdownTestIterator<'_, 's> {
        MarkdownTestIterator {
            suite: self,
            current_file_index: 0,
        }
    }
}

/// A single test inside a [`MarkdownTestSuite`].
///
/// A test is a single header section (or the implicit root section, if there are no Markdown
/// headers in the file), containing one or more embedded Python files as fenced code blocks, and
/// containing no nested header subsections.
#[derive(Debug)]
pub(crate) struct MarkdownTest<'m, 's> {
    suite: &'m MarkdownTestSuite<'s>,
    section: &'m Section<'s>,
    files: &'m [EmbeddedFile<'s>],
}

impl<'m, 's> MarkdownTest<'m, 's> {
    pub(crate) fn name(&self) -> String {
        let mut name = String::new();
        let mut parent_id = self.section.parent_id;
        while let Some(next_id) = parent_id {
            let parent = &self.suite.sections[next_id];
            parent_id = parent.parent_id;
            if !name.is_empty() {
                name.insert_str(0, " - ");
            }
            name.insert_str(0, parent.title);
        }
        if !name.is_empty() {
            name.push_str(" - ");
        }
        name.push_str(self.section.title);
        name
    }

    pub(crate) fn files(&self) -> impl Iterator<Item = &'m EmbeddedFile<'s>> {
        self.files.iter()
    }
}

/// Iterator yielding all [`MarkdownTest`]s in a [`MarkdownTestSuite`].
#[derive(Debug)]
pub(crate) struct MarkdownTestIterator<'m, 's> {
    suite: &'m MarkdownTestSuite<'s>,
    current_file_index: usize,
}

impl<'m, 's> Iterator for MarkdownTestIterator<'m, 's> {
    type Item = MarkdownTest<'m, 's>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut current_file_index = self.current_file_index;
        let mut file = self.suite.files.get(current_file_index.into());
        let section_id = file?.section;
        while file.is_some_and(|file| file.section == section_id) {
            current_file_index += 1;
            file = self.suite.files.get(current_file_index.into());
        }
        let files = &self.suite.files[EmbeddedFileId::from_usize(self.current_file_index)
            ..EmbeddedFileId::from_usize(current_file_index)];
        self.current_file_index = current_file_index;
        Some(MarkdownTest {
            suite: self.suite,
            section: &self.suite.sections[section_id],
            files,
        })
    }
}
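The iterator above relies on `files` being grouped by section in insertion order: it scans forward while the section id is unchanged and yields one contiguous slice per test. The same chunking in miniature, as a runnable sketch over plain pairs (not part of this diff):

    // Yield one contiguous slice per run of equal keys, as the iterator above
    // does for embedded files sharing a section id.
    fn chunks_by_key(items: &[(u32, &str)]) {
        let mut start = 0;
        while start < items.len() {
            let key = items[start].0;
            let mut end = start;
            while end < items.len() && items[end].0 == key {
                end += 1;
            }
            println!("section {key}: {:?}", &items[start..end]);
            start = end;
        }
    }

    fn main() {
        chunks_by_key(&[(0, "test.py"), (0, "other.py"), (1, "test.py")]);
    }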
#[newtype_index]
struct SectionId;

/// A single header section of a [`MarkdownTestSuite`], or the implicit root "section".
///
/// A header section is the part of a Markdown file beginning with a `#`-prefixed header line, and
/// extending until the next header line at the same or higher outline level (that is, with the
/// same number or fewer `#` characters).
///
/// A header section may either contain one or more embedded Python files (making it a
/// [`MarkdownTest`]), or it may contain nested sections (headers with more `#` characters), but
/// not both.
#[derive(Debug)]
struct Section<'s> {
    title: &'s str,
    level: u8,
    parent_id: Option<SectionId>,
}

#[newtype_index]
struct EmbeddedFileId;

/// A single file embedded in a [`Section`] as a fenced code block.
///
/// Currently must be a Python file (`py` language) or type stub (`pyi`). In the future we plan to
/// support other kinds of files as well (TOML configuration, typeshed VERSIONS, `pth` files...).
///
/// A Python embedded file makes its containing [`Section`] into a [`MarkdownTest`], and will be
/// type-checked and searched for inline-comment assertions to match against the diagnostics from
/// type checking.
#[derive(Debug)]
pub(crate) struct EmbeddedFile<'s> {
    section: SectionId,
    pub(crate) path: &'s str,
    pub(crate) lang: &'s str,
    pub(crate) code: &'s str,
}

/// Matches an arbitrary amount of whitespace (including newlines), followed by a sequence of `#`
/// characters, followed by a title heading, followed by a newline.
static HEADER_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^(\s*\n)*(?<level>#+)\s+(?<title>.+)\s*\n").unwrap());

/// Matches a code block fenced by triple backticks, possibly with language and `key=val`
/// configuration items following the opening backticks (in the "tag string" of the code block).
static CODE_RE: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"^```(?<lang>\w+)(?<config>( +\S+)*)\s*\n(?<code>(.|\n)*?)\n```\s*\n").unwrap()
});
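Both patterns are anchored at the start of the unparsed input and are only ever applied there by `scan` (below), so parsing is strictly linear. A quick check of what `HEADER_RE` captures, as a runnable sketch assuming the same `regex` crate (1.9 or later, which supports the `(?<name>...)` group syntax used here):

    use regex::Regex;

    fn main() {
        // Same pattern as HEADER_RE above.
        let header = Regex::new(r"^(\s*\n)*(?<level>#+)\s+(?<title>.+)\s*\n").unwrap();

        let caps = header.captures("\n## Some test\nbody text\n").unwrap();
        assert_eq!(&caps["level"], "##"); // two '#': a level-2 header
        assert_eq!(&caps["title"], "Some test");

        // A fenced code block is not a header: no match, so the parser falls
        // through to CODE_RE (or skips the line).
        assert!(header.captures("```py\n").is_none());
    }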
#[derive(Debug)]
struct SectionStack(Vec<SectionId>);

impl SectionStack {
    fn new(root_section_id: SectionId) -> Self {
        Self(vec![root_section_id])
    }

    fn push(&mut self, section_id: SectionId) {
        self.0.push(section_id);
    }

    fn pop(&mut self) -> Option<SectionId> {
        let popped = self.0.pop();
        debug_assert_ne!(popped, None, "Should never pop the implicit root section");
        debug_assert!(
            !self.0.is_empty(),
            "Should never pop the implicit root section"
        );
        popped
    }

    fn parent(&mut self) -> SectionId {
        *self
            .0
            .last()
            .expect("Should never pop the implicit root section")
    }
}

/// Parse the source of a Markdown file into a [`MarkdownTestSuite`].
#[derive(Debug)]
struct Parser<'s> {
    /// [`Section`]s of the final [`MarkdownTestSuite`].
    sections: IndexVec<SectionId, Section<'s>>,

    /// [`EmbeddedFile`]s of the final [`MarkdownTestSuite`].
    files: IndexVec<EmbeddedFileId, EmbeddedFile<'s>>,

    /// The unparsed remainder of the Markdown source.
    unparsed: &'s str,

    /// Stack of ancestor sections.
    stack: SectionStack,

    /// Names of embedded files in current active section.
    current_section_files: Option<FxHashSet<&'s str>>,
}

impl<'s> Parser<'s> {
    fn new(title: &'s str, source: &'s str) -> Self {
        let mut sections = IndexVec::default();
        let root_section_id = sections.push(Section {
            title,
            level: 0,
            parent_id: None,
        });
        Self {
            sections,
            files: IndexVec::default(),
            unparsed: source,
            stack: SectionStack::new(root_section_id),
            current_section_files: None,
        }
    }

    fn parse(mut self) -> anyhow::Result<MarkdownTestSuite<'s>> {
        self.parse_impl()?;
        Ok(self.finish())
    }

    fn finish(mut self) -> MarkdownTestSuite<'s> {
        self.sections.shrink_to_fit();
        self.files.shrink_to_fit();

        MarkdownTestSuite {
            sections: self.sections,
            files: self.files,
        }
    }

    fn parse_impl(&mut self) -> anyhow::Result<()> {
        while !self.unparsed.is_empty() {
            if let Some(captures) = self.scan(&HEADER_RE) {
                self.parse_header(&captures)?;
            } else if let Some(captures) = self.scan(&CODE_RE) {
                self.parse_code_block(&captures)?;
            } else {
                // ignore other Markdown syntax (paragraphs, etc) used as comments in the test
                if let Some(next_newline) = self.unparsed.find('\n') {
                    (_, self.unparsed) = self.unparsed.split_at(next_newline + 1);
                } else {
                    break;
                }
            }
        }

        Ok(())
    }

    fn parse_header(&mut self, captures: &Captures<'s>) -> anyhow::Result<()> {
        let header_level = captures["level"].len();
        self.pop_sections_to_level(header_level);

        let parent = self.stack.parent();

        let section = Section {
            // HEADER_RE can't match without a match for group 'title'.
            title: captures.name("title").unwrap().into(),
            level: header_level.try_into()?,
            parent_id: Some(parent),
        };

        if self.current_section_files.is_some() {
            return Err(anyhow::anyhow!(
                "Header '{}' not valid inside a test case; parent '{}' has code files.",
                section.title,
                self.sections[parent].title,
            ));
        }

        let section_id = self.sections.push(section);
        self.stack.push(section_id);

        self.current_section_files = None;

        Ok(())
    }

    fn parse_code_block(&mut self, captures: &Captures<'s>) -> anyhow::Result<()> {
        // We never pop the implicit root section.
        let parent = self.stack.parent();

        let mut config: FxHashMap<&'s str, &'s str> = FxHashMap::default();

        if let Some(config_match) = captures.name("config") {
            for item in config_match.as_str().split_whitespace() {
                let mut parts = item.split('=');
                let key = parts.next().unwrap();
                let Some(val) = parts.next() else {
                    return Err(anyhow::anyhow!("Invalid config item `{}`.", item));
                };
                if parts.next().is_some() {
                    return Err(anyhow::anyhow!("Invalid config item `{}`.", item));
                }
                if config.insert(key, val).is_some() {
                    return Err(anyhow::anyhow!("Duplicate config item `{}`.", item));
                }
            }
        }

        let path = config.get("path").copied().unwrap_or("test.py");

        self.files.push(EmbeddedFile {
            path,
            section: parent,
            // CODE_RE can't match without matches for 'lang' and 'code'.
            lang: captures.name("lang").unwrap().into(),
            code: captures.name("code").unwrap().into(),
        });

        if let Some(current_files) = &mut self.current_section_files {
            if !current_files.insert(path) {
                if path == "test.py" {
                    return Err(anyhow::anyhow!(
                        "Test `{}` has duplicate files named `{path}`. \
                         (This is the default filename; \
                         consider giving some files an explicit name with `path=...`.)",
                        self.sections[parent].title
                    ));
                }
                return Err(anyhow::anyhow!(
                    "Test `{}` has duplicate files named `{path}`.",
                    self.sections[parent].title
                ));
            };
        } else {
            self.current_section_files = Some(FxHashSet::from_iter([path]));
        }

        Ok(())
    }

    fn pop_sections_to_level(&mut self, level: usize) {
        while level <= self.sections[self.stack.parent()].level.into() {
            self.stack.pop();
            // We would have errored before pushing a child section if there were files, so we know
            // no parent section can have files.
            self.current_section_files = None;
        }
    }

    /// Get capture groups and advance cursor past match if unparsed text matches `pattern`.
    fn scan(&mut self, pattern: &Regex) -> Option<Captures<'s>> {
        if let Some(captures) = pattern.captures(self.unparsed) {
            let (_, unparsed) = self.unparsed.split_at(captures.get(0).unwrap().end());
            self.unparsed = unparsed;
            Some(captures)
        } else {
            None
        }
    }
}

#[cfg(test)]
mod tests {
    use ruff_python_trivia::textwrap::dedent;

    #[test]
    fn empty() {
        let mf = super::parse("file.md", "").unwrap();

        assert!(mf.tests().next().is_none());
    }

    #[test]
    fn single_file_test() {
        let source = dedent(
            "
            ```py
            x = 1
            ```
            ",
        );
        let mf = super::parse("file.md", &source).unwrap();

        let [test] = &mf.tests().collect::<Vec<_>>()[..] else {
            panic!("expected one test");
        };

        assert_eq!(test.name(), "file.md");

        let [file] = test.files().collect::<Vec<_>>()[..] else {
            panic!("expected one file");
        };

        assert_eq!(file.path, "test.py");
        assert_eq!(file.lang, "py");
        assert_eq!(file.code, "x = 1");
    }

    #[test]
    fn multiple_tests() {
        let source = dedent(
            "
            # One

            ```py
            x = 1
            ```

            # Two

            ```py
            y = 2
            ```
            ",
        );
        let mf = super::parse("file.md", &source).unwrap();

        let [test1, test2] = &mf.tests().collect::<Vec<_>>()[..] else {
            panic!("expected two tests");
        };

        assert_eq!(test1.name(), "file.md - One");
        assert_eq!(test2.name(), "file.md - Two");

        let [file] = test1.files().collect::<Vec<_>>()[..] else {
            panic!("expected one file");
        };

        assert_eq!(file.path, "test.py");
        assert_eq!(file.lang, "py");
        assert_eq!(file.code, "x = 1");

        let [file] = test2.files().collect::<Vec<_>>()[..] else {
            panic!("expected one file");
        };

        assert_eq!(file.path, "test.py");
        assert_eq!(file.lang, "py");
        assert_eq!(file.code, "y = 2");
    }

    #[test]
    fn custom_file_path() {
        let source = dedent(
            "
            ```py path=foo.py
            x = 1
            ```
            ",
        );
        let mf = super::parse("file.md", &source).unwrap();

        let [test] = &mf.tests().collect::<Vec<_>>()[..] else {
            panic!("expected one test");
        };
        let [file] = test.files().collect::<Vec<_>>()[..] else {
            panic!("expected one file");
        };

        assert_eq!(file.path, "foo.py");
        assert_eq!(file.lang, "py");
        assert_eq!(file.code, "x = 1");
    }

    #[test]
    fn multi_line_file() {
        let source = dedent(
            "
            ```py
            x = 1
            y = 2
            ```
            ",
        );
        let mf = super::parse("file.md", &source).unwrap();

        let [test] = &mf.tests().collect::<Vec<_>>()[..] else {
            panic!("expected one test");
        };
        let [file] = test.files().collect::<Vec<_>>()[..] else {
            panic!("expected one file");
        };

        assert_eq!(file.code, "x = 1\ny = 2");
    }

    #[test]
    fn no_header_inside_test() {
        let source = dedent(
            "
            # One

            ```py
            x = 1
            ```

            ## Two
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(
            err.to_string(),
            "Header 'Two' not valid inside a test case; parent 'One' has code files."
        );
    }

    #[test]
    fn invalid_config_item_no_equals() {
        let source = dedent(
            "
            ```py foo
            x = 1
            ```
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(err.to_string(), "Invalid config item `foo`.");
    }

    #[test]
    fn invalid_config_item_too_many_equals() {
        let source = dedent(
            "
            ```py foo=bar=baz
            x = 1
            ```
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(err.to_string(), "Invalid config item `foo=bar=baz`.");
    }

    #[test]
    fn invalid_config_item_duplicate() {
        let source = dedent(
            "
            ```py foo=bar foo=baz
            x = 1
            ```
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(err.to_string(), "Duplicate config item `foo=baz`.");
    }

    #[test]
    fn no_duplicate_name_files_in_test() {
        let source = dedent(
            "
            ```py
            x = 1
            ```

            ```py
            y = 2
            ```
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(
            err.to_string(),
            "Test `file.md` has duplicate files named `test.py`. \
             (This is the default filename; consider giving some files an explicit name \
             with `path=...`.)"
        );
    }

    #[test]
    fn no_duplicate_name_files_in_test_non_default() {
        let source = dedent(
            "
            ```py path=foo.py
            x = 1
            ```

            ```py path=foo.py
            y = 2
            ```
            ",
        );
        let err = super::parse("file.md", &source).expect_err("Should fail to parse");
        assert_eq!(
            err.to_string(),
            "Test `file.md` has duplicate files named `foo.py`."
        );
    }
}
32 crates/red_knot_vendored/Cargo.toml Normal file
@@ -0,0 +1,32 @@
[package]
name = "red_knot_vendored"
version = "0.0.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[dependencies]
ruff_db = { workspace = true }
once_cell = { workspace = true }
zip = { workspace = true }

[build-dependencies]
path-slash = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true, features = ["zstd", "deflate"] }

[dev-dependencies]
walkdir = { workspace = true }

[features]
zstd = ["zip/zstd"]
deflate = ["zip/deflate"]

[lints]
workspace = true
@@ -1,9 +1,5 @@
-# Red Knot
-
-Semantic analysis for the red-knot project.
-
-## Vendored types for the stdlib
+# Vendored types for the stdlib

-This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_python_semantic/vendor/typeshed`. The file `crates/red_knot_python_semantic/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.
+This crate vendors [typeshed](https://github.com/python/typeshed)'s stubs for the standard library. The vendored stubs can be found in `crates/red_knot_vendored/vendor/typeshed`. The file `crates/red_knot_vendored/vendor/typeshed/source_commit.txt` tells you the typeshed commit that our vendored stdlib stubs currently correspond to.

 The typeshed stubs are updated every two weeks via an automated PR using the `sync_typeshed.yaml` workflow in the `.github/workflows` directory. This workflow can also be triggered at any time via [workflow dispatch](https://docs.github.com/en/actions/using-workflows/manually-running-a-workflow#running-a-workflow).
89 crates/red_knot_vendored/build.rs Normal file
@@ -0,0 +1,89 @@
//! Build script to package our vendored typeshed files
//! into a zip archive that can be included in the Ruff binary.
//!
//! This script should be automatically run at build time
//! whenever the script itself changes, or whenever any files
//! in `crates/red_knot_vendored/vendor/typeshed` change.

use std::fs::File;
use std::path::Path;

use path_slash::PathExt;
use zip::result::ZipResult;
use zip::write::{FileOptions, ZipWriter};
use zip::CompressionMethod;

const TYPESHED_SOURCE_DIR: &str = "vendor/typeshed";
const TYPESHED_ZIP_LOCATION: &str = "/zipped_typeshed.zip";

/// Recursively zip the contents of an entire directory.
///
/// This routine is adapted from a recipe at
/// <https://github.com/zip-rs/zip-old/blob/5d0f198124946b7be4e5969719a7f29f363118cd/examples/write_dir.rs>
fn zip_dir(directory_path: &str, writer: File) -> ZipResult<File> {
    let mut zip = ZipWriter::new(writer);

    // Use deflated compression for WASM builds because compiling `zstd-sys` requires clang
    // [source](https://github.com/gyscos/zstd-rs/wiki/Compile-for-WASM) which complicates the build
    // by a lot. Deflated compression is slower but it shouldn't matter much for the WASM use case
    // (WASM itself is already slower than a native build for a specific platform).
    // We can't use `#[cfg(...)]` here because the target-arch in a build script is the
    // architecture of the system running the build script and not the architecture of the build-target.
    // That's why we use the `TARGET` environment variable here.
    let method = if cfg!(feature = "zstd") {
        CompressionMethod::Zstd
    } else if cfg!(feature = "deflate") {
        CompressionMethod::Deflated
    } else {
        CompressionMethod::Stored
    };

    let options = FileOptions::default()
        .compression_method(method)
        .unix_permissions(0o644);

    for entry in walkdir::WalkDir::new(directory_path) {
        let dir_entry = entry.unwrap();
        let absolute_path = dir_entry.path();
        let normalized_relative_path = absolute_path
            .strip_prefix(Path::new(directory_path))
            .unwrap()
            .to_slash()
            .expect("Unexpected non-utf8 typeshed path!");

        // Write file or directory explicitly
        // Some unzip tools unzip files with directory paths correctly, some do not!
        if absolute_path.is_file() {
            println!("adding file {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.start_file(normalized_relative_path, options)?;
            let mut f = File::open(absolute_path)?;
            std::io::copy(&mut f, &mut zip).unwrap();
        } else if !normalized_relative_path.is_empty() {
            // Only if not root! Avoids path spec / warning
            // and mapname conversion failed error on unzip
            println!("adding dir {absolute_path:?} as {normalized_relative_path:?} ...");
            zip.add_directory(normalized_relative_path, options)?;
        }
    }
    zip.finish()
}

fn main() {
    println!("cargo:rerun-if-changed={TYPESHED_SOURCE_DIR}");
    assert!(
        Path::new(TYPESHED_SOURCE_DIR).is_dir(),
        "Where is typeshed?"
    );
    let out_dir = std::env::var("OUT_DIR").unwrap();

    // N.B. Deliberately using `format!()` instead of `Path::join()` here,
    // so that we use `/` as a path separator on all platforms.
    // That enables us to load the typeshed zip at compile time in `module.rs`
    // (otherwise we'd have to dynamically determine the exact path to the typeshed zip
    // based on the default path separator for the specific platform we're on,
    // which can't be done at compile time.)
    let zipped_typeshed_location = format!("{out_dir}{TYPESHED_ZIP_LOCATION}");

    let zipped_typeshed = File::create(zipped_typeshed_location).unwrap();
    zip_dir(TYPESHED_SOURCE_DIR, zipped_typeshed).unwrap();
}
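The archive written by `zip_dir` can be read back with the same `zip` crate. A sketch that lists the first few entries of the generated file (illustrative only; `OUT_DIR` is set by cargo during builds, and the path format mirrors the one used in `main` above):

    use std::fs::File;
    use zip::ZipArchive;

    fn main() -> zip::result::ZipResult<()> {
        // Same location as build.rs writes: {OUT_DIR}/zipped_typeshed.zip.
        let out_dir = std::env::var("OUT_DIR").expect("set by cargo for build scripts");
        let file = File::open(format!("{out_dir}/zipped_typeshed.zip"))?;
        let mut archive = ZipArchive::new(file)?;
        for i in 0..archive.len().min(5) {
            let entry = archive.by_index(i)?;
            println!("{} ({} bytes)", entry.name(), entry.size());
        }
        Ok(())
    }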
@@ -6,7 +6,7 @@ use ruff_db::vendored::VendoredFileSystem;
 // Luckily this crate will fail to build if this file isn't available at build time.
 static TYPESHED_ZIP_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/zipped_typeshed.zip"));

-pub fn vendored_typeshed_stubs() -> &'static VendoredFileSystem {
+pub fn file_system() -> &'static VendoredFileSystem {
     static VENDORED_TYPESHED_STUBS: Lazy<VendoredFileSystem> =
         Lazy::new(|| VendoredFileSystem::new_static(TYPESHED_ZIP_BYTES).unwrap());
     &VENDORED_TYPESHED_STUBS
@@ -42,7 +42,7 @@ mod tests {
     #[test]
     fn typeshed_vfs_consistent_with_vendored_stubs() {
         let vendored_typeshed_dir = Path::new("vendor/typeshed").canonicalize().unwrap();
-        let vendored_typeshed_stubs = vendored_typeshed_stubs();
+        let vendored_typeshed_stubs = file_system();

         let mut empty_iterator = true;
         for entry in walkdir::WalkDir::new(&vendored_typeshed_dir).min_depth(1) {
1 crates/red_knot_vendored/vendor/typeshed/source_commit.txt vendored Normal file
@@ -0,0 +1 @@
91a58b07cdd807b1d965e04ba85af2adab8bf924
@@ -41,7 +41,7 @@ _json: 3.0-
 _locale: 3.0-
 _lsprof: 3.0-
 _markupbase: 3.0-
-_msi: 3.0-
+_msi: 3.0-3.12
 _operator: 3.4-
 _osx_support: 3.0-
 _posixsubprocess: 3.2-
@@ -161,6 +161,8 @@ importlib.metadata._meta: 3.10-
 importlib.metadata.diagnose: 3.13-
 importlib.readers: 3.10-
 importlib.resources: 3.7-
+importlib.resources._common: 3.11-
+importlib.resources._functional: 3.13-
 importlib.resources.abc: 3.11-
 importlib.resources.readers: 3.11-
 importlib.resources.simple: 3.11-
@@ -1,7 +1,7 @@
|
||||
import sys
|
||||
import typing_extensions
|
||||
from typing import Any, ClassVar, Generic, Literal, TypedDict, overload
|
||||
from typing_extensions import Unpack
|
||||
from typing_extensions import Self, Unpack
|
||||
|
||||
PyCF_ONLY_AST: Literal[1024]
|
||||
PyCF_TYPE_COMMENTS: Literal[4096]
|
||||
@@ -34,6 +34,9 @@ class AST:
|
||||
if sys.version_info >= (3, 13):
|
||||
_field_types: ClassVar[dict[str, Any]]
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self) -> Self: ...
|
||||
|
||||
class mod(AST): ...
|
||||
class type_ignore(AST): ...
|
||||
|
||||
@@ -44,6 +47,9 @@ class TypeIgnore(type_ignore):
|
||||
tag: str
|
||||
def __init__(self, lineno: int, tag: str) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ...
|
||||
|
||||
class FunctionType(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("argtypes", "returns")
|
||||
@@ -57,6 +63,9 @@ class FunctionType(mod):
|
||||
else:
|
||||
def __init__(self, argtypes: list[expr], returns: expr) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ...
|
||||
|
||||
class Module(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body", "type_ignores")
|
||||
@@ -67,6 +76,9 @@ class Module(mod):
|
||||
else:
|
||||
def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ...
|
||||
|
||||
class Interactive(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
@@ -76,12 +88,18 @@ class Interactive(mod):
|
||||
else:
|
||||
def __init__(self, body: list[stmt]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: list[stmt] = ...) -> Self: ...
|
||||
|
||||
class Expression(mod):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("body",)
|
||||
body: expr
|
||||
def __init__(self, body: expr) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, body: expr = ...) -> Self: ...
|
||||
|
||||
class stmt(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -89,6 +107,9 @@ class stmt(AST):
|
||||
end_col_offset: int | None
|
||||
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class FunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
@@ -152,6 +173,19 @@ class FunctionDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier = ...,
|
||||
args: arguments = ...,
|
||||
body: list[stmt] = ...,
|
||||
decorator_list: list[expr] = ...,
|
||||
returns: expr | None = ...,
|
||||
type_comment: str | None = ...,
|
||||
type_params: list[type_param] = ...,
|
||||
) -> Self: ...
|
||||
|
||||
class AsyncFunctionDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
|
||||
@@ -215,6 +249,19 @@ class AsyncFunctionDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier = ...,
|
||||
args: arguments = ...,
|
||||
body: list[stmt],
|
||||
decorator_list: list[expr],
|
||||
returns: expr | None,
|
||||
type_comment: str | None,
|
||||
type_params: list[type_param],
|
||||
) -> Self: ...
|
||||
|
||||
class ClassDef(stmt):
|
||||
if sys.version_info >= (3, 12):
|
||||
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
|
||||
@@ -260,12 +307,28 @@ class ClassDef(stmt):
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier,
|
||||
bases: list[expr],
|
||||
keywords: list[keyword],
|
||||
body: list[stmt],
|
||||
decorator_list: list[expr],
|
||||
type_params: list[type_param],
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class Return(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None
    def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Delete(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets",)
@@ -275,6 +338,9 @@ class Delete(stmt):
    else:
        def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Assign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets", "value", "type_comment")
@@ -295,6 +361,11 @@ class Assign(stmt):
            self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class AugAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "op", "value")
@@ -305,6 +376,16 @@ class AugAssign(stmt):
            self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: Name | Attribute | Subscript = ...,
            op: operator = ...,
            value: expr = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AnnAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "annotation", "value", "simple")
@@ -332,6 +413,17 @@ class AnnAssign(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: Name | Attribute | Subscript = ...,
            annotation: expr = ...,
            value: expr | None = ...,
            simple: int = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class For(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -361,6 +453,18 @@ class For(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: expr = ...,
            iter: expr = ...,
            body: list[stmt] = ...,
            orelse: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AsyncFor(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -390,6 +494,18 @@ class AsyncFor(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: expr = ...,
            iter: expr = ...,
            body: list[stmt] = ...,
            orelse: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class While(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -403,6 +519,9 @@ class While(stmt):
    else:
        def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class If(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -416,6 +535,11 @@ class If(stmt):
    else:
        def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class With(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -435,6 +559,16 @@ class With(stmt):
            self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AsyncWith(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -454,6 +588,16 @@ class AsyncWith(stmt):
            self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class Raise(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("exc", "cause")
@@ -461,6 +605,9 @@ class Raise(stmt):
    cause: expr | None
    def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Try(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -487,6 +634,17 @@ class Try(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            body: list[stmt] = ...,
            handlers: list[ExceptHandler] = ...,
            orelse: list[stmt] = ...,
            finalbody: list[stmt] = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

if sys.version_info >= (3, 11):
    class TryStar(stmt):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -513,6 +671,17 @@ if sys.version_info >= (3, 11):
                **kwargs: Unpack[_Attributes],
            ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                body: list[stmt] = ...,
                handlers: list[ExceptHandler] = ...,
                orelse: list[stmt] = ...,
                finalbody: list[stmt] = ...,
                **kwargs: Unpack[_Attributes],
            ) -> Self: ...

class Assert(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "msg")
@@ -520,6 +689,9 @@ class Assert(stmt):
    msg: expr | None
    def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, test: expr = ..., msg: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Import(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -529,6 +701,9 @@ class Import(stmt):
    else:
        def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ImportFrom(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("module", "names", "level")
@@ -550,6 +725,11 @@ class ImportFrom(stmt):
            self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Global(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -559,6 +739,9 @@ class Global(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Nonlocal(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -568,12 +751,18 @@ class Nonlocal(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Expr(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
@@ -585,6 +774,9 @@ class expr(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...

class BoolOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "values")
@@ -595,6 +787,9 @@ class BoolOp(expr):
    else:
        def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class BinOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "op", "right")
@@ -603,6 +798,11 @@ class BinOp(expr):
    right: expr
    def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class UnaryOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "operand")
@@ -610,6 +810,9 @@ class UnaryOp(expr):
    operand: expr
    def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Lambda(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("args", "body")
@@ -617,6 +820,9 @@ class Lambda(expr):
    body: expr
    def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class IfExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -625,6 +831,11 @@ class IfExp(expr):
    orelse: expr
    def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Dict(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("keys", "values")
@@ -635,6 +846,11 @@ class Dict(expr):
    else:
        def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Set(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts",)
@@ -644,6 +860,9 @@ class Set(expr):
    else:
        def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ListComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -654,6 +873,11 @@ class ListComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class SetComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -664,6 +888,11 @@ class SetComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class DictComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("key", "value", "generators")
@@ -677,6 +906,11 @@ class DictComp(expr):
    else:
        def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class GeneratorExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -687,24 +921,38 @@ class GeneratorExp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Await(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Yield(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None
    def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class YieldFrom(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Compare(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "ops", "comparators")
@@ -718,6 +966,11 @@ class Compare(expr):
    else:
        def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Call(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("func", "args", "keywords")
@@ -731,6 +984,11 @@ class Call(expr):
    else:
        def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class FormattedValue(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "conversion", "format_spec")
@@ -739,6 +997,11 @@ class FormattedValue(expr):
    format_spec: expr | None
    def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class JoinedStr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("values",)
@@ -748,6 +1011,9 @@ class JoinedStr(expr):
    else:
        def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Constant(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "kind")
@@ -760,6 +1026,9 @@ class Constant(expr):

    def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: Any = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class NamedExpr(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "value")
@@ -767,6 +1036,9 @@ class NamedExpr(expr):
    value: expr
    def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Attribute(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "attr", "ctx")
@@ -775,6 +1047,11 @@ class Attribute(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

if sys.version_info >= (3, 9):
    _Slice: typing_extensions.TypeAlias = expr
    _SliceAttributes: typing_extensions.TypeAlias = _Attributes
@@ -794,6 +1071,16 @@ class Slice(_Slice):
        self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            lower: expr | None = ...,
            upper: expr | None = ...,
            step: expr | None = ...,
            **kwargs: Unpack[_SliceAttributes],
        ) -> Self: ...

if sys.version_info < (3, 9):
    class ExtSlice(slice):
        dims: list[slice]
@@ -811,6 +1098,11 @@ class Subscript(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, value: expr = ..., slice: _Slice = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Starred(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value", "ctx")
@@ -818,6 +1110,9 @@ class Starred(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Name(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("id", "ctx")
@@ -825,6 +1120,9 @@ class Name(expr):
    ctx: expr_context  # Not present in Python < 3.13 if not passed to `__init__`
    def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class List(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
@@ -835,6 +1133,9 @@ class List(expr):
    else:
        def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Tuple(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts", "ctx")
@@ -847,6 +1148,9 @@ class Tuple(expr):
    else:
        def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class expr_context(AST): ...

if sys.version_info < (3, 9):
@@ -910,6 +1214,9 @@ class comprehension(AST):
    else:
        def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ...

class excepthandler(AST):
    lineno: int
    col_offset: int
@@ -917,6 +1224,11 @@ class excepthandler(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ...
        ) -> Self: ...

class ExceptHandler(excepthandler):
    if sys.version_info >= (3, 10):
        __match_args__ = ("type", "name", "body")
@@ -937,6 +1249,16 @@ class ExceptHandler(excepthandler):
            self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            type: expr | None = ...,
            name: _Identifier | None = ...,
            body: list[stmt] = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class arguments(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
@@ -995,6 +1317,19 @@ class arguments(AST):
        defaults: list[expr],
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            posonlyargs: list[arg] = ...,
            args: list[arg] = ...,
            vararg: arg | None = ...,
            kwonlyargs: list[arg] = ...,
            kw_defaults: list[expr | None] = ...,
            kwarg: arg | None = ...,
            defaults: list[expr] = ...,
        ) -> Self: ...

class arg(AST):
    lineno: int
    col_offset: int
@@ -1009,6 +1344,16 @@ class arg(AST):
        self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            arg: _Identifier = ...,
            annotation: expr | None = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class keyword(AST):
    lineno: int
    col_offset: int
@@ -1023,6 +1368,9 @@ class keyword(AST):
    @overload
    def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class alias(AST):
    lineno: int
    col_offset: int
@@ -1034,6 +1382,9 @@ class alias(AST):
    asname: _Identifier | None
    def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class withitem(AST):
    if sys.version_info >= (3, 10):
        __match_args__ = ("context_expr", "optional_vars")
@@ -1041,6 +1392,9 @@ class withitem(AST):
    optional_vars: expr | None
    def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ...

if sys.version_info >= (3, 10):
    class Match(stmt):
        __match_args__ = ("subject", "cases")
@@ -1051,6 +1405,11 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]
            ) -> Self: ...

    class pattern(AST):
        lineno: int
        col_offset: int
@@ -1058,6 +1417,11 @@ if sys.version_info >= (3, 10):
        end_col_offset: int
        def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ...
            ) -> Self: ...

    # Without the alias, Pyright complains variables named pattern are recursively defined
    _Pattern: typing_extensions.TypeAlias = pattern

@@ -1074,16 +1438,25 @@ if sys.version_info >= (3, 10):
        @overload
        def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, pattern: _Pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ...

    class MatchValue(pattern):
        __match_args__ = ("value",)
        value: expr
        def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchSingleton(pattern):
        __match_args__ = ("value",)
        value: Literal[True, False] | None
        def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchSequence(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]
@@ -1092,11 +1465,17 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchStar(pattern):
        __match_args__ = ("name",)
        name: _Identifier | None
        def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class MatchMapping(pattern):
        __match_args__ = ("keys", "patterns", "rest")
        keys: list[expr]
@@ -1119,6 +1498,16 @@ if sys.version_info >= (3, 10):
            **kwargs: Unpack[_Attributes[int]],
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                keys: list[expr] = ...,
                patterns: list[pattern] = ...,
                rest: _Identifier | None = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class MatchClass(pattern):
        __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
        cls: expr
@@ -1144,6 +1533,17 @@ if sys.version_info >= (3, 10):
            **kwargs: Unpack[_Attributes[int]],
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                cls: expr = ...,
                patterns: list[pattern] = ...,
                kwd_attrs: list[_Identifier] = ...,
                kwd_patterns: list[pattern] = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class MatchAs(pattern):
        __match_args__ = ("pattern", "name")
        pattern: _Pattern | None
@@ -1152,6 +1552,11 @@ if sys.version_info >= (3, 10):
            self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]]
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class MatchOr(pattern):
        __match_args__ = ("patterns",)
        patterns: list[pattern]
@@ -1160,6 +1565,9 @@ if sys.version_info >= (3, 10):
        else:
            def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...

if sys.version_info >= (3, 12):
    class type_param(AST):
        lineno: int
@@ -1168,6 +1576,9 @@ if sys.version_info >= (3, 12):
        end_col_offset: int
        def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ...

    class TypeVar(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "bound", "default_value")
@@ -1187,6 +1598,16 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                name: _Identifier = ...,
                bound: expr | None = ...,
                default_value: expr | None = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...

    class ParamSpec(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "default_value")
@@ -1201,6 +1622,11 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class TypeVarTuple(type_param):
        if sys.version_info >= (3, 13):
            __match_args__ = ("name", "default_value")
@@ -1215,6 +1641,11 @@ if sys.version_info >= (3, 12):
        else:
            def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
            ) -> Self: ...

    class TypeAlias(stmt):
        __match_args__ = ("name", "type_params", "value")
        name: Name
@@ -1233,3 +1664,13 @@ if sys.version_info >= (3, 12):
        def __init__(
            self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]]
        ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                name: Name = ...,
                type_params: list[type_param] = ...,
                value: expr = ...,
                **kwargs: Unpack[_Attributes[int]],
            ) -> Self: ...
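The __replace__ stubs above type the copy.replace() protocol that AST nodes gain in Python 3.14. A minimal usage sketch, assuming a 3.14 interpreter (the parsed source and replacement value are made up for illustration):

    import ast
    import copy

    # copy.replace() delegates to the node's __replace__; fields that are
    # not passed keep their existing values.
    tree = ast.parse("x = 1")
    assign = tree.body[0]  # an ast.Assign node
    updated = copy.replace(assign, value=ast.Constant(value=2))
    print(ast.unparse(updated))  # prints: x = 2

Because every field parameter is keyword-only with a `...` default in these stubs, type checkers accept any subset of fields while still checking each one's type.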
@@ -493,7 +493,7 @@ class _CursesWindow:
    def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
    def is_linetouched(self, line: int, /) -> bool: ...
    def is_wintouched(self) -> bool: ...
-   def keypad(self, yes: bool) -> None: ...
+   def keypad(self, yes: bool, /) -> None: ...
    def leaveok(self, yes: bool) -> None: ...
    def move(self, new_y: int, new_x: int) -> None: ...
    def mvderwin(self, y: int, x: int) -> None: ...
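The hunk above adds a positional-only marker to keypad's yes parameter, matching the C implementation, which does not accept keyword arguments. A sketch of what the updated stub allows and flags (curses.window is the runtime class these _CursesWindow stubs describe):

    import curses

    def demo(stdscr: curses.window) -> None:
        stdscr.keypad(True)        # OK: positional argument
        # stdscr.keypad(yes=True)  # now rejected by type checkers: 'yes' is positional-only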
Some files were not shown because too many files have changed in this diff.