Compare commits

182 commits: 0.7.4...david/unre

Commits (SHA1):

500b9a2691, 15476be531, e7a361699d, a218e1901b, f108043d2d, 77b45aeee9, c6f4c106b0, dc55b4c8a2,
41d19c3c29, 99d44299e8, 32ad489d79, a3e7e7d8b6, aea4bbbb30, 167e445243, dccfd6e4f8, eb4ae2b910,
5be842b1c3, 2a21d79ec4, 1964ecdbb7, 6d167672f1, ae45b897ea, 90f48f45b0, d9cbf2fe44, 3f6c65e78c,
976c37a849, a378ff38dc, d8bca0d3a2, 6f1cf5b686, 8639f8c1a6, f1b2e85339, 6d61c8aa16, 8a7ba5d2df,
6fcbe8efb4, c40b37aa36, ef0e2a6e1b, 4fb1416bf4, 8a860b89b4, f96fa6b0e2, 4cd2b9926e, 11a2929ed7,
187974eff4, 14ba469fc0, 6fd10e2fe7, e0f3eaf1dd, c84c690f1e, 0d649f9afd, 82c01aa662, 9f446faa6c,
b94d6cf567, cd0c97211c, 0e71c9e3bb, 24c90d6953, fbff4dec3a, f3dac27e9a, e4cefd9bf9, 9e4ee98109,
557d583e32, f98eebdbab, c606bf014e, e8fce20736, 5a30ec0df6, fab1b0d546, 66abef433b, fa22bd604a,
0c9165fc3a, 9f6147490b, b7571c3e24, d178d115f3, 6501782678, bca4341dcc, 31ede11774, ba9f881687,
4357a0a3c2, c18afa93b3, 8f04202ee4, efe54081d6, ac23c99744, e5c7d87461, de62e39eba, d285717da8,
545e9deba3, e3d792605f, 1f303a5eb6, 07d13c6b4a, e1838aac29, 4ba847f250, 13e9fc9362, 3fda2d17c7,
931fa06d85, e53ac7985d, e25e7044ba, b80de52592, 2917534279, f6b2cd5588, 302fe76c2b, a90e404c3f,
8358ad8d25, 2b8b1ef178, 2efa3fbb62, b9da4305e6, 87043a2415, f684b6fff4, 47f39ed1a0, aecdb8c144,
3c52d2d1bd, 942d6eeb9f, 4ccacc80f9, b2bb119c6a, cef12f4925, aa7ac2ce0f, 70d9c90827, adfa723464,
844c07f1f0, 11d20a1a51, e9079e7d95, c400725713, 1081694140, 52f526eb38, dc05b38165, 8c3c5ee5e3,
b46cc6ac0b, 8b925ea626, 1b180c8342, afeb217452, c0b3dd3745, 5f6607bf54, a6deca44b5, 0dbceccbc1,
48680e10b6, b0c88a2a42, b9c53a74f9, 6a4d207db7, 42c35b6f44, 9e79d64d62, 582857f292, 9bbeb793e5,
dbbe7a773c, 5f09d4a90a, f8c20258ae, d8538d8c98, 3642381489, 1f07880d5c, d81b6cd334, d99210c049,
577653551c, 38a385fb6f, cd2ae5aa2d, 41694f21c6, fccbe56d23, c46555da41, 0a27c9dabd, 3c9e76eb66,
80f5cdcf66, 35fe0e90da, 157b49a8ee, 8a6e223df5, 5a48da53da, 58005b590c, 884835e386, efd4407f7f,
761588a60e, e1eb188049, ff19629b11, cd80c9d907, abb34828bd, cab7caf80b, d470f29093, 1fbed6c325,
4dcb7ddafe, 5be90c3a67, d0dca7bfcf, 78210b198b, 4a2310b595, fc392c663a, 81d3c419e9, a6a3d3f656,
c847cad389, 81e5830585, 2b58705cc1, 9f3235a37f, 62d650226b, 5d8a391a3e
@@ -17,4 +17,7 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
.github/CODEOWNERS: 5 changed lines (vendored)
@@ -13,9 +13,10 @@
# flake8-pyi
/crates/ruff_linter/src/rules/flake8_pyi/ @AlexWaygood

# Script for fuzzing the parser
/scripts/fuzz-parser/ @AlexWaygood
# Script for fuzzing the parser/red-knot etc.
/python/py-fuzzer/ @AlexWaygood

# red-knot
/crates/red_knot* @carljm @MichaReiser @AlexWaygood @sharkdp
/crates/ruff_db/ @carljm @MichaReiser @AlexWaygood @sharkdp
/scripts/knot_benchmark/ @carljm @MichaReiser @AlexWaygood @sharkdp
.github/workflows/ci.yaml: 73 changed lines (vendored)
@@ -49,7 +49,7 @@ jobs:
- crates/ruff_text_size/**
- crates/ruff_python_ast/**
- crates/ruff_python_parser/**
- scripts/fuzz-parser/**
- python/py-fuzzer/**
- .github/workflows/ci.yaml

linter:
@@ -82,6 +82,7 @@ jobs:
code:
- "**/*"
- "!**/*.md"
- "crates/red_knot_python_semantic/resources/mdtest/**/*.md"
- "!docs/**"
- "!assets/**"

@@ -115,7 +116,7 @@ jobs:

cargo-test-linux:
name: "cargo test (linux)"
runs-on: ubuntu-latest
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
@@ -157,9 +158,36 @@ jobs:
name: ruff
path: target/debug/ruff

cargo-test-linux-release:
name: "cargo test (linux, release)"
runs-on: depot-ubuntu-22.04-16
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
uses: rui314/setup-mold@v1
- name: "Install cargo nextest"
uses: taiki-e/install-action@v2
with:
tool: cargo-nextest
- name: "Install cargo insta"
uses: taiki-e/install-action@v2
with:
tool: cargo-insta
- uses: Swatinem/rust-cache@v2
- name: "Run tests"
shell: bash
env:
NEXTEST_PROFILE: "ci"
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest

cargo-test-windows:
name: "cargo test (windows)"
runs-on: windows-latest
runs-on: windows-latest-xlarge
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
timeout-minutes: 20
@@ -197,6 +225,8 @@ jobs:
cache: "npm"
cache-dependency-path: playground/package-lock.json
- uses: jetli/wasm-pack-action@v0.4.0
with:
version: v0.13.1
- uses: Swatinem/rust-cache@v2
- name: "Test ruff_wasm"
run: |
@@ -210,8 +240,7 @@ jobs:
cargo-build-release:
name: "cargo build (release)"
runs-on: macos-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
if: ${{ github.ref == 'refs/heads/main' }}
timeout-minutes: 20
steps:
- uses: actions/checkout@v4
@@ -255,11 +284,11 @@ jobs:
NEXTEST_PROFILE: "ci"
run: cargo +${{ steps.msrv.outputs.value }} insta test --all-features --unreferenced reject --test-runner nextest

cargo-fuzz:
name: "cargo fuzz"
cargo-fuzz-build:
name: "cargo fuzz build"
runs-on: ubuntu-latest
needs: determine_changes
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
if: ${{ github.ref == 'refs/heads/main' }}
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
@@ -278,7 +307,7 @@ jobs:
- run: cargo fuzz build -s none

fuzz-parser:
name: "Fuzz the parser"
name: "fuzz parser"
runs-on: ubuntu-latest
needs:
- cargo-test-linux
@@ -289,13 +318,7 @@ jobs:
FORCE_COLOR: 1
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: astral-sh/setup-uv@v4
- uses: actions/download-artifact@v4
name: Download Ruff binary to test
id: download-cached-binary
@@ -307,7 +330,15 @@ jobs:
# Make executable, since artifact download doesn't preserve this
chmod +x ${{ steps.download-cached-binary.outputs.download-path }}/ruff

python scripts/fuzz-parser/fuzz.py 0-500 --test-executable ${{ steps.download-cached-binary.outputs.download-path }}/ruff
(
uvx \
--python=${{ env.PYTHON_VERSION }} \
--from=./python/py-fuzzer \
fuzz \
--test-executable=${{ steps.download-cached-binary.outputs.download-path }}/ruff \
--bin=ruff \
0-500
)

scripts:
name: "test scripts"
@@ -331,7 +362,7 @@ jobs:

ecosystem:
name: "ecosystem"
runs-on: ubuntu-latest
runs-on: depot-ubuntu-latest-8
needs:
- cargo-test-linux
- determine_changes
@@ -561,12 +592,12 @@ jobs:
run: rustup show
- name: "Cache rust"
uses: Swatinem/rust-cache@v2
- name: "Formatter progress"
- name: "Run checks"
run: scripts/formatter_ecosystem_checks.sh
- name: "Github step summary"
run: cat target/progress_projects_stats.txt > $GITHUB_STEP_SUMMARY
run: cat target/formatter-ecosystem/stats.txt > $GITHUB_STEP_SUMMARY
- name: "Remove checkouts from cache"
run: rm -r target/progress_projects
run: rm -r target/formatter-ecosystem

check-ruff-lsp:
name: "test ruff-lsp"
.github/workflows/daily_fuzz.yaml: 19 changed lines (vendored)
@@ -32,13 +32,7 @@ jobs:
if: ${{ github.repository == 'astral-sh/ruff' || github.event_name != 'schedule' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install uv
run: curl -LsSf https://astral.sh/uv/install.sh | sh
- name: Install Python requirements
run: uv pip install -r scripts/fuzz-parser/requirements.txt --system
- uses: astral-sh/setup-uv@v4
- name: "Install Rust toolchain"
run: rustup show
- name: "Install mold"
@@ -49,7 +43,16 @@ jobs:
# but this is outweighed by the fact that a release build takes *much* longer to compile in CI
run: cargo build --locked
- name: Fuzz
run: python scripts/fuzz-parser/fuzz.py $(shuf -i 0-9999999999999999999 -n 1000) --test-executable target/debug/ruff
run: |
(
uvx \
--python=3.12 \
--from=./python/py-fuzzer \
fuzz \
--test-executable=target/debug/ruff \
--bin=ruff \
$(shuf -i 0-9999999999999999999 -n 1000)
)

create-issue-on-failure:
name: Create an issue if the daily fuzz surfaced any bugs
.github/workflows/release.yml: 36 changed lines (vendored)
@@ -1,4 +1,4 @@
# This file was autogenerated by cargo-dist: https://opensource.axo.dev/cargo-dist/
# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
#
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
@@ -6,7 +6,7 @@
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * builds artifacts with dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
@@ -24,10 +24,10 @@ permissions:
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
# package (erroring out if it doesn't have the given version or isn't dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (cargo-dist-able) packages in the workspace with that version (this mode is
# (dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
@@ -48,7 +48,7 @@ on:
type: string

jobs:
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
# Run 'dist plan' (or host) to determine what tasks we need to do
plan:
runs-on: "ubuntu-20.04"
outputs:
@@ -62,16 +62,16 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cargo-dist
- name: Install dist
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
- name: Cache cargo-dist
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.25.2-prerelease.3/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/cargo-dist
path: ~/.cargo/bin/dist
# sure would be cool if github gave us proper conditionals...
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
# functionality based on whether this is a pull_request, and whether it's from a fork.
@@ -79,8 +79,8 @@ jobs:
# but also really annoying to build CI around when it needs secrets to work right.)
- id: plan
run: |
cargo dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "cargo dist ran successfully"
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
echo "dist ran successfully"
cat plan-dist-manifest.json
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
- name: "Upload dist-manifest.json"
@@ -124,12 +124,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached cargo-dist
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/cargo-dist
- run: chmod +x ~/.cargo/bin/dist
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
- name: Fetch local artifacts
uses: actions/download-artifact@v4
@@ -140,8 +140,8 @@ jobs:
- id: cargo-dist
shell: bash
run: |
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "cargo dist ran successfully"
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
echo "dist ran successfully"

# Parse out what we just built and upload it to scratch storage
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
@@ -174,12 +174,12 @@ jobs:
- uses: actions/checkout@v4
with:
submodules: recursive
- name: Install cached cargo-dist
- name: Install cached dist
uses: actions/download-artifact@v4
with:
name: cargo-dist-cache
path: ~/.cargo/bin/
- run: chmod +x ~/.cargo/bin/cargo-dist
- run: chmod +x ~/.cargo/bin/dist
# Fetch artifacts from scratch-storage
- name: Fetch artifacts
uses: actions/download-artifact@v4
@@ -191,7 +191,7 @@ jobs:
- id: host
shell: bash
run: |
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
echo "artifacts uploaded and released successfully"
cat dist-manifest.json
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
@@ -17,7 +17,7 @@ exclude: |

repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.22
rev: v0.23
hooks:
- id: validate-pyproject

@@ -73,7 +73,7 @@ repos:
pass_filenames: false # This makes it a lot faster

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.3
rev: v0.7.4
hooks:
- id: ruff-format
- id: ruff
@@ -1,5 +1,30 @@
# Breaking Changes

## 0.8.0

- **Default to Python 3.9**

Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))

- **Changed location of `pydoclint` diagnostics**

[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.

If you've opted into these preview rules but have them suppressed using
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
some places, this change may mean that you need to move the `noqa` suppression
comments. Most users should be unaffected by this change.

- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**

Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).

This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.

- **Changes to the line width calculation**

Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).

## 0.7.0

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments
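The "Default to Python 3.9" note above only affects projects that configure neither of the settings it mentions. A minimal sketch of keeping the previous behavior, assuming the project is configured through `pyproject.toml` (`target-version` also works at the top level of a `ruff.toml`):

```toml
# Pin the target explicitly so the 0.8.0 default bump to Python 3.9
# does not change how Ruff analyzes the code.
[tool.ruff]
target-version = "py38"

# Or declare the supported range and let Ruff infer the target from it.
[project]
requires-python = ">=3.8"
```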
CHANGELOG.md: 139 changed lines
@@ -1,5 +1,142 @@
|
||||
# Changelog
|
||||
|
||||
## 0.8.1
|
||||
|
||||
### Preview features
|
||||
|
||||
- Formatter: Avoid invalid syntax for format-spec with quotes for all Python versions ([#14625](https://github.com/astral-sh/ruff/pull/14625))
|
||||
- Formatter: Consider quotes inside format-specs when choosing the quotes for an f-string ([#14493](https://github.com/astral-sh/ruff/pull/14493))
|
||||
- Formatter: Do not consider f-strings with escaped newlines as multiline ([#14624](https://github.com/astral-sh/ruff/pull/14624))
|
||||
- Formatter: Fix f-string formatting in assignment statement ([#14454](https://github.com/astral-sh/ruff/pull/14454))
|
||||
- Formatter: Fix unnecessary space around power operator (`**`) in overlong f-string expressions ([#14489](https://github.com/astral-sh/ruff/pull/14489))
|
||||
- \[`airflow`\] Avoid implicit `schedule` argument to `DAG` and `@dag` (`AIR301`) ([#14581](https://github.com/astral-sh/ruff/pull/14581))
|
||||
- \[`flake8-builtins`\] Exempt private built-in modules (`A005`) ([#14505](https://github.com/astral-sh/ruff/pull/14505))
|
||||
- \[`flake8-pytest-style`\] Fix `pytest.mark.parametrize` rules to check calls instead of decorators ([#14515](https://github.com/astral-sh/ruff/pull/14515))
|
||||
- \[`flake8-type-checking`\] Implement `runtime-cast-value` (`TC006`) ([#14511](https://github.com/astral-sh/ruff/pull/14511))
|
||||
- \[`flake8-type-checking`\] Implement `unquoted-type-alias` (`TC007`) and `quoted-type-alias` (`TC008`) ([#12927](https://github.com/astral-sh/ruff/pull/12927))
|
||||
- \[`flake8-use-pathlib`\] Recommend `Path.iterdir()` over `os.listdir()` (`PTH208`) ([#14509](https://github.com/astral-sh/ruff/pull/14509))
|
||||
- \[`pylint`\] Extend `invalid-envvar-default` to detect `os.environ.get` (`PLW1508`) ([#14512](https://github.com/astral-sh/ruff/pull/14512))
|
||||
- \[`pylint`\] Implement `len-test` (`PLC1802`) ([#14309](https://github.com/astral-sh/ruff/pull/14309))
|
||||
- \[`refurb`\] Fix bug where methods defined using lambdas were flagged by `FURB118` ([#14639](https://github.com/astral-sh/ruff/pull/14639))
|
||||
- \[`ruff`\] Auto-add `r` prefix when string has no backslashes for `unraw-re-pattern` (`RUF039`) ([#14536](https://github.com/astral-sh/ruff/pull/14536))
|
||||
- \[`ruff`\] Implement `invalid-assert-message-literal-argument` (`RUF040`) ([#14488](https://github.com/astral-sh/ruff/pull/14488))
|
||||
- \[`ruff`\] Implement `unnecessary-nested-literal` (`RUF041`) ([#14323](https://github.com/astral-sh/ruff/pull/14323))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- Ignore more rules for stub files ([#14541](https://github.com/astral-sh/ruff/pull/14541))
|
||||
- \[`pep8-naming`\] Eliminate false positives for single-letter names (`N811`, `N814`) ([#14584](https://github.com/astral-sh/ruff/pull/14584))
|
||||
- \[`pyflakes`\] Avoid false positives in `@no_type_check` contexts (`F821`, `F722`) ([#14615](https://github.com/astral-sh/ruff/pull/14615))
|
||||
- \[`ruff`\] Detect redirected-noqa in file-level comments (`RUF101`) ([#14635](https://github.com/astral-sh/ruff/pull/14635))
|
||||
- \[`ruff`\] Mark fixes for `unsorted-dunder-all` and `unsorted-dunder-slots` as unsafe when there are complex comments in the sequence (`RUF022`, `RUF023`) ([#14560](https://github.com/astral-sh/ruff/pull/14560))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid fixing code to `None | None` for `redundant-none-literal` (`PYI061`) and `never-union` (`RUF020`) ([#14583](https://github.com/astral-sh/ruff/pull/14583), [#14589](https://github.com/astral-sh/ruff/pull/14589))
|
||||
- \[`flake8-bugbear`\] Fix `mutable-contextvar-default` to resolve annotated function calls properly (`B039`) ([#14532](https://github.com/astral-sh/ruff/pull/14532))
|
||||
- \[`flake8-type-checking`\] Avoid syntax errors and type checking problem for quoted annotations autofix (`TC003`, `TC006`) ([#14634](https://github.com/astral-sh/ruff/pull/14634))
|
||||
- \[`pylint`\] Do not wrap function calls in parentheses in the fix for unnecessary-dunder-call (`PLC2801`) ([#14601](https://github.com/astral-sh/ruff/pull/14601))
|
||||
- \[`ruff`\] Handle `attrs`'s `auto_attribs` correctly (`RUF009`) ([#14520](https://github.com/astral-sh/ruff/pull/14520))
|
||||
|
||||
## 0.8.0
|
||||
|
||||
Check out the [blog post](https://astral.sh/blog/ruff-v0.8.0) for a migration guide and overview of the changes!
|
||||
|
||||
### Breaking changes
|
||||
|
||||
See also, the "Remapped rules" section which may result in disabled rules.
|
||||
|
||||
- **Default to Python 3.9**
|
||||
|
||||
Ruff now defaults to Python 3.9 instead of 3.8 if no explicit Python version is configured using [`ruff.target-version`](https://docs.astral.sh/ruff/settings/#target-version) or [`project.requires-python`](https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#python-requires) ([#13896](https://github.com/astral-sh/ruff/pull/13896))
|
||||
|
||||
- **Changed location of `pydoclint` diagnostics**
|
||||
|
||||
[`pydoclint`](https://docs.astral.sh/ruff/rules/#pydoclint-doc) diagnostics now point to the first-line of the problematic docstring. Previously, this was not the case.
|
||||
|
||||
If you've opted into these preview rules but have them suppressed using
|
||||
[`noqa`](https://docs.astral.sh/ruff/linter/#error-suppression) comments in
|
||||
some places, this change may mean that you need to move the `noqa` suppression
|
||||
comments. Most users should be unaffected by this change.
|
||||
|
||||
- **Use XDG (i.e. `~/.local/bin`) instead of the Cargo home directory in the standalone installer**
|
||||
|
||||
Previously, Ruff's installer used `$CARGO_HOME` or `~/.cargo/bin` for its target install directory. Now, Ruff will be installed into `$XDG_BIN_HOME`, `$XDG_DATA_HOME/../bin`, or `~/.local/bin` (in that order).
|
||||
|
||||
This change is only relevant to users of the standalone Ruff installer (using the shell or PowerShell script). If you installed Ruff using uv or pip, you should be unaffected.
|
||||
|
||||
- **Changes to the line width calculation**
|
||||
|
||||
Ruff now uses a new version of the [unicode-width](https://github.com/unicode-rs/unicode-width) Rust crate to calculate the line width. In very rare cases, this may lead to lines containing Unicode characters being reformatted, or being considered too long when they were not before ([`E501`](https://docs.astral.sh/ruff/rules/line-too-long/)).
|
||||
|
||||
### Removed Rules
|
||||
|
||||
The following deprecated rules have been removed:
|
||||
|
||||
- [`missing-type-self`](https://docs.astral.sh/ruff/rules/missing-type-self/) (`ANN101`)
|
||||
- [`missing-type-cls`](https://docs.astral.sh/ruff/rules/missing-type-cls/) (`ANN102`)
|
||||
- [`syntax-error`](https://docs.astral.sh/ruff/rules/syntax-error/) (`E999`)
|
||||
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
|
||||
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
|
||||
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
|
||||
|
||||
### Remapped rules
|
||||
|
||||
The following rules have been remapped to new rule codes:
|
||||
|
||||
- [`flake8-type-checking`](https://docs.astral.sh/ruff/rules/#flake8-type-checking-tc): `TCH` to `TC`
|
||||
|
||||
### Stabilization
|
||||
|
||||
The following rules have been stabilized and are no longer in preview:
|
||||
|
||||
- [`builtin-import-shadowing`](https://docs.astral.sh/ruff/rules/builtin-import-shadowing/) (`A004`)
|
||||
- [`mutable-contextvar-default`](https://docs.astral.sh/ruff/rules/mutable-contextvar-default/) (`B039`)
|
||||
- [`fast-api-redundant-response-model`](https://docs.astral.sh/ruff/rules/fast-api-redundant-response-model/) (`FAST001`)
|
||||
- [`fast-api-non-annotated-dependency`](https://docs.astral.sh/ruff/rules/fast-api-non-annotated-dependency/) (`FAST002`)
|
||||
- [`dict-index-missing-items`](https://docs.astral.sh/ruff/rules/dict-index-missing-items/) (`PLC0206`)
|
||||
- [`pep484-style-positional-only-parameter`](https://docs.astral.sh/ruff/rules/pep484-style-positional-only-parameter/) (`PYI063`)
|
||||
- [`redundant-final-literal`](https://docs.astral.sh/ruff/rules/redundant-final-literal/) (`PYI064`)
|
||||
- [`bad-version-info-order`](https://docs.astral.sh/ruff/rules/bad-version-info-order/) (`PYI066`)
|
||||
- [`parenthesize-chained-operators`](https://docs.astral.sh/ruff/rules/parenthesize-chained-operators/) (`RUF021`)
|
||||
- [`unsorted-dunder-all`](https://docs.astral.sh/ruff/rules/unsorted-dunder-all/) (`RUF022`)
|
||||
- [`unsorted-dunder-slots`](https://docs.astral.sh/ruff/rules/unsorted-dunder-slots/) (`RUF023`)
|
||||
- [`assert-with-print-message`](https://docs.astral.sh/ruff/rules/assert-with-print-message/) (`RUF030`)
|
||||
- [`unnecessary-default-type-args`](https://docs.astral.sh/ruff/rules/unnecessary-default-type-args/) (`UP043`)
|
||||
|
||||
The following behaviors have been stabilized:
|
||||
|
||||
- [`ambiguous-variable-name`](https://docs.astral.sh/ruff/rules/ambiguous-variable-name/) (`E741`): Violations in stub files are now ignored. Stub authors typically don't control variable names.
|
||||
- [`printf-string-formatting`](https://docs.astral.sh/ruff/rules/printf-string-formatting/) (`UP031`): Report all `printf`-like usages even if no autofix is available
|
||||
|
||||
The following fixes have been stabilized:
|
||||
|
||||
- [`zip-instead-of-pairwise`](https://docs.astral.sh/ruff/rules/zip-instead-of-pairwise/) (`RUF007`)
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-datetimez`\] Exempt `min.time()` and `max.time()` (`DTZ901`) ([#14394](https://github.com/astral-sh/ruff/pull/14394))
|
||||
- \[`flake8-pie`\] Mark fix as unsafe if the following statement is a string literal (`PIE790`) ([#14393](https://github.com/astral-sh/ruff/pull/14393))
|
||||
- \[`flake8-pyi`\] New rule `redundant-none-literal` (`PYI061`) ([#14316](https://github.com/astral-sh/ruff/pull/14316))
|
||||
- \[`flake8-pyi`\] Add autofix for `redundant-numeric-union` (`PYI041`) ([#14273](https://github.com/astral-sh/ruff/pull/14273))
|
||||
- \[`ruff`\] New rule `map-int-version-parsing` (`RUF048`) ([#14373](https://github.com/astral-sh/ruff/pull/14373))
|
||||
- \[`ruff`\] New rule `redundant-bool-literal` (`RUF038`) ([#14319](https://github.com/astral-sh/ruff/pull/14319))
|
||||
- \[`ruff`\] New rule `unraw-re-pattern` (`RUF039`) ([#14446](https://github.com/astral-sh/ruff/pull/14446))
|
||||
- \[`pycodestyle`\] Exempt `pytest.importorskip()` calls (`E402`) ([#14474](https://github.com/astral-sh/ruff/pull/14474))
|
||||
- \[`pylint`\] Autofix suggests using sets when possible (`PLR1714`) ([#14372](https://github.com/astral-sh/ruff/pull/14372))
|
||||
|
||||
### Rule changes
|
||||
|
||||
- [`invalid-pyproject-toml`](https://docs.astral.sh/ruff/rules/invalid-pyproject-toml/) (`RUF200`): Updated to reflect the provisionally accepted [PEP 639](https://peps.python.org/pep-0639/).
|
||||
- \[`flake8-pyi`\] Avoid panic in unfixable case (`PYI041`) ([#14402](https://github.com/astral-sh/ruff/pull/14402))
|
||||
- \[`flake8-type-checking`\] Correctly handle quotes in subscript expression when generating an autofix ([#14371](https://github.com/astral-sh/ruff/pull/14371))
|
||||
- \[`pylint`\] Suggest correct autofix for `__contains__` (`PLC2801`) ([#14424](https://github.com/astral-sh/ruff/pull/14424))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Ruff now emits a warning instead of an error when a configuration [`ignore`](https://docs.astral.sh/ruff/settings/#lint_ignore)s a rule that has been removed ([#14435](https://github.com/astral-sh/ruff/pull/14435))
|
||||
- Ruff now validates that `lint.flake8-import-conventions.aliases` only uses valid module names and aliases ([#14477](https://github.com/astral-sh/ruff/pull/14477))
|
||||
|
||||
## 0.7.4
|
||||
|
||||
### Preview features
|
||||
@@ -978,7 +1115,7 @@ The following deprecated CLI commands have been removed:
|
||||
### Preview features
|
||||
|
||||
- \[`flake8-bugbear`\] Implement `return-in-generator` (`B901`) ([#11644](https://github.com/astral-sh/ruff/pull/11644))
|
||||
- \[`flake8-pyi`\] Implement `PYI063` ([#11699](https://github.com/astral-sh/ruff/pull/11699))
|
||||
- \[`flake8-pyi`\] Implement `pep484-style-positional-only-parameter` (`PYI063`) ([#11699](https://github.com/astral-sh/ruff/pull/11699))
|
||||
- \[`pygrep_hooks`\] Check blanket ignores via file-level pragmas (`PGH004`) ([#11540](https://github.com/astral-sh/ruff/pull/11540))
|
||||
|
||||
### Rule changes
|
||||
|
||||
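The `RUF200` entry above updates `invalid-pyproject-toml` for the provisionally accepted PEP 639. A hedged sketch of the metadata style that change concerns; the field shapes follow PEP 639 itself rather than anything shown in this diff:

```toml
[project]
name = "example"
version = "0.1.0"
# PEP 639: an SPDX license expression string plus license file globs,
# replacing the older table form such as `license = { file = "LICENSE" }`.
license = "MIT OR Apache-2.0"
license-files = ["LICENSE*"]
```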
@@ -139,7 +139,7 @@ At a high level, the steps involved in adding a new lint rule are as follows:
1. Create a file for your rule (e.g., `crates/ruff_linter/src/rules/flake8_bugbear/rules/assert_false.rs`).

1. In that file, define a violation struct (e.g., `pub struct AssertFalse`). You can grep for
`#[violation]` to see examples.
`#[derive(ViolationMetadata)]` to see examples.

1. In that file, define a function that adds the violation to the diagnostic list as appropriate
(e.g., `pub(crate) fn assert_false`) based on whatever inputs are required for the rule (e.g.,
Cargo.lock: 270 changed lines (generated)
Dependency changes recorded in the generated lockfile (hunks condensed to the package-level updates they contain):

- Workspace crates bumped: ruff, ruff_linter, and ruff_wasm 0.7.4 → 0.8.1.
- New entries: base64 0.13.1, boxcar 0.2.7, dunce 1.0.5, ron 0.7.1, urlencoding 2.1.3, version-ranges 0.1.1.
- Removed entries: pep440_rs 0.4.0 (only 0.7.2 remains) and syn 1.0.109 (dependents now reference `syn` without a version qualifier).
- Version bumps: bstr 1.10.0 → 1.11.0, clap and clap_builder 4.5.20 → 4.5.21, dir-test and dir-test-macros 0.3.0 → 0.4.0, hashbrown 0.15.1 → 0.15.2, indicatif 0.17.8 → 0.17.9, libc 0.2.162 → 0.2.164, libcst 1.5.0 → 1.5.1, newtype-uuid 1.1.0 → 1.1.3, pep508_rs 0.3.0 → 0.9.1, proc-macro2 1.0.89 → 1.0.92, pyproject-toml 0.9.0 → 0.13.4, quick-junit 0.5.0 → 0.5.1 (now on thiserror 2.0.3), quick-xml 0.36.1 → 0.37.1, serde and serde_derive 1.0.214 → 1.0.215, serde_json 1.0.132 → 1.0.133, syn 2.0.87 → 2.0.89, unicode-ident 1.0.13 → 1.0.14, unicode-width 0.1.13 → 0.2.0 (across several crates), url 2.5.3 → 2.5.4.
- Dependency-list adjustments in several crates, including insta (adds ron), ureq (base64 0.22.0), ruff_db, red_knot_python_semantic, and the other red_knot workspace crates.
27
Cargo.toml
@@ -65,7 +65,8 @@ compact_str = "0.8.0"
|
||||
criterion = { version = "0.5.1", default-features = false }
|
||||
crossbeam = { version = "0.8.4" }
|
||||
dashmap = { version = "6.0.1" }
|
||||
dir-test = { version = "0.3.0" }
|
||||
dir-test = { version = "0.4.0" }
|
||||
dunce = { version = "1.0.5" }
|
||||
drop_bomb = { version = "0.1.5" }
|
||||
env_logger = { version = "0.11.0" }
|
||||
etcetera = { version = "0.8.0" }
|
||||
@@ -81,7 +82,7 @@ hashbrown = { version = "0.15.0", default-features = false, features = [
|
||||
ignore = { version = "0.4.22" }
|
||||
imara-diff = { version = "0.1.5" }
|
||||
imperative = { version = "1.0.4" }
|
||||
indexmap = {version = "2.6.0" }
|
||||
indexmap = { version = "2.6.0" }
|
||||
indicatif = { version = "0.17.8" }
|
||||
indoc = { version = "2.0.4" }
|
||||
insta = { version = "1.35.1" }
|
||||
@@ -110,7 +111,7 @@ pathdiff = { version = "0.2.1" }
|
||||
pep440_rs = { version = "0.7.1" }
|
||||
pretty_assertions = "1.3.0"
|
||||
proc-macro2 = { version = "1.0.79" }
|
||||
pyproject-toml = { version = "0.9.0" }
|
||||
pyproject-toml = { version = "0.13.4" }
|
||||
quick-junit = { version = "0.5.0" }
|
||||
quote = { version = "1.0.23" }
|
||||
rand = { version = "0.8.5" }
|
||||
@@ -150,7 +151,7 @@ tracing-tree = { version = "0.4.0" }
|
||||
typed-arena = { version = "2.0.2" }
|
||||
unic-ucd-category = { version = "0.9" }
|
||||
unicode-ident = { version = "1.0.12" }
|
||||
unicode-width = { version = "0.1.11" }
|
||||
unicode-width = { version = "0.2.0" }
|
||||
unicode_names2 = { version = "1.2.2" }
|
||||
unicode-normalization = { version = "0.1.23" }
|
||||
ureq = { version = "2.9.6" }
|
||||
@@ -247,10 +248,10 @@ debug = 1
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
# Config for 'cargo dist'
|
||||
# Config for 'dist'
|
||||
[workspace.metadata.dist]
|
||||
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.22.1"
|
||||
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
|
||||
cargo-dist-version = "0.25.2-prerelease.3"
|
||||
# CI backends to support
|
||||
ci = "github"
|
||||
# The installers to generate for each app
|
||||
@@ -281,13 +282,13 @@ targets = [
|
||||
]
|
||||
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
|
||||
auto-includes = false
|
||||
# Whether cargo-dist should create a GitHub Release or use an existing draft
|
||||
# Whether dist should create a Github Release or use an existing draft
|
||||
create-release = true
|
||||
# Which actions to run on pull requests
|
||||
pr-run-mode = "skip"
|
||||
# Whether CI should trigger releases with dispatches instead of tag pushes
|
||||
dispatch-releases = true
|
||||
# Which phase cargo-dist should use to create the GitHub release
|
||||
# Which phase dist should use to create the GitHub release
|
||||
github-release = "announce"
|
||||
# Whether CI should include auto-generated code to build local artifacts
|
||||
build-local-artifacts = false
|
||||
@@ -296,14 +297,10 @@ local-artifacts-jobs = ["./build-binaries", "./build-docker"]
|
||||
# Publish jobs to run in CI
|
||||
publish-jobs = ["./publish-pypi", "./publish-wasm"]
|
||||
# Post-announce jobs to run in CI
|
||||
post-announce-jobs = [
|
||||
"./notify-dependents",
|
||||
"./publish-docs",
|
||||
"./publish-playground",
|
||||
]
|
||||
post-announce-jobs = ["./notify-dependents", "./publish-docs", "./publish-playground"]
|
||||
# Custom permissions for GitHub Jobs
|
||||
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read" }, "publish-wasm" = { contents = "read", id-token = "write", packages = "write" } }
|
||||
# Whether to install an updater program
|
||||
install-updater = false
|
||||
# Path that installers should place binaries in
|
||||
install-path = "CARGO_HOME"
|
||||
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]
|
||||
|
||||
10
README.md
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.7.4/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.7.4/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.8.1/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.8.1/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.7.4
|
||||
rev: v0.8.1
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
@@ -238,8 +238,8 @@ exclude = [
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
# Assume Python 3.9
|
||||
target-version = "py39"
|
||||
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
|
||||
@@ -1,6 +1,11 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = [
"crates/red_knot_vendored/vendor/**/*",
"**/resources/**/*",
"**/snapshots/**/*",
"crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`

36
clippy.toml
@@ -1,21 +1,25 @@
|
||||
doc-valid-idents = [
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"..",
|
||||
"CodeQL",
|
||||
"FastAPI",
|
||||
"IPython",
|
||||
"LangChain",
|
||||
"LibCST",
|
||||
"McCabe",
|
||||
"NumPy",
|
||||
"SCREAMING_SNAKE_CASE",
|
||||
"SQLAlchemy",
|
||||
"StackOverflow",
|
||||
"PyCharm",
|
||||
"SNMPv1",
|
||||
"SNMPv2",
|
||||
"SNMPv3",
|
||||
"PyFlakes"
|
||||
]
|
||||
|
||||
ignore-interior-mutability = [
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
# Interned is read-only. The wrapped `Rc` never gets updated.
|
||||
"ruff_formatter::format_element::Interned",
|
||||
# The expression is read-only.
|
||||
"ruff_python_ast::hashable::HashableExpr",
|
||||
]
|
||||
|
||||
@@ -34,6 +34,7 @@ tracing-tree = { workspace = true }
|
||||
[dev-dependencies]
|
||||
filetime = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -183,10 +183,10 @@ fn run() -> anyhow::Result<ExitStatus> {
|
||||
|
||||
let system = OsSystem::new(cwd.clone());
|
||||
let cli_configuration = args.to_configuration(&cwd);
|
||||
let workspace_metadata = WorkspaceMetadata::from_path(
|
||||
let workspace_metadata = WorkspaceMetadata::discover(
|
||||
system.current_directory(),
|
||||
&system,
|
||||
Some(cli_configuration.clone()),
|
||||
Some(&cli_configuration),
|
||||
)?;
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
#[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
|
||||
pub enum TargetVersion {
|
||||
Py37,
|
||||
#[default]
|
||||
Py38,
|
||||
Py39,
|
||||
Py310,
|
||||
Py311,
|
||||
Py312,
|
||||
#[default]
|
||||
Py313,
|
||||
}
|
||||
|
||||
@@ -46,3 +46,17 @@ impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::target_version::TargetVersion;
|
||||
use red_knot_python_semantic::PythonVersion;
|
||||
|
||||
#[test]
|
||||
fn same_default_as_python_version() {
|
||||
assert_eq!(
|
||||
PythonVersion::from(TargetVersion::default()),
|
||||
PythonVersion::default()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,9 +4,8 @@ use std::io::Write;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::db::{Db, RootDatabase};
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
|
||||
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
|
||||
@@ -14,6 +13,7 @@ use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
|
||||
use ruff_db::Upcast;
|
||||
|
||||
struct TestCase {
|
||||
@@ -46,6 +46,8 @@ impl TestCase {
|
||||
}
|
||||
|
||||
fn try_stop_watch(&mut self, timeout: Duration) -> Option<Vec<watch::ChangeEvent>> {
|
||||
tracing::debug!("Try stopping watch with timeout {:?}", timeout);
|
||||
|
||||
let watcher = self
|
||||
.watcher
|
||||
.take()
|
||||
@@ -55,8 +57,11 @@ impl TestCase {
|
||||
.changes_receiver
|
||||
.recv_timeout(timeout)
|
||||
.unwrap_or_default();
|
||||
|
||||
watcher.flush();
|
||||
tracing::debug!("Flushed file watcher");
|
||||
watcher.stop();
|
||||
tracing::debug!("Stopping file watcher");
|
||||
|
||||
for event in &self.changes_receiver {
|
||||
all_events.extend(event);
|
||||
@@ -69,7 +74,6 @@ impl TestCase {
|
||||
Some(all_events)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
|
||||
self.try_take_watch_changes(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
@@ -110,8 +114,8 @@ impl TestCase {
|
||||
) -> anyhow::Result<()> {
|
||||
let program = Program::get(self.db());
|
||||
|
||||
self.configuration.search_paths = configuration.clone();
|
||||
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
|
||||
let new_settings = configuration.to_settings(self.db.workspace().root(&self.db));
|
||||
self.configuration.search_paths = configuration;
|
||||
|
||||
program.update_search_paths(&mut self.db, &new_settings)?;
|
||||
|
||||
@@ -204,7 +208,9 @@ where
|
||||
.as_utf8_path()
|
||||
.canonicalize_utf8()
|
||||
.with_context(|| "Failed to canonicalize root path.")?,
|
||||
);
|
||||
)
|
||||
.simplified()
|
||||
.to_path_buf();
|
||||
|
||||
let workspace_path = root_path.join("workspace");
|
||||
|
||||
@@ -241,8 +247,7 @@ where
|
||||
search_paths,
|
||||
};
|
||||
|
||||
let workspace =
|
||||
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
|
||||
let workspace = WorkspaceMetadata::discover(&workspace_path, &system, Some(&configuration))?;
|
||||
|
||||
let db = RootDatabase::new(workspace, system)?;
|
||||
|
||||
@@ -599,6 +604,8 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
|
||||
#[test]
|
||||
fn directory_renamed() -> anyhow::Result<()> {
|
||||
let _tracing = setup_logging_with_filter("file_watching=TRACE,red_knot=TRACE");
|
||||
|
||||
let mut case = setup([
|
||||
("bar.py", "import sub.a"),
|
||||
("sub/__init__.py", ""),
|
||||
@@ -639,6 +646,10 @@ fn directory_renamed() -> anyhow::Result<()> {
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
for event in &changes {
|
||||
tracing::debug!("Event: {:?}", event);
|
||||
}
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
// `import sub.a` should no longer resolve
|
||||
@@ -1311,3 +1322,138 @@ mod unix {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_packages_delete_root() -> anyhow::Result<()> {
|
||||
let mut case = setup(|root: &SystemPath, workspace_root: &SystemPath| {
|
||||
std::fs::write(
|
||||
workspace_root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
std::fs::write(
|
||||
root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "outer"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
assert_eq!(
|
||||
case.db().workspace().root(case.db()),
|
||||
&*case.workspace_path("")
|
||||
);
|
||||
|
||||
std::fs::remove_file(case.workspace_path("pyproject.toml").as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
// It should now pick up the outer workspace.
|
||||
assert_eq!(case.db().workspace().root(case.db()), case.root_path());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn added_package() -> anyhow::Result<()> {
|
||||
let _ = setup_logging();
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
std::fs::create_dir(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to create folder for package 'b'")?;
|
||||
|
||||
// It seems that the file watcher won't pick up on file changes shortly after the folder
|
||||
// was created... I suspect this is because most file watchers don't support recursive
|
||||
// file watching. Instead, file-watching libraries manually implement recursive file watching
|
||||
// by setting a watcher for each directory. But doing this obviously "lags" behind.
|
||||
case.take_watch_changes();
|
||||
|
||||
std::fs::write(
|
||||
case.workspace_path("packages/b/pyproject.toml")
|
||||
.as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
)
|
||||
.context("failed to write pyproject.toml for package b")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn removed_package() -> anyhow::Result<()> {
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/b/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
std::fs::remove_dir_all(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to remove package 'b'")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ license = { workspace = true }
|
||||
[dependencies]
|
||||
ruff_db = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_python_ast = { workspace = true, features = ["salsa"] }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_parser = { workspace = true }
|
||||
ruff_python_stdlib = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
@@ -33,6 +33,7 @@ thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
smallvec = { workspace = true }
|
||||
static_assertions = { workspace = true }
|
||||
test-case = { workspace = true }
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
# NoReturn & Never
|
||||
|
||||
`NoReturn` is used to annotate the return type for functions that never return. `Never` is the
|
||||
bottom type, representing the empty set of Python objects. These two annotations can be used
|
||||
interchangeably.
|
||||
|
||||
## Function Return Type Annotation
|
||||
|
||||
```py
|
||||
from typing import NoReturn
|
||||
|
||||
def stop() -> NoReturn:
|
||||
raise RuntimeError("no way")
|
||||
|
||||
# revealed: Never
|
||||
reveal_type(stop())
|
||||
```
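
A minimal runtime sketch of the usual exhaustiveness idiom built on `Never`, assuming `typing_extensions` is available (`assert_never` is used here purely for illustration):

```py
from typing_extensions import NoReturn, assert_never

def fail(message: str) -> NoReturn:
    # Functions annotated with `NoReturn` must raise (or loop forever).
    raise ValueError(message)

def handle(value: int | str) -> str:
    if isinstance(value, int):
        return str(value)
    if isinstance(value, str):
        return value
    # Here `value` has been narrowed to `Never`;
    # `assert_never` documents that the branches above are exhaustive.
    assert_never(value)
```
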
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import NoReturn, Never, Any
|
||||
|
||||
# error: [invalid-type-parameter] "Type `typing.Never` expected no type parameter"
|
||||
x: Never[int]
|
||||
a1: NoReturn
|
||||
# TODO: Test `Never` is only available in python >= 3.11
|
||||
a2: Never
|
||||
b1: Any
|
||||
b2: int
|
||||
|
||||
def f():
|
||||
# revealed: Never
|
||||
reveal_type(a1)
|
||||
# revealed: Never
|
||||
reveal_type(a2)
|
||||
|
||||
# Never is assignable to all types.
|
||||
v1: int = a1
|
||||
v2: str = a1
|
||||
# Other types are not assignable to Never except for Never (and Any).
|
||||
v3: Never = b1
|
||||
v4: Never = a2
|
||||
v5: Any = b2
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Never`"
|
||||
v6: Never = 1
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import NoReturn, Never
|
||||
|
||||
x: NoReturn
|
||||
y: Never
|
||||
|
||||
def f():
|
||||
# revealed: Never
|
||||
reveal_type(x)
|
||||
# revealed: Never
|
||||
reveal_type(y)
|
||||
```
|
||||
@@ -0,0 +1,47 @@
|
||||
# Optional
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Optional[X]` is equivalent to the union of `X` and `None`.
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int]
|
||||
a1: Optional[bool]
|
||||
a2: Optional[Optional[bool]]
|
||||
a3: Optional[None]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
# revealed: bool | None
|
||||
reveal_type(a1)
|
||||
# revealed: bool | None
|
||||
reveal_type(a2)
|
||||
# revealed: None
|
||||
reveal_type(a3)
|
||||
```
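
As a quick runtime cross-check using standard `typing` introspection, `Optional[X]` really is the two-member union of `X` and `None`:

```py
from typing import Optional, Union, get_args

# `Optional[int]` and `Union[int, None]` are the same runtime object.
assert Optional[int] == Union[int, None]
print(get_args(Optional[int]))  # (<class 'int'>, <class 'NoneType'>)
```
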
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Optional
|
||||
|
||||
a: Optional[int] = 1
|
||||
a = None
|
||||
# error: [invalid-assignment] "Object of type `Literal[""]` is not assignable to `int | None`"
|
||||
a = ""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Optional
|
||||
|
||||
a: Optional[int]
|
||||
|
||||
def f():
|
||||
# revealed: int | None
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -9,10 +9,10 @@ Ts = TypeVarTuple("Ts")
|
||||
|
||||
def append_int(*args: *Ts) -> tuple[*Ts, int]:
|
||||
# TODO: should show some representation of the variadic generic type
|
||||
reveal_type(args) # revealed: @Todo
|
||||
reveal_type(args) # revealed: @Todo(function parameter type)
|
||||
|
||||
return (*args, 1)
|
||||
|
||||
# TODO should be tuple[Literal[True], Literal["a"], int]
|
||||
reveal_type(append_int(True, "a")) # revealed: @Todo
|
||||
reveal_type(append_int(True, "a")) # revealed: @Todo(full tuple[...] support)
|
||||
```
|
||||
|
||||
@@ -189,3 +189,31 @@ reveal_type(d) # revealed: Foo
|
||||
## Parameter
|
||||
|
||||
TODO: Add tests once parameter inference is supported
|
||||
|
||||
## Invalid expressions
|
||||
|
||||
The expressions in these string annotations aren't valid expressions in this context, but we shouldn't panic.
|
||||
|
||||
```py
|
||||
a: "1 or 2"
|
||||
b: "(x := 1)"
|
||||
c: "1 + 2"
|
||||
d: "lambda x: x"
|
||||
e: "x if True else y"
|
||||
f: "{'a': 1, 'b': 2}"
|
||||
g: "{1, 2}"
|
||||
h: "[i for i in range(5)]"
|
||||
i: "{i for i in range(5)}"
|
||||
j: "{i: i for i in range(5)}"
|
||||
k: "(i for i in range(5))"
|
||||
l: "await 1"
|
||||
# error: [forward-annotation-syntax-error]
|
||||
m: "yield 1"
|
||||
# error: [forward-annotation-syntax-error]
|
||||
n: "yield from 1"
|
||||
o: "1 < 2"
|
||||
p: "call()"
|
||||
r: "[1, 2]"
|
||||
s: "(1, 2)"
|
||||
```
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
# Union
|
||||
|
||||
## Annotation
|
||||
|
||||
`typing.Union` can be used to construct union types, the same as the `|` operator.
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a1: Union[int, bool]
|
||||
a2: Union[int, Union[float, str]]
|
||||
a3: Union[int, None]
|
||||
a4: Union[Union[float, str]]
|
||||
a5: Union[int]
|
||||
a6: Union[()]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
# Since bool is a subtype of int we simplify to int here. But we do allow assigning boolean values (see below).
|
||||
# revealed: int
|
||||
reveal_type(a1)
|
||||
# revealed: int | float | str
|
||||
reveal_type(a2)
|
||||
# revealed: int | None
|
||||
reveal_type(a3)
|
||||
# revealed: float | str
|
||||
reveal_type(a4)
|
||||
# revealed: int
|
||||
reveal_type(a5)
|
||||
# revealed: Never
|
||||
reveal_type(a6)
|
||||
```
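
The collapsing and flattening behaviour can also be observed at runtime with `typing.get_args`; a minimal sketch:

```py
from typing import Union, get_args

assert Union[int] is int                        # a single-element Union collapses to the type itself
print(get_args(Union[int, Union[float, str]]))  # (<class 'int'>, <class 'float'>, <class 'str'>) -- nesting is flattened
```
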
|
||||
|
||||
## Assignment
|
||||
|
||||
```py
|
||||
from typing import Union
|
||||
|
||||
a: Union[int, str]
|
||||
a = 1
|
||||
a = ""
|
||||
a1: Union[int, bool]
|
||||
a1 = 1
|
||||
a1 = True
|
||||
# error: [invalid-assignment] "Object of type `Literal[b""]` is not assignable to `int | str`"
|
||||
a = b""
|
||||
```
|
||||
|
||||
## Typing Extensions
|
||||
|
||||
```py
|
||||
from typing_extensions import Union
|
||||
|
||||
a: Union[int, str]
|
||||
|
||||
def f():
|
||||
# revealed: int | str
|
||||
reveal_type(a)
|
||||
```
|
||||
@@ -51,12 +51,12 @@ reveal_type(c) # revealed: tuple[str, int]
|
||||
reveal_type(d) # revealed: tuple[tuple[str, str], tuple[int, int]]
|
||||
|
||||
# TODO: homogenous tuples, PEP-646 tuples
|
||||
reveal_type(e) # revealed: @Todo
|
||||
reveal_type(f) # revealed: @Todo
|
||||
reveal_type(g) # revealed: @Todo
|
||||
reveal_type(e) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(f) # revealed: @Todo(full tuple[...] support)
|
||||
reveal_type(g) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
# TODO: support more kinds of type expressions in annotations
|
||||
reveal_type(h) # revealed: @Todo
|
||||
reveal_type(h) # revealed: @Todo(full tuple[...] support)
|
||||
|
||||
reveal_type(i) # revealed: tuple[str | int, str | int]
|
||||
reveal_type(j) # revealed: tuple[str | int]
|
||||
|
||||
@@ -317,7 +317,7 @@ reveal_type(1 + A()) # revealed: int
|
||||
reveal_type(A() + "foo") # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type("foo" + A()) # revealed: @Todo
|
||||
reveal_type("foo" + A()) # revealed: @Todo(return type)
|
||||
|
||||
reveal_type(A() + b"foo") # revealed: A
|
||||
# TODO should be `A` since `bytes.__add__` doesn't support `A` instances
|
||||
@@ -325,7 +325,7 @@ reveal_type(b"foo" + A()) # revealed: bytes
|
||||
|
||||
reveal_type(A() + ()) # revealed: A
|
||||
# TODO this should be `A`, since `tuple.__add__` doesn't support `A` instances
|
||||
reveal_type(() + A()) # revealed: @Todo
|
||||
reveal_type(() + A()) # revealed: @Todo(return type)
|
||||
|
||||
literal_string_instance = "foo" * 1_000_000_000
|
||||
# the test is not testing what it's meant to be testing if this isn't a `LiteralString`:
|
||||
@@ -334,7 +334,7 @@ reveal_type(literal_string_instance) # revealed: LiteralString
|
||||
reveal_type(A() + literal_string_instance) # revealed: A
|
||||
# TODO should be `A` since `str.__add__` doesn't support `A` instances
|
||||
# TODO overloads
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo
|
||||
reveal_type(literal_string_instance + A()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Operations involving instances of classes inheriting from `Any`
|
||||
|
||||
@@ -38,7 +38,7 @@ if (x := 1) and bool_instance():
|
||||
if True or (x := 1):
|
||||
# TODO: infer that the second arm is never executed, and raise `unresolved-reference`.
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
reveal_type(x) # revealed: Never
|
||||
|
||||
if True and (x := 1):
|
||||
# TODO: infer that the second arm is always executed, do not raise a diagnostic
|
||||
|
||||
@@ -16,7 +16,7 @@ async def get_int_async() -> int:
|
||||
return 42
|
||||
|
||||
# TODO: we don't yet support `types.CoroutineType`, should be generic `Coroutine[Any, Any, int]`
|
||||
reveal_type(get_int_async()) # revealed: @Todo
|
||||
reveal_type(get_int_async()) # revealed: @Todo(generic types.CoroutineType)
|
||||
```
|
||||
|
||||
## Generic
|
||||
@@ -44,7 +44,7 @@ def bar() -> str:
|
||||
return "bar"
|
||||
|
||||
# TODO: should reveal `int`, as the decorator replaces `bar` with `foo`
|
||||
reveal_type(bar()) # revealed: @Todo
|
||||
reveal_type(bar()) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Invalid callable
|
||||
|
||||
@@ -58,7 +58,9 @@ reveal_type(c >= d) # revealed: Literal[True]
|
||||
#### Results with Ambiguity
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool: ...
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
@@ -134,23 +136,158 @@ reveal_type(c >= c) # revealed: Literal[True]
|
||||
|
||||
#### Non Boolean Rich Comparisons
|
||||
|
||||
Rich comparison methods defined in a class affect tuple comparisons as well. Proper type inference
|
||||
should be possible even in cases where these methods return non-boolean types.
|
||||
|
||||
Note: Tuples use lexicographic comparison. If the `==` result is true for every paired element, the comparison then falls back to comparing the tuples' lengths. Regardless of the return types of the element dunder methods, the final result can therefore still be a boolean value.

(In CPython, `==` and `!=` on tuples always produce boolean results, regardless of the return type of the dunder methods.)
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class A:
|
||||
def __eq__(self, o) -> str: ...
|
||||
def __ne__(self, o) -> int: ...
|
||||
def __lt__(self, o) -> float: ...
|
||||
def __le__(self, o) -> object: ...
|
||||
def __gt__(self, o) -> tuple: ...
|
||||
def __ge__(self, o) -> list: ...
|
||||
def __eq__(self, o: object) -> str:
|
||||
return "hello"
|
||||
|
||||
def __ne__(self, o: object) -> bytes:
|
||||
return b"world"
|
||||
|
||||
def __lt__(self, o: A) -> float:
|
||||
return 3.14
|
||||
|
||||
def __le__(self, o: A) -> complex:
|
||||
return complex(0.5, -0.5)
|
||||
|
||||
def __gt__(self, o: A) -> tuple:
|
||||
return (1, 2, 3)
|
||||
|
||||
def __ge__(self, o: A) -> list:
|
||||
return [1, 2, 3]
|
||||
|
||||
a = (A(), A())
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: float | Literal[False]
|
||||
reveal_type(a <= a) # revealed: complex | Literal[True]
|
||||
reveal_type(a > a) # revealed: tuple | Literal[False]
|
||||
reveal_type(a >= a) # revealed: list | Literal[True]
|
||||
|
||||
# If lexicographic comparison is finished before comparing A()
|
||||
b = ("1_foo", A())
|
||||
c = ("2_bar", A())
|
||||
|
||||
reveal_type(b == c) # revealed: Literal[False]
|
||||
reveal_type(b != c) # revealed: Literal[True]
|
||||
reveal_type(b < c) # revealed: Literal[True]
|
||||
reveal_type(b <= c) # revealed: Literal[True]
|
||||
reveal_type(b > c) # revealed: Literal[False]
|
||||
reveal_type(b >= c) # revealed: Literal[False]
|
||||
|
||||
class B:
|
||||
def __lt__(self, o: B) -> set:
|
||||
return set()
|
||||
|
||||
reveal_type((A(), B()) < (A(), B())) # revealed: float | set | Literal[False]
|
||||
```
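
The coercion described in the note can be reproduced in plain CPython; a minimal sketch with a hypothetical `Weird` class whose element comparisons return non-boolean values:

```py
class Weird:
    def __eq__(self, other):
        return "truthy, but not a bool"  # element-level `==` returns a str

    def __lt__(self, other):
        return 3.14                      # element-level `<` returns a float

left, right = (Weird(),), (Weird(),)

print(left == right)       # True  -- tuple comparison coerces the element result to bool
print(Weird() == Weird())  # 'truthy, but not a bool' -- the element comparison itself is not coerced
print(left < right)        # False -- every paired element compares "equal", so the lengths decide
```
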
|
||||
|
||||
#### Special Handling of Eq and NotEq in Lexicographic Comparisons
|
||||
|
||||
> Example: `(int_instance(), "foo") == (int_instance(), "bar")`
|
||||
|
||||
`Eq` and `NotEq` have unique behavior compared to the other operators in lexicographic comparisons. For `Eq`, if any paired elements are known to be unequal, we can immediately conclude that the result is `Literal[False]`; for `NotEq`, the same observation lets us immediately conclude `Literal[True]`.
|
||||
|
||||
In contrast, with operators like `<` and `>`, the comparison must consider each pair of elements
|
||||
sequentially, and the final outcome might remain ambiguous until all pairs are compared.
|
||||
|
||||
```py
|
||||
def str_instance() -> str:
|
||||
return "hello"
|
||||
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
reveal_type("foo" == "bar") # revealed: Literal[False]
|
||||
reveal_type(("foo",) == ("bar",)) # revealed: Literal[False]
|
||||
reveal_type((4, "foo") == (4, "bar")) # revealed: Literal[False]
|
||||
reveal_type((int_instance(), "foo") == (int_instance(), "bar")) # revealed: Literal[False]
|
||||
|
||||
a = (str_instance(), int_instance(), "foo")
|
||||
|
||||
reveal_type(a == a) # revealed: bool
|
||||
reveal_type(a != a) # revealed: bool
|
||||
reveal_type(a < a) # revealed: bool
|
||||
reveal_type(a <= a) # revealed: bool
|
||||
reveal_type(a > a) # revealed: bool
|
||||
reveal_type(a >= a) # revealed: bool
|
||||
|
||||
b = (str_instance(), int_instance(), "bar")
|
||||
|
||||
reveal_type(a == b) # revealed: Literal[False]
|
||||
reveal_type(a != b) # revealed: Literal[True]
|
||||
reveal_type(a < b) # revealed: bool
|
||||
reveal_type(a <= b) # revealed: bool
|
||||
reveal_type(a > b) # revealed: bool
|
||||
reveal_type(a >= b) # revealed: bool
|
||||
|
||||
c = (str_instance(), int_instance(), "foo", "different_length")
|
||||
reveal_type(a == c) # revealed: Literal[False]
|
||||
reveal_type(a != c) # revealed: Literal[True]
|
||||
reveal_type(a < c) # revealed: bool
|
||||
reveal_type(a <= c) # revealed: bool
|
||||
reveal_type(a > c) # revealed: bool
|
||||
reveal_type(a >= c) # revealed: bool
|
||||
```
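
A minimal runtime sketch of the same short-circuiting, using a hypothetical `Loud` class to show that the remaining element comparisons are never consulted once one pair is unequal:

```py
class Loud:
    def __eq__(self, other):
        print("Loud.__eq__ called")
        return True

# The first pair (1 vs 2) is already unequal, so tuple `==` is decided there:
# this prints only `False`, and `Loud.__eq__` is never invoked.
print((1, Loud()) == (2, Loud()))
```
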
|
||||
|
||||
#### Error Propagation
|
||||
|
||||
Errors occurring within a tuple comparison should propagate outward. However, if the comparison can be resolved conclusively before the problematic pair of elements is reached, no error should be raised.
|
||||
|
||||
```py
|
||||
def int_instance() -> int:
|
||||
return 42
|
||||
|
||||
def str_instance() -> str:
|
||||
return "hello"
|
||||
|
||||
class A: ...
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`"
|
||||
A() < A()
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`"
|
||||
A() <= A()
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`"
|
||||
A() > A()
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`"
|
||||
A() >= A()
|
||||
|
||||
a = (0, int_instance(), A())
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a < a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `<=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a <= a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a > a) # revealed: Unknown
|
||||
# error: [unsupported-operator] "Operator `>=` is not supported for types `A` and `A`, in comparing `tuple[Literal[0], int, A]` with `tuple[Literal[0], int, A]`"
|
||||
reveal_type(a >= a) # revealed: Unknown
|
||||
|
||||
# Comparison between `a` and `b` should only involve the first elements, `Literal[0]` and `Literal[99999]`,
|
||||
# and should terminate immediately.
|
||||
b = (99999, int_instance(), A())
|
||||
|
||||
reveal_type(a < b) # revealed: Literal[True]
|
||||
reveal_type(a <= b) # revealed: Literal[True]
|
||||
reveal_type(a > b) # revealed: Literal[False]
|
||||
reveal_type(a >= b) # revealed: Literal[False]
|
||||
```
|
||||
|
||||
### Membership Test Comparisons
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class A: ...
|
||||
|
||||
a = 1 in 7 # error: "Operator `in` is not supported for types `Literal[1]` and `Literal[7]`"
|
||||
reveal_type(a) # revealed: bool
|
||||
|
||||
@@ -33,4 +35,8 @@ reveal_type(e) # revealed: bool
|
||||
f = (1, 2) < (1, "hello")
|
||||
# TODO: should be Unknown, once operand type check is implemented
|
||||
reveal_type(f) # revealed: bool
|
||||
|
||||
# error: [unsupported-operator] "Operator `<` is not supported for types `A` and `A`, in comparing `tuple[bool, A]` with `tuple[bool, A]`"
|
||||
g = (bool_instance(), A()) < (bool_instance(), A())
|
||||
reveal_type(g) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -50,11 +50,11 @@ def foo(
|
||||
help()
|
||||
except x as e:
|
||||
# TODO: should be `AttributeError`
|
||||
reveal_type(e) # revealed: @Todo
|
||||
reveal_type(e) # revealed: @Todo(exception type)
|
||||
except y as f:
|
||||
# TODO: should be `OSError | RuntimeError`
|
||||
reveal_type(f) # revealed: @Todo
|
||||
reveal_type(f) # revealed: @Todo(exception type)
|
||||
except z as g:
|
||||
# TODO: should be `BaseException`
|
||||
reveal_type(g) # revealed: @Todo
|
||||
reveal_type(g) # revealed: @Todo(exception type)
|
||||
```
|
||||
|
||||
@@ -22,3 +22,22 @@ reveal_type(1 if None else 2) # revealed: Literal[2]
|
||||
reveal_type(1 if "" else 2) # revealed: Literal[2]
|
||||
reveal_type(1 if 0 else 2) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Leaked Narrowing Constraint
|
||||
|
||||
(issue #14588)
|
||||
|
||||
The test inside an if expression should not affect code outside of the expression.
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
x: Literal[42, "hello"] = 42 if bool_instance() else "hello"
|
||||
|
||||
reveal_type(x) # revealed: Literal[42] | Literal["hello"]
|
||||
|
||||
_ = ... if isinstance(x, str) else ...
|
||||
|
||||
reveal_type(x) # revealed: Literal[42] | Literal["hello"]
|
||||
```
|
||||
|
||||
@@ -18,7 +18,7 @@ box: MyBox[int] = MyBox(5)
|
||||
wrong_innards: MyBox[int] = MyBox("five")
|
||||
|
||||
# TODO reveal int
|
||||
reveal_type(box.data) # revealed: @Todo
|
||||
reveal_type(box.data) # revealed: @Todo(instance attributes)
|
||||
|
||||
reveal_type(MyBox.box_model_number) # revealed: Literal[695]
|
||||
```
|
||||
@@ -39,7 +39,7 @@ class MySecureBox[T](MyBox[T]): ...
|
||||
secure_box: MySecureBox[int] = MySecureBox(5)
|
||||
reveal_type(secure_box) # revealed: MySecureBox
|
||||
# TODO reveal int
|
||||
reveal_type(secure_box.data) # revealed: @Todo
|
||||
reveal_type(secure_box.data) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Cyclical class definition
|
||||
@@ -60,52 +60,20 @@ reveal_type(S) # revealed: Literal[S]
|
||||
|
||||
## Type params
|
||||
|
||||
A PEP695 type variable defines a value of type `typing.TypeVar` with attributes `__name__`,
|
||||
`__bounds__`, `__constraints__`, and `__default__` (the latter three all lazily evaluated):
|
||||
A PEP695 type variable defines a value of type `typing.TypeVar`.
|
||||
|
||||
```py
|
||||
def f[T, U: A, V: (A, B), W = A, X: A = A1]():
|
||||
def f[T]():
|
||||
reveal_type(T) # revealed: T
|
||||
reveal_type(T.__name__) # revealed: Literal["T"]
|
||||
reveal_type(T.__bound__) # revealed: None
|
||||
reveal_type(T.__constraints__) # revealed: tuple[()]
|
||||
reveal_type(T.__default__) # revealed: NoDefault
|
||||
|
||||
reveal_type(U) # revealed: U
|
||||
reveal_type(U.__name__) # revealed: Literal["U"]
|
||||
reveal_type(U.__bound__) # revealed: type[A]
|
||||
reveal_type(U.__constraints__) # revealed: tuple[()]
|
||||
reveal_type(U.__default__) # revealed: NoDefault
|
||||
|
||||
reveal_type(V) # revealed: V
|
||||
reveal_type(V.__name__) # revealed: Literal["V"]
|
||||
reveal_type(V.__bound__) # revealed: None
|
||||
reveal_type(V.__constraints__) # revealed: tuple[type[A], type[B]]
|
||||
reveal_type(V.__default__) # revealed: NoDefault
|
||||
|
||||
reveal_type(W) # revealed: W
|
||||
reveal_type(W.__name__) # revealed: Literal["W"]
|
||||
reveal_type(W.__bound__) # revealed: None
|
||||
reveal_type(W.__constraints__) # revealed: tuple[()]
|
||||
reveal_type(W.__default__) # revealed: type[A]
|
||||
|
||||
reveal_type(X) # revealed: X
|
||||
reveal_type(X.__name__) # revealed: Literal["X"]
|
||||
reveal_type(X.__bound__) # revealed: type[A]
|
||||
reveal_type(X.__constraints__) # revealed: tuple[()]
|
||||
reveal_type(X.__default__) # revealed: type[A1]
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
class A1(A): ...
|
||||
```
|
||||
|
||||
## Minimum two constraints
|
||||
|
||||
A typevar with less than two constraints emits a diagnostic and is treated as unconstrained:
|
||||
A typevar with less than two constraints emits a diagnostic:
|
||||
|
||||
```py
|
||||
# error: [invalid-typevar-constraints] "TypeVar must have at least two constrained types"
|
||||
def f[T: (int,)]():
|
||||
reveal_type(T.__constraints__) # revealed: tuple[()]
|
||||
pass
|
||||
```
|
||||
|
||||
@@ -51,6 +51,8 @@ invalid1: Literal[3 + 4]
|
||||
invalid2: Literal[4 + 3j]
|
||||
# error: [invalid-literal-parameter]
|
||||
invalid3: Literal[(3, 4)]
|
||||
|
||||
hello = "hello"
|
||||
invalid4: Literal[
|
||||
1 + 2, # error: [invalid-literal-parameter]
|
||||
"foo",
|
||||
@@ -76,7 +78,7 @@ from other import Literal
|
||||
a1: Literal[26]
|
||||
|
||||
def f():
|
||||
reveal_type(a1) # revealed: @Todo
|
||||
reveal_type(a1) # revealed: @Todo(generics)
|
||||
```
|
||||
|
||||
## Detecting typing_extensions.Literal
|
||||
|
||||
@@ -18,7 +18,7 @@ async def foo():
|
||||
pass
|
||||
|
||||
# TODO: should reveal `Unknown` because `__aiter__` is not defined
|
||||
# revealed: @Todo
|
||||
# revealed: @Todo(async iterables/iterators)
|
||||
# error: [possibly-unresolved-reference]
|
||||
reveal_type(x)
|
||||
```
|
||||
@@ -40,6 +40,6 @@ async def foo():
|
||||
pass
|
||||
|
||||
# error: [possibly-unresolved-reference]
|
||||
# revealed: @Todo
|
||||
# revealed: @Todo(async iterables/iterators)
|
||||
reveal_type(x)
|
||||
```
|
||||
|
||||
@@ -52,3 +52,29 @@ else:
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
reveal_type(y) # revealed: Literal[1, 2, 4]
|
||||
```
|
||||
|
||||
## Nested while loops
|
||||
|
||||
```py
|
||||
def flag() -> bool:
|
||||
return True
|
||||
|
||||
x = 1
|
||||
|
||||
while flag():
|
||||
x = 2
|
||||
|
||||
while flag():
|
||||
x = 3
|
||||
if flag():
|
||||
break
|
||||
else:
|
||||
x = 4
|
||||
|
||||
if flag():
|
||||
break
|
||||
else:
|
||||
x = 5
|
||||
|
||||
reveal_type(x) # revealed: Literal[3, 4, 5]
|
||||
```
|
||||
|
||||
@@ -171,7 +171,7 @@ def f(*args, **kwargs) -> int: ...
|
||||
class A(metaclass=f): ...
|
||||
|
||||
# TODO should be `type[int]`
|
||||
reveal_type(A.__class__) # revealed: @Todo
|
||||
reveal_type(A.__class__) # revealed: @Todo(metaclass not a class)
|
||||
```
|
||||
|
||||
## Cyclic
|
||||
|
||||
@@ -256,7 +256,7 @@ class O: ...
|
||||
class X(O): ...
|
||||
class Y(O): ...
|
||||
|
||||
if bool():
|
||||
if returns_bool():
|
||||
foo = Y
|
||||
else:
|
||||
foo = object
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
# Consolidating narrowed types after if statement
|
||||
|
||||
## After if-else statements, narrowing has no effect if the variable is not mutated in any branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int | None
|
||||
```
|
||||
|
||||
## Narrowing can have a persistent effect if the variable is mutated in one branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 10
|
||||
else:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
## An if statement without an explicit `else` branch is equivalent to one with a no-op `else` branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
y = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
|
||||
if y is None:
|
||||
pass
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
reveal_type(y) # revealed: int | None
|
||||
```
|
||||
|
||||
## An if-elif without an explicit else branch is equivalent to one with an empty else branch
|
||||
|
||||
```py
|
||||
def optional_int() -> int | None: ...
|
||||
|
||||
x = optional_int()
|
||||
|
||||
if x is None:
|
||||
x = 0
|
||||
elif x > 50:
|
||||
x = 50
|
||||
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
152
crates/red_knot_python_semantic/resources/mdtest/narrow/type.md
Normal file
@@ -0,0 +1,152 @@
|
||||
# Narrowing for checks involving `type(x)`
|
||||
|
||||
## `type(x) is C`
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is A:
|
||||
reveal_type(x) # revealed: A
|
||||
else:
|
||||
# It would be wrong to infer `B` here. The type
|
||||
# of `x` could be a subclass of `A`, so we need
|
||||
# to infer the full union type:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
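
A minimal runtime sketch of the subclass caveat, with a hypothetical `SubA` subclass:

```py
class A: ...
class B: ...
class SubA(A): ...

def describe(x: A | B) -> str:
    if type(x) is A:
        return "exactly A"
    # `x` may still be an instance of `A` (e.g. `SubA`),
    # so narrowing this branch to `B` would be unsound.
    return "not exactly A"

print(describe(SubA()))       # not exactly A
print(isinstance(SubA(), A))  # True
```
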
|
||||
|
||||
## `type(x) is not C`
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is not A:
|
||||
# Same reasoning as above: no narrowing should occur here.
|
||||
reveal_type(x) # revealed: A | B
|
||||
else:
|
||||
reveal_type(x) # revealed: A
|
||||
```
|
||||
|
||||
## `type(x) == C`, `type(x) != C`
|
||||
|
||||
No narrowing can occur for equality comparisons, since there might be a custom `__eq__`
|
||||
implementation on the metaclass.
|
||||
|
||||
TODO: Narrowing might be possible in some cases where the classes themselves are `@final` or their
|
||||
metaclass is `@final`.
|
||||
|
||||
```py
|
||||
class IsEqualToEverything(type):
|
||||
def __eq__(cls, other):
|
||||
return True
|
||||
|
||||
class A(metaclass=IsEqualToEverything): ...
|
||||
class B(metaclass=IsEqualToEverything): ...
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return B()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) == A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
|
||||
if type(x) != A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
|
||||
|
||||
## No narrowing for custom `type` callable
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def type(x):
|
||||
return int
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if type(x) is A:
|
||||
reveal_type(x) # revealed: A | B
|
||||
else:
|
||||
reveal_type(x) # revealed: A | B
|
||||
```
|
||||
|
||||
## No narrowing for multiple arguments
|
||||
|
||||
No narrowing should occur if `type` is used to dynamically create a class:
|
||||
|
||||
```py
|
||||
def get_str_or_int() -> str | int:
|
||||
return "test"
|
||||
|
||||
x = get_str_or_int()
|
||||
|
||||
if type(x, (), {}) is str:
|
||||
reveal_type(x) # revealed: str | int
|
||||
else:
|
||||
reveal_type(x) # revealed: str | int
|
||||
```
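
A minimal sketch of the three-argument form, which builds a new class at runtime rather than inspecting an existing object:

```py
# `type(name, bases, namespace)` dynamically creates a class.
Point = type("Point", (), {"x": 0, "y": 0})
p = Point()
print(type(p).__name__)  # Point
print(Point.x, Point.y)  # 0 0
```
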
|
||||
|
||||
## No narrowing for keyword arguments
|
||||
|
||||
`type` can't be used with a keyword argument:
|
||||
|
||||
```py
|
||||
def get_str_or_int() -> str | int:
|
||||
return "test"
|
||||
|
||||
x = get_str_or_int()
|
||||
|
||||
# TODO: we could issue a diagnostic here
|
||||
if type(object=x) is str:
|
||||
reveal_type(x) # revealed: str | int
|
||||
```
|
||||
|
||||
## Narrowing if `type` is aliased
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias_for_type = type
|
||||
|
||||
def get_a_or_b() -> A | B:
|
||||
return A()
|
||||
|
||||
x = get_a_or_b()
|
||||
|
||||
if alias_for_type(x) is A:
|
||||
reveal_type(x) # revealed: A
|
||||
```
|
||||
|
||||
## Limitations
|
||||
|
||||
```py
|
||||
class Base: ...
|
||||
class Derived(Base): ...
|
||||
|
||||
def get_base() -> Base:
|
||||
return Base()
|
||||
|
||||
x = get_base()
|
||||
|
||||
if type(x) is Base:
|
||||
# Ideally, this could be narrower, but there is no way to
# express a constraint like `Base & ~ProperSubtypeOf[Base]`.
|
||||
reveal_type(x) # revealed: Base
|
||||
```
|
||||
@@ -17,8 +17,7 @@ reveal_type(__doc__) # revealed: str | None
|
||||
# (needs support for `*` imports)
|
||||
reveal_type(__spec__) # revealed: Unknown | None
|
||||
|
||||
# TODO: generics
|
||||
reveal_type(__path__) # revealed: @Todo
|
||||
reveal_type(__path__) # revealed: @Todo(generics)
|
||||
|
||||
class X:
|
||||
reveal_type(__name__) # revealed: str
|
||||
@@ -64,7 +63,7 @@ reveal_type(typing.__class__) # revealed: Literal[type]
|
||||
|
||||
# TODO: needs support for attribute access on instances, properties and generics;
|
||||
# should be `dict[str, Any]`
|
||||
reveal_type(typing.__dict__) # revealed: @Todo
|
||||
reveal_type(typing.__dict__) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
Typeshed includes a fake `__getattr__` method in the stub for `types.ModuleType` to help out with
|
||||
@@ -96,8 +95,8 @@ from foo import __dict__ as foo_dict
|
||||
|
||||
# TODO: needs support for attribute access on instances, properties, and generics;
|
||||
# should be `dict[str, Any]` for both of these:
|
||||
reveal_type(foo.__dict__) # revealed: @Todo
|
||||
reveal_type(foo_dict) # revealed: @Todo
|
||||
reveal_type(foo.__dict__) # revealed: @Todo(instance attributes)
|
||||
reveal_type(foo_dict) # revealed: @Todo(instance attributes)
|
||||
```
|
||||
|
||||
## Conditionally global or `ModuleType` attribute
|
||||
|
||||
@@ -0,0 +1,303 @@
|
||||
# Statically-known branches
|
||||
|
||||
## Always false
|
||||
|
||||
### If
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
if False:
|
||||
x = 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### Else
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
pass
|
||||
else:
|
||||
x = 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
## Always true
|
||||
|
||||
### If
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
x = 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
### Else
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
if False:
|
||||
pass
|
||||
else:
|
||||
x = 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Combination
|
||||
|
||||
```py
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
x = 2
|
||||
else:
|
||||
x = 3
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Nested
|
||||
|
||||
```py path=nested_if_true_if_true.py
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
if True:
|
||||
x = 2
|
||||
else:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
```py path=nested_if_true_if_false.py
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
if False:
|
||||
x = 2
|
||||
else:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
```
|
||||
|
||||
```py path=nested_if_true_if_bool.py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x = 1
|
||||
|
||||
if True:
|
||||
if flag():
|
||||
x = 2
|
||||
else:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[2, 3]
|
||||
```
|
||||
|
||||
```py path=nested_if_bool_if_true.py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x = 1
|
||||
|
||||
if flag():
|
||||
if True:
|
||||
x = 2
|
||||
else:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[2, 4]
|
||||
```
|
||||
|
||||
```py path=nested_else_if_true.py
|
||||
x = 1
|
||||
|
||||
if False:
|
||||
x = 2
|
||||
else:
|
||||
if True:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[3]
|
||||
```
|
||||
|
||||
```py path=nested_else_if_false.py
|
||||
x = 1
|
||||
|
||||
if False:
|
||||
x = 2
|
||||
else:
|
||||
if False:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[4]
|
||||
```
|
||||
|
||||
```py path=nested_else_if_bool.py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x = 1
|
||||
|
||||
if False:
|
||||
x = 2
|
||||
else:
|
||||
if flag():
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(x) # revealed: Literal[3, 4]
|
||||
```
|
||||
|
||||
## If-expressions
|
||||
|
||||
### Always true
|
||||
|
||||
```py
|
||||
x = 1 if True else 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### Always false
|
||||
|
||||
```py
|
||||
x = 1 if False else 2
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Boolean expressions
|
||||
|
||||
### Always true
|
||||
|
||||
```py
|
||||
(x := 1) == 1 or (x := 2)
|
||||
|
||||
reveal_type(x) # revealed: Literal[1]
|
||||
```
|
||||
|
||||
### Always false
|
||||
|
||||
```py
|
||||
(x := 1) == 0 or (x := 2)
|
||||
|
||||
reveal_type(x) # revealed: Literal[2]
|
||||
```
|
||||
|
||||
## Conditional declarations
|
||||
|
||||
```py path=if_false.py
|
||||
x: str
|
||||
|
||||
if False:
|
||||
x: int
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: str
|
||||
```
|
||||
|
||||
```py path=if_true_else.py
|
||||
x: str
|
||||
|
||||
if True:
|
||||
pass
|
||||
else:
|
||||
x: int
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: str
|
||||
```
|
||||
|
||||
```py path=if_true.py
|
||||
x: str
|
||||
|
||||
if True:
|
||||
x: int
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
```py path=if_false_else.py
|
||||
x: str
|
||||
|
||||
if False:
|
||||
pass
|
||||
else:
|
||||
x: int
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: int
|
||||
```
|
||||
|
||||
```py path=if_bool.py
|
||||
def flag() -> bool: ...
|
||||
|
||||
x: str
|
||||
|
||||
if flag():
|
||||
x: int
|
||||
|
||||
def f() -> None:
|
||||
reveal_type(x) # revealed: str | int
|
||||
```
|
||||
|
||||
## Conditionally defined functions
|
||||
|
||||
```py
|
||||
def f() -> int: ...
|
||||
def g() -> int: ...
|
||||
|
||||
if True:
|
||||
def f() -> str: ...
|
||||
|
||||
else:
|
||||
def g() -> str: ...
|
||||
|
||||
reveal_type(f()) # revealed: str
|
||||
reveal_type(g()) # revealed: int
|
||||
```
|
||||
|
||||
## Conditionally defined class attributes
|
||||
|
||||
```py
|
||||
class C:
|
||||
if True:
|
||||
x: int = 1
|
||||
else:
|
||||
x: str = "a"
|
||||
|
||||
reveal_type(C.x) # revealed: int
|
||||
```
|
||||
|
||||
## TODO
|
||||
|
||||
- declarations vs bindings => NoDefault: NoDefaultType
|
||||
- conditional imports
|
||||
- conditional class definitions
|
||||
- compare with tests in if.md => Statically known branches
|
||||
- boundness
|
||||
- TODO in `issubclass.md`
|
||||
@@ -27,7 +27,7 @@ def int_instance() -> int:
|
||||
|
||||
a = b"abcde"[int_instance()]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(a) # revealed: @Todo
|
||||
reveal_type(a) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
## Slices
|
||||
@@ -47,11 +47,11 @@ def int_instance() -> int: ...
|
||||
|
||||
byte_slice1 = b[int_instance() : int_instance()]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(byte_slice1) # revealed: @Todo
|
||||
reveal_type(byte_slice1) # revealed: @Todo(return type)
|
||||
|
||||
def bytes_instance() -> bytes: ...
|
||||
|
||||
byte_slice2 = bytes_instance()[0:5]
|
||||
# TODO: Support overloads... Should be `bytes`
|
||||
reveal_type(byte_slice2) # revealed: @Todo
|
||||
reveal_type(byte_slice2) # revealed: @Todo(return type)
|
||||
```
|
||||
|
||||
@@ -21,10 +21,11 @@ reveal_type(Identity[0]) # revealed: str
|
||||
## Class getitem union
|
||||
|
||||
```py
|
||||
flag = True
|
||||
def bool_instance() -> bool:
|
||||
return True

class UnionClassGetItem:
if flag:
if bool_instance():

def __class_getitem__(cls, item: int) -> str:
return item
@@ -59,9 +60,10 @@ reveal_type(x[0]) # revealed: str | int
## Class getitem with unbound method union

```py
flag = True
def bool_instance() -> bool:
return True

if flag:
if bool_instance():
class Spam:
def __class_getitem__(self, x: int) -> str:
return "foo"
@@ -77,9 +79,10 @@ reveal_type(Spam[42])
## TODO: Class getitem non-class union

```py
flag = True
def bool_instance() -> bool:
return True

if flag:
if bool_instance():
class Eggs:
def __class_getitem__(self, x: int) -> str:
return "foo"

@@ -30,10 +30,11 @@ reveal_type(Identity()[0]) # revealed: int
## Getitem union

```py
flag = True
def bool_instance() -> bool:
return True

class Identity:
if flag:
if bool_instance():

def __getitem__(self, index: int) -> int:
return index

@@ -12,13 +12,13 @@ x = [1, 2, 3]
reveal_type(x) # revealed: list

# TODO reveal int
reveal_type(x[0]) # revealed: @Todo
reveal_type(x[0]) # revealed: @Todo(return type)

# TODO reveal list
reveal_type(x[0:1]) # revealed: @Todo
reveal_type(x[0:1]) # revealed: @Todo(return type)

# TODO error
reveal_type(x["a"]) # revealed: @Todo
reveal_type(x["a"]) # revealed: @Todo(return type)
```

## Assignments within list assignment

@@ -23,7 +23,7 @@ def int_instance() -> int: ...

a = "abcde"[int_instance()]
# TODO: Support overloads... Should be `str`
reveal_type(a) # revealed: @Todo
reveal_type(a) # revealed: @Todo(return type)
```

## Slices
@@ -78,13 +78,13 @@ def int_instance() -> int: ...

substring1 = s[int_instance() : int_instance()]
# TODO: Support overloads... Should be `LiteralString`
reveal_type(substring1) # revealed: @Todo
reveal_type(substring1) # revealed: @Todo(return type)

def str_instance() -> str: ...

substring2 = str_instance()[0:5]
# TODO: Support overloads... Should be `str`
reveal_type(substring2) # revealed: @Todo
reveal_type(substring2) # revealed: @Todo(return type)
```

## Unsupported slice types

@@ -71,5 +71,5 @@ def int_instance() -> int: ...

tuple_slice = t[int_instance() : int_instance()]
# TODO: Support overloads... Should be `tuple[Literal[1, 'a', b"b"] | None, ...]`
reveal_type(tuple_slice) # revealed: @Todo
reveal_type(tuple_slice) # revealed: @Todo(return type)
```

@@ -22,23 +22,23 @@ type:
```py
import sys

reveal_type(sys.version_info >= (3, 8)) # revealed: Literal[True]
reveal_type((3, 8) <= sys.version_info) # revealed: Literal[True]
reveal_type(sys.version_info >= (3, 9)) # revealed: Literal[True]
reveal_type((3, 9) <= sys.version_info) # revealed: Literal[True]

reveal_type(sys.version_info > (3, 8)) # revealed: Literal[True]
reveal_type((3, 8) < sys.version_info) # revealed: Literal[True]
reveal_type(sys.version_info > (3, 9)) # revealed: Literal[True]
reveal_type((3, 9) < sys.version_info) # revealed: Literal[True]

reveal_type(sys.version_info < (3, 8)) # revealed: Literal[False]
reveal_type((3, 8) > sys.version_info) # revealed: Literal[False]
reveal_type(sys.version_info < (3, 9)) # revealed: Literal[False]
reveal_type((3, 9) > sys.version_info) # revealed: Literal[False]

reveal_type(sys.version_info <= (3, 8)) # revealed: Literal[False]
reveal_type((3, 8) >= sys.version_info) # revealed: Literal[False]
reveal_type(sys.version_info <= (3, 9)) # revealed: Literal[False]
reveal_type((3, 9) >= sys.version_info) # revealed: Literal[False]

reveal_type(sys.version_info == (3, 8)) # revealed: Literal[False]
reveal_type((3, 8) == sys.version_info) # revealed: Literal[False]
reveal_type(sys.version_info == (3, 9)) # revealed: Literal[False]
reveal_type((3, 9) == sys.version_info) # revealed: Literal[False]

reveal_type(sys.version_info != (3, 8)) # revealed: Literal[True]
reveal_type((3, 8) != sys.version_info) # revealed: Literal[True]
reveal_type(sys.version_info != (3, 9)) # revealed: Literal[True]
reveal_type((3, 9) != sys.version_info) # revealed: Literal[True]
```

## Non-literal types from comparisons
@@ -49,17 +49,16 @@ sometimes not:
```py
import sys

reveal_type(sys.version_info >= (3, 8, 1)) # revealed: bool
reveal_type(sys.version_info >= (3, 8, 1, "final", 0)) # revealed: bool
reveal_type(sys.version_info >= (3, 9, 1)) # revealed: Literal[True]
reveal_type(sys.version_info >= (3, 9, 1, "final", 0)) # revealed: Literal[True]

# TODO: While this won't fail at runtime, the user has probably made a mistake
# if they're comparing a tuple of length >5 with `sys.version_info`
# (`sys.version_info` is a tuple of length 5). It might be worth
# emitting a lint diagnostic of some kind warning them about the probable error?
reveal_type(sys.version_info >= (3, 8, 1, "final", 0, 5)) # revealed: bool
reveal_type(sys.version_info >= (3, 9, 1, "final", 0, 5)) # revealed: Literal[True]

# TODO: this should be `Literal[False]`; see #14279
reveal_type(sys.version_info == (3, 8, 1, "finallllll", 0)) # revealed: bool
reveal_type(sys.version_info == (3, 8, 1, "finallllll", 0)) # revealed: Literal[False]
```

## Imports and aliases
@@ -71,11 +70,11 @@ another name:
from sys import version_info
from sys import version_info as foo

reveal_type(version_info >= (3, 8)) # revealed: Literal[True]
reveal_type(foo >= (3, 8)) # revealed: Literal[True]
reveal_type(version_info >= (3, 9)) # revealed: Literal[True]
reveal_type(foo >= (3, 9)) # revealed: Literal[True]

bar = version_info
reveal_type(bar >= (3, 8)) # revealed: Literal[True]
reveal_type(bar >= (3, 9)) # revealed: Literal[True]
```

## Non-stdlib modules named `sys`
@@ -92,7 +91,7 @@ version_info: tuple[int, int] = (4, 2)
```py path=package/script.py
from .sys import version_info

reveal_type(version_info >= (3, 8)) # revealed: bool
reveal_type(version_info >= (3, 9)) # revealed: bool
```

## Accessing fields by name
@@ -103,8 +102,8 @@ The fields of `sys.version_info` can be accessed by name:
import sys

reveal_type(sys.version_info.major >= 3) # revealed: Literal[True]
reveal_type(sys.version_info.minor >= 8) # revealed: Literal[True]
reveal_type(sys.version_info.minor >= 9) # revealed: Literal[False]
reveal_type(sys.version_info.minor >= 13) # revealed: Literal[True]
reveal_type(sys.version_info.minor >= 14) # revealed: Literal[False]
```

But the `micro`, `releaselevel` and `serial` fields are inferred as `@Todo` until we support
@@ -113,9 +112,9 @@ properties on instance types:
```py path=b.py
import sys

reveal_type(sys.version_info.micro) # revealed: @Todo
reveal_type(sys.version_info.releaselevel) # revealed: @Todo
reveal_type(sys.version_info.serial) # revealed: @Todo
reveal_type(sys.version_info.micro) # revealed: @Todo(instance attributes)
reveal_type(sys.version_info.releaselevel) # revealed: @Todo(instance attributes)
reveal_type(sys.version_info.serial) # revealed: @Todo(instance attributes)
```

## Accessing fields by index/slice
@@ -126,14 +125,14 @@ The fields of `sys.version_info` can be accessed by index or by slice:
import sys

reveal_type(sys.version_info[0] < 3) # revealed: Literal[False]
reveal_type(sys.version_info[1] > 8) # revealed: Literal[False]
reveal_type(sys.version_info[1] > 13) # revealed: Literal[False]

# revealed: tuple[Literal[3], Literal[8], int, Literal["alpha", "beta", "candidate", "final"], int]
# revealed: tuple[Literal[3], Literal[13], int, Literal["alpha", "beta", "candidate", "final"], int]
reveal_type(sys.version_info[:5])

reveal_type(sys.version_info[:2] >= (3, 8)) # revealed: Literal[True]
reveal_type(sys.version_info[0:2] >= (3, 9)) # revealed: Literal[False]
reveal_type(sys.version_info[:3] >= (3, 9, 1)) # revealed: Literal[False]
reveal_type(sys.version_info[:2] >= (3, 13)) # revealed: Literal[True]
reveal_type(sys.version_info[0:2] >= (3, 14)) # revealed: Literal[False]
reveal_type(sys.version_info[:3] >= (3, 14, 1)) # revealed: Literal[False]
reveal_type(sys.version_info[3] == "final") # revealed: bool
reveal_type(sys.version_info[3] == "finalllllll") # revealed: Literal[False]
```

@@ -0,0 +1,71 @@
# Type aliases

## Basic

```py
type IntOrStr = int | str

reveal_type(IntOrStr) # revealed: typing.TypeAliasType
reveal_type(IntOrStr.__name__) # revealed: Literal["IntOrStr"]

x: IntOrStr = 1

reveal_type(x) # revealed: Literal[1]

def f() -> None:
reveal_type(x) # revealed: int | str
```

## `__value__` attribute

```py
type IntOrStr = int | str

# TODO: This should either fall back to the specified type from typeshed,
# which is `Any`, or be the actual type of the runtime value expression
# `int | str`, i.e. `types.UnionType`.
reveal_type(IntOrStr.__value__) # revealed: @Todo(instance attributes)
```
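For reference, the runtime behaviour the TODO alludes to can be checked directly: on Python 3.12+ the lazily evaluated `__value__` of such an alias is a `types.UnionType`. A minimal sketch (not part of the test file above):

```py
import types

type IntOrStr = int | str

# Accessing `__value__` evaluates the alias's stored expression lazily.
value = IntOrStr.__value__
print(value)  # int | str
assert isinstance(value, types.UnionType)
```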

## Invalid assignment

```py
type OptionalInt = int | None

# error: [invalid-assignment]
x: OptionalInt = "1"
```

## Type aliases in type aliases

```py
type IntOrStr = int | str
type IntOrStrOrBytes = IntOrStr | bytes

x: IntOrStrOrBytes = 1

def f() -> None:
reveal_type(x) # revealed: int | str | bytes
```

## Aliased type aliases

```py
type IntOrStr = int | str
MyIntOrStr = IntOrStr

x: MyIntOrStr = 1

# error: [invalid-assignment]
y: MyIntOrStr = None
```

## Generic type aliases

```py
type ListOrSet[T] = list[T] | set[T]

# TODO: Should be `tuple[typing.TypeVar | typing.ParamSpec | typing.TypeVarTuple, ...]`,
# as specified in the `typeshed` stubs.
reveal_type(ListOrSet.__type_params__) # revealed: @Todo(instance attributes)
```
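As a usage sketch (not part of the test file above), subscripting a PEP 695 generic alias specializes it, so `ListOrSet[int]` stands for `list[int] | set[int]`; the `first` helper below is purely illustrative:

```py
type ListOrSet[T] = list[T] | set[T]

def first(items: ListOrSet[int]) -> int | None:
    # `ListOrSet[int]` is equivalent to `list[int] | set[int]` here.
    for item in items:
        return item
    return None

print(first([1, 2, 3]))  # 1
print(first(set()))      # None
```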

@@ -84,7 +84,7 @@ reveal_type(b) # revealed: Literal[2]
[a, *b, c, d] = (1, 2)
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: Literal[2]
reveal_type(d) # revealed: Unknown
```
@@ -95,7 +95,7 @@ reveal_type(d) # revealed: Unknown
[a, *b, c] = (1, 2)
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: Literal[2]
```

@@ -105,7 +105,7 @@ reveal_type(c) # revealed: Literal[2]
[a, *b, c] = (1, 2, 3)
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: Literal[3]
```

@@ -115,7 +115,7 @@ reveal_type(c) # revealed: Literal[3]
[a, *b, c, d] = (1, 2, 3, 4, 5, 6)
reveal_type(a) # revealed: Literal[1]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: Literal[5]
reveal_type(d) # revealed: Literal[6]
```
@@ -127,7 +127,7 @@ reveal_type(d) # revealed: Literal[6]
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Literal[2]
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(c) # revealed: @Todo
reveal_type(c) # revealed: @Todo(starred unpacking)
```

### Starred expression (6)
@@ -138,7 +138,7 @@ reveal_type(c) # revealed: @Todo
reveal_type(a) # revealed: Literal[1]
reveal_type(b) # revealed: Unknown
reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: @Todo
reveal_type(d) # revealed: @Todo(starred unpacking)
reveal_type(e) # revealed: Unknown
reveal_type(f) # revealed: Unknown
```
@@ -222,7 +222,7 @@ reveal_type(b) # revealed: LiteralString
(a, *b, c, d) = "ab"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: LiteralString
reveal_type(d) # revealed: Unknown
```
@@ -233,7 +233,7 @@ reveal_type(d) # revealed: Unknown
(a, *b, c) = "ab"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[Any] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: LiteralString
```

@@ -243,7 +243,7 @@ reveal_type(c) # revealed: LiteralString
(a, *b, c) = "abc"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: LiteralString
```

@@ -253,7 +253,7 @@ reveal_type(c) # revealed: LiteralString
(a, *b, c, d) = "abcdef"
reveal_type(a) # revealed: LiteralString
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
reveal_type(b) # revealed: @Todo
reveal_type(b) # revealed: @Todo(starred unpacking)
reveal_type(c) # revealed: LiteralString
reveal_type(d) # revealed: LiteralString
```
@@ -265,5 +265,5 @@ reveal_type(d) # revealed: LiteralString
reveal_type(a) # revealed: LiteralString
reveal_type(b) # revealed: LiteralString
# TODO: Should be list[int] once support for assigning to starred expression is added
reveal_type(c) # revealed: @Todo
reveal_type(c) # revealed: @Todo(starred unpacking)
```

@@ -17,5 +17,5 @@ class Manager:

async def test():
async with Manager() as f:
reveal_type(f) # revealed: @Todo
reveal_type(f) # revealed: @Todo(async with statement)
```

@@ -459,11 +459,11 @@ foo: 3.8- # trailing comment
";
let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
assert_eq!(parsed_versions.len(), 3);
assert_snapshot!(parsed_versions.to_string(), @r###"
assert_snapshot!(parsed_versions.to_string(), @r"
bar: 2.7-3.10
bar.baz: 3.1-3.9
foo: 3.8-
"###
"
);
}
@@ -54,6 +54,7 @@ impl Program {
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct ProgramSettings {
pub target_version: PythonVersion,
pub search_paths: SearchPathSettings,
@@ -61,6 +62,7 @@ pub struct ProgramSettings {

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct SearchPathSettings {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -91,6 +93,7 @@ impl SearchPathSettings {
}

#[derive(Debug, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub enum SitePackages {
Derived {
venv_path: SystemPathBuf,

@@ -5,6 +5,7 @@ use std::fmt;
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct PythonVersion {
pub major: u8,
pub minor: u8,
@@ -38,7 +39,7 @@ impl PythonVersion {

impl Default for PythonVersion {
fn default() -> Self {
Self::PY38
Self::PY313 // TODO: temporarily changed to 3.13 to activate all sys.version_info branches
}
}

@@ -1229,4 +1229,32 @@ match 1:

assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
}

#[test]
#[ignore]
fn if_statement() {
let TestCase { db, file } = test_case(
"
x = False

if True:
x: bool
",
);

let index = semantic_index(&db, file);
// let global_table = index.symbol_table(FileScopeId::global());

let use_def = index.use_def_map(FileScopeId::global());

// use_def

use_def.print(&db);

assert!(false);
// let binding = use_def
// .first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
// .expect("Expected with item definition for {name}");
// assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
}
}

@@ -49,64 +49,50 @@ fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
}

pub trait HasScopedUseId {
/// The type of the ID uniquely identifying the use.
type Id: Copy;

/// Returns the ID that uniquely identifies the use in `scope`.
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}

/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedUseId;

impl HasScopedUseId for ast::ExprName {
type Id = ScopedUseId;
pub trait HasScopedUseId {
/// Returns the ID that uniquely identifies the use in `scope`.
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId;
}

fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
impl HasScopedUseId for ast::ExprName {
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
let expression_ref = ExpressionRef::from(self);
expression_ref.scoped_use_id(db, scope)
}
}

impl HasScopedUseId for ast::ExpressionRef<'_> {
type Id = ScopedUseId;

fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
let ast_ids = ast_ids(db, scope);
ast_ids.use_id(*self)
}
}

pub trait HasScopedAstId {
/// The type of the ID uniquely identifying the node.
type Id: Copy;

/// Returns the ID that uniquely identifies the node in `scope`.
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
}

impl<T: HasScopedAstId> HasScopedAstId for Box<T> {
type Id = <T as HasScopedAstId>::Id;

fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
self.as_ref().scoped_ast_id(db, scope)
}
}

/// Uniquely identifies an [`ast::Expr`] in a [`crate::semantic_index::symbol::FileScopeId`].
#[newtype_index]
pub struct ScopedExpressionId;

pub trait HasScopedExpressionId {
/// Returns the ID that uniquely identifies the node in `scope`.
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId;
}

impl<T: HasScopedExpressionId> HasScopedExpressionId for Box<T> {
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
self.as_ref().scoped_expression_id(db, scope)
}
}

macro_rules! impl_has_scoped_expression_id {
($ty: ty) => {
impl HasScopedAstId for $ty {
type Id = ScopedExpressionId;

fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
impl HasScopedExpressionId for $ty {
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
let expression_ref = ExpressionRef::from(self);
expression_ref.scoped_ast_id(db, scope)
expression_ref.scoped_expression_id(db, scope)
}
}
};
@@ -146,29 +132,20 @@ impl_has_scoped_expression_id!(ast::ExprSlice);
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
impl_has_scoped_expression_id!(ast::Expr);

impl HasScopedAstId for ast::ExpressionRef<'_> {
type Id = ScopedExpressionId;

fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
impl HasScopedExpressionId for ast::ExpressionRef<'_> {
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
let ast_ids = ast_ids(db, scope);
ast_ids.expression_id(*self)
}
}

#[derive(Debug)]
#[derive(Debug, Default)]
pub(super) struct AstIdsBuilder {
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

impl AstIdsBuilder {
pub(super) fn new() -> Self {
Self {
expressions_map: FxHashMap::default(),
uses_map: FxHashMap::default(),
}
}

/// Adds `expr` to the expression ids map and returns its id.
pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
let expression_id = self.expressions_map.len().into();
@@ -23,7 +23,7 @@ use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId,
SymbolTableBuilder,
};
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::use_def::{ActiveConstraintsSnapshot, FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::unpack::Unpack;
use crate::Db;
@@ -36,12 +36,25 @@ use super::definition::{

mod except_handlers;

/// Are we in a state where a `break` statement is allowed?
#[derive(Clone, Copy, Debug)]
enum LoopState {
InLoop,
NotInLoop,
}

impl LoopState {
fn is_inside(self) -> bool {
matches!(self, LoopState::InLoop)
}
}

pub(super) struct SemanticIndexBuilder<'db> {
// Builder state
db: &'db dyn Db,
file: File,
module: &'db ParsedModule,
scope_stack: Vec<FileScopeId>,
scope_stack: Vec<(FileScopeId, LoopState)>,
/// The assignments we're currently visiting, with
/// the most recent visit at the end of the Vec
current_assignments: Vec<CurrentAssignment<'db>>,
@@ -103,9 +116,24 @@ impl<'db> SemanticIndexBuilder<'db> {
*self
.scope_stack
.last()
.map(|(scope, _)| scope)
.expect("Always to have a root scope")
}

fn loop_state(&self) -> LoopState {
self.scope_stack
.last()
.expect("Always to have a root scope")
.1
}

fn set_inside_loop(&mut self, state: LoopState) {
self.scope_stack
.last_mut()
.expect("Always to have a root scope")
.1 = state;
}

fn push_scope(&mut self, node: NodeWithScopeRef) {
let parent = self.current_scope();
self.push_scope_with_parent(node, Some(parent));
@@ -124,22 +152,23 @@ impl<'db> SemanticIndexBuilder<'db> {
self.try_node_context_stack_manager.enter_nested_scope();

let file_scope_id = self.scopes.push(scope);
self.symbol_tables.push(SymbolTableBuilder::new());
self.use_def_maps.push(UseDefMapBuilder::new());
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
self.symbol_tables.push(SymbolTableBuilder::default());
self.use_def_maps.push(UseDefMapBuilder::default());
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::default());

let scope_id = ScopeId::new(self.db, self.file, file_scope_id, countme::Count::default());

self.scope_ids_by_scope.push(scope_id);
self.scopes_by_node.insert(node.node_key(), file_scope_id);
let previous = self.scopes_by_node.insert(node.node_key(), file_scope_id);
debug_assert_eq!(previous, None);

debug_assert_eq!(ast_id_scope, file_scope_id);

self.scope_stack.push(file_scope_id);
self.scope_stack.push((file_scope_id, LoopState::NotInLoop));
}

fn pop_scope(&mut self) -> FileScopeId {
let id = self.scope_stack.pop().expect("Root scope to be present");
let (id, _) = self.scope_stack.pop().expect("Root scope to be present");
let children_end = self.scopes.next_index();
let scope = &mut self.scopes[id];
scope.descendents = scope.descendents.start..children_end;
@@ -171,12 +200,20 @@ impl<'db> SemanticIndexBuilder<'db> {
self.current_use_def_map().snapshot()
}

fn flow_restore(&mut self, state: FlowSnapshot) {
self.current_use_def_map_mut().restore(state);
fn constraints_snapshot(&self) -> ActiveConstraintsSnapshot {
self.current_use_def_map().constraints_snapshot()
}

fn flow_merge(&mut self, state: FlowSnapshot) {
fn flow_restore(&mut self, state: FlowSnapshot, active_constraints: ActiveConstraintsSnapshot) {
self.current_use_def_map_mut().restore(state);
self.current_use_def_map_mut()
.restore_constraints(active_constraints);
}

fn flow_merge(&mut self, state: FlowSnapshot, active_constraints: ActiveConstraintsSnapshot) {
self.current_use_def_map_mut().merge(state);
self.current_use_def_map_mut()
.restore_constraints(active_constraints);
}

fn add_symbol(&mut self, name: Name) -> ScopedSymbolId {
@@ -589,6 +626,27 @@ where
},
);
}
ast::Stmt::TypeAlias(type_alias) => {
let symbol = self.add_symbol(
type_alias
.name
.as_name_expr()
.map(|name| name.id.clone())
.unwrap_or("<unknown>".into()),
);
self.add_definition(symbol, type_alias);
self.visit_expr(&type_alias.name);

self.with_type_params(
NodeWithScopeRef::TypeAliasTypeParameters(type_alias),
type_alias.type_params.as_ref(),
|builder| {
builder.push_scope(NodeWithScopeRef::TypeAlias(type_alias));
builder.visit_expr(&type_alias.value);
builder.pop_scope()
},
);
}
ast::Stmt::Import(node) => {
for alias in &node.names {
let symbol_name = if let Some(asname) = &alias.asname {
@@ -715,37 +773,44 @@ where
ast::Stmt::If(node) => {
self.visit_expr(&node.test);
let pre_if = self.flow_snapshot();
let pre_if_constraints = self.constraints_snapshot();
let constraint = self.record_expression_constraint(&node.test);
let mut constraints = vec![constraint];
self.visit_body(&node.body);
let mut post_clauses: Vec<FlowSnapshot> = vec![];
for clause in &node.elif_else_clauses {
let elif_else_clauses = node
.elif_else_clauses
.iter()
.map(|clause| (clause.test.as_ref(), clause.body.as_slice()));
let has_else = node
.elif_else_clauses
.last()
.is_some_and(|clause| clause.test.is_none());
let elif_else_clauses = elif_else_clauses.chain(if has_else {
// if there's an `else` clause already, we don't need to add another
None
} else {
// if there's no `else` branch, we should add a no-op `else` branch
Some((None, Default::default()))
});
for (clause_test, clause_body) in elif_else_clauses {
// snapshot after every block except the last; the last one will just become
// the state that we merge the other snapshots into
post_clauses.push(self.flow_snapshot());
// we can only take an elif/else branch if none of the previous ones were
// taken, so the block entry state is always `pre_if`
self.flow_restore(pre_if.clone());
self.flow_restore(pre_if.clone(), pre_if_constraints.clone());
for constraint in &constraints {
self.record_negated_constraint(*constraint);
}
if let Some(elif_test) = &clause.test {
if let Some(elif_test) = clause_test {
self.visit_expr(elif_test);
constraints.push(self.record_expression_constraint(elif_test));
}
self.visit_body(&clause.body);
self.visit_body(clause_body);
}
for post_clause_state in post_clauses {
self.flow_merge(post_clause_state);
}
let has_else = node
.elif_else_clauses
.last()
.is_some_and(|clause| clause.test.is_none());
if !has_else {
// if there's no else clause, then it's possible we took none of the branches,
// and the pre_if state can reach here
self.flow_merge(pre_if);
self.flow_merge(post_clause_state, pre_if_constraints.clone());
}
}
ast::Stmt::While(ast::StmtWhile {
@@ -757,13 +822,17 @@ where
self.visit_expr(test);

let pre_loop = self.flow_snapshot();
let pre_loop_constraints = self.constraints_snapshot();

// Save aside any break states from an outer loop
let saved_break_states = std::mem::take(&mut self.loop_break_states);

// TODO: definitions created inside the body should be fully visible
// to other statements/expressions inside the body --Alex/Carl
let outer_loop_state = self.loop_state();
self.set_inside_loop(LoopState::InLoop);
self.visit_body(body);
self.set_inside_loop(outer_loop_state);

// Get the break states from the body of this loop, and restore the saved outer
// ones.
@@ -772,13 +841,13 @@ where

// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.flow_merge(pre_loop, pre_loop_constraints.clone());
self.visit_body(orelse);

// Breaking out of a while loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(break_state);
self.flow_merge(break_state, pre_loop_constraints.clone()); // TODO?
}
}
ast::Stmt::With(ast::StmtWith {
@@ -802,7 +871,9 @@ where
self.visit_body(body);
}
ast::Stmt::Break(_) => {
self.loop_break_states.push(self.flow_snapshot());
if self.loop_state().is_inside() {
self.loop_break_states.push(self.flow_snapshot());
}
}

ast::Stmt::For(
@@ -819,6 +890,7 @@ where
self.visit_expr(iter);

let pre_loop = self.flow_snapshot();
let pre_loop_constraints = self.constraints_snapshot();
let saved_break_states = std::mem::take(&mut self.loop_break_states);

debug_assert_eq!(&self.current_assignments, &[]);
@@ -829,20 +901,23 @@ where
// TODO: Definitions created by loop variables
// (and definitions created inside the body)
// are fully visible to other statements/expressions inside the body --Alex/Carl
let outer_loop_state = self.loop_state();
self.set_inside_loop(LoopState::InLoop);
self.visit_body(body);
self.set_inside_loop(outer_loop_state);

let break_states =
std::mem::replace(&mut self.loop_break_states, saved_break_states);

// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.flow_merge(pre_loop, pre_loop_constraints.clone());
self.visit_body(orelse);

// Breaking out of a `for` loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(break_state);
self.flow_merge(break_state, pre_loop_constraints.clone());
}
}
ast::Stmt::Match(ast::StmtMatch {
@@ -854,6 +929,7 @@ where
self.visit_expr(subject);

let after_subject = self.flow_snapshot();
let after_subject_cs = self.constraints_snapshot();
let Some((first, remaining)) = cases.split_first() else {
return;
};
@@ -863,18 +939,18 @@ where
let mut post_case_snapshots = vec![];
for case in remaining {
post_case_snapshots.push(self.flow_snapshot());
self.flow_restore(after_subject.clone());
self.flow_restore(after_subject.clone(), after_subject_cs.clone());
self.add_pattern_constraint(subject, &case.pattern);
self.visit_match_case(case);
}
for post_clause_state in post_case_snapshots {
self.flow_merge(post_clause_state);
self.flow_merge(post_clause_state, after_subject_cs.clone());
}
if !cases
.last()
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard())
{
self.flow_merge(after_subject);
self.flow_merge(after_subject, after_subject_cs.clone());
}
}
ast::Stmt::Try(ast::StmtTry {
@@ -892,6 +968,7 @@ where
// We will merge this state with all of the intermediate
// states during the `try` block before visiting those suites.
let pre_try_block_state = self.flow_snapshot();
let pre_try_block_constraints = self.constraints_snapshot();

self.try_node_context_stack_manager.push_context();

@@ -912,14 +989,17 @@ where
// as there necessarily must have been 0 `except` blocks executed
// if we hit the `else` block.
let post_try_block_state = self.flow_snapshot();
let post_try_block_constraints = self.constraints_snapshot();

// Prepare for visiting the `except` block(s)
self.flow_restore(pre_try_block_state);
self.flow_restore(pre_try_block_state, pre_try_block_constraints.clone());
for state in try_block_snapshots {
self.flow_merge(state);
self.flow_merge(state, pre_try_block_constraints.clone());
// TODO?
}

let pre_except_state = self.flow_snapshot();
let pre_except_constraints = self.constraints_snapshot();
let num_handlers = handlers.len();

for (i, except_handler) in handlers.iter().enumerate() {
@@ -958,19 +1038,22 @@ where
// as we'll immediately call `self.flow_restore()` to a different state
// as soon as this loop over the handlers terminates.
if i < (num_handlers - 1) {
self.flow_restore(pre_except_state.clone());
self.flow_restore(
pre_except_state.clone(),
pre_except_constraints.clone(),
);
}
}

// If we get to the `else` block, we know that 0 of the `except` blocks can have been executed,
// and the entire `try` block must have been executed:
self.flow_restore(post_try_block_state);
self.flow_restore(post_try_block_state, post_try_block_constraints);
}

self.visit_body(orelse);

for post_except_state in post_except_states {
self.flow_merge(post_except_state);
self.flow_merge(post_except_state, pre_try_block_constraints.clone());
}

// TODO: there's lots of complexity here that isn't yet handled by our model.
@@ -1127,19 +1210,17 @@ where
ast::Expr::If(ast::ExprIf {
body, test, orelse, ..
}) => {
// TODO detect statically known truthy or falsy test (via type inference, not naive
// AST inspection, so we can't simplify here, need to record test expression for
// later checking)
self.visit_expr(test);
let constraint = self.record_expression_constraint(test);
let pre_if = self.flow_snapshot();
let pre_if_constraints = self.constraints_snapshot();
let constraint = self.record_expression_constraint(test);
self.visit_expr(body);
let post_body = self.flow_snapshot();
self.flow_restore(pre_if);
self.flow_restore(pre_if, pre_if_constraints.clone());

self.record_negated_constraint(constraint);
self.visit_expr(orelse);
self.flow_merge(post_body);
self.flow_merge(post_body, pre_if_constraints);
}
ast::Expr::ListComp(
list_comprehension @ ast::ExprListComp {
@@ -1200,7 +1281,7 @@ where
// AST inspection, so we can't simplify here, need to record test expression for
// later checking)
let mut snapshots = vec![];

let pre_op_constraints = self.constraints_snapshot();
for (index, value) in values.iter().enumerate() {
self.visit_expr(value);
// In the last value we don't need to take a snapshot nor add a constraint
@@ -1215,7 +1296,7 @@ where
}
}
for snapshot in snapshots {
self.flow_merge(snapshot);
self.flow_merge(snapshot, pre_op_constraints.clone());
}
}
_ => {
@@ -83,6 +83,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
For(ForStmtDefinitionNodeRef<'a>),
Function(&'a ast::StmtFunctionDef),
Class(&'a ast::StmtClassDef),
TypeAlias(&'a ast::StmtTypeAlias),
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
@@ -109,6 +110,12 @@ impl<'a> From<&'a ast::StmtClassDef> for DefinitionNodeRef<'a> {
}
}

impl<'a> From<&'a ast::StmtTypeAlias> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtTypeAlias) -> Self {
Self::TypeAlias(node)
}
}

impl<'a> From<&'a ast::ExprNamed> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::ExprNamed) -> Self {
Self::NamedExpression(node)
@@ -265,6 +272,9 @@ impl<'db> DefinitionNodeRef<'db> {
DefinitionNodeRef::Class(class) => {
DefinitionKind::Class(AstNodeRef::new(parsed, class))
}
DefinitionNodeRef::TypeAlias(type_alias) => {
DefinitionKind::TypeAlias(AstNodeRef::new(parsed, type_alias))
}
DefinitionNodeRef::NamedExpression(named) => {
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
}
@@ -358,6 +368,7 @@ impl<'db> DefinitionNodeRef<'db> {
}
Self::Function(node) => node.into(),
Self::Class(node) => node.into(),
Self::TypeAlias(node) => node.into(),
Self::NamedExpression(node) => node.into(),
Self::Assignment(AssignmentDefinitionNodeRef {
value: _,
@@ -434,6 +445,7 @@ pub enum DefinitionKind<'db> {
ImportFrom(ImportFromDefinitionKind),
Function(AstNodeRef<ast::StmtFunctionDef>),
Class(AstNodeRef<ast::StmtClassDef>),
TypeAlias(AstNodeRef<ast::StmtTypeAlias>),
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind<'db>),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
@@ -456,6 +468,7 @@ impl DefinitionKind<'_> {
// functions, classes, and imports always bind, and we consider them declarations
DefinitionKind::Function(_)
| DefinitionKind::Class(_)
| DefinitionKind::TypeAlias(_)
| DefinitionKind::Import(_)
| DefinitionKind::ImportFrom(_)
| DefinitionKind::TypeVar(_)
@@ -682,6 +695,12 @@ impl From<&ast::StmtClassDef> for DefinitionNodeKey {
}
}

impl From<&ast::StmtTypeAlias> for DefinitionNodeKey {
fn from(node: &ast::StmtTypeAlias) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::ExprName> for DefinitionNodeKey {
fn from(node: &ast::ExprName) -> Self {
Self(NodeKey::from_node(node))

@@ -116,14 +116,11 @@ impl<'db> ScopeId<'db> {
// Type parameter scopes behave like function scopes in terms of name resolution; CPython
// symbol table also uses the term "function-like" for these scopes.
matches!(
self.node(db),
NodeWithScopeKind::ClassTypeParameters(_)
| NodeWithScopeKind::FunctionTypeParameters(_)
| NodeWithScopeKind::Function(_)
| NodeWithScopeKind::ListComprehension(_)
| NodeWithScopeKind::SetComprehension(_)
| NodeWithScopeKind::DictComprehension(_)
| NodeWithScopeKind::GeneratorExpression(_)
self.node(db).scope_kind(),
ScopeKind::Annotation
| ScopeKind::Function
| ScopeKind::TypeAlias
| ScopeKind::Comprehension
)
}

@@ -144,6 +141,12 @@ impl<'db> ScopeId<'db> {
}
NodeWithScopeKind::Function(function)
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
NodeWithScopeKind::TypeAlias(type_alias)
| NodeWithScopeKind::TypeAliasTypeParameters(type_alias) => type_alias
.name
.as_name_expr()
.map(|name| name.id.as_str())
.unwrap_or("<type alias>"),
NodeWithScopeKind::Lambda(_) => "<lambda>",
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
@@ -201,6 +204,7 @@ pub enum ScopeKind {
Class,
Function,
Comprehension,
TypeAlias,
}

impl ScopeKind {
@@ -210,7 +214,7 @@ impl ScopeKind {
}

/// Symbol table for a specific [`Scope`].
#[derive(Debug)]
#[derive(Debug, Default)]
pub struct SymbolTable {
/// The symbols in this scope.
symbols: IndexVec<ScopedSymbolId, Symbol>,
@@ -220,13 +224,6 @@ pub struct SymbolTable {
}

impl SymbolTable {
fn new() -> Self {
Self {
symbols: IndexVec::new(),
symbols_by_name: SymbolMap::default(),
}
}

fn shrink_to_fit(&mut self) {
self.symbols.shrink_to_fit();
}
@@ -278,18 +275,12 @@ impl PartialEq for SymbolTable {

impl Eq for SymbolTable {}

#[derive(Debug)]
#[derive(Debug, Default)]
pub(super) struct SymbolTableBuilder {
table: SymbolTable,
}

impl SymbolTableBuilder {
pub(super) fn new() -> Self {
Self {
table: SymbolTable::new(),
}
}

pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
let hash = SymbolTable::hash_name(&name);
let entry = self
@@ -339,6 +330,8 @@ pub(crate) enum NodeWithScopeRef<'a> {
Lambda(&'a ast::ExprLambda),
FunctionTypeParameters(&'a ast::StmtFunctionDef),
ClassTypeParameters(&'a ast::StmtClassDef),
TypeAlias(&'a ast::StmtTypeAlias),
TypeAliasTypeParameters(&'a ast::StmtTypeAlias),
ListComprehension(&'a ast::ExprListComp),
SetComprehension(&'a ast::ExprSetComp),
DictComprehension(&'a ast::ExprDictComp),
@@ -360,6 +353,12 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::Function(function) => {
NodeWithScopeKind::Function(AstNodeRef::new(module, function))
}
NodeWithScopeRef::TypeAlias(type_alias) => {
NodeWithScopeKind::TypeAlias(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
NodeWithScopeKind::TypeAliasTypeParameters(AstNodeRef::new(module, type_alias))
}
NodeWithScopeRef::Lambda(lambda) => {
NodeWithScopeKind::Lambda(AstNodeRef::new(module, lambda))
}
@@ -400,6 +399,12 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
}
NodeWithScopeRef::TypeAlias(type_alias) => {
NodeWithScopeKey::TypeAlias(NodeKey::from_node(type_alias))
}
NodeWithScopeRef::TypeAliasTypeParameters(type_alias) => {
NodeWithScopeKey::TypeAliasTypeParameters(NodeKey::from_node(type_alias))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
}
@@ -424,6 +429,8 @@ pub enum NodeWithScopeKind {
ClassTypeParameters(AstNodeRef<ast::StmtClassDef>),
Function(AstNodeRef<ast::StmtFunctionDef>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
TypeAliasTypeParameters(AstNodeRef<ast::StmtTypeAlias>),
TypeAlias(AstNodeRef<ast::StmtTypeAlias>),
Lambda(AstNodeRef<ast::ExprLambda>),
ListComprehension(AstNodeRef<ast::ExprListComp>),
SetComprehension(AstNodeRef<ast::ExprSetComp>),
@@ -436,9 +443,11 @@ impl NodeWithScopeKind {
match self {
Self::Module => ScopeKind::Module,
Self::Class(_) => ScopeKind::Class,
Self::Function(_) => ScopeKind::Function,
Self::Lambda(_) => ScopeKind::Function,
Self::FunctionTypeParameters(_) | Self::ClassTypeParameters(_) => ScopeKind::Annotation,
Self::Function(_) | Self::Lambda(_) => ScopeKind::Function,
Self::FunctionTypeParameters(_)
| Self::ClassTypeParameters(_)
| Self::TypeAliasTypeParameters(_) => ScopeKind::Annotation,
Self::TypeAlias(_) => ScopeKind::TypeAlias,
Self::ListComprehension(_)
| Self::SetComprehension(_)
| Self::DictComprehension(_)
@@ -459,6 +468,13 @@ impl NodeWithScopeKind {
_ => panic!("expected function"),
}
}

pub fn expect_type_alias(&self) -> &ast::StmtTypeAlias {
match self {
Self::TypeAlias(type_alias) => type_alias.node(),
_ => panic!("expected type alias"),
}
}
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -468,6 +484,8 @@ pub(crate) enum NodeWithScopeKey {
ClassTypeParameters(NodeKey),
Function(NodeKey),
FunctionTypeParameters(NodeKey),
TypeAlias(NodeKey),
TypeAliasTypeParameters(NodeKey),
Lambda(NodeKey),
ListComprehension(NodeKey),
SetComprehension(NodeKey),

@@ -221,6 +221,8 @@
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
use std::collections::HashSet;

use self::symbol_state::{
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
@@ -268,6 +270,109 @@ pub(crate) struct UseDefMap<'db> {
}

impl<'db> UseDefMap<'db> {
#[cfg(test)]
pub(crate) fn print(&self, db: &dyn crate::db::Db) {
use crate::semantic_index::constraint::ConstraintNode;

println!("all_definitions:");
println!("================");

for (id, d) in self.all_definitions.iter_enumerated() {
println!(
"{:?}: {:?} {:?} {:?}",
id,
d.category(db),
d.scope(db),
d.symbol(db),
);
println!(" {:?}", d.kind(db));
println!();
}

println!("all_constraints:");
println!("================");

for (id, c) in self.all_constraints.iter_enumerated() {
println!("{:?}: {:?}", id, c.node);
if let ConstraintNode::Expression(e) = c.node {
println!(" {:?}", e.node_ref(db));
}
}

println!();

println!("bindings_by_use:");
println!("================");

for (id, bindings) in self.bindings_by_use.iter_enumerated() {
println!("{:?}:", id);
for binding in bindings.iter() {
let definition = self.all_definitions[binding.definition];
let mut constraint_ids = binding.constraint_ids.peekable();
let mut active_constraint_ids =
binding.constraints_active_at_binding_ids.peekable();

println!(" * {:?}", definition);

if constraint_ids.peek().is_some() {
println!(" Constraints:");
for constraint_id in constraint_ids {
println!(" {:?}", self.all_constraints[constraint_id]);
}
} else {
println!(" No constraints");
}

println!();

if active_constraint_ids.peek().is_some() {
println!(" Active constraints at binding:");
for constraint_id in active_constraint_ids {
println!(" {:?}", self.all_constraints[constraint_id]);
}
} else {
println!(" No active constraints at binding");
}
}
}

println!();

println!("public_symbols:");
println!("================");

for (id, symbol) in self.public_symbols.iter_enumerated() {
println!("{:?}:", id);
println!(" * Bindings:");
for binding in symbol.bindings().iter() {
let definition = self.all_definitions[binding.definition];
let mut constraint_ids = binding.constraint_ids.peekable();

println!(" {:?}", definition);

if constraint_ids.peek().is_some() {
println!(" Constraints:");
for constraint_id in constraint_ids {
println!(" {:?}", self.all_constraints[constraint_id]);
}
} else {
println!(" No constraints");
}
}

println!(" * Declarations:");
for (declaration, _) in symbol.declarations().iter() {
let definition = self.all_definitions[declaration];
println!(" {:?}", definition);
}

println!();
}

println!();
println!();
}

pub(crate) fn bindings_at_use(
&self,
use_id: ScopedUseId,
@@ -352,6 +457,7 @@ impl<'db> UseDefMap<'db> {
) -> DeclarationsIterator<'a, 'db> {
DeclarationsIterator {
all_definitions: &self.all_definitions,
all_constraints: &self.all_constraints,
inner: declarations.iter(),
may_be_undeclared: declarations.may_be_undeclared(),
}
@@ -365,7 +471,7 @@ enum SymbolDefinitions {
Declarations(SymbolDeclarations),
}

#[derive(Debug)]
#[derive(Debug, Clone)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
@@ -384,6 +490,10 @@ impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
all_constraints: self.all_constraints,
constraint_ids: def_id_with_constraints.constraint_ids,
},
constraints_active_at_binding: ConstraintsIterator {
all_constraints: self.all_constraints,
constraint_ids: def_id_with_constraints.constraints_active_at_binding_ids,
},
})
}
}
@@ -393,8 +503,10 @@ impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}
pub(crate) struct BindingWithConstraints<'map, 'db> {
pub(crate) binding: Definition<'db>,
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
pub(crate) constraints_active_at_binding: ConstraintsIterator<'map, 'db>,
}

#[derive(Debug, Clone)]
pub(crate) struct ConstraintsIterator<'map, 'db> {
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
constraint_ids: ConstraintIdIterator<'map>,
@@ -414,6 +526,7 @@ impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

pub(crate) struct DeclarationsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
inner: DeclarationIdIterator<'map>,
may_be_undeclared: bool,
}
@@ -425,10 +538,18 @@ impl DeclarationsIterator<'_, '_> {
}

impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
type Item = Definition<'db>;
type Item = (Definition<'db>, ConstraintsIterator<'map, 'db>);

fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|def_id| self.all_definitions[def_id])
self.inner.next().map(|(def_id, constraints)| {
(
self.all_definitions[def_id],
ConstraintsIterator {
all_constraints: self.all_constraints,
constraint_ids: constraints,
},
)
})
}
}

@@ -440,6 +561,9 @@ pub(super) struct FlowSnapshot {
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Clone, Debug)]
pub(super) struct ActiveConstraintsSnapshot(HashSet<ScopedConstraintId>);

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
/// Append-only array of [`Definition`].
@@ -448,6 +572,8 @@ pub(super) struct UseDefMapBuilder<'db> {
/// Append-only array of [`Constraint`].
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,

active_constraints: HashSet<ScopedConstraintId>,

/// Live bindings at each so-far-recorded use.
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,

@@ -459,10 +585,6 @@ pub(super) struct UseDefMapBuilder<'db> {
}

impl<'db> UseDefMapBuilder<'db> {
pub(super) fn new() -> Self {
Self::default()
}

pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
let new_symbol = self.symbol_states.push(SymbolState::undefined());
debug_assert_eq!(symbol, new_symbol);
@@ -475,7 +597,7 @@ impl<'db> UseDefMapBuilder<'db> {
binding,
SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
);
symbol_state.record_binding(def_id);
symbol_state.record_binding(def_id, &self.active_constraints);
}

pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
@@ -483,6 +605,7 @@ impl<'db> UseDefMapBuilder<'db> {
for state in &mut self.symbol_states {
state.record_constraint(constraint_id);
}
self.active_constraints.insert(constraint_id);
}

pub(super) fn record_declaration(
@@ -496,7 +619,7 @@ impl<'db> UseDefMapBuilder<'db> {
declaration,
SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
);
symbol_state.record_declaration(def_id);
symbol_state.record_declaration(def_id, &self.active_constraints);
}

pub(super) fn record_declaration_and_binding(
@@ -507,8 +630,8 @@ impl<'db> UseDefMapBuilder<'db> {
// We don't need to store anything in self.definitions_by_definition.
let def_id = self.all_definitions.push(definition);
let symbol_state = &mut self.symbol_states[symbol];
symbol_state.record_declaration(def_id);
symbol_state.record_binding(def_id);
symbol_state.record_declaration(def_id, &self.active_constraints);
symbol_state.record_binding(def_id, &self.active_constraints);
}

pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
@@ -527,6 +650,10 @@ impl<'db> UseDefMapBuilder<'db> {
}
}

pub(super) fn constraints_snapshot(&self) -> ActiveConstraintsSnapshot {
ActiveConstraintsSnapshot(self.active_constraints.clone())
}

/// Restore the current builder symbols state to the given snapshot.
pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
@@ -545,6 +672,10 @@ impl<'db> UseDefMapBuilder<'db> {
.resize(num_symbols, SymbolState::undefined());
}

pub(super) fn restore_constraints(&mut self, snapshot: ActiveConstraintsSnapshot) {
self.active_constraints = snapshot.0;
}

/// Merge the given snapshot into the current state, reflecting that we might have taken either
/// path to get here. The new state for each symbol should include definitions from both the
/// prior state and the snapshot.
@@ -122,7 +122,7 @@ impl<const B: usize> BitSet<B> {
|
||||
}
|
||||
|
||||
/// Iterator over values in a [`BitSet`].
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct BitSetIterator<'a, const B: usize> {
|
||||
/// The blocks we are iterating over.
|
||||
blocks: &'a [u64],
|
||||
|
||||
@@ -43,6 +43,8 @@
|
||||
//!
|
||||
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
|
||||
//! similar to tracking live bindings.
|
||||
use std::collections::HashSet;
|
||||
|
||||
use super::bitset::{BitSet, BitSetIterator};
|
||||
use ruff_index::newtype_index;
|
||||
use smallvec::SmallVec;
|
||||
@@ -87,6 +89,8 @@ pub(super) struct SymbolDeclarations {
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
live_declarations: Declarations,

constraints_active_at_declaration: Constraints, // TODO: rename to constraints_active_at_declaration

/// Could the symbol be un-declared at this point?
may_be_undeclared: bool,
}
@@ -95,14 +99,27 @@ impl SymbolDeclarations {
fn undeclared() -> Self {
Self {
live_declarations: Declarations::default(),
constraints_active_at_declaration: Constraints::default(),
may_be_undeclared: true,
}
}

/// Record a newly-encountered declaration for this symbol.
fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
fn record_declaration(
&mut self,
declaration_id: ScopedDefinitionId,
active_constraints: &HashSet<ScopedConstraintId>,
) {
self.live_declarations = Declarations::with(declaration_id.into());
self.may_be_undeclared = false;

// TODO: unify code with below
self.constraints_active_at_declaration = Constraints::with_capacity(1);
self.constraints_active_at_declaration
.push(BitSet::default());
for active_constraint_id in active_constraints {
self.constraints_active_at_declaration[0].insert(active_constraint_id.as_u32());
}
}

/// Add undeclared as a possibility for this symbol.
@@ -114,6 +131,7 @@ impl SymbolDeclarations {
pub(super) fn iter(&self) -> DeclarationIdIterator {
DeclarationIdIterator {
inner: self.live_declarations.iter(),
constraints_active_at_binding: self.constraints_active_at_declaration.iter(),
}
}

@@ -138,6 +156,8 @@ pub(super) struct SymbolBindings {
/// binding in `live_bindings`.
constraints: Constraints,

constraints_active_at_binding: Constraints,

/// Could the symbol be unbound at this point?
may_be_unbound: bool,
}
@@ -147,6 +167,7 @@ impl SymbolBindings {
Self {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
constraints_active_at_binding: Constraints::default(),
may_be_unbound: true,
}
}
@@ -157,12 +178,21 @@ impl SymbolBindings {
}

/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
pub(super) fn record_binding(
&mut self,
binding_id: ScopedDefinitionId,
active_constraints: &HashSet<ScopedConstraintId>,
) {
// The new binding replaces all previous live bindings in this path, and has no
// constraints.
self.live_bindings = Bindings::with(binding_id.into());
self.constraints = Constraints::with_capacity(1);
self.constraints.push(BitSet::default());
self.constraints_active_at_binding = Constraints::with_capacity(1);
self.constraints_active_at_binding.push(BitSet::default());
for active_constraint_id in active_constraints {
self.constraints_active_at_binding[0].insert(active_constraint_id.as_u32());
}
self.may_be_unbound = false;
}

@@ -178,6 +208,7 @@ impl SymbolState {
BindingIdWithConstraintsIterator {
definitions: self.live_bindings.iter(),
constraints: self.constraints.iter(),
constraints_active_at_binding: self.constraints_active_at_binding.iter(),
}
}

@@ -207,8 +238,12 @@ impl SymbolState {
}

/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
self.bindings.record_binding(binding_id);
pub(super) fn record_binding(
&mut self,
binding_id: ScopedDefinitionId,
active_constraints: &HashSet<ScopedConstraintId>,
) {
self.bindings.record_binding(binding_id, active_constraints);
}

/// Add given constraint to all live bindings.
@@ -222,8 +257,13 @@ impl SymbolState {
}

/// Record a newly-encountered declaration of this symbol.
pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.declarations.record_declaration(declaration_id);
pub(super) fn record_declaration(
&mut self,
declaration_id: ScopedDefinitionId,
active_constraints: &HashSet<ScopedConstraintId>,
) {
self.declarations
.record_declaration(declaration_id, active_constraints);
}

/// Merge another [`SymbolState`] into this one.
@@ -232,24 +272,93 @@ impl SymbolState {
bindings: SymbolBindings {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
constraints_active_at_binding: Constraints::default(), // TODO
may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
constraints_active_at_declaration: Constraints::default(), // TODO
may_be_undeclared: self.declarations.may_be_undeclared
|| b.declarations.may_be_undeclared,
},
};

// let mut constraints_active_at_binding = BitSet::default();
// for active_constraint_id in active_constraints.0 {
// constraints_active_at_binding.insert(active_constraint_id.as_u32());
// }

std::mem::swap(&mut a, self);
self.declarations
.live_declarations
.union(&b.declarations.live_declarations);
// self.declarations
// .live_declarations
// .union(&b.declarations.live_declarations);

let mut a_decls_iter = a.declarations.live_declarations.iter();
let mut b_decls_iter = b.declarations.live_declarations.iter();
let mut a_constraints_active_at_declaration_iter =
a.declarations.constraints_active_at_declaration.into_iter();
let mut b_constraints_active_at_declaration_iter =
b.declarations.constraints_active_at_declaration.into_iter();

let mut opt_a_decl: Option<u32> = a_decls_iter.next();
let mut opt_b_decl: Option<u32> = b_decls_iter.next();

let push = |decl,
constraints_active_at_declaration_iter: &mut ConstraintsIntoIterator,
merged: &mut Self| {
merged.declarations.live_declarations.insert(decl);
let constraints_active_at_binding = constraints_active_at_declaration_iter
.next()
.expect("declarations and constraints_active_at_binding length mismatch");
merged
.declarations
.constraints_active_at_declaration
.push(constraints_active_at_binding);
};

loop {
match (opt_a_decl, opt_b_decl) {
(Some(a_decl), Some(b_decl)) => match a_decl.cmp(&b_decl) {
std::cmp::Ordering::Less => {
push(a_decl, &mut a_constraints_active_at_declaration_iter, self);
opt_a_decl = a_decls_iter.next();
}
std::cmp::Ordering::Greater => {
push(b_decl, &mut b_constraints_active_at_declaration_iter, self);
opt_b_decl = b_decls_iter.next();
}
std::cmp::Ordering::Equal => {
push(a_decl, &mut b_constraints_active_at_declaration_iter, self);
self.declarations
.constraints_active_at_declaration
.last_mut()
.unwrap()
.intersect(&a_constraints_active_at_declaration_iter.next().unwrap());

opt_a_decl = a_decls_iter.next();
opt_b_decl = b_decls_iter.next();
}
},
(Some(a_decl), None) => {
push(a_decl, &mut a_constraints_active_at_declaration_iter, self);
opt_a_decl = a_decls_iter.next();
}
(None, Some(b_decl)) => {
push(b_decl, &mut b_constraints_active_at_declaration_iter, self);
opt_b_decl = b_decls_iter.next();
}
(None, None) => break,
}
}
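
The loop above is a two-pointer merge of two sorted ID streams: an ID present on only one side is pushed with its constraint set as-is, while an ID present on both sides keeps only the intersection of the two sets. A self-contained sketch of the same idea (illustrative names; plain `Vec`/`HashSet` instead of the crate's bitset types; not part of this diff):

```rust
use std::collections::HashSet;

/// Merge two sorted ID lists with per-ID constraint sets.
/// IDs on both sides keep the intersection of their constraint sets.
fn merge(
    a: &[(u32, HashSet<u32>)],
    b: &[(u32, HashSet<u32>)],
) -> Vec<(u32, HashSet<u32>)> {
    let (mut i, mut j) = (0, 0);
    let mut merged = Vec::new();
    while i < a.len() || j < b.len() {
        match (a.get(i), b.get(j)) {
            // Same ID on both sides: intersect the constraint sets.
            (Some((ia, ca)), Some((ib, cb))) if ia == ib => {
                merged.push((*ia, ca.intersection(cb).copied().collect()));
                i += 1;
                j += 1;
            }
            // ID only in `a` (so far): keep its constraints unchanged.
            (Some((ia, ca)), Some((ib, _))) if ia < ib => {
                merged.push((*ia, ca.clone()));
                i += 1;
            }
            // ID only in `b` (so far): keep its constraints unchanged.
            (Some(_), Some((ib, cb))) => {
                merged.push((*ib, cb.clone()));
                j += 1;
            }
            (Some((ia, ca)), None) => {
                merged.push((*ia, ca.clone()));
                i += 1;
            }
            (None, Some((ib, cb))) => {
                merged.push((*ib, cb.clone()));
                j += 1;
            }
            (None, None) => unreachable!(),
        }
    }
    merged
}

fn main() {
    let a = vec![(0, HashSet::from([1, 2])), (2, HashSet::from([3]))];
    let b = vec![(0, HashSet::from([2])), (1, HashSet::from([4]))];
    // ID 0 keeps only the shared constraint {2}; IDs 1 and 2 keep theirs.
    assert_eq!(merge(&a, &b)[0].1, HashSet::from([2]));
}
```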
let mut a_defs_iter = a.bindings.live_bindings.iter();
|
||||
let mut b_defs_iter = b.bindings.live_bindings.iter();
|
||||
let mut a_constraints_iter = a.bindings.constraints.into_iter();
|
||||
let mut b_constraints_iter = b.bindings.constraints.into_iter();
|
||||
let mut a_constraints_active_at_binding_iter =
|
||||
a.bindings.constraints_active_at_binding.into_iter();
|
||||
let mut b_constraints_active_at_binding_iter =
|
||||
b.bindings.constraints_active_at_binding.into_iter();
|
||||
|
||||
let mut opt_a_def: Option<u32> = a_defs_iter.next();
|
||||
let mut opt_b_def: Option<u32> = b_defs_iter.next();
|
||||
@@ -261,7 +370,10 @@ impl SymbolState {
|
||||
// path is irrelevant.
|
||||
|
||||
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
|
||||
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
|
||||
let push = |def,
|
||||
constraints_iter: &mut ConstraintsIntoIterator,
|
||||
constraints_active_at_binding_iter: &mut ConstraintsIntoIterator,
|
||||
merged: &mut Self| {
|
||||
merged.bindings.live_bindings.insert(def);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
|
||||
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
|
||||
@@ -271,7 +383,14 @@ impl SymbolState {
|
||||
let constraints = constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
let constraints_active_at_binding = constraints_active_at_binding_iter
|
||||
.next()
|
||||
.expect("definitions and constraints_active_at_binding length mismatch");
|
||||
merged.bindings.constraints.push(constraints);
|
||||
merged
|
||||
.bindings
|
||||
.constraints_active_at_binding
|
||||
.push(constraints_active_at_binding);
|
||||
};
|
||||
|
||||
loop {
|
||||
@@ -279,17 +398,32 @@ impl SymbolState {
|
||||
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
|
||||
std::cmp::Ordering::Less => {
|
||||
// Next definition ID is only in `a`, push it to `self` and advance `a`.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
push(
|
||||
a_def,
|
||||
&mut a_constraints_iter,
|
||||
&mut a_constraints_active_at_binding_iter,
|
||||
self,
|
||||
);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
// Next definition ID is only in `b`, push it to `self` and advance `b`.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
push(
|
||||
b_def,
|
||||
&mut b_constraints_iter,
|
||||
&mut b_constraints_active_at_binding_iter,
|
||||
self,
|
||||
);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
// Next definition is in both; push to `self` and intersect constraints.
|
||||
push(a_def, &mut b_constraints_iter, self);
|
||||
push(
|
||||
a_def,
|
||||
&mut b_constraints_iter,
|
||||
&mut b_constraints_active_at_binding_iter,
|
||||
self,
|
||||
);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and
|
||||
// no constraint bitsets (`::unbound`) or one definition and one constraint
|
||||
// bitset (`::with`), and `::merge` always pushes one definition and one
|
||||
@@ -298,6 +432,11 @@ impl SymbolState {
|
||||
let a_constraints = a_constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
// let _a_constraints_active_at_binding =
|
||||
// a_constraints_active_at_binding_iter.next().expect(
|
||||
// "definitions and constraints_active_at_binding length mismatch",
|
||||
// ); // TODO: perform check that we see the same constraints in both paths
|
||||
|
||||
// If the same definition is visible through both paths, any constraint
|
||||
// that applies on only one path is irrelevant to the resulting type from
|
||||
// unioning the two paths, so we intersect the constraints.
|
||||
@@ -306,18 +445,29 @@ impl SymbolState {
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.intersect(&a_constraints);
|
||||
|
||||
opt_a_def = a_defs_iter.next();
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
},
|
||||
(Some(a_def), None) => {
|
||||
// We've exhausted `b`, just push the def from `a` and move on to the next.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
push(
|
||||
a_def,
|
||||
&mut a_constraints_iter,
|
||||
&mut a_constraints_active_at_binding_iter,
|
||||
self,
|
||||
);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
(None, Some(b_def)) => {
|
||||
// We've exhausted `a`, just push the def from `b` and move on to the next.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
push(
|
||||
b_def,
|
||||
&mut b_constraints_iter,
|
||||
&mut b_constraints_active_at_binding_iter,
|
||||
self,
|
||||
);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
(None, None) => break,
|
||||
@@ -353,26 +503,37 @@ impl Default for SymbolState {
|
||||
pub(super) struct BindingIdWithConstraints<'a> {
|
||||
pub(super) definition: ScopedDefinitionId,
|
||||
pub(super) constraint_ids: ConstraintIdIterator<'a>,
|
||||
pub(super) constraints_active_at_binding_ids: ConstraintIdIterator<'a>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct BindingIdWithConstraintsIterator<'a> {
|
||||
definitions: BindingsIterator<'a>,
|
||||
constraints: ConstraintsIterator<'a>,
|
||||
constraints_active_at_binding: ConstraintsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
|
||||
type Item = BindingIdWithConstraints<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match (self.definitions.next(), self.constraints.next()) {
|
||||
(None, None) => None,
|
||||
(Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
|
||||
definition: ScopedDefinitionId::from_u32(def),
|
||||
constraint_ids: ConstraintIdIterator {
|
||||
wrapped: constraints.iter(),
|
||||
},
|
||||
}),
|
||||
match (
|
||||
self.definitions.next(),
|
||||
self.constraints.next(),
|
||||
self.constraints_active_at_binding.next(),
|
||||
) {
|
||||
(None, None, None) => None,
|
||||
(Some(def), Some(constraints), Some(constraints_active_at_binding)) => {
|
||||
Some(BindingIdWithConstraints {
|
||||
definition: ScopedDefinitionId::from_u32(def),
|
||||
constraint_ids: ConstraintIdIterator {
|
||||
wrapped: constraints.iter(),
|
||||
},
|
||||
constraints_active_at_binding_ids: ConstraintIdIterator {
|
||||
wrapped: constraints_active_at_binding.iter(),
|
||||
},
|
||||
})
|
||||
}
|
||||
// SAFETY: see above.
|
||||
_ => unreachable!("definitions and constraints length mismatch"),
|
||||
}
|
||||
@@ -381,7 +542,7 @@ impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
|
||||
|
||||
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct ConstraintIdIterator<'a> {
|
||||
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
|
||||
}
|
||||
@@ -399,13 +560,25 @@ impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DeclarationIdIterator<'a> {
|
||||
inner: DeclarationsIterator<'a>,
|
||||
constraints_active_at_binding: ConstraintsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for DeclarationIdIterator<'a> {
|
||||
type Item = ScopedDefinitionId;
|
||||
type Item = (ScopedDefinitionId, ConstraintIdIterator<'a>);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.inner.next().map(ScopedDefinitionId::from_u32)
|
||||
// self.inner.next().map(ScopedDefinitionId::from_u32)
|
||||
match (self.inner.next(), self.constraints_active_at_binding.next()) {
|
||||
(None, None) => None,
|
||||
(Some(declaration), Some(constraints_active_at_binding)) => Some((
|
||||
ScopedDefinitionId::from_u32(declaration),
|
||||
ConstraintIdIterator {
|
||||
wrapped: constraints_active_at_binding.iter(),
|
||||
},
|
||||
)),
|
||||
// SAFETY: see above.
|
||||
_ => unreachable!("declarations and constraints_active_at_binding length mismatch"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -413,7 +586,7 @@ impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
|
||||
use super::{ScopedConstraintId, SymbolState};
|
||||
|
||||
fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
|
||||
assert_eq!(symbol.may_be_unbound(), may_be_unbound);
|
||||
@@ -445,7 +618,7 @@ mod tests {
|
||||
let actual = symbol
|
||||
.declarations()
|
||||
.iter()
|
||||
.map(ScopedDefinitionId::as_u32)
|
||||
.map(|(d, _)| d.as_u32()) // TODO: constraints
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
@@ -457,76 +630,76 @@ mod tests {
|
||||
assert_bindings(&sym, true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
// #[test]
|
||||
// fn with() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
|
||||
assert_bindings(&sym, false, &["0<>"]);
|
||||
}
|
||||
// assert_bindings(&sym, false, &["0<>"]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn set_may_be_unbound() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym.set_may_be_unbound();
|
||||
// #[test]
|
||||
// fn set_may_be_unbound() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
// sym.set_may_be_unbound();
|
||||
|
||||
assert_bindings(&sym, true, &["0<>"]);
|
||||
}
|
||||
// assert_bindings(&sym, true, &["0<>"]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn record_constraint() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
// #[test]
|
||||
// fn record_constraint() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
// sym.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
assert_bindings(&sym, false, &["0<0>"]);
|
||||
}
|
||||
// assert_bindings(&sym, false, &["0<0>"]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn merge() {
|
||||
// merging the same definition with the same constraint keeps the constraint
|
||||
let mut sym0a = SymbolState::undefined();
|
||||
sym0a.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym0a.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
// #[test]
|
||||
// fn merge() {
|
||||
// // merging the same definition with the same constraint keeps the constraint
|
||||
// let mut sym0a = SymbolState::undefined();
|
||||
// sym0a.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
// sym0a.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
let mut sym0b = SymbolState::undefined();
|
||||
sym0b.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
sym0b.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
// let mut sym0b = SymbolState::undefined();
|
||||
// sym0b.record_binding(ScopedDefinitionId::from_u32(0));
|
||||
// sym0b.record_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
sym0a.merge(sym0b);
|
||||
let mut sym0 = sym0a;
|
||||
assert_bindings(&sym0, false, &["0<0>"]);
|
||||
// sym0a.merge(sym0b);
|
||||
// let mut sym0 = sym0a;
|
||||
// assert_bindings(&sym0, false, &["0<0>"]);
|
||||
|
||||
// merging the same definition with differing constraints drops all constraints
|
||||
let mut sym1a = SymbolState::undefined();
|
||||
sym1a.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
sym1a.record_constraint(ScopedConstraintId::from_u32(1));
|
||||
// // merging the same definition with differing constraints drops all constraints
|
||||
// let mut sym1a = SymbolState::undefined();
|
||||
// sym1a.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
// sym1a.record_constraint(ScopedConstraintId::from_u32(1));
|
||||
|
||||
let mut sym1b = SymbolState::undefined();
|
||||
sym1b.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
sym1b.record_constraint(ScopedConstraintId::from_u32(2));
|
||||
// let mut sym1b = SymbolState::undefined();
|
||||
// sym1b.record_binding(ScopedDefinitionId::from_u32(1));
|
||||
// sym1b.record_constraint(ScopedConstraintId::from_u32(2));
|
||||
|
||||
sym1a.merge(sym1b);
|
||||
let sym1 = sym1a;
|
||||
assert_bindings(&sym1, false, &["1<>"]);
|
||||
// sym1a.merge(sym1b);
|
||||
// let sym1 = sym1a;
|
||||
// assert_bindings(&sym1, false, &["1<>"]);
|
||||
|
||||
// merging a constrained definition with unbound keeps both
|
||||
let mut sym2a = SymbolState::undefined();
|
||||
sym2a.record_binding(ScopedDefinitionId::from_u32(2));
|
||||
sym2a.record_constraint(ScopedConstraintId::from_u32(3));
|
||||
// // merging a constrained definition with unbound keeps both
|
||||
// let mut sym2a = SymbolState::undefined();
|
||||
// sym2a.record_binding(ScopedDefinitionId::from_u32(2));
|
||||
// sym2a.record_constraint(ScopedConstraintId::from_u32(3));
|
||||
|
||||
let sym2b = SymbolState::undefined();
|
||||
// let sym2b = SymbolState::undefined();
|
||||
|
||||
sym2a.merge(sym2b);
|
||||
let sym2 = sym2a;
|
||||
assert_bindings(&sym2, true, &["2<3>"]);
|
||||
// sym2a.merge(sym2b);
|
||||
// let sym2 = sym2a;
|
||||
// assert_bindings(&sym2, true, &["2<3>"]);
|
||||
|
||||
// merging different definitions keeps them each with their existing constraints
|
||||
sym0.merge(sym2);
|
||||
let sym = sym0;
|
||||
assert_bindings(&sym, true, &["0<0>", "2<3>"]);
|
||||
}
|
||||
// // merging different definitions keeps them each with their existing constraints
|
||||
// sym0.merge(sym2);
|
||||
// let sym = sym0;
|
||||
// assert_bindings(&sym, true, &["0<0>", "2<3>"]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn no_declaration() {
|
||||
@@ -535,54 +708,54 @@ mod tests {
|
||||
assert_declarations(&sym, true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_declaration() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
// #[test]
|
||||
// fn record_declaration() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
assert_declarations(&sym, false, &[1]);
|
||||
}
|
||||
// assert_declarations(&sym, false, &[1]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn record_declaration_override() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
// #[test]
|
||||
// fn record_declaration_override() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
|
||||
assert_declarations(&sym, false, &[2]);
|
||||
}
|
||||
// assert_declarations(&sym, false, &[2]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn record_declaration_merge() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
// #[test]
|
||||
// fn record_declaration_merge() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
let mut sym2 = SymbolState::undefined();
|
||||
sym2.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
// let mut sym2 = SymbolState::undefined();
|
||||
// sym2.record_declaration(ScopedDefinitionId::from_u32(2));
|
||||
|
||||
sym.merge(sym2);
|
||||
// sym.merge(sym2);
|
||||
|
||||
assert_declarations(&sym, false, &[1, 2]);
|
||||
}
|
||||
// assert_declarations(&sym, false, &[1, 2]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn record_declaration_merge_partial_undeclared() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
// #[test]
|
||||
// fn record_declaration_merge_partial_undeclared() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(1));
|
||||
|
||||
let sym2 = SymbolState::undefined();
|
||||
// let sym2 = SymbolState::undefined();
|
||||
|
||||
sym.merge(sym2);
|
||||
// sym.merge(sym2);
|
||||
|
||||
assert_declarations(&sym, true, &[1]);
|
||||
}
|
||||
// assert_declarations(&sym, true, &[1]);
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn set_may_be_undeclared() {
|
||||
let mut sym = SymbolState::undefined();
|
||||
sym.record_declaration(ScopedDefinitionId::from_u32(0));
|
||||
sym.set_may_be_undeclared();
|
||||
// #[test]
|
||||
// fn set_may_be_undeclared() {
|
||||
// let mut sym = SymbolState::undefined();
|
||||
// sym.record_declaration(ScopedDefinitionId::from_u32(0));
|
||||
// sym.set_may_be_undeclared();
|
||||
|
||||
assert_declarations(&sym, true, &[0]);
|
||||
}
|
||||
// assert_declarations(&sym, true, &[0]);
|
||||
// }
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::types::{binding_ty, infer_scope_types, Type};
|
||||
use crate::Db;
|
||||
@@ -54,7 +54,7 @@ impl HasTy for ast::ExpressionRef<'_> {
|
||||
let file_scope = index.expression_scope_id(*self);
|
||||
let scope = file_scope.to_scope_id(model.db, model.file);
|
||||
|
||||
let expression_id = self.scoped_ast_id(model.db, scope);
|
||||
let expression_id = self.scoped_expression_id(model.db, scope);
|
||||
infer_scope_types(model.db, scope).expression_ty(expression_id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -732,7 +732,20 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(_))
|
||||
Err(SitePackagesDiscoveryError::VenvDirCanonicalizationError(..))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reject_venv_that_is_not_a_directory() {
|
||||
let system = TestSystem::default();
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file("/.venv", "")
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(..))
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,8 @@ pub(crate) use self::infer::{
|
||||
};
|
||||
pub(crate) use self::signatures::Signature;
|
||||
use crate::module_resolver::file_to_module;
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::constraint::ConstraintNode;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::{self as symbol, ScopeId, ScopedSymbolId};
|
||||
use crate::semantic_index::{
|
||||
@@ -47,7 +48,7 @@ pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
|
||||
tracing::debug!("Checking file '{path}'", path = file.path(db));
|
||||
|
||||
let index = semantic_index(db, file);
|
||||
let mut diagnostics = TypeCheckDiagnostics::new();
|
||||
let mut diagnostics = TypeCheckDiagnostics::default();
|
||||
|
||||
for scope_id in index.scope_ids() {
|
||||
let result = infer_scope_types(db, scope_id);
|
||||
@@ -207,7 +208,7 @@ fn definition_expression_ty<'db>(
|
||||
let index = semantic_index(db, file);
|
||||
let file_scope = index.expression_scope_id(expression);
|
||||
let scope = file_scope.to_scope_id(db, file);
|
||||
let expr_id = expression.scoped_ast_id(db, scope);
|
||||
let expr_id = expression.scoped_expression_id(db, scope);
|
||||
if scope == definition.scope(db) {
|
||||
// expression is in the definition scope
|
||||
let inference = infer_definition_types(db, definition);
|
||||
@@ -222,6 +223,12 @@ fn definition_expression_ty<'db>(
|
||||
}
|
||||
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum UnconditionallyVisible {
Yes,
No,
}

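This marker is used further down to drop everything that precedes the last binding whose active constraints are always satisfied ("shrink the vector to only include everything from the last unconditionally visible binding"). A minimal sketch of that suffix filtering on plain data, rather than the real types (illustrative only, not part of this diff):

```rust
#[derive(Clone, Copy, PartialEq, Eq)]
enum Visibility {
    Unconditional,
    Conditional,
}

/// Keep only the elements from the last unconditionally visible one onward.
fn visible_suffix(items: &[(i64, Visibility)]) -> &[(i64, Visibility)] {
    let start = items
        .iter()
        .rposition(|(_, v)| *v == Visibility::Unconditional)
        .unwrap_or(0);
    &items[start..]
}

fn main() {
    use Visibility::*;
    let items = [(1, Conditional), (2, Unconditional), (3, Conditional)];
    // Binding `2` shadows everything before it; `3` is still conditionally live.
    assert_eq!(visible_suffix(&items).len(), 2);
}
```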
/// Infer the combined type of an iterator of bindings.
|
||||
///
|
||||
/// Will return a union if there is more than one binding.
|
||||
@@ -229,29 +236,88 @@ fn bindings_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
bindings_with_constraints: BindingWithConstraintsIterator<'_, 'db>,
|
||||
) -> Option<Type<'db>> {
|
||||
let mut def_types = bindings_with_constraints.map(
|
||||
let def_types = bindings_with_constraints.map(
|
||||
|BindingWithConstraints {
|
||||
binding,
|
||||
constraints,
|
||||
constraints_active_at_binding,
|
||||
}| {
|
||||
let mut constraint_tys = constraints
|
||||
.filter_map(|constraint| narrowing_constraint(db, constraint, binding))
|
||||
.peekable();
|
||||
let test_expr_tys = || {
|
||||
constraints_active_at_binding.clone().map(|c| {
|
||||
let ty = if let ConstraintNode::Expression(test_expr) = c.node {
|
||||
let inference = infer_expression_types(db, test_expr);
|
||||
let scope = test_expr.scope(db);
|
||||
inference
|
||||
.expression_ty(test_expr.node_ref(db).scoped_expression_id(db, scope))
|
||||
} else {
|
||||
// TODO: handle other constraint nodes
|
||||
todo_type!()
|
||||
};
|
||||
|
||||
let binding_ty = binding_ty(db, binding);
|
||||
if constraint_tys.peek().is_some() {
|
||||
constraint_tys
|
||||
.fold(
|
||||
IntersectionBuilder::new(db).add_positive(binding_ty),
|
||||
IntersectionBuilder::add_positive,
|
||||
)
|
||||
.build()
|
||||
(c, ty)
|
||||
})
|
||||
};
|
||||
|
||||
if test_expr_tys().any(|(c, test_expr_ty)| {
|
||||
if c.is_positive {
|
||||
test_expr_ty.bool(db).is_always_false()
|
||||
} else {
|
||||
test_expr_ty.bool(db).is_always_true()
|
||||
}
|
||||
}) {
|
||||
// TODO: do we need to call binding_ty(…) even if we don't need the result?
|
||||
(Type::Never, UnconditionallyVisible::No)
|
||||
} else {
|
||||
binding_ty
|
||||
let mut test_expr_tys_iter = test_expr_tys().peekable();
|
||||
|
||||
let unconditionally_visible = if test_expr_tys_iter.peek().is_some()
|
||||
&& test_expr_tys_iter.all(|(c, test_expr_ty)| {
|
||||
if c.is_positive {
|
||||
test_expr_ty.bool(db).is_always_true()
|
||||
} else {
|
||||
test_expr_ty.bool(db).is_always_false()
|
||||
}
|
||||
}) {
|
||||
UnconditionallyVisible::Yes
|
||||
} else {
|
||||
UnconditionallyVisible::No
|
||||
};
|
||||
|
||||
let mut constraint_tys = constraints
|
||||
.filter_map(|constraint| narrowing_constraint(db, constraint, binding))
|
||||
.peekable();
|
||||
|
||||
let binding_ty = binding_ty(db, binding);
|
||||
if constraint_tys.peek().is_some() {
|
||||
let intersection_ty = constraint_tys
|
||||
.fold(
|
||||
IntersectionBuilder::new(db).add_positive(binding_ty),
|
||||
IntersectionBuilder::add_positive,
|
||||
)
|
||||
.build();
|
||||
(intersection_ty, unconditionally_visible)
|
||||
} else {
|
||||
(binding_ty, unconditionally_visible)
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// TODO: get rid of all the collects and clean up, obviously
|
||||
let def_types: Vec<_> = def_types.collect();
|
||||
|
||||
// shrink the vector to only include everything from the last unconditionally visible binding
|
||||
let def_types: Vec<_> = def_types
|
||||
.iter()
|
||||
.rev()
|
||||
.take_while_inclusive(|(_, unconditionally_visible)| {
|
||||
*unconditionally_visible != UnconditionallyVisible::Yes
|
||||
})
|
||||
.map(|(ty, _)| *ty)
|
||||
.collect();
|
||||
|
||||
let mut def_types = def_types.into_iter().rev();
|
||||
|
||||
if let Some(first) = def_types.next() {
|
||||
if let Some(second) = def_types.next() {
|
||||
Some(UnionType::from_elements(
|
||||
@@ -287,7 +353,63 @@ fn declarations_ty<'db>(
|
||||
declarations: DeclarationsIterator<'_, 'db>,
|
||||
undeclared_ty: Option<Type<'db>>,
|
||||
) -> DeclaredTypeResult<'db> {
|
||||
let decl_types = declarations.map(|declaration| declaration_ty(db, declaration));
|
||||
let decl_types = declarations.map(|(declaration, constraints_active_at_declaration)| {
|
||||
let test_expr_tys = || {
|
||||
constraints_active_at_declaration.clone().map(|c| {
|
||||
let ty = if let ConstraintNode::Expression(test_expr) = c.node {
|
||||
let inference = infer_expression_types(db, test_expr);
|
||||
let scope = test_expr.scope(db);
|
||||
inference.expression_ty(test_expr.node_ref(db).scoped_expression_id(db, scope))
|
||||
} else {
|
||||
// TODO: handle other constraint nodes
|
||||
todo_type!()
|
||||
};
|
||||
|
||||
(c, ty)
|
||||
})
|
||||
};
|
||||
|
||||
if test_expr_tys().any(|(c, test_expr_ty)| {
|
||||
if c.is_positive {
|
||||
test_expr_ty.bool(db).is_always_false()
|
||||
} else {
|
||||
test_expr_ty.bool(db).is_always_true()
|
||||
}
|
||||
}) {
|
||||
(Type::Never, UnconditionallyVisible::No)
|
||||
} else {
|
||||
let mut test_expr_tys_iter = test_expr_tys().peekable();
|
||||
|
||||
if test_expr_tys_iter.peek().is_some()
|
||||
&& test_expr_tys_iter.all(|(c, test_expr_ty)| {
|
||||
if c.is_positive {
|
||||
test_expr_ty.bool(db).is_always_true()
|
||||
} else {
|
||||
test_expr_ty.bool(db).is_always_false()
|
||||
}
|
||||
})
|
||||
{
|
||||
(declaration_ty(db, declaration), UnconditionallyVisible::Yes)
|
||||
} else {
|
||||
(declaration_ty(db, declaration), UnconditionallyVisible::No)
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// TODO: get rid of all the collects and clean up, obviously
|
||||
let decl_types: Vec<_> = decl_types.collect();
|
||||
|
||||
// shrink the vector to only include everything from the last unconditionally visible binding
|
||||
let decl_types: Vec<_> = decl_types
|
||||
.iter()
|
||||
.rev()
|
||||
.take_while_inclusive(|(_, unconditionally_visible)| {
|
||||
*unconditionally_visible != UnconditionallyVisible::Yes
|
||||
})
|
||||
.map(|(ty, _)| *ty)
|
||||
.collect();
|
||||
|
||||
let decl_types = decl_types.into_iter().rev();
|
||||
|
||||
let mut all_types = undeclared_ty.into_iter().chain(decl_types);
|
||||
|
||||
@@ -324,6 +446,61 @@ fn declarations_ty<'db>(
|
||||
}
|
||||
}
|
||||
|
||||
/// Meta data for `Type::Todo`, which represents a known limitation in red-knot.
|
||||
#[cfg(debug_assertions)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub enum TodoType {
|
||||
FileAndLine(&'static str, u32),
|
||||
Message(&'static str),
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
impl std::fmt::Display for TodoType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
TodoType::FileAndLine(file, line) => write!(f, "[{file}:{line}]"),
|
||||
TodoType::Message(msg) => write!(f, "({msg})"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct TodoType;
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
impl std::fmt::Display for TodoType {
|
||||
fn fmt(&self, _: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a `Type::Todo` variant to represent a known limitation in the type system.
|
||||
///
|
||||
/// It can be used with a custom message (preferred): `todo_type!("PEP 604 not supported")`,
|
||||
/// or simply using `todo_type!()`, which will include information about the file and line.
|
||||
#[cfg(debug_assertions)]
|
||||
macro_rules! todo_type {
|
||||
() => {
|
||||
Type::Todo(crate::types::TodoType::FileAndLine(file!(), line!()))
|
||||
};
|
||||
($message:literal) => {
|
||||
Type::Todo(crate::types::TodoType::Message($message))
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
macro_rules! todo_type {
|
||||
() => {
|
||||
Type::Todo(crate::types::TodoType)
|
||||
};
|
||||
($message:literal) => {
|
||||
Type::Todo(crate::types::TodoType)
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use todo_type;
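
To illustrate the two invocation forms described in the doc comment above, here is a stripped-down standalone analogue of the macro (the real one lives in `crate::types`; this copy exists only so the example compiles on its own):

```rust
#[derive(Debug)]
enum TodoType {
    FileAndLine(&'static str, u32),
    Message(&'static str),
}

#[derive(Debug)]
enum Type {
    Todo(TodoType),
}

macro_rules! todo_type {
    () => {
        // Without a message, record where the limitation was hit.
        Type::Todo(TodoType::FileAndLine(file!(), line!()))
    };
    ($message:literal) => {
        // Preferred form: a short human-readable message.
        Type::Todo(TodoType::Message($message))
    };
}

fn main() {
    println!("{:?}", todo_type!());
    println!("{:?}", todo_type!("PEP 604 not supported"));
}
```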
|
||||
|
||||
/// Representation of a type: a set of possible values at runtime.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub enum Type<'db> {
|
||||
@@ -340,7 +517,9 @@ pub enum Type<'db> {
|
||||
/// General rule: `Todo` should only propagate when the presence of the input `Todo` caused the
|
||||
/// output to be unknown. An output should only be `Todo` if fixing all `Todo` inputs to be not
|
||||
/// `Todo` would change the output type.
|
||||
Todo,
|
||||
///
|
||||
/// This variant should be created with the `todo_type!` macro.
|
||||
Todo(TodoType),
|
||||
/// The empty set of values
|
||||
Never,
|
||||
/// A specific function object
|
||||
@@ -384,7 +563,7 @@ impl<'db> Type<'db> {
|
||||
}
|
||||
|
||||
pub const fn is_todo(&self) -> bool {
|
||||
matches!(self, Type::Todo)
|
||||
matches!(self, Type::Todo(_))
|
||||
}
|
||||
|
||||
pub const fn class_literal(class: Class<'db>) -> Self {
|
||||
@@ -480,6 +659,19 @@ impl<'db> Type<'db> {
|
||||
.expect("Expected a Type::IntLiteral variant")
|
||||
}
|
||||
|
||||
pub const fn into_known_instance(self) -> Option<KnownInstanceType<'db>> {
|
||||
match self {
|
||||
Type::KnownInstance(known_instance) => Some(known_instance),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn expect_known_instance(self) -> KnownInstanceType<'db> {
|
||||
self.into_known_instance()
|
||||
.expect("Expected a Type::KnownInstance variant")
|
||||
}
|
||||
|
||||
pub const fn is_boolean_literal(&self) -> bool {
|
||||
matches!(self, Type::BooleanLiteral(..))
|
||||
}
|
||||
@@ -530,8 +722,8 @@ impl<'db> Type<'db> {
|
||||
return true;
|
||||
}
|
||||
match (self, target) {
|
||||
(Type::Unknown | Type::Any | Type::Todo, _) => false,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo) => false,
|
||||
(Type::Unknown | Type::Any | Type::Todo(_), _) => false,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo(_)) => false,
|
||||
(Type::Never, _) => true,
|
||||
(_, Type::Never) => false,
|
||||
(_, Type::Instance(InstanceType { class }))
|
||||
@@ -666,8 +858,8 @@ impl<'db> Type<'db> {
|
||||
return true;
|
||||
}
|
||||
match (self, target) {
|
||||
(Type::Unknown | Type::Any | Type::Todo, _) => true,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo) => true,
|
||||
(Type::Unknown | Type::Any | Type::Todo(_), _) => true,
|
||||
(_, Type::Unknown | Type::Any | Type::Todo(_)) => true,
|
||||
(Type::Union(union), ty) => union
|
||||
.elements(db)
|
||||
.iter()
|
||||
@@ -698,22 +890,7 @@ impl<'db> Type<'db> {
|
||||
|
||||
// TODO: Once we have support for final classes, we can establish that
|
||||
// `Type::SubclassOf('FinalClass')` is equivalent to `Type::ClassLiteral('FinalClass')`.
|
||||
|
||||
// TODO: The following is a workaround that is required to unify the two different versions
|
||||
// of `NoneType` and `NoDefaultType` in typeshed. This should not be required anymore once
|
||||
// we understand `sys.version_info` branches.
|
||||
self == other
|
||||
|| matches!((self, other),
|
||||
(
|
||||
Type::Instance(InstanceType { class: self_class }),
|
||||
Type::Instance(InstanceType { class: target_class })
|
||||
)
|
||||
if {
|
||||
let self_known = self_class.known(db);
|
||||
matches!(self_known, Some(KnownClass::NoneType | KnownClass::NoDefaultType))
|
||||
&& self_known == target_class.known(db)
|
||||
}
|
||||
)
|
||||
self == other || matches!((self, other), (Type::Todo(_), Type::Todo(_)))
|
||||
}
|
||||
|
||||
/// Return true if this type and `other` have no common elements.
|
||||
@@ -726,7 +903,7 @@ impl<'db> Type<'db> {
|
||||
|
||||
(Type::Any, _) | (_, Type::Any) => false,
|
||||
(Type::Unknown, _) | (_, Type::Unknown) => false,
|
||||
(Type::Todo, _) | (_, Type::Todo) => false,
|
||||
(Type::Todo(_), _) | (_, Type::Todo(_)) => false,
|
||||
|
||||
(Type::Union(union), other) | (other, Type::Union(union)) => union
|
||||
.elements(db)
|
||||
@@ -931,7 +1108,7 @@ impl<'db> Type<'db> {
|
||||
Type::Any
|
||||
| Type::Never
|
||||
| Type::Unknown
|
||||
| Type::Todo
|
||||
| Type::Todo(_)
|
||||
| Type::IntLiteral(..)
|
||||
| Type::StringLiteral(..)
|
||||
| Type::BytesLiteral(..)
|
||||
@@ -1007,7 +1184,10 @@ impl<'db> Type<'db> {
|
||||
|
||||
Type::Instance(InstanceType { class }) => match class.known(db) {
|
||||
Some(
|
||||
KnownClass::NoneType | KnownClass::NoDefaultType | KnownClass::VersionInfo,
|
||||
KnownClass::NoneType
|
||||
| KnownClass::NoDefaultType
|
||||
| KnownClass::VersionInfo
|
||||
| KnownClass::TypeAliasType,
|
||||
) => true,
|
||||
Some(
|
||||
KnownClass::Bool
|
||||
@@ -1034,7 +1214,7 @@ impl<'db> Type<'db> {
|
||||
Type::Any
|
||||
| Type::Never
|
||||
| Type::Unknown
|
||||
| Type::Todo
|
||||
| Type::Todo(_)
|
||||
| Type::Union(..)
|
||||
| Type::Intersection(..)
|
||||
| Type::LiteralString => false,
|
||||
@@ -1052,12 +1232,12 @@ impl<'db> Type<'db> {
|
||||
Type::Any => Type::Any.into(),
|
||||
Type::Never => {
|
||||
// TODO: attribute lookup on Never type
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::Unknown => Type::Unknown.into(),
|
||||
Type::FunctionLiteral(_) => {
|
||||
// TODO: attribute lookup on function type
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::ModuleLiteral(file) => {
|
||||
// `__dict__` is a very special member that is never overridden by module globals;
|
||||
@@ -1107,7 +1287,7 @@ impl<'db> Type<'db> {
|
||||
Type::IntLiteral(Program::get(db).target_version(db).minor.into())
|
||||
}
|
||||
// TODO MRO? get_own_instance_member, get_instance_member
|
||||
_ => Type::Todo,
|
||||
_ => todo_type!("instance attributes"),
|
||||
};
|
||||
ty.into()
|
||||
}
|
||||
@@ -1149,36 +1329,36 @@ impl<'db> Type<'db> {
|
||||
Type::Intersection(_) => {
|
||||
// TODO perform the get_member on each type in the intersection
|
||||
// TODO return the intersection of those results
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::IntLiteral(_) => {
|
||||
// TODO raise error
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::BooleanLiteral(_) => Type::Todo.into(),
|
||||
Type::BooleanLiteral(_) => todo_type!().into(),
|
||||
Type::StringLiteral(_) => {
|
||||
// TODO defer to `typing.LiteralString`/`builtins.str` methods
|
||||
// from typeshed's stubs
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::LiteralString => {
|
||||
// TODO defer to `typing.LiteralString`/`builtins.str` methods
|
||||
// from typeshed's stubs
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::BytesLiteral(_) => {
|
||||
// TODO defer to Type::Instance(<bytes from typeshed>).member
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::SliceLiteral(_) => {
|
||||
// TODO defer to `builtins.slice` methods
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::Tuple(_) => {
|
||||
// TODO: implement tuple methods
|
||||
Type::Todo.into()
|
||||
todo_type!().into()
|
||||
}
|
||||
Type::Todo => Type::Todo.into(),
|
||||
&todo @ Type::Todo(_) => todo.into(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1188,7 +1368,7 @@ impl<'db> Type<'db> {
|
||||
/// when `bool(x)` is called on an object `x`.
|
||||
fn bool(&self, db: &'db dyn Db) -> Truthiness {
|
||||
match self {
|
||||
Type::Any | Type::Todo | Type::Never | Type::Unknown => Truthiness::Ambiguous,
|
||||
Type::Any | Type::Todo(_) | Type::Never | Type::Unknown => Truthiness::Ambiguous,
|
||||
Type::FunctionLiteral(_) => Truthiness::AlwaysTrue,
|
||||
Type::ModuleLiteral(_) => Truthiness::AlwaysTrue,
|
||||
Type::ClassLiteral(_) => {
|
||||
@@ -1329,7 +1509,7 @@ impl<'db> Type<'db> {
|
||||
// `Any` is callable, and its return type is also `Any`.
|
||||
Type::Any => CallOutcome::callable(Type::Any),
|
||||
|
||||
Type::Todo => CallOutcome::callable(Type::Todo),
|
||||
Type::Todo(_) => CallOutcome::callable(todo_type!()),
|
||||
|
||||
Type::Unknown => CallOutcome::callable(Type::Unknown),
|
||||
|
||||
@@ -1342,7 +1522,7 @@ impl<'db> Type<'db> {
|
||||
),
|
||||
|
||||
// TODO: intersection types
|
||||
Type::Intersection(_) => CallOutcome::callable(Type::Todo),
|
||||
Type::Intersection(_) => CallOutcome::callable(todo_type!()),
|
||||
|
||||
_ => CallOutcome::not_callable(self),
|
||||
}
|
||||
@@ -1381,7 +1561,7 @@ impl<'db> Type<'db> {
|
||||
};
|
||||
}
|
||||
|
||||
if matches!(self, Type::Unknown | Type::Any | Type::Todo) {
|
||||
if matches!(self, Type::Unknown | Type::Any | Type::Todo(_)) {
|
||||
// Explicit handling of `Unknown` and `Any` necessary until `type[Unknown]` and
|
||||
// `type[Any]` are not defined as `Todo` anymore.
|
||||
return IterationOutcome::Iterable { element_ty: self };
|
||||
@@ -1440,14 +1620,14 @@ impl<'db> Type<'db> {
|
||||
pub fn to_instance(&self, db: &'db dyn Db) -> Type<'db> {
|
||||
match self {
|
||||
Type::Any => Type::Any,
|
||||
Type::Todo => Type::Todo,
|
||||
todo @ Type::Todo(_) => *todo,
|
||||
Type::Unknown => Type::Unknown,
|
||||
Type::Never => Type::Never,
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => Type::instance(*class),
|
||||
Type::SubclassOf(SubclassOfType { class }) => Type::instance(*class),
|
||||
Type::Union(union) => union.map(db, |element| element.to_instance(db)),
|
||||
// TODO: we can probably do better here: --Alex
|
||||
Type::Intersection(_) => Type::Todo,
|
||||
Type::Intersection(_) => todo_type!(),
|
||||
// TODO: calling `.to_instance()` on any of these should result in a diagnostic,
|
||||
// since they already indicate that the object is an instance of some kind:
|
||||
Type::BooleanLiteral(_)
|
||||
@@ -1478,7 +1658,11 @@ impl<'db> Type<'db> {
|
||||
Type::Unknown => Type::Unknown,
|
||||
// TODO map this to a new `Type::TypeVar` variant
|
||||
Type::KnownInstance(KnownInstanceType::TypeVar(_)) => *self,
|
||||
_ => Type::Todo,
|
||||
Type::KnownInstance(KnownInstanceType::TypeAliasType(alias)) => alias.value_ty(db),
|
||||
Type::KnownInstance(KnownInstanceType::Never | KnownInstanceType::NoReturn) => {
|
||||
Type::Never
|
||||
}
|
||||
_ => todo_type!(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1553,8 +1737,8 @@ impl<'db> Type<'db> {
|
||||
// TODO: `type[Unknown]`?
|
||||
Type::Unknown => Type::Unknown,
|
||||
// TODO intersections
|
||||
Type::Intersection(_) => Type::Todo,
|
||||
Type::Todo => Type::Todo,
|
||||
Type::Intersection(_) => todo_type!(),
|
||||
todo @ Type::Todo(_) => *todo,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1642,6 +1826,7 @@ pub enum KnownClass {
|
||||
// Typing
|
||||
SpecialForm,
|
||||
TypeVar,
|
||||
TypeAliasType,
|
||||
NoDefaultType,
|
||||
// sys
|
||||
VersionInfo,
|
||||
@@ -1668,6 +1853,7 @@ impl<'db> KnownClass {
|
||||
Self::NoneType => "NoneType",
|
||||
Self::SpecialForm => "_SpecialForm",
|
||||
Self::TypeVar => "TypeVar",
|
||||
Self::TypeAliasType => "TypeAliasType",
|
||||
Self::NoDefaultType => "_NoDefaultType",
|
||||
// This is the name the type of `sys.version_info` has in typeshed,
|
||||
// which is different to what `type(sys.version_info).__name__` is at runtime.
|
||||
@@ -1683,13 +1869,13 @@ impl<'db> KnownClass {
|
||||
}
|
||||
|
||||
pub fn to_class_literal(self, db: &'db dyn Db) -> Type<'db> {
|
||||
core_module_symbol(db, self.canonical_module(), self.as_str())
|
||||
core_module_symbol(db, self.canonical_module(db), self.as_str())
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
}
|
||||
|
||||
/// Return the module in which we should look up the definition for this class
|
||||
pub(crate) const fn canonical_module(self) -> CoreStdlibModule {
|
||||
pub(crate) fn canonical_module(self, db: &'db dyn Db) -> CoreStdlibModule {
|
||||
match self {
|
||||
Self::Bool
|
||||
| Self::Object
|
||||
@@ -1706,11 +1892,19 @@ impl<'db> KnownClass {
|
||||
Self::VersionInfo => CoreStdlibModule::Sys,
|
||||
Self::GenericAlias | Self::ModuleType | Self::FunctionType => CoreStdlibModule::Types,
|
||||
Self::NoneType => CoreStdlibModule::Typeshed,
|
||||
Self::SpecialForm | Self::TypeVar => CoreStdlibModule::Typing,
|
||||
// TODO when we understand sys.version_info, we will need an explicit fallback here,
|
||||
// because typing_extensions has a 3.13+ re-export for the `typing.NoDefault`
|
||||
// singleton, but not for `typing._NoDefaultType`
|
||||
Self::NoDefaultType => CoreStdlibModule::TypingExtensions,
|
||||
Self::SpecialForm | Self::TypeVar | Self::TypeAliasType => CoreStdlibModule::Typing,
|
||||
Self::NoDefaultType => {
|
||||
let python_version = Program::get(db).target_version(db);
|
||||
|
||||
// typing_extensions has a 3.13+ re-export for the `typing.NoDefault`
|
||||
// singleton, but not for `typing._NoDefaultType`. So we need to switch
|
||||
// to `typing.NoDefault` for newer versions:
|
||||
if python_version.major >= 3 && python_version.minor >= 13 {
|
||||
CoreStdlibModule::Typing
|
||||
} else {
|
||||
CoreStdlibModule::TypingExtensions
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1720,7 +1914,7 @@ impl<'db> KnownClass {
|
||||
const fn is_singleton(self) -> bool {
|
||||
// TODO there are other singleton types (EllipsisType, NotImplementedType)
|
||||
match self {
|
||||
Self::NoneType | Self::NoDefaultType | Self::VersionInfo => true,
|
||||
Self::NoneType | Self::NoDefaultType | Self::VersionInfo | Self::TypeAliasType => true,
|
||||
Self::Bool
|
||||
| Self::Object
|
||||
| Self::Bytes
|
||||
@@ -1762,6 +1956,7 @@ impl<'db> KnownClass {
|
||||
"NoneType" => Self::NoneType,
|
||||
"ModuleType" => Self::ModuleType,
|
||||
"FunctionType" => Self::FunctionType,
|
||||
"TypeAliasType" => Self::TypeAliasType,
|
||||
"_SpecialForm" => Self::SpecialForm,
|
||||
"_NoDefaultType" => Self::NoDefaultType,
|
||||
"_version_info" => Self::VersionInfo,
|
||||
@@ -1769,11 +1964,11 @@ impl<'db> KnownClass {
|
||||
};
|
||||
|
||||
let module = file_to_module(db, file)?;
|
||||
candidate.check_module(&module).then_some(candidate)
|
||||
candidate.check_module(db, &module).then_some(candidate)
|
||||
}
|
||||
|
||||
/// Return `true` if the module of `self` matches `module_name`
|
||||
fn check_module(self, module: &Module) -> bool {
|
||||
fn check_module(self, db: &dyn Db, module: &Module) -> bool {
|
||||
if !module.search_path().is_standard_library() {
|
||||
return false;
|
||||
}
|
||||
@@ -1793,9 +1988,9 @@ impl<'db> KnownClass {
|
||||
| Self::GenericAlias
|
||||
| Self::ModuleType
|
||||
| Self::VersionInfo
|
||||
| Self::FunctionType => module.name() == self.canonical_module().as_str(),
|
||||
| Self::FunctionType => module.name() == self.canonical_module(db).as_str(),
|
||||
Self::NoneType => matches!(module.name().as_str(), "_typeshed" | "types"),
|
||||
Self::SpecialForm | Self::TypeVar | Self::NoDefaultType => {
|
||||
Self::SpecialForm | Self::TypeVar | Self::TypeAliasType | Self::NoDefaultType => {
|
||||
matches!(module.name().as_str(), "typing" | "typing_extensions")
|
||||
}
|
||||
}
|
||||
@@ -1807,24 +2002,44 @@ impl<'db> KnownClass {
|
||||
pub enum KnownInstanceType<'db> {
|
||||
/// The symbol `typing.Literal` (which can also be found as `typing_extensions.Literal`)
|
||||
Literal,
|
||||
/// The symbol `typing.Optional` (which can also be found as `typing_extensions.Optional`)
|
||||
Optional,
|
||||
/// The symbol `typing.Union` (which can also be found as `typing_extensions.Union`)
|
||||
Union,
|
||||
/// The symbol `typing.NoReturn` (which can also be found as `typing_extensions.NoReturn`)
|
||||
NoReturn,
|
||||
/// The symbol `typing.Never` available since 3.11 (which can also be found as `typing_extensions.Never`)
|
||||
Never,
|
||||
/// A single instance of `typing.TypeVar`
|
||||
TypeVar(TypeVarInstance<'db>),
|
||||
/// A single instance of `typing.TypeAliasType` (PEP 695 type alias)
|
||||
TypeAliasType(TypeAliasType<'db>),
|
||||
// TODO: fill this enum out with more special forms, etc.
|
||||
}
|
||||
|
||||
impl<'db> KnownInstanceType<'db> {
|
||||
pub const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
KnownInstanceType::Literal => "Literal",
|
||||
KnownInstanceType::TypeVar(_) => "TypeVar",
|
||||
Self::Literal => "Literal",
|
||||
Self::Optional => "Optional",
|
||||
Self::Union => "Union",
|
||||
Self::TypeVar(_) => "TypeVar",
|
||||
Self::NoReturn => "NoReturn",
|
||||
Self::Never => "Never",
|
||||
Self::TypeAliasType(_) => "TypeAliasType",
|
||||
}
|
||||
}
|
||||
|
||||
/// Evaluate the known instance in boolean context
|
||||
pub const fn bool(self) -> Truthiness {
|
||||
match self {
|
||||
Self::Literal => Truthiness::AlwaysTrue,
|
||||
Self::TypeVar(_) => Truthiness::AlwaysTrue,
|
||||
Self::Literal
|
||||
| Self::Optional
|
||||
| Self::TypeVar(_)
|
||||
| Self::Union
|
||||
| Self::NoReturn
|
||||
| Self::Never
|
||||
| Self::TypeAliasType(_) => Truthiness::AlwaysTrue,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1832,7 +2047,12 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub fn repr(self, db: &'db dyn Db) -> &'db str {
|
||||
match self {
|
||||
Self::Literal => "typing.Literal",
|
||||
Self::Optional => "typing.Optional",
|
||||
Self::Union => "typing.Union",
|
||||
Self::NoReturn => "typing.NoReturn",
|
||||
Self::Never => "typing.Never",
|
||||
Self::TypeVar(typevar) => typevar.name(db),
|
||||
Self::TypeAliasType(_) => "typing.TypeAliasType",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1840,7 +2060,12 @@ impl<'db> KnownInstanceType<'db> {
|
||||
pub const fn class(self) -> KnownClass {
|
||||
match self {
|
||||
Self::Literal => KnownClass::SpecialForm,
|
||||
Self::Optional => KnownClass::SpecialForm,
|
||||
Self::Union => KnownClass::SpecialForm,
|
||||
Self::NoReturn => KnownClass::SpecialForm,
|
||||
Self::Never => KnownClass::SpecialForm,
|
||||
Self::TypeVar(_) => KnownClass::TypeVar,
|
||||
Self::TypeAliasType(_) => KnownClass::TypeAliasType,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1859,6 +2084,10 @@ impl<'db> KnownInstanceType<'db> {
|
||||
}
|
||||
match (module.name().as_str(), instance_name) {
|
||||
("typing" | "typing_extensions", "Literal") => Some(Self::Literal),
|
||||
("typing" | "typing_extensions", "Optional") => Some(Self::Optional),
|
||||
("typing" | "typing_extensions", "Union") => Some(Self::Union),
|
||||
("typing" | "typing_extensions", "NoReturn") => Some(Self::NoReturn),
|
||||
("typing" | "typing_extensions", "Never") => Some(Self::Never),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -1866,23 +2095,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
fn member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
let ty = match (self, name) {
|
||||
(Self::TypeVar(typevar), "__name__") => Type::string_literal(db, typevar.name(db)),
|
||||
(Self::TypeVar(typevar), "__bound__") => typevar
|
||||
.upper_bound(db)
|
||||
.map(|ty| ty.to_meta_type(db))
|
||||
.unwrap_or_else(|| KnownClass::NoneType.to_instance(db)),
|
||||
(Self::TypeVar(typevar), "__constraints__") => {
|
||||
let tuple_elements: Vec<Type<'db>> = typevar
|
||||
.constraints(db)
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|ty| ty.to_meta_type(db))
|
||||
.collect();
|
||||
Type::tuple(db, &tuple_elements)
|
||||
}
|
||||
(Self::TypeVar(typevar), "__default__") => typevar
|
||||
.default_ty(db)
|
||||
.map(|ty| ty.to_meta_type(db))
|
||||
.unwrap_or_else(|| KnownClass::NoDefaultType.to_instance(db)),
|
||||
(Self::TypeAliasType(alias), "__name__") => Type::string_literal(db, alias.name(db)),
|
||||
_ => return self.instance_fallback(db).member(db, name),
|
||||
};
|
||||
ty.into()
|
||||
@@ -1914,6 +2127,7 @@ pub struct TypeVarInstance<'db> {
|
||||
}
|
||||
|
||||
impl<'db> TypeVarInstance<'db> {
|
||||
#[allow(unused)]
|
||||
pub(crate) fn upper_bound(self, db: &'db dyn Db) -> Option<Type<'db>> {
|
||||
if let Some(TypeVarBoundOrConstraints::UpperBound(ty)) = self.bound_or_constraints(db) {
|
||||
Some(ty)
|
||||
@@ -1922,6 +2136,7 @@ impl<'db> TypeVarInstance<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub(crate) fn constraints(self, db: &'db dyn Db) -> Option<&[Type<'db>]> {
|
||||
if let Some(TypeVarBoundOrConstraints::Constraints(tuple)) = self.bound_or_constraints(db) {
|
||||
Some(tuple.elements(db))
|
||||
@@ -2295,6 +2510,14 @@ impl Truthiness {
|
||||
matches!(self, Truthiness::Ambiguous)
|
||||
}
|
||||
|
||||
const fn is_always_false(self) -> bool {
|
||||
matches!(self, Truthiness::AlwaysFalse)
|
||||
}
|
||||
|
||||
const fn is_always_true(self) -> bool {
|
||||
matches!(self, Truthiness::AlwaysTrue)
|
||||
}
|
||||
|
||||
const fn negate(self) -> Self {
|
||||
match self {
|
||||
Self::AlwaysTrue => Self::AlwaysFalse,
|
||||
@@ -2594,7 +2817,7 @@ impl<'db> Class<'db> {
|
||||
// TODO: If the metaclass is not a class, we should verify that it's a callable
|
||||
// which accepts the same arguments as `type.__new__` (otherwise error), and return
|
||||
// the meta-type of its return type. (And validate that is a class type?)
|
||||
return Ok(Type::Todo);
|
||||
return Ok(todo_type!("metaclass not a class"));
|
||||
};
|
||||
|
||||
// Reconcile all base classes' metaclasses with the candidate metaclass.
|
||||
@@ -2708,6 +2931,27 @@ impl<'db> Class<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct TypeAliasType<'db> {
|
||||
#[return_ref]
|
||||
pub name: ast::name::Name,
|
||||
|
||||
rhs_scope: ScopeId<'db>,
|
||||
}
|
||||
|
||||
#[salsa::tracked]
|
||||
impl<'db> TypeAliasType<'db> {
|
||||
#[salsa::tracked]
|
||||
pub fn value_ty(self, db: &'db dyn Db) -> Type<'db> {
|
||||
let scope = self.rhs_scope(db);
|
||||
|
||||
let type_alias_stmt_node = scope.node(db).expect_type_alias();
|
||||
let definition = semantic_index(db, scope.file(db)).definition(type_alias_stmt_node);
|
||||
|
||||
definition_expression_ty(db, definition, &type_alias_stmt_node.value)
|
||||
}
|
||||
}
|
||||
|
||||
/// Either the explicit `metaclass=` keyword of the class, or the inferred metaclass of one of its base classes.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) struct MetaclassCandidate<'db> {
|
||||
@@ -2894,6 +3138,11 @@ impl<'db> TupleType<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure that the `Type` enum does not grow unexpectedly.
|
||||
#[cfg(not(debug_assertions))]
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
static_assertions::assert_eq_size!(Type, [u8; 16]);
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use super::*;
|
||||
@@ -2909,14 +3158,7 @@ pub(crate) mod tests {
|
||||
use ruff_python_ast as ast;
|
||||
use test_case::test_case;
|
||||
|
||||
#[cfg(target_pointer_width = "64")]
|
||||
#[test]
|
||||
fn no_bloat_enum_sizes() {
|
||||
use std::mem::size_of;
|
||||
assert_eq!(size_of::<Type>(), 16);
|
||||
}
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
pub(crate) fn setup_db_with_python_version(python_version: PythonVersion) -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
@@ -2927,7 +3169,7 @@ pub(crate) mod tests {
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
target_version: python_version,
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
@@ -2936,6 +3178,10 @@ pub(crate) mod tests {
|
||||
db
|
||||
}
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
setup_db_with_python_version(PythonVersion::default())
|
||||
}
|
||||
|
||||
/// A test representation of a type that can be transformed unambiguously into a real Type,
|
||||
/// given a db.
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -2969,7 +3215,7 @@ pub(crate) mod tests {
|
||||
Ty::Unknown => Type::Unknown,
|
||||
Ty::None => Type::none(db),
|
||||
Ty::Any => Type::Any,
|
||||
Ty::Todo => Type::Todo,
|
||||
Ty::Todo => todo_type!("Ty::Todo"),
|
||||
Ty::IntLiteral(n) => Type::IntLiteral(n),
|
||||
Ty::StringLiteral(s) => Type::string_literal(db, s),
|
||||
Ty::BooleanLiteral(b) => Type::BooleanLiteral(b),
|
||||
@@ -3383,13 +3629,23 @@ pub(crate) mod tests {
#[test_case(Ty::None)]
#[test_case(Ty::BooleanLiteral(true))]
#[test_case(Ty::BooleanLiteral(false))]
#[test_case(Ty::KnownClassInstance(KnownClass::NoDefaultType))]
fn is_singleton(from: Ty) {
let db = setup_db();

assert!(from.into_type(&db).is_singleton(&db));
}

/// Tests that `NoDefaultType` instances are recognized as singletons on Python 3.12 and 3.13.
#[test_case(PythonVersion::PY312)]
#[test_case(PythonVersion::PY313)]
fn no_default_type_is_singleton(python_version: PythonVersion) {
let db = setup_db_with_python_version(python_version);

let no_default = Ty::KnownClassInstance(KnownClass::NoDefaultType).into_type(&db);

assert!(no_default.is_singleton(&db));
}
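For context, the `NoDefault` sentinel behind this test is also a singleton at runtime, which is what makes identity-based checks on it sound. A rough Python sketch, assuming a recent `typing_extensions` (PEP 696 backport):

from typing_extensions import NoDefault, TypeVar

T = TypeVar("T")  # no default supplied

assert not T.has_default()
assert T.__default__ is NoDefault  # NoDefault is a singleton, so `is` is the intended check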
#[test_case(Ty::None)]
|
||||
#[test_case(Ty::BooleanLiteral(true))]
|
||||
#[test_case(Ty::IntLiteral(1))]
|
||||
@@ -3560,4 +3816,95 @@ pub(crate) mod tests {
|
||||
|
||||
Ok(())
|
||||
}

#[test]
fn type_alias_types() -> anyhow::Result<()> {
let mut db = setup_db();

db.write_dedented(
"src/mod.py",
r#"
type Alias1 = int
type Alias2 = int
"#,
)?;

let mod_py = system_path_to_file(&db, "src/mod.py")?;
let ty_alias1 = global_symbol(&db, mod_py, "Alias1").expect_type();
let ty_alias2 = global_symbol(&db, mod_py, "Alias2").expect_type();

let Type::KnownInstance(KnownInstanceType::TypeAliasType(alias1)) = ty_alias1 else {
panic!("Expected TypeAliasType, got {ty_alias1:?}");
};
assert_eq!(alias1.name(&db), "Alias1");
assert_eq!(alias1.value_ty(&db), KnownClass::Int.to_instance(&db));

// Two type aliases are distinct and disjoint, even if they refer to the same type
assert!(!ty_alias1.is_equivalent_to(&db, ty_alias2));
assert!(ty_alias1.is_disjoint_from(&db, ty_alias2));

Ok(())
}
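The semantics asserted above mirror runtime behaviour on Python 3.12+: each `type` statement produces its own `TypeAliasType` object, so two aliases with the same right-hand side are still distinct. A small Python sketch:

type Alias1 = int
type Alias2 = int

assert Alias1 is not Alias2     # distinct TypeAliasType objects
assert Alias1.__value__ is int
assert Alias2.__value__ is int  # ...even though both evaluate to `int`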
/// All other tests also make sure that `Type::Todo` works as expected. This particular
|
||||
/// test makes sure that we handle `Todo` types correctly, even if they originate from
|
||||
/// different sources.
|
||||
#[test]
|
||||
fn todo_types() {
|
||||
let db = setup_db();
|
||||
|
||||
let todo1 = todo_type!("1");
|
||||
let todo2 = todo_type!("2");
|
||||
let todo3 = todo_type!();
|
||||
let todo4 = todo_type!();
|
||||
|
||||
assert!(todo1.is_equivalent_to(&db, todo2));
|
||||
assert!(todo3.is_equivalent_to(&db, todo4));
|
||||
assert!(todo1.is_equivalent_to(&db, todo3));
|
||||
|
||||
assert!(todo1.is_subtype_of(&db, todo2));
|
||||
assert!(todo2.is_subtype_of(&db, todo1));
|
||||
|
||||
assert!(todo3.is_subtype_of(&db, todo4));
|
||||
assert!(todo4.is_subtype_of(&db, todo3));
|
||||
|
||||
assert!(todo1.is_subtype_of(&db, todo3));
|
||||
assert!(todo3.is_subtype_of(&db, todo1));
|
||||
|
||||
let int = KnownClass::Int.to_instance(&db);
|
||||
|
||||
assert!(int.is_assignable_to(&db, todo1));
|
||||
assert!(int.is_assignable_to(&db, todo3));
|
||||
|
||||
assert!(todo1.is_assignable_to(&db, int));
|
||||
assert!(todo3.is_assignable_to(&db, int));
|
||||
|
||||
// We lose information when combining several `Todo` types. This is an
|
||||
// acknowledged limitation of the current implementation. We cannot
|
||||
// easily store the meta information of several `Todo`s in a single
|
||||
// variant, as `TodoType` needs to implement `Copy`, meaning it can't
|
||||
// contain `Vec`/`Box`/etc., and can't be boxed itself.
|
||||
//
|
||||
// Lifting this restriction would require us to intern `TodoType` in
|
||||
// salsa, but that would mean we would have to pass in `db` everywhere.
|
||||
|
||||
// A union of several `Todo` types collapses to a single `Todo` type:
|
||||
assert!(UnionType::from_elements(&db, vec![todo1, todo2, todo3, todo4]).is_todo());
|
||||
|
||||
// And similar for intersection types:
|
||||
assert!(IntersectionBuilder::new(&db)
|
||||
.add_positive(todo1)
|
||||
.add_positive(todo2)
|
||||
.add_positive(todo3)
|
||||
.add_positive(todo4)
|
||||
.build()
|
||||
.is_todo());
|
||||
assert!(IntersectionBuilder::new(&db)
|
||||
.add_positive(todo1)
|
||||
.add_negative(todo2)
|
||||
.add_positive(todo3)
|
||||
.add_negative(todo4)
|
||||
.build()
|
||||
.is_todo());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -128,7 +128,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![InnerIntersectionBuilder::new()],
|
||||
intersections: vec![InnerIntersectionBuilder::default()],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,10 +231,6 @@ struct InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> InnerIntersectionBuilder<'db> {
|
||||
fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Adds a positive type to this intersection.
|
||||
fn add_positive(&mut self, db: &'db dyn Db, new_positive: Type<'db>) {
|
||||
if let Type::Intersection(other) = new_positive {
|
||||
@@ -253,7 +249,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.find(|element| element.is_boolean_literal())
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!value));
|
||||
return;
|
||||
}
|
||||
@@ -272,7 +268,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
// A & B = Never if A and B are disjoint
|
||||
if new_positive.is_disjoint_from(db, *existing_positive) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -285,7 +281,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for (index, existing_negative) in self.negative.iter().enumerate() {
|
||||
// S & ~T = Never if S <: T
|
||||
if new_positive.is_subtype_of(db, *existing_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -313,7 +309,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
self.add_positive(db, *neg);
|
||||
}
|
||||
}
|
||||
ty @ (Type::Any | Type::Unknown | Type::Todo) => {
|
||||
ty @ (Type::Any | Type::Unknown | Type::Todo(_)) => {
|
||||
// Adding any of these types to the negative side of an intersection
|
||||
// is equivalent to adding it to the positive side. We do this to
|
||||
// simplify the representation.
|
||||
@@ -326,7 +322,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.any(|pos| *pos == KnownClass::Bool.to_instance(db)) =>
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!bool));
|
||||
}
|
||||
_ => {
|
||||
@@ -348,7 +344,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for existing_positive in &self.positive {
|
||||
// S & ~T = Never if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -383,7 +379,7 @@ mod tests {
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::stdlib::typing_symbol;
|
||||
use crate::types::{global_symbol, KnownClass, UnionBuilder};
|
||||
use crate::types::{global_symbol, todo_type, KnownClass, UnionBuilder};
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
@@ -991,7 +987,7 @@ mod tests {
|
||||
|
||||
#[test_case(Type::Any)]
|
||||
#[test_case(Type::Unknown)]
|
||||
#[test_case(Type::Todo)]
|
||||
#[test_case(todo_type!())]
|
||||
fn build_intersection_t_and_negative_t_does_not_simplify(ty: Type) {
|
||||
let db = setup_db();
|
||||
|
||||
|
||||
@@ -73,10 +73,6 @@ pub struct TypeCheckDiagnostics {
|
||||
}
|
||||
|
||||
impl TypeCheckDiagnostics {
|
||||
pub fn new() -> Self {
|
||||
Self { inner: Vec::new() }
|
||||
}
|
||||
|
||||
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
|
||||
self.inner.push(Arc::new(diagnostic));
|
||||
}
|
||||
@@ -148,7 +144,7 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
Self {
|
||||
db,
|
||||
file,
|
||||
diagnostics: TypeCheckDiagnostics::new(),
|
||||
diagnostics: TypeCheckDiagnostics::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -77,7 +77,7 @@ impl Display for DisplayRepresentation<'_> {
|
||||
}
|
||||
// `[Type::Todo]`'s display should make it explicit that this is not a valid display of
// any other type
|
||||
Type::Todo => f.write_str("@Todo"),
|
||||
Type::Todo(todo) => write!(f, "@Todo{todo}"),
|
||||
Type::ModuleLiteral(file) => {
|
||||
write!(f, "<module '{:?}'>", file.path(self.db))
|
||||
}
|
||||
|
||||
@@ -38,7 +38,7 @@ use salsa::plumbing::AsId;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{file_to_module, resolve_module};
|
||||
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
|
||||
use crate::semantic_index::ast_ids::{HasScopedExpressionId, HasScopedUseId, ScopedExpressionId};
|
||||
use crate::semantic_index::definition::{
|
||||
AssignmentDefinitionKind, Definition, DefinitionKind, DefinitionNodeKey,
|
||||
ExceptHandlerDefinitionKind, TargetKind,
|
||||
@@ -52,11 +52,12 @@ use crate::types::diagnostic::{TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder
|
||||
use crate::types::mro::MroErrorKind;
|
||||
use crate::types::unpacker::{UnpackResult, Unpacker};
|
||||
use crate::types::{
|
||||
bindings_ty, builtins_symbol, declarations_ty, global_symbol, symbol, typing_extensions_symbol,
|
||||
Boundness, Class, ClassLiteralType, FunctionType, InstanceType, IntersectionBuilder,
|
||||
IntersectionType, IterationOutcome, KnownClass, KnownFunction, KnownInstanceType,
|
||||
MetaclassCandidate, MetaclassErrorKind, SliceLiteralType, Symbol, Truthiness, TupleType, Type,
|
||||
TypeArrayDisplay, TypeVarBoundOrConstraints, TypeVarInstance, UnionBuilder, UnionType,
|
||||
bindings_ty, builtins_symbol, declarations_ty, global_symbol, symbol, todo_type,
|
||||
typing_extensions_symbol, Boundness, Class, ClassLiteralType, FunctionType, InstanceType,
|
||||
IntersectionBuilder, IntersectionType, IterationOutcome, KnownClass, KnownFunction,
|
||||
KnownInstanceType, MetaclassCandidate, MetaclassErrorKind, SliceLiteralType, Symbol,
|
||||
Truthiness, TupleType, Type, TypeAliasType, TypeArrayDisplay, TypeVarBoundOrConstraints,
|
||||
TypeVarInstance, UnionBuilder, UnionType,
|
||||
};
|
||||
use crate::unpack::Unpack;
|
||||
use crate::util::subscript::{PyIndex, PySlice};
|
||||
@@ -181,7 +182,7 @@ fn infer_unpack_types<'db>(db: &'db dyn Db, unpack: Unpack<'db>) -> UnpackResult
|
||||
let scope = unpack.scope(db);
|
||||
|
||||
let result = infer_expression_types(db, value);
|
||||
let value_ty = result.expression_ty(value.node_ref(db).scoped_ast_id(db, scope));
|
||||
let value_ty = result.expression_ty(value.node_ref(db).scoped_expression_id(db, scope));
|
||||
|
||||
let mut unpacker = Unpacker::new(db, file);
|
||||
unpacker.unpack(unpack.target(db), value_ty, scope);
|
||||
@@ -409,7 +410,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
#[track_caller]
|
||||
fn expression_ty(&self, expr: &ast::Expr) -> Type<'db> {
|
||||
self.types
|
||||
.expression_ty(expr.scoped_ast_id(self.db, self.scope()))
|
||||
.expression_ty(expr.scoped_expression_id(self.db, self.scope()))
|
||||
}
|
||||
|
||||
/// Infers types in the given [`InferenceRegion`].
|
||||
@@ -438,6 +439,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
NodeWithScopeKind::FunctionTypeParameters(function) => {
|
||||
self.infer_function_type_params(function.node());
|
||||
}
|
||||
NodeWithScopeKind::TypeAliasTypeParameters(type_alias) => {
|
||||
self.infer_type_alias_type_params(type_alias.node());
|
||||
}
|
||||
NodeWithScopeKind::TypeAlias(type_alias) => {
|
||||
self.infer_type_alias(type_alias.node());
|
||||
}
|
||||
NodeWithScopeKind::ListComprehension(comprehension) => {
|
||||
self.infer_list_comprehension_expression_scope(comprehension.node());
|
||||
}
|
||||
@@ -605,6 +612,9 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_function_definition(function.node(), definition);
|
||||
}
|
||||
DefinitionKind::Class(class) => self.infer_class_definition(class.node(), definition),
|
||||
DefinitionKind::TypeAlias(type_alias) => {
|
||||
self.infer_type_alias_definition(type_alias.node(), definition);
|
||||
}
|
||||
DefinitionKind::Import(import) => {
|
||||
self.infer_import_definition(import.node(), definition);
|
||||
}
|
||||
@@ -847,6 +857,19 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_parameters(&function.parameters);
|
||||
}
|
||||
|
||||
fn infer_type_alias_type_params(&mut self, type_alias: &ast::StmtTypeAlias) {
|
||||
let type_params = type_alias
|
||||
.type_params
|
||||
.as_ref()
|
||||
.expect("type alias type params scope without type params");
|
||||
|
||||
self.infer_type_parameters(type_params);
|
||||
}
|
||||
|
||||
fn infer_type_alias(&mut self, type_alias: &ast::StmtTypeAlias) {
|
||||
self.infer_annotation_expression(&type_alias.value, DeferredExpressionState::Deferred);
|
||||
}
|
||||
|
||||
fn infer_function_body(&mut self, function: &ast::StmtFunctionDef) {
|
||||
self.infer_body(&function.body);
|
||||
}
|
||||
@@ -954,7 +977,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
let function_ty = Type::FunctionLiteral(FunctionType::new(
|
||||
self.db,
|
||||
&*name.id,
|
||||
&name.id,
|
||||
function_kind,
|
||||
body_scope,
|
||||
decorator_tys,
|
||||
@@ -1027,7 +1050,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
) {
|
||||
// TODO(dhruvmanila): Annotation expression is resolved at the enclosing scope, infer the
|
||||
// parameter type from there
|
||||
let annotated_ty = Type::Todo;
|
||||
let annotated_ty = todo_type!("function parameter type");
|
||||
if parameter.annotation.is_some() {
|
||||
self.add_declaration_with_binding(
|
||||
parameter.into(),
|
||||
@@ -1069,7 +1092,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
let maybe_known_class = KnownClass::try_from_file(self.db, self.file, name);
|
||||
|
||||
let class = Class::new(self.db, &*name.id, body_scope, maybe_known_class);
|
||||
let class = Class::new(self.db, &name.id, body_scope, maybe_known_class);
|
||||
let class_ty = Type::class_literal(class);
|
||||
|
||||
self.add_declaration_with_binding(class_node.into(), definition, class_ty, class_ty);
|
||||
@@ -1107,6 +1130,33 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_type_alias_definition(
|
||||
&mut self,
|
||||
type_alias: &ast::StmtTypeAlias,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
self.infer_expression(&type_alias.name);
|
||||
|
||||
let rhs_scope = self
|
||||
.index
|
||||
.node_scope(NodeWithScopeRef::TypeAlias(type_alias))
|
||||
.to_scope_id(self.db, self.file);
|
||||
|
||||
let type_alias_ty =
|
||||
Type::KnownInstance(KnownInstanceType::TypeAliasType(TypeAliasType::new(
|
||||
self.db,
|
||||
&type_alias.name.as_name_expr().unwrap().id,
|
||||
rhs_scope,
|
||||
)));
|
||||
|
||||
self.add_declaration_with_binding(
|
||||
type_alias.into(),
|
||||
definition,
|
||||
type_alias_ty,
|
||||
type_alias_ty,
|
||||
);
|
||||
}
|
||||
|
||||
fn infer_if_statement(&mut self, if_statement: &ast::StmtIf) {
|
||||
let ast::StmtIf {
|
||||
range: _,
|
||||
@@ -1215,9 +1265,10 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
is_async,
|
||||
);
|
||||
|
||||
self.types
|
||||
.expressions
|
||||
.insert(target.scoped_ast_id(self.db, self.scope()), target_ty);
|
||||
self.types.expressions.insert(
|
||||
target.scoped_expression_id(self.db, self.scope()),
|
||||
target_ty,
|
||||
);
|
||||
self.add_binding(target.into(), definition, target_ty);
|
||||
}
|
||||
|
||||
@@ -1235,7 +1286,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
) -> Type<'db> {
|
||||
// TODO: Handle async with statements (they use `aenter` and `aexit`)
|
||||
if is_async {
|
||||
return Type::Todo;
|
||||
return todo_type!("async with statement");
|
||||
}
|
||||
|
||||
let context_manager_ty = context_expression_ty.to_meta_type(self.db);
|
||||
@@ -1385,12 +1436,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.db,
|
||||
tuple.elements(self.db).iter().map(|ty| {
|
||||
ty.into_class_literal()
|
||||
.map_or(Type::Todo, |ClassLiteralType { class }| {
|
||||
.map_or(todo_type!(), |ClassLiteralType { class }| {
|
||||
Type::instance(class)
|
||||
})
|
||||
}),
|
||||
),
|
||||
_ => Type::Todo,
|
||||
_ => todo_type!("exception type"),
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1460,7 +1511,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
default,
|
||||
} = node;
|
||||
self.infer_optional_expression(default.as_deref());
|
||||
self.add_declaration_with_binding(node.into(), definition, Type::Todo, Type::Todo);
|
||||
self.add_declaration_with_binding(node.into(), definition, todo_type!(), todo_type!());
|
||||
}
|
||||
|
||||
fn infer_typevartuple_definition(
|
||||
@@ -1474,7 +1525,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
default,
|
||||
} = node;
|
||||
self.infer_optional_expression(default.as_deref());
|
||||
self.add_declaration_with_binding(node.into(), definition, Type::Todo, Type::Todo);
|
||||
self.add_declaration_with_binding(node.into(), definition, todo_type!(), todo_type!());
|
||||
}
|
||||
|
||||
fn infer_match_statement(&mut self, match_statement: &ast::StmtMatch) {
|
||||
@@ -1509,7 +1560,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
// against the subject expression type (which we can query via `infer_expression_types`)
|
||||
// and extract the type at the `index` position if the pattern matches. This will be
|
||||
// similar to the logic in `self.infer_assignment_definition`.
|
||||
self.add_binding(pattern.into(), definition, Type::Todo);
|
||||
self.add_binding(pattern.into(), definition, todo_type!());
|
||||
}
|
||||
|
||||
fn infer_match_pattern(&mut self, pattern: &ast::Pattern) {
|
||||
@@ -1607,7 +1658,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_standalone_expression(value);
|
||||
|
||||
let value_ty = self.expression_ty(value);
|
||||
let name_ast_id = name.scoped_ast_id(self.db, self.scope());
|
||||
let name_ast_id = name.scoped_expression_id(self.db, self.scope());
|
||||
|
||||
let target_ty = match assignment.target() {
|
||||
TargetKind::Sequence(unpack) => {
|
||||
@@ -1828,17 +1879,8 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_augmented_op(assignment, target_type, value_type)
|
||||
}
|
||||
|
||||
fn infer_type_alias_statement(&mut self, type_alias_statement: &ast::StmtTypeAlias) {
|
||||
let ast::StmtTypeAlias {
|
||||
range: _,
|
||||
name,
|
||||
type_params: _,
|
||||
value,
|
||||
} = type_alias_statement;
|
||||
self.infer_expression(value);
|
||||
self.infer_expression(name);
|
||||
|
||||
// TODO: properly handle generic type aliases, which need their own annotation scope
|
||||
fn infer_type_alias_statement(&mut self, node: &ast::StmtTypeAlias) {
|
||||
self.infer_definition(node);
|
||||
}
|
||||
|
||||
fn infer_for_statement(&mut self, for_statement: &ast::StmtFor) {
|
||||
@@ -1873,8 +1915,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let iterable_ty = self.infer_standalone_expression(iterable);
|
||||
|
||||
let loop_var_value_ty = if is_async {
|
||||
// TODO(Alex): async iterables/iterators!
|
||||
Type::Todo
|
||||
todo_type!("async iterables/iterators")
|
||||
} else {
|
||||
iterable_ty
|
||||
.iterate(self.db)
|
||||
@@ -2202,7 +2243,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ast::Expr::Await(await_expression) => self.infer_await_expression(await_expression),
|
||||
ast::Expr::IpyEscapeCommand(_) => {
|
||||
// TODO Implement Ipy escape command support
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
};
|
||||
|
||||
@@ -2211,18 +2252,14 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ty
|
||||
}
|
||||
|
||||
fn store_expression_type(
|
||||
&mut self,
|
||||
expression: &impl HasScopedAstId<Id = ScopedExpressionId>,
|
||||
ty: Type<'db>,
|
||||
) {
|
||||
fn store_expression_type(&mut self, expression: &impl HasScopedExpressionId, ty: Type<'db>) {
|
||||
if self.deferred_state.in_string_annotation() {
|
||||
// Avoid storing the type of expressions that are part of a string annotation because
// the expression ids don't exist in the semantic index. Instead, we'll store the type
// on the string expression itself that represents the annotation.
|
||||
return;
|
||||
}
|
||||
let expr_id = expression.scoped_ast_id(self.db, self.scope());
|
||||
let expr_id = expression.scoped_expression_id(self.db, self.scope());
|
||||
let previous = self.types.expressions.insert(expr_id, ty);
|
||||
assert_eq!(previous, None);
|
||||
}
|
||||
@@ -2288,6 +2325,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
} = expression;
|
||||
let ty = self.infer_expression(expression);
|
||||
|
||||
if let Some(ref format_spec) = format_spec {
|
||||
for element in format_spec.elements.expressions() {
|
||||
self.infer_expression(&element.expression);
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: handle format specifiers by calling a method
|
||||
// (`Type::format`?) that handles the `__format__` method.
|
||||
// Conversion flags should be handled before calling `__format__`.
|
||||
@@ -2394,7 +2437,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO generator type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> {
|
||||
@@ -2407,7 +2450,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO list type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> {
|
||||
@@ -2421,7 +2464,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO dict type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
|
||||
@@ -2434,7 +2477,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO set type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) {
|
||||
@@ -2541,24 +2584,25 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.parent_scope_id(self.scope().file_scope_id(self.db))
|
||||
.expect("A comprehension should never be the top-level scope")
|
||||
.to_scope_id(self.db, self.file);
|
||||
result.expression_ty(iterable.scoped_ast_id(self.db, lookup_scope))
|
||||
result.expression_ty(iterable.scoped_expression_id(self.db, lookup_scope))
|
||||
} else {
|
||||
self.extend(result);
|
||||
result.expression_ty(iterable.scoped_ast_id(self.db, self.scope()))
|
||||
result.expression_ty(iterable.scoped_expression_id(self.db, self.scope()))
|
||||
};
|
||||
|
||||
let target_ty = if is_async {
|
||||
// TODO: async iterables/iterators! -- Alex
|
||||
Type::Todo
|
||||
todo_type!("async iterables/iterators")
|
||||
} else {
|
||||
iterable_ty
|
||||
.iterate(self.db)
|
||||
.unwrap_with_diagnostic(iterable.into(), &mut self.diagnostics)
|
||||
};
|
||||
|
||||
self.types
|
||||
.expressions
|
||||
.insert(target.scoped_ast_id(self.db, self.scope()), target_ty);
|
||||
self.types.expressions.insert(
|
||||
target.scoped_expression_id(self.db, self.scope()),
|
||||
target_ty,
|
||||
);
|
||||
self.add_binding(target.into(), definition, target_ty);
|
||||
}
|
||||
|
||||
@@ -2638,7 +2682,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
|
||||
// TODO function type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_call_expression(&mut self, call_expression: &ast::ExprCall) -> Type<'db> {
|
||||
@@ -2669,7 +2713,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.unwrap_with_diagnostic(value.as_ref().into(), &mut self.diagnostics);
|
||||
|
||||
// TODO
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_yield_expression(&mut self, yield_expression: &ast::ExprYield) -> Type<'db> {
|
||||
@@ -2678,7 +2722,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_optional_expression(value.as_deref());
|
||||
|
||||
// TODO awaitable type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_yield_from_expression(&mut self, yield_from: &ast::ExprYieldFrom) -> Type<'db> {
|
||||
@@ -2690,7 +2734,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.unwrap_with_diagnostic(value.as_ref().into(), &mut self.diagnostics);
|
||||
|
||||
// TODO get type from `ReturnType` of generator
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
fn infer_await_expression(&mut self, await_expression: &ast::ExprAwait) -> Type<'db> {
|
||||
@@ -2699,7 +2743,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_expression(value);
|
||||
|
||||
// TODO awaitable type
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
/// Look up a name reference that isn't bound in the local scope.
|
||||
@@ -2975,7 +3019,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::Unknown
|
||||
}
|
||||
}
|
||||
_ => Type::Todo, // TODO other unary op types
|
||||
_ => todo_type!(), // TODO other unary op types
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3223,7 +3267,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
(left, Type::BooleanLiteral(bool_value), op) => {
|
||||
self.infer_binary_expression_type(left, Type::IntLiteral(i64::from(bool_value)), op)
|
||||
}
|
||||
_ => Some(Type::Todo), // TODO
|
||||
_ => Some(todo_type!()), // TODO
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3639,16 +3683,16 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let lhs_elements = lhs.elements(self.db);
|
||||
let rhs_elements = rhs.elements(self.db);
|
||||
|
||||
let mut lexicographic_type_comparison =
|
||||
|op| self.infer_lexicographic_type_comparison(lhs_elements, op, rhs_elements);
|
||||
let mut tuple_rich_comparison =
|
||||
|op| self.infer_tuple_rich_comparison(lhs_elements, op, rhs_elements);
|
||||
|
||||
match op {
|
||||
ast::CmpOp::Eq => lexicographic_type_comparison(RichCompareOperator::Eq),
|
||||
ast::CmpOp::NotEq => lexicographic_type_comparison(RichCompareOperator::Ne),
|
||||
ast::CmpOp::Lt => lexicographic_type_comparison(RichCompareOperator::Lt),
|
||||
ast::CmpOp::LtE => lexicographic_type_comparison(RichCompareOperator::Le),
|
||||
ast::CmpOp::Gt => lexicographic_type_comparison(RichCompareOperator::Gt),
|
||||
ast::CmpOp::GtE => lexicographic_type_comparison(RichCompareOperator::Ge),
|
||||
ast::CmpOp::Eq => tuple_rich_comparison(RichCompareOperator::Eq),
|
||||
ast::CmpOp::NotEq => tuple_rich_comparison(RichCompareOperator::Ne),
|
||||
ast::CmpOp::Lt => tuple_rich_comparison(RichCompareOperator::Lt),
|
||||
ast::CmpOp::LtE => tuple_rich_comparison(RichCompareOperator::Le),
|
||||
ast::CmpOp::Gt => tuple_rich_comparison(RichCompareOperator::Gt),
|
||||
ast::CmpOp::GtE => tuple_rich_comparison(RichCompareOperator::Ge),
|
||||
ast::CmpOp::In | ast::CmpOp::NotIn => {
|
||||
let mut eq_count = 0usize;
|
||||
let mut not_eq_count = 0usize;
|
||||
@@ -3661,7 +3705,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
).expect("infer_binary_type_comparison should never return None for `CmpOp::Eq`");
|
||||
|
||||
match eq_result {
|
||||
Type::Todo => return Ok(Type::Todo),
|
||||
todo @ Type::Todo(_) => return Ok(todo),
|
||||
ty => match ty.bool(self.db) {
|
||||
Truthiness::AlwaysTrue => eq_count += 1,
|
||||
Truthiness::AlwaysFalse => not_eq_count += 1,
|
||||
@@ -3681,13 +3725,12 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ast::CmpOp::Is | ast::CmpOp::IsNot => {
|
||||
// - `[ast::CmpOp::Is]`: returns `false` if the elements are definitely unequal, otherwise `bool`
|
||||
// - `[ast::CmpOp::IsNot]`: returns `true` if the elements are definitely unequal, otherwise `bool`
|
||||
let eq_result = lexicographic_type_comparison(RichCompareOperator::Eq)
|
||||
.expect(
|
||||
let eq_result = tuple_rich_comparison(RichCompareOperator::Eq).expect(
|
||||
"infer_binary_type_comparison should never return None for `CmpOp::Eq`",
|
||||
);
|
||||
|
||||
Ok(match eq_result {
|
||||
Type::Todo => Type::Todo,
|
||||
todo @ Type::Todo(_) => todo,
|
||||
ty => match ty.bool(self.db) {
|
||||
Truthiness::AlwaysFalse => Type::BooleanLiteral(op.is_is_not()),
|
||||
_ => KnownClass::Bool.to_instance(self.db),
|
||||
@@ -3742,58 +3785,85 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
// TODO: handle more types
|
||||
_ => match op {
|
||||
ast::CmpOp::Is | ast::CmpOp::IsNot => Ok(KnownClass::Bool.to_instance(self.db)),
|
||||
_ => Ok(Type::Todo),
|
||||
_ => Ok(todo_type!()),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Performs lexicographic comparison between two slices of types.
|
||||
/// Simulates rich comparison between tuples and returns the inferred result.
|
||||
/// This performs a lexicographic comparison, returning a union of all possible return types that could result from the comparison.
|
||||
///
|
||||
/// For lexicographic comparison, elements from both slices are compared pairwise using
|
||||
/// `infer_binary_type_comparison`. If a conclusive result cannot be determined as a `BooleanLiteral`,
|
||||
/// it returns `bool`. Returns `None` if the comparison is not supported.
|
||||
fn infer_lexicographic_type_comparison(
|
||||
/// This is based on CPython's `tuple_richcompare`;
/// see `<https://github.com/python/cpython/blob/9d6366b60d01305fc5e45100e0cd13e358aa397d/Objects/tupleobject.c#L637>`
|
||||
fn infer_tuple_rich_comparison(
|
||||
&mut self,
|
||||
left: &[Type<'db>],
|
||||
op: RichCompareOperator,
|
||||
right: &[Type<'db>],
|
||||
) -> Result<Type<'db>, CompareUnsupportedError<'db>> {
|
||||
// Compare paired elements from left and right slices
|
||||
for (l_ty, r_ty) in left.iter().copied().zip(right.iter().copied()) {
|
||||
let eq_result = self
|
||||
let left_iter = left.iter().copied();
|
||||
let right_iter = right.iter().copied();
|
||||
|
||||
let mut builder = UnionBuilder::new(self.db);
|
||||
|
||||
for (l_ty, r_ty) in left_iter.zip(right_iter) {
|
||||
let pairwise_eq_result = self
|
||||
.infer_binary_type_comparison(l_ty, ast::CmpOp::Eq, r_ty)
|
||||
.expect("infer_binary_type_comparison should never return None for `CmpOp::Eq`");
|
||||
|
||||
match eq_result {
|
||||
match pairwise_eq_result {
|
||||
// If propagation is required, return the result as is
|
||||
Type::Todo => return Ok(Type::Todo),
|
||||
todo @ Type::Todo(_) => return Ok(todo),
|
||||
ty => match ty.bool(self.db) {
|
||||
// Types are equal, continue to the next pair
|
||||
// - AlwaysTrue : Continue to the next pair for lexicographic comparison
|
||||
Truthiness::AlwaysTrue => continue,
|
||||
// Types are not equal, perform the specified comparison and return the result
|
||||
Truthiness::AlwaysFalse => {
|
||||
return self.infer_binary_type_comparison(l_ty, op.into(), r_ty)
|
||||
// - AlwaysFalse:
|
||||
// Lexicographic comparisons will always terminate with this pair.
|
||||
// Complete the comparison and return the result.
|
||||
// - Ambiguous:
|
||||
// Lexicographic comparisons might continue to the next pair (if eq_result is true),
|
||||
// or terminate here (if eq_result is false).
|
||||
// To account for cases where the comparison terminates here, add the pairwise comparison result to the union builder.
|
||||
eq_truthiness @ (Truthiness::AlwaysFalse | Truthiness::Ambiguous) => {
|
||||
let pairwise_compare_result = match op {
|
||||
RichCompareOperator::Lt
|
||||
| RichCompareOperator::Le
|
||||
| RichCompareOperator::Gt
|
||||
| RichCompareOperator::Ge => {
|
||||
self.infer_binary_type_comparison(l_ty, op.into(), r_ty)?
|
||||
}
|
||||
// For `==` and `!=`, we already figure out the result from `pairwise_eq_result`
|
||||
// NOTE: The CPython implementation does not account for non-boolean return types
|
||||
// or cases where `!=` is not the negation of `==`; we do not consider these cases either.
|
||||
RichCompareOperator::Eq => Type::BooleanLiteral(false),
|
||||
RichCompareOperator::Ne => Type::BooleanLiteral(true),
|
||||
};
|
||||
|
||||
builder = builder.add(pairwise_compare_result);
|
||||
|
||||
if eq_truthiness.is_ambiguous() {
|
||||
continue;
|
||||
}
|
||||
|
||||
return Ok(builder.build());
|
||||
}
|
||||
// If the intermediate result is ambiguous, we cannot determine the final result as BooleanLiteral.
|
||||
// In this case, we simply return a bool instance.
|
||||
Truthiness::Ambiguous => return Ok(KnownClass::Bool.to_instance(self.db)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// At this point, the lengths of the two slices may be different, but the prefix of
// left and right slices is entirely identical.
// We return a comparison of the slice lengths based on the operator.
// if no more items to compare, we just compare sizes
let (left_len, right_len) = (left.len(), right.len());

Ok(Type::BooleanLiteral(match op {
builder = builder.add(Type::BooleanLiteral(match op {
RichCompareOperator::Eq => left_len == right_len,
RichCompareOperator::Ne => left_len != right_len,
RichCompareOperator::Lt => left_len < right_len,
RichCompareOperator::Le => left_len <= right_len,
RichCompareOperator::Gt => left_len > right_len,
RichCompareOperator::Ge => left_len >= right_len,
}))
}));

Ok(builder.build())
}
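As a concrete illustration of the lexicographic behaviour implemented above (mirroring CPython's `tuple_richcompare`), a hypothetical Python snippet:

# Pairwise equality drives the comparison from left to right:
assert (1, 2, 3) < (1, 2, 4)   # the first unequal pair (3, 4) decides the result
assert (1, 2) < (1, 2, 0)      # all shared pairs are equal, so lengths are compared
assert (1, "a") != (1, "b")

def f(x: int, y: int) -> bool:
    # Element equality between two plain `int`s is ambiguous at type-check time,
    # so the inference above unions the possible pairwise outcomes and falls back to `bool`.
    return (x, 1) < (y, 2)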
fn infer_subscript_expression(&mut self, subscript: &ast::ExprSubscript) -> Type<'db> {
|
||||
@@ -4159,6 +4229,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
"annotation-f-string",
|
||||
format_args!("Type expressions cannot use f-strings"),
|
||||
);
|
||||
self.infer_fstring_expression(fstring);
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
@@ -4233,7 +4304,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_name_expression(name).in_type_expression(self.db)
|
||||
}
|
||||
ast::ExprContext::Invalid => Type::Unknown,
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => Type::Todo,
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(),
|
||||
},
|
||||
|
||||
ast::Expr::Attribute(attribute_expression) => match attribute_expression.ctx {
|
||||
@@ -4241,7 +4312,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.infer_attribute_expression(attribute_expression)
|
||||
.in_type_expression(self.db),
|
||||
ast::ExprContext::Invalid => Type::Unknown,
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => Type::Todo,
|
||||
ast::ExprContext::Store | ast::ExprContext::Del => todo_type!(),
|
||||
},
|
||||
|
||||
ast::Expr::NoneLiteral(_literal) => Type::none(self.db),
|
||||
@@ -4251,14 +4322,14 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
// TODO: an Ellipsis literal *on its own* does not have any meaning in annotation
|
||||
// expressions, but is meaningful in the context of a number of special forms.
|
||||
ast::Expr::EllipsisLiteral(_literal) => Type::Todo,
|
||||
ast::Expr::EllipsisLiteral(_literal) => todo_type!(),
|
||||
|
||||
// Other literals do not have meaningful values in the annotation expression context.
|
||||
// However, we will want to handle these differently when working with special forms,
|
||||
// since (e.g.) `123` is not valid in an annotation expression but `Literal[123]` is.
|
||||
ast::Expr::BytesLiteral(_literal) => Type::Todo,
|
||||
ast::Expr::NumberLiteral(_literal) => Type::Todo,
|
||||
ast::Expr::BooleanLiteral(_literal) => Type::Todo,
|
||||
ast::Expr::BytesLiteral(_literal) => todo_type!(),
|
||||
ast::Expr::NumberLiteral(_literal) => todo_type!(),
|
||||
ast::Expr::BooleanLiteral(_literal) => todo_type!(),
|
||||
|
||||
ast::Expr::Subscript(subscript) => {
|
||||
let ast::ExprSubscript {
|
||||
@@ -4301,9 +4372,13 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
// TODO PEP 646
|
||||
ast::Expr::Starred(starred) => {
|
||||
self.infer_starred_expression(starred);
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
|
||||
// Avoid inferring the types of invalid type expressions that have been parsed from a
|
||||
// string annotation, as they are not present in the semantic index.
|
||||
_ if self.deferred_state.in_string_annotation() => Type::Unknown,
|
||||
|
||||
// Forms which are invalid in the context of annotation expressions: we infer their
|
||||
// nested expressions as normal expressions, but the type of the top-level expression is
|
||||
// always `Type::Unknown` in these cases.
|
||||
@@ -4387,7 +4462,6 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.infer_slice_expression(slice);
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
ast::Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
|
||||
}
|
||||
}
|
||||
@@ -4445,16 +4519,23 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
element_types.push(element_ty);
|
||||
}
|
||||
|
||||
if return_todo {
|
||||
Type::Todo
|
||||
let ty = if return_todo {
|
||||
todo_type!("full tuple[...] support")
|
||||
} else {
|
||||
Type::tuple(self.db, &element_types)
|
||||
}
|
||||
};
|
||||
|
||||
// Here, we store the type for the inner `int, str` tuple-expression,
|
||||
// while the type for the outer `tuple[int, str]` slice-expression is
|
||||
// stored in the surrounding `infer_type_expression` call:
|
||||
self.store_expression_type(tuple_slice, ty);
|
||||
|
||||
ty
|
||||
}
|
||||
single_element => {
|
||||
let single_element_ty = self.infer_type_expression(single_element);
|
||||
if element_could_alter_type_of_whole_tuple(single_element, single_element_ty) {
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
} else {
|
||||
Type::tuple(self.db, &[single_element_ty])
|
||||
}
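The annotations this branch handles look roughly like the following Python sketch (the homogeneous `tuple[int, ...]` form is presumably one of the cases that still falls back to `@Todo`):

def pair() -> tuple[int, str]:   # the inner `int, str` and the outer slice both get a stored type
    return (1, "one")

def single() -> tuple[int]:      # single-element form
    return (1,)

def many() -> tuple[int, ...]:   # homogeneous form
    return (1, 2, 3)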
@@ -4465,18 +4546,18 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
/// Given the slice of a `type[]` annotation, return the type that the annotation represents
|
||||
fn infer_subclass_of_type_expression(&mut self, slice: &ast::Expr) -> Type<'db> {
|
||||
match slice {
|
||||
ast::Expr::Name(name) => {
|
||||
let name_ty = self.infer_name_expression(name);
|
||||
ast::Expr::Name(_) => {
|
||||
let name_ty = self.infer_expression(slice);
|
||||
if let Some(ClassLiteralType { class }) = name_ty.into_class_literal() {
|
||||
Type::subclass_of(class)
|
||||
} else {
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
}
|
||||
// TODO: attributes, unions, subscripts, etc.
|
||||
_ => {
|
||||
self.infer_type_expression(slice);
|
||||
Type::Todo
|
||||
todo_type!()
|
||||
}
|
||||
}
|
||||
}
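A `type[]` annotation in the supported `ast::Expr::Name` form, next to one of the not-yet-supported forms from the TODO above, might look like this hypothetical Python sketch (Python 3.10+ for the `A | B` union syntax):

class A: ...
class B(A): ...

def make(cls: type[A]) -> A:             # `type[A]` becomes a subclass-of-A type; `make(B)` is fine
    return cls()

def make_either(cls: type[A | B]) -> A:  # unions inside `type[...]` are still TODO
    return cls()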
@@ -4495,20 +4576,21 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
match value_ty {
|
||||
Type::KnownInstance(known_instance) => {
|
||||
self.infer_parameterized_known_instance_type_expression(known_instance, slice)
|
||||
self.infer_parameterized_known_instance_type_expression(subscript, known_instance)
|
||||
}
|
||||
_ => {
|
||||
self.infer_type_expression(slice);
|
||||
Type::Todo // TODO: generics
|
||||
todo_type!("generics")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_parameterized_known_instance_type_expression(
|
||||
&mut self,
|
||||
subscript: &ast::ExprSubscript,
|
||||
known_instance: KnownInstanceType,
|
||||
parameters: &ast::Expr,
|
||||
) -> Type<'db> {
|
||||
let parameters = &*subscript.slice;
|
||||
match known_instance {
|
||||
KnownInstanceType::Literal => match self.infer_literal_parameter_type(parameters) {
|
||||
Ok(ty) => ty,
|
||||
@@ -4526,7 +4608,40 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::Unknown
|
||||
}
|
||||
},
|
||||
KnownInstanceType::TypeVar(_) => Type::Todo,
|
||||
KnownInstanceType::Optional => {
|
||||
let param_type = self.infer_type_expression(parameters);
|
||||
UnionType::from_elements(self.db, [param_type, Type::none(self.db)])
|
||||
}
|
||||
KnownInstanceType::Union => match parameters {
|
||||
ast::Expr::Tuple(t) => {
|
||||
let union_ty = UnionType::from_elements(
|
||||
self.db,
|
||||
t.iter().map(|elt| self.infer_type_expression(elt)),
|
||||
);
|
||||
self.store_expression_type(parameters, union_ty);
|
||||
union_ty
|
||||
}
|
||||
_ => self.infer_type_expression(parameters),
|
||||
},
|
||||
KnownInstanceType::TypeVar(_) => {
|
||||
self.infer_type_expression(parameters);
|
||||
todo_type!()
|
||||
}
|
||||
KnownInstanceType::TypeAliasType(_) => {
|
||||
self.infer_type_expression(parameters);
|
||||
todo_type!("generic type alias")
|
||||
}
|
||||
KnownInstanceType::NoReturn | KnownInstanceType::Never => {
|
||||
self.diagnostics.add(
|
||||
subscript.into(),
|
||||
"invalid-type-parameter",
|
||||
format_args!(
|
||||
"Type `{}` expected no type parameter",
|
||||
known_instance.repr(self.db)
|
||||
),
|
||||
);
|
||||
Type::Unknown
|
||||
}
|
||||
}
|
||||
}
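A Python sketch of the parameterizations handled in this match (hypothetical names; the `from __future__` import keeps the invalid `NoReturn[int]` example from being evaluated at runtime):

from __future__ import annotations

from typing import NoReturn, Optional, Union

x: Optional[int]            # inferred as `int | None`
y: Union[int, str, None]    # union of the tuple elements
z: Union[int]               # a single parameter is just that type

def bad() -> NoReturn[int]:  # error: `NoReturn` expected no type parameter
    raise RuntimeError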
@@ -4539,8 +4654,15 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ast::Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
|
||||
let value_ty = self.infer_expression(value);
|
||||
if matches!(value_ty, Type::KnownInstance(KnownInstanceType::Literal)) {
|
||||
self.infer_literal_parameter_type(slice)?
|
||||
let ty = self.infer_literal_parameter_type(slice)?;
|
||||
|
||||
// This branch deals with annotations such as `Literal[Literal[1]]`.
|
||||
// Here, we store the type for the inner `Literal[1]` expression:
|
||||
self.store_expression_type(parameters, ty);
|
||||
ty
|
||||
} else {
|
||||
self.store_expression_type(parameters, Type::Unknown);
|
||||
|
||||
return Err(vec![parameters]);
|
||||
}
|
||||
}
|
||||
@@ -4558,15 +4680,27 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
}
|
||||
}
|
||||
if errors.is_empty() {
|
||||
builder.build()
|
||||
let union_type = builder.build();
|
||||
|
||||
// This branch deals with annotations such as `Literal[1, 2]`. Here, we
|
||||
// store the type for the inner `1, 2` tuple-expression:
|
||||
self.store_expression_type(parameters, union_type);
|
||||
|
||||
union_type
|
||||
} else {
|
||||
self.store_expression_type(parameters, Type::Unknown);
|
||||
|
||||
return Err(errors);
|
||||
}
|
||||
}
|
||||
|
||||
ast::Expr::StringLiteral(literal) => self.infer_string_literal_expression(literal),
|
||||
ast::Expr::BytesLiteral(literal) => self.infer_bytes_literal_expression(literal),
|
||||
ast::Expr::BooleanLiteral(literal) => self.infer_boolean_literal_expression(literal),
|
||||
literal @ (ast::Expr::StringLiteral(_)
|
||||
| ast::Expr::BytesLiteral(_)
|
||||
| ast::Expr::BooleanLiteral(_)
|
||||
| ast::Expr::NoneLiteral(_)) => self.infer_expression(literal),
|
||||
literal @ ast::Expr::NumberLiteral(ref number) if number.value.is_int() => {
|
||||
self.infer_expression(literal)
|
||||
}
|
||||
// For enum values
|
||||
ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
|
||||
let value_ty = self.infer_expression(value);
|
||||
@@ -4576,7 +4710,6 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::Unknown)
|
||||
}
|
||||
ast::Expr::NoneLiteral(_) => Type::none(self.db),
|
||||
// for negative and positive numbers
|
||||
ast::Expr::UnaryOp(ref u)
|
||||
if matches!(u.op, UnaryOp::USub | UnaryOp::UAdd)
|
||||
@@ -4584,10 +4717,8 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
{
|
||||
self.infer_unary_expression(u)
|
||||
}
|
||||
ast::Expr::NumberLiteral(ref number) if number.value.is_int() => {
|
||||
self.infer_number_literal_expression(number)
|
||||
}
|
||||
_ => {
|
||||
self.infer_expression(parameters);
|
||||
return Err(vec![parameters]);
|
||||
}
|
||||
})
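The `Literal[...]` parameters accepted by this match cover the following shapes; a hypothetical Python sketch (the last line is the kind of parameter that ends up in the error list):

from enum import Enum
from typing import Literal

class Color(Enum):
    RED = 1

ok1: Literal[1, 2, "three", b"four", True, None]  # literal values form a union
ok2: Literal[Literal[1], 2]                       # nested Literal parameters are allowed
ok3: Literal[-1, +1]                              # unary +/- on integer literals
ok4: Literal[Color.RED]                           # enum members via attribute access
bad: Literal[3.14]                                # rejected: not a valid Literal parameter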
@@ -4755,6 +4886,7 @@ enum ModuleNameResolutionError {
|
||||
///
|
||||
/// If the formatted string contains an expression (with a representation unknown at compile time),
|
||||
/// infers an instance of `builtins.str`.
|
||||
#[derive(Debug)]
|
||||
struct StringPartsCollector {
|
||||
concatenated: Option<String>,
|
||||
expression: bool,
|
||||
@@ -4877,8 +5009,8 @@ fn perform_membership_test_comparison<'db>(
|
||||
|
||||
compare_result_opt
|
||||
.map(|ty| {
|
||||
if matches!(ty, Type::Todo) {
|
||||
return Type::Todo;
|
||||
if matches!(ty, Type::Todo(_)) {
|
||||
return ty;
|
||||
}
|
||||
|
||||
match op {
|
||||
@@ -4899,7 +5031,7 @@ mod tests {
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::python_version::{self, PythonVersion};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::FileScopeId;
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
@@ -4909,10 +5041,11 @@ mod tests {
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::testing::assert_function_query_was_not_run;
|
||||
use test_case::test_case;
|
||||
|
||||
use super::*;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
fn setup_db_with_python_version(python_version: PythonVersion) -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
@@ -4923,7 +5056,7 @@ mod tests {
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
target_version: python_version,
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
@@ -4932,6 +5065,10 @@ mod tests {
|
||||
db
|
||||
}
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
setup_db_with_python_version(PythonVersion::default())
|
||||
}
|
||||
|
||||
fn setup_db_with_custom_typeshed<'a>(
|
||||
typeshed: &str,
|
||||
files: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||
@@ -5203,9 +5340,10 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ellipsis_type() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
#[test_case(PythonVersion::PY39, "ellipsis")]
|
||||
#[test_case(PythonVersion::PY310, "EllipsisType")]
|
||||
fn ellipsis_type(version: PythonVersion, expected_type: &str) -> anyhow::Result<()> {
|
||||
let mut db = setup_db_with_python_version(version);
|
||||
|
||||
db.write_dedented(
|
||||
"src/a.py",
|
||||
@@ -5214,8 +5352,7 @@ mod tests {
|
||||
",
|
||||
)?;
|
||||
|
||||
// TODO: sys.version_info
|
||||
assert_public_ty(&db, "src/a.py", "x", "EllipsisType | ellipsis");
|
||||
assert_public_ty(&db, "src/a.py", "x", expected_type);
|
||||
|
||||
Ok(())
|
||||
}
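The version split this test now encodes comes from the bundled typeshed stubs, where the type of a bare `...` is spelled `ellipsis` in the 3.9 stubs and `EllipsisType` from 3.10 onwards. A checker-facing sketch (`reveal_type` here is the type checker's builtin, not a runtime call):

x = ...
reveal_type(x)  # 3.9 stubs: `ellipsis`; 3.10+ stubs: `EllipsisType`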
@@ -5849,7 +5986,17 @@ mod tests {
|
||||
|
||||
// We currently return `Todo` for all async comprehensions,
|
||||
// including comprehensions that have invalid syntax
|
||||
assert_scope_ty(&db, "src/a.py", &["foo", "<listcomp>"], "x", "@Todo");
|
||||
assert_scope_ty(
|
||||
&db,
|
||||
"src/a.py",
|
||||
&["foo", "<listcomp>"],
|
||||
"x",
|
||||
if cfg!(debug_assertions) {
|
||||
"@Todo(async iterables/iterators)"
|
||||
} else {
|
||||
"@Todo"
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -5873,7 +6020,17 @@ mod tests {
|
||||
)?;
|
||||
|
||||
// TODO async iterables/iterators! --Alex
|
||||
assert_scope_ty(&db, "src/a.py", &["foo", "<listcomp>"], "x", "@Todo");
|
||||
assert_scope_ty(
|
||||
&db,
|
||||
"src/a.py",
|
||||
&["foo", "<listcomp>"],
|
||||
"x",
|
||||
if cfg!(debug_assertions) {
|
||||
"@Todo(async iterables/iterators)"
|
||||
} else {
|
||||
"@Todo"
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -5907,6 +6064,72 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pep695_type_params() {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"src/a.py",
|
||||
"
|
||||
def f[T, U: A, V: (A, B), W = A, X: A = A1, Y: (int,)]():
|
||||
pass
|
||||
|
||||
class A: ...
|
||||
class B: ...
|
||||
class A1(A): ...
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let check_typevar = |var: &'static str,
|
||||
upper_bound: Option<&'static str>,
|
||||
constraints: Option<&[&'static str]>,
|
||||
default: Option<&'static str>| {
|
||||
let var_ty = get_symbol(&db, "src/a.py", &["f"], var).expect_type();
|
||||
assert_eq!(var_ty.display(&db).to_string(), var);
|
||||
|
||||
let expected_name_ty = format!(r#"Literal["{var}"]"#);
|
||||
let name_ty = var_ty.member(&db, "__name__").expect_type();
|
||||
assert_eq!(name_ty.display(&db).to_string(), expected_name_ty);
|
||||
|
||||
let KnownInstanceType::TypeVar(typevar) = var_ty.expect_known_instance() else {
|
||||
panic!("expected TypeVar");
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
typevar
|
||||
.upper_bound(&db)
|
||||
.map(|ty| ty.display(&db).to_string()),
|
||||
upper_bound.map(std::borrow::ToOwned::to_owned)
|
||||
);
|
||||
assert_eq!(
|
||||
typevar.constraints(&db).map(|tys| tys
|
||||
.iter()
|
||||
.map(|ty| ty.display(&db).to_string())
|
||||
.collect::<Vec<_>>()),
|
||||
constraints.map(|strings| strings
|
||||
.iter()
|
||||
.map(std::string::ToString::to_string)
|
||||
.collect::<Vec<_>>())
|
||||
);
|
||||
assert_eq!(
|
||||
typevar
|
||||
.default_ty(&db)
|
||||
.map(|ty| ty.display(&db).to_string()),
|
||||
default.map(std::borrow::ToOwned::to_owned)
|
||||
);
|
||||
};
|
||||
|
||||
check_typevar("T", None, None, None);
|
||||
check_typevar("U", Some("A"), None, None);
|
||||
check_typevar("V", None, Some(&["A", "B"]), None);
|
||||
check_typevar("W", None, None, Some("A"));
|
||||
check_typevar("X", Some("A"), None, Some("A1"));
|
||||
|
||||
// a typevar with fewer than two constraints is treated as unconstrained
|
||||
check_typevar("Y", None, None, None);
|
||||
}
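Restating the fixture above at runtime level: on Python 3.13+ (TypeVar defaults in the bracketed syntax are new in 3.13) the same attributes that `check_typevar` reads through the type system are observable via `__type_params__`; bounds and defaults are evaluated lazily, which is why the classes can be defined after `f`. A hedged sketch:

def f[T, U: A, V: (A, B), W = A, X: A = A1]():
    pass

class A: ...
class B: ...
class A1(A): ...

T, U, V, W, X = f.__type_params__
assert T.__name__ == "T" and not T.has_default()
assert U.__bound__ is A and U.__constraints__ == ()
assert V.__constraints__ == (A, B)
assert W.__default__ is A and X.__default__ is A1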
// Incremental inference tests
|
||||
|
||||
fn first_public_binding<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
|
||||
|
||||
@@ -5,7 +5,7 @@ use itertools::Either;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use super::{Class, ClassLiteralType, KnownClass, KnownInstanceType, Type};
|
||||
use crate::Db;
|
||||
use crate::{types::todo_type, Db};
|
||||
|
||||
/// The inferred method resolution order of a given class.
|
||||
///
|
||||
@@ -354,7 +354,7 @@ impl<'db> ClassBase<'db> {
|
||||
match ty {
|
||||
Type::Any => Some(Self::Any),
|
||||
Type::Unknown => Some(Self::Unknown),
|
||||
Type::Todo => Some(Self::Todo),
|
||||
Type::Todo(_) => Some(Self::Todo),
|
||||
Type::ClassLiteral(ClassLiteralType { class }) => Some(Self::Class(class)),
|
||||
Type::Union(_) => None, // TODO -- forces consideration of multiple possible MROs?
|
||||
Type::Intersection(_) => None, // TODO -- probably incorrect?
|
||||
@@ -371,8 +371,13 @@ impl<'db> ClassBase<'db> {
|
||||
| Type::ModuleLiteral(_)
|
||||
| Type::SubclassOf(_) => None,
|
||||
Type::KnownInstance(known_instance) => match known_instance {
|
||||
KnownInstanceType::Literal => None,
|
||||
KnownInstanceType::TypeVar(_) => None,
|
||||
KnownInstanceType::TypeVar(_)
|
||||
| KnownInstanceType::TypeAliasType(_)
|
||||
| KnownInstanceType::Literal
|
||||
| KnownInstanceType::Union
|
||||
| KnownInstanceType::NoReturn
|
||||
| KnownInstanceType::Never
|
||||
| KnownInstanceType::Optional => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -404,7 +409,7 @@ impl<'db> From<ClassBase<'db>> for Type<'db> {
|
||||
fn from(value: ClassBase<'db>) -> Self {
|
||||
match value {
|
||||
ClassBase::Any => Type::Any,
|
||||
ClassBase::Todo => Type::Todo,
|
||||
ClassBase::Todo => todo_type!(),
|
||||
ClassBase::Unknown => Type::Unknown,
|
||||
ClassBase::Class(class) => Type::class_literal(class),
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::constraint::{Constraint, ConstraintNode, PatternConstraint};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::expression::Expression;
|
||||
@@ -257,17 +257,26 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
expression: Expression<'db>,
|
||||
is_positive: bool,
|
||||
) -> Option<NarrowingConstraints<'db>> {
|
||||
fn is_narrowing_target_candidate(expr: &ast::Expr) -> bool {
|
||||
matches!(expr, ast::Expr::Name(_) | ast::Expr::Call(_))
|
||||
}
|
||||
|
||||
let ast::ExprCompare {
|
||||
range: _,
|
||||
left,
|
||||
ops,
|
||||
comparators,
|
||||
} = expr_compare;
|
||||
if !left.is_name_expr() && comparators.iter().all(|c| !c.is_name_expr()) {
|
||||
// If none of the comparators are name expressions,
|
||||
// we have no symbol to narrow down the type of.
|
||||
|
||||
// Performance optimization: early return if there are no potential narrowing targets.
|
||||
if !is_narrowing_target_candidate(left)
|
||||
&& comparators
|
||||
.iter()
|
||||
.all(|c| !is_narrowing_target_candidate(c))
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
if !is_positive && comparators.len() > 1 {
|
||||
// We can't negate a constraint made by a multi-comparator expression, since we can't
|
||||
// know which comparison part is the one being negated.
|
||||
@@ -283,42 +292,85 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
.tuple_windows::<(&ruff_python_ast::Expr, &ruff_python_ast::Expr)>();
|
||||
let mut constraints = NarrowingConstraints::default();
|
||||
for (op, (left, right)) in std::iter::zip(&**ops, comparator_tuples) {
|
||||
if let ast::Expr::Name(ast::ExprName {
|
||||
range: _,
|
||||
id,
|
||||
ctx: _,
|
||||
}) = left
|
||||
{
|
||||
// SAFETY: we should always have a symbol for every Name node.
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
let rhs_ty = inference.expression_ty(right.scoped_ast_id(self.db, scope));
|
||||
let rhs_ty = inference.expression_ty(right.scoped_expression_id(self.db, scope));
|
||||
|
||||
match if is_positive { *op } else { op.negate() } {
|
||||
ast::CmpOp::IsNot => {
|
||||
if rhs_ty.is_singleton(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
} else {
|
||||
// Non-singletons cannot be safely narrowed using `is not`
|
||||
match left {
|
||||
ast::Expr::Name(ast::ExprName {
|
||||
range: _,
|
||||
id,
|
||||
ctx: _,
|
||||
}) => {
|
||||
let symbol = self
|
||||
.symbols()
|
||||
.symbol_id_by_name(id)
|
||||
.expect("Should always have a symbol for every Name node");
|
||||
|
||||
match if is_positive { *op } else { op.negate() } {
|
||||
ast::CmpOp::IsNot => {
|
||||
if rhs_ty.is_singleton(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
} else {
|
||||
// Non-singletons cannot be safely narrowed using `is not`
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::CmpOp::Is => {
|
||||
constraints.insert(symbol, rhs_ty);
|
||||
}
|
||||
ast::CmpOp::NotEq => {
|
||||
if rhs_ty.is_single_valued(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
ast::CmpOp::Is => {
|
||||
constraints.insert(symbol, rhs_ty);
|
||||
}
|
||||
ast::CmpOp::NotEq => {
|
||||
if rhs_ty.is_single_valued(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// TODO other comparison types
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// TODO other comparison types
|
||||
}
|
||||
}
|
||||
ast::Expr::Call(ast::ExprCall {
|
||||
range: _,
|
||||
func: callable,
|
||||
arguments:
|
||||
ast::Arguments {
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
},
|
||||
}) if rhs_ty.is_class_literal() && keywords.is_empty() => {
|
||||
let [ast::Expr::Name(ast::ExprName { id, .. })] = &**args else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let is_valid_constraint = if is_positive {
|
||||
op == &ast::CmpOp::Is
|
||||
} else {
|
||||
op == &ast::CmpOp::IsNot
|
||||
};
|
||||
|
||||
if !is_valid_constraint {
|
||||
continue;
|
||||
}
|
||||
|
||||
let callable_ty =
|
||||
inference.expression_ty(callable.scoped_expression_id(self.db, scope));
|
||||
|
||||
if callable_ty
|
||||
.into_class_literal()
|
||||
.is_some_and(|c| c.class.is_known(self.db, KnownClass::Type))
|
||||
{
|
||||
let symbol = self
|
||||
.symbols()
|
||||
.symbol_id_by_name(id)
|
||||
.expect("Should always have a symbol for every Name node");
|
||||
constraints.insert(symbol, rhs_ty.to_instance(self.db));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some(constraints)
|
||||
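One detail worth spelling out from the comparison-narrowing code above: a chained comparison such as `a < b == c` is decomposed into one (operator, operand-pair) step per comparator. The following is a minimal, self-contained sketch of that `tuple_windows`/`zip` idiom; it assumes the `itertools` crate (which the surrounding code already uses), and the operand names are invented for illustration.

use itertools::Itertools;

fn main() {
    // `left` followed by all comparators of a chained comparison `a < b == c`.
    let operands = ["a", "b", "c"];
    // One operator per adjacent operand pair.
    let ops = ["<", "=="];

    // Pair each operator with its adjacent (left, right) operands, mirroring the
    // `zip(&**ops, comparator_tuples)` loop in `NarrowingConstraintsBuilder`.
    for (op, (left, right)) in std::iter::zip(&ops, operands.iter().tuple_windows::<(&&str, &&str)>()) {
        println!("{left} {op} {right}"); // prints "a < b" then "b == c"
    }
}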
@@ -336,7 +388,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
// TODO: add support for PEP 604 union types on the right hand side of `isinstance`
|
||||
// and `issubclass`, for example `isinstance(x, str | (int | float))`.
|
||||
match inference
|
||||
.expression_ty(expr_call.func.scoped_ast_id(self.db, scope))
|
||||
.expression_ty(expr_call.func.scoped_expression_id(self.db, scope))
|
||||
.into_function_literal()
|
||||
.and_then(|f| f.known(self.db))
|
||||
.and_then(KnownFunction::constraint_function)
|
||||
@@ -348,7 +400,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
|
||||
let class_info_ty =
|
||||
inference.expression_ty(class_info.scoped_ast_id(self.db, scope));
|
||||
inference.expression_ty(class_info.scoped_expression_id(self.db, scope));
|
||||
|
||||
let to_constraint = match function {
|
||||
KnownConstraintFunction::IsInstance => {
|
||||
@@ -414,7 +466,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
// filter out arms with statically known truthiness
|
||||
.filter(|expr| {
|
||||
inference
|
||||
.expression_ty(expr.scoped_ast_id(self.db, scope))
|
||||
.expression_ty(expr.scoped_expression_id(self.db, scope))
|
||||
.bool(self.db)
|
||||
!= match expr_bool_op.op {
|
||||
BoolOp::And => Truthiness::AlwaysTrue,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#![allow(dead_code)]
|
||||
use super::{definition_expression_ty, Type};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::Db;
|
||||
use crate::{semantic_index::definition::Definition, types::todo_type};
|
||||
use ruff_python_ast::{self as ast, name::Name};
|
||||
|
||||
/// A typed callable signature.
|
||||
@@ -18,7 +18,7 @@ impl<'db> Signature<'db> {
|
||||
pub(crate) fn todo() -> Self {
|
||||
Self {
|
||||
parameters: Parameters::todo(),
|
||||
return_ty: Type::Todo,
|
||||
return_ty: todo_type!("return type"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,8 +33,7 @@ impl<'db> Signature<'db> {
|
||||
.as_ref()
|
||||
.map(|returns| {
|
||||
if function_node.is_async {
|
||||
// TODO: generic `types.CoroutineType`!
|
||||
Type::Todo
|
||||
todo_type!("generic types.CoroutineType")
|
||||
} else {
|
||||
definition_expression_ty(db, definition, returns.as_ref())
|
||||
}
|
||||
@@ -81,11 +80,11 @@ impl<'db> Parameters<'db> {
|
||||
Self {
|
||||
variadic: Some(Parameter {
|
||||
name: Some(Name::new_static("args")),
|
||||
annotated_ty: Type::Todo,
|
||||
annotated_ty: todo_type!(),
|
||||
}),
|
||||
keywords: Some(Parameter {
|
||||
name: Some(Name::new_static("kwargs")),
|
||||
annotated_ty: Type::Todo,
|
||||
annotated_ty: todo_type!(),
|
||||
}),
|
||||
..Default::default()
|
||||
}
|
||||
|
||||
@@ -4,9 +4,9 @@ use ruff_db::files::File;
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::semantic_index::ast_ids::{HasScopedAstId, ScopedExpressionId};
|
||||
use crate::semantic_index::ast_ids::{HasScopedExpressionId, ScopedExpressionId};
|
||||
use crate::semantic_index::symbol::ScopeId;
|
||||
use crate::types::{Type, TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::types::{todo_type, Type, TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::Db;
|
||||
|
||||
/// Unpacks the value expression type to their respective targets.
|
||||
@@ -29,7 +29,7 @@ impl<'db> Unpacker<'db> {
|
||||
match target {
|
||||
ast::Expr::Name(target_name) => {
|
||||
self.targets
|
||||
.insert(target_name.scoped_ast_id(self.db, scope), value_ty);
|
||||
.insert(target_name.scoped_expression_id(self.db, scope), value_ty);
|
||||
}
|
||||
ast::Expr::Starred(ast::ExprStarred { value, .. }) => {
|
||||
self.unpack(value, value_ty, scope);
|
||||
@@ -59,7 +59,7 @@ impl<'db> Unpacker<'db> {
|
||||
// TODO: Combine the types into a list type. If the
|
||||
// starred_element_types is empty, then it should be `List[Any]`.
|
||||
// combine_types(starred_element_types);
|
||||
element_types.push(Type::Todo);
|
||||
element_types.push(todo_type!("starred unpacking"));
|
||||
|
||||
element_types.extend_from_slice(
|
||||
// SAFETY: Safe because of the length check above.
|
||||
@@ -72,7 +72,7 @@ impl<'db> Unpacker<'db> {
|
||||
// index.
|
||||
element_types.resize(elts.len() - 1, Type::Unknown);
|
||||
// TODO: This should be `list[Unknown]`
|
||||
element_types.insert(starred_index, Type::Todo);
|
||||
element_types.insert(starred_index, todo_type!("starred unpacking"));
|
||||
Cow::Owned(element_types)
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -68,7 +68,7 @@ impl Session {
|
||||
let system = LSPSystem::new(index.clone());
|
||||
|
||||
// TODO(dhruvmanila): Get the values from the client settings
|
||||
let metadata = WorkspaceMetadata::from_path(system_path, &system, None)?;
|
||||
let metadata = WorkspaceMetadata::discover(system_path, &system, None)?;
|
||||
// TODO(micha): Handle the case where the program settings are incorrect more gracefully.
|
||||
workspaces.insert(path, RootDatabase::new(metadata, system)?);
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ use lsp_types::Url;
|
||||
use ruff_db::file_revision::FileRevision;
|
||||
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
|
||||
use ruff_db::system::{
|
||||
DirectoryEntry, FileType, Metadata, OsSystem, Result, System, SystemPath, SystemPathBuf,
|
||||
SystemVirtualPath, SystemVirtualPathBuf,
|
||||
DirectoryEntry, FileType, GlobError, Metadata, OsSystem, PatternError, Result, System,
|
||||
SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf,
|
||||
};
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
|
||||
@@ -198,6 +198,16 @@ impl System for LSPSystem {
|
||||
self.os_system.walk_directory(path)
|
||||
}
|
||||
|
||||
fn glob(
|
||||
&self,
|
||||
pattern: &str,
|
||||
) -> std::result::Result<
|
||||
Box<dyn Iterator<Item = std::result::Result<SystemPathBuf, GlobError>>>,
|
||||
PatternError,
|
||||
> {
|
||||
self.os_system.glob(pattern)
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
|
||||
@@ -180,6 +180,16 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// Discard `@Todo`-type metadata from expected types, which is not available
|
||||
/// when running in release mode.
|
||||
#[cfg(not(debug_assertions))]
|
||||
fn discard_todo_metadata(ty: &str) -> std::borrow::Cow<'_, str> {
|
||||
static TODO_METADATA_REGEX: std::sync::LazyLock<regex::Regex> =
|
||||
std::sync::LazyLock::new(|| regex::Regex::new(r"@Todo\([^)]*\)").unwrap());
|
||||
|
||||
TODO_METADATA_REGEX.replace_all(ty, "@Todo")
|
||||
}
|
||||
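For illustration, here is roughly what the normalization above does to an assertion string. This is a standalone sketch using the same `regex` crate; the example type string is invented.

fn main() {
    // Mirrors the `TODO_METADATA_REGEX` replacement above: strip the `(...)` payload
    // so release-mode output (which carries no Todo messages) still matches.
    let re = regex::Regex::new(r"@Todo\([^)]*\)").unwrap();
    let expected = "tuple[@Todo(starred unpacking), int]";
    assert_eq!(re.replace_all(expected, "@Todo"), "tuple[@Todo, int]");
}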
|
||||
struct Matcher {
|
||||
line_index: LineIndex,
|
||||
source: SourceText,
|
||||
@@ -276,6 +286,9 @@ impl Matcher {
|
||||
}
|
||||
}
|
||||
Assertion::Revealed(expected_type) => {
|
||||
#[cfg(not(debug_assertions))]
|
||||
let expected_type = discard_todo_metadata(&expected_type);
|
||||
|
||||
let mut matched_revealed_type = None;
|
||||
let mut matched_undefined_reveal = None;
|
||||
let expected_reveal_type_message = format!("Revealed type is `{expected_type}`");
|
||||
|
||||
@@ -3,15 +3,15 @@ use std::any::Any;
|
||||
use js_sys::Error;
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::db::{Db, RootDatabase};
|
||||
use red_knot_workspace::workspace::settings::Configuration;
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
|
||||
use ruff_db::system::{
|
||||
DirectoryEntry, MemoryFileSystem, Metadata, System, SystemPath, SystemPathBuf,
|
||||
SystemVirtualPath,
|
||||
DirectoryEntry, GlobError, MemoryFileSystem, Metadata, PatternError, System, SystemPath,
|
||||
SystemPathBuf, SystemVirtualPath,
|
||||
};
|
||||
use ruff_notebook::Notebook;
|
||||
|
||||
@@ -42,10 +42,10 @@ impl Workspace {
|
||||
#[wasm_bindgen(constructor)]
|
||||
pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> {
|
||||
let system = WasmSystem::new(SystemPath::new(root));
|
||||
let workspace = WorkspaceMetadata::from_path(
|
||||
let workspace = WorkspaceMetadata::discover(
|
||||
SystemPath::new(root),
|
||||
&system,
|
||||
Some(Configuration {
|
||||
Some(&Configuration {
|
||||
target_version: Some(settings.target_version.into()),
|
||||
..Configuration::default()
|
||||
}),
|
||||
@@ -184,12 +184,12 @@ impl Settings {
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]
|
||||
pub enum TargetVersion {
|
||||
Py37,
|
||||
#[default]
|
||||
Py38,
|
||||
Py39,
|
||||
Py310,
|
||||
Py311,
|
||||
Py312,
|
||||
#[default]
|
||||
Py313,
|
||||
}
|
||||
|
||||
@@ -226,7 +226,7 @@ impl System for WasmSystem {
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &SystemPath) -> ruff_db::system::Result<SystemPathBuf> {
|
||||
Ok(self.fs.canonicalize(path))
|
||||
self.fs.canonicalize(path)
|
||||
}
|
||||
|
||||
fn read_to_string(&self, path: &SystemPath) -> ruff_db::system::Result<String> {
|
||||
@@ -272,6 +272,13 @@ impl System for WasmSystem {
|
||||
self.fs.walk_directory(path)
|
||||
}
|
||||
|
||||
fn glob(
|
||||
&self,
|
||||
pattern: &str,
|
||||
) -> Result<Box<dyn Iterator<Item = Result<SystemPathBuf, GlobError>>>, PatternError> {
|
||||
Ok(Box::new(self.fs.glob(pattern)?))
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
@@ -284,3 +291,17 @@ impl System for WasmSystem {
|
||||
fn not_found() -> std::io::Error {
|
||||
std::io::Error::new(std::io::ErrorKind::NotFound, "No such file or directory")
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::TargetVersion;
|
||||
use red_knot_python_semantic::PythonVersion;
|
||||
|
||||
#[test]
|
||||
fn same_default_as_python_version() {
|
||||
assert_eq!(
|
||||
PythonVersion::from(TargetVersion::default()),
|
||||
PythonVersion::default()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,22 +15,29 @@ license.workspace = true
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
|
||||
ruff_python_ast = { workspace = true, features = ["serde"] }
|
||||
ruff_text_size = { workspace = true }
|
||||
red_knot_vendored = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
notify = { workspace = true }
|
||||
pep440_rs = { workspace = true }
|
||||
rayon = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
red_knot_python_semantic = { workspace = true, features = ["serde"] }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
tempfile = { workspace = true }
|
||||
glob = { workspace = true }
|
||||
insta = { workspace = true, features = ["redactions", "ron"] }
|
||||
|
||||
[features]
|
||||
default = ["zstd"]
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
while True:
|
||||
|
||||
class A:
|
||||
x: int
|
||||
|
||||
break
|
||||
@@ -0,0 +1,6 @@
|
||||
while True:
|
||||
|
||||
def b():
|
||||
x: int
|
||||
|
||||
break
|
||||
@@ -0,0 +1,6 @@
|
||||
for _ in range(1):
|
||||
|
||||
class A:
|
||||
x: int
|
||||
|
||||
break
|
||||
@@ -0,0 +1,6 @@
|
||||
for _ in range(1):
|
||||
|
||||
def b():
|
||||
x: int
|
||||
|
||||
break
|
||||
@@ -0,0 +1 @@
|
||||
../../../../ruff_python_parser/resources/inline/err/type_param_invalid_bound_expr.py
|
||||
@@ -0,0 +1,3 @@
|
||||
msg = "hello"
|
||||
|
||||
f"{msg!r:>{10+10}}"
|
||||
@@ -0,0 +1 @@
|
||||
x: f"Literal[{1 + 2}]" = 3
|
||||
@@ -0,0 +1,3 @@
|
||||
from typing import Union
|
||||
|
||||
x: Union[int, str] = 1
|
||||
@@ -15,7 +15,9 @@ use ruff_db::{Db as SourceDb, Upcast};
|
||||
mod changes;
|
||||
|
||||
#[salsa::db]
|
||||
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {}
|
||||
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {
|
||||
fn workspace(&self) -> Workspace;
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
pub struct RootDatabase {
|
||||
@@ -45,11 +47,6 @@ impl RootDatabase {
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> Workspace {
|
||||
// SAFETY: The workspace is always initialized in `new`.
|
||||
self.workspace.unwrap()
|
||||
}
|
||||
|
||||
/// Checks all open files in the workspace and its dependencies.
|
||||
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
|
||||
self.with_db(|db| db.workspace().check(db))
|
||||
@@ -153,7 +150,11 @@ impl salsa::Database for RootDatabase {
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl Db for RootDatabase {}
|
||||
impl Db for RootDatabase {
|
||||
fn workspace(&self) -> Workspace {
|
||||
self.workspace.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
@@ -168,6 +169,7 @@ pub(crate) mod tests {
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::workspace::{Workspace, WorkspaceMetadata};
|
||||
|
||||
#[salsa::db]
|
||||
pub(crate) struct TestDb {
|
||||
@@ -176,17 +178,23 @@ pub(crate) mod tests {
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
workspace: Option<Workspace>,
|
||||
}
|
||||
|
||||
impl TestDb {
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
pub(crate) fn new(workspace: WorkspaceMetadata) -> Self {
|
||||
let mut db = Self {
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: red_knot_vendored::file_system().clone(),
|
||||
files: Files::default(),
|
||||
events: Arc::default(),
|
||||
}
|
||||
workspace: None,
|
||||
};
|
||||
|
||||
let workspace = Workspace::from_metadata(&db, workspace);
|
||||
db.workspace = Some(workspace);
|
||||
db
|
||||
}
|
||||
}
|
||||
|
||||
@@ -254,7 +262,11 @@ pub(crate) mod tests {
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl Db for TestDb {}
|
||||
impl Db for TestDb {
|
||||
fn workspace(&self) -> Workspace {
|
||||
self.workspace.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl salsa::Database for TestDb {
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
use crate::db::{Db, RootDatabase};
|
||||
use crate::watch;
|
||||
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
||||
use crate::workspace::settings::Configuration;
|
||||
use crate::workspace::{Workspace, WorkspaceMetadata};
|
||||
use red_knot_python_semantic::Program;
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::system::walk_directory::WalkState;
|
||||
use ruff_db::system::SystemPath;
|
||||
use ruff_db::Db;
|
||||
use ruff_db::Db as _;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::RootDatabase;
|
||||
use crate::watch;
|
||||
use crate::watch::{CreatedKind, DeletedKind};
|
||||
use crate::workspace::settings::Configuration;
|
||||
use crate::workspace::WorkspaceMetadata;
|
||||
|
||||
impl RootDatabase {
|
||||
#[tracing::instrument(level = "debug", skip(self, changes, base_configuration))]
|
||||
pub fn apply_changes(
|
||||
@@ -18,7 +17,7 @@ impl RootDatabase {
|
||||
changes: Vec<watch::ChangeEvent>,
|
||||
base_configuration: Option<&Configuration>,
|
||||
) {
|
||||
let workspace = self.workspace();
|
||||
let mut workspace = self.workspace();
|
||||
let workspace_path = workspace.root(self).to_path_buf();
|
||||
let program = Program::get(self);
|
||||
let custom_stdlib_versions_path = program
|
||||
@@ -58,6 +57,12 @@ impl RootDatabase {
|
||||
// Changes to ignore files or settings can change the workspace structure or add/remove files
|
||||
// from packages.
|
||||
if let Some(package) = workspace.package(self, path) {
|
||||
if package.root(self) == workspace.root(self)
|
||||
|| matches!(change, ChangeEvent::Deleted { .. })
|
||||
{
|
||||
workspace_change = true;
|
||||
}
|
||||
|
||||
changed_packages.insert(package);
|
||||
} else {
|
||||
workspace_change = true;
|
||||
@@ -151,18 +156,22 @@ impl RootDatabase {
|
||||
}
|
||||
|
||||
if workspace_change {
|
||||
match WorkspaceMetadata::from_path(
|
||||
&workspace_path,
|
||||
self.system(),
|
||||
base_configuration.cloned(),
|
||||
) {
|
||||
match WorkspaceMetadata::discover(&workspace_path, self.system(), base_configuration) {
|
||||
Ok(metadata) => {
|
||||
tracing::debug!("Reloading workspace after structural change");
|
||||
// TODO: Handle changes in the program settings.
|
||||
workspace.reload(self, metadata);
|
||||
if metadata.root() == workspace.root(self) {
|
||||
tracing::debug!("Reloading workspace after structural change");
|
||||
// TODO: Handle changes in the program settings.
|
||||
workspace.reload(self, metadata);
|
||||
} else {
|
||||
tracing::debug!("Replace workspace after structural change");
|
||||
workspace = Workspace::from_metadata(self, metadata);
|
||||
self.workspace = Some(workspace);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::error!("Failed to load workspace, keep old workspace: {error}");
|
||||
tracing::error!(
|
||||
"Failed to load workspace, keeping old workspace configuration: {error}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -227,6 +236,3 @@ impl RootDatabase {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {}
|
||||
|
||||
@@ -210,7 +210,15 @@ impl Debouncer {
|
||||
}
|
||||
|
||||
let kind = event.kind;
|
||||
let path = match SystemPathBuf::from_path_buf(event.paths.into_iter().next().unwrap()) {
|
||||
|
||||
// There are cases where paths can be empty.
|
||||
// https://github.com/astral-sh/ruff/issues/14222
|
||||
let Some(path) = event.paths.into_iter().next() else {
|
||||
tracing::debug!("Ignoring change event with kind '{kind:?}' without a path",);
|
||||
return;
|
||||
};
|
||||
|
||||
let path = match SystemPathBuf::from_path_buf(path) {
|
||||
Ok(path) => path,
|
||||
Err(path) => {
|
||||
tracing::debug!(
|
||||
|
||||
@@ -6,9 +6,9 @@ use tracing::info;
|
||||
use red_knot_python_semantic::system_module_search_paths;
|
||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
use ruff_db::Upcast;
|
||||
use ruff_db::{Db as _, Upcast};
|
||||
|
||||
use crate::db::RootDatabase;
|
||||
use crate::db::{Db, RootDatabase};
|
||||
use crate::watch::Watcher;
|
||||
|
||||
/// Wrapper around a [`Watcher`] that watches the relevant paths of a workspace.
|
||||
@@ -68,10 +68,9 @@ impl WorkspaceWatcher {
|
||||
|
||||
self.has_errored_paths = false;
|
||||
|
||||
let workspace_path = workspace_path
|
||||
.as_utf8_path()
|
||||
.canonicalize_utf8()
|
||||
.map(SystemPathBuf::from_utf8_path_buf)
|
||||
let workspace_path = db
|
||||
.system()
|
||||
.canonicalize_path(&workspace_path)
|
||||
.unwrap_or(workspace_path);
|
||||
|
||||
// Find the non-overlapping module search paths and filter out paths that are already covered by the workspace.
|
||||
|
||||
@@ -1,26 +1,28 @@
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use salsa::{Durability, Setter as _};
|
||||
use std::borrow::Cow;
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::db::RootDatabase;
|
||||
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};
|
||||
pub use metadata::{PackageMetadata, WorkspaceMetadata};
|
||||
pub use metadata::{PackageMetadata, WorkspaceDiscoveryError, WorkspaceMetadata};
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use red_knot_python_semantic::SearchPathSettings;
|
||||
use ruff_db::diagnostic::{Diagnostic, ParseDiagnostic, Severity};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::source::{source_text, SourceTextError};
|
||||
use ruff_db::system::FileType;
|
||||
use ruff_db::{
|
||||
files::{system_path_to_file, File},
|
||||
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
|
||||
};
|
||||
use ruff_python_ast::{name::Name, PySourceType};
|
||||
use ruff_text_size::TextRange;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use salsa::{Durability, Setter as _};
|
||||
use std::borrow::Cow;
|
||||
use std::iter::FusedIterator;
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
mod files;
|
||||
mod metadata;
|
||||
mod pyproject;
|
||||
pub mod settings;
|
||||
|
||||
/// The project workspace as a Salsa ingredient.
|
||||
@@ -81,7 +83,7 @@ pub struct Workspace {
|
||||
|
||||
/// The (first-party) packages in this workspace.
|
||||
#[return_ref]
|
||||
package_tree: BTreeMap<SystemPathBuf, Package>,
|
||||
package_tree: PackageTree,
|
||||
|
||||
/// The unresolved search path configuration.
|
||||
#[return_ref]
|
||||
@@ -106,7 +108,6 @@ pub struct Package {
|
||||
}
|
||||
|
||||
impl Workspace {
|
||||
/// Discovers the closest workspace at `path` and returns its metadata.
|
||||
pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
|
||||
let mut packages = BTreeMap::new();
|
||||
|
||||
@@ -114,10 +115,12 @@ impl Workspace {
|
||||
packages.insert(package.root.clone(), Package::from_metadata(db, package));
|
||||
}
|
||||
|
||||
let program_settings = metadata.settings.program;
|
||||
|
||||
Workspace::builder(
|
||||
metadata.root,
|
||||
packages,
|
||||
metadata.settings.program.search_paths,
|
||||
PackageTree(packages),
|
||||
program_settings.search_paths,
|
||||
)
|
||||
.durability(Durability::MEDIUM)
|
||||
.open_fileset_durability(Durability::LOW)
|
||||
@@ -128,15 +131,11 @@ impl Workspace {
|
||||
self.root_buf(db)
|
||||
}
|
||||
|
||||
pub fn packages(self, db: &dyn Db) -> impl Iterator<Item = Package> + '_ {
|
||||
self.package_tree(db).values().copied()
|
||||
}
|
||||
|
||||
pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
|
||||
tracing::debug!("Reloading workspace");
|
||||
assert_eq!(self.root(db), metadata.root());
|
||||
|
||||
let mut old_packages = self.package_tree(db).clone();
|
||||
let mut old_packages = self.package_tree(db).0.clone();
|
||||
let mut new_packages = BTreeMap::new();
|
||||
|
||||
for package_metadata in metadata.packages {
|
||||
@@ -157,13 +156,13 @@ impl Workspace {
|
||||
.to(metadata.settings.program.search_paths);
|
||||
}
|
||||
|
||||
self.set_package_tree(db).to(new_packages);
|
||||
self.set_package_tree(db).to(PackageTree(new_packages));
|
||||
}
|
||||
|
||||
pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
|
||||
let path = metadata.root().to_path_buf();
|
||||
|
||||
if let Some(package) = self.package_tree(db).get(&path).copied() {
|
||||
if let Some(package) = self.package_tree(db).get(&path) {
|
||||
package.update(db, metadata);
|
||||
Ok(())
|
||||
} else {
|
||||
@@ -171,20 +170,17 @@ impl Workspace {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn packages(self, db: &dyn Db) -> &PackageTree {
|
||||
self.package_tree(db)
|
||||
}
|
||||
|
||||
/// Returns the closest package to which the first-party `path` belongs.
|
||||
///
|
||||
/// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
|
||||
/// (e.g. third-party dependencies or `excluded`).
|
||||
pub fn package(self, db: &dyn Db, path: &SystemPath) -> Option<Package> {
|
||||
pub fn package(self, db: &dyn Db, path: impl AsRef<SystemPath>) -> Option<Package> {
|
||||
let packages = self.package_tree(db);
|
||||
|
||||
let (package_path, package) = packages.range(..=path.to_path_buf()).next_back()?;
|
||||
|
||||
if path.starts_with(package_path) {
|
||||
Some(*package)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
packages.get(path.as_ref())
|
||||
}
|
||||
|
||||
/// Checks all open files in the workspace and its dependencies.
|
||||
@@ -342,7 +338,7 @@ impl Package {
|
||||
let _entered =
|
||||
tracing::debug_span!("index_package_files", package = %self.name(db)).entered();
|
||||
|
||||
let files = discover_package_files(db, self.root(db));
|
||||
let files = discover_package_files(db, self);
|
||||
tracing::info!("Found {} files in package `{}`", files.len(), self.name(db));
|
||||
vacant.set(files)
|
||||
}
|
||||
@@ -407,23 +403,33 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||
diagnostics
|
||||
}
|
||||
|
||||
fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
|
||||
fn discover_package_files(db: &dyn Db, package: Package) -> FxHashSet<File> {
|
||||
let paths = std::sync::Mutex::new(Vec::new());
|
||||
let packages = db.workspace().packages(db);
|
||||
|
||||
db.system().walk_directory(path).run(|| {
|
||||
db.system().walk_directory(package.root(db)).run(|| {
|
||||
Box::new(|entry| {
|
||||
match entry {
|
||||
Ok(entry) => {
|
||||
// Skip over any non python files to avoid creating too many entries in `Files`.
|
||||
if entry.file_type().is_file()
|
||||
&& entry
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(PySourceType::try_from_extension)
|
||||
.is_some()
|
||||
{
|
||||
let mut paths = paths.lock().unwrap();
|
||||
paths.push(entry.into_path());
|
||||
match entry.file_type() {
|
||||
FileType::File => {
|
||||
if entry
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(PySourceType::try_from_extension)
|
||||
.is_some()
|
||||
{
|
||||
let mut paths = paths.lock().unwrap();
|
||||
paths.push(entry.into_path());
|
||||
}
|
||||
}
|
||||
FileType::Directory | FileType::Symlink => {
|
||||
// Don't traverse into nested packages (the workspace-package is an ancestor of all other packages)
|
||||
if packages.get(entry.path()) != Some(package) {
|
||||
return WalkState::Skip;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
@@ -464,6 +470,7 @@ impl<'a> WorkspaceFiles<'a> {
|
||||
WorkspaceFiles::PackageFiles(
|
||||
workspace
|
||||
.packages(db)
|
||||
.iter()
|
||||
.map(|package| package.files(db))
|
||||
.collect(),
|
||||
)
|
||||
@@ -545,20 +552,78 @@ impl Diagnostic for IOErrorDiagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone)]
|
||||
pub struct PackageTree(BTreeMap<SystemPathBuf, Package>);
|
||||
|
||||
impl PackageTree {
|
||||
pub fn get(&self, path: &SystemPath) -> Option<Package> {
|
||||
let (package_path, package) = self.0.range(..=path.to_path_buf()).next_back()?;
|
||||
|
||||
if path.starts_with(package_path) {
|
||||
Some(*package)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// The package table should never be empty, which is why `is_empty` makes little sense
|
||||
#[allow(clippy::len_without_is_empty)]
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> PackageTreeIter {
|
||||
PackageTreeIter(self.0.values())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a PackageTree {
|
||||
type Item = Package;
|
||||
type IntoIter = PackageTreeIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PackageTreeIter<'a>(std::collections::btree_map::Values<'a, SystemPathBuf, Package>);
|
||||
|
||||
impl Iterator for PackageTreeIter<'_> {
|
||||
type Item = Package;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().copied()
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn last(mut self) -> Option<Self::Item> {
|
||||
self.0.next_back().copied()
|
||||
}
|
||||
}
|
||||
|
||||
impl ExactSizeIterator for PackageTreeIter<'_> {}
|
||||
impl FusedIterator for PackageTreeIter<'_> {}
|
||||
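The `PackageTree::get` lookup above relies on a `BTreeMap` ordering trick: `range(..=path)` plus `next_back()` yields the last package root that sorts at or before the query path, which is then validated with `starts_with`. Below is a minimal standalone sketch of the same idiom using only std types; the paths and package names are invented.

use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

// Find the closest enclosing package root for `path`, if any.
fn closest<'a>(tree: &'a BTreeMap<PathBuf, &'a str>, path: &Path) -> Option<&'a str> {
    // Last key that sorts at or before `path`...
    let (root, package) = tree.range(..=path.to_path_buf()).next_back()?;
    // ...is only a hit if `path` actually lies under it.
    path.starts_with(root).then_some(*package)
}

fn main() {
    let mut tree = BTreeMap::new();
    tree.insert(PathBuf::from("/app"), "workspace-root");
    tree.insert(PathBuf::from("/app/packages/a"), "member-a");

    assert_eq!(closest(&tree, Path::new("/app/packages/a/src/lib.py")), Some("member-a"));
    assert_eq!(closest(&tree, Path::new("/app/docs/readme.md")), Some("workspace-root"));
    assert_eq!(closest(&tree, Path::new("/tmp/other.py")), None);
}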
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::workspace::check_file;
|
||||
use crate::workspace::{check_file, WorkspaceMetadata};
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::assert_function_query_was_not_run;
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
#[test]
|
||||
fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
|
||||
let mut db = TestDb::new();
|
||||
let workspace =
|
||||
WorkspaceMetadata::single_package(Name::new_static("test"), SystemPathBuf::from("/"));
|
||||
let mut db = TestDb::new(workspace);
|
||||
let path = SystemPath::new("test.py");
|
||||
|
||||
db.write_file(path, "x = 10")?;
|
||||
|
||||
@@ -232,21 +232,28 @@ impl Drop for IndexedMut<'_> {
|
||||
mod tests {
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::db::Db;
|
||||
use crate::workspace::files::Index;
|
||||
use crate::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::workspace::files::Index;
|
||||
use crate::workspace::Package;
|
||||
|
||||
#[test]
|
||||
fn re_entrance() -> anyhow::Result<()> {
|
||||
let mut db = TestDb::new();
|
||||
let metadata = WorkspaceMetadata::single_package(
|
||||
Name::new_static("test"),
|
||||
SystemPathBuf::from("/test"),
|
||||
);
|
||||
let mut db = TestDb::new(metadata);
|
||||
|
||||
db.write_file("test.py", "")?;
|
||||
|
||||
let package = Package::new(&db, Name::new("test"), SystemPathBuf::from("/test"));
|
||||
let package = db
|
||||
.workspace()
|
||||
.package(&db, "/test")
|
||||
.expect("test package to exist");
|
||||
|
||||
let file = system_path_to_file(&db, "test.py").unwrap();
|
||||
|
||||
|
||||
@@ -1,67 +1,191 @@
|
||||
use crate::workspace::settings::{Configuration, WorkspaceSettings};
|
||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||
use ruff_db::system::{GlobError, System, SystemPath, SystemPathBuf};
|
||||
use ruff_python_ast::name::Name;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Debug)]
|
||||
use crate::workspace::pyproject::{PyProject, PyProjectError, Workspace};
|
||||
use crate::workspace::settings::{Configuration, WorkspaceSettings};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[cfg_attr(test, derive(serde::Serialize))]
|
||||
pub struct WorkspaceMetadata {
|
||||
pub(super) root: SystemPathBuf,
|
||||
|
||||
/// The (first-party) packages in this workspace.
|
||||
pub(super) packages: Vec<PackageMetadata>,
|
||||
|
||||
/// The resolved settings for this workspace.
|
||||
pub(super) settings: WorkspaceSettings,
|
||||
}
|
||||
|
||||
/// A first-party package in a workspace.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[cfg_attr(test, derive(serde::Serialize))]
|
||||
pub struct PackageMetadata {
|
||||
pub(super) name: Name,
|
||||
|
||||
/// The path to the root directory of the package.
|
||||
pub(super) root: SystemPathBuf,
|
||||
// TODO: Add the loaded package configuration (not the nested ruff settings)
|
||||
|
||||
pub(super) configuration: Configuration,
|
||||
}
|
||||
|
||||
impl WorkspaceMetadata {
|
||||
/// Creates a workspace that consists of a single package located at `root`.
|
||||
pub fn single_package(name: Name, root: SystemPathBuf) -> Self {
|
||||
let package = PackageMetadata {
|
||||
name,
|
||||
root: root.clone(),
|
||||
configuration: Configuration::default(),
|
||||
};
|
||||
|
||||
let packages = vec![package];
|
||||
let settings = packages[0]
|
||||
.configuration
|
||||
.to_workspace_settings(&root, &packages);
|
||||
|
||||
Self {
|
||||
root,
|
||||
packages,
|
||||
settings,
|
||||
}
|
||||
}
|
||||
|
||||
/// Discovers the closest workspace at `path` and returns its metadata.
|
||||
pub fn from_path(
|
||||
///
|
||||
/// 1. Traverse upwards in the `path`'s ancestor chain and find the first `pyproject.toml`.
|
||||
/// 1. If the `pyproject.toml` contains no `knot.workspace` table, then keep traversing the `path`'s ancestor
|
||||
/// chain until we find one or reach the root.
|
||||
/// 1. If we've found a workspace, then resolve the workspace's members and assert that the closest
///    package (the first package found without a `knot.workspace` table) is a member. If not, create
///    a single-package workspace for the closest package.
/// 1. If there's no `pyproject.toml` with a `knot.workspace` table, then create a single-package workspace.
|
||||
/// 1. If no ancestor directory contains any `pyproject.toml`, create an ad-hoc workspace for `path`
|
||||
/// that consists of a single package and uses the default settings.
|
||||
pub fn discover(
|
||||
path: &SystemPath,
|
||||
system: &dyn System,
|
||||
base_configuration: Option<Configuration>,
|
||||
) -> anyhow::Result<WorkspaceMetadata> {
|
||||
assert!(
|
||||
system.is_directory(path),
|
||||
"Workspace root path must be a directory"
|
||||
);
|
||||
tracing::debug!("Searching for workspace in '{path}'");
|
||||
base_configuration: Option<&Configuration>,
|
||||
) -> Result<WorkspaceMetadata, WorkspaceDiscoveryError> {
|
||||
tracing::debug!("Searching for a workspace in '{path}'");
|
||||
|
||||
let root = path.to_path_buf();
|
||||
|
||||
// TODO: Discover package name from `pyproject.toml`.
|
||||
let package_name: Name = path.file_name().unwrap_or("<root>").into();
|
||||
|
||||
let package = PackageMetadata {
|
||||
name: package_name,
|
||||
root: root.clone(),
|
||||
};
|
||||
|
||||
// TODO: Load the configuration from disk.
|
||||
let mut configuration = Configuration::default();
|
||||
|
||||
if let Some(base_configuration) = base_configuration {
|
||||
configuration.extend(base_configuration);
|
||||
if !system.is_directory(path) {
|
||||
return Err(WorkspaceDiscoveryError::NotADirectory(path.to_path_buf()));
|
||||
}
|
||||
|
||||
// TODO: Respect the package configurations when resolving settings (e.g. for the target version).
|
||||
let settings = configuration.into_workspace_settings(&root);
|
||||
let mut closest_package: Option<PackageMetadata> = None;
|
||||
|
||||
let workspace = WorkspaceMetadata {
|
||||
root,
|
||||
packages: vec![package],
|
||||
settings,
|
||||
for ancestor in path.ancestors() {
|
||||
let pyproject_path = ancestor.join("pyproject.toml");
|
||||
if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
|
||||
let pyproject = PyProject::from_str(&pyproject_str).map_err(|error| {
|
||||
WorkspaceDiscoveryError::InvalidPyProject {
|
||||
path: pyproject_path,
|
||||
source: Box::new(error),
|
||||
}
|
||||
})?;
|
||||
|
||||
let workspace_table = pyproject.workspace().cloned();
|
||||
let package = PackageMetadata::from_pyproject(
|
||||
pyproject,
|
||||
ancestor.to_path_buf(),
|
||||
base_configuration,
|
||||
);
|
||||
|
||||
if let Some(workspace_table) = workspace_table {
|
||||
let workspace_root = ancestor;
|
||||
tracing::debug!("Found workspace at '{}'", workspace_root);
|
||||
|
||||
match collect_packages(
|
||||
package,
|
||||
&workspace_table,
|
||||
closest_package,
|
||||
base_configuration,
|
||||
system,
|
||||
)? {
|
||||
CollectedPackagesOrStandalone::Packages(mut packages) => {
|
||||
let mut by_name =
|
||||
FxHashMap::with_capacity_and_hasher(packages.len(), FxBuildHasher);
|
||||
|
||||
let mut workspace_package = None;
|
||||
|
||||
for package in &packages {
|
||||
if let Some(conflicting) = by_name.insert(package.name(), package) {
|
||||
return Err(WorkspaceDiscoveryError::DuplicatePackageNames {
|
||||
name: package.name().clone(),
|
||||
first: conflicting.root().to_path_buf(),
|
||||
second: package.root().to_path_buf(),
|
||||
});
|
||||
}
|
||||
|
||||
if package.root() == workspace_root {
|
||||
workspace_package = Some(package);
|
||||
} else if !package.root().starts_with(workspace_root) {
|
||||
return Err(WorkspaceDiscoveryError::PackageOutsideWorkspace {
|
||||
package_name: package.name().clone(),
|
||||
package_root: package.root().to_path_buf(),
|
||||
workspace_root: workspace_root.to_path_buf(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let workspace_package = workspace_package
|
||||
.expect("workspace package to be part of the workspace's packages");
|
||||
|
||||
let settings = workspace_package
|
||||
.configuration
|
||||
.to_workspace_settings(workspace_root, &packages);
|
||||
|
||||
packages.sort_unstable_by(|a, b| a.root().cmp(b.root()));
|
||||
|
||||
return Ok(Self {
|
||||
root: workspace_root.to_path_buf(),
|
||||
packages,
|
||||
settings,
|
||||
});
|
||||
}
|
||||
CollectedPackagesOrStandalone::Standalone(package) => {
|
||||
closest_package = Some(package);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Not a workspace itself, keep looking for an enclosing workspace.
|
||||
if closest_package.is_none() {
|
||||
closest_package = Some(package);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// No workspace found, but maybe a pyproject.toml was found.
|
||||
let package = if let Some(enclosing_package) = closest_package {
|
||||
tracing::debug!("Single package workspace at '{}'", enclosing_package.root());
|
||||
|
||||
enclosing_package
|
||||
} else {
|
||||
tracing::debug!("The ancestor directories contain no `pyproject.toml`. Falling back to a virtual project.");
|
||||
|
||||
// Create a package with a default configuration
|
||||
PackageMetadata {
|
||||
name: path.file_name().unwrap_or("root").into(),
|
||||
root: path.to_path_buf(),
|
||||
// TODO: create the configuration from the pyproject.toml
|
||||
configuration: base_configuration.cloned().unwrap_or_default(),
|
||||
}
|
||||
};
|
||||
|
||||
Ok(workspace)
|
||||
let root = package.root().to_path_buf();
|
||||
let packages = vec![package];
|
||||
let settings = packages[0]
|
||||
.configuration
|
||||
.to_workspace_settings(&root, &packages);
|
||||
|
||||
Ok(Self {
|
||||
root,
|
||||
packages,
|
||||
settings,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn root(&self) -> &SystemPath {
|
||||
@@ -78,6 +202,30 @@ impl WorkspaceMetadata {
|
||||
}
|
||||
|
||||
impl PackageMetadata {
|
||||
pub(crate) fn from_pyproject(
|
||||
pyproject: PyProject,
|
||||
root: SystemPathBuf,
|
||||
base_configuration: Option<&Configuration>,
|
||||
) -> Self {
|
||||
let name = pyproject.project.and_then(|project| project.name);
|
||||
let name = name
|
||||
.map(|name| Name::new(&*name))
|
||||
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
|
||||
|
||||
// TODO: load configuration from pyproject.toml
|
||||
let mut configuration = Configuration::default();
|
||||
|
||||
if let Some(base_configuration) = base_configuration {
|
||||
configuration.extend(base_configuration.clone());
|
||||
}
|
||||
|
||||
PackageMetadata {
|
||||
name,
|
||||
root,
|
||||
configuration,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &Name {
|
||||
&self.name
|
||||
}
|
||||
@@ -86,3 +234,577 @@ impl PackageMetadata {
|
||||
&self.root
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_packages(
|
||||
workspace_package: PackageMetadata,
|
||||
workspace_table: &Workspace,
|
||||
closest_package: Option<PackageMetadata>,
|
||||
base_configuration: Option<&Configuration>,
|
||||
system: &dyn System,
|
||||
) -> Result<CollectedPackagesOrStandalone, WorkspaceDiscoveryError> {
|
||||
let workspace_root = workspace_package.root().to_path_buf();
|
||||
let mut member_paths = FxHashSet::default();
|
||||
|
||||
for glob in workspace_table.members() {
|
||||
let full_glob = workspace_package.root().join(glob);
|
||||
|
||||
let matches = system.glob(full_glob.as_str()).map_err(|error| {
|
||||
WorkspaceDiscoveryError::InvalidMembersPattern {
|
||||
raw_glob: glob.clone(),
|
||||
source: error,
|
||||
}
|
||||
})?;
|
||||
|
||||
for result in matches {
|
||||
let path = result?;
|
||||
let normalized = SystemPath::absolute(path, &workspace_root);
|
||||
|
||||
// Skip over non-directory entries. E.g. Finder might end up creating a `.DS_STORE` file
// that ends up matching `/projects/*`.
|
||||
if system.is_directory(&normalized) {
|
||||
member_paths.insert(normalized);
|
||||
} else {
|
||||
tracing::debug!("Ignoring non-directory workspace member '{normalized}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// The workspace root is always a member. Don't re-add it
|
||||
let mut packages = vec![workspace_package];
|
||||
member_paths.remove(&workspace_root);
|
||||
|
||||
// Add the package that is closest to the current working directory, unless that
// package isn't a workspace member, in which case fall back to creating a
// single-package workspace.
|
||||
if let Some(closest_package) = closest_package {
|
||||
// The closest `pyproject.toml` isn't a member of this workspace because it is
// explicitly excluded or simply not listed.
|
||||
// Create a standalone workspace.
|
||||
if !member_paths.remove(closest_package.root())
|
||||
|| workspace_table.is_excluded(closest_package.root(), &workspace_root)?
|
||||
{
|
||||
tracing::debug!(
|
||||
"Ignoring workspace '{workspace_root}' because package '{package}' is not a member",
|
||||
package = closest_package.name()
|
||||
);
|
||||
return Ok(CollectedPackagesOrStandalone::Standalone(closest_package));
|
||||
}
|
||||
|
||||
tracing::debug!("adding package '{}'", closest_package.name());
|
||||
packages.push(closest_package);
|
||||
}
|
||||
|
||||
// Add all remaining member paths
|
||||
for member_path in member_paths {
|
||||
if workspace_table.is_excluded(&member_path, workspace_root.as_path())? {
|
||||
tracing::debug!("Ignoring excluded member '{member_path}'");
|
||||
continue;
|
||||
}
|
||||
|
||||
let pyproject_path = member_path.join("pyproject.toml");
|
||||
|
||||
let pyproject_str = match system.read_to_string(&pyproject_path) {
|
||||
Ok(pyproject_str) => pyproject_str,
|
||||
|
||||
Err(error) => {
|
||||
if error.kind() == std::io::ErrorKind::NotFound
|
||||
&& member_path
|
||||
.file_name()
|
||||
.is_some_and(|name| name.starts_with('.'))
|
||||
{
|
||||
tracing::debug!(
|
||||
"Ignore member '{member_path}' because it has no pyproject.toml and is hidden",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
return Err(WorkspaceDiscoveryError::MemberFailedToReadPyProject {
|
||||
package_root: member_path,
|
||||
source: error,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
let pyproject = PyProject::from_str(&pyproject_str).map_err(|error| {
|
||||
WorkspaceDiscoveryError::InvalidPyProject {
|
||||
source: Box::new(error),
|
||||
path: pyproject_path,
|
||||
}
|
||||
})?;
|
||||
|
||||
if pyproject.workspace().is_some() {
|
||||
return Err(WorkspaceDiscoveryError::NestedWorkspaces {
|
||||
package_root: member_path,
|
||||
});
|
||||
}
|
||||
|
||||
let package = PackageMetadata::from_pyproject(pyproject, member_path, base_configuration);
|
||||
|
||||
tracing::debug!(
|
||||
"Adding package '{}' at '{}'",
|
||||
package.name(),
|
||||
package.root()
|
||||
);
|
||||
|
||||
packages.push(package);
|
||||
}
|
||||
|
||||
Ok(CollectedPackagesOrStandalone::Packages(packages))
|
||||
}
|
||||
|
||||
enum CollectedPackagesOrStandalone {
|
||||
Packages(Vec<PackageMetadata>),
|
||||
Standalone(PackageMetadata),
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum WorkspaceDiscoveryError {
|
||||
#[error("workspace path '{0}' is not a directory")]
|
||||
NotADirectory(SystemPathBuf),
|
||||
|
||||
#[error("nested workspaces aren't supported but the package located at '{package_root}' defines a `knot.workspace` table")]
|
||||
NestedWorkspaces { package_root: SystemPathBuf },
|
||||
|
||||
#[error("the workspace contains two packages named '{name}': '{first}' and '{second}'")]
|
||||
DuplicatePackageNames {
|
||||
name: Name,
|
||||
first: SystemPathBuf,
|
||||
second: SystemPathBuf,
|
||||
},
|
||||
|
||||
#[error("the package '{package_name}' located at '{package_root}' is outside the workspace's root directory '{workspace_root}'")]
|
||||
PackageOutsideWorkspace {
|
||||
workspace_root: SystemPathBuf,
|
||||
package_name: Name,
|
||||
package_root: SystemPathBuf,
|
||||
},
|
||||
|
||||
#[error(
|
||||
"failed to read the `pyproject.toml` for the package located at '{package_root}': {source}"
|
||||
)]
|
||||
MemberFailedToReadPyProject {
|
||||
package_root: SystemPathBuf,
|
||||
source: std::io::Error,
|
||||
},
|
||||
|
||||
#[error("{path} is not a valid `pyproject.toml`: {source}")]
|
||||
InvalidPyProject {
|
||||
source: Box<PyProjectError>,
|
||||
path: SystemPathBuf,
|
||||
},
|
||||
|
||||
#[error("invalid glob '{raw_glob}' in `tool.knot.workspace.members`: {source}")]
|
||||
InvalidMembersPattern {
|
||||
source: glob::PatternError,
|
||||
raw_glob: String,
|
||||
},
|
||||
|
||||
#[error("failed to match member glob: {error}")]
|
||||
FailedToMatchGlob {
|
||||
#[from]
|
||||
error: GlobError,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
//! Integration tests for workspace discovery
|
||||
|
||||
use crate::snapshot_workspace;
|
||||
use anyhow::Context;
|
||||
use insta::assert_ron_snapshot;
|
||||
use ruff_db::system::{SystemPathBuf, TestSystem};
|
||||
|
||||
use crate::workspace::{WorkspaceDiscoveryError, WorkspaceMetadata};
|
||||
|
||||
#[test]
|
||||
fn package_without_pyproject() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let workspace = WorkspaceMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover workspace")?;
|
||||
|
||||
assert_eq!(workspace.root(), &*root);
|
||||
|
||||
snapshot_workspace!(workspace);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn single_package() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "backend"
|
||||
"#,
|
||||
),
|
||||
(root.join("db/__init__.py"), ""),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let workspace = WorkspaceMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover workspace")?;
|
||||
|
||||
assert_eq!(workspace.root(), &*root);
|
||||
snapshot_workspace!(workspace);
|
||||
|
||||
// Discovering the same package from a subdirectory should give the same result
|
||||
let from_src = WorkspaceMetadata::discover(&root.join("db"), &system, None)
|
||||
.context("Failed to discover workspace from src sub-directory")?;
|
||||
|
||||
assert_eq!(from_src, workspace);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn workspace_members() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
exclude = ["packages/excluded"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/a/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "member-a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/x/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "member-x"
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let workspace = WorkspaceMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover workspace")?;
|
||||
|
||||
assert_eq!(workspace.root(), &*root);
|
||||
|
||||
snapshot_workspace!(workspace);
|
||||
|
||||
// Discovering the same package from a member should give the same result
|
||||
let from_src = WorkspaceMetadata::discover(&root.join("packages/a"), &system, None)
|
||||
.context("Failed to discover workspace from src sub-directory")?;
|
||||
|
||||
assert_eq!(from_src, workspace);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn workspace_excluded() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
exclude = ["packages/excluded"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/a/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "member-a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/excluded/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "member-x"
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let workspace = WorkspaceMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover workspace")?;
|
||||
|
||||
assert_eq!(workspace.root(), &*root);
|
||||
snapshot_workspace!(workspace);
|
||||
|
||||
// Discovering the `workspace` for `excluded` should discover a single-package workspace
|
||||
let excluded_workspace =
|
||||
WorkspaceMetadata::discover(&root.join("packages/excluded"), &system, None)
|
||||
.context("Failed to discover workspace from src sub-directory")?;
|
||||
|
||||
assert_ne!(excluded_workspace, workspace);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn workspace_non_unique_member_names() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/a/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/b/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
|
||||
"Discovery should error because the workspace contains two packages with the same names.",
|
||||
);
|
||||
|
||||
assert_error_eq(&error, "the workspace contains two packages named 'a': '/app/packages/a' and '/app/packages/b'");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_workspaces() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
root.join("packages/a/pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "nested-workspace"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
|
||||
"Discovery should error because the workspace has a package that itself is a workspace",
|
||||
);
|
||||
|
||||
assert_error_eq(&error, "nested workspaces aren't supported but the package located at '/app/packages/a' defines a `knot.workspace` table");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn member_missing_pyproject_toml() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(root.join("packages/a/test.py"), ""),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let error = WorkspaceMetadata::discover(&root, &system, None)
|
||||
.expect_err("Discovery should error because member `a` has no `pypyroject.toml`");
|
||||
|
||||
assert_error_eq(&error, "failed to read the `pyproject.toml` for the package located at '/app/packages/a': No such file or directory");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Folders that match the members pattern but don't have a `pyproject.toml`
/// aren't valid members and discovery fails. However, don't fail
/// if the folder name indicates that it is a hidden folder that might
/// have been created by another tool.
|
||||
#[test]
|
||||
fn member_pattern_matching_hidden_folder() -> anyhow::Result<()> {
|
||||
let system = TestSystem::default();
|
||||
let root = SystemPathBuf::from("/app");
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
name = "workspace-root"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(root.join("packages/.hidden/a.py"), ""),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let workspace = WorkspaceMetadata::discover(&root, &system, None)?;
|
||||
|
||||
snapshot_workspace!(workspace);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
    #[test]
    fn member_pattern_matching_file() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (root.join("packages/.DS_STORE"), ""),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)?;

        snapshot_workspace!(&workspace);

        Ok(())
    }

    #[test]
    fn workspace_root_not_an_ancestor_of_member() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["../packages/*"]
                    "#,
                ),
                (
                    root.join("../packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "a"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
            "Discovery should error because member `a` is outside the workspace's directory",
        );

        assert_error_eq(&error, "the package 'a' located at '/packages/a' is outside the workspace's root directory '/app'");

        Ok(())
    }

    #[track_caller]
    fn assert_error_eq(error: &WorkspaceDiscoveryError, message: &str) {
        assert_eq!(error.to_string().replace('\\', "/"), message);
    }

    /// Snapshots a workspace but with all paths using unix separators.
    #[macro_export]
    macro_rules! snapshot_workspace {
        ($workspace:expr) => {{
            assert_ron_snapshot!($workspace, {
                ".root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
                ".packages[].root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
            });
        }};
    }
}
108
crates/red_knot_workspace/src/workspace/pyproject.rs
Normal file
@@ -0,0 +1,108 @@
mod package_name;

use pep440_rs::{Version, VersionSpecifiers};
use serde::Deserialize;
use thiserror::Error;

use crate::workspace::metadata::WorkspaceDiscoveryError;
pub(crate) use package_name::PackageName;
use ruff_db::system::SystemPath;

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct PyProject {
    /// PEP 621-compliant project metadata.
    pub project: Option<Project>,
    /// Tool-specific metadata.
    pub tool: Option<Tool>,
}

impl PyProject {
    pub(crate) fn workspace(&self) -> Option<&Workspace> {
        self.tool
            .as_ref()
            .and_then(|tool| tool.knot.as_ref())
            .and_then(|knot| knot.workspace.as_ref())
    }
}

#[derive(Error, Debug)]
pub enum PyProjectError {
    #[error(transparent)]
    TomlSyntax(#[from] toml::de::Error),
}

impl PyProject {
    pub(crate) fn from_str(content: &str) -> Result<Self, PyProjectError> {
        toml::from_str(content).map_err(PyProjectError::TomlSyntax)
    }
}

/// PEP 621 project metadata (`project`).
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Deserialize, Debug, Clone, PartialEq)]
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Project {
    /// The name of the project.
    ///
    /// Note: Intentionally optional to be more permissive during deserialization.
    /// `PackageMetadata::from_pyproject` reports missing names.
    pub name: Option<PackageName>,
    /// The version of the project.
    pub version: Option<Version>,
    /// The Python versions this project is compatible with.
    pub requires_python: Option<VersionSpecifiers>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub(crate) struct Tool {
    pub knot: Option<Knot>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub(crate) struct Knot {
    pub(crate) workspace: Option<Workspace>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub(crate) struct Workspace {
    pub(crate) members: Option<Vec<String>>,
    pub(crate) exclude: Option<Vec<String>>,
}

impl Workspace {
    pub(crate) fn members(&self) -> &[String] {
        self.members.as_deref().unwrap_or_default()
    }

    pub(crate) fn exclude(&self) -> &[String] {
        self.exclude.as_deref().unwrap_or_default()
    }

    pub(crate) fn is_excluded(
        &self,
        path: &SystemPath,
        workspace_root: &SystemPath,
    ) -> Result<bool, WorkspaceDiscoveryError> {
        for exclude in self.exclude() {
            let full_glob =
                glob::Pattern::new(workspace_root.join(exclude).as_str()).map_err(|error| {
                    WorkspaceDiscoveryError::InvalidMembersPattern {
                        raw_glob: exclude.clone(),
                        source: error,
                    }
                })?;

            if full_glob.matches_path(path.as_std_path()) {
                return Ok(true);
            }
        }

        Ok(false)
    }
}
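
For orientation, a minimal usage sketch (not part of the diff): it only uses items defined in the file above (`PyProject::from_str`, `workspace()`, `members()`, `is_excluded`), but the TOML content and the `/app` paths are invented for illustration, and `SystemPath::new` is assumed to be the usual `ruff_db` constructor.

// Illustrative only: parse a pyproject.toml string and read the `[tool.knot.workspace]` table.
let pyproject = PyProject::from_str(
    r#"
    [project]
    name = "workspace-root"

    [tool.knot.workspace]
    members = ["packages/*"]
    exclude = ["packages/excluded"]
    "#,
)
.expect("the TOML above should parse");

let workspace = pyproject.workspace().expect("the workspace table is present");
assert_eq!(workspace.members(), ["packages/*"]);

// `is_excluded` anchors each exclude glob at the workspace root before matching.
let excluded = workspace
    .is_excluded(SystemPath::new("/app/packages/excluded"), SystemPath::new("/app"))
    .expect("the exclude globs should be valid");
assert!(excluded);
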
@@ -0,0 +1,140 @@
use serde::{Deserialize, Deserializer, Serialize};
use std::ops::Deref;
use thiserror::Error;

/// The normalized name of a package.
///
/// Converts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.
/// For example, `---`, `.`, and `__` are all converted to a single `-`.
///
/// See: <https://packaging.python.org/en/latest/specifications/name-normalization/>
#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub(crate) struct PackageName(String);

impl PackageName {
    /// Create a validated, normalized package name.
    pub(crate) fn new(name: String) -> Result<Self, InvalidPackageNameError> {
        if name.is_empty() {
            return Err(InvalidPackageNameError::Empty);
        }

        if name.starts_with(['-', '_', '.']) {
            return Err(InvalidPackageNameError::NonAlphanumericStart(
                name.chars().next().unwrap(),
            ));
        }

        if name.ends_with(['-', '_', '.']) {
            return Err(InvalidPackageNameError::NonAlphanumericEnd(
                name.chars().last().unwrap(),
            ));
        }

        let Some(start) = name.find(|c: char| {
            !c.is_ascii() || c.is_ascii_uppercase() || matches!(c, '-' | '_' | '.')
        }) else {
            return Ok(Self(name));
        };

        let (already_normalized, maybe_normalized) = name.split_at(start);

        let mut normalized = String::with_capacity(name.len());
        normalized.push_str(already_normalized);
        let mut last = None;

        for c in maybe_normalized.chars() {
            if !c.is_ascii() {
                return Err(InvalidPackageNameError::InvalidCharacter(c));
            }

            if c.is_ascii_uppercase() {
                normalized.push(c.to_ascii_lowercase());
            } else if matches!(c, '-' | '_' | '.') {
                if matches!(last, Some('-' | '_' | '.')) {
                    // Only keep a single instance of `-`, `_` and `.`
                } else {
                    normalized.push('-');
                }
            } else {
                normalized.push(c);
            }

            last = Some(c);
        }

        Ok(Self(normalized))
    }

    /// Returns the underlying package name.
    pub(crate) fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<PackageName> for String {
    fn from(value: PackageName) -> Self {
        value.0
    }
}

impl<'de> Deserialize<'de> for PackageName {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Self::new(s).map_err(serde::de::Error::custom)
    }
}

impl std::fmt::Display for PackageName {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

impl Deref for PackageName {
    type Target = str;
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}

#[derive(Error, Debug)]
pub(crate) enum InvalidPackageNameError {
    #[error("name must start with letter or number but it starts with '{0}'")]
    NonAlphanumericStart(char),
    #[error("name must end with letter or number but it ends with '{0}'")]
    NonAlphanumericEnd(char),
    #[error("valid name consists only of ASCII letters and numbers, period, underscore and hyphen but name contains '{0}'")]
    InvalidCharacter(char),
    #[error("name must not be empty")]
    Empty,
}

#[cfg(test)]
mod tests {
    use super::PackageName;

    #[test]
    fn normalize() {
        let inputs = [
            "friendly-bard",
            "Friendly-Bard",
            "FRIENDLY-BARD",
            "friendly.bard",
            "friendly_bard",
            "friendly--bard",
            "friendly-.bard",
            "FrIeNdLy-._.-bArD",
        ];

        for input in inputs {
            assert_eq!(
                PackageName::new(input.to_string()).unwrap(),
                PackageName::new("friendly-bard".to_string()).unwrap(),
            );
        }
    }
}
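
A small sketch (not part of the diff) of the constructor above, complementing the `normalize` test: normalization lowercases ASCII letters and collapses separator runs, while names that start or end with a separator are rejected.

// Normalization collapses runs of `-`, `_`, and `.` and lowercases ASCII letters.
let name = PackageName::new("FrIeNdLy-._.-bArD".to_string()).expect("valid name");
assert_eq!(name.as_str(), "friendly-bard");

// A leading separator is rejected (`InvalidPackageNameError::NonAlphanumericStart('.')`).
assert!(PackageName::new(".bard".to_string()).is_err());
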
@@ -1,10 +1,12 @@
+use crate::workspace::PackageMetadata;
 use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings, SitePackages};
 use ruff_db::system::{SystemPath, SystemPathBuf};
 
 /// The resolved configurations.
 ///
 /// The main difference to [`Configuration`] is that default values are filled in.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(test, derive(serde::Serialize))]
 pub struct WorkspaceSettings {
     pub(super) program: ProgramSettings,
 }
@@ -16,7 +18,8 @@ impl WorkspaceSettings {
 }
 
 /// The configuration for the workspace or a package.
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+#[cfg_attr(test, derive(serde::Serialize))]
 pub struct Configuration {
     pub target_version: Option<PythonVersion>,
     pub search_paths: SearchPathConfiguration,
@@ -29,17 +32,22 @@ impl Configuration {
         self.search_paths.extend(with.search_paths);
     }
 
-    pub fn into_workspace_settings(self, workspace_root: &SystemPath) -> WorkspaceSettings {
+    pub fn to_workspace_settings(
+        &self,
+        workspace_root: &SystemPath,
+        _packages: &[PackageMetadata],
+    ) -> WorkspaceSettings {
         WorkspaceSettings {
             program: ProgramSettings {
                 target_version: self.target_version.unwrap_or_default(),
-                search_paths: self.search_paths.into_settings(workspace_root),
+                search_paths: self.search_paths.to_settings(workspace_root),
             },
         }
     }
 }
 
 #[derive(Debug, Default, Clone, Eq, PartialEq)]
+#[cfg_attr(test, derive(serde::Serialize))]
 pub struct SearchPathConfiguration {
     /// List of user-provided paths that should take first priority in the module resolution.
     /// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -59,15 +67,19 @@ pub struct SearchPathConfiguration {
 }
 
 impl SearchPathConfiguration {
-    pub fn into_settings(self, workspace_root: &SystemPath) -> SearchPathSettings {
-        let site_packages = self.site_packages.unwrap_or(SitePackages::Known(vec![]));
+    pub fn to_settings(&self, workspace_root: &SystemPath) -> SearchPathSettings {
+        let site_packages = self
+            .site_packages
+            .clone()
+            .unwrap_or(SitePackages::Known(vec![]));
 
         SearchPathSettings {
-            extra_paths: self.extra_paths.unwrap_or_default(),
+            extra_paths: self.extra_paths.clone().unwrap_or_default(),
             src_root: self
+                .clone()
                 .src_root
                 .unwrap_or_else(|| workspace_root.to_path_buf()),
-            custom_typeshed: self.custom_typeshed,
+            custom_typeshed: self.custom_typeshed.clone(),
             site_packages,
         }
     }
 
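
The RON snapshots that follow capture what an empty configuration resolves to. A minimal sketch, assuming only the API shown in this diff (the `/app` root is illustrative and `SystemPath::new` is assumed to be the usual `ruff_db` constructor):

// With no explicit settings, `to_workspace_settings` falls back to the defaults
// captured in the snapshots below: the default Python version (3.13 there),
// the workspace root as `src_root`, and `SitePackages::Known(vec![])`.
let configuration = Configuration::default();
let _settings = configuration.to_workspace_settings(SystemPath::new("/app"), &[]);
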
@@ -0,0 +1,36 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: "&workspace"
---
WorkspaceMetadata(
  root: "/app",
  packages: [
    PackageMetadata(
      name: Name("workspace-root"),
      root: "/app",
      configuration: Configuration(
        target_version: None,
        search_paths: SearchPathConfiguration(
          extra_paths: None,
          src_root: None,
          custom_typeshed: None,
          site_packages: None,
        ),
      ),
    ),
  ],
  settings: WorkspaceSettings(
    program: ProgramSettings(
      target_version: PythonVersion(
        major: 3,
        minor: 13,
      ),
      search_paths: SearchPathSettings(
        extra_paths: [],
        src_root: "/app",
        custom_typeshed: None,
        site_packages: Known([]),
      ),
    ),
  ),
)
@@ -0,0 +1,36 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
  root: "/app",
  packages: [
    PackageMetadata(
      name: Name("workspace-root"),
      root: "/app",
      configuration: Configuration(
        target_version: None,
        search_paths: SearchPathConfiguration(
          extra_paths: None,
          src_root: None,
          custom_typeshed: None,
          site_packages: None,
        ),
      ),
    ),
  ],
  settings: WorkspaceSettings(
    program: ProgramSettings(
      target_version: PythonVersion(
        major: 3,
        minor: 13,
      ),
      search_paths: SearchPathSettings(
        extra_paths: [],
        src_root: "/app",
        custom_typeshed: None,
        site_packages: Known([]),
      ),
    ),
  ),
)
@@ -0,0 +1,36 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
  root: "/app",
  packages: [
    PackageMetadata(
      name: Name("app"),
      root: "/app",
      configuration: Configuration(
        target_version: None,
        search_paths: SearchPathConfiguration(
          extra_paths: None,
          src_root: None,
          custom_typeshed: None,
          site_packages: None,
        ),
      ),
    ),
  ],
  settings: WorkspaceSettings(
    program: ProgramSettings(
      target_version: PythonVersion(
        major: 3,
        minor: 13,
      ),
      search_paths: SearchPathSettings(
        extra_paths: [],
        src_root: "/app",
        custom_typeshed: None,
        site_packages: Known([]),
      ),
    ),
  ),
)
Some files were not shown because too many files have changed in this diff.