Compare commits
93 Commits
0.9.8
...
david/mypy
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
71e07fbae3 | ||
|
|
0af4985067 | ||
|
|
da069aa00c | ||
|
|
ec9ee93d68 | ||
|
|
a73548d0ca | ||
|
|
c60e8a037a | ||
|
|
f19cb86c5d | ||
|
|
36d12cea47 | ||
|
|
a4396b3e6b | ||
|
|
acfb920863 | ||
|
|
08c48b15af | ||
|
|
a7095c4196 | ||
|
|
e79f9171cf | ||
|
|
e517b44a0a | ||
|
|
1275665ecb | ||
|
|
f2b41306d0 | ||
|
|
b02a42d99a | ||
|
|
79443d71eb | ||
|
|
ca974706dd | ||
|
|
b6c7ba4f8e | ||
|
|
c970b794d0 | ||
|
|
335b264fe2 | ||
|
|
0361021863 | ||
|
|
24c8b1242e | ||
|
|
820a31af5d | ||
|
|
a18d8bfa7d | ||
|
|
348c196cb3 | ||
|
|
6d6e524b90 | ||
|
|
0dfa810e9a | ||
|
|
9cd0cdefd3 | ||
|
|
05a4c29344 | ||
|
|
b3c884f4f3 | ||
|
|
6c14225c66 | ||
|
|
0a627ef216 | ||
|
|
a25be4610a | ||
|
|
ce0018c3cb | ||
|
|
48f906e06c | ||
|
|
ebd172e732 | ||
|
|
114abc7cfb | ||
|
|
318f503714 | ||
|
|
d0623888b3 | ||
|
|
23fd4927ae | ||
|
|
cc324abcc2 | ||
|
|
80be0a0115 | ||
|
|
b2e90c3f5c | ||
|
|
d7cbe6b7df | ||
|
|
021640a7a6 | ||
|
|
81bcdcebd3 | ||
|
|
d94a78a134 | ||
|
|
bb44926ca5 | ||
|
|
32c66ec4b7 | ||
|
|
087d92cbf4 | ||
|
|
e7b93f93ef | ||
|
|
c8a06a9be8 | ||
|
|
1977dda079 | ||
|
|
c9ab925275 | ||
|
|
37fbe58b13 | ||
|
|
a3ae76edc0 | ||
|
|
d93ed293eb | ||
|
|
4d92e20e81 | ||
|
|
c4578162d5 | ||
|
|
5d56c2e877 | ||
|
|
c80678a1c0 | ||
|
|
be239b9f25 | ||
|
|
8c899c5409 | ||
|
|
a08f5edf75 | ||
|
|
5efcfd3414 | ||
|
|
79a2c7eaa2 | ||
|
|
eaff95e1ad | ||
|
|
2d9f564ecd | ||
|
|
a6ae86c189 | ||
|
|
08e11e991d | ||
|
|
ec311a7ed0 | ||
|
|
b7de42686f | ||
|
|
ff44500517 | ||
|
|
e924ecbdac | ||
|
|
0d615b8765 | ||
|
|
4431978262 | ||
|
|
ba44e9de13 | ||
|
|
fdf0915283 | ||
|
|
5ca6cc2cc8 | ||
|
|
9bb63495dd | ||
|
|
980faff176 | ||
|
|
0c7c001647 | ||
|
|
09d0b227fb | ||
|
|
091d0af2ab | ||
|
|
3d72138740 | ||
|
|
4a23756024 | ||
|
|
af62f7932b | ||
|
|
0ced8d053c | ||
|
|
a8e171f82c | ||
|
|
cf83584abb | ||
|
|
764aa0e6a1 |
15
.github/renovate.json5
vendored
15
.github/renovate.json5
vendored
@@ -58,12 +58,6 @@
|
||||
description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
// TODO: Remove this once the codebase is upgrade to v4 (https://github.com/astral-sh/ruff/pull/16069)
|
||||
matchPackageNames: ["tailwindcss"],
|
||||
matchManagers: ["npm"],
|
||||
enabled: false,
|
||||
},
|
||||
{
|
||||
// Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
|
||||
// See: https://github.com/astral-sh/uv/issues/3642
|
||||
@@ -101,14 +95,7 @@
|
||||
matchManagers: ["cargo"],
|
||||
matchPackageNames: ["strum"],
|
||||
description: "Weekly update of strum dependencies",
|
||||
},
|
||||
{
|
||||
groupName: "ESLint",
|
||||
matchManagers: ["npm"],
|
||||
matchPackageNames: ["eslint"],
|
||||
allowedVersions: "<9",
|
||||
description: "Constraint ESLint to version 8 until TypeScript-eslint supports ESLint 9", // https://github.com/typescript-eslint/typescript-eslint/issues/8211
|
||||
},
|
||||
}
|
||||
],
|
||||
vulnerabilityAlerts: {
|
||||
commitMessageSuffix: "",
|
||||
|
||||
1
.github/workflows/daily_property_tests.yaml
vendored
1
.github/workflows/daily_property_tests.yaml
vendored
@@ -47,6 +47,7 @@ jobs:
|
||||
run: |
|
||||
export QUICKCHECK_TESTS=100000
|
||||
for _ in {1..5}; do
|
||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored list::property_tests
|
||||
cargo test --locked --release --package red_knot_python_semantic -- --ignored types::property_tests::stable
|
||||
done
|
||||
|
||||
|
||||
93
.github/workflows/mypy_primer.yaml
vendored
Normal file
93
.github/workflows/mypy_primer.yaml
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
name: Run mypy_primer
|
||||
|
||||
permissions: {}
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "crates/red_knot*/**"
|
||||
- "crates/ruff_db"
|
||||
- "crates/ruff_python_ast"
|
||||
- "crates/ruff_python_parser"
|
||||
- ".github/workflows/mypy_primer.yaml"
|
||||
- ".github/workflows/mypy_primer_comment.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTUP_MAX_RETRIES: 10
|
||||
|
||||
jobs:
|
||||
mypy_primer:
|
||||
name: Run mypy_primer
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
path: ruff
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: "ruff"
|
||||
- name: Install Rust toolchain
|
||||
run: rustup show
|
||||
|
||||
- name: Install mypy_primer
|
||||
run: |
|
||||
uv tool install "git+https://github.com/astral-sh/mypy_primer.git@add-red-knot-support"
|
||||
|
||||
- name: Run mypy_primer
|
||||
shell: bash
|
||||
run: |
|
||||
cd ruff
|
||||
|
||||
echo "new commit"
|
||||
git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
|
||||
|
||||
MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
|
||||
git checkout -b base_commit "$MERGE_BASE"
|
||||
echo "base commit"
|
||||
git rev-list --format=%s --max-count=1 base_commit
|
||||
|
||||
cd ..
|
||||
|
||||
# Allow the exit code to be 0 or 1, only fail for actual mypy_primer crashes/bugs
|
||||
uvx mypy_primer \
|
||||
--repo ruff \
|
||||
--type-checker knot \
|
||||
--old base_commit \
|
||||
--new "$GITHUB_SHA" \
|
||||
--project-selector '/(mypy_primer|black|pyp|git-revise|zipp|arrow)$' \
|
||||
--output concise \
|
||||
--debug > mypy_primer.diff || [ $? -eq 1 ]
|
||||
|
||||
# Output diff with ANSI color codes
|
||||
cat mypy_primer.diff
|
||||
|
||||
# Remove ANSI color codes before uploading
|
||||
sed -ie 's/\x1b\[[0-9;]*m//g' mypy_primer.diff
|
||||
|
||||
echo ${{ github.event.number }} > pr-number
|
||||
|
||||
- name: Upload diff
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: mypy_primer_diff
|
||||
path: mypy_primer.diff
|
||||
|
||||
- name: Upload pr-number
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: pr-number
|
||||
path: pr-number
|
||||
97
.github/workflows/mypy_primer_comment.yaml
vendored
Normal file
97
.github/workflows/mypy_primer_comment.yaml
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
name: PR comment (mypy_primer)
|
||||
|
||||
on: # zizmor: ignore[dangerous-triggers]
|
||||
workflow_run:
|
||||
workflows: [Run mypy_primer]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
workflow_run_id:
|
||||
description: The mypy_primer workflow that triggers the workflow run
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-24.04
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: dawidd6/action-download-artifact@v8
|
||||
name: Download PR number
|
||||
with:
|
||||
name: pr-number
|
||||
run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Parse pull request number
|
||||
id: pr-number
|
||||
run: |
|
||||
if [[ -f pr-number ]]
|
||||
then
|
||||
echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- uses: dawidd6/action-download-artifact@v8
|
||||
name: "Download mypy_primer results"
|
||||
id: download-mypy_primer_diff
|
||||
if: steps.pr-number.outputs.pr-number
|
||||
with:
|
||||
name: mypy_primer_diff
|
||||
workflow: mypy_primer.yaml
|
||||
pr: ${{ steps.pr-number.outputs.pr-number }}
|
||||
path: pr/mypy_primer_diff
|
||||
workflow_conclusion: completed
|
||||
if_no_artifact_found: ignore
|
||||
allow_forks: true
|
||||
|
||||
- name: Generate comment content
|
||||
id: generate-comment
|
||||
if: steps.download-mypy_primer_diff.outputs.found_artifact == 'true'
|
||||
run: |
|
||||
# Guard against malicious mypy_primer results that symlink to a secret
|
||||
# file on this runner
|
||||
if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]]
|
||||
then
|
||||
echo "Error: mypy_primer.diff cannot be a symlink"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Note this identifier is used to find the comment to update on
|
||||
# subsequent runs
|
||||
echo '<!-- generated-comment mypy_primer -->' >> comment.txt
|
||||
|
||||
echo '## `mypy_primer` results' >> comment.txt
|
||||
if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
|
||||
echo '<details>' >> comment.txt
|
||||
echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
|
||||
echo '' >> comment.txt
|
||||
echo '```diff' >> comment.txt
|
||||
cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
|
||||
echo '```' >> comment.txt
|
||||
echo '</details>' >> comment.txt
|
||||
else
|
||||
echo 'No ecosystem changes detected ✅' >> comment.txt
|
||||
fi
|
||||
|
||||
echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
|
||||
cat comment.txt >> "$GITHUB_OUTPUT"
|
||||
echo 'EOF' >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Find existing comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
if: steps.generate-comment.outcome == 'success'
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
body-includes: "<!-- generated-comment mypy_primer -->"
|
||||
|
||||
- name: Create or update comment
|
||||
if: steps.find-comment.outcome == 'success'
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.pr-number.outputs.pr-number }}
|
||||
body-path: comment.txt
|
||||
edit-mode: replace
|
||||
@@ -60,7 +60,7 @@ repos:
|
||||
- black==25.1.0
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.29.7
|
||||
rev: v1.30.0
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
@@ -74,7 +74,7 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.9.6
|
||||
rev: v0.9.9
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
@@ -84,7 +84,7 @@ repos:
|
||||
|
||||
# Prettier
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.5.1
|
||||
rev: v3.5.2
|
||||
hooks:
|
||||
- id: prettier
|
||||
types: [yaml]
|
||||
@@ -92,12 +92,12 @@ repos:
|
||||
# zizmor detects security vulnerabilities in GitHub Actions workflows.
|
||||
# Additional configuration for the tool is found in `.github/zizmor.yml`
|
||||
- repo: https://github.com/woodruffw/zizmor-pre-commit
|
||||
rev: v1.3.1
|
||||
rev: v1.4.1
|
||||
hooks:
|
||||
- id: zizmor
|
||||
|
||||
- repo: https://github.com/python-jsonschema/check-jsonschema
|
||||
rev: 0.31.1
|
||||
rev: 0.31.2
|
||||
hooks:
|
||||
- id: check-github-workflows
|
||||
|
||||
|
||||
42
CHANGELOG.md
42
CHANGELOG.md
@@ -1,5 +1,47 @@
|
||||
# Changelog
|
||||
|
||||
## 0.9.10
|
||||
|
||||
### Preview features
|
||||
|
||||
- \[`ruff`\] Add new rule `RUF059`: Unused unpacked assignment ([#16449](https://github.com/astral-sh/ruff/pull/16449))
|
||||
- \[`syntax-errors`\] Detect assignment expressions before Python 3.8 ([#16383](https://github.com/astral-sh/ruff/pull/16383))
|
||||
- \[`syntax-errors`\] Named expressions in decorators before Python 3.9 ([#16386](https://github.com/astral-sh/ruff/pull/16386))
|
||||
- \[`syntax-errors`\] Parenthesized keyword argument names after Python 3.8 ([#16482](https://github.com/astral-sh/ruff/pull/16482))
|
||||
- \[`syntax-errors`\] Positional-only parameters before Python 3.8 ([#16481](https://github.com/astral-sh/ruff/pull/16481))
|
||||
- \[`syntax-errors`\] Tuple unpacking in `return` and `yield` before Python 3.8 ([#16485](https://github.com/astral-sh/ruff/pull/16485))
|
||||
- \[`syntax-errors`\] Type parameter defaults before Python 3.13 ([#16447](https://github.com/astral-sh/ruff/pull/16447))
|
||||
- \[`syntax-errors`\] Type parameter lists before Python 3.12 ([#16479](https://github.com/astral-sh/ruff/pull/16479))
|
||||
- \[`syntax-errors`\] `except*` before Python 3.11 ([#16446](https://github.com/astral-sh/ruff/pull/16446))
|
||||
- \[`syntax-errors`\] `type` statements before Python 3.12 ([#16478](https://github.com/astral-sh/ruff/pull/16478))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Escape template filenames in glob patterns in configuration ([#16407](https://github.com/astral-sh/ruff/pull/16407))
|
||||
- \[`flake8-simplify`\] Exempt unittest context methods for `SIM115` rule ([#16439](https://github.com/astral-sh/ruff/pull/16439))
|
||||
- Formatter: Fix syntax error location in notebooks ([#16499](https://github.com/astral-sh/ruff/pull/16499))
|
||||
- \[`pyupgrade`\] Do not offer fix when at least one target is `global`/`nonlocal` (`UP028`) ([#16451](https://github.com/astral-sh/ruff/pull/16451))
|
||||
- \[`flake8-builtins`\] Ignore variables matching module attribute names (`A001`) ([#16454](https://github.com/astral-sh/ruff/pull/16454))
|
||||
- \[`pylint`\] Convert `code` keyword argument to a positional argument in fix for (`PLR1722`) ([#16424](https://github.com/astral-sh/ruff/pull/16424))
|
||||
|
||||
### CLI
|
||||
|
||||
- Move rule code from `description` to `check_name` in GitLab output serializer ([#16437](https://github.com/astral-sh/ruff/pull/16437))
|
||||
|
||||
### Documentation
|
||||
|
||||
- \[`pydocstyle`\] Clarify that `D417` only checks docstrings with an arguments section ([#16494](https://github.com/astral-sh/ruff/pull/16494))
|
||||
|
||||
## 0.9.9
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix caching of unsupported-syntax errors ([#16425](https://github.com/astral-sh/ruff/pull/16425))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Only show unsupported-syntax errors in editors when preview mode is enabled ([#16429](https://github.com/astral-sh/ruff/pull/16429))
|
||||
|
||||
## 0.9.8
|
||||
|
||||
### Preview features
|
||||
|
||||
244
Cargo.lock
generated
244
Cargo.lock
generated
@@ -124,9 +124,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.96"
|
||||
version = "1.0.97"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4"
|
||||
checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
|
||||
|
||||
[[package]]
|
||||
name = "argfile"
|
||||
@@ -188,9 +188,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.8.0"
|
||||
version = "2.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36"
|
||||
checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
@@ -300,14 +300,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "chrono"
|
||||
version = "0.4.39"
|
||||
version = "0.4.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
|
||||
checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c"
|
||||
dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"num-traits",
|
||||
"windows-targets 0.52.6",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -339,9 +339,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.30"
|
||||
version = "4.5.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92b7b18d71fad5313a1e320fa9897994228ce274b60faa4d694fe0ea89cd9e6d"
|
||||
checksum = "027bb0d98429ae334a8698531da7077bdf906419543a35a55c2cb1b66437d767"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -349,9 +349,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.30"
|
||||
version = "4.5.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a35db2071778a7344791a4fb4f95308b5673d219dee3ae348b86642574ecc90c"
|
||||
checksum = "5589e0cba072e0f3d23791efac0fd8627b49c829c196a492e88168e6a669d863"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -399,7 +399,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -416,16 +416,16 @@ checksum = "8c41dc435a7b98e4608224bbf65282309f5403719df9113621b30f8b6f74e2f4"
|
||||
dependencies = [
|
||||
"nix",
|
||||
"terminfo",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"which",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codspeed"
|
||||
version = "2.8.0"
|
||||
version = "2.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "25d2f5a6570db487f5258e0bded6352fa2034c2aeb46bb5cc3ff060a0fcfba2f"
|
||||
checksum = "de4b67ff8985f3993f06167d71cf4aec178b0a1580f91a987170c59d60021103"
|
||||
dependencies = [
|
||||
"colored 2.2.0",
|
||||
"libc",
|
||||
@@ -436,9 +436,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "codspeed-criterion-compat"
|
||||
version = "2.8.0"
|
||||
version = "2.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f53a55558dedec742b14aae3c5fec389361b8b5ca28c1aadf09dd91faf710074"
|
||||
checksum = "68403d768ed1def18a87e2306676781314448393ecf0d3057c4527cabf524a3d"
|
||||
dependencies = [
|
||||
"codspeed",
|
||||
"colored 2.2.0",
|
||||
@@ -458,7 +458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -467,7 +467,7 @@ version = "3.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -690,7 +690,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -701,7 +701,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806"
|
||||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -771,7 +771,7 @@ dependencies = [
|
||||
"glob",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -803,7 +803,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -884,7 +884,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1057,9 +1057,9 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
|
||||
|
||||
[[package]]
|
||||
name = "globset"
|
||||
version = "0.4.15"
|
||||
version = "0.4.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19"
|
||||
checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
@@ -1074,7 +1074,7 @@ version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"ignore",
|
||||
"walkdir",
|
||||
]
|
||||
@@ -1295,7 +1295,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1387,9 +1387,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indoc"
|
||||
version = "2.0.5"
|
||||
version = "2.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"
|
||||
checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
@@ -1397,7 +1397,7 @@ version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"inotify-sys",
|
||||
"libc",
|
||||
]
|
||||
@@ -1413,9 +1413,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "insta"
|
||||
version = "1.42.1"
|
||||
version = "1.42.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71c1b125e30d93896b365e156c33dadfffab45ee8400afcbba4752f59de08a86"
|
||||
checksum = "50259abbaa67d11d2bcafc7ba1d094ed7a0c70e3ce893f0d0997f73558cb3084"
|
||||
dependencies = [
|
||||
"console",
|
||||
"globset",
|
||||
@@ -1460,7 +1460,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1471,7 +1471,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37"
|
||||
dependencies = [
|
||||
"hermit-abi 0.4.0",
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1602,7 +1602,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3bf66548c351bcaed792ef3e2b430cc840fbde504e09da6b29ed114ca60dcd4b"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1621,7 +1621,7 @@ version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
]
|
||||
@@ -1804,7 +1804,7 @@ version = "0.29.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"cfg-if",
|
||||
"cfg_aliases",
|
||||
"libc",
|
||||
@@ -1832,7 +1832,7 @@ version = "8.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"filetime",
|
||||
"fsevent-sys",
|
||||
"inotify",
|
||||
@@ -2081,7 +2081,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"ucd-trie",
|
||||
]
|
||||
|
||||
@@ -2105,7 +2105,7 @@ dependencies = [
|
||||
"pest_meta",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2174,7 +2174,7 @@ checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2243,9 +2243,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.93"
|
||||
version = "1.0.94"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
|
||||
checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
@@ -2275,7 +2275,7 @@ dependencies = [
|
||||
"newtype-uuid",
|
||||
"quick-xml",
|
||||
"strip-ansi-escapes",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2310,9 +2310,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.38"
|
||||
version = "1.0.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc"
|
||||
checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
@@ -2453,7 +2453,7 @@ dependencies = [
|
||||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"toml",
|
||||
"tracing",
|
||||
]
|
||||
@@ -2463,7 +2463,7 @@ name = "red_knot_python_semantic"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"camino",
|
||||
"compact_str",
|
||||
"countme",
|
||||
@@ -2499,7 +2499,7 @@ dependencies = [
|
||||
"strum_macros",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2541,6 +2541,7 @@ dependencies = [
|
||||
"regex",
|
||||
"ruff_db",
|
||||
"ruff_index",
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_trivia",
|
||||
"ruff_source_file",
|
||||
@@ -2549,6 +2550,8 @@ dependencies = [
|
||||
"salsa",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"toml",
|
||||
]
|
||||
|
||||
@@ -2585,7 +2588,7 @@ version = "0.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2656,13 +2659,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.9.8"
|
||||
version = "0.9.10"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
"assert_fs",
|
||||
"bincode",
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"cachedir",
|
||||
"chrono",
|
||||
"clap",
|
||||
@@ -2692,6 +2695,7 @@ dependencies = [
|
||||
"ruff_notebook",
|
||||
"ruff_python_ast",
|
||||
"ruff_python_formatter",
|
||||
"ruff_python_parser",
|
||||
"ruff_server",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
@@ -2703,7 +2707,7 @@ dependencies = [
|
||||
"strum",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"tikv-jemallocator",
|
||||
"toml",
|
||||
"tracing",
|
||||
@@ -2786,7 +2790,7 @@ dependencies = [
|
||||
"schemars",
|
||||
"serde",
|
||||
"tempfile",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
@@ -2890,11 +2894,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.9.8"
|
||||
version = "0.9.10"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"chrono",
|
||||
"clap",
|
||||
"colored 3.0.0",
|
||||
@@ -2941,7 +2945,7 @@ dependencies = [
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"test-case",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-normalization",
|
||||
@@ -2958,7 +2962,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"ruff_python_trivia",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2975,7 +2979,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"test-case",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2984,7 +2988,7 @@ name = "ruff_python_ast"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"compact_str",
|
||||
"is-macro",
|
||||
"itertools 0.14.0",
|
||||
@@ -3049,7 +3053,7 @@ dependencies = [
|
||||
"similar",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -3068,7 +3072,7 @@ dependencies = [
|
||||
name = "ruff_python_literal"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"itertools 0.14.0",
|
||||
"ruff_python_ast",
|
||||
"unic-ucd-category",
|
||||
@@ -3079,7 +3083,7 @@ name = "ruff_python_parser"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"bstr",
|
||||
"compact_str",
|
||||
"insta",
|
||||
@@ -3090,6 +3094,8 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.1.1",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"static_assertions",
|
||||
"unicode-ident",
|
||||
"unicode-normalization",
|
||||
@@ -3111,7 +3117,7 @@ dependencies = [
|
||||
name = "ruff_python_semantic"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"is-macro",
|
||||
"ruff_cache",
|
||||
"ruff_index",
|
||||
@@ -3130,7 +3136,7 @@ dependencies = [
|
||||
name = "ruff_python_stdlib"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
@@ -3183,7 +3189,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"thiserror 2.0.11",
|
||||
"thiserror 2.0.12",
|
||||
"toml",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
@@ -3210,7 +3216,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.9.8"
|
||||
version = "0.9.10"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -3300,11 +3306,11 @@ version = "0.38.44"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3353,7 +3359,7 @@ dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -3368,9 +3374,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "0.8.21"
|
||||
version = "0.8.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92"
|
||||
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
|
||||
dependencies = [
|
||||
"dyn-clone",
|
||||
"schemars_derive",
|
||||
@@ -3380,14 +3386,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "0.8.21"
|
||||
version = "0.8.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e"
|
||||
checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"serde_derive_internals",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3410,9 +3416,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.218"
|
||||
version = "1.0.219"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60"
|
||||
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -3430,13 +3436,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.218"
|
||||
version = "1.0.219"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b"
|
||||
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3447,14 +3453,14 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.139"
|
||||
version = "1.0.140"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6"
|
||||
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
@@ -3470,7 +3476,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3511,7 +3517,7 @@ dependencies = [
|
||||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3642,7 +3648,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3658,9 +3664,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.98"
|
||||
version = "2.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1"
|
||||
checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3675,7 +3681,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3689,7 +3695,7 @@ dependencies = [
|
||||
"getrandom 0.3.1",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3747,7 +3753,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3758,7 +3764,7 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"test-case-core",
|
||||
]
|
||||
|
||||
@@ -3773,11 +3779,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "2.0.11"
|
||||
version = "2.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc"
|
||||
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
|
||||
dependencies = [
|
||||
"thiserror-impl 2.0.11",
|
||||
"thiserror-impl 2.0.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3788,18 +3794,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "2.0.11"
|
||||
version = "2.0.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2"
|
||||
checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3930,7 +3936,7 @@ checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4081,9 +4087,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.17"
|
||||
version = "1.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "00e2473a93778eb0bad35909dff6a10d28e63f792f16ed15e404fca9d5eeedbe"
|
||||
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
@@ -4197,7 +4203,7 @@ checksum = "d28dd23acb5f2fa7bd2155ab70b960e770596b3bb6395119b40476c3655dfba4"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4319,7 +4325,7 @@ dependencies = [
|
||||
"log",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
@@ -4354,7 +4360,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
@@ -4389,7 +4395,7 @@ checksum = "17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4455,7 +4461,7 @@ version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4504,7 +4510,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4515,9 +4521,15 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
|
||||
|
||||
[[package]]
|
||||
name = "windows-result"
|
||||
version = "0.2.0"
|
||||
@@ -4706,7 +4718,7 @@ version = "0.33.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
|
||||
dependencies = [
|
||||
"bitflags 2.8.0",
|
||||
"bitflags 2.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4747,7 +4759,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -4778,7 +4790,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4789,7 +4801,7 @@ checksum = "d3931cb58c62c13adec22e38686b559c86a30565e16ad6e8510a337cedc611e1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4809,7 +4821,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
@@ -4832,7 +4844,7 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.98",
|
||||
"syn 2.0.100",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -149,8 +149,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
|
||||
|
||||
# For a specific version.
|
||||
curl -LsSf https://astral.sh/ruff/0.9.8/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.9.8/install.ps1 | iex"
|
||||
curl -LsSf https://astral.sh/ruff/0.9.10/install.sh | sh
|
||||
powershell -c "irm https://astral.sh/ruff/0.9.10/install.ps1 | iex"
|
||||
```
|
||||
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
@@ -183,7 +183,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
|
||||
```yaml
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.9.8
|
||||
rev: v0.9.10
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
|
||||
@@ -23,6 +23,10 @@ extend-ignore-re = [
|
||||
# Line ignore with trailing "spellchecker:disable-line"
|
||||
"(?Rm)^.*#\\s*spellchecker:disable-line$",
|
||||
"LICENSEs",
|
||||
# Various third party dependencies uses `typ` as struct field names (e.g., lsp_types::LogMessageParams)
|
||||
"typ",
|
||||
# TODO: Remove this once the `TYP` redirects are removed from `rule_redirects.rs`
|
||||
"TYP",
|
||||
]
|
||||
|
||||
[default.extend-identifiers]
|
||||
|
||||
@@ -32,6 +32,13 @@ pub(crate) enum Command {
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
pub(crate) struct CheckCommand {
|
||||
/// List of files or directories to check.
|
||||
#[clap(
|
||||
help = "List of files or directories to check [default: the project root]",
|
||||
value_name = "PATH"
|
||||
)]
|
||||
pub paths: Vec<SystemPathBuf>,
|
||||
|
||||
/// Run the command within the given project directory.
|
||||
///
|
||||
/// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
|
||||
@@ -76,7 +83,7 @@ pub(crate) struct CheckCommand {
|
||||
#[arg(long)]
|
||||
pub(crate) exit_zero: bool,
|
||||
|
||||
/// Run in watch mode by re-running whenever files change.
|
||||
/// Watch files for changes and recheck files related to the changed files.
|
||||
#[arg(long, short = 'W')]
|
||||
pub(crate) watch: bool,
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ use red_knot_project::watch::ProjectWatcher;
|
||||
use red_knot_project::{watch, Db};
|
||||
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||
use red_knot_server::run_server;
|
||||
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, Severity};
|
||||
use ruff_db::diagnostic::{DisplayDiagnosticConfig, OldDiagnosticTrait, Severity};
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
|
||||
@@ -107,6 +107,12 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
.transpose()?
|
||||
.unwrap_or_else(|| cwd.clone());
|
||||
|
||||
let check_paths: Vec<_> = args
|
||||
.paths
|
||||
.iter()
|
||||
.map(|path| SystemPath::absolute(path, &cwd))
|
||||
.collect();
|
||||
|
||||
let system = OsSystem::new(cwd);
|
||||
let watch = args.watch;
|
||||
let exit_zero = args.exit_zero;
|
||||
@@ -118,6 +124,10 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
|
||||
let mut db = ProjectDatabase::new(project_metadata, system)?;
|
||||
|
||||
if !check_paths.is_empty() {
|
||||
db.project().set_included_paths(&mut db, check_paths);
|
||||
}
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
@@ -255,14 +265,43 @@ impl MainLoop {
|
||||
Severity::Error
|
||||
};
|
||||
|
||||
let failed = result
|
||||
.iter()
|
||||
.any(|diagnostic| diagnostic.severity() >= min_error_severity);
|
||||
|
||||
if check_revision == revision {
|
||||
if db.project().files(db).is_empty() {
|
||||
tracing::warn!("No python files found under the given path(s)");
|
||||
}
|
||||
|
||||
let mut stdout = stdout().lock();
|
||||
for diagnostic in result {
|
||||
writeln!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
||||
|
||||
if result.is_empty() {
|
||||
writeln!(stdout, "All checks passed!")?;
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
} else {
|
||||
let mut failed = false;
|
||||
let diagnostics_count = result.len();
|
||||
|
||||
for diagnostic in result {
|
||||
writeln!(stdout, "{}", diagnostic.display(db, &display_config))?;
|
||||
|
||||
failed |= diagnostic.severity() >= min_error_severity;
|
||||
}
|
||||
|
||||
writeln!(
|
||||
stdout,
|
||||
"Found {} diagnostic{}",
|
||||
diagnostics_count,
|
||||
if diagnostics_count > 1 { "s" } else { "" }
|
||||
)?;
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(if failed {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tracing::debug!(
|
||||
@@ -270,14 +309,6 @@ impl MainLoop {
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(if failed {
|
||||
ExitStatus::Failure
|
||||
} else {
|
||||
ExitStatus::Success
|
||||
});
|
||||
}
|
||||
|
||||
tracing::trace!("Counts after last check:\n{}", countme::get_all());
|
||||
}
|
||||
|
||||
@@ -322,7 +353,8 @@ impl MainLoopCancellationToken {
|
||||
enum MainLoopMessage {
|
||||
CheckWorkspace,
|
||||
CheckCompleted {
|
||||
result: Vec<Box<dyn Diagnostic>>,
|
||||
/// The diagnostics that were found during the check.
|
||||
result: Vec<Box<dyn OldDiagnosticTrait>>,
|
||||
revision: u64,
|
||||
},
|
||||
ApplyChanges(Vec<watch::ChangeEvent>),
|
||||
|
||||
@@ -28,7 +28,7 @@ fn config_override() -> anyhow::Result<()> {
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -40,16 +40,18 @@ fn config_override() -> anyhow::Result<()> {
|
||||
| ^^^^^^^^^^^^ Type `<module 'sys'>` has no attribute `last_exc`
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--python-version").arg("3.12"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
Ok(())
|
||||
@@ -98,7 +100,7 @@ fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
|
||||
])?;
|
||||
|
||||
// Make sure that the CLI fails when the `libs` directory is not in the search path.
|
||||
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")), @r###"
|
||||
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -111,16 +113,18 @@ fn cli_arguments_are_relative_to_the_current_directory() -> anyhow::Result<()> {
|
||||
4 | stat = add(10, 15)
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")).arg("--extra-search-path").arg("../libs"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
Ok(())
|
||||
@@ -168,11 +172,12 @@ fn paths_in_configuration_files_are_relative_to_the_project_root() -> anyhow::Re
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().current_dir(case.root().join("child")), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
Ok(())
|
||||
@@ -195,7 +200,7 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
|
||||
|
||||
// Assert that there's a possibly unresolved reference diagnostic
|
||||
// and that division-by-zero has a severity of error by default.
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -217,9 +222,10 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
|
||||
| - Name `x` used when possibly not defined
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
case.write_file(
|
||||
"pyproject.toml",
|
||||
@@ -230,7 +236,7 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -243,9 +249,10 @@ fn configuration_rule_severity() -> anyhow::Result<()> {
|
||||
4 | for a in range(0, y):
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -269,7 +276,7 @@ fn cli_rule_severity() -> anyhow::Result<()> {
|
||||
|
||||
// Assert that there's a possibly unresolved reference diagnostic
|
||||
// and that division-by-zero has a severity of error by default.
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -302,9 +309,10 @@ fn cli_rule_severity() -> anyhow::Result<()> {
|
||||
| - Name `x` used when possibly not defined
|
||||
|
|
||||
|
||||
Found 3 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case
|
||||
@@ -315,7 +323,7 @@ fn cli_rule_severity() -> anyhow::Result<()> {
|
||||
.arg("division-by-zero")
|
||||
.arg("--warn")
|
||||
.arg("unresolved-import"),
|
||||
@r###"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -339,9 +347,10 @@ fn cli_rule_severity() -> anyhow::Result<()> {
|
||||
6 | for a in range(0, y):
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
@@ -365,7 +374,7 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
|
||||
|
||||
// Assert that there's a possibly unresolved reference diagnostic
|
||||
// and that division-by-zero has a severity of error by default.
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -387,9 +396,10 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
|
||||
| - Name `x` used when possibly not defined
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case
|
||||
@@ -401,7 +411,7 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
|
||||
// Override the error severity with warning
|
||||
.arg("--ignore")
|
||||
.arg("possibly-unresolved-reference"),
|
||||
@r###"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -414,9 +424,10 @@ fn cli_rule_severity_precedence() -> anyhow::Result<()> {
|
||||
4 | for a in range(0, y):
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
@@ -436,7 +447,7 @@ fn configuration_unknown_rules() -> anyhow::Result<()> {
|
||||
("test.py", "print(10)"),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -448,9 +459,10 @@ fn configuration_unknown_rules() -> anyhow::Result<()> {
|
||||
| --------------- Unknown lint rule `division-by-zer`
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
"#);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -460,15 +472,16 @@ fn configuration_unknown_rules() -> anyhow::Result<()> {
|
||||
fn cli_unknown_rules() -> anyhow::Result<()> {
|
||||
let case = TestCase::with_file("test.py", "print(10)")?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r###"
|
||||
assert_cmd_snapshot!(case.command().arg("--ignore").arg("division-by-zer"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
warning: unknown-rule: Unknown lint rule `division-by-zer`
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -477,7 +490,7 @@ fn cli_unknown_rules() -> anyhow::Result<()> {
|
||||
fn exit_code_only_warnings() -> anyhow::Result<()> {
|
||||
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -488,9 +501,10 @@ fn exit_code_only_warnings() -> anyhow::Result<()> {
|
||||
| - Name `x` used when not defined
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -505,7 +519,7 @@ fn exit_code_only_info() -> anyhow::Result<()> {
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -517,9 +531,10 @@ fn exit_code_only_info() -> anyhow::Result<()> {
|
||||
| -------------- info: Revealed type is `Literal[1]`
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -534,7 +549,7 @@ fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -546,9 +561,10 @@ fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
|
||||
| -------------- info: Revealed type is `Literal[1]`
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -557,7 +573,7 @@ fn exit_code_only_info_and_error_on_warning_is_true() -> anyhow::Result<()> {
|
||||
fn exit_code_no_errors_but_error_on_warning_is_true() -> anyhow::Result<()> {
|
||||
let case = TestCase::with_file("test.py", r"print(x) # [unresolved-reference]")?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -568,9 +584,10 @@ fn exit_code_no_errors_but_error_on_warning_is_true() -> anyhow::Result<()> {
|
||||
| - Name `x` used when not defined
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -588,7 +605,7 @@ fn exit_code_no_errors_but_error_on_warning_is_enabled_in_configuration() -> any
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -599,9 +616,10 @@ fn exit_code_no_errors_but_error_on_warning_is_enabled_in_configuration() -> any
|
||||
| - Name `x` used when not defined
|
||||
|
|
||||
|
||||
Found 1 diagnostic
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -616,7 +634,7 @@ fn exit_code_both_warnings_and_errors() -> anyhow::Result<()> {
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command(), @r###"
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -636,9 +654,10 @@ fn exit_code_both_warnings_and_errors() -> anyhow::Result<()> {
|
||||
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -653,7 +672,7 @@ fn exit_code_both_warnings_and_errors_and_error_on_warning_is_true() -> anyhow::
|
||||
"###,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r###"
|
||||
assert_cmd_snapshot!(case.command().arg("--error-on-warning"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -673,9 +692,10 @@ fn exit_code_both_warnings_and_errors_and_error_on_warning_is_true() -> anyhow::
|
||||
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -690,7 +710,7 @@ fn exit_code_exit_zero_is_true() -> anyhow::Result<()> {
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--exit-zero"), @r###"
|
||||
assert_cmd_snapshot!(case.command().arg("--exit-zero"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -710,9 +730,10 @@ fn exit_code_exit_zero_is_true() -> anyhow::Result<()> {
|
||||
| ^ Cannot subscript object of type `Literal[4]` with no `__getitem__` method
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -749,7 +770,7 @@ fn user_configuration() -> anyhow::Result<()> {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case.command().current_dir(case.root().join("project")).env(config_env_var, config_directory.as_os_str()),
|
||||
@r###"
|
||||
@r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
@@ -771,9 +792,10 @@ fn user_configuration() -> anyhow::Result<()> {
|
||||
| - Name `x` used when possibly not defined
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
// The user-level configuration promotes `possibly-unresolved-reference` to an error.
|
||||
@@ -790,7 +812,7 @@ fn user_configuration() -> anyhow::Result<()> {
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case.command().current_dir(case.root().join("project")).env(config_env_var, config_directory.as_os_str()),
|
||||
@r###"
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
@@ -812,9 +834,134 @@ fn user_configuration() -> anyhow::Result<()> {
|
||||
| ^ Name `x` used when possibly not defined
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_specific_paths() -> anyhow::Result<()> {
|
||||
let case = TestCase::with_files([
|
||||
(
|
||||
"project/main.py",
|
||||
r#"
|
||||
y = 4 / 0 # error: division-by-zero
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"project/tests/test_main.py",
|
||||
r#"
|
||||
import does_not_exist # error: unresolved-import
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"project/other.py",
|
||||
r#"
|
||||
from main2 import z # error: unresolved-import
|
||||
|
||||
print(z)
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case.command(),
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
error: lint:unresolved-import
|
||||
--> <temp_dir>/project/tests/test_main.py:2:8
|
||||
|
|
||||
2 | import does_not_exist # error: unresolved-import
|
||||
| ^^^^^^^^^^^^^^ Cannot resolve import `does_not_exist`
|
||||
|
|
||||
|
||||
error: lint:division-by-zero
|
||||
--> <temp_dir>/project/main.py:2:5
|
||||
|
|
||||
2 | y = 4 / 0 # error: division-by-zero
|
||||
| ^^^^^ Cannot divide object of type `Literal[4]` by zero
|
||||
|
|
||||
|
||||
error: lint:unresolved-import
|
||||
--> <temp_dir>/project/other.py:2:6
|
||||
|
|
||||
2 | from main2 import z # error: unresolved-import
|
||||
| ^^^^^ Cannot resolve import `main2`
|
||||
3 |
|
||||
4 | print(z)
|
||||
|
|
||||
|
||||
Found 3 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
);
|
||||
|
||||
// Now check only the `tests` and `other.py` files.
|
||||
// We should no longer see any diagnostics related to `main.py`.
|
||||
assert_cmd_snapshot!(
|
||||
case.command().arg("project/tests").arg("project/other.py"),
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
error: lint:unresolved-import
|
||||
--> <temp_dir>/project/tests/test_main.py:2:8
|
||||
|
|
||||
2 | import does_not_exist # error: unresolved-import
|
||||
| ^^^^^^^^^^^^^^ Cannot resolve import `does_not_exist`
|
||||
|
|
||||
|
||||
error: lint:unresolved-import
|
||||
--> <temp_dir>/project/other.py:2:6
|
||||
|
|
||||
2 | from main2 import z # error: unresolved-import
|
||||
| ^^^^^ Cannot resolve import `main2`
|
||||
3 |
|
||||
4 | print(z)
|
||||
|
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_non_existing_path() -> anyhow::Result<()> {
|
||||
let case = TestCase::with_files([])?;
|
||||
|
||||
let mut settings = insta::Settings::clone_current();
|
||||
settings.add_filter(
|
||||
®ex::escape("The system cannot find the path specified. (os error 3)"),
|
||||
"No such file or directory (os error 2)",
|
||||
);
|
||||
let _s = settings.bind_to_scope();
|
||||
|
||||
assert_cmd_snapshot!(
|
||||
case.command().arg("project/main.py").arg("project/tests"),
|
||||
@r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
error: io: `<temp_dir>/project/main.py`: No such file or directory (os error 2)
|
||||
|
||||
error: io: `<temp_dir>/project/tests`: No such file or directory (os error 2)
|
||||
|
||||
Found 2 diagnostics
|
||||
|
||||
----- stderr -----
|
||||
WARN No python files found under the given path(s)
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
#![allow(clippy::disallowed_names)]
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::io::Write;
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
@@ -193,11 +194,29 @@ impl TestCase {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_project_files(&self) -> Vec<File> {
|
||||
let files = self.db().project().files(self.db());
|
||||
let mut collected: Vec<_> = files.into_iter().collect();
|
||||
collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
|
||||
collected
|
||||
#[track_caller]
|
||||
fn assert_indexed_project_files(&self, expected: impl IntoIterator<Item = File>) {
|
||||
let mut expected: HashSet<_> = expected.into_iter().collect();
|
||||
|
||||
let actual = self.db().project().files(self.db());
|
||||
for file in &actual {
|
||||
assert!(
|
||||
expected.remove(&file),
|
||||
"Indexed project files contains '{}' which was not expected.",
|
||||
file.path(self.db())
|
||||
);
|
||||
}
|
||||
|
||||
if !expected.is_empty() {
|
||||
let paths: Vec<_> = expected
|
||||
.iter()
|
||||
.map(|file| file.path(self.db()).as_str())
|
||||
.collect();
|
||||
panic!(
|
||||
"Indexed project files are missing the following files: {:?}",
|
||||
paths.join(", ")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn system_file(&self, path: impl AsRef<SystemPath>) -> Result<File, FileError> {
|
||||
@@ -222,13 +241,15 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
trait SetupFiles {
|
||||
fn setup(self, context: &SetupContext) -> anyhow::Result<()>;
|
||||
trait Setup {
|
||||
fn setup(self, context: &mut SetupContext) -> anyhow::Result<()>;
|
||||
}
|
||||
|
||||
struct SetupContext<'a> {
|
||||
system: &'a OsSystem,
|
||||
root_path: &'a SystemPath,
|
||||
options: Option<Options>,
|
||||
included_paths: Option<Vec<SystemPathBuf>>,
|
||||
}
|
||||
|
||||
impl<'a> SetupContext<'a> {
|
||||
@@ -251,55 +272,77 @@ impl<'a> SetupContext<'a> {
|
||||
fn join_root_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
|
||||
self.root_path().join(relative)
|
||||
}
|
||||
|
||||
fn write_project_file(
|
||||
&self,
|
||||
relative_path: impl AsRef<SystemPath>,
|
||||
content: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let relative_path = relative_path.as_ref();
|
||||
let absolute_path = self.join_project_path(relative_path);
|
||||
Self::write_file_impl(absolute_path, content)
|
||||
}
|
||||
|
||||
fn write_file(
|
||||
&self,
|
||||
relative_path: impl AsRef<SystemPath>,
|
||||
content: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let relative_path = relative_path.as_ref();
|
||||
let absolute_path = self.join_root_path(relative_path);
|
||||
Self::write_file_impl(absolute_path, content)
|
||||
}
|
||||
|
||||
fn write_file_impl(path: impl AsRef<SystemPath>, content: &str) -> anyhow::Result<()> {
|
||||
let path = path.as_ref();
|
||||
if let Some(parent) = path.parent() {
|
||||
std::fs::create_dir_all(parent)
|
||||
.with_context(|| format!("Failed to create parent directory for file `{path}`"))?;
|
||||
}
|
||||
|
||||
let mut file = std::fs::File::create(path.as_std_path())
|
||||
.with_context(|| format!("Failed to open file `{path}`"))?;
|
||||
file.write_all(content.as_bytes())
|
||||
.with_context(|| format!("Failed to write to file `{path}`"))?;
|
||||
file.sync_data()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_options(&mut self, options: Options) {
|
||||
self.options = Some(options);
|
||||
}
|
||||
|
||||
fn set_included_paths(&mut self, paths: Vec<SystemPathBuf>) {
|
||||
self.included_paths = Some(paths);
|
||||
}
|
||||
}
|
||||
|
||||
impl<const N: usize, P> SetupFiles for [(P, &'static str); N]
|
||||
impl<const N: usize, P> Setup for [(P, &'static str); N]
|
||||
where
|
||||
P: AsRef<SystemPath>,
|
||||
{
|
||||
fn setup(self, context: &SetupContext) -> anyhow::Result<()> {
|
||||
fn setup(self, context: &mut SetupContext) -> anyhow::Result<()> {
|
||||
for (relative_path, content) in self {
|
||||
let relative_path = relative_path.as_ref();
|
||||
let absolute_path = context.join_project_path(relative_path);
|
||||
if let Some(parent) = absolute_path.parent() {
|
||||
std::fs::create_dir_all(parent).with_context(|| {
|
||||
format!("Failed to create parent directory for file `{relative_path}`")
|
||||
})?;
|
||||
}
|
||||
|
||||
let mut file = std::fs::File::create(absolute_path.as_std_path())
|
||||
.with_context(|| format!("Failed to open file `{relative_path}`"))?;
|
||||
file.write_all(content.as_bytes())
|
||||
.with_context(|| format!("Failed to write to file `{relative_path}`"))?;
|
||||
file.sync_data()?;
|
||||
context.write_project_file(relative_path, content)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<F> SetupFiles for F
|
||||
impl<F> Setup for F
|
||||
where
|
||||
F: FnOnce(&SetupContext) -> anyhow::Result<()>,
|
||||
F: FnOnce(&mut SetupContext) -> anyhow::Result<()>,
|
||||
{
|
||||
fn setup(self, context: &SetupContext) -> anyhow::Result<()> {
|
||||
fn setup(self, context: &mut SetupContext) -> anyhow::Result<()> {
|
||||
self(context)
|
||||
}
|
||||
}
|
||||
|
||||
fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
|
||||
where
|
||||
F: SetupFiles,
|
||||
{
|
||||
setup_with_options(setup_files, |_context| None)
|
||||
}
|
||||
|
||||
fn setup_with_options<F>(
|
||||
setup_files: F,
|
||||
create_options: impl FnOnce(&SetupContext) -> Option<Options>,
|
||||
) -> anyhow::Result<TestCase>
|
||||
where
|
||||
F: SetupFiles,
|
||||
F: Setup,
|
||||
{
|
||||
let temp_dir = tempfile::tempdir()?;
|
||||
|
||||
@@ -325,16 +368,18 @@ where
|
||||
.with_context(|| format!("Failed to create project directory `{project_path}`"))?;
|
||||
|
||||
let system = OsSystem::new(&project_path);
|
||||
let setup_context = SetupContext {
|
||||
let mut setup_context = SetupContext {
|
||||
system: &system,
|
||||
root_path: &root_path,
|
||||
options: None,
|
||||
included_paths: None,
|
||||
};
|
||||
|
||||
setup_files
|
||||
.setup(&setup_context)
|
||||
.setup(&mut setup_context)
|
||||
.context("Failed to setup test files")?;
|
||||
|
||||
if let Some(options) = create_options(&setup_context) {
|
||||
if let Some(options) = setup_context.options {
|
||||
std::fs::write(
|
||||
project_path.join("pyproject.toml").as_std_path(),
|
||||
toml::to_string(&PyProject {
|
||||
@@ -348,6 +393,8 @@ where
|
||||
.context("Failed to write configuration")?;
|
||||
}
|
||||
|
||||
let included_paths = setup_context.included_paths;
|
||||
|
||||
let mut project = ProjectMetadata::discover(&project_path, &system)?;
|
||||
project.apply_configuration_files(&system)?;
|
||||
|
||||
@@ -363,7 +410,11 @@ where
|
||||
.with_context(|| format!("Failed to create search path `{path}`"))?;
|
||||
}
|
||||
|
||||
let db = ProjectDatabase::new(project, system)?;
|
||||
let mut db = ProjectDatabase::new(project, system)?;
|
||||
|
||||
if let Some(included_paths) = included_paths {
|
||||
db.project().set_included_paths(&mut db, included_paths);
|
||||
}
|
||||
|
||||
let (sender, receiver) = crossbeam::channel::unbounded();
|
||||
let watcher = directory_watcher(move |events| sender.send(events).unwrap())
|
||||
@@ -425,7 +476,7 @@ fn new_file() -> anyhow::Result<()> {
|
||||
let foo_path = case.project_path("foo.py");
|
||||
|
||||
assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound));
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file]);
|
||||
case.assert_indexed_project_files([bar_file]);
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
@@ -435,7 +486,7 @@ fn new_file() -> anyhow::Result<()> {
|
||||
|
||||
let foo = case.system_file(&foo_path).expect("foo.py to exist.");
|
||||
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file, foo]);
|
||||
case.assert_indexed_project_files([bar_file, foo]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -448,7 +499,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
|
||||
let foo_path = case.project_path("foo.py");
|
||||
|
||||
assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound));
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file]);
|
||||
case.assert_indexed_project_files([bar_file]);
|
||||
|
||||
std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
@@ -457,15 +508,16 @@ fn new_ignored_file() -> anyhow::Result<()> {
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert!(case.system_file(&foo_path).is_ok());
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file]);
|
||||
case.assert_indexed_project_files([bar_file]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_non_project_file() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options([("bar.py", "")], |context| {
|
||||
Some(Options {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
context.write_project_file("bar.py", "")?;
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
extra_paths: Some(vec![RelativePathBuf::cli(
|
||||
context.join_root_path("site_packages"),
|
||||
@@ -473,13 +525,15 @@ fn new_non_project_file() -> anyhow::Result<()> {
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let bar_path = case.project_path("bar.py");
|
||||
let bar_file = case.system_file(&bar_path).unwrap();
|
||||
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file]);
|
||||
case.assert_indexed_project_files([bar_file]);
|
||||
|
||||
// Add a file to site packages
|
||||
let black_path = case.root_path().join("site_packages/black.py");
|
||||
@@ -492,7 +546,94 @@ fn new_non_project_file() -> anyhow::Result<()> {
|
||||
assert!(case.system_file(&black_path).is_ok());
|
||||
|
||||
// The file should not have been added to the project files
|
||||
assert_eq!(&case.collect_project_files(), &[bar_file]);
|
||||
case.assert_indexed_project_files([bar_file]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_files_with_explicit_included_paths() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
context.write_project_file("src/main.py", "")?;
|
||||
context.write_project_file("src/sub/__init__.py", "")?;
|
||||
context.write_project_file("src/test.py", "")?;
|
||||
context.set_included_paths(vec![
|
||||
context.join_project_path("src/main.py"),
|
||||
context.join_project_path("src/sub"),
|
||||
]);
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let main_path = case.project_path("src/main.py");
|
||||
let main_file = case.system_file(&main_path).unwrap();
|
||||
|
||||
let sub_init_path = case.project_path("src/sub/__init__.py");
|
||||
let sub_init = case.system_file(&sub_init_path).unwrap();
|
||||
|
||||
case.assert_indexed_project_files([main_file, sub_init]);
|
||||
|
||||
// Write a new file to `sub` which is an included path
|
||||
let sub_a_path = case.project_path("src/sub/a.py");
|
||||
std::fs::write(sub_a_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
// and write a second file in the root directory -- this should not be included
|
||||
let test2_path = case.project_path("src/test2.py");
|
||||
std::fs::write(test2_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("test2.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
let sub_a_file = case.system_file(&sub_a_path).expect("sub/a.py to exist");
|
||||
|
||||
case.assert_indexed_project_files([main_file, sub_init, sub_a_file]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new_file_in_included_out_of_project_directory() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
context.write_project_file("src/main.py", "")?;
|
||||
context.write_project_file("script.py", "")?;
|
||||
context.write_file("outside_project/a.py", "")?;
|
||||
|
||||
context.set_included_paths(vec![
|
||||
context.join_root_path("outside_project"),
|
||||
context.join_project_path("src"),
|
||||
]);
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let main_path = case.project_path("src/main.py");
|
||||
let main_file = case.system_file(&main_path).unwrap();
|
||||
|
||||
let outside_a_path = case.root_path().join("outside_project/a.py");
|
||||
let outside_a = case.system_file(&outside_a_path).unwrap();
|
||||
|
||||
case.assert_indexed_project_files([outside_a, main_file]);
|
||||
|
||||
// Write a new file to `src` which should be watched
|
||||
let src_a = case.project_path("src/a.py");
|
||||
std::fs::write(src_a.as_std_path(), "print('Hello')")?;
|
||||
|
||||
// and write a second file to `outside_project` which should be watched too
|
||||
let outside_b_path = case.root_path().join("outside_project/b.py");
|
||||
std::fs::write(outside_b_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
// and a third file in the project's root that should not be included
|
||||
let script2_path = case.project_path("script2.py");
|
||||
std::fs::write(script2_path.as_std_path(), "print('Hello')")?;
|
||||
|
||||
let changes = case.stop_watch(event_for_file("script2.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
let src_a_file = case.system_file(&src_a).unwrap();
|
||||
let outside_b_file = case.system_file(&outside_b_path).unwrap();
|
||||
|
||||
// The file should not have been added to the project files
|
||||
case.assert_indexed_project_files([main_file, outside_a, outside_b_file, src_a_file]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -505,7 +646,7 @@ fn changed_file() -> anyhow::Result<()> {
|
||||
|
||||
let foo = case.system_file(&foo_path)?;
|
||||
assert_eq!(source_text(case.db(), foo).as_str(), foo_source);
|
||||
assert_eq!(&case.collect_project_files(), &[foo]);
|
||||
case.assert_indexed_project_files([foo]);
|
||||
|
||||
update_file(&foo_path, "print('Version 2')")?;
|
||||
|
||||
@@ -516,7 +657,7 @@ fn changed_file() -> anyhow::Result<()> {
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
|
||||
assert_eq!(&case.collect_project_files(), &[foo]);
|
||||
case.assert_indexed_project_files([foo]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -530,7 +671,7 @@ fn deleted_file() -> anyhow::Result<()> {
|
||||
let foo = case.system_file(&foo_path)?;
|
||||
|
||||
assert!(foo.exists(case.db()));
|
||||
assert_eq!(&case.collect_project_files(), &[foo]);
|
||||
case.assert_indexed_project_files([foo]);
|
||||
|
||||
std::fs::remove_file(foo_path.as_std_path())?;
|
||||
|
||||
@@ -539,7 +680,7 @@ fn deleted_file() -> anyhow::Result<()> {
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert!(!foo.exists(case.db()));
|
||||
assert_eq!(&case.collect_project_files(), &[] as &[File]);
|
||||
case.assert_indexed_project_files([]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -559,7 +700,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
|
||||
let foo = case.system_file(&foo_path)?;
|
||||
|
||||
assert!(foo.exists(case.db()));
|
||||
assert_eq!(&case.collect_project_files(), &[foo]);
|
||||
case.assert_indexed_project_files([foo]);
|
||||
|
||||
std::fs::rename(
|
||||
foo_path.as_std_path(),
|
||||
@@ -571,7 +712,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert!(!foo.exists(case.db()));
|
||||
assert_eq!(&case.collect_project_files(), &[] as &[File]);
|
||||
case.assert_indexed_project_files([]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -589,7 +730,7 @@ fn move_file_to_project() -> anyhow::Result<()> {
|
||||
let foo_in_project = case.project_path("foo.py");
|
||||
|
||||
assert!(case.system_file(&foo_path).is_ok());
|
||||
assert_eq!(&case.collect_project_files(), &[bar]);
|
||||
case.assert_indexed_project_files([bar]);
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), foo_in_project.as_std_path())?;
|
||||
|
||||
@@ -600,7 +741,7 @@ fn move_file_to_project() -> anyhow::Result<()> {
|
||||
let foo_in_project = case.system_file(&foo_in_project)?;
|
||||
|
||||
assert!(foo_in_project.exists(case.db()));
|
||||
assert_eq!(&case.collect_project_files(), &[bar, foo_in_project]);
|
||||
case.assert_indexed_project_files([bar, foo_in_project]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -614,7 +755,7 @@ fn rename_file() -> anyhow::Result<()> {
|
||||
|
||||
let foo = case.system_file(&foo_path)?;
|
||||
|
||||
assert_eq!(case.collect_project_files(), [foo]);
|
||||
case.assert_indexed_project_files([foo]);
|
||||
|
||||
std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;
|
||||
|
||||
@@ -627,7 +768,7 @@ fn rename_file() -> anyhow::Result<()> {
|
||||
let bar = case.system_file(&bar_path)?;
|
||||
|
||||
assert!(bar.exists(case.db()));
|
||||
assert_eq!(case.collect_project_files(), [bar]);
|
||||
case.assert_indexed_project_files([bar]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -653,7 +794,7 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
|
||||
);
|
||||
|
||||
assert_eq!(sub_a_module, None);
|
||||
assert_eq!(case.collect_project_files(), &[bar]);
|
||||
case.assert_indexed_project_files([bar]);
|
||||
|
||||
let sub_new_path = case.project_path("sub");
|
||||
std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
|
||||
@@ -677,7 +818,7 @@ fn directory_moved_to_project() -> anyhow::Result<()> {
|
||||
)
|
||||
.is_some());
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
|
||||
case.assert_indexed_project_files([bar, init_file, a_file]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -705,7 +846,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
.system_file(sub_path.join("a.py"))
|
||||
.expect("a.py to exist");
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
|
||||
case.assert_indexed_project_files([bar, init_file, a_file]);
|
||||
|
||||
std::fs::create_dir(case.root_path().join(".trash").as_std_path())?;
|
||||
let trashed_sub = case.root_path().join(".trash/sub");
|
||||
@@ -726,7 +867,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
|
||||
assert!(!init_file.exists(case.db()));
|
||||
assert!(!a_file.exists(case.db()));
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[bar]);
|
||||
case.assert_indexed_project_files([bar]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -760,7 +901,7 @@ fn directory_renamed() -> anyhow::Result<()> {
|
||||
.system_file(sub_path.join("a.py"))
|
||||
.expect("a.py to exist");
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[bar, sub_init, sub_a]);
|
||||
case.assert_indexed_project_files([bar, sub_init, sub_a]);
|
||||
|
||||
let foo_baz = case.project_path("foo/baz");
|
||||
|
||||
@@ -802,10 +943,7 @@ fn directory_renamed() -> anyhow::Result<()> {
|
||||
assert!(foo_baz_init.exists(case.db()));
|
||||
assert!(foo_baz_a.exists(case.db()));
|
||||
|
||||
assert_eq!(
|
||||
case.collect_project_files(),
|
||||
&[bar, foo_baz_init, foo_baz_a]
|
||||
);
|
||||
case.assert_indexed_project_files([bar, foo_baz_init, foo_baz_a]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -834,7 +972,7 @@ fn directory_deleted() -> anyhow::Result<()> {
|
||||
let a_file = case
|
||||
.system_file(sub_path.join("a.py"))
|
||||
.expect("a.py to exist");
|
||||
assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
|
||||
case.assert_indexed_project_files([bar, init_file, a_file]);
|
||||
|
||||
std::fs::remove_dir_all(sub_path.as_std_path())
|
||||
.with_context(|| "Failed to remove the sub directory")?;
|
||||
@@ -852,15 +990,17 @@ fn directory_deleted() -> anyhow::Result<()> {
|
||||
|
||||
assert!(!init_file.exists(case.db()));
|
||||
assert!(!a_file.exists(case.db()));
|
||||
assert_eq!(case.collect_project_files(), &[bar]);
|
||||
case.assert_indexed_project_files([bar]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn search_path() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options([("bar.py", "import sub.a")], |context| {
|
||||
Some(Options {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
context.write_project_file("bar.py", "import sub.a")?;
|
||||
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
extra_paths: Some(vec![RelativePathBuf::cli(
|
||||
context.join_root_path("site_packages"),
|
||||
@@ -868,7 +1008,8 @@ fn search_path() -> anyhow::Result<()> {
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
});
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let site_packages = case.root_path().join("site_packages");
|
||||
@@ -885,10 +1026,7 @@ fn search_path() -> anyhow::Result<()> {
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
|
||||
assert_eq!(
|
||||
case.collect_project_files(),
|
||||
&[case.system_file(case.project_path("bar.py")).unwrap()]
|
||||
);
|
||||
case.assert_indexed_project_files([case.system_file(case.project_path("bar.py")).unwrap()]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -925,8 +1063,9 @@ fn add_search_path() -> anyhow::Result<()> {
|
||||
|
||||
#[test]
|
||||
fn remove_search_path() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options([("bar.py", "import sub.a")], |context| {
|
||||
Some(Options {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
context.write_project_file("bar.py", "import sub.a")?;
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
extra_paths: Some(vec![RelativePathBuf::cli(
|
||||
context.join_root_path("site_packages"),
|
||||
@@ -934,7 +1073,9 @@ fn remove_search_path() -> anyhow::Result<()> {
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Remove site packages from the search path settings.
|
||||
@@ -957,30 +1098,30 @@ fn remove_search_path() -> anyhow::Result<()> {
|
||||
|
||||
#[test]
|
||||
fn change_python_version_and_platform() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options(
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
// `sys.last_exc` is a Python 3.12 only feature
|
||||
// `os.getegid()` is Unix only
|
||||
[(
|
||||
context.write_project_file(
|
||||
"bar.py",
|
||||
r#"
|
||||
import sys
|
||||
import os
|
||||
print(sys.last_exc, os.getegid())
|
||||
"#,
|
||||
)],
|
||||
|_context| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY311)),
|
||||
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
|
||||
"win32".to_string(),
|
||||
))),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
)?;
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY311)),
|
||||
python_platform: Some(RangedValue::cli(PythonPlatform::Identifier(
|
||||
"win32".to_string(),
|
||||
))),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let diagnostics = case.db.check().context("Failed to check project.")?;
|
||||
|
||||
@@ -1015,38 +1156,35 @@ print(sys.last_exc, os.getegid())
|
||||
|
||||
#[test]
|
||||
fn changed_versions_file() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options(
|
||||
|context: &SetupContext| {
|
||||
std::fs::write(
|
||||
context.join_project_path("bar.py").as_std_path(),
|
||||
"import sub.a",
|
||||
)?;
|
||||
std::fs::create_dir_all(context.join_root_path("typeshed/stdlib").as_std_path())?;
|
||||
std::fs::write(
|
||||
context
|
||||
.join_root_path("typeshed/stdlib/VERSIONS")
|
||||
.as_std_path(),
|
||||
"",
|
||||
)?;
|
||||
std::fs::write(
|
||||
context
|
||||
.join_root_path("typeshed/stdlib/os.pyi")
|
||||
.as_std_path(),
|
||||
"# not important",
|
||||
)?;
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
std::fs::write(
|
||||
context.join_project_path("bar.py").as_std_path(),
|
||||
"import sub.a",
|
||||
)?;
|
||||
std::fs::create_dir_all(context.join_root_path("typeshed/stdlib").as_std_path())?;
|
||||
std::fs::write(
|
||||
context
|
||||
.join_root_path("typeshed/stdlib/VERSIONS")
|
||||
.as_std_path(),
|
||||
"",
|
||||
)?;
|
||||
std::fs::write(
|
||||
context
|
||||
.join_root_path("typeshed/stdlib/os.pyi")
|
||||
.as_std_path(),
|
||||
"# not important",
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
},
|
||||
|context| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
typeshed: Some(RelativePathBuf::cli(context.join_root_path("typeshed"))),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
typeshed: Some(RelativePathBuf::cli(context.join_root_path("typeshed"))),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Unset the custom typeshed directory.
|
||||
assert_eq!(
|
||||
@@ -1091,7 +1229,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
|
||||
/// we're seeing is that Windows only emits a single event, similar to Linux.
|
||||
#[test]
|
||||
fn hard_links_in_project() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
let foo_path = context.join_project_path("foo.py");
|
||||
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
|
||||
|
||||
@@ -1110,7 +1248,7 @@ fn hard_links_in_project() -> anyhow::Result<()> {
|
||||
|
||||
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
|
||||
assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 1')");
|
||||
assert_eq!(case.collect_project_files(), &[bar, foo]);
|
||||
case.assert_indexed_project_files([bar, foo]);
|
||||
|
||||
// Write to the hard link target.
|
||||
update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
|
||||
@@ -1163,7 +1301,7 @@ fn hard_links_in_project() -> anyhow::Result<()> {
|
||||
ignore = "windows doesn't support observing changes to hard linked files."
|
||||
)]
|
||||
fn hard_links_to_target_outside_project() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
let foo_path = context.join_root_path("foo.py");
|
||||
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
|
||||
|
||||
@@ -1271,7 +1409,7 @@ mod unix {
|
||||
ignore = "FSEvents doesn't emit change events for symlinked directories outside of the watched paths."
|
||||
)]
|
||||
fn symlink_target_outside_watched_paths() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
// Set up the symlink target.
|
||||
let link_target = context.join_root_path("bar");
|
||||
std::fs::create_dir_all(link_target.as_std_path())
|
||||
@@ -1352,7 +1490,7 @@ mod unix {
|
||||
/// ```
|
||||
#[test]
|
||||
fn symlink_inside_project() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
// Set up the symlink target.
|
||||
let link_target = context.join_project_path("patched/bar");
|
||||
std::fs::create_dir_all(link_target.as_std_path())
|
||||
@@ -1390,7 +1528,7 @@ mod unix {
|
||||
);
|
||||
assert_eq!(baz.file().path(case.db()).as_system_path(), Some(&*bar_baz));
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]);
|
||||
case.assert_indexed_project_files([patched_bar_baz_file]);
|
||||
|
||||
// Write to the symlink target.
|
||||
update_file(&patched_bar_baz, "def baz(): print('Version 2')")
|
||||
@@ -1427,7 +1565,7 @@ mod unix {
|
||||
bar_baz_text = bar_baz_text.as_str()
|
||||
);
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[patched_bar_baz_file]);
|
||||
case.assert_indexed_project_files([patched_bar_baz_file]);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1445,43 +1583,39 @@ mod unix {
|
||||
/// ```
|
||||
#[test]
|
||||
fn symlinked_module_search_path() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options(
|
||||
|context: &SetupContext| {
|
||||
// Set up the symlink target.
|
||||
let site_packages = context.join_root_path("site-packages");
|
||||
let bar = site_packages.join("bar");
|
||||
std::fs::create_dir_all(bar.as_std_path())
|
||||
.context("Failed to create bar directory")?;
|
||||
let baz_original = bar.join("baz.py");
|
||||
std::fs::write(baz_original.as_std_path(), "def baz(): ...")
|
||||
.context("Failed to write baz.py")?;
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
// Set up the symlink target.
|
||||
let site_packages = context.join_root_path("site-packages");
|
||||
let bar = site_packages.join("bar");
|
||||
std::fs::create_dir_all(bar.as_std_path()).context("Failed to create bar directory")?;
|
||||
let baz_original = bar.join("baz.py");
|
||||
std::fs::write(baz_original.as_std_path(), "def baz(): ...")
|
||||
.context("Failed to write baz.py")?;
|
||||
|
||||
// Symlink the site packages in the venv to the global site packages
|
||||
let venv_site_packages =
|
||||
context.join_project_path(".venv/lib/python3.12/site-packages");
|
||||
std::fs::create_dir_all(venv_site_packages.parent().unwrap())
|
||||
.context("Failed to create .venv directory")?;
|
||||
std::os::unix::fs::symlink(
|
||||
site_packages.as_std_path(),
|
||||
venv_site_packages.as_std_path(),
|
||||
)
|
||||
.context("Failed to create symlink to site-packages")?;
|
||||
// Symlink the site packages in the venv to the global site packages
|
||||
let venv_site_packages =
|
||||
context.join_project_path(".venv/lib/python3.12/site-packages");
|
||||
std::fs::create_dir_all(venv_site_packages.parent().unwrap())
|
||||
.context("Failed to create .venv directory")?;
|
||||
std::os::unix::fs::symlink(
|
||||
site_packages.as_std_path(),
|
||||
venv_site_packages.as_std_path(),
|
||||
)
|
||||
.context("Failed to create symlink to site-packages")?;
|
||||
|
||||
Ok(())
|
||||
},
|
||||
|_context| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
extra_paths: Some(vec![RelativePathBuf::cli(
|
||||
".venv/lib/python3.12/site-packages",
|
||||
)]),
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
context.set_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
extra_paths: Some(vec![RelativePathBuf::cli(
|
||||
".venv/lib/python3.12/site-packages",
|
||||
)]),
|
||||
python_version: Some(RangedValue::cli(PythonVersion::PY312)),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
let baz = resolve_module(
|
||||
case.db().upcast(),
|
||||
@@ -1508,7 +1642,7 @@ mod unix {
|
||||
Some(&*baz_original)
|
||||
);
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[]);
|
||||
case.assert_indexed_project_files([]);
|
||||
|
||||
// Write to the symlink target.
|
||||
update_file(&baz_original, "def baz(): print('Version 2')")
|
||||
@@ -1535,7 +1669,7 @@ mod unix {
|
||||
"def baz(): print('Version 2')"
|
||||
);
|
||||
|
||||
assert_eq!(case.collect_project_files(), &[]);
|
||||
case.assert_indexed_project_files([]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1543,7 +1677,7 @@ mod unix {
|
||||
|
||||
#[test]
|
||||
fn nested_projects_delete_root() -> anyhow::Result<()> {
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
std::fs::write(
|
||||
context.join_project_path("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
@@ -1585,7 +1719,7 @@ fn nested_projects_delete_root() -> anyhow::Result<()> {
|
||||
fn changes_to_user_configuration() -> anyhow::Result<()> {
|
||||
let mut _config_dir_override: Option<UserConfigDirectoryOverrideGuard> = None;
|
||||
|
||||
let mut case = setup(|context: &SetupContext| {
|
||||
let mut case = setup(|context: &mut SetupContext| {
|
||||
std::fs::write(
|
||||
context.join_project_path("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
|
||||
@@ -5,7 +5,7 @@ use crate::DEFAULT_LINT_REGISTRY;
|
||||
use crate::{Project, ProjectMetadata};
|
||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||
use red_knot_python_semantic::{Db as SemanticDb, Program};
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::diagnostic::OldDiagnosticTrait;
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::System;
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
@@ -55,11 +55,11 @@ impl ProjectDatabase {
|
||||
}
|
||||
|
||||
/// Checks all open files in the project and its dependencies.
|
||||
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
|
||||
pub fn check(&self) -> Result<Vec<Box<dyn OldDiagnosticTrait>>, Cancelled> {
|
||||
self.with_db(|db| db.project().check(db))
|
||||
}
|
||||
|
||||
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
|
||||
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn OldDiagnosticTrait>>, Cancelled> {
|
||||
let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();
|
||||
|
||||
self.with_db(|db| self.project().check_file(db, file))
|
||||
|
||||
@@ -2,20 +2,20 @@ use crate::db::{Db, ProjectDatabase};
|
||||
use crate::metadata::options::Options;
|
||||
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
||||
use crate::{Project, ProjectMetadata};
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use crate::walk::ProjectFilesWalker;
|
||||
use red_knot_python_semantic::Program;
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::system::walk_directory::WalkState;
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::SystemPath;
|
||||
use ruff_db::Db as _;
|
||||
use ruff_python_ast::PySourceType;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
impl ProjectDatabase {
|
||||
#[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
|
||||
pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
|
||||
let mut project = self.project();
|
||||
let project_path = project.root(self).to_path_buf();
|
||||
let project_root = project.root(self).to_path_buf();
|
||||
let program = Program::get(self);
|
||||
let custom_stdlib_versions_path = program
|
||||
.custom_stdlib_search_path(self)
|
||||
@@ -30,7 +30,7 @@ impl ProjectDatabase {
|
||||
|
||||
// Deduplicate the `sync` calls. Many file watchers emit multiple events for the same path.
|
||||
let mut synced_files = FxHashSet::default();
|
||||
let mut synced_recursively = FxHashSet::default();
|
||||
let mut sync_recursively = BTreeSet::default();
|
||||
|
||||
let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
|
||||
if synced_files.insert(path.to_path_buf()) {
|
||||
@@ -38,13 +38,9 @@ impl ProjectDatabase {
|
||||
}
|
||||
};
|
||||
|
||||
let mut sync_recursively = |db: &mut ProjectDatabase, path: &SystemPath| {
|
||||
if synced_recursively.insert(path.to_path_buf()) {
|
||||
Files::sync_recursively(db, path);
|
||||
}
|
||||
};
|
||||
|
||||
for change in changes {
|
||||
tracing::trace!("Handle change: {:?}", change);
|
||||
|
||||
if let Some(path) = change.system_path() {
|
||||
if matches!(
|
||||
path.file_name(),
|
||||
@@ -70,16 +66,27 @@ impl ProjectDatabase {
|
||||
match kind {
|
||||
CreatedKind::File => sync_path(self, &path),
|
||||
CreatedKind::Directory | CreatedKind::Any => {
|
||||
sync_recursively(self, &path);
|
||||
sync_recursively.insert(path.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if self.system().is_file(&path) {
|
||||
// Add the parent directory because `walkdir` always visits explicitly passed files
|
||||
// even if they match an exclude filter.
|
||||
added_paths.insert(path.parent().unwrap().to_path_buf());
|
||||
} else {
|
||||
added_paths.insert(path);
|
||||
// Unlike other files, it's not only important to update the status of existing
|
||||
// and known `File`s (`sync_recursively`), it's also important to discover new files
|
||||
// that were added in the project's root (or any of the paths included for checking).
|
||||
//
|
||||
// This is important because `Project::check` iterates over all included files.
|
||||
// The code below walks the `added_paths` and adds all files that
|
||||
// should be included in the project. We can skip this check for
|
||||
// paths that aren't part of the project or shouldn't be included
|
||||
// when checking the project.
|
||||
if project.is_path_included(self, &path) {
|
||||
if self.system().is_file(&path) {
|
||||
// Add the parent directory because `walkdir` always visits explicitly passed files
|
||||
// even if they match an exclude filter.
|
||||
added_paths.insert(path.parent().unwrap().to_path_buf());
|
||||
} else {
|
||||
added_paths.insert(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -103,7 +110,7 @@ impl ProjectDatabase {
|
||||
project.remove_file(self, file);
|
||||
}
|
||||
} else {
|
||||
sync_recursively(self, &path);
|
||||
sync_recursively.insert(path.clone());
|
||||
|
||||
if custom_stdlib_versions_path
|
||||
.as_ref()
|
||||
@@ -112,11 +119,19 @@ impl ProjectDatabase {
|
||||
custom_stdlib_change = true;
|
||||
}
|
||||
|
||||
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
|
||||
// We may want to make this more clever in the future, to e.g. iterate over the
|
||||
// indexed files and remove the once that start with the same path, unless
|
||||
// the deleted path is the project configuration.
|
||||
project_changed = true;
|
||||
if project.is_path_included(self, &path) || path == project_root {
|
||||
// TODO: Shouldn't it be enough to simply traverse the project files and remove all
|
||||
// that start with the given path?
|
||||
tracing::debug!(
|
||||
"Reload project because of a path that could have been a directory."
|
||||
);
|
||||
|
||||
// Perform a full-reload in case the deleted directory contained the pyproject.toml.
|
||||
// We may want to make this more clever in the future, to e.g. iterate over the
|
||||
// indexed files and remove the once that start with the same path, unless
|
||||
// the deleted path is the project configuration.
|
||||
project_changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -133,13 +148,29 @@ impl ProjectDatabase {
|
||||
ChangeEvent::Rescan => {
|
||||
project_changed = true;
|
||||
Files::sync_all(self);
|
||||
sync_recursively.clear();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let sync_recursively = sync_recursively.into_iter();
|
||||
let mut last = None;
|
||||
|
||||
for path in sync_recursively {
|
||||
// Avoid re-syncing paths that are sub-paths of each other.
|
||||
if let Some(last) = &last {
|
||||
if path.starts_with(last) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
Files::sync_recursively(self, &path);
|
||||
last = Some(path);
|
||||
}
|
||||
|
||||
if project_changed {
|
||||
match ProjectMetadata::discover(&project_path, self.system()) {
|
||||
match ProjectMetadata::discover(&project_root, self.system()) {
|
||||
Ok(mut metadata) => {
|
||||
if let Some(cli_options) = cli_options {
|
||||
metadata.apply_cli_options(cli_options.clone());
|
||||
@@ -186,51 +217,24 @@ impl ProjectDatabase {
|
||||
}
|
||||
}
|
||||
|
||||
let mut added_paths = added_paths.into_iter();
|
||||
let diagnostics = if let Some(walker) = ProjectFilesWalker::incremental(self, added_paths) {
|
||||
// Use directory walking to discover newly added files.
|
||||
let (files, diagnostics) = walker.collect_vec(self);
|
||||
|
||||
// Use directory walking to discover newly added files.
|
||||
if let Some(path) = added_paths.next() {
|
||||
let mut walker = self.system().walk_directory(&path);
|
||||
|
||||
for extra_path in added_paths {
|
||||
walker = walker.add(&extra_path);
|
||||
for file in files {
|
||||
project.add_file(self, file);
|
||||
}
|
||||
|
||||
let added_paths = std::sync::Mutex::new(Vec::default());
|
||||
diagnostics
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
walker.run(|| {
|
||||
Box::new(|entry| {
|
||||
let Ok(entry) = entry else {
|
||||
return WalkState::Continue;
|
||||
};
|
||||
|
||||
if !entry.file_type().is_file() {
|
||||
return WalkState::Continue;
|
||||
}
|
||||
|
||||
if entry.path().starts_with(&project_path)
|
||||
&& entry
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(PySourceType::try_from_extension)
|
||||
.is_some()
|
||||
{
|
||||
let mut paths = added_paths.lock().unwrap();
|
||||
|
||||
paths.push(entry.into_path());
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
})
|
||||
});
|
||||
|
||||
for path in added_paths.into_inner().unwrap() {
|
||||
let file = system_path_to_file(self, &path);
|
||||
|
||||
if let Ok(file) = file {
|
||||
project.add_file(self, file);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: We simply replace all IO related diagnostics here. This isn't ideal, because
|
||||
// it removes IO errors that may still be relevant. However, tracking IO errors correctly
|
||||
// across revisions doesn't feel essential, considering that they're rare. However, we could
|
||||
// implement a `BTreeMap` or similar and only prune the diagnostics from paths that we've
|
||||
// re-scanned (or that were removed etc).
|
||||
project.replace_index_diagnostics(self, diagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,10 +8,7 @@ use salsa::Setter;
|
||||
use ruff_db::files::File;
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::Project;
|
||||
|
||||
/// Cheap cloneable hash set of files.
|
||||
type FileSet = Arc<FxHashSet<File>>;
|
||||
use crate::{IOErrorDiagnostic, Project};
|
||||
|
||||
/// The indexed files of a project.
|
||||
///
|
||||
@@ -35,9 +32,9 @@ impl IndexedFiles {
|
||||
}
|
||||
}
|
||||
|
||||
fn indexed(files: FileSet) -> Self {
|
||||
fn indexed(inner: Arc<IndexedInner>) -> Self {
|
||||
Self {
|
||||
state: std::sync::Mutex::new(State::Indexed(files)),
|
||||
state: std::sync::Mutex::new(State::Indexed(inner)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,8 +43,8 @@ impl IndexedFiles {
|
||||
|
||||
match &*state {
|
||||
State::Lazy => Index::Lazy(LazyFiles { files: state }),
|
||||
State::Indexed(files) => Index::Indexed(Indexed {
|
||||
files: Arc::clone(files),
|
||||
State::Indexed(inner) => Index::Indexed(Indexed {
|
||||
inner: Arc::clone(inner),
|
||||
_lifetime: PhantomData,
|
||||
}),
|
||||
}
|
||||
@@ -94,7 +91,7 @@ impl IndexedFiles {
|
||||
Some(IndexedMut {
|
||||
db: Some(db),
|
||||
project,
|
||||
files: indexed,
|
||||
indexed,
|
||||
did_change: false,
|
||||
})
|
||||
}
|
||||
@@ -112,7 +109,7 @@ enum State {
|
||||
Lazy,
|
||||
|
||||
/// The files are indexed. Stores the known files of a package.
|
||||
Indexed(FileSet),
|
||||
Indexed(Arc<IndexedInner>),
|
||||
}
|
||||
|
||||
pub(super) enum Index<'db> {
|
||||
@@ -129,32 +126,48 @@ pub(super) struct LazyFiles<'db> {
|
||||
|
||||
impl<'db> LazyFiles<'db> {
|
||||
/// Sets the indexed files of a package to `files`.
|
||||
pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
|
||||
pub(super) fn set(
|
||||
mut self,
|
||||
files: FxHashSet<File>,
|
||||
diagnostics: Vec<IOErrorDiagnostic>,
|
||||
) -> Indexed<'db> {
|
||||
let files = Indexed {
|
||||
files: Arc::new(files),
|
||||
inner: Arc::new(IndexedInner { files, diagnostics }),
|
||||
_lifetime: PhantomData,
|
||||
};
|
||||
*self.files = State::Indexed(Arc::clone(&files.files));
|
||||
*self.files = State::Indexed(Arc::clone(&files.inner));
|
||||
files
|
||||
}
|
||||
}
|
||||
|
||||
/// The indexed files of a package.
|
||||
/// The indexed files of the project.
|
||||
///
|
||||
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
|
||||
/// revisiting the locking behavior in [`IndexedFiles::indexed_mut`].
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[derive(Debug)]
|
||||
pub struct Indexed<'db> {
|
||||
files: FileSet,
|
||||
inner: Arc<IndexedInner>,
|
||||
// Preserve the lifetime of `PackageFiles`.
|
||||
_lifetime: PhantomData<&'db ()>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct IndexedInner {
|
||||
files: FxHashSet<File>,
|
||||
diagnostics: Vec<IOErrorDiagnostic>,
|
||||
}
|
||||
|
||||
impl Indexed<'_> {
|
||||
pub(super) fn diagnostics(&self) -> &[IOErrorDiagnostic] {
|
||||
&self.inner.diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Indexed<'_> {
|
||||
type Target = FxHashSet<File>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.files
|
||||
&self.inner.files
|
||||
}
|
||||
}
|
||||
|
||||
@@ -165,7 +178,7 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
|
||||
type IntoIter = IndexedIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.files.iter().copied()
|
||||
self.inner.files.iter().copied()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -176,13 +189,13 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
|
||||
pub(super) struct IndexedMut<'db> {
|
||||
db: Option<&'db mut dyn Db>,
|
||||
project: Project,
|
||||
files: FileSet,
|
||||
indexed: Arc<IndexedInner>,
|
||||
did_change: bool,
|
||||
}
|
||||
|
||||
impl IndexedMut<'_> {
|
||||
pub(super) fn insert(&mut self, file: File) -> bool {
|
||||
if self.files_mut().insert(file) {
|
||||
if self.inner_mut().files.insert(file) {
|
||||
self.did_change = true;
|
||||
true
|
||||
} else {
|
||||
@@ -191,7 +204,7 @@ impl IndexedMut<'_> {
|
||||
}
|
||||
|
||||
pub(super) fn remove(&mut self, file: File) -> bool {
|
||||
if self.files_mut().remove(&file) {
|
||||
if self.inner_mut().files.remove(&file) {
|
||||
self.did_change = true;
|
||||
true
|
||||
} else {
|
||||
@@ -199,8 +212,13 @@ impl IndexedMut<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
fn files_mut(&mut self) -> &mut FxHashSet<File> {
|
||||
Arc::get_mut(&mut self.files).expect("All references to `FilesSet` to have been dropped")
|
||||
pub(super) fn set_diagnostics(&mut self, diagnostics: Vec<IOErrorDiagnostic>) {
|
||||
self.inner_mut().diagnostics = diagnostics;
|
||||
}
|
||||
|
||||
fn inner_mut(&mut self) -> &mut IndexedInner {
|
||||
Arc::get_mut(&mut self.indexed)
|
||||
.expect("All references to `FilesSet` should have been dropped")
|
||||
}
|
||||
|
||||
fn set_impl(&mut self) {
|
||||
@@ -208,16 +226,16 @@ impl IndexedMut<'_> {
|
||||
return;
|
||||
};
|
||||
|
||||
let files = Arc::clone(&self.files);
|
||||
let indexed = Arc::clone(&self.indexed);
|
||||
|
||||
if self.did_change {
|
||||
// If there are changes, set the new file_set to trigger a salsa revision change.
|
||||
self.project
|
||||
.set_file_set(db)
|
||||
.to(IndexedFiles::indexed(files));
|
||||
.to(IndexedFiles::indexed(indexed));
|
||||
} else {
|
||||
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
|
||||
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(files);
|
||||
*self.project.file_set(db).state.lock().unwrap() = State::Indexed(indexed);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -237,7 +255,7 @@ mod tests {
|
||||
use crate::files::Index;
|
||||
use crate::ProjectMetadata;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::{DbWithWritableSystem as _, SystemPathBuf};
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
#[test]
|
||||
@@ -252,7 +270,7 @@ mod tests {
|
||||
let file = system_path_to_file(&db, "test.py").unwrap();
|
||||
|
||||
let files = match project.file_set(&db).get() {
|
||||
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
|
||||
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file]), Vec::new()),
|
||||
Index::Indexed(files) => files,
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#![allow(clippy::ref_option)]
|
||||
|
||||
use crate::metadata::options::OptionDiagnostic;
|
||||
use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
|
||||
pub use db::{Db, ProjectDatabase};
|
||||
use files::{Index, Indexed, IndexedFiles};
|
||||
use metadata::settings::Settings;
|
||||
@@ -8,24 +9,24 @@ pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
|
||||
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder, RuleSelection};
|
||||
use red_knot_python_semantic::register_lints;
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity, Span};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::diagnostic::{DiagnosticId, OldDiagnosticTrait, OldParseDiagnostic, Severity, Span};
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::source::{source_text, SourceTextError};
|
||||
use ruff_db::system::walk_directory::WalkState;
|
||||
use ruff_db::system::{FileType, SystemPath};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
use rustc_hash::FxHashSet;
|
||||
use salsa::Durability;
|
||||
use salsa::Setter;
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
use thiserror::Error;
|
||||
|
||||
pub mod combine;
|
||||
|
||||
mod db;
|
||||
mod files;
|
||||
pub mod metadata;
|
||||
mod walk;
|
||||
pub mod watch;
|
||||
|
||||
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
|
||||
@@ -71,6 +72,30 @@ pub struct Project {
|
||||
#[return_ref]
|
||||
pub settings: Settings,
|
||||
|
||||
/// The paths that should be included when checking this project.
|
||||
///
|
||||
/// The default (when this list is empty) is to include all files in the project root
|
||||
/// (that satisfy the configured include and exclude patterns).
|
||||
/// However, it's sometimes desired to only check a subset of the project, e.g. to see
|
||||
/// the diagnostics for a single file or a folder.
|
||||
///
|
||||
/// This list gets initialized by the paths passed to `knot check <paths>`
|
||||
///
|
||||
/// ## How is this different from `open_files`?
|
||||
///
|
||||
/// The `included_paths` is closely related to `open_files`. The only difference is that
|
||||
/// `open_files` is already a resolved set of files whereas `included_paths` is only a list of paths
|
||||
/// that are resolved to files by indexing them. The other difference is that
|
||||
/// new files added to any directory in `included_paths` will be indexed and added to the project
|
||||
/// whereas `open_files` needs to be updated manually (e.g. by the IDE).
|
||||
///
|
||||
/// In short, `open_files` is cheaper in contexts where the set of files is known, like
|
||||
/// in an IDE when the user only wants to check the open tabs. This could be modeled
|
||||
/// with `included_paths` too but it would require an explicit walk dir step that's simply unnecessary.
|
||||
#[default]
|
||||
#[return_ref]
|
||||
included_paths_list: Vec<SystemPathBuf>,
|
||||
|
||||
/// Diagnostics that were generated when resolving the project settings.
|
||||
#[return_ref]
|
||||
settings_diagnostics: Vec<OptionDiagnostic>,
|
||||
@@ -106,6 +131,16 @@ impl Project {
|
||||
self.settings(db).to_rules()
|
||||
}
|
||||
|
||||
/// Returns `true` if `path` is both part of the project and included (see `included_paths_list`).
|
||||
///
|
||||
/// Unlike [Self::files], this method does not respect `.gitignore` files. It only checks
|
||||
/// the project's include and exclude settings as well as the paths that were passed to `knot check <paths>`.
|
||||
/// This means, that this method is an over-approximation of `Self::files` and may return `true` for paths
|
||||
/// that won't be included when checking the project because they're ignored in a `.gitignore` file.
|
||||
pub fn is_path_included(self, db: &dyn Db, path: &SystemPath) -> bool {
|
||||
ProjectFilesFilter::from_project(db, self).is_included(path)
|
||||
}
|
||||
|
||||
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
|
||||
tracing::debug!("Reloading project");
|
||||
assert_eq!(self.root(db), metadata.root());
|
||||
@@ -128,15 +163,22 @@ impl Project {
|
||||
}
|
||||
|
||||
/// Checks all open files in the project and its dependencies.
|
||||
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
|
||||
pub(crate) fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
||||
let project_span = tracing::debug_span!("Project::check");
|
||||
let _span = project_span.enter();
|
||||
|
||||
tracing::debug!("Checking project '{name}'", name = self.name(db));
|
||||
|
||||
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
|
||||
let mut diagnostics: Vec<Box<dyn OldDiagnosticTrait>> = Vec::new();
|
||||
diagnostics.extend(self.settings_diagnostics(db).iter().map(|diagnostic| {
|
||||
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
||||
diagnostic
|
||||
}));
|
||||
|
||||
let files = ProjectFiles::new(db, self);
|
||||
|
||||
diagnostics.extend(files.diagnostics().iter().cloned().map(|diagnostic| {
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic);
|
||||
diagnostic
|
||||
}));
|
||||
|
||||
@@ -147,7 +189,6 @@ impl Project {
|
||||
let project_span = project_span.clone();
|
||||
|
||||
rayon::scope(move |scope| {
|
||||
let files = ProjectFiles::new(&db, self);
|
||||
for file in &files {
|
||||
let result = inner_result.clone();
|
||||
let db = db.clone();
|
||||
@@ -166,12 +207,12 @@ impl Project {
|
||||
Arc::into_inner(result).unwrap().into_inner().unwrap()
|
||||
}
|
||||
|
||||
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||
pub(crate) fn check_file(self, db: &dyn Db, file: File) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
||||
let mut file_diagnostics: Vec<_> = self
|
||||
.settings_diagnostics(db)
|
||||
.iter()
|
||||
.map(|diagnostic| {
|
||||
let diagnostic: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
||||
diagnostic
|
||||
})
|
||||
.collect();
|
||||
@@ -207,6 +248,30 @@ impl Project {
|
||||
removed
|
||||
}
|
||||
|
||||
pub fn set_included_paths(self, db: &mut dyn Db, paths: Vec<SystemPathBuf>) {
|
||||
tracing::debug!("Setting included paths: {paths}", paths = paths.len());
|
||||
|
||||
self.set_included_paths_list(db).to(paths);
|
||||
self.reload_files(db);
|
||||
}
|
||||
|
||||
/// Returns the paths that should be checked.
|
||||
///
|
||||
/// The default is to check the entire project in which case this method returns
|
||||
/// the project root. However, users can specify to only check specific sub-folders or
|
||||
/// even files of a project by using `knot check <paths>`. In that case, this method
|
||||
/// returns the provided absolute paths.
|
||||
///
|
||||
/// Note: The CLI doesn't prohibit users from specifying paths outside the project root.
|
||||
/// This can be useful to check arbitrary files, but it isn't something we recommend.
|
||||
/// We should try to support this use case but it's okay if there are some limitations around it.
|
||||
fn included_paths_or_root(self, db: &dyn Db) -> &[SystemPathBuf] {
|
||||
match &**self.included_paths_list(db) {
|
||||
[] => std::slice::from_ref(&self.metadata(db).root),
|
||||
paths => paths,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the open files in the project or `None` if the entire project should be checked.
|
||||
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
||||
self.open_fileset(db).as_deref()
|
||||
@@ -289,6 +354,17 @@ impl Project {
|
||||
index.insert(file);
|
||||
}
|
||||
|
||||
/// Replaces the diagnostics from indexing the project files with `diagnostics`.
|
||||
///
|
||||
/// This is a no-op if the project files haven't been indexed yet.
|
||||
pub fn replace_index_diagnostics(self, db: &mut dyn Db, diagnostics: Vec<IOErrorDiagnostic>) {
|
||||
let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
|
||||
return;
|
||||
};
|
||||
|
||||
index.set_diagnostics(diagnostics);
|
||||
}
|
||||
|
||||
/// Returns the files belonging to this project.
|
||||
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
|
||||
let files = self.file_set(db);
|
||||
@@ -296,12 +372,14 @@ impl Project {
|
||||
let indexed = match files.get() {
|
||||
Index::Lazy(vacant) => {
|
||||
let _entered =
|
||||
tracing::debug_span!("Project::index_files", package = %self.name(db))
|
||||
tracing::debug_span!("Project::index_files", project = %self.name(db))
|
||||
.entered();
|
||||
|
||||
let files = discover_project_files(db, self);
|
||||
tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
|
||||
vacant.set(files)
|
||||
let walker = ProjectFilesWalker::new(db);
|
||||
let (files, diagnostics) = walker.collect_set(db);
|
||||
|
||||
tracing::info!("Indexed {} file(s)", files.len());
|
||||
vacant.set(files, diagnostics)
|
||||
}
|
||||
Index::Indexed(indexed) => indexed,
|
||||
};
|
||||
@@ -319,28 +397,29 @@ impl Project {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
|
||||
fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn OldDiagnosticTrait>> {
|
||||
let mut diagnostics: Vec<Box<dyn OldDiagnosticTrait>> = Vec::new();
|
||||
|
||||
// Abort checking if there are IO errors.
|
||||
let source = source_text(db.upcast(), file);
|
||||
|
||||
if let Some(read_error) = source.read_error() {
|
||||
diagnostics.push(Box::new(IOErrorDiagnostic {
|
||||
file,
|
||||
error: read_error.clone(),
|
||||
file: Some(file),
|
||||
error: read_error.clone().into(),
|
||||
}));
|
||||
return diagnostics;
|
||||
}
|
||||
|
||||
let parsed = parsed_module(db.upcast(), file);
|
||||
diagnostics.extend(parsed.errors().iter().map(|error| {
|
||||
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> =
|
||||
Box::new(OldParseDiagnostic::new(file, error.clone()));
|
||||
diagnostic
|
||||
}));
|
||||
|
||||
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
|
||||
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
||||
let boxed: Box<dyn OldDiagnosticTrait> = Box::new(diagnostic.clone());
|
||||
boxed
|
||||
}));
|
||||
|
||||
@@ -355,53 +434,6 @@ fn check_file_impl(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||
diagnostics
|
||||
}
|
||||
|
||||
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
|
||||
let paths = std::sync::Mutex::new(Vec::new());
|
||||
|
||||
db.system().walk_directory(project.root(db)).run(|| {
|
||||
Box::new(|entry| {
|
||||
match entry {
|
||||
Ok(entry) => {
|
||||
// Skip over any non python files to avoid creating too many entries in `Files`.
|
||||
match entry.file_type() {
|
||||
FileType::File => {
|
||||
if entry
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(PySourceType::try_from_extension)
|
||||
.is_some()
|
||||
{
|
||||
let mut paths = paths.lock().unwrap();
|
||||
paths.push(entry.into_path());
|
||||
}
|
||||
}
|
||||
FileType::Directory | FileType::Symlink => {}
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
// TODO Handle error
|
||||
tracing::error!("Failed to walk path: {error}");
|
||||
}
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
})
|
||||
});
|
||||
|
||||
let paths = paths.into_inner().unwrap();
|
||||
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
||||
|
||||
for path in paths {
|
||||
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
||||
// We can ignore this.
|
||||
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
||||
files.insert(file);
|
||||
}
|
||||
}
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ProjectFiles<'a> {
|
||||
OpenFiles(&'a FxHashSet<File>),
|
||||
@@ -416,6 +448,13 @@ impl<'a> ProjectFiles<'a> {
|
||||
ProjectFiles::Indexed(project.files(db))
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnostics(&self) -> &[IOErrorDiagnostic] {
|
||||
match self {
|
||||
ProjectFiles::OpenFiles(_) => &[],
|
||||
ProjectFiles::Indexed(indexed) => indexed.diagnostics(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
|
||||
@@ -448,13 +487,13 @@ impl Iterator for ProjectFilesIter<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct IOErrorDiagnostic {
|
||||
file: File,
|
||||
error: SourceTextError,
|
||||
file: Option<File>,
|
||||
error: IOErrorKind,
|
||||
}
|
||||
|
||||
impl Diagnostic for IOErrorDiagnostic {
|
||||
impl OldDiagnosticTrait for IOErrorDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
DiagnosticId::Io
|
||||
}
|
||||
@@ -464,7 +503,7 @@ impl Diagnostic for IOErrorDiagnostic {
|
||||
}
|
||||
|
||||
fn span(&self) -> Option<Span> {
|
||||
Some(Span::from(self.file))
|
||||
self.file.map(Span::from)
|
||||
}
|
||||
|
||||
fn severity(&self) -> Severity {
|
||||
@@ -472,15 +511,24 @@ impl Diagnostic for IOErrorDiagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone)]
|
||||
enum IOErrorKind {
|
||||
#[error(transparent)]
|
||||
Walk(#[from] walk::WalkError),
|
||||
|
||||
#[error(transparent)]
|
||||
SourceText(#[from] SourceTextError),
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::{check_file_impl, ProjectMetadata};
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::diagnostic::OldDiagnosticTrait;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::assert_function_query_was_not_run;
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
|
||||
@@ -321,7 +321,7 @@ mod tests {
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
||||
.write_files_all([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let project =
|
||||
@@ -349,7 +349,7 @@ mod tests {
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -393,7 +393,7 @@ mod tests {
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -432,7 +432,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -482,7 +482,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -532,7 +532,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -572,7 +572,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -623,7 +623,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_files([
|
||||
.write_files_all([
|
||||
(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
@@ -673,7 +673,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -703,7 +703,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -735,7 +735,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -765,7 +765,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -795,7 +795,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -828,7 +828,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -861,7 +861,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -886,7 +886,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
@@ -911,7 +911,7 @@ expected `.`, `]`
|
||||
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file(
|
||||
.write_file_all(
|
||||
root.join("pyproject.toml"),
|
||||
r#"
|
||||
[project]
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::metadata::value::{RangedValue, RelativePathBuf, ValueSource, ValueSou
|
||||
use crate::Db;
|
||||
use red_knot_python_semantic::lint::{GetLintError, Level, LintSource, RuleSelection};
|
||||
use red_knot_python_semantic::{ProgramSettings, PythonPath, PythonPlatform, SearchPathSettings};
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity, Span};
|
||||
use ruff_db::diagnostic::{DiagnosticId, OldDiagnosticTrait, Severity, Span};
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{System, SystemPath};
|
||||
use ruff_macros::Combine;
|
||||
@@ -376,7 +376,7 @@ impl OptionDiagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
impl Diagnostic for OptionDiagnostic {
|
||||
impl OldDiagnosticTrait for OptionDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
}
|
||||
|
||||
256
crates/red_knot_project/src/walk.rs
Normal file
256
crates/red_knot_project/src/walk.rs
Normal file
@@ -0,0 +1,256 @@
|
||||
use crate::{Db, IOErrorDiagnostic, IOErrorKind, Project};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::system::walk_directory::{ErrorKind, WalkDirectoryBuilder, WalkState};
|
||||
use ruff_db::system::{FileType, SystemPath, SystemPathBuf};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use std::path::PathBuf;
|
||||
use thiserror::Error;
|
||||
|
||||
/// Filter that decides which files are included in the project.
|
||||
///
|
||||
/// In the future, this will hold a reference to the `include` and `exclude` pattern.
|
||||
///
|
||||
/// This struct mainly exists because `dyn Db` isn't `Send` or `Sync`, making it impossible
|
||||
/// to access fields from within the walker.
|
||||
#[derive(Default, Debug)]
|
||||
pub(crate) struct ProjectFilesFilter<'a> {
|
||||
/// The same as [`Project::included_paths_or_root`].
|
||||
included_paths: &'a [SystemPathBuf],
|
||||
|
||||
/// The filter skips checking if the path is in `included_paths` if set to `true`.
|
||||
///
|
||||
/// Skipping this check is useful when the walker only walks over `included_paths`.
|
||||
skip_included_paths: bool,
|
||||
}
|
||||
|
||||
impl<'a> ProjectFilesFilter<'a> {
|
||||
pub(crate) fn from_project(db: &'a dyn Db, project: Project) -> Self {
|
||||
Self {
|
||||
included_paths: project.included_paths_or_root(db),
|
||||
skip_included_paths: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if a file is part of the project and included in the paths to check.
|
||||
///
|
||||
/// A file is included in the checked files if it is a sub path of the project's root
|
||||
/// (when no CLI path arguments are specified) or if it is a sub path of any path provided on the CLI (`knot check <paths>`) AND:
|
||||
///
|
||||
/// * It matches a positive `include` pattern and isn't excluded by a later negative `include` pattern.
|
||||
/// * It doesn't match a positive `exclude` pattern or is re-included by a later negative `exclude` pattern.
|
||||
///
|
||||
/// ## Note
|
||||
///
|
||||
/// This method may return `true` for files that don't end up being included when walking the
|
||||
/// project tree because it doesn't consider `.gitignore` and other ignore files when deciding
|
||||
/// if a file's included.
|
||||
pub(crate) fn is_included(&self, path: &SystemPath) -> bool {
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum CheckPathMatch {
|
||||
/// The path is a partial match of the checked path (it's a sub path)
|
||||
Partial,
|
||||
|
||||
/// The path matches a check path exactly.
|
||||
Full,
|
||||
}
|
||||
|
||||
let m = if self.skip_included_paths {
|
||||
Some(CheckPathMatch::Partial)
|
||||
} else {
|
||||
self.included_paths
|
||||
.iter()
|
||||
.filter_map(|included_path| {
|
||||
if let Ok(relative_path) = path.strip_prefix(included_path) {
|
||||
// Exact matches are always included
|
||||
if relative_path.as_str().is_empty() {
|
||||
Some(CheckPathMatch::Full)
|
||||
} else {
|
||||
Some(CheckPathMatch::Partial)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.max()
|
||||
};
|
||||
|
||||
match m {
|
||||
None => false,
|
||||
Some(CheckPathMatch::Partial) => {
|
||||
// TODO: For partial matches, only include the file if it is included by the project's include/exclude settings.
|
||||
true
|
||||
}
|
||||
Some(CheckPathMatch::Full) => true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ProjectFilesWalker<'a> {
|
||||
walker: WalkDirectoryBuilder,
|
||||
|
||||
filter: ProjectFilesFilter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> ProjectFilesWalker<'a> {
|
||||
pub(crate) fn new(db: &'a dyn Db) -> Self {
|
||||
let project = db.project();
|
||||
|
||||
let mut filter = ProjectFilesFilter::from_project(db, project);
|
||||
// It's unnecessary to filter on included paths because it only iterates over those to start with.
|
||||
filter.skip_included_paths = true;
|
||||
|
||||
Self::from_paths(db, project.included_paths_or_root(db), filter)
|
||||
.expect("included_paths_or_root to never return an empty iterator")
|
||||
}
|
||||
|
||||
/// Creates a walker for indexing the project files incrementally.
|
||||
///
|
||||
/// The main difference to a full project walk is that `paths` may contain paths
|
||||
/// that aren't part of the included files.
|
||||
pub(crate) fn incremental<P>(db: &'a dyn Db, paths: impl IntoIterator<Item = P>) -> Option<Self>
|
||||
where
|
||||
P: AsRef<SystemPath>,
|
||||
{
|
||||
let project = db.project();
|
||||
|
||||
let filter = ProjectFilesFilter::from_project(db, project);
|
||||
|
||||
Self::from_paths(db, paths, filter)
|
||||
}
|
||||
|
||||
fn from_paths<P>(
|
||||
db: &'a dyn Db,
|
||||
paths: impl IntoIterator<Item = P>,
|
||||
filter: ProjectFilesFilter<'a>,
|
||||
) -> Option<Self>
|
||||
where
|
||||
P: AsRef<SystemPath>,
|
||||
{
|
||||
let mut paths = paths.into_iter();
|
||||
|
||||
let mut walker = db.system().walk_directory(paths.next()?.as_ref());
|
||||
|
||||
for path in paths {
|
||||
walker = walker.add(path);
|
||||
}
|
||||
|
||||
Some(Self { walker, filter })
|
||||
}
|
||||
|
||||
/// Walks the project paths and collects the paths of all files that
|
||||
/// are included in the project.
|
||||
pub(crate) fn walk_paths(self) -> (Vec<SystemPathBuf>, Vec<IOErrorDiagnostic>) {
|
||||
let paths = std::sync::Mutex::new(Vec::new());
|
||||
let diagnostics = std::sync::Mutex::new(Vec::new());
|
||||
|
||||
self.walker.run(|| {
|
||||
Box::new(|entry| {
|
||||
match entry {
|
||||
Ok(entry) => {
|
||||
if !self.filter.is_included(entry.path()) {
|
||||
tracing::debug!("Ignoring not-included path: {}", entry.path());
|
||||
return WalkState::Skip;
|
||||
}
|
||||
|
||||
// Skip over any non python files to avoid creating too many entries in `Files`.
|
||||
match entry.file_type() {
|
||||
FileType::File => {
|
||||
if entry
|
||||
.path()
|
||||
.extension()
|
||||
.and_then(PySourceType::try_from_extension)
|
||||
.is_some()
|
||||
{
|
||||
let mut paths = paths.lock().unwrap();
|
||||
paths.push(entry.into_path());
|
||||
}
|
||||
}
|
||||
FileType::Directory | FileType::Symlink => {}
|
||||
}
|
||||
}
|
||||
Err(error) => match error.kind() {
|
||||
ErrorKind::Loop { .. } => {
|
||||
unreachable!("Loops shouldn't be possible without following symlinks.")
|
||||
}
|
||||
ErrorKind::Io { path, err } => {
|
||||
let mut diagnostics = diagnostics.lock().unwrap();
|
||||
let error = if let Some(path) = path {
|
||||
WalkError::IOPathError {
|
||||
path: path.clone(),
|
||||
error: err.to_string(),
|
||||
}
|
||||
} else {
|
||||
WalkError::IOError {
|
||||
error: err.to_string(),
|
||||
}
|
||||
};
|
||||
|
||||
diagnostics.push(IOErrorDiagnostic {
|
||||
file: None,
|
||||
error: IOErrorKind::Walk(error),
|
||||
});
|
||||
}
|
||||
ErrorKind::NonUtf8Path { path } => {
|
||||
diagnostics.lock().unwrap().push(IOErrorDiagnostic {
|
||||
file: None,
|
||||
error: IOErrorKind::Walk(WalkError::NonUtf8Path {
|
||||
path: path.clone(),
|
||||
}),
|
||||
});
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
WalkState::Continue
|
||||
})
|
||||
});
|
||||
|
||||
(
|
||||
paths.into_inner().unwrap(),
|
||||
diagnostics.into_inner().unwrap(),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn collect_vec(self, db: &dyn Db) -> (Vec<File>, Vec<IOErrorDiagnostic>) {
|
||||
let (paths, diagnostics) = self.walk_paths();
|
||||
|
||||
(
|
||||
paths
|
||||
.into_iter()
|
||||
.filter_map(move |path| {
|
||||
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
||||
// We can ignore this.
|
||||
system_path_to_file(db.upcast(), &path).ok()
|
||||
})
|
||||
.collect(),
|
||||
diagnostics,
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn collect_set(self, db: &dyn Db) -> (FxHashSet<File>, Vec<IOErrorDiagnostic>) {
|
||||
let (paths, diagnostics) = self.walk_paths();
|
||||
|
||||
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
||||
|
||||
for path in paths {
|
||||
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
||||
files.insert(file);
|
||||
}
|
||||
}
|
||||
|
||||
(files, diagnostics)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug, Clone)]
|
||||
pub(crate) enum WalkError {
|
||||
#[error("`{path}`: {error}")]
|
||||
IOPathError { path: SystemPathBuf, error: String },
|
||||
|
||||
#[error("Failed to walk project directory: {error}")]
|
||||
IOError { error: String },
|
||||
|
||||
#[error("`{path}` is not a valid UTF-8 path")]
|
||||
NonUtf8Path { path: PathBuf },
|
||||
}
|
||||
@@ -6,7 +6,7 @@ use tracing::info;
|
||||
use red_knot_python_semantic::system_module_search_paths;
|
||||
use ruff_cache::{CacheKey, CacheKeyHasher};
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
use ruff_db::{Db as _, Upcast};
|
||||
use ruff_db::Upcast;
|
||||
|
||||
use crate::db::{Db, ProjectDatabase};
|
||||
use crate::watch::Watcher;
|
||||
@@ -42,9 +42,9 @@ impl ProjectWatcher {
|
||||
|
||||
pub fn update(&mut self, db: &ProjectDatabase) {
|
||||
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
|
||||
let project_path = db.project().root(db).to_path_buf();
|
||||
let project_path = db.project().root(db);
|
||||
|
||||
let new_cache_key = Self::compute_cache_key(&project_path, &search_paths);
|
||||
let new_cache_key = Self::compute_cache_key(project_path, &search_paths);
|
||||
|
||||
if self.cache_key == Some(new_cache_key) {
|
||||
return;
|
||||
@@ -68,41 +68,47 @@ impl ProjectWatcher {
|
||||
|
||||
self.has_errored_paths = false;
|
||||
|
||||
let project_path = db
|
||||
.system()
|
||||
.canonicalize_path(&project_path)
|
||||
.unwrap_or(project_path);
|
||||
|
||||
let config_paths = db
|
||||
.project()
|
||||
.metadata(db)
|
||||
.extra_configuration_paths()
|
||||
.iter()
|
||||
.cloned();
|
||||
.map(SystemPathBuf::as_path);
|
||||
|
||||
// Watch both the project root and any paths provided by the user on the CLI (removing any redundant nested paths).
|
||||
// This is necessary to observe changes to files that are outside the project root.
|
||||
// We always need to watch the project root to observe changes to its configuration.
|
||||
let included_paths = ruff_db::system::deduplicate_nested_paths(
|
||||
std::iter::once(project_path).chain(
|
||||
db.project()
|
||||
.included_paths_list(db)
|
||||
.iter()
|
||||
.map(SystemPathBuf::as_path),
|
||||
),
|
||||
);
|
||||
|
||||
// Find the non-overlapping module search paths and filter out paths that are already covered by the project.
|
||||
// Module search paths are already canonicalized.
|
||||
let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
|
||||
search_paths
|
||||
.into_iter()
|
||||
.filter(|path| !path.starts_with(&project_path)),
|
||||
)
|
||||
.map(SystemPath::to_path_buf);
|
||||
.filter(|path| !path.starts_with(project_path)),
|
||||
);
|
||||
|
||||
// Now add the new paths, first starting with the project path and then
|
||||
// adding the library search paths, and finally the paths for configurations.
|
||||
for path in std::iter::once(project_path)
|
||||
for path in included_paths
|
||||
.chain(unique_module_paths)
|
||||
.chain(config_paths)
|
||||
{
|
||||
// Log a warning. It's not worth aborting if registering a single folder fails because
|
||||
// Ruff otherwise stills works as expected.
|
||||
if let Err(error) = self.watcher.watch(&path) {
|
||||
if let Err(error) = self.watcher.watch(path) {
|
||||
// TODO: Log a user-facing warning.
|
||||
tracing::warn!("Failed to setup watcher for path `{path}`: {error}. You have to restart Ruff after making changes to files under this path or you might see stale results.");
|
||||
self.has_errored_paths = true;
|
||||
} else {
|
||||
self.watched_paths.push(path);
|
||||
self.watched_paths.push(path.to_path_buf());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -117,7 +117,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
|
||||
let code = std::fs::read_to_string(source)?;
|
||||
|
||||
let mut check_with_file_name = |path: &SystemPath| {
|
||||
memory_fs.write_file(path, &code).unwrap();
|
||||
memory_fs.write_file_all(path, &code).unwrap();
|
||||
File::sync_path(&mut db, path);
|
||||
|
||||
// this test is only asserting that we can pull every expression type without a panic
|
||||
@@ -216,6 +216,17 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
||||
self.visit_body(&for_stmt.orelse);
|
||||
return;
|
||||
}
|
||||
Stmt::With(with_stmt) => {
|
||||
for item in &with_stmt.items {
|
||||
if let Some(target) = &item.optional_vars {
|
||||
self.visit_target(target);
|
||||
}
|
||||
self.visit_expr(&item.context_expr);
|
||||
}
|
||||
|
||||
self.visit_body(&with_stmt.body);
|
||||
return;
|
||||
}
|
||||
Stmt::AnnAssign(_)
|
||||
| Stmt::Return(_)
|
||||
| Stmt::Delete(_)
|
||||
@@ -223,7 +234,6 @@ impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
|
||||
| Stmt::TypeAlias(_)
|
||||
| Stmt::While(_)
|
||||
| Stmt::If(_)
|
||||
| Stmt::With(_)
|
||||
| Stmt::Match(_)
|
||||
| Stmt::Raise(_)
|
||||
| Stmt::Try(_)
|
||||
@@ -283,4 +293,9 @@ const KNOWN_FAILURES: &[(&str, bool, bool)] = &[
|
||||
// related to circular references in f-string annotations (invalid syntax)
|
||||
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_15.py", true, true),
|
||||
("crates/ruff_linter/resources/test/fixtures/pyflakes/F821_14.py", false, true),
|
||||
// related to circular references in stub type annotations (salsa cycle panic):
|
||||
("crates/ruff_linter/resources/test/fixtures/pycodestyle/E501_4.py", false, true),
|
||||
("crates/ruff_linter/resources/test/fixtures/pyflakes/F401_0.py", false, true),
|
||||
("crates/ruff_linter/resources/test/fixtures/pyflakes/F401_12.py", false, true),
|
||||
("crates/ruff_linter/resources/test/fixtures/pyflakes/F401_14.py", false, true),
|
||||
];
|
||||
|
||||
@@ -0,0 +1,195 @@
|
||||
# Callable
|
||||
|
||||
References:
|
||||
|
||||
- <https://typing.readthedocs.io/en/latest/spec/callables.html#callable>
|
||||
|
||||
TODO: Use `collections.abc` as importing from `typing` is deprecated but this requires support for
|
||||
`*` imports. See: <https://docs.python.org/3/library/typing.html#deprecated-aliases>.
|
||||
|
||||
## Invalid forms
|
||||
|
||||
The `Callable` special form requires _exactly_ two arguments where the first argument is either a
|
||||
parameter type list, parameter specification, `typing.Concatenate`, or `...` and the second argument
|
||||
is the return type. Here, we explore various invalid forms.
|
||||
|
||||
### Empty
|
||||
|
||||
A bare `Callable` without any type arguments:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
def _(c: Callable):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
### Invalid parameter type argument
|
||||
|
||||
When it's not a list:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
||||
def _(c: Callable[int, str]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
Or, when it's a literal type:
|
||||
|
||||
```py
|
||||
# error: [invalid-type-form] "The first argument to `Callable` must be either a list of types, ParamSpec, Concatenate, or `...`"
|
||||
def _(c: Callable[42, str]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
Or, when one of the parameter type is invalid in the list:
|
||||
|
||||
```py
|
||||
def _(c: Callable[[int, 42, str, False], None]):
|
||||
# revealed: (int, @Todo(number literal in type expression), str, @Todo(boolean literal in type expression), /) -> None
|
||||
reveal_type(c)
|
||||
```
|
||||
|
||||
### Missing return type
|
||||
|
||||
Using a parameter list:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
||||
def _(c: Callable[[int, str]]):
|
||||
reveal_type(c) # revealed: (int, str, /) -> Unknown
|
||||
```
|
||||
|
||||
Or, an ellipsis:
|
||||
|
||||
```py
|
||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
||||
def _(c: Callable[...]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
### More than two arguments
|
||||
|
||||
We can't reliably infer the callable type if there are more then 2 arguments because we don't know
|
||||
which argument corresponds to either the parameters or the return type.
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
||||
def _(c: Callable[[int], str, str]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
## Simple
|
||||
|
||||
A simple `Callable` with multiple parameters and a return type:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
def _(c: Callable[[int, str], int]):
|
||||
reveal_type(c) # revealed: (int, str, /) -> int
|
||||
```
|
||||
|
||||
## Nested
|
||||
|
||||
A nested `Callable` as one of the parameter types:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
def _(c: Callable[[Callable[[int], str]], int]):
|
||||
reveal_type(c) # revealed: ((int, /) -> str, /) -> int
|
||||
```
|
||||
|
||||
And, as the return type:
|
||||
|
||||
```py
|
||||
def _(c: Callable[[int, str], Callable[[int], int]]):
|
||||
reveal_type(c) # revealed: (int, str, /) -> (int, /) -> int
|
||||
```
|
||||
|
||||
## Gradual form
|
||||
|
||||
The `Callable` special form supports the use of `...` in place of the list of parameter types. This
|
||||
is a [gradual form] indicating that the type is consistent with any input signature:
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
def gradual_form(c: Callable[..., str]):
|
||||
reveal_type(c) # revealed: (...) -> str
|
||||
```
|
||||
|
||||
## Using `typing.Concatenate`
|
||||
|
||||
Using `Concatenate` as the first argument to `Callable`:
|
||||
|
||||
```py
|
||||
from typing_extensions import Callable, Concatenate
|
||||
|
||||
def _(c: Callable[Concatenate[int, str, ...], int]):
|
||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
||||
```
|
||||
|
||||
And, as one of the parameter types:
|
||||
|
||||
```py
|
||||
def _(c: Callable[[Concatenate[int, str, ...], int], int]):
|
||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
||||
```
|
||||
|
||||
## Using `typing.ParamSpec`
|
||||
|
||||
Using a `ParamSpec` in a `Callable` annotation:
|
||||
|
||||
```py
|
||||
from typing_extensions import Callable
|
||||
|
||||
# TODO: Not an error; remove once `ParamSpec` is supported
|
||||
# error: [invalid-type-form]
|
||||
def _[**P1](c: Callable[P1, int]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
And, using the legacy syntax:
|
||||
|
||||
```py
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
P2 = ParamSpec("P2")
|
||||
|
||||
# TODO: Not an error; remove once `ParamSpec` is supported
|
||||
# error: [invalid-type-form]
|
||||
def _(c: Callable[P2, int]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
## Using `typing.Unpack`
|
||||
|
||||
Using the unpack operator (`*`):
|
||||
|
||||
```py
|
||||
from typing_extensions import Callable, TypeVarTuple
|
||||
|
||||
Ts = TypeVarTuple("Ts")
|
||||
|
||||
def _(c: Callable[[int, *Ts], int]):
|
||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
||||
```
|
||||
|
||||
And, using the legacy syntax using `Unpack`:
|
||||
|
||||
```py
|
||||
from typing_extensions import Unpack
|
||||
|
||||
def _(c: Callable[[int, Unpack[Ts]], int]):
|
||||
reveal_type(c) # revealed: (*args: @Todo(todo signature *args), **kwargs: @Todo(todo signature **kwargs)) -> int
|
||||
```
|
||||
|
||||
[gradual form]: https://typing.readthedocs.io/en/latest/spec/glossary.html#term-gradual-form
|
||||
@@ -0,0 +1,45 @@
|
||||
# Tests for invalid types in type expressions
|
||||
|
||||
## Invalid types are rejected
|
||||
|
||||
Many types are illegal in the context of a type expression:
|
||||
|
||||
```py
|
||||
import typing
|
||||
from knot_extensions import AlwaysTruthy, AlwaysFalsy
|
||||
from typing_extensions import Literal, Never
|
||||
|
||||
def _(
|
||||
a: type[int],
|
||||
b: AlwaysTruthy,
|
||||
c: AlwaysFalsy,
|
||||
d: Literal[True],
|
||||
e: Literal["bar"],
|
||||
f: Literal[b"foo"],
|
||||
g: tuple[int, str],
|
||||
h: Never,
|
||||
):
|
||||
def foo(): ...
|
||||
def invalid(
|
||||
i: a, # error: [invalid-type-form] "Variable of type `type[int]` is not allowed in a type expression"
|
||||
j: b, # error: [invalid-type-form]
|
||||
k: c, # error: [invalid-type-form]
|
||||
l: d, # error: [invalid-type-form]
|
||||
m: e, # error: [invalid-type-form]
|
||||
n: f, # error: [invalid-type-form]
|
||||
o: g, # error: [invalid-type-form]
|
||||
p: h, # error: [invalid-type-form]
|
||||
q: typing, # error: [invalid-type-form]
|
||||
r: foo, # error: [invalid-type-form]
|
||||
):
|
||||
reveal_type(i) # revealed: Unknown
|
||||
reveal_type(j) # revealed: Unknown
|
||||
reveal_type(k) # revealed: Unknown
|
||||
reveal_type(l) # revealed: Unknown
|
||||
reveal_type(m) # revealed: Unknown
|
||||
reveal_type(n) # revealed: Unknown
|
||||
reveal_type(o) # revealed: Unknown
|
||||
reveal_type(p) # revealed: Unknown
|
||||
reveal_type(q) # revealed: Unknown
|
||||
reveal_type(r) # revealed: Unknown
|
||||
```
|
||||
@@ -73,12 +73,12 @@ qux = (foo, bar)
|
||||
reveal_type(qux) # revealed: tuple[Literal["foo"], Literal["bar"]]
|
||||
|
||||
# TODO: Infer "LiteralString"
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(overloaded method)
|
||||
reveal_type(foo.join(qux)) # revealed: @Todo(return type of decorated function)
|
||||
|
||||
template: LiteralString = "{}, {}"
|
||||
reveal_type(template) # revealed: Literal["{}, {}"]
|
||||
# TODO: Infer `LiteralString`
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(overloaded method)
|
||||
reveal_type(template.format(foo, bar)) # revealed: @Todo(return type of decorated function)
|
||||
```
|
||||
|
||||
### Assignability
|
||||
|
||||
@@ -70,8 +70,7 @@ import typing
|
||||
|
||||
class ListSubclass(typing.List): ...
|
||||
|
||||
# TODO: should have `Generic`, should not have `Unknown`
|
||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Unknown, Literal[object]]
|
||||
# revealed: tuple[Literal[ListSubclass], Literal[list], Literal[MutableSequence], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
reveal_type(ListSubclass.__mro__)
|
||||
|
||||
class DictSubclass(typing.Dict): ...
|
||||
|
||||
@@ -18,7 +18,7 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
|
||||
# TODO: should understand the annotation
|
||||
reveal_type(args) # revealed: tuple
|
||||
|
||||
reveal_type(Alias) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||
reveal_type(Alias) # revealed: @Todo(Invalid or unsupported `KnownInstanceType` in `Type::to_type_expression`)
|
||||
|
||||
def g() -> TypeGuard[int]: ...
|
||||
def h() -> TypeIs[int]: ...
|
||||
@@ -29,11 +29,13 @@ def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.
|
||||
# TODO: should understand the annotation
|
||||
reveal_type(kwargs) # revealed: dict
|
||||
|
||||
# TODO: not an error; remove once `call` is implemented for `Callable`
|
||||
# error: [call-non-callable]
|
||||
return callback(42, *args, **kwargs)
|
||||
|
||||
class Foo:
|
||||
def method(self, x: Self):
|
||||
reveal_type(x) # revealed: @Todo(Unsupported or invalid type in a type expression)
|
||||
reveal_type(x) # revealed: @Todo(Invalid or unsupported `KnownInstanceType` in `Type::to_type_expression`)
|
||||
```
|
||||
|
||||
## Inheritance
|
||||
|
||||
@@ -75,8 +75,7 @@ def _(flag: bool):
|
||||
|
||||
f = Foo()
|
||||
|
||||
# TODO: We should emit an `unsupported-operator` error here, possibly with the information
|
||||
# that `Foo.__iadd__` may be unbound as additional context.
|
||||
# error: [unsupported-operator] "Operator `+=` is unsupported between objects of type `Foo` and `Literal["Hello, world!"]`"
|
||||
f += "Hello, world!"
|
||||
|
||||
reveal_type(f) # revealed: int | Unknown
|
||||
|
||||
@@ -155,7 +155,9 @@ reveal_type(c_instance.declared_in_body_and_init) # revealed: str | None
|
||||
|
||||
reveal_type(c_instance.declared_in_body_defined_in_init) # revealed: str | None
|
||||
|
||||
reveal_type(c_instance.bound_in_body_declared_in_init) # revealed: str | None
|
||||
# TODO: This should be `str | None`. Fixing this requires an overhaul of the `Symbol` API,
|
||||
# which is planned in https://github.com/astral-sh/ruff/issues/14297
|
||||
reveal_type(c_instance.bound_in_body_declared_in_init) # revealed: Unknown | str | None
|
||||
|
||||
reveal_type(c_instance.bound_in_body_and_init) # revealed: Unknown | None | Literal["a"]
|
||||
```
|
||||
@@ -356,9 +358,25 @@ class C:
|
||||
|
||||
c_instance = C()
|
||||
|
||||
# TODO: Should be `Unknown | int | None`
|
||||
# error: [unresolved-attribute]
|
||||
reveal_type(c_instance.x) # revealed: Unknown
|
||||
reveal_type(c_instance.x) # revealed: Unknown | int | None
|
||||
```
|
||||
|
||||
#### Attributes defined in `with` statements, but with unpacking
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int | None, int]: ...
|
||||
def __exit__(self, exc_type, exc_value, traceback) -> None: ...
|
||||
|
||||
class C:
|
||||
def __init__(self) -> None:
|
||||
with ContextManager() as (self.x, self.y):
|
||||
pass
|
||||
|
||||
c_instance = C()
|
||||
|
||||
reveal_type(c_instance.x) # revealed: Unknown | int | None
|
||||
reveal_type(c_instance.y) # revealed: Unknown | int
|
||||
```
|
||||
|
||||
#### Attributes defined in comprehensions
|
||||
@@ -704,8 +722,91 @@ reveal_type(Derived().declared_in_body) # revealed: int | None
|
||||
reveal_type(Derived().defined_in_init) # revealed: str | None
|
||||
```
|
||||
|
||||
## Accessing attributes on class objects
|
||||
|
||||
When accessing attributes on class objects, they are always looked up on the type of the class
|
||||
object first, i.e. on the metaclass:
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class Meta1:
|
||||
attr: Literal["metaclass value"] = "metaclass value"
|
||||
|
||||
class C1(metaclass=Meta1): ...
|
||||
|
||||
reveal_type(C1.attr) # revealed: Literal["metaclass value"]
|
||||
```
|
||||
|
||||
However, the metaclass attribute only takes precedence over a class-level attribute if it is a data
|
||||
descriptor. If it is a non-data descriptor or a normal attribute, the class-level attribute is used
|
||||
instead (see the [descriptor protocol tests] for data/non-data descriptor attributes):
|
||||
|
||||
```py
|
||||
class Meta2:
|
||||
attr: str = "metaclass value"
|
||||
|
||||
class C2(metaclass=Meta2):
|
||||
attr: Literal["class value"] = "class value"
|
||||
|
||||
reveal_type(C2.attr) # revealed: Literal["class value"]
|
||||
```
|
||||
|
||||
If the class-level attribute is only partially defined, we union the metaclass attribute with the
|
||||
class-level attribute:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Meta3:
|
||||
attr1 = "metaclass value"
|
||||
attr2: Literal["metaclass value"] = "metaclass value"
|
||||
|
||||
class C3(metaclass=Meta3):
|
||||
if flag:
|
||||
attr1 = "class value"
|
||||
# TODO: Neither mypy nor pyright show an error here, but we could consider emitting a conflicting-declaration diagnostic here.
|
||||
attr2: Literal["class value"] = "class value"
|
||||
|
||||
reveal_type(C3.attr1) # revealed: Unknown | Literal["metaclass value", "class value"]
|
||||
reveal_type(C3.attr2) # revealed: Literal["metaclass value", "class value"]
|
||||
```
|
||||
|
||||
If the *metaclass* attribute is only partially defined, we emit a `possibly-unbound-attribute`
|
||||
diagnostic:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Meta4:
|
||||
if flag:
|
||||
attr1: str = "metaclass value"
|
||||
|
||||
class C4(metaclass=Meta4): ...
|
||||
# error: [possibly-unbound-attribute]
|
||||
reveal_type(C4.attr1) # revealed: str
|
||||
```
|
||||
|
||||
Finally, if both the metaclass attribute and the class-level attribute are only partially defined,
|
||||
we union them and emit a `possibly-unbound-attribute` diagnostic:
|
||||
|
||||
```py
|
||||
def _(flag1: bool, flag2: bool):
|
||||
class Meta5:
|
||||
if flag1:
|
||||
attr1 = "metaclass value"
|
||||
|
||||
class C5(metaclass=Meta5):
|
||||
if flag2:
|
||||
attr1 = "class value"
|
||||
|
||||
# error: [possibly-unbound-attribute]
|
||||
reveal_type(C5.attr1) # revealed: Unknown | Literal["metaclass value", "class value"]
|
||||
```
|
||||
|
||||
## Union of attributes
|
||||
|
||||
If the (meta)class is a union type or if the attribute on the (meta) class has a union type, we
|
||||
infer those union types accordingly:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
if flag:
|
||||
@@ -716,14 +817,35 @@ def _(flag: bool):
|
||||
class C1:
|
||||
x = 2
|
||||
|
||||
reveal_type(C1.x) # revealed: Unknown | Literal[1, 2]
|
||||
|
||||
class C2:
|
||||
if flag:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(C1.x) # revealed: Unknown | Literal[1, 2]
|
||||
reveal_type(C2.x) # revealed: Unknown | Literal[3, 4]
|
||||
|
||||
if flag:
|
||||
class Meta3(type):
|
||||
x = 5
|
||||
|
||||
else:
|
||||
class Meta3(type):
|
||||
x = 6
|
||||
|
||||
class C3(metaclass=Meta3): ...
|
||||
reveal_type(C3.x) # revealed: Unknown | Literal[5, 6]
|
||||
|
||||
class Meta4(type):
|
||||
if flag:
|
||||
x = 7
|
||||
else:
|
||||
x = 8
|
||||
|
||||
class C4(metaclass=Meta4): ...
|
||||
reveal_type(C4.x) # revealed: Unknown | Literal[7, 8]
|
||||
```
|
||||
|
||||
## Inherited class attributes
|
||||
@@ -883,7 +1005,7 @@ def _(flag: bool):
|
||||
self.x = 1
|
||||
|
||||
# error: [possibly-unbound-attribute]
|
||||
reveal_type(Foo().x) # revealed: int
|
||||
reveal_type(Foo().x) # revealed: int | Unknown
|
||||
```
|
||||
|
||||
#### Possibly unbound
|
||||
@@ -1105,8 +1227,8 @@ Most attribute accesses on bool-literal types are delegated to `builtins.bool`,
|
||||
bools are instances of that class:
|
||||
|
||||
```py
|
||||
reveal_type(True.__and__) # revealed: @Todo(overloaded method)
|
||||
reveal_type(False.__or__) # revealed: @Todo(overloaded method)
|
||||
reveal_type(True.__and__) # revealed: <bound method `__and__` of `Literal[True]`>
|
||||
reveal_type(False.__or__) # revealed: <bound method `__or__` of `Literal[False]`>
|
||||
```
|
||||
|
||||
Some attributes are special-cased, however:
|
||||
@@ -1209,6 +1331,20 @@ class C:
|
||||
reveal_type(C().x) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Accessing attributes on `Never`
|
||||
|
||||
Arbitrary attributes can be accessed on `Never` without emitting any errors:
|
||||
|
||||
```py
|
||||
from typing_extensions import Never
|
||||
|
||||
def f(never: Never):
|
||||
reveal_type(never.arbitrary_attribute) # revealed: Never
|
||||
|
||||
# Assigning `Never` to an attribute on `Never` is also allowed:
|
||||
never.another_attribute = never
|
||||
```
|
||||
|
||||
### Builtin types attributes
|
||||
|
||||
This test can probably be removed eventually, but we currently include it because we do not yet
|
||||
@@ -1248,6 +1384,7 @@ reveal_type(C.a_none) # revealed: None
|
||||
Some of the tests in the *Class and instance variables* section draw inspiration from
|
||||
[pyright's documentation] on this topic.
|
||||
|
||||
[descriptor protocol tests]: descriptor_protocol.md
|
||||
[pyright's documentation]: https://microsoft.github.io/pyright/#/type-concepts-advanced?id=class-and-instance-variables
|
||||
[typing spec on `classvar`]: https://typing.readthedocs.io/en/latest/spec/class-compat.html#classvar
|
||||
[`typing.classvar`]: https://docs.python.org/3/library/typing.html#typing.ClassVar
|
||||
|
||||
@@ -10,8 +10,7 @@ reveal_type(-3 // 3) # revealed: Literal[-1]
|
||||
reveal_type(-3 / 3) # revealed: float
|
||||
reveal_type(5 % 3) # revealed: Literal[2]
|
||||
|
||||
# TODO: Should emit `unsupported-operator` but we don't understand the bases of `str`, so we think
|
||||
# it inherits `Unknown`, so we think `str.__radd__` is `Unknown` instead of nonexistent.
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[2]` and `Literal["f"]`"
|
||||
reveal_type(2 + "f") # revealed: Unknown
|
||||
|
||||
def lhs(x: int):
|
||||
|
||||
@@ -40,10 +40,21 @@ class Meta(type):
|
||||
def __getitem__(cls, key: int) -> str:
|
||||
return str(key)
|
||||
|
||||
class DunderOnMetaClass(metaclass=Meta):
|
||||
class DunderOnMetaclass(metaclass=Meta):
|
||||
pass
|
||||
|
||||
reveal_type(DunderOnMetaClass[0]) # revealed: str
|
||||
reveal_type(DunderOnMetaclass[0]) # revealed: str
|
||||
```
|
||||
|
||||
If the dunder method is only present on the class itself, it will not be called:
|
||||
|
||||
```py
|
||||
class ClassWithNormalDunder:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return str(key)
|
||||
|
||||
# error: [non-subscriptable]
|
||||
ClassWithNormalDunder[0]
|
||||
```
|
||||
|
||||
## Operating on instances
|
||||
@@ -79,13 +90,32 @@ reveal_type(this_fails[0]) # revealed: Unknown
|
||||
However, the attached dunder method *can* be called if accessed directly:
|
||||
|
||||
```py
|
||||
# TODO: `this_fails.__getitem__` is incorrectly treated as a bound method. This
|
||||
# should be fixed with https://github.com/astral-sh/ruff/issues/16367
|
||||
# error: [too-many-positional-arguments]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(this_fails.__getitem__(this_fails, 0)) # revealed: Unknown | str
|
||||
```
|
||||
|
||||
The instance-level method is also not called when the class-level method is present:
|
||||
|
||||
```py
|
||||
def external_getitem1(instance, key) -> str:
|
||||
return "a"
|
||||
|
||||
def external_getitem2(key) -> int:
|
||||
return 1
|
||||
|
||||
def _(flag: bool):
|
||||
class ThisFails:
|
||||
if flag:
|
||||
__getitem__ = external_getitem1
|
||||
|
||||
def __init__(self):
|
||||
self.__getitem__ = external_getitem2
|
||||
|
||||
this_fails = ThisFails()
|
||||
|
||||
# error: [call-possibly-unbound-method]
|
||||
reveal_type(this_fails[0]) # revealed: Unknown | str
|
||||
```
|
||||
|
||||
## When the dunder is not a method
|
||||
|
||||
A dunder can also be a non-method callable:
|
||||
@@ -126,3 +156,64 @@ class_with_descriptor_dunder = ClassWithDescriptorDunder()
|
||||
|
||||
reveal_type(class_with_descriptor_dunder[0]) # revealed: str
|
||||
```
|
||||
|
||||
## Dunders can not be overwritten on instances
|
||||
|
||||
If we attempt to overwrite a dunder method on an instance, it does not affect the behavior of
|
||||
implicit dunder calls:
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return str(key)
|
||||
|
||||
def f(self):
|
||||
# TODO: This should emit an `invalid-assignment` diagnostic once we understand the type of `self`
|
||||
self.__getitem__ = None
|
||||
|
||||
# This is still fine, and simply calls the `__getitem__` method on the class
|
||||
reveal_type(C()[0]) # revealed: str
|
||||
```
|
||||
|
||||
## Calling a union of dunder methods
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class C:
|
||||
if flag:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return str(key)
|
||||
else:
|
||||
def __getitem__(self, key: int) -> bytes:
|
||||
return key
|
||||
|
||||
c = C()
|
||||
reveal_type(c[0]) # revealed: str | bytes
|
||||
|
||||
if flag:
|
||||
class D:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return str(key)
|
||||
|
||||
else:
|
||||
class D:
|
||||
def __getitem__(self, key: int) -> bytes:
|
||||
return key
|
||||
|
||||
d = D()
|
||||
reveal_type(d[0]) # revealed: str | bytes
|
||||
```
|
||||
|
||||
## Calling a possibly-unbound dunder method
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class C:
|
||||
if flag:
|
||||
def __getitem__(self, key: int) -> str:
|
||||
return str(key)
|
||||
|
||||
c = C()
|
||||
# error: [call-possibly-unbound-method]
|
||||
reveal_type(c[0]) # revealed: str
|
||||
```
|
||||
|
||||
@@ -12,7 +12,7 @@ import inspect
|
||||
|
||||
class Descriptor:
|
||||
def __get__(self, instance, owner) -> str:
|
||||
return 1
|
||||
return "a"
|
||||
|
||||
class C:
|
||||
normal: int = 1
|
||||
@@ -59,7 +59,7 @@ import sys
|
||||
reveal_type(inspect.getattr_static(sys, "platform")) # revealed: LiteralString
|
||||
reveal_type(inspect.getattr_static(inspect, "getattr_static")) # revealed: Literal[getattr_static]
|
||||
|
||||
reveal_type(inspect.getattr_static(1, "real")) # revealed: Literal[1]
|
||||
reveal_type(inspect.getattr_static(1, "real")) # revealed: Literal[real]
|
||||
```
|
||||
|
||||
(Implicit) instance attributes can also be accessed through `inspect.getattr_static`:
|
||||
@@ -72,6 +72,23 @@ class D:
|
||||
reveal_type(inspect.getattr_static(D(), "instance_attr")) # revealed: int
|
||||
```
|
||||
|
||||
And attributes on metaclasses can be accessed when probing the class:
|
||||
|
||||
```py
|
||||
class Meta(type):
|
||||
attr: int = 1
|
||||
|
||||
class E(metaclass=Meta): ...
|
||||
|
||||
reveal_type(inspect.getattr_static(E, "attr")) # revealed: int
|
||||
```
|
||||
|
||||
Metaclass attributes can not be added when probing an instance of the class:
|
||||
|
||||
```py
|
||||
reveal_type(inspect.getattr_static(E(), "attr", "non_existent")) # revealed: Literal["non_existent"]
|
||||
```
|
||||
|
||||
## Error cases
|
||||
|
||||
We can only infer precise types if the attribute is a literal string. In all other cases, we fall
|
||||
|
||||
@@ -255,6 +255,58 @@ method_wrapper()
|
||||
method_wrapper(C(), C, "one too many")
|
||||
```
|
||||
|
||||
## Fallback to metaclass
|
||||
|
||||
When a method is accessed on a class object, it is looked up on the metaclass if it is not found on
|
||||
the class itself. This also creates a bound method that is bound to the class object itself:
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class Meta(type):
|
||||
def f(cls, arg: int) -> str:
|
||||
return "a"
|
||||
|
||||
class C(metaclass=Meta):
|
||||
pass
|
||||
|
||||
reveal_type(C.f) # revealed: <bound method `f` of `Literal[C]`>
|
||||
reveal_type(C.f(1)) # revealed: str
|
||||
```
|
||||
|
||||
The method `f` can not be accessed from an instance of the class:
|
||||
|
||||
```py
|
||||
# error: [unresolved-attribute] "Type `C` has no attribute `f`"
|
||||
C().f
|
||||
```
|
||||
|
||||
A metaclass function can be shadowed by a method on the class:
|
||||
|
||||
```py
|
||||
from typing import Any, Literal
|
||||
|
||||
class D(metaclass=Meta):
|
||||
def f(arg: int) -> Literal["a"]:
|
||||
return "a"
|
||||
|
||||
reveal_type(D.f(1)) # revealed: Literal["a"]
|
||||
```
|
||||
|
||||
If the class method is possibly unbound, we union the return types:
|
||||
|
||||
```py
|
||||
def flag() -> bool:
|
||||
return True
|
||||
|
||||
class E(metaclass=Meta):
|
||||
if flag():
|
||||
def f(arg: int) -> Any:
|
||||
return "a"
|
||||
|
||||
reveal_type(E.f(1)) # revealed: str | Any
|
||||
```
|
||||
|
||||
## `@classmethod`
|
||||
|
||||
### Basic
|
||||
@@ -371,10 +423,10 @@ class C:
|
||||
# these should all return `str`:
|
||||
|
||||
reveal_type(C.f1(1)) # revealed: @Todo(return type of decorated function)
|
||||
reveal_type(C().f1(1)) # revealed: @Todo(decorated method)
|
||||
reveal_type(C().f1(1)) # revealed: @Todo(return type of decorated function)
|
||||
|
||||
reveal_type(C.f2(1)) # revealed: @Todo(return type of decorated function)
|
||||
reveal_type(C().f2(1)) # revealed: @Todo(decorated method)
|
||||
reveal_type(C().f2(1)) # revealed: @Todo(return type of decorated function)
|
||||
```
|
||||
|
||||
[functions and methods]: https://docs.python.org/3/howto/descriptor.html#functions-and-methods
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
# Never is callable
|
||||
|
||||
The type `Never` is callable with an arbitrary set of arguments. The result is always `Never`.
|
||||
|
||||
```py
|
||||
from typing_extensions import Never
|
||||
|
||||
def f(never: Never):
|
||||
reveal_type(never()) # revealed: Never
|
||||
reveal_type(never(1)) # revealed: Never
|
||||
reveal_type(never(1, "a", never, x=None)) # revealed: Never
|
||||
```
|
||||
@@ -0,0 +1,50 @@
|
||||
# Call `type[...]`
|
||||
|
||||
## Single class
|
||||
|
||||
### Trivial constructor
|
||||
|
||||
```py
|
||||
class C: ...
|
||||
|
||||
def _(subclass_of_c: type[C]):
|
||||
reveal_type(subclass_of_c()) # revealed: C
|
||||
```
|
||||
|
||||
### Non-trivial constructor
|
||||
|
||||
```py
|
||||
class C:
|
||||
def __init__(self, x: int): ...
|
||||
|
||||
def _(subclass_of_c: type[C]):
|
||||
reveal_type(subclass_of_c(1)) # revealed: C
|
||||
|
||||
# TODO: Those should all be errors
|
||||
reveal_type(subclass_of_c("a")) # revealed: C
|
||||
reveal_type(subclass_of_c()) # revealed: C
|
||||
reveal_type(subclass_of_c(1, 2)) # revealed: C
|
||||
```
|
||||
|
||||
## Dynamic base
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
from knot_extensions import Unknown
|
||||
|
||||
def _(subclass_of_any: type[Any], subclass_of_unknown: type[Unknown]):
|
||||
reveal_type(subclass_of_any()) # revealed: Any
|
||||
reveal_type(subclass_of_any("any", "args", 1, 2)) # revealed: Any
|
||||
reveal_type(subclass_of_unknown()) # revealed: Unknown
|
||||
reveal_type(subclass_of_unknown("any", "args", 1, 2)) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Unions of classes
|
||||
|
||||
```py
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
def _(subclass_of_ab: type[A | B]):
|
||||
reveal_type(subclass_of_ab()) # revealed: A | B
|
||||
```
|
||||
@@ -31,16 +31,12 @@ reveal_type(c.ten) # revealed: Literal[10]
|
||||
reveal_type(C.ten) # revealed: Literal[10]
|
||||
|
||||
# These are fine:
|
||||
# TODO: This should not be an error
|
||||
c.ten = 10 # error: [invalid-assignment]
|
||||
c.ten = 10
|
||||
C.ten = 10
|
||||
|
||||
# TODO: This should be an error (as the wrong type is being implicitly passed to `Ten.__set__`),
|
||||
# but the error message is misleading.
|
||||
# error: [invalid-assignment] "Object of type `Literal[11]` is not assignable to attribute `ten` of type `Ten`"
|
||||
# error: [invalid-assignment] "Object of type `Literal[11]` is not assignable to attribute `ten` of type `Literal[10]`"
|
||||
c.ten = 11
|
||||
|
||||
# TODO: same as above
|
||||
# error: [invalid-assignment] "Object of type `Literal[11]` is not assignable to attribute `ten` of type `Literal[10]`"
|
||||
C.ten = 11
|
||||
```
|
||||
@@ -67,16 +63,14 @@ c = C()
|
||||
|
||||
reveal_type(c.flexible_int) # revealed: int | None
|
||||
|
||||
# TODO: These should not be errors
|
||||
# error: [invalid-assignment]
|
||||
c.flexible_int = 42 # okay
|
||||
# TODO: This should not be an error
|
||||
# error: [invalid-assignment]
|
||||
c.flexible_int = "42" # also okay!
|
||||
|
||||
reveal_type(c.flexible_int) # revealed: int | None
|
||||
|
||||
# TODO: This should be an error, but the message needs to be improved.
|
||||
# error: [invalid-assignment] "Object of type `None` is not assignable to attribute `flexible_int` of type `FlexibleInt`"
|
||||
# TODO: This should be an error
|
||||
c.flexible_int = None # not okay
|
||||
|
||||
reveal_type(c.flexible_int) # revealed: int | None
|
||||
@@ -84,11 +78,10 @@ reveal_type(c.flexible_int) # revealed: int | None
|
||||
|
||||
## Data and non-data descriptors
|
||||
|
||||
Descriptors that define `__set__` or `__delete__` are called *data descriptors*. An example\
|
||||
of a data descriptor is a `property` with a setter and/or a deleter.\
|
||||
Descriptors that only define `__get__`, meanwhile, are called *non-data descriptors*. Examples
|
||||
include\
|
||||
functions, `classmethod` or `staticmethod`).
|
||||
Descriptors that define `__set__` or `__delete__` are called *data descriptors*. An example of a
|
||||
data descriptor is a `property` with a setter and/or a deleter. Descriptors that only define
|
||||
`__get__`, meanwhile, are called *non-data descriptors*. Examples include functions, `classmethod`
|
||||
or `staticmethod`.
|
||||
|
||||
The precedence chain for attribute access is (1) data descriptors, (2) instance attributes, and (3)
|
||||
non-data descriptors.
|
||||
@@ -100,7 +93,7 @@ class DataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["data"]:
|
||||
return "data"
|
||||
|
||||
def __set__(self, instance: int, value) -> None:
|
||||
def __set__(self, instance: object, value: int) -> None:
|
||||
pass
|
||||
|
||||
class NonDataDescriptor:
|
||||
@@ -124,12 +117,7 @@ class C:
|
||||
|
||||
c = C()
|
||||
|
||||
# TODO: This should ideally be `Unknown | Literal["data"]`.
|
||||
#
|
||||
# - Pyright also wrongly shows `int | Literal['data']` here
|
||||
# - Mypy shows Literal["data"] here, but also shows Literal["non-data"] below.
|
||||
#
|
||||
reveal_type(c.data_descriptor) # revealed: Unknown | Literal["data", 1]
|
||||
reveal_type(c.data_descriptor) # revealed: Unknown | Literal["data"]
|
||||
|
||||
reveal_type(c.non_data_descriptor) # revealed: Unknown | Literal["non-data", 1]
|
||||
|
||||
@@ -143,6 +131,230 @@ reveal_type(C.non_data_descriptor) # revealed: Unknown | Literal["non-data"]
|
||||
C.data_descriptor = "something else" # This is okay
|
||||
```
|
||||
|
||||
## Descriptor protocol for class objects
|
||||
|
||||
When attributes are accessed on a class object, the following [precedence chain] is used:
|
||||
|
||||
- Data descriptor on the metaclass
|
||||
- Data or non-data descriptor on the class
|
||||
- Class attribute
|
||||
- Non-data descriptor on the metaclass
|
||||
- Metaclass attribute
|
||||
|
||||
To verify this, we define a data and a non-data descriptor:
|
||||
|
||||
```py
|
||||
from typing import Literal, Any
|
||||
|
||||
class DataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["data"]:
|
||||
return "data"
|
||||
|
||||
def __set__(self, instance: object, value: str) -> None:
|
||||
pass
|
||||
|
||||
class NonDataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["non-data"]:
|
||||
return "non-data"
|
||||
```
|
||||
|
||||
First, we make sure that the descriptors are correctly accessed when defined on the metaclass or the
|
||||
class:
|
||||
|
||||
```py
|
||||
class Meta1(type):
|
||||
meta_data_descriptor: DataDescriptor = DataDescriptor()
|
||||
meta_non_data_descriptor: NonDataDescriptor = NonDataDescriptor()
|
||||
|
||||
class C1(metaclass=Meta1):
|
||||
class_data_descriptor: DataDescriptor = DataDescriptor()
|
||||
class_non_data_descriptor: NonDataDescriptor = NonDataDescriptor()
|
||||
|
||||
reveal_type(C1.meta_data_descriptor) # revealed: Literal["data"]
|
||||
reveal_type(C1.meta_non_data_descriptor) # revealed: Literal["non-data"]
|
||||
|
||||
reveal_type(C1.class_data_descriptor) # revealed: Literal["data"]
|
||||
reveal_type(C1.class_non_data_descriptor) # revealed: Literal["non-data"]
|
||||
```
|
||||
|
||||
Next, we demonstrate that a *metaclass data descriptor* takes precedence over all class-level
|
||||
attributes:
|
||||
|
||||
```py
|
||||
class Meta2(type):
|
||||
meta_data_descriptor1: DataDescriptor = DataDescriptor()
|
||||
meta_data_descriptor2: DataDescriptor = DataDescriptor()
|
||||
|
||||
class ClassLevelDataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["class level data descriptor"]:
|
||||
return "class level data descriptor"
|
||||
|
||||
def __set__(self, instance: object, value: str) -> None:
|
||||
pass
|
||||
|
||||
class C2(metaclass=Meta2):
|
||||
meta_data_descriptor1: Literal["value on class"] = "value on class"
|
||||
meta_data_descriptor2: ClassLevelDataDescriptor = ClassLevelDataDescriptor()
|
||||
|
||||
reveal_type(C2.meta_data_descriptor1) # revealed: Literal["data"]
|
||||
reveal_type(C2.meta_data_descriptor2) # revealed: Literal["data"]
|
||||
```
|
||||
|
||||
On the other hand, normal metaclass attributes and metaclass non-data descriptors are shadowed by
|
||||
class-level attributes (descriptor or not):
|
||||
|
||||
```py
|
||||
class Meta3(type):
|
||||
meta_attribute1: Literal["value on metaclass"] = "value on metaclass"
|
||||
meta_attribute2: Literal["value on metaclass"] = "value on metaclass"
|
||||
meta_non_data_descriptor1: NonDataDescriptor = NonDataDescriptor()
|
||||
meta_non_data_descriptor2: NonDataDescriptor = NonDataDescriptor()
|
||||
|
||||
class C3(metaclass=Meta3):
|
||||
meta_attribute1: Literal["value on class"] = "value on class"
|
||||
meta_attribute2: ClassLevelDataDescriptor = ClassLevelDataDescriptor()
|
||||
meta_non_data_descriptor1: Literal["value on class"] = "value on class"
|
||||
meta_non_data_descriptor2: ClassLevelDataDescriptor = ClassLevelDataDescriptor()
|
||||
|
||||
reveal_type(C3.meta_attribute1) # revealed: Literal["value on class"]
|
||||
reveal_type(C3.meta_attribute2) # revealed: Literal["class level data descriptor"]
|
||||
reveal_type(C3.meta_non_data_descriptor1) # revealed: Literal["value on class"]
|
||||
reveal_type(C3.meta_non_data_descriptor2) # revealed: Literal["class level data descriptor"]
|
||||
```
|
||||
|
||||
Finally, metaclass attributes and metaclass non-data descriptors are only accessible when they are
|
||||
not shadowed by class-level attributes:
|
||||
|
||||
```py
|
||||
class Meta4(type):
|
||||
meta_attribute: Literal["value on metaclass"] = "value on metaclass"
|
||||
meta_non_data_descriptor: NonDataDescriptor = NonDataDescriptor()
|
||||
|
||||
class C4(metaclass=Meta4): ...
|
||||
|
||||
reveal_type(C4.meta_attribute) # revealed: Literal["value on metaclass"]
|
||||
reveal_type(C4.meta_non_data_descriptor) # revealed: Literal["non-data"]
|
||||
```
|
||||
|
||||
When a metaclass data descriptor is possibly unbound, we union the result type of its `__get__`
|
||||
method with an underlying class level attribute, if present:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Meta5(type):
|
||||
if flag:
|
||||
meta_data_descriptor1: DataDescriptor = DataDescriptor()
|
||||
meta_data_descriptor2: DataDescriptor = DataDescriptor()
|
||||
|
||||
class C5(metaclass=Meta5):
|
||||
meta_data_descriptor1: Literal["value on class"] = "value on class"
|
||||
|
||||
reveal_type(C5.meta_data_descriptor1) # revealed: Literal["data", "value on class"]
|
||||
# error: [possibly-unbound-attribute]
|
||||
reveal_type(C5.meta_data_descriptor2) # revealed: Literal["data"]
|
||||
```
|
||||
|
||||
When a class-level attribute is possibly unbound, we union its (descriptor protocol) type with the
|
||||
metaclass attribute (unless it's a data descriptor, which always takes precedence):
|
||||
|
||||
```py
|
||||
from typing import Any
|
||||
|
||||
def _(flag: bool):
|
||||
class Meta6(type):
|
||||
attribute1: DataDescriptor = DataDescriptor()
|
||||
attribute2: NonDataDescriptor = NonDataDescriptor()
|
||||
attribute3: Literal["value on metaclass"] = "value on metaclass"
|
||||
|
||||
class C6(metaclass=Meta6):
|
||||
if flag:
|
||||
attribute1: Literal["value on class"] = "value on class"
|
||||
attribute2: Literal["value on class"] = "value on class"
|
||||
attribute3: Literal["value on class"] = "value on class"
|
||||
attribute4: Literal["value on class"] = "value on class"
|
||||
|
||||
reveal_type(C6.attribute1) # revealed: Literal["data"]
|
||||
reveal_type(C6.attribute2) # revealed: Literal["non-data", "value on class"]
|
||||
reveal_type(C6.attribute3) # revealed: Literal["value on metaclass", "value on class"]
|
||||
# error: [possibly-unbound-attribute]
|
||||
reveal_type(C6.attribute4) # revealed: Literal["value on class"]
|
||||
```
|
||||
|
||||
Finally, we can also have unions of various types of attributes:
|
||||
|
||||
```py
|
||||
def _(flag: bool):
|
||||
class Meta7(type):
|
||||
if flag:
|
||||
union_of_metaclass_attributes: Literal[1] = 1
|
||||
union_of_metaclass_data_descriptor_and_attribute: DataDescriptor = DataDescriptor()
|
||||
else:
|
||||
union_of_metaclass_attributes: Literal[2] = 2
|
||||
union_of_metaclass_data_descriptor_and_attribute: Literal[2] = 2
|
||||
|
||||
class C7(metaclass=Meta7):
|
||||
if flag:
|
||||
union_of_class_attributes: Literal[1] = 1
|
||||
union_of_class_data_descriptor_and_attribute: DataDescriptor = DataDescriptor()
|
||||
else:
|
||||
union_of_class_attributes: Literal[2] = 2
|
||||
union_of_class_data_descriptor_and_attribute: Literal[2] = 2
|
||||
|
||||
reveal_type(C7.union_of_metaclass_attributes) # revealed: Literal[1, 2]
|
||||
reveal_type(C7.union_of_metaclass_data_descriptor_and_attribute) # revealed: Literal["data", 2]
|
||||
reveal_type(C7.union_of_class_attributes) # revealed: Literal[1, 2]
|
||||
reveal_type(C7.union_of_class_data_descriptor_and_attribute) # revealed: Literal["data", 2]
|
||||
```
|
||||
|
||||
## Partial fall back
|
||||
|
||||
Our implementation of the descriptor protocol takes into account that symbols can be possibly
|
||||
unbound. In those cases, we fall back to lower precedence steps of the descriptor protocol and union
|
||||
all possible results accordingly. We start by defining a data and a non-data descriptor:
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
class DataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["data"]:
|
||||
return "data"
|
||||
|
||||
def __set__(self, instance: object, value: int) -> None:
|
||||
pass
|
||||
|
||||
class NonDataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> Literal["non-data"]:
|
||||
return "non-data"
|
||||
```
|
||||
|
||||
Then, we demonstrate that we fall back to an instance attribute if a data descriptor is possibly
|
||||
unbound:
|
||||
|
||||
```py
|
||||
def f1(flag: bool):
|
||||
class C1:
|
||||
if flag:
|
||||
attr = DataDescriptor()
|
||||
|
||||
def f(self):
|
||||
self.attr = "normal"
|
||||
|
||||
reveal_type(C1().attr) # revealed: Unknown | Literal["data", "normal"]
|
||||
```
|
||||
|
||||
We never treat implicit instance attributes as definitely bound, so we fall back to the non-data
|
||||
descriptor here:
|
||||
|
||||
```py
|
||||
def f2(flag: bool):
|
||||
class C2:
|
||||
def f(self):
|
||||
self.attr = "normal"
|
||||
attr = NonDataDescriptor()
|
||||
|
||||
reveal_type(C2().attr) # revealed: Unknown | Literal["non-data", "normal"]
|
||||
```
|
||||
|
||||
## Built-in `property` descriptor
|
||||
|
||||
The built-in `property` decorator creates a descriptor. The names for attribute reads/writes are
|
||||
@@ -166,18 +378,21 @@ c = C()
|
||||
|
||||
reveal_type(c._name) # revealed: str | None
|
||||
|
||||
# Should be `str`
|
||||
reveal_type(c.name) # revealed: @Todo(decorated method)
|
||||
# TODO: Should be `str`
|
||||
reveal_type(c.name) # revealed: <bound method `name` of `C`>
|
||||
|
||||
# Should be `builtins.property`
|
||||
reveal_type(C.name) # revealed: Literal[name]
|
||||
|
||||
# This is fine:
|
||||
# TODO: These should not emit errors
|
||||
# error: [invalid-assignment]
|
||||
c.name = "new"
|
||||
|
||||
# error: [invalid-assignment]
|
||||
c.name = None
|
||||
|
||||
# TODO: this should be an error
|
||||
# TODO: this should be an error, but with a proper error message
|
||||
# error: [invalid-assignment] "Object of type `Literal[42]` is not assignable to attribute `name` of type `<bound method `name` of `C`>`"
|
||||
c.name = 42
|
||||
```
|
||||
|
||||
@@ -225,8 +440,7 @@ class C:
|
||||
def __init__(self):
|
||||
self.ten: Ten = Ten()
|
||||
|
||||
# TODO: Should be Ten
|
||||
reveal_type(C().ten) # revealed: Literal[10]
|
||||
reveal_type(C().ten) # revealed: Ten
|
||||
```
|
||||
|
||||
## Descriptors distinguishing between class and instance access
|
||||
@@ -295,12 +509,20 @@ class TailoredForInstanceAccess:
|
||||
def __get__(self, instance: C, owner: type[C] | None = None) -> str:
|
||||
return "a"
|
||||
|
||||
class C:
|
||||
class TailoredForMetaclassAccess:
|
||||
def __get__(self, instance: type[C], owner: type[Meta]) -> bytes:
|
||||
return b"a"
|
||||
|
||||
class Meta(type):
|
||||
metaclass_access: TailoredForMetaclassAccess = TailoredForMetaclassAccess()
|
||||
|
||||
class C(metaclass=Meta):
|
||||
class_object_access: TailoredForClassObjectAccess = TailoredForClassObjectAccess()
|
||||
instance_access: TailoredForInstanceAccess = TailoredForInstanceAccess()
|
||||
|
||||
reveal_type(C.class_object_access) # revealed: int
|
||||
reveal_type(C().instance_access) # revealed: str
|
||||
reveal_type(C.metaclass_access) # revealed: bytes
|
||||
|
||||
# TODO: These should emit a diagnostic
|
||||
reveal_type(C().class_object_access) # revealed: TailoredForClassObjectAccess
|
||||
@@ -320,6 +542,42 @@ class C:
|
||||
|
||||
# TODO: This should be an error
|
||||
reveal_type(C.descriptor) # revealed: Descriptor
|
||||
|
||||
# TODO: This should be an error
|
||||
reveal_type(C().descriptor) # revealed: Descriptor
|
||||
```
|
||||
|
||||
## Possibly unbound descriptor attributes
|
||||
|
||||
```py
|
||||
class DataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> int:
|
||||
return 1
|
||||
|
||||
def __set__(self, instance: int, value) -> None:
|
||||
pass
|
||||
|
||||
class NonDataDescriptor:
|
||||
def __get__(self, instance: object, owner: type | None = None) -> int:
|
||||
return 1
|
||||
|
||||
def _(flag: bool):
|
||||
class PossiblyUnbound:
|
||||
if flag:
|
||||
non_data: NonDataDescriptor = NonDataDescriptor()
|
||||
data: DataDescriptor = DataDescriptor()
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `non_data` on type `Literal[PossiblyUnbound]` is possibly unbound"
|
||||
reveal_type(PossiblyUnbound.non_data) # revealed: int
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `non_data` on type `PossiblyUnbound` is possibly unbound"
|
||||
reveal_type(PossiblyUnbound().non_data) # revealed: int
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `data` on type `Literal[PossiblyUnbound]` is possibly unbound"
|
||||
reveal_type(PossiblyUnbound.data) # revealed: int
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `data` on type `PossiblyUnbound` is possibly unbound"
|
||||
reveal_type(PossiblyUnbound().data) # revealed: int
|
||||
```
|
||||
|
||||
## Possibly-unbound `__get__` method
|
||||
@@ -334,13 +592,55 @@ def _(flag: bool):
|
||||
class C:
|
||||
descriptor: MaybeDescriptor = MaybeDescriptor()
|
||||
|
||||
# TODO: This should be `MaybeDescriptor | int`
|
||||
reveal_type(C.descriptor) # revealed: int
|
||||
reveal_type(C.descriptor) # revealed: int | MaybeDescriptor
|
||||
|
||||
reveal_type(C().descriptor) # revealed: int | MaybeDescriptor
|
||||
```
|
||||
|
||||
## Descriptors with non-function `__get__` callables that are descriptors themselves
|
||||
|
||||
The descriptor protocol is recursive, i.e. looking up `__get__` can involve triggering the
|
||||
descriptor protocol on the callable's `__call__` method:
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
class ReturnedCallable2:
|
||||
def __call__(self, descriptor: Descriptor1, instance: None, owner: type[C]) -> int:
|
||||
return 1
|
||||
|
||||
class ReturnedCallable1:
|
||||
def __call__(self, descriptor: Descriptor2, instance: Callable1, owner: type[Callable1]) -> ReturnedCallable2:
|
||||
return ReturnedCallable2()
|
||||
|
||||
class Callable3:
|
||||
def __call__(self, descriptor: Descriptor3, instance: Callable2, owner: type[Callable2]) -> ReturnedCallable1:
|
||||
return ReturnedCallable1()
|
||||
|
||||
class Descriptor3:
|
||||
__get__: Callable3 = Callable3()
|
||||
|
||||
class Callable2:
|
||||
__call__: Descriptor3 = Descriptor3()
|
||||
|
||||
class Descriptor2:
|
||||
__get__: Callable2 = Callable2()
|
||||
|
||||
class Callable1:
|
||||
__call__: Descriptor2 = Descriptor2()
|
||||
|
||||
class Descriptor1:
|
||||
__get__: Callable1 = Callable1()
|
||||
|
||||
class C:
|
||||
d: Descriptor1 = Descriptor1()
|
||||
|
||||
reveal_type(C.d) # revealed: int
|
||||
```
|
||||
|
||||
## Dunder methods
|
||||
|
||||
Dunder methods are looked up on the meta type, but we still need to invoke the descriptor protocol:
|
||||
Dunder methods are looked up on the meta-type, but we still need to invoke the descriptor protocol:
|
||||
|
||||
```py
|
||||
class SomeCallable:
|
||||
@@ -438,4 +738,5 @@ wrapper_descriptor(f, None, type(f), "one too many")
|
||||
```
|
||||
|
||||
[descriptors]: https://docs.python.org/3/howto/descriptor.html
|
||||
[precedence chain]: https://github.com/python/cpython/blob/3.13/Objects/typeobject.c#L5393-L5481
|
||||
[simple example]: https://docs.python.org/3/howto/descriptor.html#simple-example-a-descriptor-that-returns-a-constant
|
||||
|
||||
@@ -0,0 +1,100 @@
|
||||
# `lambda` expression
|
||||
|
||||
## No parameters
|
||||
|
||||
`lambda` expressions can be defined without any parameters.
|
||||
|
||||
```py
|
||||
reveal_type(lambda: 1) # revealed: () -> @Todo(lambda return type)
|
||||
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(lambda: a) # revealed: () -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
## With parameters
|
||||
|
||||
Unlike parameters in function definition, the parameters in a `lambda` expression cannot be
|
||||
annotated.
|
||||
|
||||
```py
|
||||
reveal_type(lambda a: a) # revealed: (a) -> @Todo(lambda return type)
|
||||
reveal_type(lambda a, b: a + b) # revealed: (a, b) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
But, it can have default values:
|
||||
|
||||
```py
|
||||
reveal_type(lambda a=1: a) # revealed: (a=Literal[1]) -> @Todo(lambda return type)
|
||||
reveal_type(lambda a, b=2: a) # revealed: (a, b=Literal[2]) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
And, positional-only parameters:
|
||||
|
||||
```py
|
||||
reveal_type(lambda a, b, /, c: c) # revealed: (a, b, /, c) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
And, keyword-only parameters:
|
||||
|
||||
```py
|
||||
reveal_type(lambda a, *, b=2, c: b) # revealed: (a, *, b=Literal[2], c) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
And, variadic parameter:
|
||||
|
||||
```py
|
||||
reveal_type(lambda *args: args) # revealed: (*args) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
And, keyword-variadic parameter:
|
||||
|
||||
```py
|
||||
reveal_type(lambda **kwargs: kwargs) # revealed: (**kwargs) -> @Todo(lambda return type)
|
||||
```
|
||||
|
||||
Mixing all of them together:
|
||||
|
||||
```py
|
||||
# revealed: (a, b, /, c=Literal[True], *args, *, d=Literal["default"], e=Literal[5], **kwargs) -> @Todo(lambda return type)
|
||||
reveal_type(lambda a, b, /, c=True, *args, d="default", e=5, **kwargs: None)
|
||||
```
|
||||
|
||||
## Parameter type
|
||||
|
||||
In addition to correctly inferring the `lambda` expression, the parameters should also be inferred
|
||||
correctly.
|
||||
|
||||
Using a parameter with no default value:
|
||||
|
||||
```py
|
||||
lambda x: reveal_type(x) # revealed: Unknown
|
||||
```
|
||||
|
||||
Using a parameter with default value:
|
||||
|
||||
```py
|
||||
lambda x=1: reveal_type(x) # revealed: Unknown | Literal[1]
|
||||
```
|
||||
|
||||
Using a variadic parameter:
|
||||
|
||||
```py
|
||||
# TODO: should be `tuple[Unknown, ...]` (needs generics)
|
||||
lambda *args: reveal_type(args) # revealed: tuple
|
||||
```
|
||||
|
||||
Using a keyword-variadic parameter:
|
||||
|
||||
```py
|
||||
# TODO: should be `dict[str, Unknown]` (needs generics)
|
||||
lambda **kwargs: reveal_type(kwargs) # revealed: dict
|
||||
```
|
||||
|
||||
## Nested `lambda` expressions
|
||||
|
||||
Here, a `lambda` expression is used as the default value for a parameter in another `lambda`
|
||||
expression.
|
||||
|
||||
```py
|
||||
reveal_type(lambda a=lambda x, y: 0: 2) # revealed: (a=(x, y) -> @Todo(lambda return type)) -> @Todo(lambda return type)
|
||||
```
|
||||
@@ -1,81 +0,0 @@
|
||||
# PEP 695 Generics
|
||||
|
||||
## Class Declarations
|
||||
|
||||
Basic PEP 695 generics
|
||||
|
||||
```py
|
||||
class MyBox[T]:
|
||||
data: T
|
||||
box_model_number = 695
|
||||
|
||||
def __init__(self, data: T):
|
||||
self.data = data
|
||||
|
||||
box: MyBox[int] = MyBox(5)
|
||||
|
||||
# TODO should emit a diagnostic here (str is not assignable to int)
|
||||
wrong_innards: MyBox[int] = MyBox("five")
|
||||
|
||||
# TODO reveal int, do not leak the typevar
|
||||
reveal_type(box.data) # revealed: T
|
||||
|
||||
reveal_type(MyBox.box_model_number) # revealed: Unknown | Literal[695]
|
||||
```
|
||||
|
||||
## Subclassing
|
||||
|
||||
```py
|
||||
class MyBox[T]:
|
||||
data: T
|
||||
|
||||
def __init__(self, data: T):
|
||||
self.data = data
|
||||
|
||||
# TODO not error on the subscripting
|
||||
# error: [non-subscriptable]
|
||||
class MySecureBox[T](MyBox[T]): ...
|
||||
|
||||
secure_box: MySecureBox[int] = MySecureBox(5)
|
||||
reveal_type(secure_box) # revealed: MySecureBox
|
||||
# TODO reveal int
|
||||
# The @Todo(…) is misleading here. We currently treat `MyBox[T]` as a dynamic base class because we
|
||||
# don't understand generics and therefore infer `Unknown` for the `MyBox[T]` base of `MySecureBox[T]`.
|
||||
reveal_type(secure_box.data) # revealed: @Todo(instance attribute on class with dynamic base)
|
||||
```
|
||||
|
||||
## Cyclical class definition
|
||||
|
||||
In type stubs, classes can reference themselves in their base class definitions. For example, in
|
||||
`typeshed`, we have `class str(Sequence[str]): ...`.
|
||||
|
||||
This should hold true even with generics at play.
|
||||
|
||||
```pyi
|
||||
class Seq[T]: ...
|
||||
|
||||
# TODO not error on the subscripting
|
||||
class S[T](Seq[S]): ... # error: [non-subscriptable]
|
||||
|
||||
reveal_type(S) # revealed: Literal[S]
|
||||
```
|
||||
|
||||
## Type params
|
||||
|
||||
A PEP695 type variable defines a value of type `typing.TypeVar`.
|
||||
|
||||
```py
|
||||
def f[T]():
|
||||
reveal_type(T) # revealed: T
|
||||
reveal_type(T.__name__) # revealed: Literal["T"]
|
||||
```
|
||||
|
||||
## Minimum two constraints
|
||||
|
||||
A typevar with less than two constraints emits a diagnostic:
|
||||
|
||||
```py
|
||||
# error: [invalid-type-variable-constraints] "TypeVar must have at least two constrained types"
|
||||
def f[T: (int,)]():
|
||||
pass
|
||||
```
|
||||
@@ -0,0 +1,187 @@
|
||||
# Generic classes
|
||||
|
||||
## PEP 695 syntax
|
||||
|
||||
TODO: Add a `red_knot_extension` function that asserts whether a function or class is generic.
|
||||
|
||||
This is a generic class defined using PEP 695 syntax:
|
||||
|
||||
```py
|
||||
class C[T]: ...
|
||||
```
|
||||
|
||||
A class that inherits from a generic class, and fills its type parameters with typevars, is generic:
|
||||
|
||||
```py
|
||||
# TODO: no error
|
||||
# error: [non-subscriptable]
|
||||
class D[U](C[U]): ...
|
||||
```
|
||||
|
||||
A class that inherits from a generic class, but fills its type parameters with concrete types, is
|
||||
_not_ generic:
|
||||
|
||||
```py
|
||||
# TODO: no error
|
||||
# error: [non-subscriptable]
|
||||
class E(C[int]): ...
|
||||
```
|
||||
|
||||
A class that inherits from a generic class, and doesn't fill its type parameters at all, implicitly
|
||||
uses the default value for the typevar. In this case, that default type is `Unknown`, so `F`
|
||||
inherits from `C[Unknown]` and is not itself generic.
|
||||
|
||||
```py
|
||||
class F(C): ...
|
||||
```
|
||||
|
||||
## Legacy syntax
|
||||
|
||||
This is a generic class defined using the legacy syntax:
|
||||
|
||||
```py
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class C(Generic[T]): ...
|
||||
```
|
||||
|
||||
A class that inherits from a generic class, and fills its type parameters with typevars, is generic.
|
||||
|
||||
```py
|
||||
class D(C[T]): ...
|
||||
```
|
||||
|
||||
(Examples `E` and `F` from above do not have analogues in the legacy syntax.)
|
||||
|
||||
## Inferring generic class parameters
|
||||
|
||||
The type parameter can be specified explicitly:
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
x: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: C[int]
|
||||
# error: [non-subscriptable]
|
||||
reveal_type(C[int]()) # revealed: C
|
||||
```
|
||||
|
||||
We can infer the type parameter from a type context:
|
||||
|
||||
```py
|
||||
c: C[int] = C()
|
||||
# TODO: revealed: C[int]
|
||||
reveal_type(c) # revealed: C
|
||||
```
|
||||
|
||||
The typevars of a fully specialized generic class should no longer be visible:
|
||||
|
||||
```py
|
||||
# TODO: revealed: int
|
||||
reveal_type(c.x) # revealed: T
|
||||
```
|
||||
|
||||
If the type parameter is not specified explicitly, and there are no constraints that let us infer a
|
||||
specific type, we infer the typevar's default type:
|
||||
|
||||
```py
|
||||
class D[T = int]: ...
|
||||
|
||||
# TODO: revealed: D[int]
|
||||
reveal_type(D()) # revealed: D
|
||||
```
|
||||
|
||||
If a typevar does not provide a default, we use `Unknown`:
|
||||
|
||||
```py
|
||||
# TODO: revealed: C[Unknown]
|
||||
reveal_type(C()) # revealed: C
|
||||
```
|
||||
|
||||
If the type of a constructor parameter is a class typevar, we can use that to infer the type
|
||||
parameter:
|
||||
|
||||
```py
|
||||
class E[T]:
|
||||
def __init__(self, x: T) -> None: ...
|
||||
|
||||
# TODO: revealed: E[int] or E[Literal[1]]
|
||||
reveal_type(E(1)) # revealed: E
|
||||
```
|
||||
|
||||
The types inferred from a type context and from a constructor parameter must be consistent with each
|
||||
other:
|
||||
|
||||
```py
|
||||
# TODO: error
|
||||
wrong_innards: E[int] = E("five")
|
||||
```
|
||||
|
||||
## Generic subclass
|
||||
|
||||
When a generic subclass fills its superclass's type parameter with one of its own, the actual types
|
||||
propagate through:
|
||||
|
||||
```py
|
||||
class Base[T]:
|
||||
x: T | None = None
|
||||
|
||||
# TODO: no error
|
||||
# error: [non-subscriptable]
|
||||
class Sub[U](Base[U]): ...
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: int | None
|
||||
# error: [non-subscriptable]
|
||||
reveal_type(Base[int].x) # revealed: T | None
|
||||
# TODO: revealed: int | None
|
||||
# error: [non-subscriptable]
|
||||
reveal_type(Sub[int].x) # revealed: T | None
|
||||
```
|
||||
|
||||
## Cyclic class definition
|
||||
|
||||
A class can use itself as the type parameter of one of its superclasses. (This is also known as the
|
||||
[curiously recurring template pattern][crtp] or [F-bounded quantification][f-bound].)
|
||||
|
||||
Here, `Sub` is not a generic class, since it fills its superclass's type parameter (with itself).
|
||||
|
||||
`stub.pyi`:
|
||||
|
||||
```pyi
|
||||
class Base[T]: ...
|
||||
# TODO: no error
|
||||
# error: [non-subscriptable]
|
||||
class Sub(Base[Sub]): ...
|
||||
|
||||
reveal_type(Sub) # revealed: Literal[Sub]
|
||||
```
|
||||
|
||||
`string_annotation.py`:
|
||||
|
||||
```py
|
||||
class Base[T]: ...
|
||||
|
||||
# TODO: no error
|
||||
# error: [non-subscriptable]
|
||||
class Sub(Base["Sub"]): ...
|
||||
|
||||
reveal_type(Sub) # revealed: Literal[Sub]
|
||||
```
|
||||
|
||||
`bare_annotation.py`:
|
||||
|
||||
```py
|
||||
class Base[T]: ...
|
||||
|
||||
# TODO: error: [unresolved-reference]
|
||||
class Sub(Base[Sub]): ...
|
||||
```
|
||||
|
||||
[crtp]: https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern
|
||||
[f-bound]: https://en.wikipedia.org/wiki/Bounded_quantification#F-bounded_quantification
|
||||
@@ -0,0 +1,244 @@
|
||||
# Generic functions
|
||||
|
||||
## Typevar must be used at least twice
|
||||
|
||||
If you're only using a typevar for a single parameter, you don't need the typevar — just use
|
||||
`object` (or the typevar's upper bound):
|
||||
|
||||
```py
|
||||
# TODO: error, should be (x: object)
|
||||
def typevar_not_needed[T](x: T) -> None:
|
||||
pass
|
||||
|
||||
# TODO: error, should be (x: int)
|
||||
def bounded_typevar_not_needed[T: int](x: T) -> None:
|
||||
pass
|
||||
```
|
||||
|
||||
Typevars are only needed if you use them more than once. For instance, to specify that two
|
||||
parameters must both have the same type:
|
||||
|
||||
```py
|
||||
def two_params[T](x: T, y: T) -> T:
|
||||
return x
|
||||
```
|
||||
|
||||
or to specify that a return value is the same as a parameter:
|
||||
|
||||
```py
|
||||
def return_value[T](x: T) -> T:
|
||||
return x
|
||||
```
|
||||
|
||||
Each typevar must also appear _somewhere_ in the parameter list:
|
||||
|
||||
```py
|
||||
def absurd[T]() -> T:
|
||||
# There's no way to construct a T!
|
||||
...
|
||||
```
|
||||
|
||||
## Inferring generic function parameter types
|
||||
|
||||
If the type of a generic function parameter is a typevar, then we can infer what type that typevar
|
||||
is bound to at each call site.
|
||||
|
||||
TODO: Note that some of the TODO revealed types have two options, since we haven't decided yet
|
||||
whether we want to infer a more specific `Literal` type where possible, or use heuristics to weaken
|
||||
the inferred type to e.g. `int`.
|
||||
|
||||
```py
|
||||
def f[T](x: T) -> T: ...
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: int or Literal[1]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f(1)) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: float
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f(1.0)) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: bool or Literal[True]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f(True)) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str or Literal["string"]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f("string")) # revealed: T
|
||||
```
|
||||
|
||||
## Inferring “deep” generic parameter types
|
||||
|
||||
The matching up of call arguments and discovery of constraints on typevars can be a recursive
|
||||
process for arbitrarily-nested generic types in parameters.
|
||||
|
||||
```py
|
||||
def f[T](x: list[T]) -> T: ...
|
||||
|
||||
# TODO: revealed: float
|
||||
reveal_type(f([1.0, 2.0])) # revealed: T
|
||||
```
|
||||
|
||||
## Typevar constraints
|
||||
|
||||
If a type parameter has an upper bound, that upper bound constrains which types can be used for that
|
||||
typevar. This effectively adds the upper bound as an intersection to every appearance of the typevar
|
||||
in the function.
|
||||
|
||||
```py
|
||||
def good_param[T: int](x: T) -> None:
|
||||
# TODO: revealed: T & int
|
||||
reveal_type(x) # revealed: T
|
||||
```
|
||||
|
||||
If the function is annotated as returning the typevar, this means that the upper bound is _not_
|
||||
assignable to that typevar, since return types are contravariant. In `bad`, we can infer that
|
||||
`x + 1` has type `int`. But `T` might be instantiated with a narrower type than `int`, and so the
|
||||
return value is not guaranteed to be compatible for all `T: int`.
|
||||
|
||||
```py
|
||||
def good_return[T: int](x: T) -> T:
|
||||
return x
|
||||
|
||||
def bad_return[T: int](x: T) -> T:
|
||||
# TODO: error: int is not assignable to T
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `T` and `Literal[1]`"
|
||||
return x + 1
|
||||
```
|
||||
|
||||
## All occurrences of the same typevar have the same type
|
||||
|
||||
If a typevar appears multiple times in a function signature, all occurrences have the same type.
|
||||
|
||||
```py
|
||||
def different_types[T, S](cond: bool, t: T, s: S) -> T:
|
||||
if cond:
|
||||
return t
|
||||
else:
|
||||
# TODO: error: S is not assignable to T
|
||||
return s
|
||||
|
||||
def same_types[T](cond: bool, t1: T, t2: T) -> T:
|
||||
if cond:
|
||||
return t1
|
||||
else:
|
||||
return t2
|
||||
```
|
||||
|
||||
## All occurrences of the same constrained typevar have the same type
|
||||
|
||||
The above is true even when the typevars are constrained. Here, both `int` and `str` have `__add__`
|
||||
methods that are compatible with the return type, so the `return` expression is always well-typed:
|
||||
|
||||
```py
|
||||
def same_constrained_types[T: (int, str)](t1: T, t2: T) -> T:
|
||||
# TODO: no error
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `T` and `T`"
|
||||
return t1 + t2
|
||||
```
|
||||
|
||||
This is _not_ the same as a union type, because of this additional constraint that the two
|
||||
occurrences have the same type. In `unions_are_different`, `t1` and `t2` might have different types,
|
||||
and an `int` and a `str` cannot be added together:
|
||||
|
||||
```py
|
||||
def unions_are_different(t1: int | str, t2: int | str) -> int | str:
|
||||
# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | str` and `int | str`"
|
||||
return t1 + t2
|
||||
```
|
||||
|
||||
## Typevar inference is a unification problem
|
||||
|
||||
When inferring typevar assignments in a generic function call, we cannot simply solve constraints
|
||||
eagerly for each parameter in turn. We must solve a unification problem involving all of the
|
||||
parameters simultaneously.
|
||||
|
||||
```py
|
||||
def two_params[T](x: T, y: T) -> T:
|
||||
return x
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(two_params("a", "b")) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str | int
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(two_params("a", 1)) # revealed: T
|
||||
```
|
||||
|
||||
```py
|
||||
def param_with_union[T](x: T | int, y: T) -> T:
|
||||
return y
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(param_with_union(1, "a")) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(param_with_union("a", "a")) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: int
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(param_with_union(1, 1)) # revealed: T
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: str | int
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(param_with_union("a", 1)) # revealed: T
|
||||
```
|
||||
|
||||
```py
|
||||
def tuple_param[T, S](x: T | S, y: tuple[T, S]) -> tuple[T, S]:
|
||||
return y
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: tuple[str, int]
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(tuple_param("a", ("a", 1))) # revealed: tuple[T, S]
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: tuple[str, int]
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[T, S]
|
||||
```
|
||||
|
||||
## Inferring nested generic function calls
|
||||
|
||||
We can infer type assignments in nested calls to multiple generic functions. If they use the same
|
||||
type variable, we do not confuse the two; `T@f` and `T@g` have separate types in each example below.
|
||||
|
||||
```py
|
||||
def f[T](x: T) -> tuple[T, int]:
|
||||
return (x, 1)
|
||||
|
||||
def g[T](x: T) -> T | None:
|
||||
return x
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: tuple[str | None, int]
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f(g("a"))) # revealed: tuple[T, int]
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: tuple[str, int] | None
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(g(f("a"))) # revealed: T | None
|
||||
```
|
||||
@@ -0,0 +1,72 @@
|
||||
# Legacy type variables
|
||||
|
||||
The tests in this file focus on how type variables are defined using the legacy notation. Most
|
||||
_uses_ of type variables are tested in other files in this directory; we do not duplicate every test
|
||||
for both type variable syntaxes.
|
||||
|
||||
Unless otherwise specified, all quotations come from the [Generics] section of the typing spec.
|
||||
|
||||
## Type variables
|
||||
|
||||
### Defining legacy type variables
|
||||
|
||||
> Generics can be parameterized by using a factory available in `typing` called `TypeVar`.
|
||||
|
||||
This was the only way to create type variables prior to PEP 695/Python 3.12. It is still available
|
||||
in newer Python releases.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
```
|
||||
|
||||
### Directly assigned to a variable
|
||||
|
||||
> A `TypeVar()` expression must always directly be assigned to a variable (it should not be used as
|
||||
> part of a larger expression).
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
# TODO: error
|
||||
TestList = list[TypeVar("W")]
|
||||
```
|
||||
|
||||
### `TypeVar` parameter must match variable name
|
||||
|
||||
> The argument to `TypeVar()` must be a string equal to the variable name to which it is assigned.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
# TODO: error
|
||||
T = TypeVar("Q")
|
||||
```
|
||||
|
||||
### No redefinition
|
||||
|
||||
> Type variables must not be redefined.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
# TODO: error
|
||||
T = TypeVar("T")
|
||||
```
|
||||
|
||||
### Cannot have only one constraint
|
||||
|
||||
> `TypeVar` supports constraining parametric types to a fixed set of possible types...There should
|
||||
> be at least two constraints, if any; specifying a single constraint is disallowed.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
# TODO: error: [invalid-type-variable-constraints]
|
||||
T = TypeVar("T", int)
|
||||
```
|
||||
|
||||
[generics]: https://typing.readthedocs.io/en/latest/spec/generics.html
|
||||
@@ -0,0 +1,51 @@
|
||||
# PEP 695 Generics
|
||||
|
||||
[PEP 695] and Python 3.12 introduced new, more ergonomic syntax for type variables.
|
||||
|
||||
## Type variables
|
||||
|
||||
### Defining PEP 695 type variables
|
||||
|
||||
PEP 695 introduces a new syntax for defining type variables. The resulting type variables are
|
||||
instances of `typing.TypeVar`, just like legacy type variables.
|
||||
|
||||
```py
|
||||
def f[T]():
|
||||
reveal_type(type(T)) # revealed: Literal[TypeVar]
|
||||
reveal_type(T) # revealed: T
|
||||
reveal_type(T.__name__) # revealed: Literal["T"]
|
||||
```
|
||||
|
||||
### Cannot have only one constraint
|
||||
|
||||
> `TypeVar` supports constraining parametric types to a fixed set of possible types...There should
|
||||
> be at least two constraints, if any; specifying a single constraint is disallowed.
|
||||
|
||||
```py
|
||||
# error: [invalid-type-variable-constraints] "TypeVar must have at least two constrained types"
|
||||
def f[T: (int,)]():
|
||||
pass
|
||||
```
|
||||
|
||||
## Invalid uses
|
||||
|
||||
Note that many of the invalid uses of legacy typevars do not apply to PEP 695 typevars, since the
|
||||
PEP 695 syntax is only allowed places where typevars are allowed.
|
||||
|
||||
## Displaying typevars
|
||||
|
||||
We use a suffix when displaying the typevars of a generic function or class. This helps distinguish
|
||||
different uses of the same typevar.
|
||||
|
||||
```py
|
||||
def f[T](x: T, y: T) -> None:
|
||||
# TODO: revealed: T@f
|
||||
reveal_type(x) # revealed: T
|
||||
|
||||
class C[T]:
|
||||
def m(self, x: T) -> None:
|
||||
# TODO: revealed: T@c
|
||||
reveal_type(x) # revealed: T
|
||||
```
|
||||
|
||||
[pep 695]: https://peps.python.org/pep-0695/
|
||||
@@ -0,0 +1,257 @@
|
||||
# Scoping rules for type variables
|
||||
|
||||
Most of these tests come from the [Scoping rules for type variables][scoping] section of the typing
|
||||
spec.
|
||||
|
||||
## Typevar used outside of generic function or class
|
||||
|
||||
Typevars may only be used in generic function or class definitions.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
# TODO: error
|
||||
x: T
|
||||
|
||||
class C:
|
||||
# TODO: error
|
||||
x: T
|
||||
|
||||
def f() -> None:
|
||||
# TODO: error
|
||||
x: T
|
||||
```
|
||||
|
||||
## Legacy typevar used multiple times
|
||||
|
||||
> A type variable used in a generic function could be inferred to represent different types in the
|
||||
> same code block.
|
||||
|
||||
This only applies to typevars defined using the legacy syntax, since the PEP 695 syntax creates a
|
||||
new distinct typevar for each occurrence.
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
def f1(x: T) -> T: ...
|
||||
def f2(x: T) -> T: ...
|
||||
|
||||
f1(1)
|
||||
f2("a")
|
||||
```
|
||||
|
||||
## Typevar inferred multiple times
|
||||
|
||||
> A type variable used in a generic function could be inferred to represent different types in the
|
||||
> same code block.
|
||||
|
||||
This also applies to a single generic function being used multiple times, instantiating the typevar
|
||||
to a different type each time.
|
||||
|
||||
```py
|
||||
def f[T](x: T) -> T: ...
|
||||
|
||||
# TODO: no error
|
||||
# TODO: revealed: int or Literal[1]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f(1)) # revealed: T
|
||||
# TODO: no error
|
||||
# TODO: revealed: str or Literal["a"]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(f("a")) # revealed: T
|
||||
```
|
||||
|
||||
## Methods can mention class typevars
|
||||
|
||||
> A type variable used in a method of a generic class that coincides with one of the variables that
|
||||
> parameterize this class is always bound to that variable.
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def m1(self, x: T) -> T: ...
|
||||
def m2(self, x: T) -> T: ...
|
||||
|
||||
c: C[int] = C()
|
||||
# TODO: no error
|
||||
# error: [invalid-argument-type]
|
||||
c.m1(1)
|
||||
# TODO: no error
|
||||
# error: [invalid-argument-type]
|
||||
c.m2(1)
|
||||
# TODO: expected type `int`
|
||||
# error: [invalid-argument-type] "Object of type `Literal["string"]` cannot be assigned to parameter 2 (`x`) of bound method `m2`; expected type `T`"
|
||||
c.m2("string")
|
||||
```
|
||||
|
||||
## Methods can mention other typevars
|
||||
|
||||
> A type variable used in a method that does not match any of the variables that parameterize the
|
||||
> class makes this method a generic function in that variable.
|
||||
|
||||
```py
|
||||
from typing import TypeVar, Generic
|
||||
|
||||
T = TypeVar("T")
|
||||
S = TypeVar("S")
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class Legacy(Generic[T]):
|
||||
def m(self, x: T, y: S) -> S: ...
|
||||
|
||||
legacy: Legacy[int] = Legacy()
|
||||
# TODO: revealed: str
|
||||
reveal_type(legacy.m(1, "string")) # revealed: @Todo(Invalid or unsupported `Instance` in `Type::to_type_expression`)
|
||||
```
|
||||
|
||||
With PEP 695 syntax, it is clearer that the method uses a separate typevar:
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def m[S](self, x: T, y: S) -> S: ...
|
||||
|
||||
c: C[int] = C()
|
||||
# TODO: no errors
|
||||
# TODO: revealed: str
|
||||
# error: [invalid-argument-type]
|
||||
# error: [invalid-argument-type]
|
||||
reveal_type(c.m(1, "string")) # revealed: S
|
||||
```
|
||||
|
||||
## Unbound typevars
|
||||
|
||||
> Unbound type variables should not appear in the bodies of generic functions, or in the class
|
||||
> bodies apart from method definitions.
|
||||
|
||||
This is true with the legacy syntax:
|
||||
|
||||
```py
|
||||
from typing import TypeVar, Generic
|
||||
|
||||
T = TypeVar("T")
|
||||
S = TypeVar("S")
|
||||
|
||||
def f(x: T) -> None:
|
||||
x: list[T] = []
|
||||
# TODO: error
|
||||
y: list[S] = []
|
||||
|
||||
# TODO: no error
|
||||
# error: [invalid-base]
|
||||
class C(Generic[T]):
|
||||
# TODO: error
|
||||
x: list[S] = []
|
||||
|
||||
# This is not an error, as shown in the previous test
|
||||
def m(self, x: S) -> S: ...
|
||||
```
|
||||
|
||||
This is true with PEP 695 syntax, as well, though we must use the legacy syntax to define the
|
||||
unbound typevars:
|
||||
|
||||
`pep695.py`:
|
||||
|
||||
```py
|
||||
from typing import TypeVar
|
||||
|
||||
S = TypeVar("S")
|
||||
|
||||
def f[T](x: T) -> None:
|
||||
x: list[T] = []
|
||||
# TODO: error
|
||||
y: list[S] = []
|
||||
|
||||
class C[T]:
|
||||
# TODO: error
|
||||
x: list[S] = []
|
||||
|
||||
def m1(self, x: S) -> S: ...
|
||||
def m2[S](self, x: S) -> S: ...
|
||||
```
|
||||
|
||||
## Nested formal typevars must be distinct
|
||||
|
||||
Generic functions and classes can be nested in each other, but it is an error for the same typevar
|
||||
to be used in nested generic definitions.
|
||||
|
||||
Note that the typing spec only mentions two specific versions of this rule:
|
||||
|
||||
> A generic class definition that appears inside a generic function should not use type variables
|
||||
> that parameterize the generic function.
|
||||
|
||||
and
|
||||
|
||||
> A generic class nested in another generic class cannot use the same type variables.
|
||||
|
||||
We assume that the more general form holds.
|
||||
|
||||
### Generic function within generic function
|
||||
|
||||
```py
|
||||
def f[T](x: T, y: T) -> None:
|
||||
def ok[S](a: S, b: S) -> None: ...
|
||||
|
||||
# TODO: error
|
||||
def bad[T](a: T, b: T) -> None: ...
|
||||
```
|
||||
|
||||
### Generic method within generic class
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
def ok[S](self, a: S, b: S) -> None: ...
|
||||
|
||||
# TODO: error
|
||||
def bad[T](self, a: T, b: T) -> None: ...
|
||||
```
|
||||
|
||||
### Generic class within generic function
|
||||
|
||||
```py
|
||||
from typing import Iterable
|
||||
|
||||
def f[T](x: T, y: T) -> None:
|
||||
class Ok[S]: ...
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad1[T]: ...
|
||||
# TODO: no non-subscriptable error, error for reuse of typevar
|
||||
# error: [non-subscriptable]
|
||||
class Bad2(Iterable[T]): ...
|
||||
```
|
||||
|
||||
### Generic class within generic class
|
||||
|
||||
```py
|
||||
from typing import Iterable
|
||||
|
||||
class C[T]:
|
||||
class Ok1[S]: ...
|
||||
# TODO: error for reuse of typevar
|
||||
class Bad1[T]: ...
|
||||
# TODO: no non-subscriptable error, error for reuse of typevar
|
||||
# error: [non-subscriptable]
|
||||
class Bad2(Iterable[T]): ...
|
||||
```
|
||||
|
||||
## Class scopes do not cover inner scopes
|
||||
|
||||
Just like regular symbols, the typevars of a generic class are only available in that class's scope,
|
||||
and are not available in nested scopes.
|
||||
|
||||
```py
|
||||
class C[T]:
|
||||
ok1: list[T] = []
|
||||
|
||||
class Bad:
|
||||
# TODO: error
|
||||
bad: list[T] = []
|
||||
|
||||
class Inner[S]: ...
|
||||
ok2: Inner[T]
|
||||
```
|
||||
|
||||
[scoping]: https://typing.readthedocs.io/en/latest/spec/generics.html#scoping-rules-for-type-variables
|
||||
@@ -0,0 +1,131 @@
|
||||
# Case Sensitive Imports
|
||||
|
||||
```toml
|
||||
# TODO: This test should use the real file system instead of the memory file system.
|
||||
# but we can't change the file system yet because the tests would then start failing for
|
||||
# case-insensitive file systems.
|
||||
#system = "os"
|
||||
```
|
||||
|
||||
Python's import system is case-sensitive even on case-insensitive file systems. This means that importing
|
||||
a module `a` should fail if the file in the search paths is named `A.py`. See
|
||||
[PEP 235](https://peps.python.org/pep-0235/).
|
||||
|
||||
## Correct casing
|
||||
|
||||
Importing a module where the name matches the file name's casing should succeed.
|
||||
|
||||
`a.py`:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x: int = 1
|
||||
```
|
||||
|
||||
```python
|
||||
from a import Foo
|
||||
|
||||
reveal_type(Foo().x) # revealed: int
|
||||
```
|
||||
|
||||
## Incorrect casing
|
||||
|
||||
Importing a module where the name does not match the file name's casing should fail.
|
||||
|
||||
`A.py`:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x: int = 1
|
||||
```
|
||||
|
||||
```python
|
||||
# error: [unresolved-import]
|
||||
from a import Foo
|
||||
```
|
||||
|
||||
## Multiple search paths with different cased modules
|
||||
|
||||
The resolved module is the first matching the file name's casing but Python falls back to later
|
||||
search paths if the file name's casing does not match.
|
||||
|
||||
```toml
|
||||
[environment]
|
||||
extra-paths = ["/search-1", "/search-2"]
|
||||
```
|
||||
|
||||
`/search-1/A.py`:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x: int = 1
|
||||
```
|
||||
|
||||
`/search-2/a.py`:
|
||||
|
||||
```py
|
||||
class Bar:
|
||||
x: str = "test"
|
||||
```
|
||||
|
||||
```python
|
||||
from A import Foo
|
||||
from a import Bar
|
||||
|
||||
reveal_type(Foo().x) # revealed: int
|
||||
reveal_type(Bar().x) # revealed: str
|
||||
```
|
||||
|
||||
## Intermediate segments
|
||||
|
||||
`db/__init__.py`:
|
||||
|
||||
```py
|
||||
```
|
||||
|
||||
`db/a.py`:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x: int = 1
|
||||
```
|
||||
|
||||
`correctly_cased.py`:
|
||||
|
||||
```python
|
||||
from db.a import Foo
|
||||
|
||||
reveal_type(Foo().x) # revealed: int
|
||||
```
|
||||
|
||||
Imports where some segments are incorrectly cased should fail.
|
||||
|
||||
`incorrectly_cased.py`:
|
||||
|
||||
```python
|
||||
# error: [unresolved-import]
|
||||
from DB.a import Foo
|
||||
|
||||
# error: [unresolved-import]
|
||||
from DB.A import Foo
|
||||
|
||||
# error: [unresolved-import]
|
||||
from db.A import Foo
|
||||
```
|
||||
|
||||
## Incorrect extension casing
|
||||
|
||||
The extension of imported Python modules must be `.py` or `.pyi`, but not `.PY` or `.Py` or any
|
||||
variant where some characters are uppercase.
|
||||
|
||||
`a.PY`:
|
||||
|
||||
```py
|
||||
class Foo:
|
||||
x: int = 1
|
||||
```
|
||||
|
||||
```python
|
||||
# error: [unresolved-import]
|
||||
from a import Foo
|
||||
```
|
||||
@@ -91,3 +91,16 @@ match while:
|
||||
for x in foo.pass:
|
||||
pass
|
||||
```
|
||||
|
||||
## Invalid annotation
|
||||
|
||||
### `typing.Callable`
|
||||
|
||||
```py
|
||||
from typing import Callable
|
||||
|
||||
# error: [invalid-syntax] "Expected index or slice expression"
|
||||
# error: [invalid-type-form] "Special form `typing.Callable` expected exactly two arguments (parameter types and return type)"
|
||||
def _(c: Callable[]):
|
||||
reveal_type(c) # revealed: (...) -> Unknown
|
||||
```
|
||||
|
||||
@@ -26,23 +26,6 @@ from typing import TYPE_CHECKING as TC
|
||||
reveal_type(TC) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
### Must originate from `typing`
|
||||
|
||||
Make sure we only use our special handling for `typing.TYPE_CHECKING` and not for other constants
|
||||
with the same name:
|
||||
|
||||
`constants.py`:
|
||||
|
||||
```py
|
||||
TYPE_CHECKING: bool = False
|
||||
```
|
||||
|
||||
```py
|
||||
from constants import TYPE_CHECKING
|
||||
|
||||
reveal_type(TYPE_CHECKING) # revealed: bool
|
||||
```
|
||||
|
||||
### `typing_extensions` re-export
|
||||
|
||||
This should behave in the same way as `typing.TYPE_CHECKING`:
|
||||
@@ -52,3 +35,117 @@ from typing_extensions import TYPE_CHECKING
|
||||
|
||||
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
## User-defined `TYPE_CHECKING`
|
||||
|
||||
If we set `TYPE_CHECKING = False` directly instead of importing it from the `typing` module, it will
|
||||
still be treated as `True` during type checking. This behavior is for compatibility with other major
|
||||
type checkers, e.g. mypy and pyright.
|
||||
|
||||
### With no type annotation
|
||||
|
||||
```py
|
||||
TYPE_CHECKING = False
|
||||
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
|
||||
if TYPE_CHECKING:
|
||||
type_checking = True
|
||||
if not TYPE_CHECKING:
|
||||
runtime = True
|
||||
|
||||
# type_checking is treated as unconditionally assigned.
|
||||
reveal_type(type_checking) # revealed: Literal[True]
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(runtime) # revealed: Unknown
|
||||
```
|
||||
|
||||
### With a type annotation
|
||||
|
||||
We can also define `TYPE_CHECKING` with a type annotation. The type must be one to which `bool` can
|
||||
be assigned. Even in this case, the type of `TYPE_CHECKING` is still inferred to be `Literal[True]`.
|
||||
|
||||
```py
|
||||
TYPE_CHECKING: bool = False
|
||||
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
|
||||
if TYPE_CHECKING:
|
||||
type_checking = True
|
||||
if not TYPE_CHECKING:
|
||||
runtime = True
|
||||
|
||||
reveal_type(type_checking) # revealed: Literal[True]
|
||||
# error: [unresolved-reference]
|
||||
reveal_type(runtime) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Importing user-defined `TYPE_CHECKING`
|
||||
|
||||
`constants.py`:
|
||||
|
||||
```py
|
||||
TYPE_CHECKING = False
|
||||
```
|
||||
|
||||
`stub.pyi`:
|
||||
|
||||
```pyi
|
||||
TYPE_CHECKING: bool
|
||||
# or
|
||||
TYPE_CHECKING: bool = ...
|
||||
```
|
||||
|
||||
```py
|
||||
from constants import TYPE_CHECKING
|
||||
|
||||
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
|
||||
|
||||
from stub import TYPE_CHECKING
|
||||
|
||||
reveal_type(TYPE_CHECKING) # revealed: Literal[True]
|
||||
```
|
||||
|
||||
### Invalid assignment to `TYPE_CHECKING`
|
||||
|
||||
Only `False` can be assigned to `TYPE_CHECKING`; any assignment other than `False` will result in an
|
||||
error. A type annotation to which `bool` is not assignable is also an error.
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING = True
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: bool = True
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: int = 1
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: str = "str"
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: str = False
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: Literal[False] = False
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: Literal[True] = False
|
||||
```
|
||||
|
||||
The same rules apply in a stub file:
|
||||
|
||||
```pyi
|
||||
from typing import Literal
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: str
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: str = False
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: Literal[False] = ...
|
||||
|
||||
# error: [invalid-type-checking-constant]
|
||||
TYPE_CHECKING: object = "str"
|
||||
```
|
||||
|
||||
@@ -737,3 +737,13 @@ def _(flag: bool, flag2: bool):
|
||||
for y in Iterable2():
|
||||
reveal_type(y) # revealed: bytes | str | int
|
||||
```
|
||||
|
||||
## Never is iterable
|
||||
|
||||
```py
|
||||
from typing_extensions import Never
|
||||
|
||||
def f(never: Never):
|
||||
for x in never:
|
||||
reveal_type(x) # revealed: Never
|
||||
```
|
||||
|
||||
@@ -163,7 +163,7 @@ reveal_type(B.__class__) # revealed: Literal[M]
|
||||
## Non-class
|
||||
|
||||
When a class has an explicit `metaclass` that is not a class, but is a callable that accepts
|
||||
`type.__new__` arguments, we should return the meta type of its return type.
|
||||
`type.__new__` arguments, we should return the meta-type of its return type.
|
||||
|
||||
```py
|
||||
def f(*args, **kwargs) -> int: ...
|
||||
|
||||
@@ -9,7 +9,7 @@ is unbound.
|
||||
```py
|
||||
reveal_type(__name__) # revealed: str
|
||||
reveal_type(__file__) # revealed: str | None
|
||||
reveal_type(__loader__) # revealed: @Todo(instance attribute on class with dynamic base) | None
|
||||
reveal_type(__loader__) # revealed: LoaderProtocol | None
|
||||
reveal_type(__package__) # revealed: str | None
|
||||
reveal_type(__doc__) # revealed: str | None
|
||||
|
||||
@@ -151,6 +151,7 @@ typeshed = "/typeshed"
|
||||
`/typeshed/stdlib/builtins.pyi`:
|
||||
|
||||
```pyi
|
||||
class object: ...
|
||||
class int: ...
|
||||
class bytes: ...
|
||||
|
||||
|
||||
@@ -13,5 +13,33 @@ class Foo[T]: ...
|
||||
class Bar(Foo[Bar]): ...
|
||||
|
||||
reveal_type(Bar) # revealed: Literal[Bar]
|
||||
reveal_type(Bar.__mro__) # revealed: tuple[Literal[Bar], Unknown, Literal[object]]
|
||||
# TODO: Instead of `Literal[Foo]`, we might eventually want to show a type that involves the type parameter.
|
||||
reveal_type(Bar.__mro__) # revealed: tuple[Literal[Bar], Literal[Foo], Literal[object]]
|
||||
```
|
||||
|
||||
## Access to attributes declared in stubs
|
||||
|
||||
Unlike regular Python modules, stub files often omit the right-hand side in declarations, including
|
||||
in class scope. However, from the perspective of the type checker, we have to treat them as bindings
|
||||
too. That is, `symbol: type` is the same as `symbol: type = ...`.
|
||||
|
||||
One implication of this is that we'll always treat symbols in class scope as safe to be accessed
|
||||
from the class object itself. We'll never infer a "pure instance attribute" from a stub.
|
||||
|
||||
`b.pyi`:
|
||||
|
||||
```pyi
|
||||
from typing import ClassVar
|
||||
|
||||
class C:
|
||||
class_or_instance_var: int
|
||||
```
|
||||
|
||||
```py
|
||||
from typing import ClassVar, Literal
|
||||
|
||||
from b import C
|
||||
|
||||
# No error here, since we treat `class_or_instance_var` as bound on the class.
|
||||
reveal_type(C.class_or_instance_var) # revealed: int
|
||||
```
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
# Declarations in stubs
|
||||
|
||||
Unlike regular Python modules, stub files often declare module-global variables without initializing
|
||||
them. If these symbols are then used in the same stub, applying regular logic would lead to an
|
||||
undefined variable access error.
|
||||
|
||||
However, from the perspective of the type checker, we should treat something like `symbol: type` the
|
||||
same as `symbol: type = ...`. In other words, assume these are bindings too.
|
||||
|
||||
```pyi
|
||||
from typing import Literal
|
||||
|
||||
CONSTANT: Literal[42]
|
||||
|
||||
# No error here, even though the variable is not initialized.
|
||||
uses_constant: int = CONSTANT
|
||||
```
|
||||
@@ -117,7 +117,6 @@ from typing import Tuple
|
||||
|
||||
class C(Tuple): ...
|
||||
|
||||
# Runtime value: `(C, tuple, typing.Generic, object)`
|
||||
# TODO: Add `Generic` to the MRO
|
||||
reveal_type(C.__mro__) # revealed: tuple[Literal[C], Literal[tuple], Unknown, Literal[object]]
|
||||
# revealed: tuple[Literal[C], Literal[tuple], Literal[Sequence], Literal[Reversible], Literal[Collection], Literal[Iterable], Literal[Container], @Todo(protocol), Literal[object]]
|
||||
reveal_type(C.__mro__)
|
||||
```
|
||||
|
||||
@@ -38,16 +38,15 @@ For example, the `type: ignore` comment in this example suppresses the error of
|
||||
`"test"` and adding `"other"` to the result of the cast.
|
||||
|
||||
```py
|
||||
# fmt: off
|
||||
from typing import cast
|
||||
|
||||
y = (
|
||||
cast(int, "test" +
|
||||
# TODO: Remove the expected error after implementing `invalid-operator` for binary expressions
|
||||
# error: [unused-ignore-comment]
|
||||
2 # type: ignore
|
||||
# error: [unsupported-operator]
|
||||
cast(
|
||||
int,
|
||||
2 + "test", # type: ignore
|
||||
)
|
||||
+ "other" # TODO: expected-error[invalid-operator]
|
||||
+ "other"
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
A type is single-valued iff it is not empty and all inhabitants of it compare equal.
|
||||
|
||||
```py
|
||||
from typing_extensions import Any, Literal, LiteralString, Never
|
||||
from typing_extensions import Any, Literal, LiteralString, Never, Callable
|
||||
from knot_extensions import is_single_valued, static_assert
|
||||
|
||||
static_assert(is_single_valued(None))
|
||||
@@ -22,4 +22,7 @@ static_assert(not is_single_valued(Any))
|
||||
static_assert(not is_single_valued(Literal[1, 2]))
|
||||
|
||||
static_assert(not is_single_valued(tuple[None, int]))
|
||||
|
||||
static_assert(not is_single_valued(Callable[..., None]))
|
||||
static_assert(not is_single_valued(Callable[[int, str], None]))
|
||||
```
|
||||
|
||||
@@ -5,7 +5,7 @@ A type is a singleton type iff it has exactly one inhabitant.
|
||||
## Basic
|
||||
|
||||
```py
|
||||
from typing_extensions import Literal, Never
|
||||
from typing_extensions import Literal, Never, Callable
|
||||
from knot_extensions import is_singleton, static_assert
|
||||
|
||||
static_assert(is_singleton(None))
|
||||
@@ -23,6 +23,9 @@ static_assert(not is_singleton(Literal[1, 2]))
|
||||
static_assert(not is_singleton(tuple[()]))
|
||||
static_assert(not is_singleton(tuple[None]))
|
||||
static_assert(not is_singleton(tuple[None, Literal[True]]))
|
||||
|
||||
static_assert(not is_singleton(Callable[..., None]))
|
||||
static_assert(not is_singleton(Callable[[int, str], None]))
|
||||
```
|
||||
|
||||
## `NoDefault`
|
||||
|
||||
@@ -383,7 +383,7 @@ static_assert(is_subtype_of(LiteralStr, type[object]))
|
||||
|
||||
static_assert(not is_subtype_of(type[str], LiteralStr))
|
||||
|
||||
# custom meta classes
|
||||
# custom metaclasses
|
||||
|
||||
type LiteralHasCustomMetaclass = TypeOf[HasCustomMetaclass]
|
||||
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
# Unpacking
|
||||
|
||||
If there are not enough or too many values when unpacking, an error will occur and the types of
|
||||
all variables (if nested tuple unpacking fails, only the variables within the failed tuples) are
|
||||
inferred to be `Unknown`.
|
||||
|
||||
## Tuple
|
||||
|
||||
### Simple tuple
|
||||
@@ -63,8 +67,8 @@ reveal_type(c) # revealed: Literal[4]
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3, got 2)"
|
||||
(a, b, c) = (1, 2)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[2]
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -73,8 +77,30 @@ reveal_type(c) # revealed: Unknown
|
||||
```py
|
||||
# error: [invalid-assignment] "Too many values to unpack (expected 2, got 3)"
|
||||
(a, b) = (1, 2, 3)
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Nested uneven unpacking (1)
|
||||
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
(a, (b, c), d) = (1, (2,), 3)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[2]
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: Literal[3]
|
||||
```
|
||||
|
||||
### Nested uneven unpacking (2)
|
||||
|
||||
```py
|
||||
# error: [invalid-assignment] "Too many values to unpack (expected 2, got 3)"
|
||||
(a, (b, c), d) = (1, (2, 3, 4), 5)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: Literal[5]
|
||||
```
|
||||
|
||||
### Starred expression (1)
|
||||
@@ -82,10 +108,10 @@ reveal_type(b) # revealed: Literal[2]
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3 or more, got 2)"
|
||||
[a, *b, c, d] = (1, 2)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(a) # revealed: Unknown
|
||||
# TODO: Should be list[Any] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: Literal[2]
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -135,10 +161,10 @@ reveal_type(c) # revealed: @Todo(starred unpacking)
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 5 or more, got 1)"
|
||||
(a, b, c, *d, e, f) = (1,)
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(d) # revealed: Unknown
|
||||
reveal_type(e) # revealed: Unknown
|
||||
reveal_type(f) # revealed: Unknown
|
||||
```
|
||||
@@ -201,8 +227,8 @@ reveal_type(b) # revealed: LiteralString
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3, got 2)"
|
||||
a, b, c = "ab"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -211,8 +237,8 @@ reveal_type(c) # revealed: Unknown
|
||||
```py
|
||||
# error: [invalid-assignment] "Too many values to unpack (expected 2, got 3)"
|
||||
a, b = "abc"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Starred expression (1)
|
||||
@@ -220,10 +246,19 @@ reveal_type(b) # revealed: LiteralString
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3 or more, got 2)"
|
||||
(a, *b, c, d) = "ab"
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
# TODO: Should be list[LiteralString] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
reveal_type(c) # revealed: LiteralString
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: Unknown
|
||||
```
|
||||
|
||||
```py
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3 or more, got 1)"
|
||||
(a, b, *c, d) = "a"
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
reveal_type(d) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -274,7 +309,7 @@ reveal_type(c) # revealed: @Todo(starred unpacking)
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
(a, b) = "é"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -284,7 +319,7 @@ reveal_type(b) # revealed: Unknown
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
(a, b) = "\u9e6c"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -294,7 +329,7 @@ reveal_type(b) # revealed: Unknown
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
(a, b) = "\U0010ffff"
|
||||
|
||||
reveal_type(a) # revealed: LiteralString
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -388,8 +423,8 @@ def _(arg: tuple[int, bytes, int] | tuple[int, int, str, int, bytes]):
|
||||
# error: [invalid-assignment] "Too many values to unpack (expected 2, got 3)"
|
||||
# error: [invalid-assignment] "Too many values to unpack (expected 2, got 5)"
|
||||
a, b = arg
|
||||
reveal_type(a) # revealed: int
|
||||
reveal_type(b) # revealed: bytes | int
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Size mismatch (2)
|
||||
@@ -399,8 +434,8 @@ def _(arg: tuple[int, bytes] | tuple[int, str]):
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3, got 2)"
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3, got 2)"
|
||||
a, b, c = arg
|
||||
reveal_type(a) # revealed: int
|
||||
reveal_type(b) # revealed: bytes | str
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -542,7 +577,7 @@ for a, b in ((1, 2), ("a", "b")):
|
||||
# error: "Object of type `Literal[4]` is not iterable"
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 2, got 1)"
|
||||
for a, b in (1, 2, (3, "a"), 4, (5, "b"), "c"):
|
||||
reveal_type(a) # revealed: Unknown | Literal[3, 5] | LiteralString
|
||||
reveal_type(a) # revealed: Unknown | Literal[3, 5]
|
||||
reveal_type(b) # revealed: Unknown | Literal["a", "b"]
|
||||
```
|
||||
|
||||
@@ -578,3 +613,98 @@ def _(arg: tuple[tuple[int, str], Iterable]):
|
||||
reveal_type(a) # revealed: int | bytes
|
||||
reveal_type(b) # revealed: str | bytes
|
||||
```
|
||||
|
||||
## With statement
|
||||
|
||||
Unpacking in a `with` statement.
|
||||
|
||||
### Same types
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int, int]:
|
||||
return (1, 2)
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback) -> None:
|
||||
pass
|
||||
|
||||
with ContextManager() as (a, b):
|
||||
reveal_type(a) # revealed: int
|
||||
reveal_type(b) # revealed: int
|
||||
```
|
||||
|
||||
### Mixed types
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int, str]:
|
||||
return (1, "a")
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback) -> None:
|
||||
pass
|
||||
|
||||
with ContextManager() as (a, b):
|
||||
reveal_type(a) # revealed: int
|
||||
reveal_type(b) # revealed: str
|
||||
```
|
||||
|
||||
### Nested
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int, tuple[str, bytes]]:
|
||||
return (1, ("a", b"bytes"))
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback) -> None:
|
||||
pass
|
||||
|
||||
with ContextManager() as (a, (b, c)):
|
||||
reveal_type(a) # revealed: int
|
||||
reveal_type(b) # revealed: str
|
||||
reveal_type(c) # revealed: bytes
|
||||
```
|
||||
|
||||
### Starred expression
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int, int, int]:
|
||||
return (1, 2, 3)
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback) -> None:
|
||||
pass
|
||||
|
||||
with ContextManager() as (a, *b):
|
||||
reveal_type(a) # revealed: int
|
||||
# TODO: Should be list[int] once support for assigning to starred expression is added
|
||||
reveal_type(b) # revealed: @Todo(starred unpacking)
|
||||
```
|
||||
|
||||
### Unbound context manager expression
|
||||
|
||||
```py
|
||||
# TODO: should only be one diagnostic
|
||||
# error: [unresolved-reference] "Name `nonexistant` used when not defined"
|
||||
# error: [unresolved-reference] "Name `nonexistant` used when not defined"
|
||||
# error: [unresolved-reference] "Name `nonexistant` used when not defined"
|
||||
with nonexistant as (x, y):
|
||||
reveal_type(x) # revealed: Unknown
|
||||
reveal_type(y) # revealed: Unknown
|
||||
```
|
||||
|
||||
### Invalid unpacking
|
||||
|
||||
```py
|
||||
class ContextManager:
|
||||
def __enter__(self) -> tuple[int, str]:
|
||||
return (1, "a")
|
||||
|
||||
def __exit__(self, *args) -> None:
|
||||
pass
|
||||
|
||||
# error: [invalid-assignment] "Not enough values to unpack (expected 3, got 2)"
|
||||
with ContextManager() as (a, b, c):
|
||||
reveal_type(a) # revealed: Unknown
|
||||
reveal_type(b) # revealed: Unknown
|
||||
reveal_type(c) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -45,7 +45,7 @@ def _(flag: bool):
|
||||
```py
|
||||
class Manager: ...
|
||||
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it doesn't implement `__enter__` and `__exit__`"
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__` and `__exit__`"
|
||||
with Manager():
|
||||
...
|
||||
```
|
||||
@@ -56,7 +56,7 @@ with Manager():
|
||||
class Manager:
|
||||
def __exit__(self, exc_tpe, exc_value, traceback): ...
|
||||
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it doesn't implement `__enter__`"
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__enter__`"
|
||||
with Manager():
|
||||
...
|
||||
```
|
||||
@@ -67,7 +67,7 @@ with Manager():
|
||||
class Manager:
|
||||
def __enter__(self): ...
|
||||
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it doesn't implement `__exit__`"
|
||||
# error: [invalid-context-manager] "Object of type `Manager` cannot be used with `with` because it does not implement `__exit__`"
|
||||
with Manager():
|
||||
...
|
||||
```
|
||||
@@ -113,8 +113,7 @@ def _(flag: bool):
|
||||
class NotAContextManager: ...
|
||||
context_expr = Manager1() if flag else NotAContextManager()
|
||||
|
||||
# error: [invalid-context-manager] "Object of type `Manager1 | NotAContextManager` cannot be used with `with` because the method `__enter__` is possibly unbound"
|
||||
# error: [invalid-context-manager] "Object of type `Manager1 | NotAContextManager` cannot be used with `with` because the method `__exit__` is possibly unbound"
|
||||
# error: [invalid-context-manager] "Object of type `Manager1 | NotAContextManager` cannot be used with `with` because the methods `__enter__` and `__exit__` are possibly unbound"
|
||||
with context_expr as f:
|
||||
reveal_type(f) # revealed: str
|
||||
```
|
||||
|
||||
@@ -45,7 +45,7 @@ pub struct AstNodeRef<T> {
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
impl<T> AstNodeRef<T> {
|
||||
/// Creates a new `AstNodeRef` that reference `node`. The `parsed` is the [`ParsedModule`] to
|
||||
/// Creates a new `AstNodeRef` that references `node`. The `parsed` is the [`ParsedModule`] to
|
||||
/// which the `AstNodeRef` belongs.
|
||||
///
|
||||
/// ## Safety
|
||||
|
||||
@@ -25,7 +25,9 @@ pub(crate) mod tests {
|
||||
use crate::lint::{LintRegistry, RuleSelection};
|
||||
use anyhow::Context;
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{DbWithTestSystem, System, SystemPathBuf, TestSystem};
|
||||
use ruff_db::system::{
|
||||
DbWithTestSystem, DbWithWritableSystem as _, System, SystemPathBuf, TestSystem,
|
||||
};
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
use ruff_python_ast::PythonVersion;
|
||||
|
||||
@@ -14,6 +14,7 @@ pub use semantic_model::{HasType, SemanticModel};
|
||||
pub mod ast_node_ref;
|
||||
mod db;
|
||||
pub mod lint;
|
||||
pub(crate) mod list;
|
||||
mod module_name;
|
||||
mod module_resolver;
|
||||
mod node_key;
|
||||
|
||||
745
crates/red_knot_python_semantic/src/list.rs
Normal file
745
crates/red_knot_python_semantic/src/list.rs
Normal file
@@ -0,0 +1,745 @@
|
||||
//! Sorted, arena-allocated association lists
|
||||
//!
|
||||
//! An [_association list_][alist] is a linked list of key/value pairs. We additionally
|
||||
//! guarantee that the elements of an association list are sorted (by their keys), and that they do
|
||||
//! not contain any entries with duplicate keys.
|
||||
//!
|
||||
//! Association lists have fallen out of favor in recent decades, since you often need operations
|
||||
//! that are inefficient on them. In particular, looking up a random element by index is O(n), just
|
||||
//! like a linked list; and looking up an element by key is also O(n), since you must do a linear
|
||||
//! scan of the list to find the matching element. The typical implementation also suffers from
|
||||
//! poor cache locality and high memory allocation overhead, since individual list cells are
|
||||
//! typically allocated separately from the heap. We solve that last problem by storing the cells
|
||||
//! of an association list in an [`IndexVec`] arena.
|
||||
//!
|
||||
//! We exploit structural sharing where possible, reusing cells across multiple lists when we can.
|
||||
//! That said, we don't guarantee that lists are canonical — it's entirely possible for two lists
|
||||
//! with identical contents to use different list cells and have different identifiers.
|
||||
//!
|
||||
//! Given all of this, association lists have the following benefits:
|
||||
//!
|
||||
//! - Lists can be represented by a single 32-bit integer (the index into the arena of the head of
|
||||
//! the list).
|
||||
//! - Lists can be cloned in constant time, since the underlying cells are immutable.
|
||||
//! - Lists can be combined quickly (for both intersection and union), especially when you already
|
||||
//! have to zip through both input lists to combine each key's values in some way.
|
||||
//!
|
||||
//! There is one remaining caveat:
|
||||
//!
|
||||
//! - You should construct lists in key order; doing this lets you insert each value in constant time.
|
||||
//! Inserting entries in reverse order results in _quadratic_ overall time to construct the list.
|
||||
//!
|
||||
//! Lists are created using a [`ListBuilder`], and once created are accessed via a [`ListStorage`].
|
||||
//!
|
||||
//! ## Tests
|
||||
//!
|
||||
//! This module contains quickcheck-based property tests.
|
||||
//!
|
||||
//! These tests are disabled by default, as they are non-deterministic and slow. You can run them
|
||||
//! explicitly using:
|
||||
//!
|
||||
//! ```sh
|
||||
//! cargo test -p ruff_index -- --ignored list::property_tests
|
||||
//! ```
|
||||
//!
|
||||
//! The number of tests (default: 100) can be controlled by setting the `QUICKCHECK_TESTS`
|
||||
//! environment variable. For example:
|
||||
//!
|
||||
//! ```sh
|
||||
//! QUICKCHECK_TESTS=10000 cargo test …
|
||||
//! ```
|
||||
//!
|
||||
//! If you want to run these tests for a longer period of time, it's advisable to run them in
|
||||
//! release mode. As some tests are slower than others, it's advisable to run them in a loop until
|
||||
//! they fail:
|
||||
//!
|
||||
//! ```sh
|
||||
//! export QUICKCHECK_TESTS=100000
|
||||
//! while cargo test --release -p ruff_index -- \
|
||||
//! --ignored list::property_tests; do :; done
|
||||
//! ```
|
||||
//!
|
||||
//! [alist]: https://en.wikipedia.org/wiki/Association_list
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
|
||||
/// A handle to an association list. Use [`ListStorage`] to access its elements, and
|
||||
/// [`ListBuilder`] to construct other lists based on this one.
|
||||
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
|
||||
pub(crate) struct List<K, V = ()> {
|
||||
last: Option<ListCellId>,
|
||||
_phantom: PhantomData<(K, V)>,
|
||||
}
|
||||
|
||||
impl<K, V> List<K, V> {
|
||||
pub(crate) const fn empty() -> List<K, V> {
|
||||
List::new(None)
|
||||
}
|
||||
|
||||
const fn new(last: Option<ListCellId>) -> List<K, V> {
|
||||
List {
|
||||
last,
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> Default for List<K, V> {
|
||||
fn default() -> Self {
|
||||
List::empty()
|
||||
}
|
||||
}
|
||||
|
||||
/// Identifies a [`ListCell`] within a [`ListStorage`]'s cell arena.
#[newtype_index]
#[derive(PartialOrd, Ord)]
struct ListCellId;
|
||||
|
||||
/// Stores one or more association lists. This type provides read-only access to the lists. Use a
/// [`ListBuilder`] to create lists.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ListStorage<K, V = ()> {
    // Arena of cells. Lists share structure by pointing into this vector via `ListCellId`s.
    cells: IndexVec<ListCellId, ListCell<K, V>>,
}
|
||||
|
||||
/// Each association list is represented by a sequence of snoc cells. A snoc cell is like the more
/// familiar cons cell `(a : (b : (c : nil)))`, but in reverse `(((nil : a) : b) : c)`.
///
/// **Terminology**: The elements of a cons cell are usually called `head` and `tail` (assuming
/// you're not in Lisp-land, where they're called `car` and `cdr`). The elements of a snoc cell
/// are usually called `rest` and `last`.
#[derive(Debug, Eq, PartialEq)]
struct ListCell<K, V> {
    // The preceding cell in the list, or `None` if this is the first cell.
    rest: Option<ListCellId>,
    key: K,
    value: V,
}
|
||||
|
||||
/// Constructs one or more association lists.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ListBuilder<K, V = ()> {
    // The cell arena that all lists built by this builder share.
    storage: ListStorage<K, V>,

    /// Scratch space that lets us implement our list operations iteratively instead of
    /// recursively.
    ///
    /// The snoc-list representation that we use for alists is very common in functional
    /// programming, and the simplest implementations of most of the operations are defined
    /// recursively on that data structure. However, they are not _tail_ recursive, which means
    /// that the call stack grows linearly with the size of the input, which can be a problem for
    /// large lists.
    ///
    /// You can often rework those recursive implementations into iterative ones using an
    /// _accumulator_, but that comes at the cost of reversing the list. If we didn't care about
    /// ordering, that wouldn't be a problem. Since we want our lists to be sorted, we can't rely
    /// on that on its own.
    ///
    /// The next standard trick is to use an accumulator, and use a fix-up step at the end to
    /// reverse the (reversed) result in the accumulator, restoring the correct order.
    ///
    /// So, that's what we do! However, as one last optimization, we don't build up alist cells in
    /// our accumulator, since that would add wasteful cruft to our list storage. Instead, we use a
    /// normal Vec as our accumulator, holding the key/value pairs that should be stitched onto the
    /// end of whatever result list we are creating. For our fix-up step, we can consume a Vec in
    /// reverse order by `pop`ping the elements off one by one.
    scratch: Vec<(K, V)>,
}
|
||||
|
||||
impl<K, V> Default for ListBuilder<K, V> {
|
||||
fn default() -> Self {
|
||||
ListBuilder {
|
||||
storage: ListStorage {
|
||||
cells: IndexVec::default(),
|
||||
},
|
||||
scratch: Vec::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Lets callers use the read-only `ListStorage` API directly on a live builder.
impl<K, V> Deref for ListBuilder<K, V> {
    type Target = ListStorage<K, V>;
    fn deref(&self) -> &ListStorage<K, V> {
        &self.storage
    }
}
|
||||
|
||||
impl<K, V> ListBuilder<K, V> {
    /// Finalizes a `ListBuilder`. After calling this, you cannot create any new lists managed by
    /// this storage.
    pub(crate) fn build(mut self) -> ListStorage<K, V> {
        // Release any excess arena capacity, since no more cells will ever be added.
        self.storage.cells.shrink_to_fit();
        self.storage
    }

    /// Adds a new cell to the list.
    ///
    /// Adding an element always returns a non-empty list, which means we could technically use `I`
    /// as our return type, since we never return `None`. However, for consistency with our other
    /// methods, we always use `Option<I>` as the return type for any method that can return a
    /// list.
    #[allow(clippy::unnecessary_wraps)]
    fn add_cell(&mut self, rest: Option<ListCellId>, key: K, value: V) -> Option<ListCellId> {
        Some(self.storage.cells.push(ListCell { rest, key, value }))
    }

    /// Returns an entry pointing at where `key` would be inserted into a list.
    ///
    /// Note that when we add a new element to a list, we might have to clone the keys and values
    /// of some existing elements. This is because list cells are immutable once created, since
    /// they might be shared across multiple lists. We must therefore create new cells for every
    /// element that appears after the new element.
    ///
    /// That means that you should construct lists in key order, since that means that there are no
    /// entries to duplicate for each insertion. If you construct the list in reverse order, we
    /// will have to duplicate O(n) entries for each insertion, making it _quadratic_ to construct
    /// the entire list.
    pub(crate) fn entry(&mut self, list: List<K, V>, key: K) -> ListEntry<K, V>
    where
        K: Clone + Ord,
        V: Clone,
    {
        self.scratch.clear();

        // Iterate through the input list, looking for the position where the key should be
        // inserted. We will need to create new list cells for any elements that appear after the
        // new key. Stash those away in our scratch accumulator as we step through the input. The
        // result of the loop is that "rest" of the result list, which we will stitch the new key
        // (and any succeeding keys) onto.
        let mut curr = list.last;
        while let Some(curr_id) = curr {
            let cell = &self.storage.cells[curr_id];
            match key.cmp(&cell.key) {
                // We found an existing entry in the input list with the desired key.
                Ordering::Equal => {
                    return ListEntry {
                        builder: self,
                        list,
                        key,
                        rest: ListTail::Occupied(curr_id),
                    };
                }
                // The input list does not already contain this key, and this is where we should
                // add it.
                Ordering::Greater => {
                    return ListEntry {
                        builder: self,
                        list,
                        key,
                        rest: ListTail::Vacant(curr_id),
                    };
                }
                // If this key is in the list, it's further along. We'll need to create a new cell
                // for this entry in the result list, so add its contents to the scratch
                // accumulator.
                Ordering::Less => {
                    let new_key = cell.key.clone();
                    let new_value = cell.value.clone();
                    self.scratch.push((new_key, new_value));
                    curr = cell.rest;
                }
            }
        }

        // We made it all the way through the list without finding the desired key, so it belongs
        // at the beginning. (And we will unfortunately have to duplicate every existing cell if
        // the caller proceeds with inserting the new key!)
        ListEntry {
            builder: self,
            list,
            key,
            rest: ListTail::Beginning,
        }
    }
}
|
||||
|
||||
/// A view into a list, indicating where a key would be inserted.
pub(crate) struct ListEntry<'a, K, V = ()> {
    // Mutable borrow of the builder so the entry can create new cells when it's resolved.
    builder: &'a mut ListBuilder<K, V>,
    // The list that was probed; returned unchanged if the key is already present.
    list: List<K, V>,
    key: K,
    /// Points at the element that already contains `key`, if there is one, or the element
    /// immediately before where it would go, if not.
    rest: ListTail<ListCellId>,
}
|
||||
|
||||
/// Describes where a probed key sits relative to an existing list (see [`ListEntry::rest`]).
enum ListTail<I> {
    /// The list does not already contain `key`, and it would go at the beginning of the list.
    Beginning,
    /// The list already contains `key`
    Occupied(I),
    /// The list does not already contain key, and it would go immediately after the given element
    Vacant(I),
}
|
||||
|
||||
impl<K, V> ListEntry<'_, K, V>
|
||||
where
|
||||
K: Clone,
|
||||
V: Clone,
|
||||
{
|
||||
fn stitch_up(self, rest: Option<ListCellId>, value: V) -> List<K, V> {
|
||||
let mut last = rest;
|
||||
last = self.builder.add_cell(last, self.key, value);
|
||||
while let Some((key, value)) = self.builder.scratch.pop() {
|
||||
last = self.builder.add_cell(last, key, value);
|
||||
}
|
||||
List::new(last)
|
||||
}
|
||||
|
||||
/// Inserts a new key/value into the list if the key is not already present. If the list
|
||||
/// already contains `key`, we return the original list as-is, and do not invoke your closure.
|
||||
pub(crate) fn or_insert_with<F>(self, f: F) -> List<K, V>
|
||||
where
|
||||
F: FnOnce() -> V,
|
||||
{
|
||||
let rest = match self.rest {
|
||||
// If the list already contains `key`, we don't need to replace anything, and can
|
||||
// return the original list unmodified.
|
||||
ListTail::Occupied(_) => return self.list,
|
||||
// Otherwise we have to create a new entry and stitch it onto the list.
|
||||
ListTail::Beginning => None,
|
||||
ListTail::Vacant(index) => Some(index),
|
||||
};
|
||||
self.stitch_up(rest, f())
|
||||
}
|
||||
|
||||
/// Inserts a new key and the default value into the list if the key is not already present. If
|
||||
/// the list already contains `key`, we return the original list as-is.
|
||||
pub(crate) fn or_insert_default(self) -> List<K, V>
|
||||
where
|
||||
V: Default,
|
||||
{
|
||||
self.or_insert_with(V::default)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V> ListBuilder<K, V> {
    /// Returns the intersection of two lists. The result will contain an entry for any key that
    /// appears in both lists. The corresponding values will be combined using the `combine`
    /// function that you provide.
    #[allow(clippy::needless_pass_by_value)]
    pub(crate) fn intersect_with<F>(
        &mut self,
        a: List<K, V>,
        b: List<K, V>,
        mut combine: F,
    ) -> List<K, V>
    where
        K: Clone + Ord,
        V: Clone,
        F: FnMut(&V, &V) -> V,
    {
        self.scratch.clear();

        // Zip through the lists, building up the keys/values of the new entries into our scratch
        // vector. Continue until we run out of elements in either list. (Any remaining elements in
        // the other list cannot possibly be in the intersection.)
        let mut a = a.last;
        let mut b = b.last;
        while let (Some(a_id), Some(b_id)) = (a, b) {
            let a_cell = &self.storage.cells[a_id];
            let b_cell = &self.storage.cells[b_id];
            match a_cell.key.cmp(&b_cell.key) {
                // Both lists contain this key; combine their values
                Ordering::Equal => {
                    let new_key = a_cell.key.clone();
                    let new_value = combine(&a_cell.value, &b_cell.value);
                    self.scratch.push((new_key, new_value));
                    a = a_cell.rest;
                    b = b_cell.rest;
                }
                // a's key is only present in a, so it's not included in the result.
                Ordering::Greater => a = a_cell.rest,
                // b's key is only present in b, so it's not included in the result.
                Ordering::Less => b = b_cell.rest,
            }
        }

        // Once the iteration loop terminates, we stitch the new entries back together into proper
        // alist cells.
        let mut last = None;
        while let Some((key, value)) = self.scratch.pop() {
            last = self.add_cell(last, key, value);
        }
        List::new(last)
    }
}
|
||||
|
||||
// ----
|
||||
// Sets
|
||||
|
||||
// ----
// Sets

impl<K> ListStorage<K, ()> {
    /// Iterates through the elements in a set _in reverse order_.
    #[allow(clippy::needless_pass_by_value)]
    pub(crate) fn iter_set_reverse(&self, set: List<K, ()>) -> ListSetReverseIterator<K> {
        ListSetReverseIterator {
            storage: self,
            curr: set.last,
        }
    }
}
|
||||
|
||||
/// Iterator over a set's elements in reverse order; see [`ListStorage::iter_set_reverse`].
pub(crate) struct ListSetReverseIterator<'a, K> {
    storage: &'a ListStorage<K, ()>,
    // Id of the next cell to yield, or `None` once the iterator is exhausted.
    curr: Option<ListCellId>,
}
|
||||
|
||||
impl<'a, K> Iterator for ListSetReverseIterator<'a, K> {
|
||||
type Item = &'a K;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let cell = &self.storage.cells[self.curr?];
|
||||
self.curr = cell.rest;
|
||||
Some(&cell.key)
|
||||
}
|
||||
}
|
||||
|
||||
impl<K> ListBuilder<K, ()> {
    /// Adds an element to a set.
    pub(crate) fn insert(&mut self, set: List<K, ()>, element: K) -> List<K, ()>
    where
        K: Clone + Ord,
    {
        // A set is just a map with unit values, so delegate to the map entry API.
        self.entry(set, element).or_insert_default()
    }

    /// Returns the intersection of two sets. The result will contain any value that appears in
    /// both sets.
    pub(crate) fn intersect(&mut self, a: List<K, ()>, b: List<K, ()>) -> List<K, ()>
    where
        K: Clone + Ord,
    {
        // Unit values are trivially combined.
        self.intersect_with(a, b, |(), ()| ())
    }
}
|
||||
|
||||
// -----
|
||||
// Tests
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    use std::fmt::Display;
    use std::fmt::Write;

    // ----
    // Sets

    impl<K> ListStorage<K>
    where
        K: Display,
    {
        // Renders a set as `[a, b, c]` in ascending key order (by reversing the reverse
        // iterator), for use in test assertions.
        fn display_set(&self, list: List<K, ()>) -> String {
            let elements: Vec<_> = self.iter_set_reverse(list).collect();
            let mut result = String::new();
            result.push('[');
            for element in elements.into_iter().rev() {
                if result.len() > 1 {
                    result.push_str(", ");
                }
                write!(&mut result, "{element}").unwrap();
            }
            result.push(']');
            result
        }
    }

    #[test]
    fn can_insert_into_set() {
        let mut builder = ListBuilder::<u16>::default();

        // Build up the set in order
        let empty = List::empty();
        let set1 = builder.insert(empty, 1);
        let set12 = builder.insert(set1, 2);
        let set123 = builder.insert(set12, 3);
        let set1232 = builder.insert(set123, 2);
        assert_eq!(builder.display_set(empty), "[]");
        assert_eq!(builder.display_set(set1), "[1]");
        assert_eq!(builder.display_set(set12), "[1, 2]");
        assert_eq!(builder.display_set(set123), "[1, 2, 3]");
        assert_eq!(builder.display_set(set1232), "[1, 2, 3]");

        // And in reverse order
        let set3 = builder.insert(empty, 3);
        let set32 = builder.insert(set3, 2);
        let set321 = builder.insert(set32, 1);
        let set3212 = builder.insert(set321, 2);
        assert_eq!(builder.display_set(empty), "[]");
        assert_eq!(builder.display_set(set3), "[3]");
        assert_eq!(builder.display_set(set32), "[2, 3]");
        assert_eq!(builder.display_set(set321), "[1, 2, 3]");
        assert_eq!(builder.display_set(set3212), "[1, 2, 3]");
    }

    #[test]
    fn can_intersect_sets() {
        let mut builder = ListBuilder::<u16>::default();

        let empty = List::empty();
        let set1 = builder.insert(empty, 1);
        let set12 = builder.insert(set1, 2);
        let set123 = builder.insert(set12, 3);
        let set1234 = builder.insert(set123, 4);

        let set2 = builder.insert(empty, 2);
        let set24 = builder.insert(set2, 4);
        let set245 = builder.insert(set24, 5);
        let set2457 = builder.insert(set245, 7);

        let intersection = builder.intersect(empty, empty);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(empty, set1234);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(empty, set2457);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(set1, set1234);
        assert_eq!(builder.display_set(intersection), "[1]");
        let intersection = builder.intersect(set1, set2457);
        assert_eq!(builder.display_set(intersection), "[]");
        let intersection = builder.intersect(set2, set1234);
        assert_eq!(builder.display_set(intersection), "[2]");
        let intersection = builder.intersect(set2, set2457);
        assert_eq!(builder.display_set(intersection), "[2]");
        let intersection = builder.intersect(set1234, set2457);
        assert_eq!(builder.display_set(intersection), "[2, 4]");
    }

    // ----
    // Maps

    impl<K, V> ListStorage<K, V> {
        /// Iterates through the entries in a list _in reverse order by key_.
        #[allow(clippy::needless_pass_by_value)]
        pub(crate) fn iter_reverse(&self, list: List<K, V>) -> ListReverseIterator<'_, K, V> {
            ListReverseIterator {
                storage: self,
                curr: list.last,
            }
        }
    }

    // Iterator over a map's (key, value) pairs in reverse key order.
    pub(crate) struct ListReverseIterator<'a, K, V> {
        storage: &'a ListStorage<K, V>,
        curr: Option<ListCellId>,
    }

    impl<'a, K, V> Iterator for ListReverseIterator<'a, K, V> {
        type Item = (&'a K, &'a V);

        fn next(&mut self) -> Option<Self::Item> {
            let cell = &self.storage.cells[self.curr?];
            self.curr = cell.rest;
            Some((&cell.key, &cell.value))
        }
    }

    impl<K, V> ListStorage<K, V>
    where
        K: Display,
        V: Display,
    {
        // Renders a map as `[k1:v1, k2:v2]` in ascending key order, for test assertions.
        fn display(&self, list: List<K, V>) -> String {
            let entries: Vec<_> = self.iter_reverse(list).collect();
            let mut result = String::new();
            result.push('[');
            for (key, value) in entries.into_iter().rev() {
                if result.len() > 1 {
                    result.push_str(", ");
                }
                write!(&mut result, "{key}:{value}").unwrap();
            }
            result.push(']');
            result
        }
    }

    #[test]
    fn can_insert_into_map() {
        let mut builder = ListBuilder::<u16, u16>::default();

        // Build up the map in order
        let empty = List::empty();
        let map1 = builder.entry(empty, 1).or_insert_with(|| 1);
        let map12 = builder.entry(map1, 2).or_insert_with(|| 2);
        let map123 = builder.entry(map12, 3).or_insert_with(|| 3);
        let map1232 = builder.entry(map123, 2).or_insert_with(|| 4);
        assert_eq!(builder.display(empty), "[]");
        assert_eq!(builder.display(map1), "[1:1]");
        assert_eq!(builder.display(map12), "[1:1, 2:2]");
        assert_eq!(builder.display(map123), "[1:1, 2:2, 3:3]");
        assert_eq!(builder.display(map1232), "[1:1, 2:2, 3:3]");

        // And in reverse order
        let map3 = builder.entry(empty, 3).or_insert_with(|| 3);
        let map32 = builder.entry(map3, 2).or_insert_with(|| 2);
        let map321 = builder.entry(map32, 1).or_insert_with(|| 1);
        let map3212 = builder.entry(map321, 2).or_insert_with(|| 4);
        assert_eq!(builder.display(empty), "[]");
        assert_eq!(builder.display(map3), "[3:3]");
        assert_eq!(builder.display(map32), "[2:2, 3:3]");
        assert_eq!(builder.display(map321), "[1:1, 2:2, 3:3]");
        assert_eq!(builder.display(map3212), "[1:1, 2:2, 3:3]");
    }

    #[test]
    fn can_intersect_maps() {
        let mut builder = ListBuilder::<u16, u16>::default();

        let empty = List::empty();
        let map1 = builder.entry(empty, 1).or_insert_with(|| 1);
        let map12 = builder.entry(map1, 2).or_insert_with(|| 2);
        let map123 = builder.entry(map12, 3).or_insert_with(|| 3);
        let map1234 = builder.entry(map123, 4).or_insert_with(|| 4);

        let map2 = builder.entry(empty, 2).or_insert_with(|| 20);
        let map24 = builder.entry(map2, 4).or_insert_with(|| 40);
        let map245 = builder.entry(map24, 5).or_insert_with(|| 50);
        let map2457 = builder.entry(map245, 7).or_insert_with(|| 70);

        let intersection = builder.intersect_with(empty, empty, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(empty, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(empty, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(map1, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[1:2]");
        let intersection = builder.intersect_with(map1, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[]");
        let intersection = builder.intersect_with(map2, map1234, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:22]");
        let intersection = builder.intersect_with(map2, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:40]");
        let intersection = builder.intersect_with(map1234, map2457, |a, b| a + b);
        assert_eq!(builder.display(intersection), "[2:22, 4:44]");
    }
}
|
||||
|
||||
// --------------
|
||||
// Property tests
|
||||
|
||||
#[cfg(test)]
mod property_tests {
    use super::*;

    use std::collections::{BTreeMap, BTreeSet};

    impl<K> ListBuilder<K>
    where
        K: Clone + Ord,
    {
        // Builds a set by inserting the given elements one at a time, in input order.
        fn set_from_elements<'a>(&mut self, elements: impl IntoIterator<Item = &'a K>) -> List<K>
        where
            K: 'a,
        {
            let mut set = List::empty();
            for element in elements {
                set = self.insert(set, element.clone());
            }
            set
        }
    }

    // For most of the tests below, we use a vec as our input, instead of a HashSet or BTreeSet,
    // since we want to test the behavior of adding duplicate elements to the set.

    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[allow(clippy::needless_pass_by_value)]
    fn roundtrip_set_from_vec(elements: Vec<u16>) -> bool {
        let mut builder = ListBuilder::default();
        let set = builder.set_from_elements(&elements);
        // A BTreeSet models the expected semantics: deduplicated and sorted.
        let expected: BTreeSet<_> = elements.iter().copied().collect();
        let actual = builder.iter_set_reverse(set).copied();
        actual.eq(expected.into_iter().rev())
    }

    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[allow(clippy::needless_pass_by_value)]
    fn roundtrip_set_intersection(a_elements: Vec<u16>, b_elements: Vec<u16>) -> bool {
        let mut builder = ListBuilder::default();
        let a = builder.set_from_elements(&a_elements);
        let b = builder.set_from_elements(&b_elements);
        let intersection = builder.intersect(a, b);
        let a_set: BTreeSet<_> = a_elements.iter().copied().collect();
        let b_set: BTreeSet<_> = b_elements.iter().copied().collect();
        let expected: Vec<_> = a_set.intersection(&b_set).copied().collect();
        let actual = builder.iter_set_reverse(intersection).copied();
        actual.eq(expected.into_iter().rev())
    }

    impl<K, V> ListBuilder<K, V>
    where
        K: Clone + Ord,
        V: Clone + Eq,
    {
        // Builds a map from (key, value) pairs. Pairs are inserted in reverse input order, and
        // `or_insert_with` keeps the existing value on duplicates, so the FIRST occurrence of a
        // key in the input wins — matching `BTreeMap::from_iter`-style expectations below only
        // because duplicates overwrite there; see the tests for how expected values are built.
        fn set_from_pairs<'a, I>(&mut self, pairs: I) -> List<K, V>
        where
            K: 'a,
            V: 'a,
            I: IntoIterator<Item = &'a (K, V)>,
            I::IntoIter: DoubleEndedIterator,
        {
            let mut list = List::empty();
            for (key, value) in pairs.into_iter().rev() {
                list = self
                    .entry(list, key.clone())
                    .or_insert_with(|| value.clone());
            }
            list
        }
    }

    // Full outer join of two BTreeMaps: every key from either side, with each side's value
    // present as `Some` where the key existed.
    fn join<K, V>(a: &BTreeMap<K, V>, b: &BTreeMap<K, V>) -> BTreeMap<K, (Option<V>, Option<V>)>
    where
        K: Clone + Ord,
        V: Clone + Ord,
    {
        let mut joined: BTreeMap<K, (Option<V>, Option<V>)> = BTreeMap::new();
        for (k, v) in a {
            joined.entry(k.clone()).or_default().0 = Some(v.clone());
        }
        for (k, v) in b {
            joined.entry(k.clone()).or_default().1 = Some(v.clone());
        }
        joined
    }

    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[allow(clippy::needless_pass_by_value)]
    fn roundtrip_list_from_vec(pairs: Vec<(u16, u16)>) -> bool {
        let mut builder = ListBuilder::default();
        let list = builder.set_from_pairs(&pairs);
        let expected: BTreeMap<_, _> = pairs.iter().copied().collect();
        let actual = builder.iter_reverse(list).map(|(k, v)| (*k, *v));
        actual.eq(expected.into_iter().rev())
    }

    #[quickcheck_macros::quickcheck]
    #[ignore]
    #[allow(clippy::needless_pass_by_value)]
    fn roundtrip_list_intersection(
        a_elements: Vec<(u16, u16)>,
        b_elements: Vec<(u16, u16)>,
    ) -> bool {
        let mut builder = ListBuilder::default();
        let a = builder.set_from_pairs(&a_elements);
        let b = builder.set_from_pairs(&b_elements);
        let intersection = builder.intersect_with(a, b, |a, b| a + b);
        let a_map: BTreeMap<_, _> = a_elements.iter().copied().collect();
        let b_map: BTreeMap<_, _> = b_elements.iter().copied().collect();
        let intersection_map = join(&a_map, &b_map);
        // Keep only keys present in both maps; the combined value is the sum, mirroring the
        // `combine` closure passed to `intersect_with` above.
        let expected: Vec<_> = intersection_map
            .into_iter()
            .filter_map(|(k, (v1, v2))| Some((k, v1? + v2?)))
            .collect();
        let actual = builder.iter_reverse(intersection).map(|(k, v)| (*k, *v));
        actual.eq(expected.into_iter().rev())
    }
}
|
||||
@@ -720,7 +720,7 @@ impl<'db> ResolverContext<'db> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::{system_path_to_file, File, FilePath};
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::{DbWithTestSystem as _, DbWithWritableSystem as _};
|
||||
use ruff_db::testing::{
|
||||
assert_const_function_query_was_not_run, assert_function_query_was_not_run,
|
||||
};
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::system::{
|
||||
DbWithTestSystem as _, DbWithWritableSystem as _, SystemPath, SystemPathBuf,
|
||||
};
|
||||
use ruff_db::vendored::VendoredPathBuf;
|
||||
use ruff_python_ast::PythonVersion;
|
||||
|
||||
|
||||
@@ -409,7 +409,7 @@ impl FusedIterator for ChildrenIter<'_> {}
|
||||
mod tests {
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
@@ -440,7 +440,7 @@ mod tests {
|
||||
file: File,
|
||||
}
|
||||
|
||||
fn test_case(content: impl ToString) -> TestCase {
|
||||
fn test_case(content: impl AsRef<str>) -> TestCase {
|
||||
let mut db = TestDb::new();
|
||||
db.write_file("test.py", content).unwrap();
|
||||
|
||||
|
||||
@@ -22,6 +22,10 @@ pub(crate) enum AttributeAssignment<'db> {
|
||||
/// `for self.x in <iterable>`.
|
||||
Iterable { iterable: Expression<'db> },
|
||||
|
||||
/// An attribute assignment where the expression to be assigned is a context manager, for example
|
||||
/// `with <context_manager> as self.x`.
|
||||
ContextManager { context_manager: Expression<'db> },
|
||||
|
||||
/// An attribute assignment where the left-hand side is an unpacking expression,
|
||||
/// e.g. `self.x, self.y = <value>`.
|
||||
Unpack {
|
||||
|
||||
@@ -353,7 +353,7 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
#[allow(unsafe_code)]
|
||||
// SAFETY: `definition_node` is guaranteed to be a child of `self.module`
|
||||
let kind = unsafe { definition_node.into_owned(self.module.clone()) };
|
||||
let category = kind.category();
|
||||
let category = kind.category(self.file.is_stub(self.db.upcast()));
|
||||
let is_reexported = kind.is_reexported();
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
@@ -1032,6 +1032,7 @@ where
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
// SAFETY: `target` belongs to the `self.module` tree
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
AstNodeRef::new(self.module.clone(), target)
|
||||
@@ -1262,16 +1263,64 @@ where
|
||||
is_async,
|
||||
..
|
||||
}) => {
|
||||
for item in items {
|
||||
self.visit_expr(&item.context_expr);
|
||||
if let Some(optional_vars) = item.optional_vars.as_deref() {
|
||||
self.add_standalone_expression(&item.context_expr);
|
||||
self.push_assignment(CurrentAssignment::WithItem {
|
||||
item,
|
||||
is_async: *is_async,
|
||||
});
|
||||
for item @ ruff_python_ast::WithItem {
|
||||
range: _,
|
||||
context_expr,
|
||||
optional_vars,
|
||||
} in items
|
||||
{
|
||||
self.visit_expr(context_expr);
|
||||
if let Some(optional_vars) = optional_vars.as_deref() {
|
||||
let context_manager = self.add_standalone_expression(context_expr);
|
||||
let current_assignment = match optional_vars {
|
||||
ast::Expr::Tuple(_) | ast::Expr::List(_) => {
|
||||
Some(CurrentAssignment::WithItem {
|
||||
item,
|
||||
first: true,
|
||||
is_async: *is_async,
|
||||
unpack: Some(Unpack::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
// SAFETY: the node `optional_vars` belongs to the `self.module` tree
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
AstNodeRef::new(self.module.clone(), optional_vars)
|
||||
},
|
||||
UnpackValue::ContextManager(context_manager),
|
||||
countme::Count::default(),
|
||||
)),
|
||||
})
|
||||
}
|
||||
ast::Expr::Name(_) => Some(CurrentAssignment::WithItem {
|
||||
item,
|
||||
is_async: *is_async,
|
||||
unpack: None,
|
||||
// `false` is arbitrary here---we don't actually use it other than in the actual unpacks
|
||||
first: false,
|
||||
}),
|
||||
ast::Expr::Attribute(ast::ExprAttribute {
|
||||
value: object,
|
||||
attr,
|
||||
..
|
||||
}) => {
|
||||
self.register_attribute_assignment(
|
||||
object,
|
||||
attr,
|
||||
AttributeAssignment::ContextManager { context_manager },
|
||||
);
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(current_assignment) = current_assignment {
|
||||
self.push_assignment(current_assignment);
|
||||
}
|
||||
self.visit_expr(optional_vars);
|
||||
self.pop_assignment();
|
||||
if current_assignment.is_some() {
|
||||
self.pop_assignment();
|
||||
}
|
||||
}
|
||||
}
|
||||
self.visit_body(body);
|
||||
@@ -1304,6 +1353,7 @@ where
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
// SAFETY: the node `target` belongs to the `self.module` tree
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
AstNodeRef::new(self.module.clone(), target)
|
||||
@@ -1631,12 +1681,19 @@ where
|
||||
},
|
||||
);
|
||||
}
|
||||
Some(CurrentAssignment::WithItem { item, is_async }) => {
|
||||
Some(CurrentAssignment::WithItem {
|
||||
item,
|
||||
first,
|
||||
is_async,
|
||||
unpack,
|
||||
}) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
WithItemDefinitionNodeRef {
|
||||
node: item,
|
||||
target: name_node,
|
||||
unpack,
|
||||
context_expr: &item.context_expr,
|
||||
name: name_node,
|
||||
first,
|
||||
is_async,
|
||||
},
|
||||
);
|
||||
@@ -1646,7 +1703,9 @@ where
|
||||
}
|
||||
|
||||
if let Some(
|
||||
CurrentAssignment::Assign { first, .. } | CurrentAssignment::For { first, .. },
|
||||
CurrentAssignment::Assign { first, .. }
|
||||
| CurrentAssignment::For { first, .. }
|
||||
| CurrentAssignment::WithItem { first, .. },
|
||||
) = self.current_assignment_mut()
|
||||
{
|
||||
*first = false;
|
||||
@@ -1826,6 +1885,10 @@ where
|
||||
| CurrentAssignment::For {
|
||||
unpack: Some(unpack),
|
||||
..
|
||||
}
|
||||
| CurrentAssignment::WithItem {
|
||||
unpack: Some(unpack),
|
||||
..
|
||||
},
|
||||
) = self.current_assignment()
|
||||
{
|
||||
@@ -1919,7 +1982,9 @@ enum CurrentAssignment<'a> {
|
||||
},
|
||||
WithItem {
|
||||
item: &'a ast::WithItem,
|
||||
first: bool,
|
||||
is_async: bool,
|
||||
unpack: Option<Unpack<'a>>,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -201,8 +201,10 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct WithItemDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::WithItem,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
pub(crate) unpack: Option<Unpack<'a>>,
|
||||
pub(crate) context_expr: &'a ast::Expr,
|
||||
pub(crate) name: &'a ast::ExprName,
|
||||
pub(crate) first: bool,
|
||||
pub(crate) is_async: bool,
|
||||
}
|
||||
|
||||
@@ -323,12 +325,16 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
|
||||
}
|
||||
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef {
|
||||
node,
|
||||
target,
|
||||
unpack,
|
||||
context_expr,
|
||||
name,
|
||||
first,
|
||||
is_async,
|
||||
}) => DefinitionKind::WithItem(WithItemDefinitionKind {
|
||||
node: AstNodeRef::new(parsed.clone(), node),
|
||||
target: AstNodeRef::new(parsed, target),
|
||||
target: TargetKind::from(unpack),
|
||||
context_expr: AstNodeRef::new(parsed.clone(), context_expr),
|
||||
name: AstNodeRef::new(parsed, name),
|
||||
first,
|
||||
is_async,
|
||||
}),
|
||||
DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
|
||||
@@ -394,10 +400,12 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
Self::VariadicKeywordParameter(node) => node.into(),
|
||||
Self::Parameter(node) => node.into(),
|
||||
Self::WithItem(WithItemDefinitionNodeRef {
|
||||
node: _,
|
||||
target,
|
||||
unpack: _,
|
||||
context_expr: _,
|
||||
first: _,
|
||||
is_async: _,
|
||||
}) => target.into(),
|
||||
name,
|
||||
}) => name.into(),
|
||||
Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
|
||||
identifier.into()
|
||||
}
|
||||
@@ -467,7 +475,7 @@ pub enum DefinitionKind<'db> {
|
||||
VariadicPositionalParameter(AstNodeRef<ast::Parameter>),
|
||||
VariadicKeywordParameter(AstNodeRef<ast::Parameter>),
|
||||
Parameter(AstNodeRef<ast::ParameterWithDefault>),
|
||||
WithItem(WithItemDefinitionKind),
|
||||
WithItem(WithItemDefinitionKind<'db>),
|
||||
MatchPattern(MatchPatternDefinitionKind),
|
||||
ExceptHandler(ExceptHandlerDefinitionKind),
|
||||
TypeVar(AstNodeRef<ast::TypeParamTypeVar>),
|
||||
@@ -506,7 +514,7 @@ impl DefinitionKind<'_> {
|
||||
DefinitionKind::VariadicPositionalParameter(parameter) => parameter.name.range(),
|
||||
DefinitionKind::VariadicKeywordParameter(parameter) => parameter.name.range(),
|
||||
DefinitionKind::Parameter(parameter) => parameter.parameter.name.range(),
|
||||
DefinitionKind::WithItem(with_item) => with_item.target().range(),
|
||||
DefinitionKind::WithItem(with_item) => with_item.name().range(),
|
||||
DefinitionKind::MatchPattern(match_pattern) => match_pattern.identifier.range(),
|
||||
DefinitionKind::ExceptHandler(handler) => handler.node().range(),
|
||||
DefinitionKind::TypeVar(type_var) => type_var.name.range(),
|
||||
@@ -515,7 +523,7 @@ impl DefinitionKind<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn category(&self) -> DefinitionCategory {
|
||||
pub(crate) fn category(&self, in_stub: bool) -> DefinitionCategory {
|
||||
match self {
|
||||
// functions, classes, and imports always bind, and we consider them declarations
|
||||
DefinitionKind::Function(_)
|
||||
@@ -543,9 +551,10 @@ impl DefinitionKind<'_> {
|
||||
DefinitionCategory::Binding
|
||||
}
|
||||
}
|
||||
// annotated assignment is always a declaration, only a binding if there is a RHS
|
||||
// Annotated assignment is always a declaration. It is also a binding if there is a RHS
|
||||
// or if we are in a stub file. Unfortunately, it is common for stubs to omit even an `...` value placeholder.
|
||||
DefinitionKind::AnnotatedAssignment(ann_assign) => {
|
||||
if ann_assign.value.is_some() {
|
||||
if in_stub || ann_assign.value.is_some() {
|
||||
DefinitionCategory::DeclarationAndBinding
|
||||
} else {
|
||||
DefinitionCategory::Declaration
|
||||
@@ -687,19 +696,29 @@ impl<'db> AssignmentDefinitionKind<'db> {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct WithItemDefinitionKind {
|
||||
node: AstNodeRef<ast::WithItem>,
|
||||
target: AstNodeRef<ast::ExprName>,
|
||||
pub struct WithItemDefinitionKind<'db> {
|
||||
target: TargetKind<'db>,
|
||||
context_expr: AstNodeRef<ast::Expr>,
|
||||
name: AstNodeRef<ast::ExprName>,
|
||||
first: bool,
|
||||
is_async: bool,
|
||||
}
|
||||
|
||||
impl WithItemDefinitionKind {
|
||||
pub(crate) fn node(&self) -> &ast::WithItem {
|
||||
self.node.node()
|
||||
impl<'db> WithItemDefinitionKind<'db> {
|
||||
pub(crate) fn context_expr(&self) -> &ast::Expr {
|
||||
self.context_expr.node()
|
||||
}
|
||||
|
||||
pub(crate) fn target(&self) -> &ast::ExprName {
|
||||
self.target.node()
|
||||
pub(crate) fn target(&self) -> TargetKind<'db> {
|
||||
self.target
|
||||
}
|
||||
|
||||
pub(crate) fn name(&self) -> &ast::ExprName {
|
||||
self.name.node()
|
||||
}
|
||||
|
||||
pub(crate) const fn is_first(&self) -> bool {
|
||||
self.first
|
||||
}
|
||||
|
||||
pub(crate) const fn is_async(&self) -> bool {
|
||||
|
||||
@@ -16,10 +16,10 @@
|
||||
//! - Iterating through the predicates in a constraint
|
||||
//!
|
||||
//! In particular, note that we do not need random access to the predicates in a constraint. That
|
||||
//! means that we can use a simple [_sorted association list_][ruff_index::list] as our data
|
||||
//! structure. That lets us use a single 32-bit integer to store each narrowing constraint, no
|
||||
//! matter how many predicates it contains. It also makes merging two narrowing constraints fast,
|
||||
//! since alists support fast intersection.
|
||||
//! means that we can use a simple [_sorted association list_][crate::list] as our data structure.
|
||||
//! That lets us use a single 32-bit integer to store each narrowing constraint, no matter how many
|
||||
//! predicates it contains. It also makes merging two narrowing constraints fast, since alists
|
||||
//! support fast intersection.
|
||||
//!
|
||||
//! Because we visit the contents of each scope in source-file order, and assign scoped IDs in
|
||||
//! source-file order, that means that we will tend to visit narrowing constraints in order by
|
||||
@@ -28,21 +28,15 @@
|
||||
//!
|
||||
//! [`Predicate`]: crate::semantic_index::predicate::Predicate
|
||||
|
||||
use ruff_index::list::{ListBuilder, ListSetReverseIterator, ListStorage};
|
||||
use ruff_index::newtype_index;
|
||||
|
||||
use crate::list::{List, ListBuilder, ListSetReverseIterator, ListStorage};
|
||||
use crate::semantic_index::predicate::ScopedPredicateId;
|
||||
|
||||
/// A narrowing constraint associated with a live binding.
|
||||
///
|
||||
/// A constraint is a list of [`Predicate`]s that each constrain the type of the binding's symbol.
|
||||
///
|
||||
/// An instance of this type represents a _non-empty_ narrowing constraint. You will often wrap
|
||||
/// this in `Option` and use `None` to represent an empty narrowing constraint.
|
||||
///
|
||||
/// [`Predicate`]: crate::semantic_index::predicate::Predicate
|
||||
#[newtype_index]
|
||||
pub(crate) struct ScopedNarrowingConstraintId;
|
||||
pub(crate) type ScopedNarrowingConstraint = List<ScopedNarrowingConstraintPredicate>;
|
||||
|
||||
/// One of the [`Predicate`]s in a narrowing constraint, which constraints the type of the
|
||||
/// binding's symbol.
|
||||
@@ -71,7 +65,7 @@ impl From<ScopedPredicateId> for ScopedNarrowingConstraintPredicate {
|
||||
/// A collection of narrowing constraints for a given scope.
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub(crate) struct NarrowingConstraints {
|
||||
lists: ListStorage<ScopedNarrowingConstraintId, ScopedNarrowingConstraintPredicate>,
|
||||
lists: ListStorage<ScopedNarrowingConstraintPredicate>,
|
||||
}
|
||||
|
||||
// Building constraints
|
||||
@@ -80,7 +74,7 @@ pub(crate) struct NarrowingConstraints {
|
||||
/// A builder for creating narrowing constraints.
|
||||
#[derive(Debug, Default, Eq, PartialEq)]
|
||||
pub(crate) struct NarrowingConstraintsBuilder {
|
||||
lists: ListBuilder<ScopedNarrowingConstraintId, ScopedNarrowingConstraintPredicate>,
|
||||
lists: ListBuilder<ScopedNarrowingConstraintPredicate>,
|
||||
}
|
||||
|
||||
impl NarrowingConstraintsBuilder {
|
||||
@@ -93,9 +87,9 @@ impl NarrowingConstraintsBuilder {
|
||||
/// Adds a predicate to an existing narrowing constraint.
|
||||
pub(crate) fn add_predicate_to_constraint(
|
||||
&mut self,
|
||||
constraint: Option<ScopedNarrowingConstraintId>,
|
||||
constraint: ScopedNarrowingConstraint,
|
||||
predicate: ScopedNarrowingConstraintPredicate,
|
||||
) -> Option<ScopedNarrowingConstraintId> {
|
||||
) -> ScopedNarrowingConstraint {
|
||||
self.lists.insert(constraint, predicate)
|
||||
}
|
||||
|
||||
@@ -103,9 +97,9 @@ impl NarrowingConstraintsBuilder {
|
||||
/// that appear in both inputs.
|
||||
pub(crate) fn intersect_constraints(
|
||||
&mut self,
|
||||
a: Option<ScopedNarrowingConstraintId>,
|
||||
b: Option<ScopedNarrowingConstraintId>,
|
||||
) -> Option<ScopedNarrowingConstraintId> {
|
||||
a: ScopedNarrowingConstraint,
|
||||
b: ScopedNarrowingConstraint,
|
||||
) -> ScopedNarrowingConstraint {
|
||||
self.lists.intersect(a, b)
|
||||
}
|
||||
}
|
||||
@@ -113,15 +107,14 @@ impl NarrowingConstraintsBuilder {
|
||||
// Iteration
|
||||
// ---------
|
||||
|
||||
pub(crate) type NarrowingConstraintsIterator<'a> = std::iter::Copied<
|
||||
ListSetReverseIterator<'a, ScopedNarrowingConstraintId, ScopedNarrowingConstraintPredicate>,
|
||||
>;
|
||||
pub(crate) type NarrowingConstraintsIterator<'a> =
|
||||
std::iter::Copied<ListSetReverseIterator<'a, ScopedNarrowingConstraintPredicate>>;
|
||||
|
||||
impl NarrowingConstraints {
|
||||
/// Iterates over the predicates in a narrowing constraint.
|
||||
pub(crate) fn iter_predicates(
|
||||
&self,
|
||||
set: Option<ScopedNarrowingConstraintId>,
|
||||
set: ScopedNarrowingConstraint,
|
||||
) -> NarrowingConstraintsIterator<'_> {
|
||||
self.lists.iter_set_reverse(set).copied()
|
||||
}
|
||||
@@ -143,7 +136,7 @@ mod tests {
|
||||
impl NarrowingConstraintsBuilder {
|
||||
pub(crate) fn iter_predicates(
|
||||
&self,
|
||||
set: Option<ScopedNarrowingConstraintId>,
|
||||
set: ScopedNarrowingConstraint,
|
||||
) -> NarrowingConstraintsIterator<'_> {
|
||||
self.lists.iter_set_reverse(set).copied()
|
||||
}
|
||||
|
||||
@@ -6,8 +6,8 @@ use hashbrown::hash_map::RawEntryMut;
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::name::Name;
|
||||
use ruff_python_ast::{self as ast};
|
||||
use rustc_hash::FxHasher;
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
|
||||
@@ -47,7 +47,7 @@ use ruff_index::newtype_index;
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
use crate::semantic_index::narrowing_constraints::{
|
||||
NarrowingConstraintsBuilder, ScopedNarrowingConstraintId, ScopedNarrowingConstraintPredicate,
|
||||
NarrowingConstraintsBuilder, ScopedNarrowingConstraint, ScopedNarrowingConstraintPredicate,
|
||||
};
|
||||
use crate::semantic_index::visibility_constraints::{
|
||||
ScopedVisibilityConstraintId, VisibilityConstraintsBuilder,
|
||||
@@ -189,7 +189,7 @@ pub(super) struct SymbolBindings {
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct LiveBinding {
|
||||
pub(super) binding: ScopedDefinitionId,
|
||||
pub(super) narrowing_constraint: Option<ScopedNarrowingConstraintId>,
|
||||
pub(super) narrowing_constraint: ScopedNarrowingConstraint,
|
||||
pub(super) visibility_constraint: ScopedVisibilityConstraintId,
|
||||
}
|
||||
|
||||
@@ -199,7 +199,7 @@ impl SymbolBindings {
|
||||
fn unbound(scope_start_visibility: ScopedVisibilityConstraintId) -> Self {
|
||||
let initial_binding = LiveBinding {
|
||||
binding: ScopedDefinitionId::UNBOUND,
|
||||
narrowing_constraint: None,
|
||||
narrowing_constraint: ScopedNarrowingConstraint::empty(),
|
||||
visibility_constraint: scope_start_visibility,
|
||||
};
|
||||
Self {
|
||||
@@ -218,7 +218,7 @@ impl SymbolBindings {
|
||||
self.live_bindings.clear();
|
||||
self.live_bindings.push(LiveBinding {
|
||||
binding,
|
||||
narrowing_constraint: None,
|
||||
narrowing_constraint: ScopedNarrowingConstraint::empty(),
|
||||
visibility_constraint,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -545,7 +545,7 @@ mod tests {
|
||||
system_install_sys_prefix.join(&unix_site_packages);
|
||||
(system_home_path, system_exe_path, system_site_packages_path)
|
||||
};
|
||||
memory_fs.write_file(system_exe_path, "").unwrap();
|
||||
memory_fs.write_file_all(system_exe_path, "").unwrap();
|
||||
memory_fs
|
||||
.create_directory_all(&system_site_packages_path)
|
||||
.unwrap();
|
||||
@@ -562,7 +562,7 @@ mod tests {
|
||||
venv_sys_prefix.join(&unix_site_packages),
|
||||
)
|
||||
};
|
||||
memory_fs.write_file(&venv_exe, "").unwrap();
|
||||
memory_fs.write_file_all(&venv_exe, "").unwrap();
|
||||
memory_fs.create_directory_all(&site_packages_path).unwrap();
|
||||
|
||||
let pyvenv_cfg_path = venv_sys_prefix.join("pyvenv.cfg");
|
||||
@@ -576,7 +576,7 @@ mod tests {
|
||||
pyvenv_cfg_contents.push_str("include-system-site-packages = TRuE\n");
|
||||
}
|
||||
memory_fs
|
||||
.write_file(pyvenv_cfg_path, &pyvenv_cfg_contents)
|
||||
.write_file_all(pyvenv_cfg_path, &pyvenv_cfg_contents)
|
||||
.unwrap();
|
||||
|
||||
venv_sys_prefix
|
||||
@@ -740,7 +740,7 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file("/.venv", "")
|
||||
.write_file_all("/.venv", "")
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
@@ -767,7 +767,7 @@ mod tests {
|
||||
let memory_fs = system.memory_file_system();
|
||||
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
|
||||
memory_fs
|
||||
.write_file(&pyvenv_cfg_path, "home = bar = /.venv/bin")
|
||||
.write_file_all(&pyvenv_cfg_path, "home = bar = /.venv/bin")
|
||||
.unwrap();
|
||||
let venv_result = VirtualEnvironment::new("/.venv", &system);
|
||||
assert!(matches!(
|
||||
@@ -785,7 +785,9 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
let memory_fs = system.memory_file_system();
|
||||
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
|
||||
memory_fs.write_file(&pyvenv_cfg_path, "home =").unwrap();
|
||||
memory_fs
|
||||
.write_file_all(&pyvenv_cfg_path, "home =")
|
||||
.unwrap();
|
||||
let venv_result = VirtualEnvironment::new("/.venv", &system);
|
||||
assert!(matches!(
|
||||
venv_result,
|
||||
@@ -803,7 +805,7 @@ mod tests {
|
||||
let memory_fs = system.memory_file_system();
|
||||
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
|
||||
memory_fs
|
||||
.write_file(&pyvenv_cfg_path, "= whatever")
|
||||
.write_file_all(&pyvenv_cfg_path, "= whatever")
|
||||
.unwrap();
|
||||
let venv_result = VirtualEnvironment::new("/.venv", &system);
|
||||
assert!(matches!(
|
||||
@@ -821,7 +823,7 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
let memory_fs = system.memory_file_system();
|
||||
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
|
||||
memory_fs.write_file(&pyvenv_cfg_path, "").unwrap();
|
||||
memory_fs.write_file_all(&pyvenv_cfg_path, "").unwrap();
|
||||
let venv_result = VirtualEnvironment::new("/.venv", &system);
|
||||
assert!(matches!(
|
||||
venv_result,
|
||||
@@ -839,7 +841,7 @@ mod tests {
|
||||
let memory_fs = system.memory_file_system();
|
||||
let pyvenv_cfg_path = SystemPathBuf::from("/.venv/pyvenv.cfg");
|
||||
memory_fs
|
||||
.write_file(&pyvenv_cfg_path, "home = foo")
|
||||
.write_file_all(&pyvenv_cfg_path, "home = foo")
|
||||
.unwrap();
|
||||
let venv_result = VirtualEnvironment::new("/.venv", &system);
|
||||
assert!(matches!(
|
||||
|
||||
@@ -21,6 +21,15 @@ pub(crate) enum Boundness {
|
||||
PossiblyUnbound,
|
||||
}
|
||||
|
||||
impl Boundness {
|
||||
pub(crate) const fn max(self, other: Self) -> Self {
|
||||
match (self, other) {
|
||||
(Boundness::Bound, _) | (_, Boundness::Bound) => Boundness::Bound,
|
||||
(Boundness::PossiblyUnbound, Boundness::PossiblyUnbound) => Boundness::PossiblyUnbound,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The result of a symbol lookup, which can either be a (possibly unbound) type
|
||||
/// or a completely unbound symbol.
|
||||
///
|
||||
@@ -79,51 +88,6 @@ impl<'db> Symbol<'db> {
|
||||
.expect("Expected a (possibly unbound) type, not an unbound symbol")
|
||||
}
|
||||
|
||||
/// Transform the symbol into a [`LookupResult`],
|
||||
/// a [`Result`] type in which the `Ok` variant represents a definitely bound symbol
|
||||
/// and the `Err` variant represents a symbol that is either definitely or possibly unbound.
|
||||
pub(crate) fn into_lookup_result(self) -> LookupResult<'db> {
|
||||
match self {
|
||||
Symbol::Type(ty, Boundness::Bound) => Ok(ty),
|
||||
Symbol::Type(ty, Boundness::PossiblyUnbound) => Err(LookupError::PossiblyUnbound(ty)),
|
||||
Symbol::Unbound => Err(LookupError::Unbound),
|
||||
}
|
||||
}
|
||||
|
||||
/// Safely unwrap the symbol into a [`Type`].
|
||||
///
|
||||
/// If the symbol is definitely unbound or possibly unbound, it will be transformed into a
|
||||
/// [`LookupError`] and `diagnostic_fn` will be applied to the error value before returning
|
||||
/// the result of `diagnostic_fn` (which will be a [`Type`]). This allows the caller to ensure
|
||||
/// that a diagnostic is emitted if the symbol is possibly or definitely unbound.
|
||||
pub(crate) fn unwrap_with_diagnostic(
|
||||
self,
|
||||
diagnostic_fn: impl FnOnce(LookupError<'db>) -> Type<'db>,
|
||||
) -> Type<'db> {
|
||||
self.into_lookup_result().unwrap_or_else(diagnostic_fn)
|
||||
}
|
||||
|
||||
/// Fallback (partially or fully) to another symbol if `self` is partially or fully unbound.
|
||||
///
|
||||
/// 1. If `self` is definitely bound, return `self` without evaluating `fallback_fn()`.
|
||||
/// 2. Else, evaluate `fallback_fn()`:
|
||||
/// a. If `self` is definitely unbound, return the result of `fallback_fn()`.
|
||||
/// b. Else, if `fallback` is definitely unbound, return `self`.
|
||||
/// c. Else, if `self` is possibly unbound and `fallback` is definitely bound,
|
||||
/// return `Symbol(<union of self-type and fallback-type>, Boundness::Bound)`
|
||||
/// d. Else, if `self` is possibly unbound and `fallback` is possibly unbound,
|
||||
/// return `Symbol(<union of self-type and fallback-type>, Boundness::PossiblyUnbound)`
|
||||
#[must_use]
|
||||
pub(crate) fn or_fall_back_to(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
fallback_fn: impl FnOnce() -> Self,
|
||||
) -> Self {
|
||||
self.into_lookup_result()
|
||||
.or_else(|lookup_error| lookup_error.or_fall_back_to(db, fallback_fn()))
|
||||
.into()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn map_type(self, f: impl FnOnce(Type<'db>) -> Type<'db>) -> Symbol<'db> {
|
||||
match self {
|
||||
@@ -131,14 +95,28 @@ impl<'db> Symbol<'db> {
|
||||
Symbol::Unbound => Symbol::Unbound,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn with_qualifiers(self, qualifiers: TypeQualifiers) -> SymbolAndQualifiers<'db> {
|
||||
SymbolAndQualifiers {
|
||||
symbol: self,
|
||||
qualifiers,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> From<LookupResult<'db>> for Symbol<'db> {
|
||||
impl<'db> From<LookupResult<'db>> for SymbolAndQualifiers<'db> {
|
||||
fn from(value: LookupResult<'db>) -> Self {
|
||||
match value {
|
||||
Ok(ty) => Symbol::Type(ty, Boundness::Bound),
|
||||
Err(LookupError::Unbound) => Symbol::Unbound,
|
||||
Err(LookupError::PossiblyUnbound(ty)) => Symbol::Type(ty, Boundness::PossiblyUnbound),
|
||||
Ok(type_and_qualifiers) => {
|
||||
Symbol::Type(type_and_qualifiers.inner_type(), Boundness::Bound)
|
||||
.with_qualifiers(type_and_qualifiers.qualifiers())
|
||||
}
|
||||
Err(LookupError::Unbound(qualifiers)) => Symbol::Unbound.with_qualifiers(qualifiers),
|
||||
Err(LookupError::PossiblyUnbound(type_and_qualifiers)) => {
|
||||
Symbol::Type(type_and_qualifiers.inner_type(), Boundness::PossiblyUnbound)
|
||||
.with_qualifiers(type_and_qualifiers.qualifiers())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -146,8 +124,8 @@ impl<'db> From<LookupResult<'db>> for Symbol<'db> {
|
||||
/// Possible ways in which a symbol lookup can (possibly or definitely) fail.
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub(crate) enum LookupError<'db> {
|
||||
Unbound,
|
||||
PossiblyUnbound(Type<'db>),
|
||||
Unbound(TypeQualifiers),
|
||||
PossiblyUnbound(TypeAndQualifiers<'db>),
|
||||
}
|
||||
|
||||
impl<'db> LookupError<'db> {
|
||||
@@ -155,18 +133,22 @@ impl<'db> LookupError<'db> {
|
||||
pub(crate) fn or_fall_back_to(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
fallback: Symbol<'db>,
|
||||
fallback: SymbolAndQualifiers<'db>,
|
||||
) -> LookupResult<'db> {
|
||||
let fallback = fallback.into_lookup_result();
|
||||
match (&self, &fallback) {
|
||||
(LookupError::Unbound, _) => fallback,
|
||||
(LookupError::PossiblyUnbound { .. }, Err(LookupError::Unbound)) => Err(self),
|
||||
(LookupError::PossiblyUnbound(ty), Ok(ty2)) => {
|
||||
Ok(UnionType::from_elements(db, [ty, ty2]))
|
||||
(LookupError::Unbound(_), _) => fallback,
|
||||
(LookupError::PossiblyUnbound { .. }, Err(LookupError::Unbound(_))) => Err(self),
|
||||
(LookupError::PossiblyUnbound(ty), Ok(ty2)) => Ok(TypeAndQualifiers::new(
|
||||
UnionType::from_elements(db, [ty.inner_type(), ty2.inner_type()]),
|
||||
ty.qualifiers().union(ty2.qualifiers()),
|
||||
)),
|
||||
(LookupError::PossiblyUnbound(ty), Err(LookupError::PossiblyUnbound(ty2))) => {
|
||||
Err(LookupError::PossiblyUnbound(TypeAndQualifiers::new(
|
||||
UnionType::from_elements(db, [ty.inner_type(), ty2.inner_type()]),
|
||||
ty.qualifiers().union(ty2.qualifiers()),
|
||||
)))
|
||||
}
|
||||
(LookupError::PossiblyUnbound(ty), Err(LookupError::PossiblyUnbound(ty2))) => Err(
|
||||
LookupError::PossiblyUnbound(UnionType::from_elements(db, [ty, ty2])),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -176,17 +158,25 @@ impl<'db> LookupError<'db> {
|
||||
///
|
||||
/// Note that this type is exactly isomorphic to [`Symbol`].
|
||||
/// In the future, we could possibly consider removing `Symbol` and using this type everywhere instead.
|
||||
pub(crate) type LookupResult<'db> = Result<Type<'db>, LookupError<'db>>;
|
||||
pub(crate) type LookupResult<'db> = Result<TypeAndQualifiers<'db>, LookupError<'db>>;
|
||||
|
||||
/// Infer the public type of a symbol (its type as seen from outside its scope) in the given
|
||||
/// `scope`.
|
||||
pub(crate) fn symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db> {
|
||||
pub(crate) fn symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
scope: ScopeId<'db>,
|
||||
name: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
symbol_impl(db, scope, name, RequiresExplicitReExport::No)
|
||||
}
|
||||
|
||||
/// Infer the public type of a class symbol (its type as seen from outside its scope) in the given
|
||||
/// `scope`.
|
||||
pub(crate) fn class_symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Symbol<'db> {
|
||||
pub(crate) fn class_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
scope: ScopeId<'db>,
|
||||
name: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
symbol_table(db, scope)
|
||||
.symbol_id_by_name(name)
|
||||
.map(|symbol| {
|
||||
@@ -195,10 +185,14 @@ pub(crate) fn class_symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str
|
||||
if symbol_and_quals.is_class_var() {
|
||||
// For declared class vars we do not need to check if they have bindings,
|
||||
// we just trust the declaration.
|
||||
return symbol_and_quals.0;
|
||||
return symbol_and_quals;
|
||||
}
|
||||
|
||||
if let SymbolAndQualifiers(Symbol::Type(ty, _), _) = symbol_and_quals {
|
||||
if let SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(ty, _),
|
||||
qualifiers,
|
||||
} = symbol_and_quals
|
||||
{
|
||||
// Otherwise, we need to check if the symbol has bindings
|
||||
let use_def = use_def_map(db, scope);
|
||||
let bindings = use_def.public_bindings(symbol);
|
||||
@@ -208,14 +202,16 @@ pub(crate) fn class_symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str
|
||||
// TODO: we should not need to calculate inferred type second time. This is a temporary
|
||||
// solution until the notion of Boundness and Declaredness is split. See #16036, #16264
|
||||
match inferred {
|
||||
Symbol::Unbound => Symbol::Unbound,
|
||||
Symbol::Type(_, boundness) => Symbol::Type(ty, boundness),
|
||||
Symbol::Unbound => Symbol::Unbound.with_qualifiers(qualifiers),
|
||||
Symbol::Type(_, boundness) => {
|
||||
Symbol::Type(ty, boundness).with_qualifiers(qualifiers)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Symbol::Unbound
|
||||
Symbol::Unbound.into()
|
||||
}
|
||||
})
|
||||
.unwrap_or(Symbol::Unbound)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Infers the public type of an explicit module-global symbol as seen from within the same file.
|
||||
@@ -226,7 +222,11 @@ pub(crate) fn class_symbol<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str
|
||||
/// those additional symbols.
|
||||
///
|
||||
/// Use [`imported_symbol`] to perform the lookup as seen from outside the file (e.g. via imports).
|
||||
pub(crate) fn explicit_global_symbol<'db>(db: &'db dyn Db, file: File, name: &str) -> Symbol<'db> {
|
||||
pub(crate) fn explicit_global_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
name: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
symbol_impl(
|
||||
db,
|
||||
global_scope(db, file),
|
||||
@@ -243,13 +243,21 @@ pub(crate) fn explicit_global_symbol<'db>(db: &'db dyn Db, file: File, name: &st
|
||||
///
|
||||
/// Use [`imported_symbol`] to perform the lookup as seen from outside the file (e.g. via imports).
|
||||
#[cfg(test)]
|
||||
pub(crate) fn global_symbol<'db>(db: &'db dyn Db, file: File, name: &str) -> Symbol<'db> {
|
||||
pub(crate) fn global_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
name: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
explicit_global_symbol(db, file, name)
|
||||
.or_fall_back_to(db, || module_type_implicit_global_symbol(db, name))
|
||||
}
|
||||
|
||||
/// Infers the public type of an imported symbol.
|
||||
pub(crate) fn imported_symbol<'db>(db: &'db dyn Db, module: &Module, name: &str) -> Symbol<'db> {
|
||||
pub(crate) fn imported_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
module: &Module,
|
||||
name: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
// If it's not found in the global scope, check if it's present as an instance on
|
||||
// `types.ModuleType` or `builtins.object`.
|
||||
//
|
||||
@@ -267,7 +275,7 @@ pub(crate) fn imported_symbol<'db>(db: &'db dyn Db, module: &Module, name: &str)
|
||||
// module we're dealing with.
|
||||
external_symbol_impl(db, module.file(), name).or_fall_back_to(db, || {
|
||||
if name == "__getattr__" {
|
||||
Symbol::Unbound
|
||||
Symbol::Unbound.into()
|
||||
} else {
|
||||
KnownClass::ModuleType.to_instance(db).member(db, name)
|
||||
}
|
||||
@@ -281,7 +289,7 @@ pub(crate) fn imported_symbol<'db>(db: &'db dyn Db, module: &Module, name: &str)
|
||||
/// Note that this function is only intended for use in the context of the builtins *namespace*
|
||||
/// and should not be used when a symbol is being explicitly imported from the `builtins` module
|
||||
/// (e.g. `from builtins import int`).
|
||||
pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> SymbolAndQualifiers<'db> {
|
||||
resolve_module(db, &KnownModule::Builtins.name())
|
||||
.map(|module| {
|
||||
external_symbol_impl(db, module.file(), symbol).or_fall_back_to(db, || {
|
||||
@@ -291,7 +299,7 @@ pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db>
|
||||
module_type_implicit_global_symbol(db, symbol)
|
||||
})
|
||||
})
|
||||
.unwrap_or(Symbol::Unbound)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in a given known module.
|
||||
@@ -301,10 +309,10 @@ pub(crate) fn known_module_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
known_module: KnownModule,
|
||||
symbol: &str,
|
||||
) -> Symbol<'db> {
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
resolve_module(db, &known_module.name())
|
||||
.map(|module| imported_symbol(db, &module, symbol))
|
||||
.unwrap_or(Symbol::Unbound)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `typing` module namespace.
|
||||
@@ -312,7 +320,7 @@ pub(crate) fn known_module_symbol<'db>(
|
||||
/// Returns `Symbol::Unbound` if the `typing` module isn't available for some reason.
|
||||
#[inline]
|
||||
#[cfg(test)]
|
||||
pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> SymbolAndQualifiers<'db> {
|
||||
known_module_symbol(db, KnownModule::Typing, symbol)
|
||||
}
|
||||
|
||||
@@ -320,7 +328,10 @@ pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `typing_extensions` module isn't available for some reason.
|
||||
#[inline]
|
||||
pub(crate) fn typing_extensions_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
pub(crate) fn typing_extensions_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
symbol: &str,
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
known_module_symbol(db, KnownModule::TypingExtensions, symbol)
|
||||
}
|
||||
|
||||
@@ -383,26 +394,97 @@ pub(crate) type SymbolFromDeclarationsResult<'db> =
|
||||
///
|
||||
/// [`CLASS_VAR`]: crate::types::TypeQualifiers::CLASS_VAR
|
||||
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
|
||||
pub(crate) struct SymbolAndQualifiers<'db>(pub(crate) Symbol<'db>, pub(crate) TypeQualifiers);
|
||||
pub(crate) struct SymbolAndQualifiers<'db> {
|
||||
pub(crate) symbol: Symbol<'db>,
|
||||
pub(crate) qualifiers: TypeQualifiers,
|
||||
}
|
||||
|
||||
impl SymbolAndQualifiers<'_> {
|
||||
impl Default for SymbolAndQualifiers<'_> {
|
||||
fn default() -> Self {
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Unbound,
|
||||
qualifiers: TypeQualifiers::empty(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> SymbolAndQualifiers<'db> {
|
||||
/// Constructor that creates a [`SymbolAndQualifiers`] instance with a [`TodoType`] type
|
||||
/// and no qualifiers.
|
||||
///
|
||||
/// [`TodoType`]: crate::types::TodoType
|
||||
pub(crate) fn todo(message: &'static str) -> Self {
|
||||
Self(Symbol::todo(message), TypeQualifiers::empty())
|
||||
Self {
|
||||
symbol: Symbol::todo(message),
|
||||
qualifiers: TypeQualifiers::empty(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the symbol has a `ClassVar` type qualifier.
|
||||
pub(crate) fn is_class_var(&self) -> bool {
|
||||
self.1.contains(TypeQualifiers::CLASS_VAR)
|
||||
self.qualifiers.contains(TypeQualifiers::CLASS_VAR)
|
||||
}
|
||||
|
||||
/// Transform symbol and qualifiers into a [`LookupResult`],
|
||||
/// a [`Result`] type in which the `Ok` variant represents a definitely bound symbol
|
||||
/// and the `Err` variant represents a symbol that is either definitely or possibly unbound.
|
||||
pub(crate) fn into_lookup_result(self) -> LookupResult<'db> {
|
||||
match self {
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(ty, Boundness::Bound),
|
||||
qualifiers,
|
||||
} => Ok(TypeAndQualifiers::new(ty, qualifiers)),
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(ty, Boundness::PossiblyUnbound),
|
||||
qualifiers,
|
||||
} => Err(LookupError::PossiblyUnbound(TypeAndQualifiers::new(
|
||||
ty, qualifiers,
|
||||
))),
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Unbound,
|
||||
qualifiers,
|
||||
} => Err(LookupError::Unbound(qualifiers)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Safely unwrap the symbol and the qualifiers into a [`TypeQualifiers`].
|
||||
///
|
||||
/// If the symbol is definitely unbound or possibly unbound, it will be transformed into a
|
||||
/// [`LookupError`] and `diagnostic_fn` will be applied to the error value before returning
|
||||
/// the result of `diagnostic_fn` (which will be a [`TypeQualifiers`]). This allows the caller
|
||||
/// to ensure that a diagnostic is emitted if the symbol is possibly or definitely unbound.
|
||||
pub(crate) fn unwrap_with_diagnostic(
|
||||
self,
|
||||
diagnostic_fn: impl FnOnce(LookupError<'db>) -> TypeAndQualifiers<'db>,
|
||||
) -> TypeAndQualifiers<'db> {
|
||||
self.into_lookup_result().unwrap_or_else(diagnostic_fn)
|
||||
}
|
||||
|
||||
/// Fallback (partially or fully) to another symbol if `self` is partially or fully unbound.
|
||||
///
|
||||
/// 1. If `self` is definitely bound, return `self` without evaluating `fallback_fn()`.
|
||||
/// 2. Else, evaluate `fallback_fn()`:
|
||||
/// a. If `self` is definitely unbound, return the result of `fallback_fn()`.
|
||||
/// b. Else, if `fallback` is definitely unbound, return `self`.
|
||||
/// c. Else, if `self` is possibly unbound and `fallback` is definitely bound,
|
||||
/// return `Symbol(<union of self-type and fallback-type>, Boundness::Bound)`
|
||||
/// d. Else, if `self` is possibly unbound and `fallback` is possibly unbound,
|
||||
/// return `Symbol(<union of self-type and fallback-type>, Boundness::PossiblyUnbound)`
|
||||
#[must_use]
|
||||
pub(crate) fn or_fall_back_to(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
fallback_fn: impl FnOnce() -> SymbolAndQualifiers<'db>,
|
||||
) -> Self {
|
||||
self.into_lookup_result()
|
||||
.or_else(|lookup_error| lookup_error.or_fall_back_to(db, fallback_fn()))
|
||||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> From<Symbol<'db>> for SymbolAndQualifiers<'db> {
|
||||
fn from(symbol: Symbol<'db>) -> Self {
|
||||
SymbolAndQualifiers(symbol, TypeQualifiers::empty())
|
||||
symbol.with_qualifiers(TypeQualifiers::empty())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -423,11 +505,17 @@ fn symbol_by_id<'db>(
|
||||
|
||||
match declared {
|
||||
// Symbol is declared, trust the declared type
|
||||
Ok(symbol_and_quals @ SymbolAndQualifiers(Symbol::Type(_, Boundness::Bound), _)) => {
|
||||
symbol_and_quals
|
||||
}
|
||||
Ok(
|
||||
symbol_and_quals @ SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(_, Boundness::Bound),
|
||||
qualifiers: _,
|
||||
},
|
||||
) => symbol_and_quals,
|
||||
// Symbol is possibly declared
|
||||
Ok(SymbolAndQualifiers(Symbol::Type(declared_ty, Boundness::PossiblyUnbound), quals)) => {
|
||||
Ok(SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(declared_ty, Boundness::PossiblyUnbound),
|
||||
qualifiers,
|
||||
}) => {
|
||||
let bindings = use_def.public_bindings(symbol_id);
|
||||
let inferred = symbol_from_bindings_impl(db, bindings, requires_explicit_reexport);
|
||||
|
||||
@@ -446,10 +534,13 @@ fn symbol_by_id<'db>(
|
||||
),
|
||||
};
|
||||
|
||||
SymbolAndQualifiers(symbol, quals)
|
||||
SymbolAndQualifiers { symbol, qualifiers }
|
||||
}
|
||||
// Symbol is undeclared, return the union of `Unknown` with the inferred type
|
||||
Ok(SymbolAndQualifiers(Symbol::Unbound, _)) => {
|
||||
Ok(SymbolAndQualifiers {
|
||||
symbol: Symbol::Unbound,
|
||||
qualifiers: _,
|
||||
}) => {
|
||||
let bindings = use_def.public_bindings(symbol_id);
|
||||
let inferred = symbol_from_bindings_impl(db, bindings, requires_explicit_reexport);
|
||||
|
||||
@@ -458,20 +549,23 @@ fn symbol_by_id<'db>(
|
||||
// a diagnostic if we see it being modified externally. In type inference, we
|
||||
// can assign a "narrow" type to it even if it is not *declared*. This means, we
|
||||
// do not have to call [`widen_type_for_undeclared_public_symbol`].
|
||||
let is_considered_non_modifiable =
|
||||
symbol_table(db, scope).symbol(symbol_id).name() == "__slots__";
|
||||
//
|
||||
// `TYPE_CHECKING` is a special variable that should only be assigned `False`
|
||||
// at runtime, but is always considered `True` in type checking.
|
||||
// See mdtest/known_constants.md#user-defined-type_checking for details.
|
||||
let is_considered_non_modifiable = matches!(
|
||||
symbol_table(db, scope).symbol(symbol_id).name().as_str(),
|
||||
"__slots__" | "TYPE_CHECKING"
|
||||
);
|
||||
|
||||
widen_type_for_undeclared_public_symbol(db, inferred, is_considered_non_modifiable)
|
||||
.into()
|
||||
}
|
||||
// Symbol has conflicting declared types
|
||||
Err((declared_ty, _)) => {
|
||||
Err((declared, _)) => {
|
||||
// Intentionally ignore conflicting declared types; that's not our problem,
|
||||
// it's the problem of the module we are importing from.
|
||||
SymbolAndQualifiers(
|
||||
Symbol::bound(declared_ty.inner_type()),
|
||||
declared_ty.qualifiers(),
|
||||
)
|
||||
Symbol::bound(declared.inner_type()).with_qualifiers(declared.qualifiers())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -497,24 +591,16 @@ fn symbol_impl<'db>(
|
||||
scope: ScopeId<'db>,
|
||||
name: &str,
|
||||
requires_explicit_reexport: RequiresExplicitReExport,
|
||||
) -> Symbol<'db> {
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
let _span = tracing::trace_span!("symbol", ?name).entered();
|
||||
|
||||
// We don't need to check for `typing_extensions` here, because `typing_extensions.TYPE_CHECKING`
|
||||
// is just a re-export of `typing.TYPE_CHECKING`.
|
||||
if name == "TYPE_CHECKING"
|
||||
&& file_to_module(db, scope.file(db))
|
||||
.is_some_and(|module| module.is_known(KnownModule::Typing))
|
||||
{
|
||||
return Symbol::bound(Type::BooleanLiteral(true));
|
||||
}
|
||||
if name == "platform"
|
||||
&& file_to_module(db, scope.file(db))
|
||||
.is_some_and(|module| module.is_known(KnownModule::Sys))
|
||||
{
|
||||
match Program::get(db).python_platform(db) {
|
||||
crate::PythonPlatform::Identifier(platform) => {
|
||||
return Symbol::bound(Type::string_literal(db, platform.as_str()));
|
||||
return Symbol::bound(Type::string_literal(db, platform.as_str())).into();
|
||||
}
|
||||
crate::PythonPlatform::All => {
|
||||
// Fall through to the looked up type
|
||||
@@ -524,8 +610,8 @@ fn symbol_impl<'db>(
|
||||
|
||||
symbol_table(db, scope)
|
||||
.symbol_id_by_name(name)
|
||||
.map(|symbol| symbol_by_id(db, scope, symbol, requires_explicit_reexport).0)
|
||||
.unwrap_or(Symbol::Unbound)
|
||||
.map(|symbol| symbol_by_id(db, scope, symbol, requires_explicit_reexport))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Implementation of [`symbol_from_bindings`].
|
||||
@@ -671,7 +757,7 @@ fn symbol_from_declarations_impl<'db>(
|
||||
|
||||
if let Some(first) = types.next() {
|
||||
let mut conflicting: Vec<Type<'db>> = vec![];
|
||||
let declared_ty = if let Some(second) = types.next() {
|
||||
let declared = if let Some(second) = types.next() {
|
||||
let ty_first = first.inner_type();
|
||||
let mut qualifiers = first.qualifiers();
|
||||
|
||||
@@ -697,13 +783,11 @@ fn symbol_from_declarations_impl<'db>(
|
||||
Truthiness::Ambiguous => Boundness::PossiblyUnbound,
|
||||
};
|
||||
|
||||
Ok(SymbolAndQualifiers(
|
||||
Symbol::Type(declared_ty.inner_type(), boundness),
|
||||
declared_ty.qualifiers(),
|
||||
))
|
||||
Ok(Symbol::Type(declared.inner_type(), boundness)
|
||||
.with_qualifiers(declared.qualifiers()))
|
||||
} else {
|
||||
Err((
|
||||
declared_ty,
|
||||
declared,
|
||||
std::iter::once(first.inner_type())
|
||||
.chain(conflicting)
|
||||
.collect(),
|
||||
@@ -719,6 +803,7 @@ mod implicit_globals {
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::semantic_index::{self, symbol_table};
|
||||
use crate::symbol::SymbolAndQualifiers;
|
||||
use crate::types::KnownClass;
|
||||
|
||||
use super::Symbol;
|
||||
@@ -740,7 +825,7 @@ mod implicit_globals {
|
||||
pub(crate) fn module_type_implicit_global_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
name: &str,
|
||||
) -> Symbol<'db> {
|
||||
) -> SymbolAndQualifiers<'db> {
|
||||
// In general we wouldn't check to see whether a symbol exists on a class before doing the
|
||||
// `.member()` call on the instance type -- we'd just do the `.member`() call on the instance
|
||||
// type, since it has the same end result. The reason to only call `.member()` on `ModuleType`
|
||||
@@ -752,7 +837,7 @@ mod implicit_globals {
|
||||
{
|
||||
KnownClass::ModuleType.to_instance(db).member(db, name)
|
||||
} else {
|
||||
Symbol::Unbound
|
||||
Symbol::Unbound.into()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -822,7 +907,7 @@ mod implicit_globals {
|
||||
///
|
||||
/// This will take into account whether the definition of the symbol is being explicitly
|
||||
/// re-exported from a stub file or not.
|
||||
fn external_symbol_impl<'db>(db: &'db dyn Db, file: File, name: &str) -> Symbol<'db> {
|
||||
fn external_symbol_impl<'db>(db: &'db dyn Db, file: File, name: &str) -> SymbolAndQualifiers<'db> {
|
||||
symbol_impl(
|
||||
db,
|
||||
global_scope(db, file),
|
||||
@@ -883,48 +968,45 @@ mod tests {
|
||||
let ty1 = Type::IntLiteral(1);
|
||||
let ty2 = Type::IntLiteral(2);
|
||||
|
||||
let unbound = || Symbol::Unbound.with_qualifiers(TypeQualifiers::empty());
|
||||
|
||||
let possibly_unbound_ty1 =
|
||||
|| Symbol::Type(ty1, PossiblyUnbound).with_qualifiers(TypeQualifiers::empty());
|
||||
let possibly_unbound_ty2 =
|
||||
|| Symbol::Type(ty2, PossiblyUnbound).with_qualifiers(TypeQualifiers::empty());
|
||||
|
||||
let bound_ty1 = || Symbol::Type(ty1, Bound).with_qualifiers(TypeQualifiers::empty());
|
||||
let bound_ty2 = || Symbol::Type(ty2, Bound).with_qualifiers(TypeQualifiers::empty());
|
||||
|
||||
// Start from an unbound symbol
|
||||
assert_eq!(unbound().or_fall_back_to(&db, unbound), unbound());
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, || Symbol::Unbound),
|
||||
Symbol::Unbound
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, || Symbol::Type(ty1, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, || Symbol::Type(ty1, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
unbound().or_fall_back_to(&db, possibly_unbound_ty1),
|
||||
possibly_unbound_ty1()
|
||||
);
|
||||
assert_eq!(unbound().or_fall_back_to(&db, bound_ty1), bound_ty1());
|
||||
|
||||
// Start from a possibly unbound symbol
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, || Symbol::Unbound),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
possibly_unbound_ty1().or_fall_back_to(&db, unbound),
|
||||
possibly_unbound_ty1()
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
.or_fall_back_to(&db, || Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty1, ty2]), PossiblyUnbound)
|
||||
possibly_unbound_ty1().or_fall_back_to(&db, possibly_unbound_ty2),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty1, ty2]), PossiblyUnbound).into()
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, || Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty1, ty2]), Bound)
|
||||
possibly_unbound_ty1().or_fall_back_to(&db, bound_ty2),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty1, ty2]), Bound).into()
|
||||
);
|
||||
|
||||
// Start from a definitely bound symbol
|
||||
assert_eq!(bound_ty1().or_fall_back_to(&db, unbound), bound_ty1());
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, || Symbol::Unbound),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, || Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, || Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
bound_ty1().or_fall_back_to(&db, possibly_unbound_ty2),
|
||||
bound_ty1()
|
||||
);
|
||||
assert_eq!(bound_ty1().or_fall_back_to(&db, bound_ty2), bound_ty1());
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
@@ -939,24 +1021,27 @@ mod tests {
|
||||
#[test]
|
||||
fn implicit_builtin_globals() {
|
||||
let db = setup_db();
|
||||
assert_bound_string_symbol(&db, builtins_symbol(&db, "__name__"));
|
||||
assert_bound_string_symbol(&db, builtins_symbol(&db, "__name__").symbol);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn implicit_typing_globals() {
|
||||
let db = setup_db();
|
||||
assert_bound_string_symbol(&db, typing_symbol(&db, "__name__"));
|
||||
assert_bound_string_symbol(&db, typing_symbol(&db, "__name__").symbol);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn implicit_typing_extensions_globals() {
|
||||
let db = setup_db();
|
||||
assert_bound_string_symbol(&db, typing_extensions_symbol(&db, "__name__"));
|
||||
assert_bound_string_symbol(&db, typing_extensions_symbol(&db, "__name__").symbol);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn implicit_sys_globals() {
|
||||
let db = setup_db();
|
||||
assert_bound_string_symbol(&db, known_module_symbol(&db, KnownModule::Sys, "__name__"));
|
||||
assert_bound_string_symbol(
|
||||
&db,
|
||||
known_module_symbol(&db, KnownModule::Sys, "__name__").symbol,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -6,7 +6,7 @@ use crate::types::diagnostic::{
|
||||
};
|
||||
use crate::types::signatures::Parameter;
|
||||
use crate::types::{todo_type, CallableType, UnionType};
|
||||
use ruff_db::diagnostic::{SecondaryDiagnosticMessage, Span};
|
||||
use ruff_db::diagnostic::{OldSecondaryDiagnosticMessage, Span};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -388,7 +388,7 @@ impl<'db> CallBindingError<'db> {
|
||||
if let Some(span) =
|
||||
Self::parameter_span_from_index(context.db(), callable_ty, parameter.index)
|
||||
{
|
||||
messages.push(SecondaryDiagnosticMessage::new(
|
||||
messages.push(OldSecondaryDiagnosticMessage::new(
|
||||
span,
|
||||
"parameter declared in function definition here",
|
||||
));
|
||||
|
||||
@@ -9,8 +9,8 @@ use crate::{
|
||||
Boundness, LookupError, LookupResult, Symbol, SymbolAndQualifiers,
|
||||
},
|
||||
types::{
|
||||
definition_expression_type, CallArguments, CallError, MetaclassCandidate, TupleType,
|
||||
UnionBuilder, UnionCallError,
|
||||
definition_expression_type, CallArguments, CallError, DynamicType, MetaclassCandidate,
|
||||
TupleType, UnionBuilder, UnionCallError, UnionType,
|
||||
},
|
||||
Db, KnownModule, Program,
|
||||
};
|
||||
@@ -318,10 +318,10 @@ impl<'db> Class<'db> {
|
||||
/// The member resolves to a member on the class itself or any of its proper superclasses.
|
||||
///
|
||||
/// TODO: Should this be made private...?
|
||||
pub(super) fn class_member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
pub(super) fn class_member(self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
|
||||
if name == "__mro__" {
|
||||
let tuple_elements = self.iter_mro(db).map(Type::from);
|
||||
return Symbol::bound(TupleType::from_elements(db, tuple_elements));
|
||||
return Symbol::bound(TupleType::from_elements(db, tuple_elements)).into();
|
||||
}
|
||||
|
||||
// If we encounter a dynamic type in this class's MRO, we'll save that dynamic type
|
||||
@@ -332,10 +332,16 @@ impl<'db> Class<'db> {
|
||||
// from the non-dynamic members of the class's MRO.
|
||||
let mut dynamic_type_to_intersect_with: Option<Type<'db>> = None;
|
||||
|
||||
let mut lookup_result: LookupResult<'db> = Err(LookupError::Unbound);
|
||||
let mut lookup_result: LookupResult<'db> =
|
||||
Err(LookupError::Unbound(TypeQualifiers::empty()));
|
||||
|
||||
for superclass in self.iter_mro(db) {
|
||||
match superclass {
|
||||
ClassBase::Dynamic(DynamicType::TodoProtocol) => {
|
||||
// TODO: We currently skip `Protocol` when looking up class members, in order to
|
||||
// avoid creating many dynamic types in our test suite that would otherwise
|
||||
// result from looking up attributes on builtin types like `str`, `list`, `tuple`
|
||||
}
|
||||
ClassBase::Dynamic(_) => {
|
||||
// Note: calling `Type::from(superclass).member()` would be incorrect here.
|
||||
// What we'd really want is a `Type::Any.own_class_member()` method,
|
||||
@@ -353,15 +359,33 @@ impl<'db> Class<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
match (Symbol::from(lookup_result), dynamic_type_to_intersect_with) {
|
||||
(symbol, None) => symbol,
|
||||
(Symbol::Type(ty, _), Some(dynamic_type)) => Symbol::bound(
|
||||
match (
|
||||
SymbolAndQualifiers::from(lookup_result),
|
||||
dynamic_type_to_intersect_with,
|
||||
) {
|
||||
(symbol_and_qualifiers, None) => symbol_and_qualifiers,
|
||||
|
||||
(
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(ty, _),
|
||||
qualifiers,
|
||||
},
|
||||
Some(dynamic_type),
|
||||
) => Symbol::bound(
|
||||
IntersectionBuilder::new(db)
|
||||
.add_positive(ty)
|
||||
.add_positive(dynamic_type)
|
||||
.build(),
|
||||
),
|
||||
(Symbol::Unbound, Some(dynamic_type)) => Symbol::bound(dynamic_type),
|
||||
)
|
||||
.with_qualifiers(qualifiers),
|
||||
|
||||
(
|
||||
SymbolAndQualifiers {
|
||||
symbol: Symbol::Unbound,
|
||||
qualifiers,
|
||||
},
|
||||
Some(dynamic_type),
|
||||
) => Symbol::bound(dynamic_type).with_qualifiers(qualifiers),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -371,7 +395,7 @@ impl<'db> Class<'db> {
|
||||
/// Returns [`Symbol::Unbound`] if `name` cannot be found in this class's scope
|
||||
/// directly. Use [`Class::class_member`] if you require a method that will
|
||||
/// traverse through the MRO until it finds the member.
|
||||
pub(super) fn own_class_member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
pub(super) fn own_class_member(self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
|
||||
let body_scope = self.body_scope(db);
|
||||
class_symbol(db, body_scope, name)
|
||||
}
|
||||
@@ -388,17 +412,24 @@ impl<'db> Class<'db> {
|
||||
|
||||
for superclass in self.iter_mro(db) {
|
||||
match superclass {
|
||||
ClassBase::Dynamic(DynamicType::TodoProtocol) => {
|
||||
// TODO: We currently skip `Protocol` when looking up instance members, in order to
|
||||
// avoid creating many dynamic types in our test suite that would otherwise
|
||||
// result from looking up attributes on builtin types like `str`, `list`, `tuple`
|
||||
}
|
||||
ClassBase::Dynamic(_) => {
|
||||
return SymbolAndQualifiers::todo(
|
||||
"instance attribute on class with dynamic base",
|
||||
);
|
||||
}
|
||||
ClassBase::Class(class) => {
|
||||
if let member @ SymbolAndQualifiers(Symbol::Type(ty, boundness), qualifiers) =
|
||||
class.own_instance_member(db, name)
|
||||
if let member @ SymbolAndQualifiers {
|
||||
symbol: Symbol::Type(ty, boundness),
|
||||
qualifiers,
|
||||
} = class.own_instance_member(db, name)
|
||||
{
|
||||
// TODO: We could raise a diagnostic here if there are conflicting type qualifiers
|
||||
union_qualifiers = union_qualifiers.union(qualifiers);
|
||||
union_qualifiers |= qualifiers;
|
||||
|
||||
if boundness == Boundness::Bound {
|
||||
if union.is_empty() {
|
||||
@@ -406,10 +437,8 @@ impl<'db> Class<'db> {
|
||||
return member;
|
||||
}
|
||||
|
||||
return SymbolAndQualifiers(
|
||||
Symbol::bound(union.add(ty).build()),
|
||||
union_qualifiers,
|
||||
);
|
||||
return Symbol::bound(union.add(ty).build())
|
||||
.with_qualifiers(union_qualifiers);
|
||||
}
|
||||
|
||||
// If we see a possibly-unbound symbol, we need to keep looking
|
||||
@@ -421,15 +450,13 @@ impl<'db> Class<'db> {
|
||||
}
|
||||
|
||||
if union.is_empty() {
|
||||
SymbolAndQualifiers(Symbol::Unbound, TypeQualifiers::empty())
|
||||
Symbol::Unbound.with_qualifiers(TypeQualifiers::empty())
|
||||
} else {
|
||||
// If we have reached this point, we know that we have only seen possibly-unbound symbols.
|
||||
// This means that the final result is still possibly-unbound.
|
||||
|
||||
SymbolAndQualifiers(
|
||||
Symbol::Type(union.build(), Boundness::PossiblyUnbound),
|
||||
union_qualifiers,
|
||||
)
|
||||
Symbol::Type(union.build(), Boundness::PossiblyUnbound)
|
||||
.with_qualifiers(union_qualifiers)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -439,31 +466,18 @@ impl<'db> Class<'db> {
|
||||
db: &'db dyn Db,
|
||||
class_body_scope: ScopeId<'db>,
|
||||
name: &str,
|
||||
inferred_from_class_body: &Symbol<'db>,
|
||||
) -> Symbol<'db> {
|
||||
) -> Option<Type<'db>> {
|
||||
// If we do not see any declarations of an attribute, neither in the class body nor in
|
||||
// any method, we build a union of `Unknown` with the inferred types of all bindings of
|
||||
// that attribute. We include `Unknown` in that union to account for the fact that the
|
||||
// attribute might be externally modified.
|
||||
let mut union_of_inferred_types = UnionBuilder::new(db).add(Type::unknown());
|
||||
let mut union_boundness = Boundness::Bound;
|
||||
|
||||
if let Symbol::Type(ty, boundness) = inferred_from_class_body {
|
||||
union_of_inferred_types = union_of_inferred_types.add(*ty);
|
||||
union_boundness = *boundness;
|
||||
}
|
||||
|
||||
let attribute_assignments = attribute_assignments(db, class_body_scope);
|
||||
|
||||
let Some(attribute_assignments) = attribute_assignments
|
||||
let attribute_assignments = attribute_assignments
|
||||
.as_deref()
|
||||
.and_then(|assignments| assignments.get(name))
|
||||
else {
|
||||
if inferred_from_class_body.is_unbound() {
|
||||
return Symbol::Unbound;
|
||||
}
|
||||
return Symbol::Type(union_of_inferred_types.build(), union_boundness);
|
||||
};
|
||||
.and_then(|assignments| assignments.get(name))?;
|
||||
|
||||
for attribute_assignment in attribute_assignments {
|
||||
match attribute_assignment {
|
||||
@@ -477,7 +491,7 @@ impl<'db> Class<'db> {
|
||||
let annotation_ty = infer_expression_type(db, *annotation);
|
||||
|
||||
// TODO: check if there are conflicting declarations
|
||||
return Symbol::bound(annotation_ty);
|
||||
return Some(annotation_ty);
|
||||
}
|
||||
AttributeAssignment::Unannotated { value } => {
|
||||
// We found an un-annotated attribute assignment of the form:
|
||||
@@ -499,6 +513,16 @@ impl<'db> Class<'db> {
|
||||
|
||||
union_of_inferred_types = union_of_inferred_types.add(inferred_ty);
|
||||
}
|
||||
AttributeAssignment::ContextManager { context_manager } => {
|
||||
// We found an attribute assignment like:
|
||||
//
|
||||
// with <context_manager> as self.name:
|
||||
|
||||
let context_ty = infer_expression_type(db, *context_manager);
|
||||
let inferred_ty = context_ty.enter(db);
|
||||
|
||||
union_of_inferred_types = union_of_inferred_types.add(inferred_ty);
|
||||
}
|
||||
AttributeAssignment::Unpack {
|
||||
attribute_expression_id,
|
||||
unpack,
|
||||
@@ -516,7 +540,7 @@ impl<'db> Class<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
Symbol::Type(union_of_inferred_types.build(), union_boundness)
|
||||
Some(union_of_inferred_types.build())
|
||||
}
|
||||
|
||||
/// A helper function for `instance_member` that looks up the `name` attribute only on
|
||||
@@ -533,55 +557,93 @@ impl<'db> Class<'db> {
|
||||
let use_def = use_def_map(db, body_scope);
|
||||
|
||||
let declarations = use_def.public_declarations(symbol_id);
|
||||
|
||||
match symbol_from_declarations(db, declarations) {
|
||||
Ok(SymbolAndQualifiers(declared @ Symbol::Type(declared_ty, _), qualifiers)) => {
|
||||
let declared_and_qualifiers = symbol_from_declarations(db, declarations);
|
||||
match declared_and_qualifiers {
|
||||
Ok(SymbolAndQualifiers {
|
||||
symbol: declared @ Symbol::Type(declared_ty, declaredness),
|
||||
qualifiers,
|
||||
}) => {
|
||||
// The attribute is declared in the class body.
|
||||
|
||||
if let Some(function) = declared_ty.into_function_literal() {
|
||||
// TODO: Eventually, we are going to process all decorators correctly. This is
|
||||
// just a temporary heuristic to provide a broad categorization
|
||||
|
||||
if function.has_known_class_decorator(db, KnownClass::Classmethod)
|
||||
&& function.decorators(db).len() == 1
|
||||
{
|
||||
SymbolAndQualifiers(declared, qualifiers)
|
||||
} else if function.has_known_class_decorator(db, KnownClass::Property) {
|
||||
SymbolAndQualifiers::todo("@property")
|
||||
} else if function.has_known_function_decorator(db, KnownFunction::Overload)
|
||||
{
|
||||
SymbolAndQualifiers::todo("overloaded method")
|
||||
} else if !function.decorators(db).is_empty() {
|
||||
SymbolAndQualifiers::todo("decorated method")
|
||||
} else {
|
||||
SymbolAndQualifiers(declared, qualifiers)
|
||||
}
|
||||
} else {
|
||||
SymbolAndQualifiers(declared, qualifiers)
|
||||
}
|
||||
}
|
||||
Ok(SymbolAndQualifiers(Symbol::Unbound, _)) => {
|
||||
// The attribute is not *declared* in the class body. It could still be declared
|
||||
// in a method, and it could also be *bound* in the class body (and/or in a method).
|
||||
|
||||
let bindings = use_def.public_bindings(symbol_id);
|
||||
let inferred = symbol_from_bindings(db, bindings);
|
||||
let has_binding = !inferred.is_unbound();
|
||||
|
||||
Self::implicit_instance_attribute(db, body_scope, name, &inferred).into()
|
||||
if has_binding {
|
||||
// The attribute is declared and bound in the class body.
|
||||
|
||||
if let Some(implicit_ty) =
|
||||
Self::implicit_instance_attribute(db, body_scope, name)
|
||||
{
|
||||
if declaredness == Boundness::Bound {
|
||||
// If a symbol is definitely declared, and we see
|
||||
// attribute assignments in methods of the class,
|
||||
// we trust the declared type.
|
||||
declared.with_qualifiers(qualifiers)
|
||||
} else {
|
||||
Symbol::Type(
|
||||
UnionType::from_elements(db, [declared_ty, implicit_ty]),
|
||||
declaredness,
|
||||
)
|
||||
.with_qualifiers(qualifiers)
|
||||
}
|
||||
} else {
|
||||
// The symbol is declared and bound in the class body,
|
||||
// but we did not find any attribute assignments in
|
||||
// methods of the class. This means that the attribute
|
||||
// has a class-level default value, but it would not be
|
||||
// found in a `__dict__` lookup.
|
||||
|
||||
Symbol::Unbound.into()
|
||||
}
|
||||
} else {
|
||||
// The attribute is declared but not bound in the class body.
|
||||
// We take this as a sign that this is intended to be a pure
|
||||
// instance attribute, and we trust the declared type, unless
|
||||
// it is possibly-undeclared. In the latter case, we also
|
||||
// union with the inferred type from attribute assignments.
|
||||
|
||||
if declaredness == Boundness::Bound {
|
||||
declared.with_qualifiers(qualifiers)
|
||||
} else {
|
||||
if let Some(implicit_ty) =
|
||||
Self::implicit_instance_attribute(db, body_scope, name)
|
||||
{
|
||||
Symbol::Type(
|
||||
UnionType::from_elements(db, [declared_ty, implicit_ty]),
|
||||
declaredness,
|
||||
)
|
||||
.with_qualifiers(qualifiers)
|
||||
} else {
|
||||
declared.with_qualifiers(qualifiers)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err((declared_ty, _conflicting_declarations)) => {
|
||||
|
||||
Ok(SymbolAndQualifiers {
|
||||
symbol: Symbol::Unbound,
|
||||
qualifiers: _,
|
||||
}) => {
|
||||
// The attribute is not *declared* in the class body. It could still be declared/bound
|
||||
// in a method.
|
||||
|
||||
Self::implicit_instance_attribute(db, body_scope, name)
|
||||
.map_or(Symbol::Unbound, Symbol::bound)
|
||||
.into()
|
||||
}
|
||||
Err((declared, _conflicting_declarations)) => {
|
||||
// There are conflicting declarations for this attribute in the class body.
|
||||
SymbolAndQualifiers(
|
||||
Symbol::bound(declared_ty.inner_type()),
|
||||
declared_ty.qualifiers(),
|
||||
)
|
||||
Symbol::bound(declared.inner_type()).with_qualifiers(declared.qualifiers())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// This attribute is neither declared nor bound in the class body.
|
||||
// It could still be implicitly defined in a method.
|
||||
|
||||
Self::implicit_instance_attribute(db, body_scope, name, &Symbol::Unbound).into()
|
||||
Self::implicit_instance_attribute(db, body_scope, name)
|
||||
.map_or(Symbol::Unbound, Symbol::bound)
|
||||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -663,7 +725,7 @@ impl<'db> ClassLiteralType<'db> {
|
||||
self.class.body_scope(db)
|
||||
}
|
||||
|
||||
pub(super) fn static_member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
pub(super) fn class_member(self, db: &'db dyn Db, name: &str) -> SymbolAndQualifiers<'db> {
|
||||
self.class.class_member(db, name)
|
||||
}
|
||||
}
|
||||
@@ -881,6 +943,7 @@ impl<'db> KnownClass {
|
||||
|
||||
pub(crate) fn to_class_literal(self, db: &'db dyn Db) -> Type<'db> {
|
||||
known_module_symbol(db, self.canonical_module(db), self.as_str(db))
|
||||
.symbol
|
||||
.ignore_possibly_unbound()
|
||||
.unwrap_or(Type::unknown())
|
||||
}
|
||||
@@ -896,6 +959,7 @@ impl<'db> KnownClass {
|
||||
/// *and* `class` is a subclass of `other`.
|
||||
pub(super) fn is_subclass_of(self, db: &'db dyn Db, other: Class<'db>) -> bool {
|
||||
known_module_symbol(db, self.canonical_module(db), self.as_str(db))
|
||||
.symbol
|
||||
.ignore_possibly_unbound()
|
||||
.and_then(Type::into_class_literal)
|
||||
.is_some_and(|ClassLiteralType { class }| class.is_subclass_of(db, other))
|
||||
@@ -1203,6 +1267,8 @@ pub enum KnownInstanceType<'db> {
|
||||
Deque,
|
||||
/// The symbol `typing.OrderedDict` (which can also be found as `typing_extensions.OrderedDict`)
|
||||
OrderedDict,
|
||||
/// The symbol `typing.Protocol` (which can also be found as `typing_extensions.Protocol`)
|
||||
Protocol,
|
||||
/// The symbol `typing.Type` (which can also be found as `typing_extensions.Type`)
|
||||
Type,
|
||||
/// A single instance of `typing.TypeVar`
|
||||
@@ -1274,6 +1340,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
| Self::Deque
|
||||
| Self::ChainMap
|
||||
| Self::OrderedDict
|
||||
| Self::Protocol
|
||||
| Self::ReadOnly
|
||||
| Self::TypeAliasType(_)
|
||||
| Self::Unknown
|
||||
@@ -1318,6 +1385,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
Self::Deque => "typing.Deque",
|
||||
Self::ChainMap => "typing.ChainMap",
|
||||
Self::OrderedDict => "typing.OrderedDict",
|
||||
Self::Protocol => "typing.Protocol",
|
||||
Self::ReadOnly => "typing.ReadOnly",
|
||||
Self::TypeVar(typevar) => typevar.name(db),
|
||||
Self::TypeAliasType(_) => "typing.TypeAliasType",
|
||||
@@ -1364,6 +1432,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
Self::Deque => KnownClass::StdlibAlias,
|
||||
Self::ChainMap => KnownClass::StdlibAlias,
|
||||
Self::OrderedDict => KnownClass::StdlibAlias,
|
||||
Self::Protocol => KnownClass::SpecialForm,
|
||||
Self::TypeVar(_) => KnownClass::TypeVar,
|
||||
Self::TypeAliasType(_) => KnownClass::TypeAliasType,
|
||||
Self::TypeOf => KnownClass::SpecialForm,
|
||||
@@ -1406,6 +1475,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
"Counter" => Self::Counter,
|
||||
"ChainMap" => Self::ChainMap,
|
||||
"OrderedDict" => Self::OrderedDict,
|
||||
"Protocol" => Self::Protocol,
|
||||
"Optional" => Self::Optional,
|
||||
"Union" => Self::Union,
|
||||
"NoReturn" => Self::NoReturn,
|
||||
@@ -1457,6 +1527,7 @@ impl<'db> KnownInstanceType<'db> {
|
||||
| Self::Counter
|
||||
| Self::ChainMap
|
||||
| Self::OrderedDict
|
||||
| Self::Protocol
|
||||
| Self::Optional
|
||||
| Self::Union
|
||||
| Self::NoReturn
|
||||
@@ -1489,15 +1560,6 @@ impl<'db> KnownInstanceType<'db> {
|
||||
| Self::TypeOf => module.is_knot_extensions(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn static_member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
let ty = match (self, name) {
|
||||
(Self::TypeVar(typevar), "__name__") => Type::string_literal(db, typevar.name(db)),
|
||||
(Self::TypeAliasType(alias), "__name__") => Type::string_literal(db, alias.name(db)),
|
||||
_ => return self.instance_fallback(db).static_member(db, name),
|
||||
};
|
||||
Symbol::bound(ty)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, salsa::Update)]
|
||||
|
||||
@@ -144,6 +144,7 @@ impl<'db> ClassBase<'db> {
|
||||
KnownInstanceType::Callable => {
|
||||
Self::try_from_type(db, todo_type!("Support for Callable as a base class"))
|
||||
}
|
||||
KnownInstanceType::Protocol => Some(ClassBase::Dynamic(DynamicType::TodoProtocol)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::fmt;
|
||||
|
||||
use drop_bomb::DebugDropBomb;
|
||||
use ruff_db::{
|
||||
diagnostic::{DiagnosticId, SecondaryDiagnosticMessage, Severity},
|
||||
diagnostic::{DiagnosticId, OldSecondaryDiagnosticMessage, Severity},
|
||||
files::File,
|
||||
};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
@@ -84,7 +84,7 @@ impl<'db> InferContext<'db> {
|
||||
lint: &'static LintMetadata,
|
||||
ranged: T,
|
||||
message: fmt::Arguments,
|
||||
secondary_messages: Vec<SecondaryDiagnosticMessage>,
|
||||
secondary_messages: Vec<OldSecondaryDiagnosticMessage>,
|
||||
) where
|
||||
T: Ranged,
|
||||
{
|
||||
@@ -136,7 +136,7 @@ impl<'db> InferContext<'db> {
|
||||
id: DiagnosticId,
|
||||
severity: Severity,
|
||||
message: fmt::Arguments,
|
||||
secondary_messages: Vec<SecondaryDiagnosticMessage>,
|
||||
secondary_messages: Vec<OldSecondaryDiagnosticMessage>,
|
||||
) where
|
||||
T: Ranged,
|
||||
{
|
||||
|
||||
@@ -8,7 +8,9 @@ use crate::types::string_annotation::{
|
||||
};
|
||||
use crate::types::{ClassLiteralType, KnownInstanceType, Type};
|
||||
use crate::{declare_lint, Db};
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, SecondaryDiagnosticMessage, Severity, Span};
|
||||
use ruff_db::diagnostic::{
|
||||
DiagnosticId, OldDiagnosticTrait, OldSecondaryDiagnosticMessage, Severity, Span,
|
||||
};
|
||||
use ruff_db::files::File;
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use ruff_text_size::TextRange;
|
||||
@@ -39,6 +41,7 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) {
|
||||
registry.register_lint(&INVALID_METACLASS);
|
||||
registry.register_lint(&INVALID_PARAMETER_DEFAULT);
|
||||
registry.register_lint(&INVALID_RAISE);
|
||||
registry.register_lint(&INVALID_TYPE_CHECKING_CONSTANT);
|
||||
registry.register_lint(&INVALID_TYPE_FORM);
|
||||
registry.register_lint(&INVALID_TYPE_VARIABLE_CONSTRAINTS);
|
||||
registry.register_lint(&MISSING_ARGUMENT);
|
||||
@@ -412,6 +415,24 @@ declare_lint! {
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for a value other than `False` assigned to the `TYPE_CHECKING` variable, or an
|
||||
/// annotation not assignable from `bool`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The name `TYPE_CHECKING` is reserved for a flag that can be used to provide conditional
|
||||
/// code seen only by the type checker, and not at runtime. Normally this flag is imported from
|
||||
/// `typing` or `typing_extensions`, but it can also be defined locally. If defined locally, it
|
||||
/// must be assigned the value `False` at runtime; the type checker will consider its value to
|
||||
/// be `True`. If annotated, it must be annotated as a type that can accept `bool` values.
|
||||
pub(crate) static INVALID_TYPE_CHECKING_CONSTANT = {
|
||||
summary: "detects invalid TYPE_CHECKING constant assignments",
|
||||
status: LintStatus::preview("1.0.0"),
|
||||
default_level: Level::Error,
|
||||
}
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// ## What it does
|
||||
/// Checks for invalid type expressions.
|
||||
@@ -809,7 +830,7 @@ pub struct TypeCheckDiagnostic {
|
||||
pub(crate) range: TextRange,
|
||||
pub(crate) severity: Severity,
|
||||
pub(crate) file: File,
|
||||
pub(crate) secondary_messages: Vec<SecondaryDiagnosticMessage>,
|
||||
pub(crate) secondary_messages: Vec<OldSecondaryDiagnosticMessage>,
|
||||
}
|
||||
|
||||
impl TypeCheckDiagnostic {
|
||||
@@ -826,7 +847,7 @@ impl TypeCheckDiagnostic {
|
||||
}
|
||||
}
|
||||
|
||||
impl Diagnostic for TypeCheckDiagnostic {
|
||||
impl OldDiagnosticTrait for TypeCheckDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
}
|
||||
@@ -839,7 +860,7 @@ impl Diagnostic for TypeCheckDiagnostic {
|
||||
Some(Span::from(self.file).with_range(self.range))
|
||||
}
|
||||
|
||||
fn secondary_messages(&self) -> &[SecondaryDiagnosticMessage] {
|
||||
fn secondary_messages(&self) -> &[OldSecondaryDiagnosticMessage] {
|
||||
&self.secondary_messages
|
||||
}
|
||||
|
||||
@@ -1042,6 +1063,14 @@ pub(super) fn report_invalid_attribute_assignment(
|
||||
);
|
||||
}
|
||||
|
||||
pub(super) fn report_invalid_type_checking_constant(context: &InferContext, node: AnyNodeRef) {
|
||||
context.report_lint(
|
||||
&INVALID_TYPE_CHECKING_CONSTANT,
|
||||
node,
|
||||
format_args!("The name TYPE_CHECKING is reserved for use as a flag; only False can be assigned to it.",),
|
||||
);
|
||||
}
|
||||
|
||||
pub(super) fn report_possibly_unresolved_reference(
|
||||
context: &InferContext,
|
||||
expr_name_node: &ast::ExprName,
|
||||
@@ -1122,3 +1151,18 @@ pub(crate) fn report_invalid_arguments_to_annotated<'db>(
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
pub(crate) fn report_invalid_arguments_to_callable<'db>(
|
||||
db: &'db dyn Db,
|
||||
context: &InferContext<'db>,
|
||||
subscript: &ast::ExprSubscript,
|
||||
) {
|
||||
context.report_lint(
|
||||
&INVALID_TYPE_FORM,
|
||||
subscript,
|
||||
format_args!(
|
||||
"Special form `{}` expected exactly two arguments (parameter types and return type)",
|
||||
KnownInstanceType::Callable.repr(db)
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ use ruff_python_ast::str::{Quote, TripleQuotes};
|
||||
use ruff_python_literal::escape::AsciiEscape;
|
||||
|
||||
use crate::types::class_base::ClassBase;
|
||||
use crate::types::signatures::{Parameter, Parameters, Signature};
|
||||
use crate::types::{
|
||||
CallableType, ClassLiteralType, InstanceType, IntersectionType, KnownClass, StringLiteralType,
|
||||
Type, UnionType,
|
||||
@@ -88,6 +89,9 @@ impl Display for DisplayRepresentation<'_> {
|
||||
},
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.repr(self.db)),
|
||||
Type::FunctionLiteral(function) => f.write_str(function.name(self.db)),
|
||||
Type::Callable(CallableType::General(callable)) => {
|
||||
callable.signature(self.db).display(self.db).fmt(f)
|
||||
}
|
||||
Type::Callable(CallableType::BoundMethod(bound_method)) => {
|
||||
write!(
|
||||
f,
|
||||
@@ -156,6 +160,99 @@ impl Display for DisplayRepresentation<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Signature<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplaySignature<'db> {
|
||||
DisplaySignature {
|
||||
parameters: self.parameters(),
|
||||
return_ty: self.return_ty.as_ref(),
|
||||
db,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct DisplaySignature<'db> {
|
||||
parameters: &'db Parameters<'db>,
|
||||
return_ty: Option<&'db Type<'db>>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplaySignature<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.write_char('(')?;
|
||||
|
||||
if self.parameters.is_gradual() {
|
||||
// We represent gradual form as `...` in the signature, internally the parameters still
|
||||
// contain `(*args, **kwargs)` parameters.
|
||||
f.write_str("...")?;
|
||||
} else {
|
||||
let mut star_added = false;
|
||||
let mut needs_slash = false;
|
||||
let mut join = f.join(", ");
|
||||
|
||||
for parameter in self.parameters.as_slice() {
|
||||
if !star_added && parameter.is_keyword_only() {
|
||||
join.entry(&'*');
|
||||
star_added = true;
|
||||
}
|
||||
if parameter.is_positional_only() {
|
||||
needs_slash = true;
|
||||
} else if needs_slash {
|
||||
join.entry(&'/');
|
||||
needs_slash = false;
|
||||
}
|
||||
join.entry(¶meter.display(self.db));
|
||||
}
|
||||
if needs_slash {
|
||||
join.entry(&'/');
|
||||
}
|
||||
join.finish()?;
|
||||
}
|
||||
|
||||
write!(
|
||||
f,
|
||||
") -> {}",
|
||||
self.return_ty.unwrap_or(&Type::unknown()).display(self.db)
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Parameter<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplayParameter<'db> {
|
||||
DisplayParameter { param: self, db }
|
||||
}
|
||||
}
|
||||
|
||||
struct DisplayParameter<'db> {
|
||||
param: &'db Parameter<'db>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplayParameter<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
if let Some(name) = self.param.display_name() {
|
||||
write!(f, "{name}")?;
|
||||
if let Some(annotated_type) = self.param.annotated_type() {
|
||||
write!(f, ": {}", annotated_type.display(self.db))?;
|
||||
}
|
||||
// Default value can only be specified if `name` is given.
|
||||
if let Some(default_ty) = self.param.default_type() {
|
||||
if self.param.annotated_type().is_some() {
|
||||
write!(f, " = {}", default_ty.display(self.db))?;
|
||||
} else {
|
||||
write!(f, "={}", default_ty.display(self.db))?;
|
||||
}
|
||||
}
|
||||
} else if let Some(ty) = self.param.annotated_type() {
|
||||
// This case is specifically for the `Callable` signature where name and default value
|
||||
// cannot be provided.
|
||||
ty.display(self.db).fmt(f)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> UnionType<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
|
||||
DisplayUnionType { db, ty: self }
|
||||
@@ -375,8 +472,14 @@ impl Display for DisplayStringLiteralType<'_> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
use crate::db::tests::setup_db;
|
||||
use crate::types::{SliceLiteralType, StringLiteralType, Type};
|
||||
use crate::types::{
|
||||
KnownClass, Parameter, ParameterKind, Parameters, Signature, SliceLiteralType,
|
||||
StringLiteralType, Type,
|
||||
};
|
||||
use crate::Db;
|
||||
|
||||
#[test]
|
||||
fn test_slice_literal_display() {
|
||||
@@ -443,4 +546,226 @@ mod tests {
|
||||
r#"Literal["\""]"#
|
||||
);
|
||||
}
|
||||
|
||||
fn display_signature<'db>(
|
||||
db: &dyn Db,
|
||||
parameters: impl IntoIterator<Item = Parameter<'db>>,
|
||||
return_ty: Option<Type<'db>>,
|
||||
) -> String {
|
||||
Signature::new(Parameters::new(parameters), return_ty)
|
||||
.display(db)
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn signature_display() {
|
||||
let db = setup_db();
|
||||
|
||||
// Empty parameters with no return type.
|
||||
assert_eq!(display_signature(&db, [], None), "() -> Unknown");
|
||||
|
||||
// Empty parameters with a return type.
|
||||
assert_eq!(
|
||||
display_signature(&db, [], Some(Type::none(&db))),
|
||||
"() -> None"
|
||||
);
|
||||
|
||||
// Single parameter type (no name) with a return type.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[Parameter::new(
|
||||
None,
|
||||
Some(Type::none(&db)),
|
||||
ParameterKind::PositionalOrKeyword { default_ty: None }
|
||||
)],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(None) -> None"
|
||||
);
|
||||
|
||||
// Two parameters where one has annotation and the other doesn't.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("x")),
|
||||
None,
|
||||
ParameterKind::PositionalOrKeyword {
|
||||
default_ty: Some(KnownClass::Int.to_instance(&db))
|
||||
}
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("y")),
|
||||
Some(KnownClass::Str.to_instance(&db)),
|
||||
ParameterKind::PositionalOrKeyword {
|
||||
default_ty: Some(KnownClass::Str.to_instance(&db))
|
||||
}
|
||||
)
|
||||
],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(x=int, y: str = str) -> None"
|
||||
);
|
||||
|
||||
// All positional only parameters.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("x")),
|
||||
None,
|
||||
ParameterKind::PositionalOnly { default_ty: None }
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("y")),
|
||||
None,
|
||||
ParameterKind::PositionalOnly { default_ty: None }
|
||||
)
|
||||
],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(x, y, /) -> None"
|
||||
);
|
||||
|
||||
// Positional-only parameters mixed with non-positional-only parameters.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("x")),
|
||||
None,
|
||||
ParameterKind::PositionalOnly { default_ty: None }
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("y")),
|
||||
None,
|
||||
ParameterKind::PositionalOrKeyword { default_ty: None }
|
||||
)
|
||||
],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(x, /, y) -> None"
|
||||
);
|
||||
|
||||
// All keyword-only parameters.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("x")),
|
||||
None,
|
||||
ParameterKind::KeywordOnly { default_ty: None }
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("y")),
|
||||
None,
|
||||
ParameterKind::KeywordOnly { default_ty: None }
|
||||
)
|
||||
],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(*, x, y) -> None"
|
||||
);
|
||||
|
||||
// Keyword-only parameters mixed with non-keyword-only parameters.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("x")),
|
||||
None,
|
||||
ParameterKind::PositionalOrKeyword { default_ty: None }
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("y")),
|
||||
None,
|
||||
ParameterKind::KeywordOnly { default_ty: None }
|
||||
)
|
||||
],
|
||||
Some(Type::none(&db))
|
||||
),
|
||||
"(x, *, y) -> None"
|
||||
);
|
||||
|
||||
// A mix of all parameter kinds.
|
||||
assert_eq!(
|
||||
display_signature(
|
||||
&db,
|
||||
[
|
||||
Parameter::new(
|
||||
Some(Name::new_static("a")),
|
||||
None,
|
||||
ParameterKind::PositionalOnly { default_ty: None },
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("b")),
|
||||
Some(KnownClass::Int.to_instance(&db)),
|
||||
ParameterKind::PositionalOnly { default_ty: None },
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("c")),
|
||||
None,
|
||||
ParameterKind::PositionalOnly {
|
||||
default_ty: Some(Type::IntLiteral(1)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("d")),
|
||||
Some(KnownClass::Int.to_instance(&db)),
|
||||
ParameterKind::PositionalOnly {
|
||||
default_ty: Some(Type::IntLiteral(2)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("e")),
|
||||
None,
|
||||
ParameterKind::PositionalOrKeyword {
|
||||
default_ty: Some(Type::IntLiteral(3)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("f")),
|
||||
Some(KnownClass::Int.to_instance(&db)),
|
||||
ParameterKind::PositionalOrKeyword {
|
||||
default_ty: Some(Type::IntLiteral(4)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("args")),
|
||||
Some(Type::object(&db)),
|
||||
ParameterKind::Variadic,
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("g")),
|
||||
None,
|
||||
ParameterKind::KeywordOnly {
|
||||
default_ty: Some(Type::IntLiteral(5)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("h")),
|
||||
Some(KnownClass::Int.to_instance(&db)),
|
||||
ParameterKind::KeywordOnly {
|
||||
default_ty: Some(Type::IntLiteral(6)),
|
||||
},
|
||||
),
|
||||
Parameter::new(
|
||||
Some(Name::new_static("kwargs")),
|
||||
Some(KnownClass::Str.to_instance(&db)),
|
||||
ParameterKind::KeywordVariadic,
|
||||
),
|
||||
],
|
||||
Some(KnownClass::Bytes.to_instance(&db))
|
||||
),
|
||||
"(a, b: int, c=Literal[1], d: int = Literal[2], \
|
||||
/, e=Literal[3], f: int = Literal[4], *args: object, \
|
||||
*, g=Literal[5], h: int = Literal[6], **kwargs: str) -> bytes"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -100,13 +100,16 @@ impl Ty {
|
||||
Ty::BooleanLiteral(b) => Type::BooleanLiteral(b),
|
||||
Ty::LiteralString => Type::LiteralString,
|
||||
Ty::BytesLiteral(s) => Type::bytes_literal(db, s.as_bytes()),
|
||||
Ty::BuiltinInstance(s) => builtins_symbol(db, s).expect_type().to_instance(db),
|
||||
Ty::BuiltinInstance(s) => builtins_symbol(db, s).symbol.expect_type().to_instance(db),
|
||||
Ty::AbcInstance(s) => known_module_symbol(db, KnownModule::Abc, s)
|
||||
.symbol
|
||||
.expect_type()
|
||||
.to_instance(db),
|
||||
Ty::AbcClassLiteral(s) => known_module_symbol(db, KnownModule::Abc, s).expect_type(),
|
||||
Ty::AbcClassLiteral(s) => known_module_symbol(db, KnownModule::Abc, s)
|
||||
.symbol
|
||||
.expect_type(),
|
||||
Ty::TypingLiteral => Type::KnownInstance(KnownInstanceType::Literal),
|
||||
Ty::BuiltinClassLiteral(s) => builtins_symbol(db, s).expect_type(),
|
||||
Ty::BuiltinClassLiteral(s) => builtins_symbol(db, s).symbol.expect_type(),
|
||||
Ty::KnownClassInstance(known_class) => known_class.to_instance(db),
|
||||
Ty::Union(tys) => {
|
||||
UnionType::from_elements(db, tys.into_iter().map(|ty| ty.into_type(db)))
|
||||
@@ -129,6 +132,7 @@ impl Ty {
|
||||
Ty::SubclassOfBuiltinClass(s) => SubclassOfType::from(
|
||||
db,
|
||||
builtins_symbol(db, s)
|
||||
.symbol
|
||||
.expect_type()
|
||||
.expect_class_literal()
|
||||
.class,
|
||||
@@ -136,16 +140,17 @@ impl Ty {
|
||||
Ty::SubclassOfAbcClass(s) => SubclassOfType::from(
|
||||
db,
|
||||
known_module_symbol(db, KnownModule::Abc, s)
|
||||
.symbol
|
||||
.expect_type()
|
||||
.expect_class_literal()
|
||||
.class,
|
||||
),
|
||||
Ty::AlwaysTruthy => Type::AlwaysTruthy,
|
||||
Ty::AlwaysFalsy => Type::AlwaysFalsy,
|
||||
Ty::BuiltinsFunction(name) => builtins_symbol(db, name).expect_type(),
|
||||
Ty::BuiltinsFunction(name) => builtins_symbol(db, name).symbol.expect_type(),
|
||||
Ty::BuiltinsBoundMethod { class, method } => {
|
||||
let builtins_class = builtins_symbol(db, class).expect_type();
|
||||
let function = builtins_class.static_member(db, method).expect_type();
|
||||
let builtins_class = builtins_symbol(db, class).symbol.expect_type();
|
||||
let function = builtins_class.member(db, method).symbol.expect_type();
|
||||
|
||||
create_bound_method(db, function, builtins_class)
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use super::{definition_expression_type, Type};
|
||||
use super::{definition_expression_type, DynamicType, Type};
|
||||
use crate::Db;
|
||||
use crate::{semantic_index::definition::Definition, types::todo_type};
|
||||
use ruff_python_ast::{self as ast, name::Name};
|
||||
|
||||
/// A typed callable signature.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, salsa::Update)]
|
||||
pub(crate) struct Signature<'db> {
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub struct Signature<'db> {
|
||||
/// Parameters, in source order.
|
||||
///
|
||||
/// The ordering of parameters in a valid signature must be: first positional-only parameters,
|
||||
@@ -67,29 +67,113 @@ impl<'db> Signature<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: use SmallVec here once invariance bug is fixed
|
||||
#[derive(Clone, Debug, PartialEq, Eq, salsa::Update)]
|
||||
pub(crate) struct Parameters<'db>(Vec<Parameter<'db>>);
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub(crate) struct Parameters<'db> {
|
||||
// TODO: use SmallVec here once invariance bug is fixed
|
||||
value: Vec<Parameter<'db>>,
|
||||
|
||||
/// Whether this parameter list represents a gradual form using `...` as the only parameter.
|
||||
///
|
||||
/// If this is `true`, the `value` will still contain the variadic and keyword-variadic
|
||||
/// parameters. This flag is used to distinguish between an explicit `...` in the callable type
|
||||
/// as in `Callable[..., int]` and the variadic arguments in `lambda` expression as in
|
||||
/// `lambda *args, **kwargs: None`.
|
||||
///
|
||||
/// The display implementation utilizes this flag to use `...` instead of displaying the
|
||||
/// individual variadic and keyword-variadic parameters.
|
||||
///
|
||||
/// Note: This flag is also used to indicate invalid forms of `Callable` annotations.
|
||||
is_gradual: bool,
|
||||
}
|
||||
|
||||
impl<'db> Parameters<'db> {
|
||||
pub(crate) fn new(parameters: impl IntoIterator<Item = Parameter<'db>>) -> Self {
|
||||
Self(parameters.into_iter().collect())
|
||||
Self {
|
||||
value: parameters.into_iter().collect(),
|
||||
is_gradual: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create an empty parameter list.
|
||||
pub(crate) fn empty() -> Self {
|
||||
Self {
|
||||
value: Vec::new(),
|
||||
is_gradual: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_slice(&self) -> &[Parameter<'db>] {
|
||||
self.value.as_slice()
|
||||
}
|
||||
|
||||
pub(crate) const fn is_gradual(&self) -> bool {
|
||||
self.is_gradual
|
||||
}
|
||||
|
||||
/// Return todo parameters: (*args: Todo, **kwargs: Todo)
|
||||
fn todo() -> Self {
|
||||
Self(vec![
|
||||
Parameter {
|
||||
name: Some(Name::new_static("args")),
|
||||
annotated_ty: Some(todo_type!("todo signature *args")),
|
||||
kind: ParameterKind::Variadic,
|
||||
},
|
||||
Parameter {
|
||||
name: Some(Name::new_static("kwargs")),
|
||||
annotated_ty: Some(todo_type!("todo signature **kwargs")),
|
||||
kind: ParameterKind::KeywordVariadic,
|
||||
},
|
||||
])
|
||||
pub(crate) fn todo() -> Self {
|
||||
Self {
|
||||
value: vec![
|
||||
Parameter {
|
||||
name: Some(Name::new_static("args")),
|
||||
annotated_ty: Some(todo_type!("todo signature *args")),
|
||||
kind: ParameterKind::Variadic,
|
||||
},
|
||||
Parameter {
|
||||
name: Some(Name::new_static("kwargs")),
|
||||
annotated_ty: Some(todo_type!("todo signature **kwargs")),
|
||||
kind: ParameterKind::KeywordVariadic,
|
||||
},
|
||||
],
|
||||
is_gradual: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return parameters that represents a gradual form using `...` as the only parameter.
|
||||
///
|
||||
/// Internally, this is represented as `(*Any, **Any)` that accepts parameters of type [`Any`].
|
||||
///
|
||||
/// [`Any`]: crate::types::DynamicType::Any
|
||||
pub(crate) fn gradual_form() -> Self {
|
||||
Self {
|
||||
value: vec![
|
||||
Parameter {
|
||||
name: None,
|
||||
annotated_ty: Some(Type::Dynamic(DynamicType::Any)),
|
||||
kind: ParameterKind::Variadic,
|
||||
},
|
||||
Parameter {
|
||||
name: None,
|
||||
annotated_ty: Some(Type::Dynamic(DynamicType::Any)),
|
||||
kind: ParameterKind::KeywordVariadic,
|
||||
},
|
||||
],
|
||||
is_gradual: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return parameters that represents an unknown list of parameters.
|
||||
///
|
||||
/// Internally, this is represented as `(*Unknown, **Unknown)` that accepts parameters of type
|
||||
/// [`Unknown`].
|
||||
///
|
||||
/// [`Unknown`]: crate::types::DynamicType::Unknown
|
||||
pub(crate) fn unknown() -> Self {
|
||||
Self {
|
||||
value: vec![
|
||||
Parameter {
|
||||
name: None,
|
||||
annotated_ty: Some(Type::Dynamic(DynamicType::Unknown)),
|
||||
kind: ParameterKind::Variadic,
|
||||
},
|
||||
Parameter {
|
||||
name: None,
|
||||
annotated_ty: Some(Type::Dynamic(DynamicType::Unknown)),
|
||||
kind: ParameterKind::KeywordVariadic,
|
||||
},
|
||||
],
|
||||
is_gradual: true,
|
||||
}
|
||||
}
|
||||
|
||||
fn from_parameters(
|
||||
@@ -146,22 +230,21 @@ impl<'db> Parameters<'db> {
|
||||
let keywords = kwarg.as_ref().map(|arg| {
|
||||
Parameter::from_node_and_kind(db, definition, arg, ParameterKind::KeywordVariadic)
|
||||
});
|
||||
Self(
|
||||
Self::new(
|
||||
positional_only
|
||||
.chain(positional_or_keyword)
|
||||
.chain(variadic)
|
||||
.chain(keyword_only)
|
||||
.chain(keywords)
|
||||
.collect(),
|
||||
.chain(keywords),
|
||||
)
|
||||
}
|
||||
|
||||
pub(crate) fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
self.value.len()
|
||||
}
|
||||
|
||||
pub(crate) fn iter(&self) -> std::slice::Iter<Parameter<'db>> {
|
||||
self.0.iter()
|
||||
self.value.iter()
|
||||
}
|
||||
|
||||
/// Iterate initial positional parameters, not including variadic parameter, if any.
|
||||
@@ -175,7 +258,7 @@ impl<'db> Parameters<'db> {
|
||||
|
||||
/// Return parameter at given index, or `None` if index is out-of-range.
|
||||
pub(crate) fn get(&self, index: usize) -> Option<&Parameter<'db>> {
|
||||
self.0.get(index)
|
||||
self.value.get(index)
|
||||
}
|
||||
|
||||
/// Return positional parameter at given index, or `None` if `index` is out of range.
|
||||
@@ -218,7 +301,7 @@ impl<'db, 'a> IntoIterator for &'a Parameters<'db> {
|
||||
type IntoIter = std::slice::Iter<'a, Parameter<'db>>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
self.value.iter()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -226,11 +309,11 @@ impl<'db> std::ops::Index<usize> for Parameters<'db> {
|
||||
type Output = Parameter<'db>;
|
||||
|
||||
fn index(&self, index: usize) -> &Self::Output {
|
||||
&self.0[index]
|
||||
&self.value[index]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, salsa::Update)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub(crate) struct Parameter<'db> {
|
||||
/// Parameter name.
|
||||
///
|
||||
@@ -272,6 +355,14 @@ impl<'db> Parameter<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_keyword_only(&self) -> bool {
|
||||
matches!(self.kind, ParameterKind::KeywordOnly { .. })
|
||||
}
|
||||
|
||||
pub(crate) fn is_positional_only(&self) -> bool {
|
||||
matches!(self.kind, ParameterKind::PositionalOnly { .. })
|
||||
}
|
||||
|
||||
pub(crate) fn is_variadic(&self) -> bool {
|
||||
matches!(self.kind, ParameterKind::Variadic)
|
||||
}
|
||||
@@ -328,7 +419,7 @@ impl<'db> Parameter<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, salsa::Update)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update)]
|
||||
pub(crate) enum ParameterKind<'db> {
|
||||
/// Positional-only parameter, e.g. `def f(x, /): ...`
|
||||
PositionalOnly { default_ty: Option<Type<'db>> },
|
||||
@@ -348,19 +439,20 @@ mod tests {
|
||||
use crate::db::tests::{setup_db, TestDb};
|
||||
use crate::symbol::global_symbol;
|
||||
use crate::types::{FunctionType, KnownClass};
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
|
||||
#[track_caller]
|
||||
fn get_function_f<'db>(db: &'db TestDb, file: &'static str) -> FunctionType<'db> {
|
||||
let module = ruff_db::files::system_path_to_file(db, file).unwrap();
|
||||
global_symbol(db, module, "f")
|
||||
.symbol
|
||||
.expect_type()
|
||||
.expect_function_literal()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_params<'db>(signature: &Signature<'db>, expected: &[Parameter<'db>]) {
|
||||
assert_eq!(signature.parameters.0.as_slice(), expected);
|
||||
assert_eq!(signature.parameters.value.as_slice(), expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -489,7 +581,7 @@ mod tests {
|
||||
name: Some(name),
|
||||
annotated_ty,
|
||||
kind: ParameterKind::PositionalOrKeyword { .. },
|
||||
}] = &sig.parameters.0[..]
|
||||
}] = &sig.parameters.value[..]
|
||||
else {
|
||||
panic!("expected one positional-or-keyword parameter");
|
||||
};
|
||||
@@ -523,7 +615,7 @@ mod tests {
|
||||
name: Some(name),
|
||||
annotated_ty,
|
||||
kind: ParameterKind::PositionalOrKeyword { .. },
|
||||
}] = &sig.parameters.0[..]
|
||||
}] = &sig.parameters.value[..]
|
||||
else {
|
||||
panic!("expected one positional-or-keyword parameter");
|
||||
};
|
||||
@@ -561,7 +653,7 @@ mod tests {
|
||||
name: Some(b_name),
|
||||
annotated_ty: b_annotated_ty,
|
||||
kind: ParameterKind::PositionalOrKeyword { .. },
|
||||
}] = &sig.parameters.0[..]
|
||||
}] = &sig.parameters.value[..]
|
||||
else {
|
||||
panic!("expected two positional-or-keyword parameters");
|
||||
};
|
||||
@@ -604,7 +696,7 @@ mod tests {
|
||||
name: Some(b_name),
|
||||
annotated_ty: b_annotated_ty,
|
||||
kind: ParameterKind::PositionalOrKeyword { .. },
|
||||
}] = &sig.parameters.0[..]
|
||||
}] = &sig.parameters.value[..]
|
||||
else {
|
||||
panic!("expected two positional-or-keyword parameters");
|
||||
};
|
||||
|
||||
@@ -24,7 +24,7 @@ enum SlotsKind {
|
||||
|
||||
impl SlotsKind {
|
||||
fn from(db: &dyn Db, base: Class) -> Self {
|
||||
let Symbol::Type(slots_ty, bound) = base.own_class_member(db, "__slots__") else {
|
||||
let Symbol::Type(slots_ty, bound) = base.own_class_member(db, "__slots__").symbol else {
|
||||
return Self::NotSpecified;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
use super::{ClassBase, ClassLiteralType, Db, KnownClass, Symbol, Type};
|
||||
use crate::symbol::SymbolAndQualifiers;
|
||||
|
||||
use super::{ClassBase, ClassLiteralType, Db, KnownClass, Type};
|
||||
|
||||
/// A type that represents `type[C]`, i.e. the class object `C` and class objects that are subclasses of `C`.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)]
|
||||
@@ -64,8 +66,12 @@ impl<'db> SubclassOfType<'db> {
|
||||
!self.is_dynamic()
|
||||
}
|
||||
|
||||
pub(crate) fn static_member(self, db: &'db dyn Db, name: &str) -> Symbol<'db> {
|
||||
Type::from(self.subclass_of).static_member(db, name)
|
||||
pub(crate) fn find_name_in_mro(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
name: &str,
|
||||
) -> Option<SymbolAndQualifiers<'db>> {
|
||||
Type::from(self.subclass_of).find_name_in_mro(db, name)
|
||||
}
|
||||
|
||||
/// Return `true` if `self` is a subtype of `other`.
|
||||
|
||||
@@ -77,6 +77,12 @@ pub(super) fn union_elements_ordering<'db>(left: &Type<'db>, right: &Type<'db>)
|
||||
(Type::Callable(CallableType::WrapperDescriptorDunderGet), _) => Ordering::Less,
|
||||
(_, Type::Callable(CallableType::WrapperDescriptorDunderGet)) => Ordering::Greater,
|
||||
|
||||
(Type::Callable(CallableType::General(_)), Type::Callable(CallableType::General(_))) => {
|
||||
Ordering::Equal
|
||||
}
|
||||
(Type::Callable(CallableType::General(_)), _) => Ordering::Less,
|
||||
(_, Type::Callable(CallableType::General(_))) => Ordering::Greater,
|
||||
|
||||
(Type::Tuple(left), Type::Tuple(right)) => left.cmp(right),
|
||||
(Type::Tuple(_), _) => Ordering::Less,
|
||||
(_, Type::Tuple(_)) => Ordering::Greater,
|
||||
@@ -184,6 +190,9 @@ pub(super) fn union_elements_ordering<'db>(left: &Type<'db>, right: &Type<'db>)
|
||||
(KnownInstanceType::OrderedDict, _) => Ordering::Less,
|
||||
(_, KnownInstanceType::OrderedDict) => Ordering::Greater,
|
||||
|
||||
(KnownInstanceType::Protocol, _) => Ordering::Less,
|
||||
(_, KnownInstanceType::Protocol) => Ordering::Greater,
|
||||
|
||||
(KnownInstanceType::NoReturn, _) => Ordering::Less,
|
||||
(_, KnownInstanceType::NoReturn) => Ordering::Greater,
|
||||
|
||||
@@ -285,5 +294,8 @@ fn dynamic_elements_ordering(left: DynamicType, right: DynamicType) -> Ordering
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
(DynamicType::Todo(TodoType), DynamicType::Todo(TodoType)) => Ordering::Equal,
|
||||
|
||||
(DynamicType::TodoProtocol, _) => Ordering::Less,
|
||||
(_, DynamicType::TodoProtocol) => Ordering::Greater,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,26 +42,28 @@ impl<'db> Unpacker<'db> {
|
||||
"Unpacking target must be a list or tuple expression"
|
||||
);
|
||||
|
||||
let mut value_ty = infer_expression_types(self.db(), value.expression())
|
||||
let value_ty = infer_expression_types(self.db(), value.expression())
|
||||
.expression_type(value.scoped_expression_id(self.db(), self.scope));
|
||||
|
||||
if value.is_assign()
|
||||
&& self.context.in_stub()
|
||||
&& value
|
||||
.expression()
|
||||
.node_ref(self.db())
|
||||
.is_ellipsis_literal_expr()
|
||||
{
|
||||
value_ty = Type::unknown();
|
||||
}
|
||||
if value.is_iterable() {
|
||||
// If the value is an iterable, then the type that needs to be unpacked is the iterator
|
||||
// type.
|
||||
value_ty = value_ty.try_iterate(self.db()).unwrap_or_else(|err| {
|
||||
let value_ty = match value {
|
||||
UnpackValue::Assign(expression) => {
|
||||
if self.context.in_stub()
|
||||
&& expression.node_ref(self.db()).is_ellipsis_literal_expr()
|
||||
{
|
||||
Type::unknown()
|
||||
} else {
|
||||
value_ty
|
||||
}
|
||||
}
|
||||
UnpackValue::Iterable(_) => value_ty.try_iterate(self.db()).unwrap_or_else(|err| {
|
||||
err.report_diagnostic(&self.context, value.as_any_node_ref(self.db()));
|
||||
err.fallback_element_type(self.db())
|
||||
});
|
||||
}
|
||||
}),
|
||||
UnpackValue::ContextManager(_) => value_ty.try_enter(self.db()).unwrap_or_else(|err| {
|
||||
err.report_diagnostic(&self.context, value.as_any_node_ref(self.db()));
|
||||
err.fallback_enter_type(self.db())
|
||||
}),
|
||||
};
|
||||
|
||||
self.unpack_inner(target, value.as_any_node_ref(self.db()), value_ty);
|
||||
}
|
||||
@@ -121,7 +123,7 @@ impl<'db> Unpacker<'db> {
|
||||
if let Some(tuple_ty) = ty.into_tuple() {
|
||||
let tuple_ty_elements = self.tuple_ty_elements(target, elts, tuple_ty);
|
||||
|
||||
match elts.len().cmp(&tuple_ty_elements.len()) {
|
||||
let length_mismatch = match elts.len().cmp(&tuple_ty_elements.len()) {
|
||||
Ordering::Less => {
|
||||
self.context.report_lint(
|
||||
&INVALID_ASSIGNMENT,
|
||||
@@ -132,6 +134,7 @@ impl<'db> Unpacker<'db> {
|
||||
tuple_ty_elements.len()
|
||||
),
|
||||
);
|
||||
true
|
||||
}
|
||||
Ordering::Greater => {
|
||||
self.context.report_lint(
|
||||
@@ -143,13 +146,18 @@ impl<'db> Unpacker<'db> {
|
||||
tuple_ty_elements.len()
|
||||
),
|
||||
);
|
||||
true
|
||||
}
|
||||
Ordering::Equal => {}
|
||||
}
|
||||
Ordering::Equal => false,
|
||||
};
|
||||
|
||||
for (index, ty) in tuple_ty_elements.iter().enumerate() {
|
||||
if let Some(element_types) = target_types.get_mut(index) {
|
||||
element_types.push(*ty);
|
||||
if length_mismatch {
|
||||
element_types.push(Type::unknown());
|
||||
} else {
|
||||
element_types.push(*ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -243,15 +251,7 @@ impl<'db> Unpacker<'db> {
|
||||
),
|
||||
);
|
||||
|
||||
let mut element_types = tuple_ty.elements(self.db()).to_vec();
|
||||
|
||||
// Subtract 1 to insert the starred expression type at the correct
|
||||
// index.
|
||||
element_types.resize(targets.len() - 1, Type::unknown());
|
||||
// TODO: This should be `list[Unknown]`
|
||||
element_types.insert(starred_index, todo_type!("starred unpacking"));
|
||||
|
||||
Cow::Owned(element_types)
|
||||
Cow::Owned(vec![Type::unknown(); targets.len()])
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -63,25 +63,19 @@ impl<'db> Unpack<'db> {
|
||||
pub(crate) enum UnpackValue<'db> {
|
||||
/// An iterable expression like the one in a `for` loop or a comprehension.
|
||||
Iterable(Expression<'db>),
|
||||
/// An context manager expression like the one in a `with` statement.
|
||||
ContextManager(Expression<'db>),
|
||||
/// An expression that is being assigned to a target.
|
||||
Assign(Expression<'db>),
|
||||
}
|
||||
|
||||
impl<'db> UnpackValue<'db> {
|
||||
/// Returns `true` if the value is an iterable expression.
|
||||
pub(crate) const fn is_iterable(self) -> bool {
|
||||
matches!(self, UnpackValue::Iterable(_))
|
||||
}
|
||||
|
||||
/// Returns `true` if the value is being assigned to a target.
|
||||
pub(crate) const fn is_assign(self) -> bool {
|
||||
matches!(self, UnpackValue::Assign(_))
|
||||
}
|
||||
|
||||
/// Returns the underlying [`Expression`] that is being unpacked.
|
||||
pub(crate) const fn expression(self) -> Expression<'db> {
|
||||
match self {
|
||||
UnpackValue::Assign(expr) | UnpackValue::Iterable(expr) => expr,
|
||||
UnpackValue::Assign(expr)
|
||||
| UnpackValue::Iterable(expr)
|
||||
| UnpackValue::ContextManager(expr) => expr,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &ProjectDatabase) -> Vec
|
||||
|
||||
fn to_lsp_diagnostic(
|
||||
db: &dyn Db,
|
||||
diagnostic: &dyn ruff_db::diagnostic::Diagnostic,
|
||||
diagnostic: &dyn ruff_db::diagnostic::OldDiagnosticTrait,
|
||||
encoding: crate::PositionEncoding,
|
||||
) -> Diagnostic {
|
||||
let range = if let Some(span) = diagnostic.span() {
|
||||
|
||||
@@ -15,6 +15,7 @@ red_knot_python_semantic = { workspace = true, features = ["serde"] }
|
||||
red_knot_vendored = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_notebook = { workspace = true }
|
||||
ruff_python_trivia = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
@@ -30,7 +31,9 @@ rustc-hash = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
tempfile = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -35,16 +35,15 @@
|
||||
//! ```
|
||||
|
||||
use crate::db::Db;
|
||||
use regex::Regex;
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::source::{line_index, source_text, SourceText};
|
||||
use ruff_python_trivia::CommentRanges;
|
||||
use ruff_python_trivia::{CommentRanges, Cursor};
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use smallvec::SmallVec;
|
||||
use std::ops::Deref;
|
||||
use std::sync::LazyLock;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Diagnostic assertion comments in a single embedded file.
|
||||
#[derive(Debug)]
|
||||
@@ -92,12 +91,12 @@ impl<'a> IntoIterator for &'a InlineFileAssertions {
|
||||
}
|
||||
}
|
||||
|
||||
/// An [`Assertion`] with the [`TextRange`] of its original inline comment.
|
||||
/// An [`UnparsedAssertion`] with the [`TextRange`] of its original inline comment.
|
||||
#[derive(Debug)]
|
||||
struct AssertionWithRange<'a>(Assertion<'a>, TextRange);
|
||||
struct AssertionWithRange<'a>(UnparsedAssertion<'a>, TextRange);
|
||||
|
||||
impl<'a> Deref for AssertionWithRange<'a> {
|
||||
type Target = Assertion<'a>;
|
||||
type Target = UnparsedAssertion<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
@@ -110,7 +109,7 @@ impl Ranged for AssertionWithRange<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<AssertionWithRange<'a>> for Assertion<'a> {
|
||||
impl<'a> From<AssertionWithRange<'a>> for UnparsedAssertion<'a> {
|
||||
fn from(value: AssertionWithRange<'a>) -> Self {
|
||||
value.0
|
||||
}
|
||||
@@ -130,7 +129,7 @@ impl<'a> Iterator for AssertionWithRangeIterator<'a> {
|
||||
loop {
|
||||
let inner_next = self.inner.next()?;
|
||||
let comment = &self.file_assertions.source[inner_next];
|
||||
if let Some(assertion) = Assertion::from_comment(comment) {
|
||||
if let Some(assertion) = UnparsedAssertion::from_comment(comment) {
|
||||
return Some(AssertionWithRange(assertion, inner_next));
|
||||
};
|
||||
}
|
||||
@@ -139,11 +138,11 @@ impl<'a> Iterator for AssertionWithRangeIterator<'a> {
|
||||
|
||||
impl std::iter::FusedIterator for AssertionWithRangeIterator<'_> {}
|
||||
|
||||
/// A vector of [`Assertion`]s belonging to a single line.
|
||||
/// A vector of [`UnparsedAssertion`]s belonging to a single line.
|
||||
///
|
||||
/// Most lines will have zero or one assertion, so we use a [`SmallVec`] optimized for a single
|
||||
/// element to avoid most heap vector allocations.
|
||||
type AssertionVec<'a> = SmallVec<[Assertion<'a>; 1]>;
|
||||
type AssertionVec<'a> = SmallVec<[UnparsedAssertion<'a>; 1]>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct LineAssertionsIterator<'a> {
|
||||
@@ -227,50 +226,80 @@ pub(crate) struct LineAssertions<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Deref for LineAssertions<'a> {
|
||||
type Target = [Assertion<'a>];
|
||||
type Target = [UnparsedAssertion<'a>];
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.assertions
|
||||
}
|
||||
}
|
||||
|
||||
static TYPE_RE: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"^#\s*revealed:\s*(?<ty_display>.+?)\s*$").unwrap());
|
||||
|
||||
static ERROR_RE: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(
|
||||
r#"^#\s*error:(\s*(?<column>\d+))?(\s*\[(?<rule>.+?)\])?(\s*"(?<message>.+?)")?\s*$"#,
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
/// A single diagnostic assertion comment.
|
||||
///
|
||||
/// This type represents an *attempted* assertion, but not necessarily a *valid* assertion.
|
||||
/// Parsing is done lazily in `matcher.rs`; this allows us to emit nicer error messages
|
||||
/// in the event of an invalid assertion
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum Assertion<'a> {
|
||||
/// A `revealed: ` assertion.
|
||||
pub(crate) enum UnparsedAssertion<'a> {
|
||||
/// A `# revealed:` assertion.
|
||||
Revealed(&'a str),
|
||||
|
||||
/// An `error: ` assertion.
|
||||
Error(ErrorAssertion<'a>),
|
||||
/// An `# error:` assertion.
|
||||
Error(&'a str),
|
||||
}
|
||||
|
||||
impl<'a> Assertion<'a> {
|
||||
impl<'a> UnparsedAssertion<'a> {
|
||||
/// Returns `Some(_)` if the comment starts with `# error:` or `# revealed:`,
|
||||
/// indicating that it is an assertion comment.
|
||||
fn from_comment(comment: &'a str) -> Option<Self> {
|
||||
if let Some(caps) = TYPE_RE.captures(comment) {
|
||||
Some(Self::Revealed(caps.name("ty_display").unwrap().as_str()))
|
||||
} else {
|
||||
ERROR_RE.captures(comment).map(|caps| {
|
||||
Self::Error(ErrorAssertion {
|
||||
rule: caps.name("rule").map(|m| m.as_str()),
|
||||
column: caps.name("column").and_then(|m| m.as_str().parse().ok()),
|
||||
message_contains: caps.name("message").map(|m| m.as_str()),
|
||||
})
|
||||
})
|
||||
let comment = comment.trim().strip_prefix('#')?.trim();
|
||||
let (keyword, body) = comment.split_once(':')?;
|
||||
let keyword = keyword.trim();
|
||||
let body = body.trim();
|
||||
|
||||
match keyword {
|
||||
"revealed" => Some(Self::Revealed(body)),
|
||||
"error" => Some(Self::Error(body)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Parse the attempted assertion into a [`ParsedAssertion`] structured representation.
|
||||
pub(crate) fn parse(&self) -> Result<ParsedAssertion<'a>, PragmaParseError<'a>> {
|
||||
match self {
|
||||
Self::Revealed(revealed) => {
|
||||
if revealed.is_empty() {
|
||||
Err(PragmaParseError::EmptyRevealTypeAssertion)
|
||||
} else {
|
||||
Ok(ParsedAssertion::Revealed(revealed))
|
||||
}
|
||||
}
|
||||
Self::Error(error) => ErrorAssertion::from_str(error)
|
||||
.map(ParsedAssertion::Error)
|
||||
.map_err(PragmaParseError::ErrorAssertionParseError),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Assertion<'_> {
|
||||
impl std::fmt::Display for UnparsedAssertion<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Revealed(expected_type) => write!(f, "revealed: {expected_type}"),
|
||||
Self::Error(assertion) => write!(f, "error: {assertion}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An assertion comment that has been parsed and validated for correctness.
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum ParsedAssertion<'a> {
|
||||
/// A `# revealed:` assertion.
|
||||
Revealed(&'a str),
|
||||
|
||||
/// An `# error:` assertion.
|
||||
Error(ErrorAssertion<'a>),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParsedAssertion<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Revealed(expected_type) => write!(f, "revealed: {expected_type}"),
|
||||
@@ -279,7 +308,7 @@ impl std::fmt::Display for Assertion<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// An `error: ` assertion comment.
|
||||
/// A parsed and validated `# error:` assertion comment.
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ErrorAssertion<'a> {
|
||||
/// The diagnostic rule code we expect.
|
||||
@@ -292,6 +321,12 @@ pub(crate) struct ErrorAssertion<'a> {
|
||||
pub(crate) message_contains: Option<&'a str>,
|
||||
}
|
||||
|
||||
impl<'a> ErrorAssertion<'a> {
|
||||
fn from_str(source: &'a str) -> Result<Self, ErrorAssertionParseError<'a>> {
|
||||
ErrorAssertionParser::new(source).parse()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ErrorAssertion<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("error:")?;
|
||||
@@ -308,16 +343,159 @@ impl std::fmt::Display for ErrorAssertion<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// A parser to convert a string into a [`ErrorAssertion`].
|
||||
#[derive(Debug, Clone)]
|
||||
struct ErrorAssertionParser<'a> {
|
||||
cursor: Cursor<'a>,
|
||||
|
||||
/// string slice representing all characters *after* the `# error:` prefix.
|
||||
comment_source: &'a str,
|
||||
}
|
||||
|
||||
impl<'a> ErrorAssertionParser<'a> {
|
||||
fn new(comment: &'a str) -> Self {
|
||||
Self {
|
||||
cursor: Cursor::new(comment),
|
||||
comment_source: comment,
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves the current offset of the cursor within the source code.
|
||||
fn offset(&self) -> TextSize {
|
||||
self.comment_source.text_len() - self.cursor.text_len()
|
||||
}
|
||||
|
||||
/// Consume characters in the assertion comment until we find a non-whitespace character
|
||||
fn skip_whitespace(&mut self) {
|
||||
self.cursor.eat_while(char::is_whitespace);
|
||||
}
|
||||
|
||||
/// Attempt to parse the assertion comment into a [`ErrorAssertion`].
|
||||
fn parse(mut self) -> Result<ErrorAssertion<'a>, ErrorAssertionParseError<'a>> {
|
||||
let mut column = None;
|
||||
let mut rule = None;
|
||||
|
||||
self.skip_whitespace();
|
||||
|
||||
while let Some(character) = self.cursor.bump() {
|
||||
match character {
|
||||
// column number
|
||||
'0'..='9' => {
|
||||
if column.is_some() {
|
||||
return Err(ErrorAssertionParseError::MultipleColumnNumbers);
|
||||
}
|
||||
if rule.is_some() {
|
||||
return Err(ErrorAssertionParseError::ColumnNumberAfterRuleCode);
|
||||
}
|
||||
let offset = self.offset() - TextSize::new(1);
|
||||
self.cursor.eat_while(|c| !c.is_whitespace());
|
||||
let column_str = &self.comment_source[TextRange::new(offset, self.offset())];
|
||||
column = OneIndexed::from_str(column_str)
|
||||
.map(Some)
|
||||
.map_err(|e| ErrorAssertionParseError::BadColumnNumber(column_str, e))?;
|
||||
}
|
||||
|
||||
// rule code
|
||||
'[' => {
|
||||
if rule.is_some() {
|
||||
return Err(ErrorAssertionParseError::MultipleRuleCodes);
|
||||
}
|
||||
let offset = self.offset();
|
||||
self.cursor.eat_while(|c| c != ']');
|
||||
if self.cursor.is_eof() {
|
||||
return Err(ErrorAssertionParseError::UnclosedRuleCode);
|
||||
}
|
||||
rule = Some(self.comment_source[TextRange::new(offset, self.offset())].trim());
|
||||
self.cursor.bump();
|
||||
}
|
||||
|
||||
// message text
|
||||
'"' => {
|
||||
let comment_source = self.comment_source.trim();
|
||||
return if comment_source.ends_with('"') {
|
||||
let rest =
|
||||
&comment_source[self.offset().to_usize()..comment_source.len() - 1];
|
||||
Ok(ErrorAssertion {
|
||||
rule,
|
||||
column,
|
||||
message_contains: Some(rest),
|
||||
})
|
||||
} else {
|
||||
Err(ErrorAssertionParseError::UnclosedMessage)
|
||||
};
|
||||
}
|
||||
|
||||
// Some other assumptions we make don't hold true if we hit this branch:
|
||||
'\n' | '\r' => {
|
||||
unreachable!("Assertion comments should never contain newlines")
|
||||
}
|
||||
|
||||
// something else (bad!)...
|
||||
unexpected => {
|
||||
return Err(ErrorAssertionParseError::UnexpectedCharacter {
|
||||
character: unexpected,
|
||||
offset: self.offset().to_usize(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
self.skip_whitespace();
|
||||
}
|
||||
|
||||
if rule.is_some() {
|
||||
Ok(ErrorAssertion {
|
||||
rule,
|
||||
column,
|
||||
message_contains: None,
|
||||
})
|
||||
} else {
|
||||
Err(ErrorAssertionParseError::NoRuleOrMessage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Enumeration of ways in which parsing an assertion comment can fail.
|
||||
///
|
||||
/// The assertion comment could be either a "revealed" assertion or an "error" assertion.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub(crate) enum PragmaParseError<'a> {
|
||||
#[error("Must specify which type should be revealed")]
|
||||
EmptyRevealTypeAssertion,
|
||||
#[error("{0}")]
|
||||
ErrorAssertionParseError(ErrorAssertionParseError<'a>),
|
||||
}
|
||||
|
||||
/// Enumeration of ways in which parsing an *error* assertion comment can fail.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub(crate) enum ErrorAssertionParseError<'a> {
|
||||
#[error("no rule or message text")]
|
||||
NoRuleOrMessage,
|
||||
#[error("bad column number `{0}`")]
|
||||
BadColumnNumber(&'a str, #[source] std::num::ParseIntError),
|
||||
#[error("column number must precede the rule code")]
|
||||
ColumnNumberAfterRuleCode,
|
||||
#[error("multiple column numbers in one assertion")]
|
||||
MultipleColumnNumbers,
|
||||
#[error("expected ']' to close rule code")]
|
||||
UnclosedRuleCode,
|
||||
#[error("cannot use multiple rule codes in one assertion")]
|
||||
MultipleRuleCodes,
|
||||
#[error("expected '\"' to be the final character in an assertion with an error message")]
|
||||
UnclosedMessage,
|
||||
#[error("unexpected character `{character}` at offset {offset} (relative to the `:` in the assertion comment)")]
|
||||
UnexpectedCharacter { character: char, offset: usize },
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{Assertion, InlineFileAssertions, LineAssertions};
|
||||
use super::*;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
fn get_assertions(source: &str) -> InlineFileAssertions {
|
||||
let mut db = crate::db::Db::setup(SystemPathBuf::from("/src"));
|
||||
let mut db = Db::setup();
|
||||
db.write_file("/src/test.py", source).unwrap();
|
||||
let file = system_path_to_file(&db, "/src/test.py").unwrap();
|
||||
InlineFileAssertions::from_file(&db, file)
|
||||
@@ -366,7 +544,7 @@ mod tests {
|
||||
panic!("expected one assertion");
|
||||
};
|
||||
|
||||
assert_eq!(format!("{assert}"), "error:");
|
||||
assert_eq!(format!("{assert}"), "error: ");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -455,16 +633,20 @@ mod tests {
|
||||
assert_eq!(line1.line_number, OneIndexed::from_zero_indexed(2));
|
||||
assert_eq!(line2.line_number, OneIndexed::from_zero_indexed(3));
|
||||
|
||||
let [Assertion::Error(error1)] = &line1.assertions[..] else {
|
||||
let [UnparsedAssertion::Error(error1)] = &line1.assertions[..] else {
|
||||
panic!("expected one error assertion");
|
||||
};
|
||||
|
||||
let error1 = ErrorAssertion::from_str(error1).unwrap();
|
||||
|
||||
assert_eq!(error1.rule, Some("invalid-assignment"));
|
||||
|
||||
let [Assertion::Error(error2)] = &line2.assertions[..] else {
|
||||
let [UnparsedAssertion::Error(error2)] = &line2.assertions[..] else {
|
||||
panic!("expected one error assertion");
|
||||
};
|
||||
|
||||
let error2 = ErrorAssertion::from_str(error2).unwrap();
|
||||
|
||||
assert_eq!(error2.rule, Some("unbound-name"));
|
||||
}
|
||||
|
||||
@@ -485,18 +667,23 @@ mod tests {
|
||||
assert_eq!(line1.line_number, OneIndexed::from_zero_indexed(2));
|
||||
assert_eq!(line2.line_number, OneIndexed::from_zero_indexed(3));
|
||||
|
||||
let [Assertion::Error(error1), Assertion::Revealed(expected_ty)] = &line1.assertions[..]
|
||||
let [UnparsedAssertion::Error(error1), UnparsedAssertion::Revealed(expected_ty)] =
|
||||
&line1.assertions[..]
|
||||
else {
|
||||
panic!("expected one error assertion and one Revealed assertion");
|
||||
};
|
||||
|
||||
assert_eq!(error1.rule, Some("invalid-assignment"));
|
||||
assert_eq!(*expected_ty, "str");
|
||||
let error1 = ErrorAssertion::from_str(error1).unwrap();
|
||||
|
||||
let [Assertion::Error(error2)] = &line2.assertions[..] else {
|
||||
assert_eq!(error1.rule, Some("invalid-assignment"));
|
||||
assert_eq!(expected_ty.trim(), "str");
|
||||
|
||||
let [UnparsedAssertion::Error(error2)] = &line2.assertions[..] else {
|
||||
panic!("expected one error assertion");
|
||||
};
|
||||
|
||||
let error2 = ErrorAssertion::from_str(error2).unwrap();
|
||||
|
||||
assert_eq!(error2.rule, Some("unbound-name"));
|
||||
}
|
||||
|
||||
|
||||
@@ -10,8 +10,9 @@
|
||||
|
||||
use anyhow::Context;
|
||||
use red_knot_python_semantic::PythonPlatform;
|
||||
use ruff_db::system::{SystemPath, SystemPathBuf};
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use serde::Deserialize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Deserialize, Debug, Default, Clone)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
@@ -19,6 +20,11 @@ pub(crate) struct MarkdownTestConfig {
|
||||
pub(crate) environment: Option<Environment>,
|
||||
|
||||
pub(crate) log: Option<Log>,
|
||||
|
||||
/// The [`ruff_db::system::System`] to use for tests.
|
||||
///
|
||||
/// Defaults to the case-sensitive [`ruff_db::system::InMemorySystem`].
|
||||
pub(crate) system: Option<SystemKind>,
|
||||
}
|
||||
|
||||
impl MarkdownTestConfig {
|
||||
@@ -36,11 +42,17 @@ impl MarkdownTestConfig {
|
||||
.and_then(|env| env.python_platform.clone())
|
||||
}
|
||||
|
||||
pub(crate) fn typeshed(&self) -> Option<&str> {
|
||||
pub(crate) fn typeshed(&self) -> Option<&SystemPath> {
|
||||
self.environment
|
||||
.as_ref()
|
||||
.and_then(|env| env.typeshed.as_deref())
|
||||
}
|
||||
|
||||
pub(crate) fn extra_paths(&self) -> Option<&[SystemPathBuf]> {
|
||||
self.environment
|
||||
.as_ref()
|
||||
.and_then(|env| env.extra_paths.as_deref())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Default, Clone)]
|
||||
@@ -53,7 +65,10 @@ pub(crate) struct Environment {
|
||||
pub(crate) python_platform: Option<PythonPlatform>,
|
||||
|
||||
/// Path to a custom typeshed directory.
|
||||
pub(crate) typeshed: Option<String>,
|
||||
pub(crate) typeshed: Option<SystemPathBuf>,
|
||||
|
||||
/// Additional search paths to consider when resolving modules.
|
||||
pub(crate) extra_paths: Option<Vec<SystemPathBuf>>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
@@ -64,3 +79,19 @@ pub(crate) enum Log {
|
||||
/// Enable logging and only show filters that match the given [env-filter](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html)
|
||||
Filter(String),
|
||||
}
|
||||
|
||||
/// The system to use for tests.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize, Default)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub(crate) enum SystemKind {
|
||||
/// Use an in-memory system with a case sensitive file system..
|
||||
///
|
||||
/// This is recommended for all tests because it's fast.
|
||||
#[default]
|
||||
InMemory,
|
||||
|
||||
/// Use the os system.
|
||||
///
|
||||
/// This system should only be used when testing system or OS specific behavior.
|
||||
Os,
|
||||
}
|
||||
|
||||
@@ -1,69 +1,53 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use camino::{Utf8Component, Utf8PathBuf};
|
||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||
use red_knot_python_semantic::{
|
||||
default_lint_registry, Db as SemanticDb, Program, ProgramSettings, PythonPlatform,
|
||||
SearchPathSettings,
|
||||
};
|
||||
use red_knot_python_semantic::{default_lint_registry, Db as SemanticDb};
|
||||
use ruff_db::files::{File, Files};
|
||||
use ruff_db::system::{DbWithTestSystem, System, SystemPath, SystemPathBuf, TestSystem};
|
||||
use ruff_db::system::{
|
||||
DbWithWritableSystem, InMemorySystem, OsSystem, System, SystemPath, SystemPathBuf,
|
||||
WritableSystem,
|
||||
};
|
||||
use ruff_db::vendored::VendoredFileSystem;
|
||||
use ruff_db::{Db as SourceDb, Upcast};
|
||||
use ruff_python_ast::PythonVersion;
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[salsa::db]
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct Db {
|
||||
project_root: SystemPathBuf,
|
||||
storage: salsa::Storage<Self>,
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
system: MdtestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
rule_selection: Arc<RuleSelection>,
|
||||
}
|
||||
|
||||
impl Db {
|
||||
pub(crate) fn setup(project_root: SystemPathBuf) -> Self {
|
||||
pub(crate) fn setup() -> Self {
|
||||
let rule_selection = RuleSelection::from_registry(default_lint_registry());
|
||||
|
||||
let db = Self {
|
||||
project_root,
|
||||
Self {
|
||||
system: MdtestSystem::in_memory(),
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: red_knot_vendored::file_system().clone(),
|
||||
files: Files::default(),
|
||||
rule_selection: Arc::new(rule_selection),
|
||||
};
|
||||
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&db.project_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
python_version: PythonVersion::default(),
|
||||
python_platform: PythonPlatform::default(),
|
||||
search_paths: SearchPathSettings::new(vec![db.project_root.clone()]),
|
||||
},
|
||||
)
|
||||
.expect("Invalid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn project_root(&self) -> &SystemPath {
|
||||
&self.project_root
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithTestSystem for Db {
|
||||
fn test_system(&self) -> &TestSystem {
|
||||
&self.system
|
||||
pub(crate) fn use_os_system_with_temp_dir(&mut self, cwd: SystemPathBuf, temp_dir: TempDir) {
|
||||
self.system.with_os(cwd, temp_dir);
|
||||
Files::sync_all(self);
|
||||
}
|
||||
|
||||
fn test_system_mut(&mut self) -> &mut TestSystem {
|
||||
&mut self.system
|
||||
pub(crate) fn use_in_memory_system(&mut self) {
|
||||
self.system.with_in_memory();
|
||||
Files::sync_all(self);
|
||||
}
|
||||
|
||||
pub(crate) fn create_directory_all(&self, path: &SystemPath) -> ruff_db::system::Result<()> {
|
||||
self.system.create_directory_all(path)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -110,3 +94,175 @@ impl SemanticDb for Db {
|
||||
impl salsa::Database for Db {
|
||||
fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
|
||||
}
|
||||
|
||||
impl DbWithWritableSystem for Db {
|
||||
type System = MdtestSystem;
|
||||
fn writable_system(&self) -> &Self::System {
|
||||
&self.system
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct MdtestSystem(Arc<MdtestSystemInner>);
|
||||
|
||||
#[derive(Debug)]
|
||||
enum MdtestSystemInner {
|
||||
InMemory(InMemorySystem),
|
||||
Os {
|
||||
os_system: OsSystem,
|
||||
_temp_dir: TempDir,
|
||||
},
|
||||
}
|
||||
|
||||
impl MdtestSystem {
|
||||
fn in_memory() -> Self {
|
||||
Self(Arc::new(MdtestSystemInner::InMemory(
|
||||
InMemorySystem::default(),
|
||||
)))
|
||||
}
|
||||
|
||||
fn as_system(&self) -> &dyn WritableSystem {
|
||||
match &*self.0 {
|
||||
MdtestSystemInner::InMemory(system) => system,
|
||||
MdtestSystemInner::Os { os_system, .. } => os_system,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_os(&mut self, cwd: SystemPathBuf, temp_dir: TempDir) {
|
||||
self.0 = Arc::new(MdtestSystemInner::Os {
|
||||
os_system: OsSystem::new(cwd),
|
||||
_temp_dir: temp_dir,
|
||||
});
|
||||
}
|
||||
|
||||
fn with_in_memory(&mut self) {
|
||||
if let MdtestSystemInner::InMemory(in_memory) = &*self.0 {
|
||||
in_memory.fs().remove_all();
|
||||
} else {
|
||||
self.0 = Arc::new(MdtestSystemInner::InMemory(InMemorySystem::default()));
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_path<'a>(&self, path: &'a SystemPath) -> Cow<'a, SystemPath> {
|
||||
match &*self.0 {
|
||||
MdtestSystemInner::InMemory(_) => Cow::Borrowed(path),
|
||||
MdtestSystemInner::Os { os_system, .. } => {
|
||||
// Make all paths relative to the current directory
|
||||
// to avoid writing or reading from outside the temp directory.
|
||||
let without_root: Utf8PathBuf = path
|
||||
.components()
|
||||
.skip_while(|component| {
|
||||
matches!(
|
||||
component,
|
||||
Utf8Component::RootDir | Utf8Component::Prefix(..)
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
Cow::Owned(os_system.current_directory().join(&without_root))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl System for MdtestSystem {
|
||||
fn path_metadata(
|
||||
&self,
|
||||
path: &SystemPath,
|
||||
) -> ruff_db::system::Result<ruff_db::system::Metadata> {
|
||||
self.as_system().path_metadata(&self.normalize_path(path))
|
||||
}
|
||||
|
||||
fn canonicalize_path(&self, path: &SystemPath) -> ruff_db::system::Result<SystemPathBuf> {
|
||||
let canonicalized = self
|
||||
.as_system()
|
||||
.canonicalize_path(&self.normalize_path(path))?;
|
||||
|
||||
if let MdtestSystemInner::Os { os_system, .. } = &*self.0 {
|
||||
// Make the path relative to the current directory
|
||||
Ok(canonicalized
|
||||
.strip_prefix(os_system.current_directory())
|
||||
.unwrap()
|
||||
.to_owned())
|
||||
} else {
|
||||
Ok(canonicalized)
|
||||
}
|
||||
}
|
||||
|
||||
fn read_to_string(&self, path: &SystemPath) -> ruff_db::system::Result<String> {
|
||||
self.as_system().read_to_string(&self.normalize_path(path))
|
||||
}
|
||||
|
||||
fn read_to_notebook(&self, path: &SystemPath) -> Result<Notebook, NotebookError> {
|
||||
self.as_system()
|
||||
.read_to_notebook(&self.normalize_path(path))
|
||||
}
|
||||
|
||||
fn read_virtual_path_to_string(
|
||||
&self,
|
||||
path: &ruff_db::system::SystemVirtualPath,
|
||||
) -> ruff_db::system::Result<String> {
|
||||
self.as_system().read_virtual_path_to_string(path)
|
||||
}
|
||||
|
||||
fn read_virtual_path_to_notebook(
|
||||
&self,
|
||||
path: &ruff_db::system::SystemVirtualPath,
|
||||
) -> Result<Notebook, NotebookError> {
|
||||
self.as_system().read_virtual_path_to_notebook(path)
|
||||
}
|
||||
|
||||
fn current_directory(&self) -> &SystemPath {
|
||||
self.as_system().current_directory()
|
||||
}
|
||||
|
||||
fn user_config_directory(&self) -> Option<SystemPathBuf> {
|
||||
self.as_system().user_config_directory()
|
||||
}
|
||||
|
||||
fn read_directory<'a>(
|
||||
&'a self,
|
||||
path: &SystemPath,
|
||||
) -> ruff_db::system::Result<
|
||||
Box<dyn Iterator<Item = ruff_db::system::Result<ruff_db::system::DirectoryEntry>> + 'a>,
|
||||
> {
|
||||
self.as_system().read_directory(&self.normalize_path(path))
|
||||
}
|
||||
|
||||
fn walk_directory(
|
||||
&self,
|
||||
path: &SystemPath,
|
||||
) -> ruff_db::system::walk_directory::WalkDirectoryBuilder {
|
||||
self.as_system().walk_directory(&self.normalize_path(path))
|
||||
}
|
||||
|
||||
fn glob(
|
||||
&self,
|
||||
pattern: &str,
|
||||
) -> Result<
|
||||
Box<dyn Iterator<Item = Result<SystemPathBuf, ruff_db::system::GlobError>>>,
|
||||
ruff_db::system::PatternError,
|
||||
> {
|
||||
self.as_system()
|
||||
.glob(self.normalize_path(SystemPath::new(pattern)).as_str())
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl WritableSystem for MdtestSystem {
|
||||
fn write_file(&self, path: &SystemPath, content: &str) -> ruff_db::system::Result<()> {
|
||||
self.as_system()
|
||||
.write_file(&self.normalize_path(path), content)
|
||||
}
|
||||
|
||||
fn create_directory_all(&self, path: &SystemPath) -> ruff_db::system::Result<()> {
|
||||
self.as_system()
|
||||
.create_directory_all(&self.normalize_path(path))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
//!
|
||||
//! We don't assume that we will get the diagnostics in source order.
|
||||
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::diagnostic::OldDiagnosticTrait;
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
use std::ops::{Deref, Range};
|
||||
|
||||
@@ -19,7 +19,7 @@ pub(crate) struct SortedDiagnostics<T> {
|
||||
|
||||
impl<T> SortedDiagnostics<T>
|
||||
where
|
||||
T: Diagnostic,
|
||||
T: OldDiagnosticTrait,
|
||||
{
|
||||
pub(crate) fn new(diagnostics: impl IntoIterator<Item = T>, line_index: &LineIndex) -> Self {
|
||||
let mut diagnostics: Vec<_> = diagnostics
|
||||
@@ -99,7 +99,7 @@ pub(crate) struct LineDiagnosticsIterator<'a, T> {
|
||||
|
||||
impl<'a, T> Iterator for LineDiagnosticsIterator<'a, T>
|
||||
where
|
||||
T: Diagnostic,
|
||||
T: OldDiagnosticTrait,
|
||||
{
|
||||
type Item = LineDiagnostics<'a, T>;
|
||||
|
||||
@@ -115,7 +115,7 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::iter::FusedIterator for LineDiagnosticsIterator<'_, T> where T: Diagnostic {}
|
||||
impl<T> std::iter::FusedIterator for LineDiagnosticsIterator<'_, T> where T: OldDiagnosticTrait {}
|
||||
|
||||
/// All diagnostics that start on a single line of source code in one embedded Python file.
|
||||
#[derive(Debug)]
|
||||
@@ -144,18 +144,18 @@ struct DiagnosticWithLine<T> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::db::Db;
|
||||
use crate::diagnostic::Diagnostic;
|
||||
use crate::diagnostic::OldDiagnosticTrait;
|
||||
use ruff_db::diagnostic::{DiagnosticId, LintName, Severity, Span};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::source::line_index;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
use std::borrow::Cow;
|
||||
|
||||
#[test]
|
||||
fn sort_and_group() {
|
||||
let mut db = Db::setup(SystemPathBuf::from("/src"));
|
||||
let mut db = Db::setup();
|
||||
db.write_file("/src/test.py", "one\ntwo\n").unwrap();
|
||||
let file = system_path_to_file(&db, "/src/test.py").unwrap();
|
||||
let lines = line_index(&db, file);
|
||||
@@ -190,7 +190,7 @@ mod tests {
|
||||
file: File,
|
||||
}
|
||||
|
||||
impl Diagnostic for DummyDiagnostic {
|
||||
impl OldDiagnosticTrait for DummyDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
DiagnosticId::Lint(LintName::of("dummy"))
|
||||
}
|
||||
|
||||
@@ -2,14 +2,15 @@ use crate::config::Log;
|
||||
use crate::parser::{BacktickOffsets, EmbeddedFileSourceMap};
|
||||
use camino::Utf8Path;
|
||||
use colored::Colorize;
|
||||
use config::SystemKind;
|
||||
use parser as test_parser;
|
||||
use red_knot_python_semantic::types::check_types;
|
||||
use red_knot_python_semantic::{Program, ProgramSettings, PythonPath, SearchPathSettings};
|
||||
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, ParseDiagnostic};
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::diagnostic::{DisplayDiagnosticConfig, OldDiagnosticTrait, OldParseDiagnostic};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::panic::catch_unwind;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::{DbWithWritableSystem as _, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::{setup_logging, setup_logging_with_filter};
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
use std::fmt::Write;
|
||||
@@ -42,7 +43,7 @@ pub fn run(
|
||||
}
|
||||
};
|
||||
|
||||
let mut db = db::Db::setup(SystemPathBuf::from("/src"));
|
||||
let mut db = db::Db::setup();
|
||||
|
||||
let filter = std::env::var(MDTEST_TEST_FILTER).ok();
|
||||
let mut any_failures = false;
|
||||
@@ -56,10 +57,6 @@ pub fn run(
|
||||
Log::Filter(filter) => setup_logging_with_filter(filter),
|
||||
});
|
||||
|
||||
// Remove all files so that the db is in a "fresh" state.
|
||||
db.memory_file_system().remove_all();
|
||||
Files::sync_all(&mut db);
|
||||
|
||||
if let Err(failures) = run_test(&mut db, relative_fixture_path, snapshot_path, &test) {
|
||||
any_failures = true;
|
||||
println!("\n{}\n", test.name().bold().underline());
|
||||
@@ -82,13 +79,13 @@ pub fn run(
|
||||
}
|
||||
}
|
||||
|
||||
let escaped_test_name = test.name().replace('\'', "\\'");
|
||||
|
||||
println!(
|
||||
"\nTo rerun this specific test, set the environment variable: {MDTEST_TEST_FILTER}=\"{}\"",
|
||||
test.name()
|
||||
"\nTo rerun this specific test, set the environment variable: {MDTEST_TEST_FILTER}='{escaped_test_name}'",
|
||||
);
|
||||
println!(
|
||||
"{MDTEST_TEST_FILTER}=\"{}\" cargo test -p red_knot_python_semantic --test mdtest -- {test_name}",
|
||||
test.name()
|
||||
"{MDTEST_TEST_FILTER}='{escaped_test_name}' cargo test -p red_knot_python_semantic --test mdtest -- {test_name}",
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -104,9 +101,30 @@ fn run_test(
|
||||
snapshot_path: &Utf8Path,
|
||||
test: &parser::MarkdownTest,
|
||||
) -> Result<(), Failures> {
|
||||
let project_root = db.project_root().to_path_buf();
|
||||
let src_path = SystemPathBuf::from("/src");
|
||||
let custom_typeshed_path = test.configuration().typeshed().map(SystemPathBuf::from);
|
||||
// Initialize the system and remove all files and directories to reset the system to a clean state.
|
||||
match test.configuration().system.unwrap_or_default() {
|
||||
SystemKind::InMemory => {
|
||||
db.use_in_memory_system();
|
||||
}
|
||||
SystemKind::Os => {
|
||||
let dir = tempfile::TempDir::new().expect("Creating a temporary directory to succeed");
|
||||
let root_path = dir
|
||||
.path()
|
||||
.canonicalize()
|
||||
.expect("Canonicalizing to succeed");
|
||||
let root_path = SystemPathBuf::from_path_buf(root_path)
|
||||
.expect("Temp directory to be a valid UTF8 path");
|
||||
|
||||
db.use_os_system_with_temp_dir(root_path, dir);
|
||||
}
|
||||
}
|
||||
|
||||
let project_root = SystemPathBuf::from("/src");
|
||||
db.create_directory_all(&project_root)
|
||||
.expect("Creating the project root to succeed");
|
||||
|
||||
let src_path = project_root.clone();
|
||||
let custom_typeshed_path = test.configuration().typeshed();
|
||||
let mut typeshed_files = vec![];
|
||||
let mut has_custom_versions_file = false;
|
||||
|
||||
@@ -118,13 +136,13 @@ fn run_test(
|
||||
}
|
||||
|
||||
assert!(
|
||||
matches!(embedded.lang, "py" | "pyi" | "text"),
|
||||
"Supported file types are: py, pyi, text"
|
||||
matches!(embedded.lang, "py" | "pyi" | "python" | "text"),
|
||||
"Supported file types are: py (or python), pyi, text, and ignore"
|
||||
);
|
||||
|
||||
let full_path = embedded.full_path(&project_root);
|
||||
|
||||
if let Some(ref typeshed_path) = custom_typeshed_path {
|
||||
if let Some(typeshed_path) = custom_typeshed_path {
|
||||
if let Ok(relative_path) = full_path.strip_prefix(typeshed_path.join("stdlib")) {
|
||||
if relative_path.as_str() == "VERSIONS" {
|
||||
has_custom_versions_file = true;
|
||||
@@ -151,7 +169,7 @@ fn run_test(
|
||||
.collect();
|
||||
|
||||
// Create a custom typeshed `VERSIONS` file if none was provided.
|
||||
if let Some(ref typeshed_path) = custom_typeshed_path {
|
||||
if let Some(typeshed_path) = custom_typeshed_path {
|
||||
if !has_custom_versions_file {
|
||||
let versions_file = typeshed_path.join("stdlib/VERSIONS");
|
||||
let contents = typeshed_files
|
||||
@@ -170,21 +188,26 @@ fn run_test(
|
||||
}
|
||||
}
|
||||
|
||||
Program::get(db)
|
||||
.update_from_settings(
|
||||
db,
|
||||
ProgramSettings {
|
||||
python_version: test.configuration().python_version().unwrap_or_default(),
|
||||
python_platform: test.configuration().python_platform().unwrap_or_default(),
|
||||
search_paths: SearchPathSettings {
|
||||
src_roots: vec![src_path],
|
||||
extra_paths: vec![],
|
||||
custom_typeshed: custom_typeshed_path,
|
||||
python_path: PythonPath::KnownSitePackages(vec![]),
|
||||
},
|
||||
},
|
||||
)
|
||||
.expect("Failed to update Program settings in TestDb");
|
||||
let settings = ProgramSettings {
|
||||
python_version: test.configuration().python_version().unwrap_or_default(),
|
||||
python_platform: test.configuration().python_platform().unwrap_or_default(),
|
||||
search_paths: SearchPathSettings {
|
||||
src_roots: vec![src_path],
|
||||
extra_paths: test
|
||||
.configuration()
|
||||
.extra_paths()
|
||||
.unwrap_or_default()
|
||||
.to_vec(),
|
||||
custom_typeshed: custom_typeshed_path.map(SystemPath::to_path_buf),
|
||||
python_path: PythonPath::KnownSitePackages(vec![]),
|
||||
},
|
||||
};
|
||||
|
||||
match Program::try_get(db) {
|
||||
Some(program) => program.update_from_settings(db, settings),
|
||||
None => Program::from_settings(db, settings).map(|_| ()),
|
||||
}
|
||||
.expect("Failed to update Program settings in TestDb");
|
||||
|
||||
// When snapshot testing is enabled, this is populated with
|
||||
// all diagnostics. Otherwise it remains empty.
|
||||
@@ -200,8 +223,8 @@ fn run_test(
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|error| {
|
||||
let diagnostic: Box<dyn Diagnostic> =
|
||||
Box::new(ParseDiagnostic::new(test_file.file, error));
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> =
|
||||
Box::new(OldParseDiagnostic::new(test_file.file, error));
|
||||
diagnostic
|
||||
})
|
||||
.collect();
|
||||
@@ -234,7 +257,7 @@ fn run_test(
|
||||
}
|
||||
};
|
||||
diagnostics.extend(type_diagnostics.into_iter().map(|diagnostic| {
|
||||
let diagnostic: Box<dyn Diagnostic> = Box::new((*diagnostic).clone());
|
||||
let diagnostic: Box<dyn OldDiagnosticTrait> = Box::new((*diagnostic).clone());
|
||||
diagnostic
|
||||
}));
|
||||
|
||||
@@ -299,7 +322,7 @@ struct TestFile {
|
||||
backtick_offsets: Vec<BacktickOffsets>,
|
||||
}
|
||||
|
||||
fn create_diagnostic_snapshot<D: Diagnostic>(
|
||||
fn create_diagnostic_snapshot<D: OldDiagnosticTrait>(
|
||||
db: &mut db::Db,
|
||||
relative_fixture_path: &Utf8Path,
|
||||
test: &parser::MarkdownTest,
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
//! Match [`Diagnostic`]s against [`Assertion`]s and produce test failure messages for any
|
||||
//! mismatches.
|
||||
use crate::assertion::{Assertion, ErrorAssertion, InlineFileAssertions};
|
||||
//! Match [`OldDiagnosticTrait`]s against assertions and produce test failure
|
||||
//! messages for any mismatches.
|
||||
use crate::assertion::{InlineFileAssertions, ParsedAssertion, UnparsedAssertion};
|
||||
use crate::db::Db;
|
||||
use crate::diagnostic::SortedDiagnostics;
|
||||
use colored::Colorize;
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticAsStrError, DiagnosticId};
|
||||
use ruff_db::diagnostic::{DiagnosticAsStrError, DiagnosticId, OldDiagnosticTrait};
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::source::{line_index, source_text, SourceText};
|
||||
use ruff_source_file::{LineIndex, OneIndexed};
|
||||
@@ -53,7 +53,7 @@ pub(super) fn match_file<T>(
|
||||
diagnostics: impl IntoIterator<Item = T>,
|
||||
) -> Result<(), FailuresByLine>
|
||||
where
|
||||
T: Diagnostic,
|
||||
T: OldDiagnosticTrait,
|
||||
{
|
||||
// Parse assertions from comments in the file, and get diagnostics from the file; both
|
||||
// ordered by line number.
|
||||
@@ -136,13 +136,26 @@ trait UnmatchedWithColumn {
|
||||
fn unmatched_with_column(&self, column: OneIndexed) -> String;
|
||||
}
|
||||
|
||||
impl Unmatched for Assertion<'_> {
|
||||
// This is necessary since we only parse assertions lazily,
|
||||
// and sometimes we know before parsing any assertions that an assertion will be unmatched,
|
||||
// e.g. if we've exhausted all diagnostics but there are still assertions left.
|
||||
//
|
||||
// TODO: the lazy parsing means that we sometimes won't report malformed assertions as
|
||||
// being invalid if we detect that they'll be unmatched before parsing them.
|
||||
// That's perhaps not the best user experience.
|
||||
impl Unmatched for UnparsedAssertion<'_> {
|
||||
fn unmatched(&self) -> String {
|
||||
format!("{} {self}", "unmatched assertion:".red())
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_add_undefined_reveal_clarification<T: Diagnostic>(
|
||||
impl Unmatched for ParsedAssertion<'_> {
|
||||
fn unmatched(&self) -> String {
|
||||
format!("{} {self}", "unmatched assertion:".red())
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_add_undefined_reveal_clarification<T: OldDiagnosticTrait>(
|
||||
diagnostic: &T,
|
||||
original: std::fmt::Arguments,
|
||||
) -> String {
|
||||
@@ -158,7 +171,7 @@ fn maybe_add_undefined_reveal_clarification<T: Diagnostic>(
|
||||
|
||||
impl<T> Unmatched for T
|
||||
where
|
||||
T: Diagnostic,
|
||||
T: OldDiagnosticTrait,
|
||||
{
|
||||
fn unmatched(&self) -> String {
|
||||
let id = self.id();
|
||||
@@ -175,7 +188,7 @@ where
|
||||
|
||||
impl<T> UnmatchedWithColumn for T
|
||||
where
|
||||
T: Diagnostic,
|
||||
T: OldDiagnosticTrait,
|
||||
{
|
||||
fn unmatched_with_column(&self, column: OneIndexed) -> String {
|
||||
let id = self.id();
|
||||
@@ -213,13 +226,15 @@ impl Matcher {
|
||||
}
|
||||
}
|
||||
|
||||
/// Check a slice of [`Diagnostic`]s against a slice of [`Assertion`]s.
|
||||
/// Check a slice of [`OldDiagnosticTrait`]s against a slice of
|
||||
/// [`UnparsedAssertion`]s.
|
||||
///
|
||||
/// Return vector of [`Unmatched`] for any unmatched diagnostics or assertions.
|
||||
fn match_line<'a, 'b, T: Diagnostic + 'a>(
|
||||
/// Return vector of [`Unmatched`] for any unmatched diagnostics or
|
||||
/// assertions.
|
||||
fn match_line<'a, 'b, T: OldDiagnosticTrait + 'a>(
|
||||
&self,
|
||||
diagnostics: &'a [T],
|
||||
assertions: &'a [Assertion<'b>],
|
||||
assertions: &'a [UnparsedAssertion<'b>],
|
||||
) -> Result<(), Vec<String>>
|
||||
where
|
||||
'b: 'a,
|
||||
@@ -227,22 +242,15 @@ impl Matcher {
|
||||
let mut failures = vec![];
|
||||
let mut unmatched: Vec<_> = diagnostics.iter().collect();
|
||||
for assertion in assertions {
|
||||
if matches!(
|
||||
assertion,
|
||||
Assertion::Error(ErrorAssertion {
|
||||
rule: None,
|
||||
message_contains: None,
|
||||
..
|
||||
})
|
||||
) {
|
||||
failures.push(format!(
|
||||
"{} no rule or message text",
|
||||
"invalid assertion:".red()
|
||||
));
|
||||
continue;
|
||||
}
|
||||
if !self.matches(assertion, &mut unmatched) {
|
||||
failures.push(assertion.unmatched());
|
||||
match assertion.parse() {
|
||||
Ok(assertion) => {
|
||||
if !self.matches(&assertion, &mut unmatched) {
|
||||
failures.push(assertion.unmatched());
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
failures.push(format!("{} {}", "invalid assertion:".red(), error));
|
||||
}
|
||||
}
|
||||
}
|
||||
for diagnostic in unmatched {
|
||||
@@ -255,7 +263,7 @@ impl Matcher {
|
||||
}
|
||||
}
|
||||
|
||||
fn column<T: Diagnostic>(&self, diagnostic: &T) -> OneIndexed {
|
||||
fn column<T: OldDiagnosticTrait>(&self, diagnostic: &T) -> OneIndexed {
|
||||
diagnostic
|
||||
.span()
|
||||
.and_then(|span| span.range())
|
||||
@@ -267,7 +275,7 @@ impl Matcher {
|
||||
.unwrap_or(OneIndexed::from_zero_indexed(0))
|
||||
}
|
||||
|
||||
/// Check if `assertion` matches any [`Diagnostic`]s in `unmatched`.
|
||||
/// Check if `assertion` matches any [`OldDiagnosticTrait`]s in `unmatched`.
|
||||
///
|
||||
/// If so, return `true` and remove the matched diagnostics from `unmatched`. Otherwise, return
|
||||
/// `false`.
|
||||
@@ -277,9 +285,13 @@ impl Matcher {
|
||||
///
|
||||
/// A `Revealed` assertion must match a revealed-type diagnostic, and may also match an
|
||||
/// undefined-reveal diagnostic, if present.
|
||||
fn matches<T: Diagnostic>(&self, assertion: &Assertion, unmatched: &mut Vec<&T>) -> bool {
|
||||
fn matches<T: OldDiagnosticTrait>(
|
||||
&self,
|
||||
assertion: &ParsedAssertion,
|
||||
unmatched: &mut Vec<&T>,
|
||||
) -> bool {
|
||||
match assertion {
|
||||
Assertion::Error(error) => {
|
||||
ParsedAssertion::Error(error) => {
|
||||
let position = unmatched.iter().position(|diagnostic| {
|
||||
!error.rule.is_some_and(|rule| {
|
||||
!(diagnostic.id().is_lint_named(rule) || diagnostic.id().matches(rule))
|
||||
@@ -297,7 +309,7 @@ impl Matcher {
|
||||
false
|
||||
}
|
||||
}
|
||||
Assertion::Revealed(expected_type) => {
|
||||
ParsedAssertion::Revealed(expected_type) => {
|
||||
#[cfg(not(debug_assertions))]
|
||||
let expected_type = discard_todo_metadata(&expected_type);
|
||||
|
||||
@@ -335,9 +347,9 @@ impl Matcher {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::FailuresByLine;
|
||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity, Span};
|
||||
use ruff_db::diagnostic::{DiagnosticId, OldDiagnosticTrait, Severity, Span};
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
use ruff_db::system::DbWithWritableSystem as _;
|
||||
use ruff_python_trivia::textwrap::dedent;
|
||||
use ruff_source_file::OneIndexed;
|
||||
use ruff_text_size::TextRange;
|
||||
@@ -377,7 +389,7 @@ mod tests {
|
||||
file: File,
|
||||
}
|
||||
|
||||
impl Diagnostic for TestDiagnostic {
|
||||
impl OldDiagnosticTrait for TestDiagnostic {
|
||||
fn id(&self) -> DiagnosticId {
|
||||
self.id
|
||||
}
|
||||
@@ -401,7 +413,7 @@ mod tests {
|
||||
) -> Result<(), FailuresByLine> {
|
||||
colored::control::set_override(false);
|
||||
|
||||
let mut db = crate::db::Db::setup(SystemPathBuf::from("/src"));
|
||||
let mut db = crate::db::Db::setup();
|
||||
db.write_file("/src/test.py", source).unwrap();
|
||||
let file = system_path_to_file(&db, "/src/test.py").unwrap();
|
||||
|
||||
@@ -648,6 +660,34 @@ mod tests {
|
||||
assert_ok(&result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_match_rule_no_whitespace() {
|
||||
let result = get_result(
|
||||
"x #error:[some-rule]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_ok(&result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_match_rule_lots_of_whitespace() {
|
||||
let result = get_result(
|
||||
"x # error : [ some-rule ]",
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Any message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_ok(&result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_wrong_rule() {
|
||||
let result = get_result(
|
||||
@@ -722,6 +762,20 @@ mod tests {
|
||||
assert_ok(&result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_match_column_and_rule_and_message() {
|
||||
let result = get_result(
|
||||
r#"x # error: 5 [some-rule] "Some message""#,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"Some message",
|
||||
4,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_ok(&result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_wrong_column() {
|
||||
let result = get_result(
|
||||
@@ -1031,6 +1085,30 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bare_reveal_assertion_not_allowed() {
|
||||
let source = "x # revealed: ";
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: Must specify which type should be revealed",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn column_only_error_assertion_not_allowed() {
|
||||
let source = "x # error: 1";
|
||||
@@ -1055,4 +1133,220 @@ mod tests {
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unclosed_rule_not_allowed() {
|
||||
let source = r#"x # error: 42 [some-rule "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: expected ']' to close rule code",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bad_column_number_not_allowed() {
|
||||
let source = r#"x # error: 3.14 [some-rule] "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: bad column number `3.14`",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_column_numbers_not_allowed() {
|
||||
let source = r#"x # error: 3 14 [some-rule] "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: multiple column numbers in one assertion",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_column_numbers_not_allowed_even_if_interspersed() {
|
||||
let source = r#"x # error: 3 [some-rule] 14 "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: multiple column numbers in one assertion",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn two_rule_codes_not_allowed() {
|
||||
let source = r#"x # error: [rule1] [rule2] "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("rule1"), "Some message", 0),
|
||||
ExpectedDiagnostic::new(DiagnosticId::lint("rule2"), "Some message", 0),
|
||||
],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: cannot use multiple rule codes in one assertion",
|
||||
r#"unexpected error: 1 [rule1] "Some message""#,
|
||||
r#"unexpected error: 1 [rule2] "Some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn column_number_not_allowed_after_rule_code() {
|
||||
let source = r#"x # error: [rule1] 4 "Some message""#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("rule1"),
|
||||
"Some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: column number must precede the rule code",
|
||||
r#"unexpected error: 1 [rule1] "Some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn column_number_not_allowed_after_message() {
|
||||
let source = r#"x # error: "Some message" 0"#;
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("rule1"),
|
||||
"Some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: expected '\"' to be the final character in an assertion with an error message",
|
||||
r#"unexpected error: 1 [rule1] "Some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unclosed_message_not_allowed() {
|
||||
let source = "x # error: \"Some message";
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: expected '\"' to be the final character in an assertion with an error message",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unclosed_message_not_allowed_even_after_rule_code() {
|
||||
let source = "x # error: [some-rule] \"Some message";
|
||||
let result = get_result(
|
||||
source,
|
||||
vec![ExpectedDiagnostic::new(
|
||||
DiagnosticId::lint("some-rule"),
|
||||
"some message",
|
||||
0,
|
||||
)],
|
||||
);
|
||||
|
||||
assert_fail(
|
||||
result,
|
||||
&[(
|
||||
0,
|
||||
&[
|
||||
"invalid assertion: expected '\"' to be the final character in an assertion with an error message",
|
||||
r#"unexpected error: 1 [some-rule] "some message""#,
|
||||
],
|
||||
)],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -283,7 +283,10 @@ impl EmbeddedFile<'_> {
|
||||
self.path.as_str()
|
||||
}
|
||||
|
||||
/// Returns the full path using unix file-path convention.
|
||||
pub(crate) fn full_path(&self, project_root: &SystemPath) -> SystemPathBuf {
|
||||
// Don't use `SystemPath::absolute` here because it's platform dependent
|
||||
// and we want to use unix file-path convention.
|
||||
let relative_path = self.relative_path();
|
||||
if relative_path.starts_with('/') {
|
||||
SystemPathBuf::from(relative_path)
|
||||
@@ -421,6 +424,7 @@ impl<'s> Parser<'s> {
|
||||
|
||||
fn parse_impl(&mut self) -> anyhow::Result<()> {
|
||||
const SECTION_CONFIG_SNAPSHOT: &str = "snapshot-diagnostics";
|
||||
const HTML_COMMENT_ALLOWLIST: &[&str] = &["blacken-docs:on", "blacken-docs:off"];
|
||||
const CODE_BLOCK_END: &[u8] = b"```";
|
||||
const HTML_COMMENT_END: &[u8] = b"-->";
|
||||
|
||||
@@ -433,6 +437,12 @@ impl<'s> Parser<'s> {
|
||||
let html_comment = self.cursor.as_str()[..position].trim();
|
||||
if html_comment == SECTION_CONFIG_SNAPSHOT {
|
||||
self.process_snapshot_diagnostics()?;
|
||||
} else if !HTML_COMMENT_ALLOWLIST.contains(&html_comment) {
|
||||
bail!(
|
||||
"Unknown HTML comment `{}` -- possibly a `snapshot-diagnostics` typo? \
|
||||
(Add to `HTML_COMMENT_ALLOWLIST` if this is a false positive)",
|
||||
html_comment
|
||||
);
|
||||
}
|
||||
self.cursor.skip_bytes(position + HTML_COMMENT_END.len());
|
||||
} else {
|
||||
@@ -599,10 +609,13 @@ impl<'s> Parser<'s> {
|
||||
}
|
||||
|
||||
if let Some(explicit_path) = self.explicit_path {
|
||||
if !lang.is_empty()
|
||||
let expected_extension = if lang == "python" { "py" } else { lang };
|
||||
|
||||
if !expected_extension.is_empty()
|
||||
&& lang != "text"
|
||||
&& explicit_path.contains('.')
|
||||
&& !explicit_path.ends_with(&format!(".{lang}"))
|
||||
&& !SystemPath::new(explicit_path)
|
||||
.extension()
|
||||
.is_none_or(|extension| extension.eq_ignore_ascii_case(expected_extension))
|
||||
{
|
||||
bail!(
|
||||
"File extension of test file path `{explicit_path}` in test `{test_name}` does not match language specified `{lang}` of code block"
|
||||
@@ -1801,4 +1814,41 @@ mod tests {
|
||||
(including embedded files).",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn obvious_typos_in_directives_are_detected() {
|
||||
let source = dedent(
|
||||
"
|
||||
# Some header
|
||||
<!-- snpshotttt-digggggnosstic -->
|
||||
```py
|
||||
x = 1
|
||||
```
|
||||
",
|
||||
);
|
||||
let err = super::parse("file.md", &source).expect_err("Should fail to parse");
|
||||
assert_eq!(
|
||||
err.to_string(),
|
||||
"Unknown HTML comment `snpshotttt-digggggnosstic` -- possibly a `snapshot-diagnostics` typo? \
|
||||
(Add to `HTML_COMMENT_ALLOWLIST` if this is a false positive)",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn allowlisted_html_comments_are_permitted() {
|
||||
let source = dedent(
|
||||
"
|
||||
# Some header
|
||||
<!-- blacken-docs:off -->
|
||||
|
||||
```py
|
||||
x = 1
|
||||
```
|
||||
|
||||
<!-- blacken-docs:on -->
|
||||
",
|
||||
);
|
||||
let parse_result = super::parse("file.md", &source);
|
||||
assert!(parse_result.is_ok(), "{parse_result:?}");
|
||||
}
|
||||
}
|
||||
|
||||
4
crates/red_knot_vendored/.gitignore
vendored
Normal file
4
crates/red_knot_vendored/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# Do not ignore any of the vendored files. If this pattern is not present,
|
||||
# we will gitignore the `venv/` stubs in typeshed, as there is a general
|
||||
# rule to ignore `venv/` directories in the root `.gitignore`.
|
||||
!/vendor/typeshed/**/*
|
||||
@@ -1 +1 @@
|
||||
cc8ca939c0477a49fcce0554fa1743bd5c656a11
|
||||
0b13c1deb6d0b2cdc78b246da9a0863c87dd8424
|
||||
|
||||
@@ -26,8 +26,8 @@ else:
|
||||
|
||||
MAGIC_NUMBER: bytes
|
||||
|
||||
def cache_from_source(path: str, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ...
|
||||
def source_from_cache(path: str) -> str: ...
|
||||
def cache_from_source(path: StrPath, debug_override: bool | None = None, *, optimization: Any | None = None) -> str: ...
|
||||
def source_from_cache(path: StrPath) -> str: ...
|
||||
def decode_source(source_bytes: ReadableBuffer) -> str: ...
|
||||
def spec_from_file_location(
|
||||
name: str,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user