Compare commits

65 commits — `0.6.0` ... `perf-node-`

| SHA1 |
|---|
| `82f33db5e6` |
| `f873d2ac12` |
| `ecd9e6a650` |
| `785c39927b` |
| `a35cdbb275` |
| `0c98b5949c` |
| `e5f37a8254` |
| `5c5dfc11f0` |
| `678045e1aa` |
| `dedefd73da` |
| `37a60460ed` |
| `0bd258a370` |
| `9baab8672a` |
| `c65e3310d5` |
| `38c19fb96e` |
| `abb4cdbf3d` |
| `fc811f5168` |
| `1a8f29ea41` |
| `aefaddeae7` |
| `df09045176` |
| `049cda2ff3` |
| `358792f2c9` |
| `e6d5a7af37` |
| `f5bff82e70` |
| `ab44152eb5` |
| `f4c8c7eb70` |
| `65de8f2c9b` |
| `e6226436fd` |
| `0345d46759` |
| `4d0d3b00cb` |
| `2be1c4ff04` |
| `edd86d5603` |
| `78ad7959ca` |
| `d72ecd6ded` |
| `8617a508bd` |
| `c88bd4e884` |
| `fbcda90316` |
| `169d4390cb` |
| `80ade591df` |
| `4881d32c80` |
| `81a2220ce1` |
| `900e98b584` |
| `f9d8189670` |
| `52ba94191a` |
| `96802d6a7f` |
| `dd0a7ec73e` |
| `25f5ae44c4` |
| `251efe5c41` |
| `6359e55383` |
| `a9847af6e8` |
| `d61d75d4fa` |
| `499c0bd875` |
| `4cb30b598f` |
| `aba0d83c11` |
| `c319414e54` |
| `ef1f6d98a0` |
| `b850b812de` |
| `a87b27c075` |
| `9b73532b11` |
| `d8debb7a36` |
| `bd4a947b29` |
| `f121f8b31b` |
| `80efb865e9` |
| `52d27befe8` |
| `6ed06afd28` |
````diff
@@ -6,6 +6,8 @@ exclude: |
     crates/red_knot_workspace/resources/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
     crates/ruff_notebook/resources/.*|
     crates/ruff_server/resources/.*|
     crates/ruff/resources/.*|
     crates/ruff_python_formatter/resources/.*|
     crates/ruff_python_formatter/tests/snapshots/.*|
@@ -15,7 +17,7 @@ exclude: |
 
 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.18
+    rev: v0.19
     hooks:
       - id: validate-pyproject
 
@@ -57,7 +59,7 @@ repos:
         pass_filenames: false  # This makes it a lot faster
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.7
+    rev: v0.6.1
    hooks:
       - id: ruff-format
       - id: ruff
````
CHANGELOG.md (26 changed lines)

````diff
@@ -1,5 +1,29 @@
 # Changelog
 
+## 0.6.1
+
+This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
+Ruff changed its behavior to lint and format Jupyter notebooks by default;
+however, due to an oversight, these files were still excluded by default if
+Ruff was run via pre-commit, leading to inconsistent behavior.
+This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
+
+### Preview features
+
+- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
+
+### Rule changes
+
+- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
+
+### Server
+
+- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
+
+### Other changes
+
+- \[`flake8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
+
 ## 0.6.0
 
 Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
@@ -37,7 +61,7 @@ The following rules have been stabilized and are no longer in preview:
 - [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
 - [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
 - [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
-- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`E303`)
+- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
 - [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
 - [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
 - [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
````
````diff
@@ -2,35 +2,6 @@
 
 Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
 
-- [The Basics](#the-basics)
-    - [Prerequisites](#prerequisites)
-    - [Development](#development)
-    - [Project Structure](#project-structure)
-    - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
-        - [Rule naming convention](#rule-naming-convention)
-        - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
-    - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
-    - [MkDocs](#mkdocs)
-    - [Release Process](#release-process)
-        - [Creating a new release](#creating-a-new-release)
-    - [Ecosystem CI](#ecosystem-ci)
-    - [Benchmarking and Profiling](#benchmarking-and-profiling)
-        - [CPython Benchmark](#cpython-benchmark)
-        - [Microbenchmarks](#microbenchmarks)
-            - [Benchmark-driven Development](#benchmark-driven-development)
-            - [PR Summary](#pr-summary)
-            - [Tips](#tips)
-        - [Profiling Projects](#profiling-projects)
-            - [Linux](#linux)
-            - [Mac](#mac)
-    - [`cargo dev`](#cargo-dev)
-    - [Subsystems](#subsystems)
-        - [Compilation Pipeline](#compilation-pipeline)
-        - [Import Categorization](#import-categorization)
-            - [Project root](#project-root)
-            - [Package root](#package-root)
-            - [Import categorization](#import-categorization-1)
-
 ## The Basics
 
 Ruff welcomes contributions in the form of pull requests.
@@ -333,22 +304,34 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
 ### Creating a new release
 
 1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
 
 1. Run `./scripts/release.sh`; this command will:
 
    - Generate a temporary virtual environment with `rooster`
    - Generate a changelog entry in `CHANGELOG.md`
    - Update versions in `pyproject.toml` and `Cargo.toml`
    - Update references to versions in the `README.md` and documentation
    - Display contributors for the release
 
 1. The changelog should then be editorialized for consistency
 
    - Often labels will be missing from pull requests, so they will need to be manually organized into the proper section
    - Changes should be edited to be user-facing descriptions, avoiding internal details
 
 1. Highlight any breaking changes in `BREAKING_CHANGES.md`
 
 1. Run `cargo check`. This should update the lock file with new versions.
 
 1. Create a pull request with the changelog and version updates
 
 1. Merge the PR
 
 1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
 
    - The new version number (without starting `v`)
 
 1. The release workflow will do the following:
 
    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
       uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
       make sure you're [re-running all the failed
@@ -359,14 +342,25 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
    1. Attach artifacts to draft GitHub release
    1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
       downstream jobs manually if needed.
 
 1. Verify the GitHub release:
 
    1. The Changelog should match the content of `CHANGELOG.md`
-   1. Append the contributors from the `bump.sh` script
+   1. Append the contributors from the `scripts/release.sh` script
 
 1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
 
    1. One can determine if an update is needed when
      `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
    1. Once run successfully, you should follow the link in the output to create a PR.
 
-1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
+1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
+   [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
+   the release instructions in those repositories. `ruff-lsp` should always be updated
+   before `ruff-vscode`.
+
+   This step is generally not required for a patch release, but should always be done
+   for a minor release.
 
 ## Ecosystem CI
@@ -389,7 +383,7 @@ We have several ways of benchmarking and profiling Ruff:
 - Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
 - Profiling the linter on either the microbenchmarks or entire projects
 
-> **Note**
+> [!NOTE]
 > When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
 > applications, like web browsers). You may also want to switch your CPU to a "performance"
 > mode, if it exists, especially when benchmarking short-lived processes.
````
Cargo.lock (generated; 129 changed lines)

````diff
@@ -228,9 +228,9 @@ dependencies = [
 
 [[package]]
 name = "camino"
-version = "1.1.7"
+version = "1.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
+checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
 
 [[package]]
 name = "cast"
@@ -270,6 +270,12 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
 
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
 [[package]]
 name = "chic"
 version = "1.2.2"
@@ -320,9 +326,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.15"
+version = "4.5.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc"
+checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -395,7 +401,7 @@ version = "3.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2f8c93eb5f77c9050c7750e14f13ef1033a40a0aac70c6371535b6763a01438c"
 dependencies = [
- "nix",
+ "nix 0.28.0",
  "terminfo",
  "thiserror",
  "which",
@@ -612,12 +618,12 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
 
 [[package]]
 name = "ctrlc"
-version = "3.4.4"
+version = "3.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
+checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
 dependencies = [
- "nix",
- "windows-sys 0.52.0",
+ "nix 0.29.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -1047,9 +1053,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
 dependencies = [
  "equivalent",
  "hashbrown",
@@ -1215,9 +1221,9 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
 
 [[package]]
 name = "js-sys"
-version = "0.3.69"
+version = "0.3.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
+checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a"
 dependencies = [
  "wasm-bindgen",
 ]
@@ -1250,9 +1256,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.155"
+version = "0.2.157"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
+checksum = "374af5f94e54fa97cf75e945cce8a6b201e88a1a07e688b47dfd2a59c66dbd86"
 
 [[package]]
 name = "libcst"
@@ -1388,6 +1394,16 @@ dependencies = [
  "libmimalloc-sys",
 ]
 
+[[package]]
+name = "minicov"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c71e683cd655513b99affab7d317deb690528255a0d5f717f1024093c12b169"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
 [[package]]
 name = "minimal-lexical"
 version = "0.2.1"
@@ -1438,7 +1454,19 @@ checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
 dependencies = [
  "bitflags 2.6.0",
  "cfg-if",
- "cfg_aliases",
+ "cfg_aliases 0.1.1",
  "libc",
 ]
 
+[[package]]
+name = "nix"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
+dependencies = [
+ "bitflags 2.6.0",
+ "cfg-if",
+ "cfg_aliases 0.2.1",
+ "libc",
+]
+
@@ -1525,9 +1553,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
 
 [[package]]
 name = "ordermap"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c81974681ab4f0cc9fe49cad56f821d1cc67a08cd2caa9b5d58b0adaa5dd36d"
+checksum = "61d7d835be600a7ac71b24e39c92fe6fad9e818b3c71bfc379e3ba65e327d77f"
 dependencies = [
  "indexmap",
 ]
@@ -1904,6 +1932,8 @@ dependencies = [
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
+ "smallvec",
+ "static_assertions",
  "tempfile",
  "tracing",
  "walkdir",
@@ -1923,7 +1953,6 @@ dependencies = [
 "red_knot_python_semantic",
 "red_knot_workspace",
 "ruff_db",
 "ruff_linter",
 "ruff_notebook",
 "ruff_python_ast",
 "ruff_source_file",
@@ -2060,7 +2089,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.6.0"
+version = "0.6.1"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2252,7 +2281,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.6.0"
+version = "0.6.1"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",
@@ -2572,7 +2601,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.6.0"
+version = "0.6.1"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -2712,7 +2741,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
 [[package]]
 name = "salsa"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "append-only-vec",
  "arc-swap",
@@ -2732,12 +2761,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.1.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 
 [[package]]
 name = "salsa-macros"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -2799,9 +2828,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 
 [[package]]
 name = "serde"
-version = "1.0.206"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
+checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
 dependencies = [
  "serde_derive",
 ]
@@ -2819,9 +2848,9 @@ dependencies = [
 
 [[package]]
 name = "serde_derive"
-version = "1.0.206"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
+checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2841,9 +2870,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.124"
+version = "1.0.125"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
+checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
 dependencies = [
  "itoa",
  "memchr",
@@ -3002,9 +3031,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 
 [[package]]
 name = "syn"
-version = "2.0.74"
+version = "2.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
+checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3526,19 +3555,20 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
+checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
 dependencies = [
  "cfg-if",
+ "once_cell",
  "wasm-bindgen-macro",
 ]
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
+checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
 dependencies = [
  "bumpalo",
  "log",
@@ -3551,9 +3581,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.42"
+version = "0.4.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
+checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed"
 dependencies = [
  "cfg-if",
  "js-sys",
@@ -3563,9 +3593,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
+checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -3573,9 +3603,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
+checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3586,18 +3616,19 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
+checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
 
 [[package]]
 name = "wasm-bindgen-test"
-version = "0.3.42"
+version = "0.3.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9bf62a58e0780af3e852044583deee40983e5886da43a271dd772379987667b"
+checksum = "68497a05fb21143a08a7d24fc81763384a3072ee43c44e86aad1744d6adef9d9"
 dependencies = [
  "console_error_panic_hook",
  "js-sys",
+ "minicov",
  "scoped-tls",
  "wasm-bindgen",
  "wasm-bindgen-futures",
@@ -3606,9 +3637,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-test-macro"
-version = "0.3.42"
+version = "0.3.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7f89739351a2e03cb94beb799d47fb2cac01759b40ec441f7de39b00cbf7ef0"
+checksum = "4b8220be1fa9e4c889b30fd207d4906657e7e90b12e0e6b0c8b8d8709f5de021"
 dependencies = [
  "proc-macro2",
  "quote",
````
````diff
@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
````
````diff
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.6.0/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.6.0/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.6.0
+  rev: v0.6.1
   hooks:
     # Run the linter.
     - id: ruff
````
````diff
@@ -5,8 +5,8 @@ use colored::Colorize;
 use std::fmt;
 use std::fs::File;
 use std::io::BufWriter;
-use tracing::log::LevelFilter;
 use tracing::{Event, Subscriber};
+use tracing_subscriber::filter::LevelFilter;
 use tracing_subscriber::fmt::format::Writer;
 use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
 use tracing_subscriber::registry::LookupSpan;
@@ -60,10 +60,10 @@ pub(crate) enum VerbosityLevel {
 impl VerbosityLevel {
     const fn level_filter(self) -> LevelFilter {
         match self {
-            VerbosityLevel::Default => LevelFilter::Warn,
-            VerbosityLevel::Verbose => LevelFilter::Info,
-            VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
-            VerbosityLevel::Trace => LevelFilter::Trace,
+            VerbosityLevel::Default => LevelFilter::WARN,
+            VerbosityLevel::Verbose => LevelFilter::INFO,
+            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
+            VerbosityLevel::Trace => LevelFilter::TRACE,
         }
     }
@@ -88,7 +88,7 @@ pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuar
     match level {
         VerbosityLevel::Default => {
             // Show warning traces
-            EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
+            EnvFilter::default().add_directive(LevelFilter::WARN.into())
         }
         level => {
             let level_filter = level.level_filter();
````
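The hunks above migrate from the `log` crate's enum-style `LevelFilter::Warn` variants to `tracing_subscriber`'s const filters (`LevelFilter::WARN`, etc.). A minimal sketch of that mapping, assuming `tracing-subscriber` is a dependency (`filter_for_verbosity` is a hypothetical helper, not part of Ruff):

```rust
use tracing_subscriber::filter::LevelFilter;

// Map a count of `-v` flags to a tracing-subscriber filter. Note that
// tracing-subscriber exposes consts (WARN, INFO, ...) where the `log`
// crate uses enum variants (Warn, Info, ...).
fn filter_for_verbosity(verbose_flags: u8) -> LevelFilter {
    match verbose_flags {
        0 => LevelFilter::WARN,
        1 => LevelFilter::INFO,
        2 => LevelFilter::DEBUG,
        _ => LevelFilter::TRACE,
    }
}

fn main() {
    assert_eq!(filter_for_verbosity(0), LevelFilter::WARN);
    assert_eq!(filter_for_verbosity(9), LevelFilter::TRACE);
}
```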
````diff
@@ -127,7 +127,6 @@ impl TestCase {
     fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
         let package = self.db().workspace().package(self.db(), path).unwrap();
         let files = package.files(self.db());
-        let files = files.read();
         let mut collected: Vec<_> = files.into_iter().collect();
         collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
         collected
````
````diff
@@ -29,6 +29,8 @@ salsa = { workspace = true }
 tracing = { workspace = true }
 rustc-hash = { workspace = true }
 hashbrown = { workspace = true }
+smallvec = { workspace = true }
+static_assertions = { workspace = true }
 
 [build-dependencies]
 path-slash = { workspace = true }
````
````diff
@@ -1,15 +1,18 @@
+use ruff_db::files::File;
 use ruff_db::{Db as SourceDb, Upcast};
 
 /// Database giving access to semantic information about a Python program.
 #[salsa::db]
-pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
+pub trait Db: SourceDb + Upcast<dyn SourceDb> {
+    fn is_file_open(&self, file: File) -> bool;
+}
 
 #[cfg(test)]
 pub(crate) mod tests {
     use std::sync::Arc;
 
     use crate::module_resolver::vendored_typeshed_stubs;
-    use ruff_db::files::Files;
+    use ruff_db::files::{File, Files};
     use ruff_db::system::{DbWithTestSystem, System, TestSystem};
     use ruff_db::vendored::VendoredFileSystem;
     use ruff_db::{Db as SourceDb, Upcast};
@@ -91,7 +94,11 @@ pub(crate) mod tests {
 }
 
 #[salsa::db]
-impl Db for TestDb {}
+impl Db for TestDb {
+    fn is_file_open(&self, file: File) -> bool {
+        !file.path(self).is_vendored_path()
+    }
+}
 
 #[salsa::db]
 impl salsa::Database for TestDb {
````
````diff
@@ -168,6 +168,24 @@ impl ModuleName {
         };
         Some(Self(name))
     }
 
+    /// Extend `self` with the components of `other`
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use red_knot_python_semantic::ModuleName;
+    ///
+    /// let mut module_name = ModuleName::new_static("foo").unwrap();
+    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
+    /// assert_eq!(&module_name, "foo.bar");
+    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
+    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
+    /// ```
+    pub fn extend(&mut self, other: &ModuleName) {
+        self.0.push('.');
+        self.0.push_str(other);
+    }
 }
 
 impl Deref for ModuleName {
````
````diff
@@ -2,7 +2,7 @@ use std::iter::FusedIterator;
 
 pub(crate) use module::Module;
 pub use resolver::resolve_module;
-pub(crate) use resolver::SearchPaths;
+pub(crate) use resolver::{file_to_module, SearchPaths};
 use ruff_db::system::SystemPath;
 pub use typeshed::vendored_typeshed_stubs;
 
@@ -77,3 +77,9 @@ pub enum ModuleKind {
     /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
     Package,
 }
+
+impl ModuleKind {
+    pub const fn is_package(self) -> bool {
+        matches!(self, ModuleKind::Package)
+    }
+}
````
````diff
@@ -41,7 +41,7 @@ pub(crate) fn resolve_module_query<'db>(
 
     let module = Module::new(name.clone(), kind, search_path, module_file);
 
-    tracing::debug!(
+    tracing::trace!(
         "Resolved module '{name}' to '{path}'.",
         path = module_file.path(db)
     );
@@ -172,11 +172,11 @@ impl SearchPaths {
             static_paths.push(search_path);
         }
 
-        tracing::debug!("Adding static search path '{src_root}'");
+        tracing::debug!("Adding first-party search path '{src_root}'");
         static_paths.push(SearchPath::first_party(system, src_root)?);
 
         static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
-            tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
+            tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
 
             let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
             files.try_add_root(
@@ -192,7 +192,7 @@ impl SearchPaths {
         let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
 
         for path in site_packages_paths {
-            tracing::debug!("Adding site-package path '{path}'");
+            tracing::debug!("Adding site-packages search path '{path}'");
             let search_path = SearchPath::site_packages(system, path)?;
             files.try_add_root(
                 db.upcast(),
````
````diff
@@ -1,4 +1,4 @@
-use ruff_python_ast::{AnyNodeRef, NodeKind};
+use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
 use ruff_text_size::{Ranged, TextRange};
 
 /// Compact key for a node for use in a hash map.
@@ -11,7 +11,19 @@ pub(super) struct NodeKey {
 }
 
 impl NodeKey {
-    pub(super) fn from_node<'a, N>(node: N) -> Self
+    #[inline]
+    pub(super) fn from_node<'a, N>(node: &N) -> Self
+    where
+        N: AstNode,
+    {
+        NodeKey {
+            kind: node.kind(),
+            range: node.range(),
+        }
+    }
+
+    #[inline]
+    pub(super) fn from_ref<'a, N>(node: N) -> Self
     where
         N: Into<AnyNodeRef<'a>>,
     {
````
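The hunk above splits `NodeKey` construction into a by-reference `from_node` (for typed AST nodes) and `from_ref` (for anything convertible to `AnyNodeRef`). The underlying idea — identifying an AST node by its kind plus source range instead of holding the node — can be sketched roughly as follows (illustrative names only, not Ruff's actual definitions):

```rust
use std::collections::HashMap;

// Illustrative stand-ins for an AST node kind and a source range.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum Kind {
    Name,
    Assign,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Range {
    start: u32,
    end: u32,
}

// A compact, Copy key: two small fields hash quickly and avoid borrowing
// the AST, so maps keyed by nodes can outlive any particular traversal.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct CompactNodeKey {
    kind: Kind,
    range: Range,
}

fn main() {
    let mut ids: HashMap<CompactNodeKey, u32> = HashMap::new();
    let key = CompactNodeKey {
        kind: Kind::Name,
        range: Range { start: 4, end: 5 },
    };
    ids.insert(key, 0);
    assert_eq!(ids.get(&key), Some(&0));
}
```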
````diff
@@ -24,7 +24,7 @@ impl Program {
             search_paths,
         } = settings;
 
-        tracing::info!("Target version: {target_version}");
+        tracing::info!("Target version: Python {target_version}");
 
         let search_paths = SearchPaths::from_settings(db, search_paths)
             .with_context(|| "Invalid search path settings")?;
````
````diff
@@ -16,10 +16,9 @@ use crate::semantic_index::expression::Expression;
 use crate::semantic_index::symbol::{
     FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
 };
-use crate::semantic_index::use_def::UseDefMap;
 use crate::Db;
 
+pub(crate) use self::use_def::UseDefMap;
+
 pub mod ast_ids;
 mod builder;
 pub mod definition;
@@ -27,6 +26,8 @@ pub mod expression;
 pub mod symbol;
 mod use_def;
 
+pub(crate) use self::use_def::{DefinitionWithConstraints, DefinitionWithConstraintsIterator};
+
 type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
 
 /// Returns the semantic index for `file`.
@@ -153,6 +154,10 @@ impl<'db> SemanticIndex<'db> {
         &self.scopes[id]
     }
 
+    pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
+        self.scope_ids_by_scope.iter().copied()
+    }
+
     /// Returns the id of the parent scope.
     pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
         let scope = self.scope(scope_id);
@@ -310,12 +315,29 @@ mod tests {
     use ruff_text_size::{Ranged, TextRange};
 
     use crate::db::tests::TestDb;
-    use crate::semantic_index::ast_ids::HasScopedUseId;
-    use crate::semantic_index::definition::DefinitionKind;
-    use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
+    use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
+    use crate::semantic_index::definition::{Definition, DefinitionKind};
+    use crate::semantic_index::symbol::{
+        FileScopeId, Scope, ScopeKind, ScopedSymbolId, SymbolTable,
+    };
+    use crate::semantic_index::use_def::UseDefMap;
     use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
     use crate::Db;
 
+    impl UseDefMap<'_> {
+        fn first_public_definition(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
+            self.public_definitions(symbol)
+                .next()
+                .map(|constrained_definition| constrained_definition.definition)
+        }
+
+        fn first_use_definition(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
+            self.use_definitions(use_id)
+                .next()
+                .map(|constrained_definition| constrained_definition.definition)
+        }
+    }
+
     struct TestCase {
         db: TestDb,
         file: File,
@@ -374,9 +396,7 @@ mod tests {
         let foo = global_table.symbol_id_by_name("foo").unwrap();
 
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.public_definitions(foo) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def.first_public_definition(foo).unwrap();
         assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
     }
 
@@ -411,13 +431,13 @@ mod tests {
         );
 
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.public_definitions(
-            global_table
-                .symbol_id_by_name("foo")
-                .expect("symbol to exist"),
-        ) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .first_public_definition(
+                global_table
+                    .symbol_id_by_name("foo")
+                    .expect("symbol to exist"),
+            )
+            .unwrap();
         assert!(matches!(
             definition.node(&db),
             DefinitionKind::ImportFrom(_)
@@ -438,17 +458,34 @@ mod tests {
             "a symbol used but not defined in a scope should have only the used flag"
         );
         let use_def = use_def_map(&db, scope);
-        let [definition] =
-            use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
-        else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .first_public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
+            .unwrap();
         assert!(matches!(
             definition.node(&db),
             DefinitionKind::Assignment(_)
         ));
     }
 
+    #[test]
+    fn augmented_assignment() {
+        let TestCase { db, file } = test_case("x += 1");
+        let scope = global_scope(&db, file);
+        let global_table = symbol_table(&db, scope);
+
+        assert_eq!(names(&global_table), vec!["x"]);
+
+        let use_def = use_def_map(&db, scope);
+        let definition = use_def
+            .first_public_definition(global_table.symbol_id_by_name("x").unwrap())
+            .unwrap();
+
+        assert!(matches!(
+            definition.node(&db),
+            DefinitionKind::AugmentedAssignment(_)
+        ));
+    }
+
     #[test]
     fn class_scope() {
         let TestCase { db, file } = test_case(
@@ -477,11 +514,9 @@ y = 2
         assert_eq!(names(&class_table), vec!["x"]);
 
         let use_def = index.use_def_map(class_scope_id);
-        let [definition] =
-            use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
-        else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .first_public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
+            .unwrap();
         assert!(matches!(
             definition.node(&db),
             DefinitionKind::Assignment(_)
@@ -515,19 +550,116 @@ y = 2
         assert_eq!(names(&function_table), vec!["x"]);
 
         let use_def = index.use_def_map(function_scope_id);
-        let [definition] = use_def.public_definitions(
-            function_table
-                .symbol_id_by_name("x")
-                .expect("symbol exists"),
-        ) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .first_public_definition(
+                function_table
+                    .symbol_id_by_name("x")
+                    .expect("symbol exists"),
+            )
+            .unwrap();
         assert!(matches!(
             definition.node(&db),
             DefinitionKind::Assignment(_)
         ));
     }
 
+    #[test]
+    fn function_parameter_symbols() {
+        let TestCase { db, file } = test_case(
+            "
+def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
+    pass
+",
+        );
+
+        let index = semantic_index(&db, file);
+        let global_table = symbol_table(&db, global_scope(&db, file));
+
+        assert_eq!(names(&global_table), vec!["f", "str", "int"]);
+
+        let [(function_scope_id, _function_scope)] = index
+            .child_scopes(FileScopeId::global())
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("Expected a function scope")
+        };
+
+        let function_table = index.symbol_table(function_scope_id);
+        assert_eq!(
+            names(&function_table),
+            vec!["a", "b", "c", "args", "d", "kwargs"],
+        );
+
+        let use_def = index.use_def_map(function_scope_id);
+        for name in ["a", "b", "c", "d"] {
+            let definition = use_def
+                .first_public_definition(
+                    function_table
+                        .symbol_id_by_name(name)
+                        .expect("symbol exists"),
+                )
+                .unwrap();
+            assert!(matches!(
+                definition.node(&db),
+                DefinitionKind::ParameterWithDefault(_)
+            ));
+        }
+        for name in ["args", "kwargs"] {
+            let definition = use_def
+                .first_public_definition(
+                    function_table
+                        .symbol_id_by_name(name)
+                        .expect("symbol exists"),
+                )
+                .unwrap();
+            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
+        }
+    }
+
+    #[test]
+    fn lambda_parameter_symbols() {
+        let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");
+
+        let index = semantic_index(&db, file);
+        let global_table = symbol_table(&db, global_scope(&db, file));
+
+        assert!(names(&global_table).is_empty());
+
+        let [(lambda_scope_id, _lambda_scope)] = index
+            .child_scopes(FileScopeId::global())
+            .collect::<Vec<_>>()[..]
+        else {
+            panic!("Expected a lambda scope")
+        };
+
+        let lambda_table = index.symbol_table(lambda_scope_id);
+        assert_eq!(
+            names(&lambda_table),
+            vec!["a", "b", "c", "args", "d", "kwargs"],
+        );
+
+        let use_def = index.use_def_map(lambda_scope_id);
+        for name in ["a", "b", "c", "d"] {
+            let definition = use_def
+                .first_public_definition(
+                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
+                )
+                .unwrap();
+            assert!(matches!(
+                definition.node(&db),
+                DefinitionKind::ParameterWithDefault(_)
+            ));
+        }
+        for name in ["args", "kwargs"] {
+            let definition = use_def
+                .first_public_definition(
+                    lambda_table.symbol_id_by_name(name).expect("symbol exists"),
+                )
+                .unwrap();
+            assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
+        }
+    }
+
     /// Test case to validate that the comprehension scope is correctly identified and that the target
     /// variable is defined only in the comprehension scope and not in the global scope.
     #[test]
@@ -594,9 +726,7 @@ y = 2
         let element_use_id =
             element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
 
-        let [definition] = use_def.use_definitions(element_use_id) else {
-            panic!("expected one definition")
-        };
+        let definition = use_def.first_use_definition(element_use_id).unwrap();
         let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
             panic!("expected generator definition")
         };
@@ -693,13 +823,13 @@ def func():
         assert_eq!(names(&func2_table), vec!["y"]);
 
         let use_def = index.use_def_map(FileScopeId::global());
-        let [definition] = use_def.public_definitions(
-            global_table
-                .symbol_id_by_name("func")
-                .expect("symbol exists"),
-        ) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def
+            .first_public_definition(
+                global_table
+                    .symbol_id_by_name("func")
+                    .expect("symbol exists"),
+            )
+            .unwrap();
         assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
     }
 
@@ -800,9 +930,7 @@ class C[T]:
         };
         let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
         let use_def = use_def_map(&db, scope);
-        let [definition] = use_def.use_definitions(x_use_id) else {
-            panic!("expected one definition");
-        };
+        let definition = use_def.first_use_definition(x_use_id).unwrap();
         let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
             panic!("should be an assignment definition")
         };
@@ -893,4 +1021,28 @@ def x():
             vec!["bar", "foo", "Test", "<module>"]
         );
     }
+
+    #[test]
+    fn match_stmt_symbols() {
+        let TestCase { db, file } = test_case(
+            "
+match subject:
+    case a: ...
+    case [b, c, *d]: ...
+    case e as f: ...
+    case {'x': g, **h}: ...
+    case Foo(i, z=j): ...
+    case k | l: ...
+    case _: ...
+",
+        );
+
+        let global_table = symbol_table(&db, global_scope(&db, file));
+
+        assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
+        assert_eq!(
+            names(&global_table),
+            vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
+        );
+    }
 }
````
````diff
@@ -197,12 +197,14 @@ pub(crate) mod node_key {
     pub(crate) struct ExpressionNodeKey(NodeKey);
 
     impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
+        #[inline]
         fn from(value: ast::ExpressionRef<'_>) -> Self {
-            Self(NodeKey::from_node(value))
+            Self(NodeKey::from_ref(value))
         }
     }
 
     impl From<&ast::Expr> for ExpressionNodeKey {
+        #[inline]
         fn from(value: &ast::Expr) -> Self {
             Self(NodeKey::from_node(value))
         }
````
````diff
@@ -7,7 +7,8 @@ use ruff_db::parsed::ParsedModule;
 use ruff_index::IndexVec;
 use ruff_python_ast as ast;
 use ruff_python_ast::name::Name;
-use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
+use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
+use ruff_python_ast::AnyParameterRef;
 
 use crate::ast_node_ref::AstNodeRef;
 use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
@@ -155,7 +156,7 @@ impl<'db> SemanticIndexBuilder<'db> {
         self.current_use_def_map_mut().restore(state);
     }
 
-    fn flow_merge(&mut self, state: &FlowSnapshot) {
+    fn flow_merge(&mut self, state: FlowSnapshot) {
         self.current_use_def_map_mut().merge(state);
     }
@@ -195,9 +196,16 @@ impl<'db> SemanticIndexBuilder<'db> {
         definition
     }
 
+    fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
+        let expression = self.add_standalone_expression(constraint_node);
+        self.current_use_def_map_mut().record_constraint(expression);
+
+        expression
+    }
+
     /// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
     /// standalone (type narrowing tests, RHS of an assignment.)
-    fn add_standalone_expression(&mut self, expression_node: &ast::Expr) {
+    fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
         let expression = Expression::new(
             self.db,
             self.file,
@@ -210,6 +218,7 @@ impl<'db> SemanticIndexBuilder<'db> {
         );
         self.expressions_by_node
             .insert(expression_node.into(), expression);
+        expression
     }
 
     fn with_type_params(
@@ -301,6 +310,23 @@ impl<'db> SemanticIndexBuilder<'db> {
         }
     }
 
+    fn declare_parameter(&mut self, parameter: AnyParameterRef) {
+        let symbol =
+            self.add_or_update_symbol(parameter.name().id().clone(), SymbolFlags::IS_DEFINED);
+
+        let definition = self.add_definition(symbol, parameter);
+
+        if let AnyParameterRef::NonVariadic(with_default) = parameter {
+            // Insert a mapping from the parameter to the same definition.
+            // This ensures that calling `HasTy::ty` on the inner parameter returns
+            // a valid type (and doesn't panic)
+            self.definitions_by_node.insert(
+                DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
+                definition,
+            );
+        }
+    }
+
     pub(super) fn build(mut self) -> SemanticIndex<'db> {
         let module = self.module;
         self.visit_body(module.suite());
@@ -368,6 +394,16 @@ where
             .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
         self.add_definition(symbol, function_def);
 
+        // The default value of the parameters needs to be evaluated in the
+        // enclosing scope.
+        for default in function_def
+            .parameters
+            .iter_non_variadic_params()
+            .filter_map(|param| param.default.as_deref())
+        {
+            self.visit_expr(default);
+        }
+
         self.with_type_params(
             NodeWithScopeRef::FunctionTypeParameters(function_def),
             function_def.type_params.as_deref(),
@@ -378,6 +414,12 @@ where
                 }
 
                 builder.push_scope(NodeWithScopeRef::Function(function_def));
+
+                // Add symbols and definitions for the parameters to the function scope.
+                for parameter in &*function_def.parameters {
+                    builder.declare_parameter(parameter);
+                }
+
                 builder.visit_body(&function_def.body);
                 builder.pop_scope()
             },
@@ -453,9 +495,24 @@ where
                 self.visit_expr(&node.target);
                 self.current_assignment = None;
             }
+            ast::Stmt::AugAssign(
+                aug_assign @ ast::StmtAugAssign {
+                    range: _,
+                    target,
+                    op: _,
+                    value,
+                },
+            ) => {
+                debug_assert!(self.current_assignment.is_none());
+                self.visit_expr(value);
+                self.current_assignment = Some(aug_assign.into());
+                self.visit_expr(target);
+                self.current_assignment = None;
+            }
             ast::Stmt::If(node) => {
                 self.visit_expr(&node.test);
                 let pre_if = self.flow_snapshot();
+                self.add_constraint(&node.test);
                 self.visit_body(&node.body);
                 let mut post_clauses: Vec<FlowSnapshot> = vec![];
                 for clause in &node.elif_else_clauses {
@@ -468,7 +525,7 @@ where
                     self.visit_elif_else_clause(clause);
                 }
                 for post_clause_state in post_clauses {
-                    self.flow_merge(&post_clause_state);
+                    self.flow_merge(post_clause_state);
                 }
                 let has_else = node
                     .elif_else_clauses
@@ -477,7 +534,7 @@ where
                 if !has_else {
                     // if there's no else clause, then it's possible we took none of the branches,
                     // and the pre_if state can reach here
-                    self.flow_merge(&pre_if);
+                    self.flow_merge(pre_if);
                 }
             }
             ast::Stmt::While(node) => {
@@ -495,13 +552,13 @@ where
 
                 // We may execute the `else` clause without ever executing the body, so merge in
                 // the pre-loop state before visiting `else`.
-                self.flow_merge(&pre_loop);
+                self.flow_merge(pre_loop);
                 self.visit_body(&node.orelse);
 
                 // Breaking out of a while loop bypasses the `else` clause, so merge in the break
                 // states after visiting `else`.
                 for break_state in break_states {
-                    self.flow_merge(&break_state);
+                    self.flow_merge(break_state);
                }
             }
             ast::Stmt::Break(_) => {
````
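The `flow_merge` hunks above switch from borrowing to taking the `FlowSnapshot` by value, and the comments describe the pattern: snapshot the flow state before a branch, then merge each branch's final state back in as an alternative. A minimal sketch of that snapshot/merge discipline, with simplified hypothetical types (not Ruff's actual implementation):

```rust
// Each symbol maps to the set of definition IDs that may be live.
#[derive(Clone, Debug, PartialEq)]
struct FlowSnapshot {
    live_definitions: Vec<Vec<u32>>,
}

struct FlowGraphBuilder {
    state: FlowSnapshot,
}

impl FlowGraphBuilder {
    fn flow_snapshot(&self) -> FlowSnapshot {
        self.state.clone()
    }

    // Taking ownership (as in the diff) lets callers hand over snapshots
    // they no longer need, avoiding a second clone at each merge point.
    fn flow_merge(&mut self, other: FlowSnapshot) {
        for (current, incoming) in self
            .state
            .live_definitions
            .iter_mut()
            .zip(other.live_definitions)
        {
            for definition in incoming {
                if !current.contains(&definition) {
                    current.push(definition);
                }
            }
        }
    }
}

fn main() {
    // `x` starts with definition 0; a branch redefines it as definition 1.
    let mut builder = FlowGraphBuilder {
        state: FlowSnapshot { live_definitions: vec![vec![0]] },
    };
    let pre_branch = builder.flow_snapshot();
    builder.state.live_definitions[0] = vec![1]; // inside the branch
    builder.flow_merge(pre_branch); // no else: the branch may be skipped
    assert_eq!(builder.state.live_definitions[0], vec![1, 0]);
}
```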
````diff
@@ -520,12 +577,21 @@
 
         match expr {
             ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
-                let flags = match ctx {
+                let mut flags = match ctx {
                     ast::ExprContext::Load => SymbolFlags::IS_USED,
                     ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
                     ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
                     ast::ExprContext::Invalid => SymbolFlags::empty(),
                 };
+                if matches!(
+                    self.current_assignment,
+                    Some(CurrentAssignment::AugAssign(_))
+                ) && !ctx.is_invalid()
+                {
+                    // For augmented assignment, the target expression is also used, so we should
+                    // record that as a use.
+                    flags |= SymbolFlags::IS_USED;
+                }
                 let symbol = self.add_or_update_symbol(id.clone(), flags);
                 if flags.contains(SymbolFlags::IS_DEFINED) {
                     match self.current_assignment {
@@ -541,6 +607,9 @@
                         Some(CurrentAssignment::AnnAssign(ann_assign)) => {
                             self.add_definition(symbol, ann_assign);
                         }
+                        Some(CurrentAssignment::AugAssign(aug_assign)) => {
+                            self.add_definition(symbol, aug_assign);
+                        }
                         Some(CurrentAssignment::Named(named)) => {
                             // TODO(dhruvmanila): If the current scope is a comprehension, then the
                             // named expression is implicitly nonlocal. This is yet to be
@@ -574,9 +643,25 @@
             }
             ast::Expr::Lambda(lambda) => {
                 if let Some(parameters) = &lambda.parameters {
+                    // The default value of the parameters needs to be evaluated in the
+                    // enclosing scope.
+                    for default in parameters
+                        .iter_non_variadic_params()
+                        .filter_map(|param| param.default.as_deref())
+                    {
+                        self.visit_expr(default);
+                    }
                     self.visit_parameters(parameters);
                 }
                 self.push_scope(NodeWithScopeRef::Lambda(lambda));
+
+                // Add symbols and definitions for the parameters to the lambda scope.
+                if let Some(parameters) = &lambda.parameters {
+                    for parameter in &**parameters {
+                        self.declare_parameter(parameter);
+                    }
+                }
+
                 self.visit_expr(lambda.body.as_ref());
             }
             ast::Expr::If(ast::ExprIf {
@@ -591,7 +676,7 @@
                 let post_body = self.flow_snapshot();
                 self.flow_restore(pre_if);
                 self.visit_expr(orelse);
-                self.flow_merge(&post_body);
+                self.flow_merge(post_body);
             }
             ast::Expr::ListComp(
                 list_comprehension @ ast::ExprListComp {
@@ -654,12 +739,40 @@
             self.pop_scope();
         }
     }
 
+    fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
+        // Intentionally avoid walking default expressions, as we handle them in the enclosing
+        // scope.
+        for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
+            self.visit_parameter(parameter);
+        }
+    }
+
+    fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
+        if let ast::Pattern::MatchAs(ast::PatternMatchAs {
+            name: Some(name), ..
+        })
+        | ast::Pattern::MatchStar(ast::PatternMatchStar {
+            name: Some(name),
+            range: _,
+        })
+        | ast::Pattern::MatchMapping(ast::PatternMatchMapping {
+            rest: Some(name), ..
+        }) = pattern
+        {
+            // TODO(dhruvmanila): Add definition
+            self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
+        }
+
+        walk_pattern(self, pattern);
+    }
 }
 
 #[derive(Copy, Clone, Debug)]
 enum CurrentAssignment<'a> {
     Assign(&'a ast::StmtAssign),
     AnnAssign(&'a ast::StmtAnnAssign),
+    AugAssign(&'a ast::StmtAugAssign),
     Named(&'a ast::ExprNamed),
     Comprehension {
         node: &'a ast::Comprehension,
@@ -679,6 +792,12 @@ impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
     }
 }
 
+impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
+    fn from(value: &'a ast::StmtAugAssign) -> Self {
+        Self::AugAssign(value)
+    }
+}
+
 impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
     fn from(value: &'a ast::ExprNamed) -> Self {
         Self::Named(value)
````
@@ -44,7 +44,9 @@ pub(crate) enum DefinitionNodeRef<'a> {
    NamedExpression(&'a ast::ExprNamed),
    Assignment(AssignmentDefinitionNodeRef<'a>),
    AnnotatedAssignment(&'a ast::StmtAnnAssign),
    AugmentedAssignment(&'a ast::StmtAugAssign),
    Comprehension(ComprehensionDefinitionNodeRef<'a>),
    Parameter(ast::AnyParameterRef<'a>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {

@@ -71,6 +73,12 @@ impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
    fn from(node: &'a ast::StmtAugAssign) -> Self {
        Self::AugmentedAssignment(node)
    }
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
    fn from(node_ref: &'a ast::Alias) -> Self {
        Self::Import(node_ref)

@@ -95,6 +103,12 @@ impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
    }
}

impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
    fn from(node: ast::AnyParameterRef<'a>) -> Self {
        Self::Parameter(node)
    }
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
    pub(crate) node: &'a ast::StmtImportFrom,

@@ -144,12 +158,23 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::AnnotatedAssignment(assign) => {
                DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
            }
            DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
                DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
            }
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
                DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    first,
                })
            }
            DefinitionNodeRef::Parameter(parameter) => match parameter {
                ast::AnyParameterRef::Variadic(parameter) => {
                    DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
                }
                ast::AnyParameterRef::NonVariadic(parameter) => {
                    DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
                }
            },
        }
    }

@@ -167,7 +192,12 @@ impl DefinitionNodeRef<'_> {
                target,
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
            Self::AugmentedAssignment(node) => node.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
            Self::Parameter(node) => match node {
                ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
                ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
            },
        }
    }
}

@@ -181,7 +211,10 @@ pub enum DefinitionKind {
    NamedExpression(AstNodeRef<ast::ExprNamed>),
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
    AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
    Comprehension(ComprehensionDefinitionKind),
    Parameter(AstNodeRef<ast::Parameter>),
    ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
}

#[derive(Clone, Debug)]

@@ -227,6 +260,10 @@ impl AssignmentDefinitionKind {
    pub(crate) fn assignment(&self) -> &ast::StmtAssign {
        self.assignment.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]

@@ -268,8 +305,26 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
    }
}

impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
    fn from(node: &ast::StmtAugAssign) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Comprehension> for DefinitionNodeKey {
    fn from(node: &ast::Comprehension) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Parameter> for DefinitionNodeKey {
    fn from(node: &ast::Parameter) -> Self {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
    fn from(node: &ast::ParameterWithDefault) -> Self {
        Self(NodeKey::from_node(node))
    }
}
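The point of these `From<&Node> for DefinitionNodeKey` impls is that one map can be keyed by any definition-introducing node kind. A hedged sketch of the idea (the `NodeKey` here is a fake integer key; the real one identifies an AST node by kind and text range):

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for illustration only.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
struct NodeKey(u64);

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
struct DefinitionNodeKey(NodeKey);

struct Parameter {
    key: u64, // stand-in for the node's identity
}

impl From<&Parameter> for DefinitionNodeKey {
    fn from(node: &Parameter) -> Self {
        Self(NodeKey(node.key))
    }
}

fn main() {
    // One map keyed by any definition-introducing node kind:
    let mut definitions: HashMap<DefinitionNodeKey, &str> = HashMap::new();
    let param = Parameter { key: 42 };
    let key: DefinitionNodeKey = (&param).into();
    definitions.insert(key, "parameter definition");
    assert_eq!(definitions.get(&key), Some(&"parameter definition"));
}
```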
@@ -21,7 +21,7 @@ pub(crate) struct Expression<'db> {
    /// The expression node.
    #[no_eq]
    #[return_ref]
    pub(crate) node: AstNodeRef<ast::Expr>,
    pub(crate) node_ref: AstNodeRef<ast::Expr>,

    #[no_eq]
    count: countme::Count<Expression<'static>>,
@@ -1,4 +1,5 @@
//! Build a map from each use of a symbol to the definitions visible from that use.
//! Build a map from each use of a symbol to the definitions visible from that use, and the
//! type-narrowing constraints that apply to each definition.
//!
//! Let's take this code sample:
//!

@@ -6,7 +7,7 @@
//! x = 1
//! x = 2
//! y = x
//! if flag:
//! if y is not None:
//!     x = 3
//! else:
//!     x = 4

@@ -34,8 +35,8 @@
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//!
//! The other case we need to handle is when a symbol is referenced from a different scope (the
//! most obvious example of this is an import). We call this "public" use of a symbol. So the other
//! Another case we need to handle is when a symbol is referenced from a different scope (the most
//! obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//!

@@ -53,42 +54,55 @@
//! start.)
//!
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope.
//! visible at the end of the scope; effectively we have an implicit "use" of every symbol at the
//! end of the scope.
//!
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
//! We also need to know, for a given definition of a symbol, what type-narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     y = x
//! ```
//!
//! At the use of `x` in `y = x`, the visible definition of `x` is `1 if flag else None`, which
//! would infer as the type `Literal[1] | None`. But the constraint `x is not None` dominates this
//! use, which means we can rule out the possibility that `x` is `None` here, which should give us
//! the type `Literal[1]` for this use.
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
//! visible definitions at that use, or at the end of the scope for that symbol.
//! visible definitions at that use, or at the end of the scope for that symbol, with a list of the
//! dominating constraints for each of those definitions.
//!
//! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
//! actually store these "list of visible definitions" as a vector of [`Definition`] IDs. Instead,
//! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
//! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
//! this representation is that it requires that the definitions visible at any given use of a
//! symbol are stored sequentially in `all_definitions`.
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, the values in `definitions_by_use` and `public_definitions` are a [`SymbolState`]
//! struct which uses bit-sets to track definitions and constraints in terms of
//! [`ScopedDefinitionId`] and [`ScopedConstraintId`], which are indices into the `all_definitions`
//! and `all_constraints` indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: it might be unbound in the
//! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
//! that has a definition for the symbol, leaving us with one visible definition, but still also
//! the "unbound" possibility, since we might not have taken the `if` branch.)
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
//! dramatically increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
//! unnecessarily increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
//! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
//! additional visible "definition", which is the unbound state. If this flag is `false`, it means
//! we've eliminated the possibility of unbound: every path we've followed includes a definition
//! for this symbol.
//! state, and constraints are irrelevant to it, we can represent it more efficiently: we use the
//! `may_be_unbound` boolean on the [`SymbolState`] struct. If this flag is `true`, it means the
//! symbol/use really has one additional visible "definition", which is the unbound state. If this
//! flag is `false`, it means we've eliminated the possibility of unbound: every path we've
//! followed includes a definition for this symbol.
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
//! as they are encountered by the
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
//! definitions for each symbol as the public definitions of that symbol.
//! each symbol, the builder tracks the `SymbolState` for that symbol. When we hit a use of a
//! symbol, it records the current state for that symbol for that use. When we reach the end of the
//! scope, it records the state for each symbol as the public definitions of that symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no visible
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible

@@ -98,10 +112,11 @@
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
//! snapshot.
//! all symbols, which we'll need later. Then we record `flag` as a possible constraint on the
//! currently visible definition (`x = 2`), and go ahead and visit the `if` body. When we see `x =
//! 3`, it replaces `x = 2` (constrained by `flag`) as the sole visible definition of `x`. At the
//! end of the `if` body, we take another snapshot of the currently-visible definitions; we'll call
//! this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test

@@ -125,98 +140,142 @@
//! (In the future we may have some other questions we want to answer as well, such as "is this
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
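The snapshot/merge discipline described above can be illustrated with a deliberately simplified model, using plain `Vec`s in place of the bit-sets; all names here are illustrative only, not the crate's API:

```rust
// Toy model of per-symbol flow state: visible definitions plus a
// may-be-unbound flag, with the merge semantics the doc comment describes.
#[derive(Clone, Debug, PartialEq)]
struct ToySymbolState {
    visible_definitions: Vec<u32>, // definition IDs
    may_be_unbound: bool,
}

impl ToySymbolState {
    fn merge(&mut self, other: &ToySymbolState) {
        // Unbound on either incoming path means possibly unbound post-merge.
        self.may_be_unbound |= other.may_be_unbound;
        // Definitions visible on either path are visible after the merge.
        for def in &other.visible_definitions {
            if !self.visible_definitions.contains(def) {
                self.visible_definitions.push(*def);
            }
        }
        self.visible_definitions.sort_unstable();
    }
}

fn main() {
    // `x = 1` happened on one path...
    let mut if_branch = ToySymbolState { visible_definitions: vec![1], may_be_unbound: false };
    // ...and no binding at all on the other (the `if` body was skipped).
    let skipped = ToySymbolState { visible_definitions: vec![], may_be_unbound: true };
    if_branch.merge(&skipped);
    // Post-merge: one visible definition, but unbound is still possible.
    assert_eq!(if_branch, ToySymbolState { visible_definitions: vec![1], may_be_unbound: true });
}
```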
use self::symbol_state::{
    ConstraintIdIterator, DefinitionIdWithConstraintsIterator, ScopedConstraintId,
    ScopedDefinitionId, SymbolState,
};
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use std::ops::Range;

/// All definitions that can reach a given use of a name.
mod bitset;
mod symbol_state;

/// Applicable definitions and constraints for every use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
    // TODO store constraints with definitions for type narrowing
    /// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
    all_definitions: Vec<Definition<'db>>,
    /// Array of [`Definition`] in this scope.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Definitions that can reach a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    /// Array of constraints (as [`Expression`]) in this scope.
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// Definitions of each symbol visible at end of scope.
    public_definitions: IndexVec<ScopedSymbolId, Definitions>,
    /// [`SymbolState`] visible at a [`ScopedUseId`].
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// [`SymbolState`] visible at end of scope for each symbol.
    public_definitions: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMap<'db> {
    pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
        &self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
    pub(crate) fn use_definitions(
        &self,
        use_id: ScopedUseId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.definitions_by_use[use_id].visible_definitions(),
        }
    }

    pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
        self.definitions_by_use[use_id].may_be_unbound
        self.definitions_by_use[use_id].may_be_unbound()
    }

    pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
        &self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
    pub(crate) fn public_definitions(
        &self,
        symbol: ScopedSymbolId,
    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
        DefinitionWithConstraintsIterator {
            all_definitions: &self.all_definitions,
            all_constraints: &self.all_constraints,
            inner: self.public_definitions[symbol].visible_definitions(),
        }
    }

    pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
        self.public_definitions[symbol].may_be_unbound
        self.public_definitions[symbol].may_be_unbound()
    }
}

/// Definitions visible for a symbol at a particular use (or end-of-scope).
#[derive(Clone, Debug, PartialEq, Eq)]
struct Definitions {
    /// [`Range`] in `all_definitions` of the visible definition IDs.
    definitions_range: Range<usize>,
    /// Is the symbol possibly unbound at this point?
    may_be_unbound: bool,
}

impl Definitions {
    /// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
    fn unbound() -> Self {
        Self {
            definitions_range: Range::default(),
            may_be_unbound: true,
        }
    }
}

impl Default for Definitions {
    fn default() -> Self {
        Definitions::unbound()
    }
}

/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
}

#[derive(Debug)]
pub(crate) struct DefinitionWithConstraintsIterator<'map, 'db> {
    all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    inner: DefinitionIdWithConstraintsIterator<'map>,
}

impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
    type Item = DefinitionWithConstraints<'map, 'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            .map(|def_id_with_constraints| DefinitionWithConstraints {
                definition: self.all_definitions[def_id_with_constraints.definition],
                constraints: ConstraintsIterator {
                    all_constraints: self.all_constraints,
                    constraint_ids: def_id_with_constraints.constraint_ids,
                },
            })
    }
}

impl std::iter::FusedIterator for DefinitionWithConstraintsIterator<'_, '_> {}

pub(crate) struct DefinitionWithConstraints<'map, 'db> {
    pub(crate) definition: Definition<'db>,
    pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}

pub(crate) struct ConstraintsIterator<'map, 'db> {
    all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
    constraint_ids: ConstraintIdIterator<'map>,
}

impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
    type Item = Expression<'db>;

    fn next(&mut self) -> Option<Self::Item> {
        self.constraint_ids
            .next()
            .map(|constraint_id| self.all_constraints[constraint_id])
    }
}

impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}

/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
    /// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
    all_definitions: Vec<Definition<'db>>,
    /// Append-only array of [`Definition`]; None is unbound.
    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,

    /// Append-only array of constraints (as [`Expression`]).
    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,

    /// Visible definitions at each so-far-recorded use.
    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,

    /// Currently visible definitions for each symbol.
    definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
    definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
}

impl<'db> UseDefMapBuilder<'db> {
    pub(super) fn new() -> Self {
        Self {
            all_definitions: Vec::new(),
            definitions_by_use: IndexVec::new(),
            definitions_by_symbol: IndexVec::new(),
        }
        Self::default()
    }

    pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
        let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
        let new_symbol = self.definitions_by_symbol.push(SymbolState::unbound());
        debug_assert_eq!(symbol, new_symbol);
    }
@@ -227,13 +286,15 @@ impl<'db> UseDefMapBuilder<'db> {
    ) {
        // We have a new definition of a symbol; this replaces any previous definitions in this
        // path.
        let def_idx = self.all_definitions.len();
        self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = Definitions {
            #[allow(clippy::range_plus_one)]
            definitions_range: def_idx..(def_idx + 1),
            may_be_unbound: false,
        };
        let def_id = self.all_definitions.push(definition);
        self.definitions_by_symbol[symbol] = SymbolState::with(def_id);
    }

    pub(super) fn record_constraint(&mut self, constraint: Expression<'db>) {
        let constraint_id = self.all_constraints.push(constraint);
        for definitions in &mut self.definitions_by_symbol {
            definitions.add_constraint(constraint_id);
        }
    }

    pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {

@@ -265,15 +326,15 @@ impl<'db> UseDefMapBuilder<'db> {

        // If the snapshot we are restoring is missing some symbols we've recorded since, we need
        // to fill them in so the symbol IDs continue to line up. Since they don't exist in the
        // snapshot, the correct state to fill them in with is "unbound", the default.
        // snapshot, the correct state to fill them in with is "unbound".
        self.definitions_by_symbol
            .resize(num_symbols, Definitions::unbound());
            .resize(num_symbols, SymbolState::unbound());
    }

    /// Merge the given snapshot into the current state, reflecting that we might have taken either
    /// path to get here. The new visible-definitions state for each symbol should include
    /// definitions from both the prior state and the snapshot.
    pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
    pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
        // The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
        // two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
        // one or the other of the ranges to the end of `all_definitions` so as to make them

@@ -287,66 +348,26 @@ impl<'db> UseDefMapBuilder<'db> {
        // greater than the number of known symbols in a previously-taken snapshot.
        debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());

        for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
            let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
                // Symbol not present in snapshot, so it's unbound from that path.
                current.may_be_unbound = true;
                continue;
            };

            // If the symbol can be unbound in either predecessor, it can be unbound post-merge.
            current.may_be_unbound |= snapshot.may_be_unbound;

            // Merge the definition ranges.
            let current = &mut current.definitions_range;
            let snapshot = &snapshot.definitions_range;

            // We never create reversed ranges.
            debug_assert!(current.end >= current.start);
            debug_assert!(snapshot.end >= snapshot.start);

            if current == snapshot {
                // Ranges already identical, nothing to do.
            } else if snapshot.is_empty() {
                // Merging from an empty range; nothing to do.
            } else if (*current).is_empty() {
                // Merging to an empty range; just use the incoming range.
                *current = snapshot.clone();
            } else if snapshot.end >= current.start && snapshot.start <= current.end {
                // Ranges are adjacent or overlapping, merge them in-place.
                *current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
            } else if current.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `current` is at the end of
                // `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
                // and can be merged into one range.
                self.all_definitions.extend_from_within(snapshot.clone());
                current.end = self.all_definitions.len();
            } else if snapshot.end == self.all_definitions.len() {
                // Ranges are not adjacent or overlapping, `snapshot` is at the end of
                // `all_definitions`, we need to copy `current` to the end so they are adjacent and
                // can be merged into one range.
                self.all_definitions.extend_from_within(current.clone());
                current.start = snapshot.start;
                current.end = self.all_definitions.len();
        let mut snapshot_definitions_iter = snapshot.definitions_by_symbol.into_iter();
        for current in &mut self.definitions_by_symbol {
            if let Some(snapshot) = snapshot_definitions_iter.next() {
                current.merge(snapshot);
            } else {
                // Ranges are not adjacent and neither one is at the end of `all_definitions`, we
                // have to copy both to the end so they are adjacent and we can merge them.
                let start = self.all_definitions.len();
                self.all_definitions.extend_from_within(current.clone());
                self.all_definitions.extend_from_within(snapshot.clone());
                current.start = start;
                current.end = self.all_definitions.len();
                // Symbol not present in snapshot, so it's unbound from that path.
                current.add_unbound();
            }
        }
    }

    pub(super) fn finish(mut self) -> UseDefMap<'db> {
        self.all_definitions.shrink_to_fit();
        self.all_constraints.shrink_to_fit();
        self.definitions_by_symbol.shrink_to_fit();
        self.definitions_by_use.shrink_to_fit();

        UseDefMap {
            all_definitions: self.all_definitions,
            all_constraints: self.all_constraints,
            definitions_by_use: self.definitions_by_use,
            public_definitions: self.definitions_by_symbol,
        }
@@ -0,0 +1,228 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
    /// Bit-set (in 64-bit blocks) for the first 64 * B entries.
    Inline([u64; B]),

    /// Overflow beyond 64 * B.
    Heap(Vec<u64>),
}

impl<const B: usize> Default for BitSet<B> {
    fn default() -> Self {
        // B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
        // truncating casts to u32 below safe. This would be better as a const assertion, but
        // that's not possible on stable with const generic params. (B should never really be
        // anywhere close to this large.)
        assert!(B * 64 < (u32::MAX as usize));
        // This implementation requires usize >= 32 bits.
        static_assertions::const_assert!(usize::BITS >= 32);
        Self::Inline([0; B])
    }
}

impl<const B: usize> BitSet<B> {
    /// Create and return a new [`BitSet`] with a single `value` inserted.
    pub(super) fn with(value: u32) -> Self {
        let mut bitset = Self::default();
        bitset.insert(value);
        bitset
    }

    /// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
    fn resize(&mut self, value: u32) {
        let num_blocks_needed = (value / 64) + 1;
        match self {
            Self::Inline(blocks) => {
                let mut vec = blocks.to_vec();
                vec.resize(num_blocks_needed as usize, 0);
                *self = Self::Heap(vec);
            }
            Self::Heap(vec) => {
                vec.resize(num_blocks_needed as usize, 0);
            }
        }
    }

    fn blocks_mut(&mut self) -> &mut [u64] {
        match self {
            Self::Inline(blocks) => blocks.as_mut_slice(),
            Self::Heap(blocks) => blocks.as_mut_slice(),
        }
    }

    fn blocks(&self) -> &[u64] {
        match self {
            Self::Inline(blocks) => blocks.as_slice(),
            Self::Heap(blocks) => blocks.as_slice(),
        }
    }

    /// Insert a value into the [`BitSet`].
    ///
    /// Return true if the value was newly inserted, false if already present.
    pub(super) fn insert(&mut self, value: u32) -> bool {
        let value_usize = value as usize;
        let (block, index) = (value_usize / 64, value_usize % 64);
        if block >= self.blocks().len() {
            self.resize(value);
        }
        let blocks = self.blocks_mut();
        let missing = blocks[block] & (1 << index) == 0;
        blocks[block] |= 1 << index;
        missing
    }

    /// Intersect in-place with another [`BitSet`].
    pub(super) fn intersect(&mut self, other: &BitSet<B>) {
        let my_blocks = self.blocks_mut();
        let other_blocks = other.blocks();
        let min_len = my_blocks.len().min(other_blocks.len());
        for i in 0..min_len {
            my_blocks[i] &= other_blocks[i];
        }
        for block in my_blocks.iter_mut().skip(min_len) {
            *block = 0;
        }
    }

    /// Return an iterator over the values (in ascending order) in this [`BitSet`].
    pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
        let blocks = self.blocks();
        BitSetIterator {
            blocks,
            current_block_index: 0,
            current_block: blocks[0],
        }
    }
}
/// Iterator over values in a [`BitSet`].
#[derive(Debug)]
pub(super) struct BitSetIterator<'a, const B: usize> {
    /// The blocks we are iterating over.
    blocks: &'a [u64],

    /// The index of the block we are currently iterating through.
    current_block_index: usize,

    /// The block we are currently iterating through (and zeroing as we go.)
    current_block: u64,
}

impl<const B: usize> Iterator for BitSetIterator<'_, B> {
    type Item = u32;

    fn next(&mut self) -> Option<Self::Item> {
        while self.current_block == 0 {
            if self.current_block_index + 1 >= self.blocks.len() {
                return None;
            }
            self.current_block_index += 1;
            self.current_block = self.blocks[self.current_block_index];
        }
        let lowest_bit_set = self.current_block.trailing_zeros();
        // reset the lowest set bit, without a data dependency on `lowest_bit_set`
        self.current_block &= self.current_block.wrapping_sub(1);
        // SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
        // than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
        // current_block_index` and the final value here must fit in u32.
        #[allow(clippy::cast_possible_truncation)]
        Some(lowest_bit_set + (64 * self.current_block_index) as u32)
    }
}

impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}

#[cfg(test)]
mod tests {
    use super::BitSet;

    fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
        assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
    }

    #[test]
    fn iter() {
        let mut b = BitSet::<1>::with(3);
        b.insert(27);
        b.insert(6);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[3, 6, 27]);
    }

    #[test]
    fn iter_overflow() {
        let mut b = BitSet::<1>::with(140);
        b.insert(100);
        b.insert(129);
        assert!(matches!(b, BitSet::Heap(_)));
        assert_bitset(&b, &[100, 129, 140]);
    }

    #[test]
    fn intersect() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_1() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(5);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_mixed_2() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(23);
        b2.insert(89);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap() {
        let mut b1 = BitSet::<1>::with(4);
        let mut b2 = BitSet::<1>::with(4);
        b1.insert(89);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[4]);
    }

    #[test]
    fn intersect_heap_2() {
        let mut b1 = BitSet::<1>::with(89);
        let mut b2 = BitSet::<1>::with(89);
        b1.insert(91);
        b2.insert(90);

        b1.intersect(&b2);
        assert_bitset(&b1, &[89]);
    }

    #[test]
    fn multiple_blocks() {
        let mut b = BitSet::<2>::with(120);
        b.insert(45);
        assert!(matches!(b, BitSet::Inline(_)));
        assert_bitset(&b, &[45, 120]);
    }
}
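`BitSetIterator::next` above leans on a classic bit trick: `x & (x - 1)` clears the lowest set bit of `x`, while `trailing_zeros` names that bit's position. A standalone demonstration of the trick (assumed-illustrative values, not part of the crate):

```rust
// Walk a single 64-bit block the same way `BitSetIterator::next` does:
// report the lowest set bit, then clear it, until the block is empty.
fn main() {
    let mut block: u64 = 0b0010_1100; // bits 2, 3, and 5 set
    let mut values = Vec::new();
    while block != 0 {
        let lowest_bit_set = block.trailing_zeros();
        values.push(lowest_bit_set);
        // `wrapping_sub` matches the implementation above; for nonzero
        // `block` it behaves identically to plain subtraction.
        block &= block.wrapping_sub(1);
    }
    assert_eq!(values, vec![2, 3, 5]); // values come out in ascending order
}
```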
@@ -0,0 +1,374 @@
//! Track visible definitions of a symbol, and applicable constraints per definition.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between definitions and constraints, not just a single
//! set of currently dominating constraints (where "dominating" means "control flow must have
//! passed through it to reach this point"), because we can have dominating constraints that apply
//! to some definitions but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     if flag2:
//!         x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! definition of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each definition, an index into a list of dominating constraints,
//! either, because we can have definitions which are still visible, but subject to constraints
//! that are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//!     x = 1 if flag2 else None
//!     assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` definition, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of definitions and constraints when
//! needed.
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;

/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;

/// A newtype-index for a constraint expression in a particular scope.
#[newtype_index]
pub(super) struct ScopedConstraintId;

/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_DEFINITION_BLOCKS: usize = 3;

/// A [`BitSet`] of [`ScopedDefinitionId`], representing visible definitions of a symbol in a scope.
type Definitions = BitSet<INLINE_DEFINITION_BLOCKS>;
type DefinitionsIterator<'a> = BitSetIterator<'a, INLINE_DEFINITION_BLOCKS>;

/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;

/// Can keep inline this many visible definitions per symbol at a given time; more will go to heap.
const INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL: usize = 4;

/// One [`BitSet`] of applicable [`ScopedConstraintId`] per visible definition.
type InlineConstraintArray =
    [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;

/// Visible definitions and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
    /// [`BitSet`]: which [`ScopedDefinitionId`] are visible for this symbol?
    visible_definitions: Definitions,

    /// For each definition, which [`ScopedConstraintId`] apply?
    ///
    /// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
    /// definition in `visible_definitions`.
    constraints: Constraints,

    /// Could the symbol be unbound at this point?
    may_be_unbound: bool,
}

/// A single [`ScopedDefinitionId`] with an iterator of its applicable [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraints<'a> {
    pub(super) definition: ScopedDefinitionId,
    pub(super) constraint_ids: ConstraintIdIterator<'a>,
}
impl SymbolState {
    /// Return a new [`SymbolState`] representing an unbound symbol.
    pub(super) fn unbound() -> Self {
        Self {
            visible_definitions: Definitions::default(),
            constraints: Constraints::default(),
            may_be_unbound: true,
        }
    }

    /// Return a new [`SymbolState`] representing a symbol with a single visible definition.
    pub(super) fn with(definition_id: ScopedDefinitionId) -> Self {
        let mut constraints = Constraints::with_capacity(1);
        constraints.push(BitSet::default());
        Self {
            visible_definitions: Definitions::with(definition_id.into()),
            constraints,
            may_be_unbound: false,
        }
    }

    /// Add Unbound as a possibility for this symbol.
    pub(super) fn add_unbound(&mut self) {
        self.may_be_unbound = true;
    }

    /// Add given constraint to all currently-visible definitions.
    pub(super) fn add_constraint(&mut self, constraint_id: ScopedConstraintId) {
        for bitset in &mut self.constraints {
            bitset.insert(constraint_id.into());
        }
    }

    /// Merge another [`SymbolState`] into this one.
    pub(super) fn merge(&mut self, b: SymbolState) {
        let mut a = Self {
            visible_definitions: Definitions::default(),
            constraints: Constraints::default(),
            may_be_unbound: self.may_be_unbound || b.may_be_unbound,
        };
        std::mem::swap(&mut a, self);
        let mut a_defs_iter = a.visible_definitions.iter();
        let mut b_defs_iter = b.visible_definitions.iter();
        let mut a_constraints_iter = a.constraints.into_iter();
        let mut b_constraints_iter = b.constraints.into_iter();

        let mut opt_a_def: Option<u32> = a_defs_iter.next();
        let mut opt_b_def: Option<u32> = b_defs_iter.next();

        // Iterate through the definitions from `a` and `b`, always processing the lower definition
        // ID first, and pushing each definition onto the merged `SymbolState` with its
        // constraints. If a definition is found in both `a` and `b`, push it with the intersection
        // of the constraints from the two paths; a constraint that applies from only one possible
        // path is irrelevant.

        // Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
        let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
            merged.visible_definitions.insert(def);
            // SAFETY: we only ever create SymbolState with either no definitions and no constraint
            // bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
            // `::merge` always pushes one definition and one constraint bitset together (just
            // below), so the number of definitions and the number of constraint bitsets can never
            // get out of sync.
            let constraints = constraints_iter
                .next()
                .expect("definitions and constraints length mismatch");
            merged.constraints.push(constraints);
        };

        loop {
            match (opt_a_def, opt_b_def) {
                (Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
                    std::cmp::Ordering::Less => {
                        // Next definition ID is only in `a`, push it to `self` and advance `a`.
                        push(a_def, &mut a_constraints_iter, self);
                        opt_a_def = a_defs_iter.next();
                    }
                    std::cmp::Ordering::Greater => {
                        // Next definition ID is only in `b`, push it to `self` and advance `b`.
                        push(b_def, &mut b_constraints_iter, self);
                        opt_b_def = b_defs_iter.next();
                    }
                    std::cmp::Ordering::Equal => {
                        // Next definition is in both; push to `self` and intersect constraints.
                        push(a_def, &mut b_constraints_iter, self);
                        // SAFETY: we only ever create SymbolState with either no definitions and
                        // no constraint bitsets (`::unbound`) or one definition and one constraint
                        // bitset (`::with`), and `::merge` always pushes one definition and one
                        // constraint bitset together (just below), so the number of definitions
                        // and the number of constraint bitsets can never get out of sync.
                        let a_constraints = a_constraints_iter
                            .next()
                            .expect("definitions and constraints length mismatch");
                        // If the same definition is visible through both paths, any constraint
                        // that applies on only one path is irrelevant to the resulting type from
                        // unioning the two paths, so we intersect the constraints.
                        self.constraints
                            .last_mut()
                            .unwrap()
                            .intersect(&a_constraints);
                        opt_a_def = a_defs_iter.next();
                        opt_b_def = b_defs_iter.next();
                    }
                },
                (Some(a_def), None) => {
                    // We've exhausted `b`, just push the def from `a` and move on to the next.
                    push(a_def, &mut a_constraints_iter, self);
                    opt_a_def = a_defs_iter.next();
                }
                (None, Some(b_def)) => {
                    // We've exhausted `a`, just push the def from `b` and move on to the next.
                    push(b_def, &mut b_constraints_iter, self);
                    opt_b_def = b_defs_iter.next();
                }
                (None, None) => break,
            }
        }
    }
    /// Get iterator over visible definitions with constraints.
    pub(super) fn visible_definitions(&self) -> DefinitionIdWithConstraintsIterator {
        DefinitionIdWithConstraintsIterator {
            definitions: self.visible_definitions.iter(),
            constraints: self.constraints.iter(),
        }
    }

    /// Could the symbol be unbound?
    pub(super) fn may_be_unbound(&self) -> bool {
        self.may_be_unbound
    }
}

/// The default state of a symbol (if we've seen no definitions of it) is unbound.
impl Default for SymbolState {
    fn default() -> Self {
        SymbolState::unbound()
    }
}

#[derive(Debug)]
pub(super) struct DefinitionIdWithConstraintsIterator<'a> {
    definitions: DefinitionsIterator<'a>,
    constraints: ConstraintsIterator<'a>,
}

impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
    type Item = DefinitionIdWithConstraints<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        match (self.definitions.next(), self.constraints.next()) {
            (None, None) => None,
            (Some(def), Some(constraints)) => Some(DefinitionIdWithConstraints {
                definition: ScopedDefinitionId::from_u32(def),
                constraint_ids: ConstraintIdIterator {
                    wrapped: constraints.iter(),
                },
            }),
            // SAFETY: see above.
            _ => unreachable!("definitions and constraints length mismatch"),
        }
    }
}

impl std::iter::FusedIterator for DefinitionIdWithConstraintsIterator<'_> {}

#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
    wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
}

impl Iterator for ConstraintIdIterator<'_> {
    type Item = ScopedConstraintId;

    fn next(&mut self) -> Option<Self::Item> {
        self.wrapped.next().map(ScopedConstraintId::from_u32)
    }
}

impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}

#[cfg(test)]
mod tests {
    use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};

    impl SymbolState {
        pub(crate) fn assert(&self, may_be_unbound: bool, expected: &[&str]) {
            assert_eq!(self.may_be_unbound(), may_be_unbound);
            let actual = self
                .visible_definitions()
                .map(|def_id_with_constraints| {
                    format!(
                        "{}<{}>",
                        def_id_with_constraints.definition.as_u32(),
                        def_id_with_constraints
                            .constraint_ids
                            .map(ScopedConstraintId::as_u32)
                            .map(|idx| idx.to_string())
                            .collect::<Vec<_>>()
                            .join(", ")
                    )
                })
                .collect::<Vec<_>>();
            assert_eq!(actual, expected);
        }
    }

    #[test]
    fn unbound() {
        let cd = SymbolState::unbound();

        cd.assert(true, &[]);
    }

    #[test]
    fn with() {
        let cd = SymbolState::with(ScopedDefinitionId::from_u32(0));

        cd.assert(false, &["0<>"]);
    }

    #[test]
    fn add_unbound() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_unbound();

        cd.assert(true, &["0<>"]);
    }

    #[test]
    fn add_constraint() {
        let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd.add_constraint(ScopedConstraintId::from_u32(0));

        cd.assert(false, &["0<0>"]);
    }

    #[test]
    fn merge() {
        // merging the same definition with the same constraint keeps the constraint
        let mut cd0a = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0a.add_constraint(ScopedConstraintId::from_u32(0));

        let mut cd0b = SymbolState::with(ScopedDefinitionId::from_u32(0));
        cd0b.add_constraint(ScopedConstraintId::from_u32(0));

        cd0a.merge(cd0b);
        let mut cd0 = cd0a;
        cd0.assert(false, &["0<0>"]);

        // merging the same definition with differing constraints drops all constraints
        let mut cd1a = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1a.add_constraint(ScopedConstraintId::from_u32(1));

        let mut cd1b = SymbolState::with(ScopedDefinitionId::from_u32(1));
        cd1b.add_constraint(ScopedConstraintId::from_u32(2));

        cd1a.merge(cd1b);
        let cd1 = cd1a;
        cd1.assert(false, &["1<>"]);

        // merging a constrained definition with unbound keeps both
        let mut cd2a = SymbolState::with(ScopedDefinitionId::from_u32(2));
        cd2a.add_constraint(ScopedConstraintId::from_u32(3));

        let cd2b = SymbolState::unbound();

        cd2a.merge(cd2b);
        let cd2 = cd2a;
        cd2.assert(true, &["2<3>"]);

        // merging different definitions keeps them each with their existing constraints
        cd0.merge(cd2);
        let cd = cd0;
        cd.assert(true, &["0<0>", "2<3>"]);
    }
}
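The intersection on the `Equal` branch of `merge` encodes the key invariant: only a constraint that holds on every path to a use can be used to narrow a definition's type. A minimal illustration under that assumption, with plain `HashSet`s standing in for the constraint bit-sets:

```rust
use std::collections::HashSet;

fn main() {
    // Path A saw definition 1 narrowed by constraints {0, 1};
    // path B saw the same definition narrowed only by {0}.
    let path_a: HashSet<u32> = [0, 1].into_iter().collect();
    let path_b: HashSet<u32> = [0].into_iter().collect();

    // After merging the two paths, only constraint 0 still applies.
    let merged: HashSet<u32> = path_a.intersection(&path_b).copied().collect();
    let expected: HashSet<u32> = [0].into_iter().collect();
    assert_eq!(merged, expected);
}
```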
@@ -1,7 +1,7 @@
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
use ruff_python_ast::{Expr, ExpressionRef};
use ruff_source_file::LineIndex;

use crate::module_name::ModuleName;

@@ -147,29 +147,24 @@ impl HasTy for ast::Expr {
    }
}

impl HasTy for ast::StmtFunctionDef {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
    }
macro_rules! impl_definition_has_ty {
    ($ty: ty) => {
        impl HasTy for $ty {
            #[inline]
            fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
                let index = semantic_index(model.db, model.file);
                let definition = index.definition(self);
                definition_ty(model.db, definition)
            }
        }
    };
}

impl HasTy for StmtClassDef {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
    }
}

impl HasTy for ast::Alias {
    fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
        let index = semantic_index(model.db, model.file);
        let definition = index.definition(self);
        definition_ty(model.db, definition)
    }
}
impl_definition_has_ty!(ast::StmtFunctionDef);
impl_definition_has_ty!(ast::StmtClassDef);
impl_definition_has_ty!(ast::Alias);
impl_definition_has_ty!(ast::Parameter);
impl_definition_has_ty!(ast::ParameterWithDefault);
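This hunk replaces three hand-written `HasTy` impls with a macro, so each additional node kind costs one invocation line. A self-contained sketch of the same macro pattern (stand-in trait and types; the real `HasTy`, `SemanticModel`, and AST nodes live in the crate above):

```rust
// Stand-in trait and node types, for illustration only.
trait HasLabel {
    fn label(&self) -> &'static str;
}

struct Alias;
struct Parameter;

macro_rules! impl_label {
    ($ty: ty, $name: expr) => {
        impl HasLabel for $ty {
            fn label(&self) -> &'static str {
                $name
            }
        }
    };
}

// One line per node kind, instead of a repeated impl block each:
impl_label!(Alias, "alias");
impl_label!(Parameter, "parameter");

fn main() {
    assert_eq!(Alias.label(), "alias");
    assert_eq!(Parameter.label(), "parameter");
}
```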
#[cfg(test)]
mod tests {

@@ -4,15 +4,38 @@ use ruff_python_ast::name::Name;
use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
use crate::semantic_index::{
    global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
    DefinitionWithConstraintsIterator,
};
use crate::types::narrow::narrowing_constraint;
use crate::{Db, FxOrderSet};

pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
pub(crate) use self::infer::{
    infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
};

mod builder;
mod diagnostic;
mod display;
mod infer;
mod narrow;

pub(crate) use self::builder::UnionBuilder;
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
    let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();

    let index = semantic_index(db, file);
    let mut diagnostics = TypeCheckDiagnostics::new();

    for scope_id in index.scope_ids() {
        let result = infer_scope_types(db, scope_id);
        diagnostics.extend(result.diagnostics());
    }

    diagnostics
}

/// Infer the public type of a symbol (its type as seen from outside its scope).
pub(crate) fn symbol_ty<'db>(

@@ -82,10 +105,31 @@ pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -
/// provide an `unbound_ty`.
pub(crate) fn definitions_ty<'db>(
    db: &'db dyn Db,
    definitions: &[Definition<'db>],
    definitions_with_constraints: DefinitionWithConstraintsIterator<'_, 'db>,
    unbound_ty: Option<Type<'db>>,
) -> Type<'db> {
    let def_types = definitions.iter().map(|def| definition_ty(db, *def));
    let def_types = definitions_with_constraints.map(
        |DefinitionWithConstraints {
             definition,
             constraints,
         }| {
            let mut constraint_tys =
                constraints.filter_map(|test| narrowing_constraint(db, test, definition));
            let definition_ty = definition_ty(db, definition);
            if let Some(first_constraint_ty) = constraint_tys.next() {
                let mut builder = IntersectionBuilder::new(db);
                builder = builder
                    .add_positive(definition_ty)
                    .add_positive(first_constraint_ty);
                for constraint_ty in constraint_tys {
                    builder = builder.add_positive(constraint_ty);
                }
                builder.build()
            } else {
                definition_ty
            }
        },
    );
    let mut all_types = unbound_ty.into_iter().chain(def_types);

    let Some(first) = all_types.next() else {

@@ -113,7 +157,7 @@ pub enum Type<'db> {
    Any,
    /// the empty set of values
    Never,
    /// unknown type (no annotation)
    /// unknown type (either no annotation, or some kind of type error)
    /// equivalent to Any, or possibly to object in strict mode
    Unknown,
    /// name does not exist or is not bound to any value (this represents an error, but with some

@@ -149,20 +193,71 @@ impl<'db> Type<'db> {
        matches!(self, Type::Unknown)
    }

    pub const fn is_never(&self) -> bool {
        matches!(self, Type::Never)
    }

    pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
        match self {
            Type::Unbound => true,
            Type::Union(union) => union.contains(db, Type::Unbound),
            // Unbound can't appear in an intersection, because an intersection with Unbound
            // simplifies to just Unbound.
            _ => false,
        }
    }

    #[must_use]
    pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
        match self {
            Type::Unbound => replacement,
            Type::Union(union) => union
                .elements(db)
                .into_iter()
                .fold(UnionBuilder::new(db), |builder, ty| {
                    builder.add(ty.replace_unbound_with(db, replacement))
                })
                .build(),
            ty => *ty,
        }
    }
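The semantics of `replace_unbound_with` can be modeled in miniature: substitute the `Unbound` member of a union (recursively) and leave every other type alone. A hedged toy sketch (`ToyType` is illustrative only; the real `Type` is interned in the Salsa database):

```rust
#[derive(Clone, Debug, PartialEq)]
enum ToyType {
    Unbound,
    IntLiteral(i64),
    Union(Vec<ToyType>),
}

impl ToyType {
    fn replace_unbound_with(&self, replacement: &ToyType) -> ToyType {
        match self {
            ToyType::Unbound => replacement.clone(),
            ToyType::Union(elements) => ToyType::Union(
                elements
                    .iter()
                    .map(|ty| ty.replace_unbound_with(replacement))
                    .collect(),
            ),
            ty => ty.clone(),
        }
    }
}

fn main() {
    // `Unbound | Literal[1]` with Unbound replaced by `Literal[0]`:
    let ty = ToyType::Union(vec![ToyType::Unbound, ToyType::IntLiteral(1)]);
    let replaced = ty.replace_unbound_with(&ToyType::IntLiteral(0));
    assert_eq!(
        replaced,
        ToyType::Union(vec![ToyType::IntLiteral(0), ToyType::IntLiteral(1)])
    );
}
```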
    /// Resolve a member access of a type.
    ///
    /// For example, if `foo` is `Type::Instance(<Bar>)`,
    /// `foo.member(&db, "baz")` returns the type of `baz` attributes
    /// as accessed from instances of the `Bar` class.
    ///
    /// TODO: use of this method currently requires manually checking
    /// whether the returned type is `Unknown`/`Unbound`
    /// (or a union with `Unknown`/`Unbound`) in many places.
    /// Ideally we'd use a more type-safe pattern, such as returning
    /// an `Option` or a `Result` from this method, which would force
    /// us to explicitly consider whether to handle an error or propagate
    /// it up the call stack.
    #[must_use]
    pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
        match self {
            Type::Any => Type::Any,
            Type::Never => todo!("attribute lookup on Never type"),
            Type::Never => {
                // TODO: attribute lookup on Never type
                Type::Unknown
            }
            Type::Unknown => Type::Unknown,
            Type::Unbound => Type::Unbound,
            Type::None => todo!("attribute lookup on None type"),
            Type::Function(_) => todo!("attribute lookup on Function type"),
            Type::None => {
                // TODO: attribute lookup on None type
                Type::Unknown
            }
            Type::Function(_) => {
                // TODO: attribute lookup on function type
                Type::Unknown
            }
            Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
            Type::Class(class) => class.class_member(db, name),
            Type::Instance(_) => {
                // TODO MRO? get_own_instance_member, get_instance_member
                todo!("attribute lookup on Instance type")
                Type::Unknown
            }
            Type::Union(union) => union
                .elements(db)

@@ -174,7 +269,7 @@ impl<'db> Type<'db> {
            Type::Intersection(_) => {
                // TODO perform the get_member on each type in the intersection
                // TODO return the intersection of those results
                todo!("attribute lookup on Intersection type")
                Type::Unknown
            }
            Type::IntLiteral(_) => {
                // TODO raise error

@@ -276,3 +371,115 @@ pub struct IntersectionType<'db> {
    /// directly in intersections rather than as a separate type.
    negative: FxOrderSet<Type<'db>>,
}
#[cfg(test)]
mod tests {
    use anyhow::Context;

    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};

    use super::TypeCheckDiagnostics;

    fn setup_db() -> TestDb {
        let db = TestDb::new();
        db.memory_file_system()
            .create_directory_all("/src")
            .unwrap();

        Program::from_settings(
            &db,
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: Vec::new(),
                    src_root: SystemPathBuf::from("/src"),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        )
        .expect("Valid search path settings");

        db
    }

    fn assert_diagnostic_messages(diagnostics: &TypeCheckDiagnostics, expected: &[&str]) {
        let messages: Vec<&str> = diagnostics
            .iter()
            .map(|diagnostic| diagnostic.message())
            .collect();
        assert_eq!(&messages, expected);
    }

    #[test]
    fn unresolved_import_statement() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_file("src/foo.py", "import bar\n")
            .context("Failed to write foo.py")?;

        let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;

        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);

        Ok(())
    }

    #[test]
    fn unresolved_import_from_statement() {
        let mut db = setup_db();

        db.write_file("src/foo.py", "from bar import baz\n")
            .unwrap();
        let foo = system_path_to_file(&db, "src/foo.py").unwrap();
        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
    }

    #[test]
    fn unresolved_import_from_resolved_module() {
        let mut db = setup_db();

        db.write_files([("/src/a.py", ""), ("/src/b.py", "from a import thing")])
            .unwrap();

        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_diagnostic_messages(
            &b_file_diagnostics,
            &["Could not resolve import of 'thing' from 'a'"],
        );
    }

    #[ignore = "\
    A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
    despite the symbol existing in the symbol table for `a.py`"]
    #[test]
    fn resolved_import_of_symbol_from_unresolved_import() {
        let mut db = setup_db();

        db.write_files([
            ("/src/a.py", "import foo as foo"),
            ("/src/b.py", "from a import foo"),
        ])
        .unwrap();

        let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
        let a_file_diagnostics = super::check_types(&db, a_file);
        assert_diagnostic_messages(
            &a_file_diagnostics,
            &["Import 'foo' could not be resolved."],
        );

        // Importing the unresolved import into a second first-party file should not trigger
        // an additional "unresolved import" violation
        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_eq!(&*b_file_diagnostics, &[]);
    }
}
@@ -65,7 +65,6 @@ impl<'db> UnionBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct IntersectionBuilder<'db> {
|
||||
// Really this builds a union-of-intersections, because we always keep our set-theoretic types
|
||||
@@ -78,8 +77,7 @@ pub(crate) struct IntersectionBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> IntersectionBuilder<'db> {
|
||||
#[allow(dead_code)]
|
||||
fn new(db: &'db dyn Db) -> Self {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![InnerIntersectionBuilder::new()],
|
||||
@@ -93,8 +91,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn add_positive(mut self, ty: Type<'db>) -> Self {
|
||||
pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
|
||||
if let Type::Union(union) = ty {
|
||||
// Distribute ourself over this union: for each union element, clone ourself and
|
||||
// intersect with that union element, then create a new union-of-intersections with all
|
||||
@@ -122,8 +119,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn add_negative(mut self, ty: Type<'db>) -> Self {
|
||||
pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
|
||||
// See comments above in `add_positive`; this is just the negated version.
|
||||
if let Type::Union(union) = ty {
|
||||
union
|
||||
@@ -142,8 +138,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn build(mut self) -> Type<'db> {
|
||||
pub(crate) fn build(mut self) -> Type<'db> {
|
||||
// Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
|
||||
if self.intersections.len() == 1 {
|
||||
self.intersections.pop().unwrap().build(self.db)
|
||||
@@ -157,7 +152,6 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
struct InnerIntersectionBuilder<'db> {
|
||||
positive: FxOrderSet<Type<'db>>,
|
||||
@@ -201,6 +195,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
self.negative.retain(|elem| !pos.contains(elem));
|
||||
}
|
||||
Type::Never => {}
|
||||
Type::Unbound => {}
|
||||
_ => {
|
||||
if !self.positive.remove(&ty) {
|
||||
self.negative.insert(ty);
|
||||
@@ -214,9 +209,23 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
|
||||
// Never is a subtype of all types
|
||||
if self.positive.contains(&Type::Never) {
|
||||
self.positive.clear();
|
||||
self.positive.retain(Type::is_never);
|
||||
self.negative.clear();
|
||||
self.positive.insert(Type::Never);
|
||||
}
|
||||
|
||||
if self.positive.contains(&Type::Unbound) {
|
||||
self.positive.retain(Type::is_unbound);
|
||||
self.negative.clear();
|
||||
}
|
||||
|
||||
// None intersects only with object
|
||||
for pos in &self.positive {
|
||||
if let Type::Instance(_) = pos {
|
||||
// could be `object` type
|
||||
} else {
|
||||
self.negative.remove(&Type::None);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
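The distribution described in `add_positive`'s comment can be seen end-to-end in a small sketch (hypothetical usage, building on `setup_db` from the tests below):

```rust
// Hedged sketch: intersecting a union distributes, so (A | B) & ~None
// becomes (A & ~None) | (B & ~None) internally.
let db = setup_db();
let a_or_b = UnionBuilder::new(&db)
    .add(Type::IntLiteral(1))
    .add(Type::IntLiteral(2))
    .build();
let ty = IntersectionBuilder::new(&db)
    .add_positive(a_or_b)
    .add_negative(Type::None)
    .build();
// The negative `None` is then dropped for each non-instance element,
// so `ty` should simplify back to `Literal[1] | Literal[2]`.
```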
@@ -426,4 +435,37 @@ mod tests {

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_positive_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::Unbound);
    }

    #[test]
    fn build_intersection_simplify_negative_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }

    #[test]
    fn build_intersection_simplify_negative_none() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::None)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }
}

crates/red_knot_python_semantic/src/types/diagnostic.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
    // TODO: Don't use string keys for rules
    pub(super) rule: String,
    pub(super) message: String,
    pub(super) range: TextRange,
    pub(super) file: File,
}

impl TypeCheckDiagnostic {
    pub fn rule(&self) -> &str {
        &self.rule
    }

    pub fn message(&self) -> &str {
        &self.message
    }

    pub fn file(&self) -> File {
        self.file
    }
}

impl Ranged for TypeCheckDiagnostic {
    fn range(&self) -> TextRange {
        self.range
    }
}

/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider
/// making [`TypeCheckDiagnostic`] a Salsa struct to have them arena-allocated (once the Tables refactor is done).
/// Using a Salsa struct does have the downside that it leaks the Salsa dependency into the
/// diagnostics, and each Salsa struct comes with some overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
    inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}

impl TypeCheckDiagnostics {
    pub fn new() -> Self {
        Self { inner: Vec::new() }
    }

    pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
        self.inner.push(Arc::new(diagnostic));
    }

    pub(crate) fn shrink_to_fit(&mut self) {
        self.inner.shrink_to_fit();
    }
}

impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
    }
}

impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner.extend(iter);
    }
}

impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner
            .extend(iter.into_iter().map(std::sync::Arc::clone));
    }
}

impl std::fmt::Debug for TypeCheckDiagnostics {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.inner.fmt(f)
    }
}

impl Deref for TypeCheckDiagnostics {
    type Target = [std::sync::Arc<TypeCheckDiagnostic>];

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl IntoIterator for TypeCheckDiagnostics {
    type Item = Arc<TypeCheckDiagnostic>;
    type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.into_iter()
    }
}

impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
    type Item = &'a Arc<TypeCheckDiagnostic>;
    type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.iter()
    }
}
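As a usage sketch (hypothetical; `file` and `range` would come from the checker, and the struct literal is only possible inside the `types` module because the fields are `pub(super)`):

```rust
// Hedged sketch: collecting diagnostics and reading them back.
let mut diagnostics = TypeCheckDiagnostics::new();
diagnostics.push(TypeCheckDiagnostic {
    rule: "unresolved-import".to_string(),
    message: "Import 'bar' could not be resolved.".to_string(),
    range,
    file,
});
// `Deref` to `[Arc<TypeCheckDiagnostic>]` makes iteration cheap; the
// `Extend` impls above let `TypeInferenceBuilder::extend` merge regions.
let messages: Vec<&str> = diagnostics.iter().map(|d| d.message()).collect();
```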
@@ -20,6 +20,8 @@
//!
//! Inferring types at any of the three region granularities returns a [`TypeInference`], which
//! holds types for every [`Definition`] and expression within the inferred region.
use std::num::NonZeroU32;

use rustc_hash::FxHashMap;
use salsa;
use salsa::plumbing::AsId;
@@ -27,17 +29,19 @@ use salsa::plumbing::AsId;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_python_ast as ast;
use ruff_python_ast::{ExprContext, TypeParams};
use ruff_python_ast::{AnyNodeRef, ExprContext};
use ruff_text_size::Ranged;

use crate::builtins::builtins_scope;
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::module_resolver::{file_to_module, resolve_module};
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::semantic_index;
use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
use crate::semantic_index::SemanticIndex;
use crate::types::diagnostic::{TypeCheckDiagnostic, TypeCheckDiagnostics};
use crate::types::{
    builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
    Name, Type, UnionBuilder,
@@ -121,13 +125,16 @@ pub(crate) enum InferenceRegion<'db> {
}

/// The inferred types for a single region.
#[derive(Debug, Eq, PartialEq, Default, Clone)]
#[derive(Debug, Eq, PartialEq, Default)]
pub(crate) struct TypeInference<'db> {
    /// The types of every expression in this region.
    expressions: FxHashMap<ScopedExpressionId, Type<'db>>,

    /// The types of every definition in this region.
    definitions: FxHashMap<Definition<'db>, Type<'db>>,

    /// The diagnostics for this region.
    diagnostics: TypeCheckDiagnostics,
}

impl<'db> TypeInference<'db> {
@@ -140,9 +147,14 @@ impl<'db> TypeInference<'db> {
        self.definitions[&definition]
    }

    pub(crate) fn diagnostics(&self) -> &[std::sync::Arc<TypeCheckDiagnostic>] {
        &self.diagnostics
    }

    fn shrink_to_fit(&mut self) {
        self.expressions.shrink_to_fit();
        self.definitions.shrink_to_fit();
        self.diagnostics.shrink_to_fit();
    }
}

@@ -233,6 +245,7 @@ impl<'db> TypeInferenceBuilder<'db> {
    fn extend(&mut self, inference: &TypeInference<'db>) {
        self.types.definitions.extend(inference.definitions.iter());
        self.types.expressions.extend(inference.expressions.iter());
        self.types.diagnostics.extend(&inference.diagnostics);
    }

    /// Infers types in the given [`InferenceRegion`].
@@ -292,11 +305,18 @@ impl<'db> TypeInferenceBuilder<'db> {
                );
            }
            DefinitionKind::Assignment(assignment) => {
                self.infer_assignment_definition(assignment.assignment(), definition);
                self.infer_assignment_definition(
                    assignment.target(),
                    assignment.assignment(),
                    definition,
                );
            }
            DefinitionKind::AnnotatedAssignment(annotated_assignment) => {
                self.infer_annotated_assignment_definition(annotated_assignment.node(), definition);
            }
            DefinitionKind::AugmentedAssignment(augmented_assignment) => {
                self.infer_augment_assignment_definition(augmented_assignment.node(), definition);
            }
            DefinitionKind::NamedExpression(named_expression) => {
                self.infer_named_expression_definition(named_expression.node(), definition);
            }
@@ -307,11 +327,17 @@ impl<'db> TypeInferenceBuilder<'db> {
                    definition,
                );
            }
            DefinitionKind::Parameter(parameter) => {
                self.infer_parameter_definition(parameter, definition);
            }
            DefinitionKind::ParameterWithDefault(parameter_with_default) => {
                self.infer_parameter_with_default_definition(parameter_with_default, definition);
            }
        }
    }

    fn infer_region_expression(&mut self, expression: Expression<'db>) {
        self.infer_expression(expression.node(self.db));
        self.infer_expression(expression.node_ref(self.db));
    }

    fn infer_module(&mut self, module: &ast::ModModule) {
@@ -421,6 +447,13 @@ impl<'db> TypeInferenceBuilder<'db> {
            .map(|decorator| self.infer_decorator(decorator))
            .collect();

        for default in parameters
            .iter_non_variadic_params()
            .filter_map(|param| param.default.as_deref())
        {
            self.infer_expression(default);
        }

        // If there are type params, parameters and returns are evaluated in that scope.
        if type_params.is_none() {
            self.infer_parameters(parameters);
@@ -458,10 +491,12 @@ impl<'db> TypeInferenceBuilder<'db> {
        let ast::ParameterWithDefault {
            range: _,
            parameter,
            default,
            default: _,
        } = parameter_with_default;
        self.infer_parameter(parameter);
        self.infer_optional_expression(default.as_deref());

        self.infer_optional_expression(parameter.annotation.as_deref());

        self.infer_definition(parameter_with_default);
    }

    fn infer_parameter(&mut self, parameter: &ast::Parameter) {
@@ -470,7 +505,29 @@ impl<'db> TypeInferenceBuilder<'db> {
            name: _,
            annotation,
        } = parameter;

        self.infer_optional_expression(annotation.as_deref());

        self.infer_definition(parameter);
    }

    fn infer_parameter_with_default_definition(
        &mut self,
        _parameter_with_default: &ast::ParameterWithDefault,
        definition: Definition<'db>,
    ) {
        // TODO(dhruvmanila): Infer types from annotation or default expression
        self.types.definitions.insert(definition, Type::Unknown);
    }

    fn infer_parameter_definition(
        &mut self,
        _parameter: &ast::Parameter,
        definition: Definition<'db>,
    ) {
        // TODO(dhruvmanila): Annotation expression is resolved at the enclosing scope, infer the
        // parameter type from there
        self.types.definitions.insert(definition, Type::Unknown);
    }

    fn infer_class_definition_statement(&mut self, class: &ast::StmtClassDef) {
@@ -667,6 +724,7 @@ impl<'db> TypeInferenceBuilder<'db> {

    fn infer_assignment_definition(
        &mut self,
        target: &ast::ExprName,
        assignment: &ast::StmtAssign,
        definition: Definition<'db>,
    ) {
@@ -676,6 +734,9 @@ impl<'db> TypeInferenceBuilder<'db> {
        let value_ty = self
            .types
            .expression_ty(assignment.value.scoped_ast_id(self.db, self.scope));
        self.types
            .expressions
            .insert(target.scoped_ast_id(self.db, self.scope), value_ty);
        self.types.definitions.insert(definition, value_ty);
    }

@@ -716,15 +777,35 @@ impl<'db> TypeInferenceBuilder<'db> {
    }

    fn infer_augmented_assignment_statement(&mut self, assignment: &ast::StmtAugAssign) {
        // TODO this should be a Definition
        if assignment.target.is_name_expr() {
            self.infer_definition(assignment);
        } else {
            // TODO currently we don't consider assignments to non-Names to be Definitions
            self.infer_augment_assignment(assignment);
        }
    }

    fn infer_augment_assignment_definition(
        &mut self,
        assignment: &ast::StmtAugAssign,
        definition: Definition<'db>,
    ) {
        let target_ty = self.infer_augment_assignment(assignment);
        self.types.definitions.insert(definition, target_ty);
    }

    fn infer_augment_assignment(&mut self, assignment: &ast::StmtAugAssign) -> Type<'db> {
        let ast::StmtAugAssign {
            range: _,
            target,
            op: _,
            value,
        } = assignment;
        self.infer_expression(target);
        self.infer_expression(value);
        self.infer_expression(target);

        // TODO(dhruvmanila): Resolve the target type using the value type and the operator
        Type::Unknown
    }

    fn infer_type_alias_statement(&mut self, type_alias_statement: &ast::StmtTypeAlias) {
@@ -785,7 +866,26 @@ impl<'db> TypeInferenceBuilder<'db> {
            asname: _,
        } = alias;

        let module_ty = self.module_ty_from_name(name);
        let module_ty = ModuleName::new(name)
            .ok_or(ModuleResolutionError::InvalidSyntax)
            .and_then(|module_name| self.module_ty_from_name(module_name));

        let module_ty = match module_ty {
            Ok(ty) => ty,
            Err(ModuleResolutionError::InvalidSyntax) => {
                tracing::debug!("Failed to resolve import due to invalid syntax");
                Type::Unknown
            }
            Err(ModuleResolutionError::UnresolvedModule) => {
                self.add_diagnostic(
                    AnyNodeRef::Alias(alias),
                    "unresolved-import",
                    format_args!("Import '{name}' could not be resolved."),
                );
                Type::Unknown
            }
        };

        self.types.definitions.insert(definition, module_ty);
    }

@@ -823,29 +923,128 @@ impl<'db> TypeInferenceBuilder<'db> {
        self.infer_optional_expression(cause.as_deref());
    }

    /// Given a `from .foo import bar` relative import, resolve the relative module
    /// we're importing `bar` from into an absolute [`ModuleName`]
    /// using the name of the module we're currently analyzing.
    ///
    /// - `level` is the number of dots at the beginning of the relative module name:
    ///   - `from .foo.bar import baz` => `level == 1`
    ///   - `from ...foo.bar import baz` => `level == 3`
    /// - `tail` is the relative module name stripped of all leading dots:
    ///   - `from .foo import bar` => `tail == "foo"`
    ///   - `from ..foo.bar import baz` => `tail == "foo.bar"`
    fn relative_module_name(
        &self,
        tail: Option<&str>,
        level: NonZeroU32,
    ) -> Result<ModuleName, ModuleResolutionError> {
        let Some(module) = file_to_module(self.db, self.file) else {
            tracing::debug!(
                "Relative module resolution '{}' failed; could not resolve file '{}' to a module",
                format_import_from_module(level.get(), tail),
                self.file.path(self.db)
            );
            return Err(ModuleResolutionError::UnresolvedModule);
        };
        let mut level = level.get();
        if module.kind().is_package() {
            level -= 1;
        }
        let mut module_name = module.name().to_owned();
        for _ in 0..level {
            module_name = module_name
                .parent()
                .ok_or(ModuleResolutionError::UnresolvedModule)?;
        }
        if let Some(tail) = tail {
            if let Some(valid_tail) = ModuleName::new(tail) {
                module_name.extend(&valid_tail);
            } else {
                tracing::debug!("Relative module resolution failed: invalid syntax");
                return Err(ModuleResolutionError::InvalidSyntax);
            }
        }
        Ok(module_name)
    }

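A worked example of the dot arithmetic above may help (a sketch with illustrative module names):

```rust
// Hedged worked example for `from ..foo.bar import baz` appearing in the
// module `package.sub.mod` (a plain module, so `is_package()` is false and
// `level` stays 2):
//
//   tail == Some("foo.bar"), level == 2
//   module_name: "package.sub.mod"
//       .parent() -> "package.sub"      // first dot
//       .parent() -> "package"          // second dot
//   extend with tail -> "package.foo.bar"
//
// Had the same statement appeared in `package/sub/__init__.py`, the module
// itself already names the containing package, so `level` is decremented
// by one before the loop runs.
```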
    fn infer_import_from_definition(
        &mut self,
        import_from: &ast::StmtImportFrom,
        alias: &ast::Alias,
        definition: Definition<'db>,
    ) {
        let ast::StmtImportFrom { module, .. } = import_from;
        let module_ty = if let Some(module) = module {
            self.module_ty_from_name(module)
        // TODO:
        // - Absolute `*` imports (`from collections import *`)
        // - Relative `*` imports (`from ...foo import *`)
        // - Submodule imports (`from collections import abc`,
        //   where `abc` is a submodule of the `collections` package)
        //
        // For the last item, see the currently skipped tests
        // `follow_relative_import_bare_to_module()` and
        // `follow_nonexistent_import_bare_to_module()`.
        let ast::StmtImportFrom { module, level, .. } = import_from;
        tracing::trace!("Resolving imported object {alias:?} from statement {import_from:?}");
        let module = module.as_deref();
        let module_name = if let Some(level) = NonZeroU32::new(*level) {
            tracing::trace!(
                "Resolving imported object '{}' from module '{}' relative to file '{}'",
                alias.name,
                format_import_from_module(level.get(), module),
                self.file.path(self.db),
            );
            self.relative_module_name(module, level)
        } else {
            // TODO support relative imports
            Type::Unknown
            tracing::trace!(
                "Resolving imported object '{}' from module '{}'",
                alias.name,
                format_import_from_module(*level, module),
            );
            module
                .and_then(ModuleName::new)
                .ok_or(ModuleResolutionError::InvalidSyntax)
        };

        let module_ty = module_name.and_then(|module_name| self.module_ty_from_name(module_name));

        let ast::Alias {
            range: _,
            name,
            asname: _,
        } = alias;

        let ty = module_ty.member(self.db, &Name::new(&name.id));
        // If a symbol is unbound in the module the symbol was originally defined in,
        // when we're trying to import the symbol from that module into "our" module,
        // the runtime error will occur immediately (rather than when the symbol is *used*,
        // as would be the case for a symbol with type `Unbound`), so it's appropriate to
        // think of the type of the imported symbol as `Unknown` rather than `Unbound`
        let member_ty = module_ty
            .unwrap_or(Type::Unbound)
            .member(self.db, &Name::new(&name.id))
            .replace_unbound_with(self.db, Type::Unknown);

        self.types.definitions.insert(definition, ty);
        if matches!(module_ty, Err(ModuleResolutionError::UnresolvedModule)) {
            self.add_diagnostic(
                AnyNodeRef::StmtImportFrom(import_from),
                "unresolved-import",
                format_args!(
                    "Import '{}{}' could not be resolved.",
                    ".".repeat(*level as usize),
                    module.unwrap_or_default()
                ),
            );
        } else if module_ty.is_ok() && member_ty.is_unknown() {
            self.add_diagnostic(
                AnyNodeRef::Alias(alias),
                "unresolved-import",
                format_args!(
                    "Could not resolve import of '{name}' from '{}{}'",
                    ".".repeat(*level as usize),
                    module.unwrap_or_default()
                ),
            );
        }

        self.types.definitions.insert(definition, member_ty);
    }
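The `Unbound`-to-`Unknown` conversion above is easiest to see with a concrete case (a sketch; the `imported_unbound_symbol_is_unknown` test later in this diff exercises exactly this):

```rust
// Hedged sketch: if `foo.py` merely *mentions* `x` without binding it,
// `x` is `Unbound` inside `foo.py`, but `from package.foo import x` fails
// at import time, so the imported symbol is modeled as `Unknown` instead:
//
//   type of `x` inside `src/package/foo.py`:        Unbound
//   type of `x` after `from package.foo import x`:  Unknown
```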

    fn infer_return_statement(&mut self, ret: &ast::StmtReturn) {
@@ -859,11 +1058,13 @@ impl<'db> TypeInferenceBuilder<'db> {
        }
    }

    fn module_ty_from_name(&self, name: &ast::Identifier) -> Type<'db> {
        let module = ModuleName::new(&name.id).and_then(|name| resolve_module(self.db, name));
        module
    fn module_ty_from_name(
        &self,
        module_name: ModuleName,
    ) -> Result<Type<'db>, ModuleResolutionError> {
        resolve_module(self.db, module_name)
            .map(|module| Type::Module(module.file()))
            .unwrap_or(Type::Unbound)
            .ok_or(ModuleResolutionError::UnresolvedModule)
    }

    fn infer_decorator(&mut self, decorator: &ast::Decorator) -> Type<'db> {
@@ -906,6 +1107,9 @@ impl<'db> TypeInferenceBuilder<'db> {
            ast::Expr::NumberLiteral(literal) => self.infer_number_literal_expression(literal),
            ast::Expr::BooleanLiteral(literal) => self.infer_boolean_literal_expression(literal),
            ast::Expr::StringLiteral(literal) => self.infer_string_literal_expression(literal),
            ast::Expr::BytesLiteral(bytes_literal) => {
                self.infer_bytes_literal_expression(bytes_literal)
            }
            ast::Expr::FString(fstring) => self.infer_fstring_expression(fstring),
            ast::Expr::EllipsisLiteral(literal) => self.infer_ellipsis_literal_expression(literal),
            ast::Expr::Tuple(tuple) => self.infer_tuple_expression(tuple),
@@ -932,8 +1136,7 @@ impl<'db> TypeInferenceBuilder<'db> {
            ast::Expr::Yield(yield_expression) => self.infer_yield_expression(yield_expression),
            ast::Expr::YieldFrom(yield_from) => self.infer_yield_from_expression(yield_from),
            ast::Expr::Await(await_expression) => self.infer_await_expression(await_expression),

            _ => todo!("expression type resolution for {:?}", expression),
            ast::Expr::IpyEscapeCommand(_) => todo!("Implement Ipy escape command support"),
        };

        let expr_id = expression.scoped_ast_id(self.db, self.scope);
@@ -970,6 +1173,12 @@ impl<'db> TypeInferenceBuilder<'db> {
        Type::Unknown
    }

    #[allow(clippy::unused_self)]
    fn infer_bytes_literal_expression(&mut self, _literal: &ast::ExprBytesLiteral) -> Type<'db> {
        // TODO
        Type::Unknown
    }

    fn infer_fstring_expression(&mut self, fstring: &ast::ExprFString) -> Type<'db> {
        let ast::ExprFString { range: _, value } = fstring;

@@ -985,21 +1194,7 @@ impl<'db> TypeInferenceBuilder<'db> {
                        flags: _,
                    } = fstring;
                    for element in elements {
                        match element {
                            ast::FStringElement::Literal(_) => {
                                // TODO string literal type
                            }
                            ast::FStringElement::Expression(expr_element) => {
                                let ast::FStringExpressionElement {
                                    range: _,
                                    expression,
                                    debug_text: _,
                                    conversion: _,
                                    format_spec: _,
                                } = expr_element;
                                self.infer_expression(expression);
                            }
                        }
                        self.infer_fstring_element(element);
                    }
                }
            }
@@ -1009,6 +1204,30 @@ impl<'db> TypeInferenceBuilder<'db> {
        Type::Unknown
    }

    fn infer_fstring_element(&mut self, element: &ast::FStringElement) {
        match element {
            ast::FStringElement::Literal(_) => {
                // TODO string literal type
            }
            ast::FStringElement::Expression(expr_element) => {
                let ast::FStringExpressionElement {
                    range: _,
                    expression,
                    debug_text: _,
                    conversion: _,
                    format_spec,
                } = expr_element;
                self.infer_expression(expression);

                if let Some(format_spec) = format_spec {
                    for spec_element in &format_spec.elements {
                        self.infer_fstring_element(spec_element);
                    }
                }
            }
        }
    }

    #[allow(clippy::unused_self)]
    fn infer_ellipsis_literal_expression(
        &mut self,
@@ -1277,6 +1496,13 @@ impl<'db> TypeInferenceBuilder<'db> {
        } = lambda_expression;

        if let Some(parameters) = parameters {
            for default in parameters
                .iter_non_variadic_params()
                .filter_map(|param| param.default.as_deref())
            {
                self.infer_expression(default);
            }

            self.infer_parameters(parameters);
        }

@@ -1354,18 +1580,22 @@ impl<'db> TypeInferenceBuilder<'db> {
            let symbol = symbols.symbol_by_name(id).unwrap();
            if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
                // implicit global
                let mut unbound_ty = if file_scope_id == FileScopeId::global() {
                let unbound_ty = if file_scope_id == FileScopeId::global() {
                    Type::Unbound
                } else {
                    global_symbol_ty_by_name(self.db, self.file, id)
                };
                // fallback to builtins
                if matches!(unbound_ty, Type::Unbound)
                if unbound_ty.may_be_unbound(self.db)
                    && Some(self.scope) != builtins_scope(self.db)
                {
                    unbound_ty = builtins_symbol_ty_by_name(self.db, id);
                    Some(unbound_ty.replace_unbound_with(
                        self.db,
                        builtins_symbol_ty_by_name(self.db, id),
                    ))
                } else {
                    Some(unbound_ty)
                }
                Some(unbound_ty)
            } else {
                Some(Type::Unbound)
            }
@@ -1526,7 +1756,7 @@ impl<'db> TypeInferenceBuilder<'db> {
        Type::Unknown
    }

    fn infer_type_parameters(&mut self, type_parameters: &TypeParams) {
    fn infer_type_parameters(&mut self, type_parameters: &ast::TypeParams) {
        let ast::TypeParams {
            range: _,
            type_params,
@@ -1563,6 +1793,28 @@ impl<'db> TypeInferenceBuilder<'db> {
        }
    }

    /// Adds a new diagnostic.
    ///
    /// The diagnostic does not get added if the rule isn't enabled for this file.
    fn add_diagnostic(&mut self, node: AnyNodeRef, rule: &str, message: std::fmt::Arguments) {
        if !self.db.is_file_open(self.file) {
            return;
        }

        // TODO: Don't emit the diagnostic if:
        // * The enclosing node contains any syntax errors
        // * The rule is disabled for this file. We probably want to introduce a new query that
        //   returns a rule selector for a given file that respects the package's settings,
        //   any global pragma comments in the file, and any per-file-ignores.

        self.types.diagnostics.push(TypeCheckDiagnostic {
            file: self.file,
            rule: rule.to_string(),
            message: message.to_string(),
            range: node.range(),
        });
    }
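A typical call site looks like the import inference earlier in this diff (a sketch; `alias` is an `ast::Alias` in scope):

```rust
// Hedged sketch: reporting an unresolved import through `add_diagnostic`.
self.add_diagnostic(
    AnyNodeRef::Alias(alias),
    "unresolved-import",
    format_args!("Import '{}' could not be resolved.", alias.name),
);
// Nothing is recorded when `self.db.is_file_open(self.file)` is false,
// so closed (e.g. vendored) files never accumulate diagnostics.
```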

    pub(super) fn finish(mut self) -> TypeInference<'db> {
        self.infer_region();
        self.types.shrink_to_fit();
@@ -1570,9 +1822,24 @@ impl<'db> TypeInferenceBuilder<'db> {
    }
}

fn format_import_from_module(level: u32, module: Option<&str>) -> String {
    format!(
        "{}{}",
        ".".repeat(level as usize),
        module.unwrap_or_default()
    )
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum ModuleResolutionError {
    InvalidSyntax,
    UnresolvedModule,
}

#[cfg(test)]
mod tests {
    use anyhow::Context;

    use ruff_db::files::{system_path_to_file, File};
    use ruff_db::parsed::parsed_module;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
@@ -1662,6 +1929,176 @@ mod tests {
        Ok(())
    }

    #[test]
    fn follow_relative_import_simple() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/foo.py", "X = 42"),
            ("src/package/bar.py", "from .foo import X"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");

        Ok(())
    }

    #[test]
    fn follow_nonexistent_relative_import_simple() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/bar.py", "from .foo import X"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "X", "Unknown");

        Ok(())
    }

    #[test]
    fn follow_relative_import_dotted() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/foo/bar/baz.py", "X = 42"),
            ("src/package/bar.py", "from .foo.bar.baz import X"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");

        Ok(())
    }

    #[test]
    fn follow_relative_import_bare_to_package() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", "X = 42"),
            ("src/package/bar.py", "from . import X"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "X", "Literal[42]");

        Ok(())
    }

    #[test]
    fn follow_nonexistent_relative_import_bare_to_package() -> anyhow::Result<()> {
        let mut db = setup_db();
        db.write_files([("src/package/bar.py", "from . import X")])?;
        assert_public_ty(&db, "src/package/bar.py", "X", "Unknown");
        Ok(())
    }

    #[ignore = "TODO: Submodule imports possibly not supported right now?"]
    #[test]
    fn follow_relative_import_bare_to_module() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/foo.py", "X = 42"),
            ("src/package/bar.py", "from . import foo; y = foo.X"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "y", "Literal[42]");

        Ok(())
    }

    #[ignore = "TODO: Submodule imports possibly not supported right now?"]
    #[test]
    fn follow_nonexistent_import_bare_to_module() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/bar.py", "from . import foo"),
        ])?;

        assert_public_ty(&db, "src/package/bar.py", "foo", "Unknown");

        Ok(())
    }

    #[test]
    fn follow_relative_import_from_dunder_init() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", "from .foo import X"),
            ("src/package/foo.py", "X = 42"),
        ])?;

        assert_public_ty(&db, "src/package/__init__.py", "X", "Literal[42]");

        Ok(())
    }

    #[test]
    fn follow_nonexistent_relative_import_from_dunder_init() -> anyhow::Result<()> {
        let mut db = setup_db();
        db.write_files([("src/package/__init__.py", "from .foo import X")])?;
        assert_public_ty(&db, "src/package/__init__.py", "X", "Unknown");
        Ok(())
    }

    #[test]
    fn follow_very_relative_import() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/foo.py", "X = 42"),
            (
                "src/package/subpackage/subsubpackage/bar.py",
                "from ...foo import X",
            ),
        ])?;

        assert_public_ty(
            &db,
            "src/package/subpackage/subsubpackage/bar.py",
            "X",
            "Literal[42]",
        );

        Ok(())
    }

    #[test]
    fn imported_unbound_symbol_is_unknown() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_files([
            ("src/package/__init__.py", ""),
            ("src/package/foo.py", "x"),
            ("src/package/bar.py", "from package.foo import x"),
        ])?;

        // the type as seen from external modules (`Unknown`)
        // is different from the type inside the module itself (`Unbound`):
        assert_public_ty(&db, "src/package/foo.py", "x", "Unbound");
        assert_public_ty(&db, "src/package/bar.py", "x", "Unknown");

        Ok(())
    }

    #[test]
    fn from_import_with_no_module_name() -> anyhow::Result<()> {
        // This test checks that invalid syntax in a `StmtImportFrom` node
        // leads to the type being inferred as `Unknown`
        let mut db = setup_db();
        db.write_file("src/foo.py", "from import bar")?;
        assert_public_ty(&db, "src/foo.py", "bar", "Unknown");
        Ok(())
    }

    #[test]
    fn resolve_base_class_by_name() -> anyhow::Result<()> {
        let mut db = setup_db();
@@ -2163,6 +2600,38 @@ mod tests {
        Ok(())
    }

    #[test]
    fn conditionally_global_or_builtin() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            if flag:
                copyright = 1
            def f():
                y = copyright
            ",
        )?;

        let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
        let index = semantic_index(&db, file);
        let function_scope = index
            .child_scopes(FileScopeId::global())
            .next()
            .unwrap()
            .0
            .to_scope_id(&db, file);
        let y_ty = symbol_ty_by_name(&db, function_scope, "y");

        assert_eq!(
            y_ty.display(&db).to_string(),
            "Literal[1] | Literal[copyright]"
        );

        Ok(())
    }

    /// Class name lookups do fall back to globals, but the public type never does.
    #[test]
    fn unbound_class_local() -> anyhow::Result<()> {
@@ -2291,6 +2760,26 @@ mod tests {
        Ok(())
    }

    #[test]
    fn narrow_not_none() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "/src/a.py",
            "
            x = None if flag else 1
            y = 0
            if x is not None:
                y = x
            ",
        )?;

        assert_public_ty(&db, "/src/a.py", "x", "Literal[1] | None");
        assert_public_ty(&db, "/src/a.py", "y", "Literal[0, 1]");

        Ok(())
    }

    #[test]
    fn while_loop() -> anyhow::Result<()> {
        let mut db = setup_db();
@@ -2388,10 +2877,11 @@ mod tests {

    fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
        let scope = global_scope(db, file);
        *use_def_map(db, scope)
        use_def_map(db, scope)
            .public_definitions(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
            .first()
            .next()
            .unwrap()
            .definition
    }

    #[test]

crates/red_knot_python_semantic/src/types/narrow.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type, TypeInference};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use std::sync::Arc;

/// Return the type constraint that `test` (if true) would place on `definition`, if any.
///
/// For example, if we have this code:
///
/// ```python
/// y = 1 if flag else None
/// x = 1 if flag else None
/// if x is not None:
///     ...
/// ```
///
/// The `test` expression `x is not None` places the constraint "not None" on the definition of
/// `x`, so in that case we'd return `Some(Type::Intersection(negative=[Type::None]))`.
///
/// But if we called this with the same `test` expression, but the `definition` of `y`, no
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
    definition: Definition<'db>,
) -> Option<Type<'db>> {
    all_narrowing_constraints(db, test)
        .get(&definition.symbol(db))
        .copied()
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, test).finish()
}

type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;

struct NarrowingConstraintsBuilder<'db> {
    db: &'db dyn Db,
    expression: Expression<'db>,
    constraints: NarrowingConstraints<'db>,
}

impl<'db> NarrowingConstraintsBuilder<'db> {
    fn new(db: &'db dyn Db, expression: Expression<'db>) -> Self {
        Self {
            db,
            expression,
            constraints: NarrowingConstraints::default(),
        }
    }

    fn finish(mut self) -> NarrowingConstraints<'db> {
        if let ast::Expr::Compare(expr_compare) = self.expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare);
        }
        // TODO other test expression kinds

        self.constraints.shrink_to_fit();
        self.constraints
    }

    fn symbols(&self) -> Arc<SymbolTable> {
        symbol_table(self.db, self.scope())
    }

    fn scope(&self) -> ScopeId<'db> {
        self.expression.scope(self.db)
    }

    fn inference(&self) -> &'db TypeInference<'db> {
        infer_expression_types(self.db, self.expression)
    }

    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare) {
        let ast::ExprCompare {
            range: _,
            left,
            ops,
            comparators,
        } = expr_compare;

        if let ast::Expr::Name(ast::ExprName {
            range: _,
            id,
            ctx: _,
        }) = left.as_ref()
        {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();
            let scope = self.scope();
            let inference = self.inference();
            for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
                let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
                if matches!(op, ast::CmpOp::IsNot) {
                    let ty = IntersectionBuilder::new(self.db)
                        .add_negative(comp_ty)
                        .build();
                    self.constraints.insert(symbol, ty);
                };
                // TODO other comparison types
            }
        }
    }
}
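For the doc-comment example, the constraint this builder stores for `x` is the negative intersection built with the API from earlier in this diff (a sketch, assuming a `db` handle in scope):

```rust
// Hedged sketch: `x is not None` records `~None` for `x`'s symbol.
let not_none = IntersectionBuilder::new(db)
    .add_negative(Type::None)
    .build();
// Intersected with the definition's type later on:
// (Literal[1] | None) & ~None == Literal[1]
```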
@@ -14,7 +14,6 @@ license = { workspace = true }
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
red_knot_workspace = { workspace = true }
|
||||
ruff_db = { workspace = true }
|
||||
ruff_linter = { workspace = true }
|
||||
ruff_notebook = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use lsp_types::ClientCapabilities;
|
||||
use ruff_linter::display_settings;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Default)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
@@ -66,20 +65,3 @@ impl ResolvedClientCapabilities {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ResolvedClientCapabilities {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
display_settings! {
|
||||
formatter = f,
|
||||
namespace = "capabilities",
|
||||
fields = [
|
||||
self.code_action_deferred_edit_resolution,
|
||||
self.apply_edit,
|
||||
self.document_changes,
|
||||
self.workspace_refresh,
|
||||
self.pull_diagnostics,
|
||||
]
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -278,18 +278,6 @@ impl DocumentQuery {
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate a source kind used by the linter.
|
||||
pub(crate) fn make_source_kind(&self) -> ruff_linter::source_kind::SourceKind {
|
||||
match self {
|
||||
Self::Text { document, .. } => {
|
||||
ruff_linter::source_kind::SourceKind::Python(document.contents().to_string())
|
||||
}
|
||||
Self::Notebook { notebook, .. } => {
|
||||
ruff_linter::source_kind::SourceKind::IpyNotebook(notebook.make_ruff_notebook())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempts to access the underlying notebook document that this query is selecting.
|
||||
pub fn as_notebook(&self) -> Option<&NotebookDocument> {
|
||||
match self {
|
||||
|
||||
@@ -109,7 +109,7 @@ impl Workspace {
|
||||
pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
|
||||
let result = self.db.check_file(file_id.file).map_err(into_error)?;
|
||||
|
||||
Ok(result.to_vec())
|
||||
Ok(result.clone())
|
||||
}
|
||||
|
||||
/// Checks all open files
|
||||
|
||||
@@ -17,5 +17,8 @@ fn check() {
|
||||
|
||||
let result = workspace.check_file(&test).expect("Check to succeed");
|
||||
|
||||
assert_eq!(result, vec!["/test.py:1:8: Unresolved import 'random22'"]);
|
||||
assert_eq!(
|
||||
result,
|
||||
vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
|
||||
);
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_db = { workspace = true, features = ["testing"]}
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -11,7 +11,6 @@ use ruff_db::{Db as SourceDb, Upcast};
|
||||
use salsa::plumbing::ZalsaDatabase;
|
||||
use salsa::{Cancelled, Event};
|
||||
|
||||
use crate::lint::Diagnostics;
|
||||
use crate::workspace::{check_file, Workspace, WorkspaceMetadata};
|
||||
|
||||
mod changes;
|
||||
@@ -61,7 +60,7 @@ impl RootDatabase {
|
||||
self.with_db(|db| db.workspace().check(db))
|
||||
}
|
||||
|
||||
pub fn check_file(&self, file: File) -> Result<Diagnostics, Cancelled> {
|
||||
pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
|
||||
self.with_db(|db| check_file(db, file))
|
||||
}
|
||||
|
||||
@@ -115,7 +114,15 @@ impl Upcast<dyn SourceDb> for RootDatabase {
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SemanticDb for RootDatabase {}
|
||||
impl SemanticDb for RootDatabase {
|
||||
fn is_file_open(&self, file: File) -> bool {
|
||||
let Some(workspace) = &self.workspace else {
|
||||
return false;
|
||||
};
|
||||
|
||||
workspace.is_file_open(self, file)
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SourceDb for RootDatabase {
|
||||
@@ -242,7 +249,12 @@ pub(crate) mod tests {
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl red_knot_python_semantic::Db for TestDb {}
|
||||
impl red_knot_python_semantic::Db for TestDb {
|
||||
fn is_file_open(&self, file: ruff_db::files::File) -> bool {
|
||||
!file.path(self).is_vendored_path()
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl Db for TestDb {}
|
||||
|
||||
|
||||
@@ -120,7 +120,7 @@ impl RootDatabase {
|
||||
if workspace_change {
|
||||
match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
|
||||
Ok(metadata) => {
|
||||
tracing::debug!("Reload workspace after structural change.");
|
||||
tracing::debug!("Reloading workspace after structural change.");
|
||||
// TODO: Handle changes in the program settings.
|
||||
workspace.reload(self, metadata);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Deref;
|
||||
use std::time::Duration;
|
||||
|
||||
use tracing::debug_span;
|
||||
@@ -22,7 +21,7 @@ use crate::db::Db;
|
||||
pub(crate) fn unwind_if_cancelled(db: &dyn Db) {}
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Vec<String> {
|
||||
#[allow(clippy::print_stdout)]
|
||||
if std::env::var("RED_KNOT_SLOW_LINT").is_ok() {
|
||||
for i in 0..10 {
|
||||
@@ -64,7 +63,7 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
}));
|
||||
}
|
||||
|
||||
Diagnostics::from(diagnostics)
|
||||
diagnostics
|
||||
}
|
||||
|
||||
fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
@@ -86,7 +85,7 @@ fn lint_lines(source: &str, diagnostics: &mut Vec<String>) {
|
||||
|
||||
#[allow(unreachable_pub)]
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
pub fn lint_semantic(db: &dyn Db, file_id: File) -> Vec<String> {
|
||||
let _span = debug_span!("lint_semantic", file=%file_id.path(db)).entered();
|
||||
|
||||
let source = source_text(db.upcast(), file_id);
|
||||
@@ -94,7 +93,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
let semantic = SemanticModel::new(db.upcast(), file_id);
|
||||
|
||||
if !parsed.is_valid() {
|
||||
return Diagnostics::Empty;
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let context = SemanticLintContext {
|
||||
@@ -106,7 +105,7 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
|
||||
SemanticVisitor { context: &context }.visit_body(parsed.suite());
|
||||
|
||||
Diagnostics::from(context.diagnostics.take())
|
||||
context.diagnostics.take()
|
||||
}
|
||||
|
||||
fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSize) -> String {
|
||||
@@ -116,44 +115,13 @@ fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSi
|
||||
.source_location(start, context.source_text());
|
||||
format!(
|
||||
"{}:{}:{}: {}",
|
||||
context.semantic.file_path().as_str(),
|
||||
context.semantic.file_path(),
|
||||
source_location.row,
|
||||
source_location.column,
|
||||
message,
|
||||
)
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
|
||||
match import {
|
||||
AnyImportRef::Import(import) => {
|
||||
for alias in &import.names {
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
alias.start(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
AnyImportRef::ImportFrom(import) => {
|
||||
for alias in &import.names {
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
alias.start(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
|
||||
if !matches!(name.ctx, ast::ExprContext::Load) {
|
||||
return;
|
||||
@@ -276,17 +244,8 @@ struct SemanticVisitor<'a> {
|
||||
|
||||
impl Visitor<'_> for SemanticVisitor<'_> {
|
||||
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
|
||||
match stmt {
|
||||
ast::Stmt::ClassDef(class) => {
|
||||
lint_bad_override(self.context, class);
|
||||
}
|
||||
ast::Stmt::Import(import) => {
|
||||
lint_unresolved_imports(self.context, AnyImportRef::Import(import));
|
||||
}
|
||||
ast::Stmt::ImportFrom(import) => {
|
||||
lint_unresolved_imports(self.context, AnyImportRef::ImportFrom(import));
|
||||
}
|
||||
_ => {}
|
||||
if let ast::Stmt::ClassDef(class) = stmt {
|
||||
lint_bad_override(self.context, class);
|
||||
}
|
||||
|
||||
walk_stmt(self, stmt);
|
||||
@@ -304,53 +263,6 @@ impl Visitor<'_> for SemanticVisitor<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum Diagnostics {
|
||||
Empty,
|
||||
List(Vec<String>),
|
||||
}
|
||||
|
||||
impl Diagnostics {
|
||||
pub fn as_slice(&self) -> &[String] {
|
||||
match self {
|
||||
Diagnostics::Empty => &[],
|
||||
Diagnostics::List(list) => list.as_slice(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Diagnostics {
|
||||
type Target = [String];
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<String>> for Diagnostics {
|
||||
fn from(value: Vec<String>) -> Self {
|
||||
if value.is_empty() {
|
||||
Diagnostics::Empty
|
||||
} else {
|
||||
Diagnostics::List(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum AnyImportRef<'a> {
|
||||
Import(&'a ast::StmtImport),
|
||||
ImportFrom(&'a ast::StmtImportFrom),
|
||||
}
|
||||
|
||||
impl Ranged for AnyImportRef<'_> {
|
||||
fn range(&self) -> ruff_text_size::TextRange {
|
||||
match self {
|
||||
AnyImportRef::Import(import) => import.range(),
|
||||
AnyImportRef::ImportFrom(import) => import.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use red_knot_python_semantic::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
@@ -359,7 +271,7 @@ mod tests {
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
|
||||
use super::{lint_semantic, Diagnostics};
|
||||
use super::lint_semantic;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
setup_db_with_root(SystemPathBuf::from("/src"))
|
||||
@@ -405,9 +317,9 @@ mod tests {
|
||||
.unwrap();
|
||||
|
||||
let file = system_path_to_file(&db, "/src/a.py").expect("file to exist");
|
||||
let Diagnostics::List(messages) = lint_semantic(&db, file) else {
|
||||
panic!("expected some diagnostics");
|
||||
};
|
||||
let messages = lint_semantic(&db, file);
|
||||
|
||||
assert_ne!(messages, &[] as &[String], "expected some diagnostics");
|
||||
|
||||
assert_eq!(
|
||||
*messages,
|
||||
|
||||
@@ -55,7 +55,7 @@ impl VirtualEnvironment {
|
||||
|
||||
let venv_path = SysPrefixPath::new(path, system)?;
|
||||
let pyvenv_cfg_path = venv_path.join("pyvenv.cfg");
|
||||
tracing::debug!("Attempting to parse virtual environment metadata at {pyvenv_cfg_path}");
|
||||
tracing::debug!("Attempting to parse virtual environment metadata at '{pyvenv_cfg_path}'");
|
||||
|
||||
let pyvenv_cfg = system
|
||||
.read_to_string(&pyvenv_cfg_path)
|
||||
@@ -191,7 +191,7 @@ impl VirtualEnvironment {
|
||||
} else {
|
||||
tracing::warn!(
|
||||
"Failed to resolve `sys.prefix` of the system Python installation \
|
||||
from the `home` value in the `pyvenv.cfg` file at {}. \
|
||||
from the `home` value in the `pyvenv.cfg` file at '{}'. \
|
||||
System site-packages will not be used for module resolution.",
|
||||
venv_path.join("pyvenv.cfg")
|
||||
);
|
||||
@@ -425,7 +425,7 @@ impl Deref for SysPrefixPath {
|
||||
|
||||
impl fmt::Display for SysPrefixPath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "`sys.prefix` path {}", self.0)
|
||||
write!(f, "`sys.prefix` path '{}'", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -482,7 +482,7 @@ impl Deref for PythonHomePath {
|
||||
|
||||
impl fmt::Display for PythonHomePath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "`home` location {}", self.0)
|
||||
write!(f, "`home` location '{}'", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -109,7 +109,7 @@ struct WatcherInner {
impl Watcher {
    /// Sets up file watching for `path`.
    pub fn watch(&mut self, path: &SystemPath) -> notify::Result<()> {
        tracing::debug!("Watching path: {path}.");
        tracing::debug!("Watching path: '{path}'.");

        self.inner_mut()
            .watcher
@@ -118,7 +118,7 @@ impl Watcher {

    /// Stops file watching for `path`.
    pub fn unwatch(&mut self, path: &SystemPath) -> notify::Result<()> {
        tracing::debug!("Unwatching path: {path}.");
        tracing::debug!("Unwatching path: '{path}'.");

        self.inner_mut().watcher.unwatch(path.as_std_path())
    }
@@ -351,7 +351,7 @@ impl Debouncer {
            }

            EventKind::Any => {
                tracing::debug!("Skip any FS event for {path}.");
                tracing::debug!("Skipping any FS event for '{path}'.");
                return;
            }
        };

@@ -4,17 +4,19 @@ use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};

pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::source::{source_text, SourceDiagnostic};
use red_knot_python_semantic::types::check_types;
use ruff_db::source::{line_index, source_text, SourceDiagnostic};
use ruff_db::{
    files::{system_path_to_file, File},
    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};
use ruff_text_size::Ranged;

use crate::workspace::files::{Index, IndexedFiles, PackageFiles};
use crate::workspace::files::{Index, Indexed, PackageFiles};
use crate::{
    db::Db,
    lint::{lint_semantic, lint_syntax, Diagnostics},
    lint::{lint_semantic, lint_syntax},
};

mod files;
@@ -92,8 +94,8 @@ pub struct Package {
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    #[return_ref]
    #[default]
    #[return_ref]
    file_set: PackageFiles,
    // TODO: Add the loaded settings.
}
@@ -141,9 +143,7 @@ impl Workspace {
            new_packages.insert(path, package);
        }

        self.set_package_tree(db)
            .with_durability(Durability::MEDIUM)
            .to(new_packages);
        self.set_package_tree(db).to(new_packages);
    }

    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
@@ -197,7 +197,7 @@ impl Workspace {
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Opening file {}", file.path(db));
        tracing::debug!("Opening file '{}'", file.path(db));

        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
@@ -206,7 +206,7 @@ impl Workspace {

    /// Closes a file in the workspace.
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        tracing::debug!("Closing file {}", file.path(db));
        tracing::debug!("Closing file '{}'", file.path(db));

        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);
@@ -249,6 +249,23 @@ impl Workspace {
            FxHashSet::default()
        }
    }

    /// Returns `true` if the file is open in the workspace.
    ///
    /// A file is considered open when:
    /// * it is explicitly set as an open file using [`open_file`](Self::open_file)
    /// * it has a [`SystemPath`] and belongs to a package's `src` files
    /// * it has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
    pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
        if let Some(open_files) = self.open_files(db) {
            open_files.contains(&file)
        } else if let Some(system_path) = file.path(db).as_system_path() {
            self.package(db, system_path)
                .map_or(false, |package| package.contains_file(db, file))
        } else {
            file.path(db).is_system_virtual_path()
        }
    }
}

#[salsa::tracked]
@@ -259,13 +276,13 @@ impl Package {

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).read().contains(&file)
        self.files(db).contains(&file)
    }

    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Remove file {} from package {}",
            "Removing file '{}' from package '{}'",
            file.path(db),
            self.name(db)
        );
@@ -278,7 +295,11 @@ impl Package {
    }

    pub fn add_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Add file {} to package {}", file.path(db), self.name(db));
        tracing::debug!(
            "Adding file '{}' to package '{}'",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
@@ -289,10 +310,10 @@ impl Package {

    #[tracing::instrument(level = "debug", skip(db))]
    pub(crate) fn check(self, db: &dyn Db) -> Vec<String> {
        tracing::debug!("Checking package {}", self.root(db));
        tracing::debug!("Checking package '{}'", self.root(db));

        let mut result = Vec::new();
        for file in &self.files(db).read() {
        for file in &self.files(db) {
            let diagnostics = check_file(db, file);
            result.extend_from_slice(&diagnostics);
        }
@@ -301,15 +322,20 @@ impl Package {
    }

    /// Returns the files belonging to this package.
    #[salsa::tracked]
    pub fn files(self, db: &dyn Db) -> IndexedFiles {
        let _entered = tracing::debug_span!("files").entered();
    pub fn files(self, db: &dyn Db) -> Indexed<'_> {
        let files = self.file_set(db);

        let indexed = match files.get() {
            Index::Lazy(vacant) => {
                tracing::debug!("Indexing files for package {}", self.name(db));
                let _entered =
                    tracing::debug_span!("index_package_files", package = %self.name(db)).entered();

                let files = discover_package_files(db, self.root(db));
                tracing::info!(
                    "Indexed {} files for package '{}'",
                    files.len(),
                    self.name(db)
                );
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
@@ -330,14 +356,12 @@ impl Package {
        assert_eq!(root, metadata.root());

        if self.name(db) != metadata.name() {
            self.set_name(db)
                .with_durability(Durability::MEDIUM)
                .to(metadata.name);
            self.set_name(db).to(metadata.name);
        }
    }

    pub fn reload_files(self, db: &mut dyn Db) {
        tracing::debug!("Reload files for package {}", self.name(db));
        tracing::debug!("Reloading files for package '{}'", self.name(db));

        if !self.file_set(db).is_lazy() {
            // Force a re-index of the files in the next revision.
@@ -347,10 +371,10 @@ impl Package {
}

#[salsa::tracked]
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<String> {
    let path = file.path(db);
    let _span = tracing::debug_span!("check_file", file=%path).entered();
    tracing::debug!("Checking file {path}");
    tracing::debug!("Checking file '{path}'");

    let mut diagnostics = Vec::new();

@@ -363,13 +387,25 @@ pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    );

    // Abort checking if there are IO errors.
    if source_text(db.upcast(), file).has_read_error() {
        return Diagnostics::from(diagnostics);
    let source = source_text(db.upcast(), file);

    if source.has_read_error() {
        return diagnostics;
    }

    for diagnostic in check_types(db.upcast(), file) {
        let index = line_index(db.upcast(), diagnostic.file());
        let location = index.source_location(diagnostic.start(), source.as_str());
        diagnostics.push(format!(
            "{path}:{location}: {message}",
            path = file.path(db),
            message = diagnostic.message()
        ));
    }

    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
    Diagnostics::from(diagnostics)
    diagnostics
}

fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
@@ -423,7 +459,7 @@ mod tests {
    use ruff_db::testing::assert_function_query_was_not_run;

    use crate::db::tests::TestDb;
    use crate::lint::{lint_syntax, Diagnostics};
    use crate::lint::lint_syntax;
    use crate::workspace::check_file;

    #[test]
@@ -441,9 +477,7 @@ mod tests {
        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file),
            Diagnostics::List(vec![
                "Failed to read file: No such file or directory".to_string()
            ])
            vec!["Failed to read file: No such file or directory".to_string()]
        );

        let events = db.take_salsa_events();
@@ -454,7 +488,7 @@ mod tests {
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(check_file(&db, file), Diagnostics::Empty);
        assert_eq!(check_file(&db, file), vec![] as Vec<String>);

        Ok(())
    }

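Note: the reworked `check_file` above turns a type-checker diagnostic's byte offset into a human-readable location via a line index before formatting it as `path:location: message`. A rough, dependency-free sketch of that offset-to-location step (the real code uses ruff_db's `line_index`/`source_location`; everything below is illustrative and byte-based, so it is only exact for ASCII):

```rust
/// Byte offsets where each line starts, computed once per file.
struct LineIndex {
    line_starts: Vec<usize>,
}

impl LineIndex {
    fn new(source: &str) -> Self {
        let mut line_starts = vec![0];
        line_starts.extend(
            source
                .char_indices()
                .filter(|&(_, c)| c == '\n')
                .map(|(i, _)| i + 1),
        );
        Self { line_starts }
    }

    /// Translates a byte offset into a 1-based (line, column) pair.
    fn location(&self, offset: usize) -> (usize, usize) {
        let line = self.line_starts.partition_point(|&start| start <= offset);
        let column = offset - self.line_starts[line - 1] + 1;
        (line, column)
    }
}

fn main() {
    let source = "import os\nx = undefined_name\n";
    let index = LineIndex::new(source);
    let offset = source.find("undefined_name").unwrap();
    let (line, column) = index.location(offset);
    // Prints a message in the same shape as the diagnostics in this PR:
    // /src/a.py:2:5: Name 'undefined_name' used when not defined.
    println!("/src/a.py:{line}:{column}: Name 'undefined_name' used when not defined.");
}
```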
@@ -1,4 +1,4 @@
use std::iter::FusedIterator;
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::Arc;

@@ -10,6 +10,9 @@ use ruff_db::files::File;
use crate::db::Db;
use crate::workspace::Package;

/// Cheaply cloneable hash set of files.
type FileSet = Arc<FxHashSet<File>>;

/// The indexed files of a package.
///
/// The indexing happens lazily, but the files are then cached for subsequent reads.
@@ -18,7 +21,7 @@ use crate::workspace::Package;
/// The implementation uses internal mutability to transition between the lazy and indexed state
/// without triggering a new salsa revision. This is safe because the initial indexing happens on first access,
/// so no query can depend on the contents of the indexed files before that. All subsequent mutations to
/// the indexed files must go through `IndexedFilesMut`, which uses the Salsa setter `package.set_file_set` to
/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files has changed.
#[derive(Debug)]
pub struct PackageFiles {
@@ -32,46 +35,67 @@ impl PackageFiles {
        }
    }

    fn indexed(indexed_files: IndexedFiles) -> Self {
    fn indexed(files: FileSet) -> Self {
        Self {
            state: std::sync::Mutex::new(State::Indexed(indexed_files)),
            state: std::sync::Mutex::new(State::Indexed(files)),
        }
    }

    pub fn get(&self) -> Index {
    pub(super) fn get(&self) -> Index {
        let state = self.state.lock().unwrap();

        match &*state {
            State::Lazy => Index::Lazy(LazyFiles { files: state }),
            State::Indexed(files) => Index::Indexed(files.clone()),
            State::Indexed(files) => Index::Indexed(Indexed {
                files: Arc::clone(files),
                _lifetime: PhantomData,
            }),
        }
    }

    pub fn is_lazy(&self) -> bool {
    pub(super) fn is_lazy(&self) -> bool {
        matches!(*self.state.lock().unwrap(), State::Lazy)
    }

    /// Returns a mutable view on the index that allows cheap in-place mutations.
    ///
    /// The changes are automatically written back to the database once the view is dropped.
    pub fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedFilesMut> {
    pub(super) fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedMut> {
        // Calling `zalsa_mut` cancels all pending salsa queries. This ensures that there are no pending
        // reads to the file set.
        // TODO: Use a non-internal API instead https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries
        let _ = db.as_dyn_database_mut().zalsa_mut();

        let files = package.file_set(db);

        let indexed = match &*files.state.lock().unwrap() {
            State::Lazy => return None,
            State::Indexed(indexed) => indexed.clone(),
        // Replace the state with lazy. The `IndexedMut` guard restores the state
        // to `State::Indexed` or sets a new `PackageFiles` when it gets dropped to ensure the state
        // is restored to how it was before replacing the value.
        //
        // It isn't necessary to hold on to the lock after this point:
        // * The above call to `zalsa_mut` guarantees that there's exactly **one** DB reference.
        // * `Indexed` has a `'db` lifetime, and this method requires a `&mut db`.
        //   This means that there can't be any pending reference to `Indexed` because Rust
        //   doesn't allow borrowing `db` as mutable (to call this method) and immutable (`Indexed<'db>`) at the same time.
        //   There can't be any other `Indexed<'db>` references created by clones of this DB because
        //   all clones must have been dropped at this point and the `Indexed`
        //   can't outlive the database (constrained by the `db` lifetime).
        let state = {
            let files = package.file_set(db);
            let mut locked = files.state.lock().unwrap();
            std::mem::replace(&mut *locked, State::Lazy)
        };

        Some(IndexedFilesMut {
        let indexed = match state {
            // If it's already lazy, just return. We also don't need to restore anything because the
            // replace above was a no-op.
            State::Lazy => return None,
            State::Indexed(indexed) => indexed,
        };

        Some(IndexedMut {
            db: Some(db),
            package,
            new_revision: indexed.revision,
            indexed,
            files: indexed,
            did_change: false,
        })
    }
}
@@ -88,152 +112,93 @@ enum State {
    Lazy,

    /// The files are indexed. Stores the known files of a package.
    Indexed(IndexedFiles),
    Indexed(FileSet),
}

pub enum Index<'a> {
pub(super) enum Index<'db> {
    /// The index has not yet been computed. Allows inserting the files.
    Lazy(LazyFiles<'a>),
    Lazy(LazyFiles<'db>),

    Indexed(IndexedFiles),
    Indexed(Indexed<'db>),
}

/// Package files that have not been indexed yet.
pub struct LazyFiles<'a> {
    files: std::sync::MutexGuard<'a, State>,
pub(super) struct LazyFiles<'db> {
    files: std::sync::MutexGuard<'db, State>,
}

impl<'a> LazyFiles<'a> {
impl<'db> LazyFiles<'db> {
    /// Sets the indexed files of a package to `files`.
    pub fn set(mut self, files: FxHashSet<File>) -> IndexedFiles {
        let files = IndexedFiles::new(files);
        *self.files = State::Indexed(files.clone());
    pub(super) fn set(mut self, files: FxHashSet<File>) -> Indexed<'db> {
        let files = Indexed {
            files: Arc::new(files),
            _lifetime: PhantomData,
        };
        *self.files = State::Indexed(Arc::clone(&files.files));
        files
    }
}

/// The indexed files of a package.
///
/// # Salsa integration
/// The type is cheaply cloneable and allows for in-place mutation of the files. The in-place mutation requires
/// extra care because the type is used as the result of Salsa queries and Salsa relies on a type's equality
/// to determine if the output has changed. This is accomplished by using a `revision` that gets incremented
/// whenever the files are changed. The revision ensures that salsa's comparison of the
/// previous [`IndexedFiles`] with the next [`IndexedFiles`] returns false even though they both
/// point to the same underlying hash set.
///
/// # Equality
/// Two [`IndexedFiles`] are only equal if they have the same revision and point to the **same** (identity) hash set.
#[derive(Debug, Clone)]
pub struct IndexedFiles {
    revision: u64,
    files: Arc<std::sync::Mutex<FxHashSet<File>>>,
/// Note: This type is intentionally non-cloneable. Making it cloneable requires
/// revisiting the locking behavior in [`PackageFiles::indexed_mut`].
#[derive(Debug, PartialEq, Eq)]
pub struct Indexed<'db> {
    files: FileSet,
    // Preserve the lifetime of `PackageFiles`.
    _lifetime: PhantomData<&'db ()>,
}

impl IndexedFiles {
    fn new(files: FxHashSet<File>) -> Self {
        Self {
            files: Arc::new(std::sync::Mutex::new(files)),
            revision: 0,
        }
    }

    /// Locks the file index for reading.
    pub fn read(&self) -> IndexedFilesGuard {
        IndexedFilesGuard {
            guard: self.files.lock().unwrap(),
        }
    }
}

impl PartialEq for IndexedFiles {
    fn eq(&self, other: &Self) -> bool {
        self.revision == other.revision && Arc::ptr_eq(&self.files, &other.files)
    }
}

impl Eq for IndexedFiles {}

pub struct IndexedFilesGuard<'a> {
    guard: std::sync::MutexGuard<'a, FxHashSet<File>>,
}

impl Deref for IndexedFilesGuard<'_> {
impl Deref for Indexed<'_> {
    type Target = FxHashSet<File>;

    fn deref(&self) -> &Self::Target {
        &self.guard
        &self.files
    }
}

impl<'a> IntoIterator for &'a IndexedFilesGuard<'a> {
impl<'a> IntoIterator for &'a Indexed<'_> {
    type Item = File;
    type IntoIter = IndexedFilesIter<'a>;
    type IntoIter = std::iter::Copied<std::collections::hash_set::Iter<'a, File>>;

    fn into_iter(self) -> Self::IntoIter {
        IndexedFilesIter {
            inner: self.guard.iter(),
        }
        self.files.iter().copied()
    }
}

/// Iterator over the indexed files.
///
/// # Locks
/// Holding on to the iterator locks the file index for reading.
pub struct IndexedFilesIter<'a> {
    inner: std::collections::hash_set::Iter<'a, File>,
}

impl<'a> Iterator for IndexedFilesIter<'a> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().copied()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl FusedIterator for IndexedFilesIter<'_> {}

impl ExactSizeIterator for IndexedFilesIter<'_> {}

/// A mutable view of a package's indexed files.
///
/// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub struct IndexedFilesMut<'db> {
pub(super) struct IndexedMut<'db> {
    db: Option<&'db mut dyn Db>,
    package: Package,
    indexed: IndexedFiles,
    new_revision: u64,
    files: FileSet,
    did_change: bool,
}

impl IndexedFilesMut<'_> {
    pub fn insert(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().insert(file) {
            self.new_revision += 1;
impl IndexedMut<'_> {
    pub(super) fn insert(&mut self, file: File) -> bool {
        if self.files_mut().insert(file) {
            self.did_change = true;
            true
        } else {
            false
        }
    }

    pub fn remove(&mut self, file: File) -> bool {
        if self.indexed.files.lock().unwrap().remove(&file) {
            self.new_revision += 1;
    pub(super) fn remove(&mut self, file: File) -> bool {
        if self.files_mut().remove(&file) {
            self.did_change = true;
            true
        } else {
            false
        }
    }

    /// Writes the changes back to the database.
    pub fn set(mut self) {
        self.set_impl();
    fn files_mut(&mut self) -> &mut FxHashSet<File> {
        Arc::get_mut(&mut self.files).expect("All references to `FileSet` to have been dropped")
    }

    fn set_impl(&mut self) {
@@ -241,19 +206,70 @@ impl IndexedFilesMut<'_> {
            return;
        };

        if self.indexed.revision != self.new_revision {
        let files = Arc::clone(&self.files);

        if self.did_change {
            // If there are changes, set the new file_set to trigger a salsa revision change.
            self.package
                .set_file_set(db)
                .to(PackageFiles::indexed(IndexedFiles {
                    revision: self.new_revision,
                    files: self.indexed.files.clone(),
                }));
                .to(PackageFiles::indexed(files));
        } else {
            // The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
            *self.package.file_set(db).state.lock().unwrap() = State::Indexed(files);
        }
    }
}

impl Drop for IndexedFilesMut<'_> {
impl Drop for IndexedMut<'_> {
    fn drop(&mut self) {
        self.set_impl();
    }
}

#[cfg(test)]
mod tests {
    use rustc_hash::FxHashSet;

    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
    use ruff_python_ast::name::Name;

    use crate::db::tests::TestDb;
    use crate::workspace::files::Index;
    use crate::workspace::Package;

    #[test]
    fn re_entrance() -> anyhow::Result<()> {
        let mut db = TestDb::new();

        db.write_file("test.py", "")?;

        let package = Package::new(&db, Name::new("test"), SystemPathBuf::from("/test"));

        let file = system_path_to_file(&db, "test.py").unwrap();

        let files = match package.file_set(&db).get() {
            Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
            Index::Indexed(files) => files,
        };

        // Calling files a second time should not deadlock.
        // This can e.g. happen when `check_file` iterates over all files and
        // `is_file_open` queries the open files.
        let files_2 = package.file_set(&db).get();

        match files_2 {
            Index::Lazy(_) => {
                panic!("Expected indexed files, got lazy files");
            }
            Index::Indexed(files_2) => {
                assert_eq!(
                    files_2.iter().collect::<Vec<_>>(),
                    files.iter().collect::<Vec<_>>()
                );
            }
        }

        Ok(())
    }
}

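Note: the new `IndexedMut` above is a guard that stages mutations and writes them back on drop, bumping state only when something actually changed so Salsa sees no spurious new revision. A stripped-down sketch of that write-back-on-drop idea, with the Salsa database replaced by a plain struct (all names illustrative):

```rust
use std::collections::HashSet;

struct Db {
    files: HashSet<&'static str>,
    revision: u64,
}

/// Mutable view over `Db::files`; flushes on drop only if anything changed.
struct IndexedMut<'db> {
    db: &'db mut Db,
    staged: HashSet<&'static str>,
    did_change: bool,
}

impl<'db> IndexedMut<'db> {
    fn new(db: &'db mut Db) -> Self {
        let staged = db.files.clone();
        Self { db, staged, did_change: false }
    }

    fn insert(&mut self, file: &'static str) {
        self.did_change |= self.staged.insert(file);
    }
}

impl Drop for IndexedMut<'_> {
    fn drop(&mut self) {
        if self.did_change {
            // Only bump the revision when the set really changed, mirroring
            // how the real guard avoids spurious Salsa invalidations.
            self.db.files = std::mem::take(&mut self.staged);
            self.db.revision += 1;
        }
    }
}

fn main() {
    let mut db = Db { files: HashSet::new(), revision: 0 };
    {
        let mut guard = IndexedMut::new(&mut db);
        guard.insert("a.py");
    } // the guard drops here and flushes its changes
    assert_eq!(db.revision, 1);
    assert!(db.files.contains("a.py"));
}
```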
@@ -1,9 +1,14 @@
use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_python_semantic::{
    HasTy, ProgramSettings, PythonVersion, SearchPathSettings, SemanticModel,
};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::lint::lint_semantic;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::system_path_to_file;
use ruff_db::system::{OsSystem, SystemPathBuf};
use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{Alias, Expr, Parameter, ParameterWithDefault, Stmt};
use std::fs;
use std::path::PathBuf;

@@ -28,17 +33,100 @@ fn setup_db(workspace_root: SystemPathBuf) -> anyhow::Result<RootDatabase> {
#[allow(clippy::print_stdout)]
fn corpus_no_panic() -> anyhow::Result<()> {
    let corpus = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("resources/test/corpus");
    let system_corpus =
        SystemPathBuf::from_path_buf(corpus.clone()).expect("corpus path to be UTF8");
    let db = setup_db(system_corpus.clone())?;
    let system_corpus = SystemPath::from_std_path(&corpus).expect("corpus path to be UTF8");
    let db = setup_db(system_corpus.to_path_buf())?;

    for path in fs::read_dir(&corpus).expect("corpus to be a directory") {
        let path = path.expect("path to not be an error").path();
        println!("checking {path:?}");
        let path = SystemPathBuf::from_path_buf(path.clone()).expect("path to be UTF-8");
        // this test is only asserting that we can run the lint without a panic
        // this test is only asserting that we can pull every expression type without a panic
        // (and some non-expressions that clearly define a single type)
        let file = system_path_to_file(&db, path).expect("file to exist");
        lint_semantic(&db, file);

        pull_types(&db, file);
    }
    Ok(())
}

fn pull_types(db: &RootDatabase, file: File) {
    let mut visitor = PullTypesVisitor::new(db, file);

    let ast = parsed_module(db, file);

    visitor.visit_body(ast.suite());
}

struct PullTypesVisitor<'db> {
    model: SemanticModel<'db>,
}

impl<'db> PullTypesVisitor<'db> {
    fn new(db: &'db RootDatabase, file: File) -> Self {
        Self {
            model: SemanticModel::new(db, file),
        }
    }
}

impl SourceOrderVisitor<'_> for PullTypesVisitor<'_> {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        match stmt {
            Stmt::FunctionDef(function) => {
                let _ty = function.ty(&self.model);
            }
            Stmt::ClassDef(class) => {
                let _ty = class.ty(&self.model);
            }
            Stmt::AnnAssign(_)
            | Stmt::Return(_)
            | Stmt::Delete(_)
            | Stmt::Assign(_)
            | Stmt::AugAssign(_)
            | Stmt::TypeAlias(_)
            | Stmt::For(_)
            | Stmt::While(_)
            | Stmt::If(_)
            | Stmt::With(_)
            | Stmt::Match(_)
            | Stmt::Raise(_)
            | Stmt::Try(_)
            | Stmt::Assert(_)
            | Stmt::Import(_)
            | Stmt::ImportFrom(_)
            | Stmt::Global(_)
            | Stmt::Nonlocal(_)
            | Stmt::Expr(_)
            | Stmt::Pass(_)
            | Stmt::Break(_)
            | Stmt::Continue(_)
            | Stmt::IpyEscapeCommand(_) => {}
        }

        source_order::walk_stmt(self, stmt);
    }

    fn visit_expr(&mut self, expr: &Expr) {
        let _ty = expr.ty(&self.model);

        source_order::walk_expr(self, expr);
    }

    fn visit_parameter(&mut self, parameter: &Parameter) {
        let _ty = parameter.ty(&self.model);

        source_order::walk_parameter(self, parameter);
    }

    fn visit_parameter_with_default(&mut self, parameter_with_default: &ParameterWithDefault) {
        let _ty = parameter_with_default.ty(&self.model);

        source_order::walk_parameter_with_default(self, parameter_with_default);
    }

    fn visit_alias(&mut self, alias: &Alias) {
        let _ty = alias.ty(&self.model);

        source_order::walk_alias(self, alias);
    }
}

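Note: the corpus test above follows the classic visit-then-walk visitor shape, where each `visit_*` method does its work (pulling the node's type) and then delegates to a `walk_*` helper that owns the recursion. A toy illustration of the same shape over a miniature AST (this is not the ruff_python_ast API, just the pattern):

```rust
// Miniature AST: statements containing nested expressions.
enum Expr {
    Num(i64),
    Add(Box<Expr>, Box<Expr>),
}

enum Stmt {
    Assign(&'static str, Expr),
}

trait Visitor {
    fn visit_stmt(&mut self, stmt: &Stmt) {
        walk_stmt(self, stmt);
    }
    fn visit_expr(&mut self, expr: &Expr) {
        walk_expr(self, expr);
    }
}

// The `walk_*` functions own the recursion, so a visitor overrides only what
// it needs and delegates back, mirroring `source_order::walk_stmt`/`walk_expr`.
fn walk_stmt<V: Visitor + ?Sized>(visitor: &mut V, stmt: &Stmt) {
    match stmt {
        Stmt::Assign(_, value) => visitor.visit_expr(value),
    }
}

fn walk_expr<V: Visitor + ?Sized>(visitor: &mut V, expr: &Expr) {
    if let Expr::Add(lhs, rhs) = expr {
        visitor.visit_expr(lhs);
        visitor.visit_expr(rhs);
    }
}

/// Touches every expression, the way `PullTypesVisitor` pulls every type.
struct CountExprs(usize);

impl Visitor for CountExprs {
    fn visit_expr(&mut self, expr: &Expr) {
        self.0 += 1;
        walk_expr(self, expr); // keep recursing after doing our own work
    }
}

fn main() {
    let stmt = Stmt::Assign("x", Expr::Add(Box::new(Expr::Num(1)), Box::new(Expr::Num(2))));
    let mut counter = CountExprs(0);
    counter.visit_stmt(&stmt);
    assert_eq!(counter.0, 3); // Add, Num(1), Num(2)
}
```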
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.6.0"
version = "0.6.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -1434,7 +1434,7 @@ def unused(x):

    insta::assert_snapshot!(test_code, @r###"
    def unused(x):  # noqa: ANN001, ANN201, ARG001, D103
    def unused(x):  # noqa: ANN001, ANN201, D103
        pass
    "###);


@@ -2,6 +2,7 @@

use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch::{ChangeEvent, ChangedKind};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use ruff_benchmark::TestFile;
@@ -12,13 +13,53 @@ use ruff_db::system::{MemoryFileSystem, SystemPath, TestSystem};
struct Case {
    db: RootDatabase,
    fs: MemoryFileSystem,
    parser: File,
    re: File,
    re_path: &'static SystemPath,
}

const TOMLLIB_312_URL: &str = "https://raw.githubusercontent.com/python/cpython/8e8a4baf652f6e1cee7acde9d78c4b6154539748/Lib/tomllib";

// This first "unresolved import" is because we don't understand `*` imports yet.
// The following "unresolved import" violations are because we currently can't distinguish between
// "Symbol exists in the module but its type is unknown" and
// "Symbol does not exist in the module".
static EXPECTED_DIAGNOSTICS: &[&str] = &[
    "/src/tomllib/_parser.py:7:29: Could not resolve import of 'Iterable' from 'collections.abc'",
    "/src/tomllib/_parser.py:10:20: Could not resolve import of 'Any' from 'typing'",
    "/src/tomllib/_parser.py:13:5: Could not resolve import of 'RE_DATETIME' from '._re'",
    "/src/tomllib/_parser.py:14:5: Could not resolve import of 'RE_LOCALTIME' from '._re'",
    "/src/tomllib/_parser.py:15:5: Could not resolve import of 'RE_NUMBER' from '._re'",
    "/src/tomllib/_parser.py:20:21: Could not resolve import of 'Key' from '._types'",
    "/src/tomllib/_parser.py:20:26: Could not resolve import of 'ParseFloat' from '._types'",
    "Line 69 is too long (89 characters)",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "Use double quotes for strings",
    "/src/tomllib/_parser.py:153:22: Name 'key' used when not defined.",
    "/src/tomllib/_parser.py:153:27: Name 'flag' used when not defined.",
    "/src/tomllib/_parser.py:159:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:161:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:168:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:169:22: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:170:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:180:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:182:31: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:206:16: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:207:22: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:208:25: Name 'k' used when not defined.",
    "/src/tomllib/_parser.py:330:32: Name 'header' used when not defined.",
    "/src/tomllib/_parser.py:330:41: Name 'key' used when not defined.",
    "/src/tomllib/_parser.py:333:26: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:334:71: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:337:31: Name 'cont_key' used when not defined.",
    "/src/tomllib/_parser.py:628:75: Name 'e' used when not defined.",
    "/src/tomllib/_parser.py:686:23: Name 'parse_float' used when not defined.",
];

fn get_test_file(name: &str) -> TestFile {
    let path = format!("tomllib/{name}");
    let url = format!("{TOMLLIB_312_URL}/{name}");
@@ -28,15 +69,19 @@ fn get_test_file(name: &str) -> TestFile {
fn setup_case() -> Case {
    let system = TestSystem::default();
    let fs = system.memory_file_system().clone();
    let init_path = SystemPath::new("/src/tomllib/__init__.py");
    let parser_path = SystemPath::new("/src/tomllib/_parser.py");
    let re_path = SystemPath::new("/src/tomllib/_re.py");
    let types_path = SystemPath::new("/src/tomllib/_types.py");
    fs.write_files([
        (init_path, get_test_file("__init__.py").code()),
        (
            SystemPath::new("/src/tomllib/__init__.py"),
            get_test_file("__init__.py").code(),
        ),
        (parser_path, get_test_file("_parser.py").code()),
        (re_path, get_test_file("_re.py").code()),
        (types_path, get_test_file("_types.py").code()),
        (
            SystemPath::new("/src/tomllib/_types.py"),
            get_test_file("_types.py").code(),
        ),
    ])
    .unwrap();

@@ -62,7 +107,6 @@ fn setup_case() -> Case {
    Case {
        db,
        fs,
        parser,
        re,
        re_path,
    }
@@ -72,8 +116,8 @@ fn benchmark_incremental(criterion: &mut Criterion) {
    criterion.bench_function("red_knot_check_file[incremental]", |b| {
        b.iter_batched_ref(
            || {
                let mut case = setup_case();
                case.db.check_file(case.parser).unwrap();
                let case = setup_case();
                case.db.check().unwrap();

                case.fs
                    .write_file(
@@ -82,14 +126,19 @@ fn benchmark_incremental(criterion: &mut Criterion) {
                    )
                    .unwrap();

                case.re.sync(&mut case.db);
                case
            },
            |case| {
                let Case { db, parser, .. } = case;
                let result = db.check_file(*parser).unwrap();
                let Case { db, .. } = case;

                assert_eq!(result.len(), 402);
                db.apply_changes(vec![ChangeEvent::Changed {
                    path: case.re_path.to_path_buf(),
                    kind: ChangedKind::FileContent,
                }]);

                let result = db.check().unwrap();

                assert_eq!(result, EXPECTED_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );
@@ -101,10 +150,10 @@ fn benchmark_cold(criterion: &mut Criterion) {
        b.iter_batched_ref(
            setup_case,
            |case| {
                let Case { db, parser, .. } = case;
                let result = db.check_file(*parser).unwrap();
                let Case { db, .. } = case;
                let result = db.check().unwrap();

                assert_eq!(result.len(), 402);
                assert_eq!(result, EXPECTED_DIAGNOSTICS);
            },
            BatchSize::SmallInput,
        );

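Note: both benchmarks above use criterion's `iter_batched_ref` so per-iteration setup (rebuilding the database, mutating the in-memory file system) is excluded from the measured routine. A minimal example of that pattern against a made-up workload (criterion is a real crate and this API exists; the workload is illustrative):

```rust
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};

fn benchmark_sort(criterion: &mut Criterion) {
    criterion.bench_function("sort[cold]", |b| {
        // `iter_batched_ref` re-runs the setup closure for every batch and
        // hands the routine a `&mut` to its output, so the setup cost of
        // building the Vec never lands in the measurement.
        b.iter_batched_ref(
            || (0..10_000).rev().collect::<Vec<u32>>(),
            |data| data.sort_unstable(),
            BatchSize::SmallInput,
        );
    });
}

criterion_group!(benches, benchmark_sort);
criterion_main!(benches);
```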
@@ -85,7 +85,7 @@ impl Files {
            .system_by_path
            .entry(absolute.clone())
            .or_insert_with(|| {
                tracing::trace!("Adding file {path}");
                tracing::trace!("Adding file '{path}'");

                let metadata = db.system().path_metadata(path);
                let durability = self
@@ -131,7 +131,7 @@ impl Files {
            Err(_) => return Err(FileError::NotFound),
        };

        tracing::trace!("Adding vendored file {}", path);
        tracing::trace!("Adding vendored file '{}'", path);
        let file = File::builder(FilePath::Vendored(path.to_path_buf()))
            .permissions(Some(0o444))
            .revision(metadata.revision())
@@ -158,7 +158,7 @@ impl Files {
            Entry::Vacant(entry) => {
                let metadata = db.system().virtual_path_metadata(path).ok()?;

                tracing::trace!("Adding virtual file {}", path);
                tracing::trace!("Adding virtual file '{}'", path);

                let file = File::builder(FilePath::SystemVirtual(path.to_path_buf()))
                    .revision(metadata.revision())
@@ -211,7 +211,7 @@ impl Files {
    /// That's why [`File::sync_path`] is preferred if it is known that the path is a file.
    pub fn sync_recursively(db: &mut dyn Db, path: &SystemPath) {
        let path = SystemPath::absolute(path, db.system().current_directory());
        tracing::debug!("Syncing all files in {path}");
        tracing::debug!("Syncing all files in '{path}'");

        let inner = Arc::clone(&db.files().inner);
        for entry in inner.system_by_path.iter_mut() {
@@ -224,9 +224,7 @@ impl Files {

        for root in roots.all() {
            if root.path(db).starts_with(&path) {
                root.set_revision(db)
                    .with_durability(Durability::HIGH)
                    .to(FileRevision::now());
                root.set_revision(db).to(FileRevision::now());
            }
        }
    }
@@ -249,9 +247,7 @@ impl Files {
        let roots = inner.roots.read().unwrap();

        for root in roots.all() {
            root.set_revision(db)
                .with_durability(Durability::HIGH)
                .to(FileRevision::now());
            root.set_revision(db).to(FileRevision::now());
        }
    }

@@ -381,23 +377,17 @@ impl File {
            return;
        };
        let metadata = db.system().path_metadata(path);
        let durability = db.files().root(db, path).map(|root| root.durability(db));
        Self::sync_impl(db, metadata, file, durability);
        Self::sync_impl(db, metadata, file);
    }

    fn sync_system_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath, file: File) {
        let metadata = db.system().virtual_path_metadata(path);
        Self::sync_impl(db, metadata, file, None);
        Self::sync_impl(db, metadata, file);
    }

    /// Private method providing the implementation for [`Self::sync_system_path`] and
    /// [`Self::sync_system_virtual_path`].
    fn sync_impl(
        db: &mut dyn Db,
        metadata: crate::system::Result<Metadata>,
        file: File,
        durability: Option<Durability>,
    ) {
    fn sync_impl(db: &mut dyn Db, metadata: crate::system::Result<Metadata>, file: File) {
        let (status, revision, permission) = match metadata {
            Ok(metadata) if metadata.file_type().is_file() => (
                FileStatus::Exists,
@@ -410,25 +400,19 @@ impl File {
            _ => (FileStatus::NotFound, FileRevision::zero(), None),
        };

        let durability = durability.unwrap_or_default();

        if file.status(db) != status {
            tracing::debug!("Updating the status of {}", file.path(db),);
            file.set_status(db).with_durability(durability).to(status);
            tracing::debug!("Updating the status of '{}'", file.path(db),);
            file.set_status(db).to(status);
        }

        if file.revision(db) != revision {
            tracing::debug!("Updating the revision of {}", file.path(db));
            file.set_revision(db)
                .with_durability(durability)
                .to(revision);
            tracing::debug!("Updating the revision of '{}'", file.path(db));
            file.set_revision(db).to(revision);
        }

        if file.permissions(db) != permission {
            tracing::debug!("Updating the permissions of {}", file.path(db),);
            file.set_permissions(db)
                .with_durability(durability)
                .to(permission);
            tracing::debug!("Updating the permissions of '{}'", file.path(db),);
            file.set_permissions(db).to(permission);
        }
    }


@@ -22,7 +22,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let kind = if is_notebook(file.path(db)) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read notebook {path}: {error}");
                tracing::debug!("Failed to read notebook '{path}': {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
@@ -33,7 +33,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    } else {
        file.read_to_string(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read file {path}: {error}");
                tracing::debug!("Failed to read file '{path}': {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);

@@ -31,10 +31,20 @@ pub fn assert_const_function_query_was_not_run<Db, Q, QDb, R>(
    Db: salsa::Database,
    Q: Fn(QDb) -> R,
{
    let (query_name, will_execute_event) = find_will_execute_event(db, query, (), events);
    // Salsa now interns singleton ingredients. But we know that it is a singleton, so we can just search for
    // any event of that ingredient.
    let query_name = query_name(&query);

    let event = events.iter().find(|event| {
        if let salsa::EventKind::WillExecute { database_key } = event.kind {
            db.ingredient_debug_name(database_key.ingredient_index()) == query_name
        } else {
            false
        }
    });

    db.attach(|_| {
        if let Some(will_execute_event) = will_execute_event {
        if let Some(will_execute_event) = event {
            panic!(
                "Expected query {query_name}() not to have run but it did: {will_execute_event:?}"
            );

@@ -97,7 +97,16 @@ impl VendoredFileSystem {
fn read_to_string(fs: &VendoredFileSystem, path: &VendoredPath) -> Result<String> {
    let mut archive = fs.lock_archive();
    let mut zip_file = archive.lookup_path(&NormalizedVendoredPath::from(path))?;
    let mut buffer = String::new();

    // Pre-allocate the buffer with the size specified in the ZIP file metadata
    // because `read_to_string` passes `None` as the size hint.
    // But let's not trust the zip file metadata (even though it's vendored)
    // and limit it to a reasonable size.
    let mut buffer = String::with_capacity(
        usize::try_from(zip_file.size())
            .unwrap_or(usize::MAX)
            .min(10_000_000),
    );
    zip_file.read_to_string(&mut buffer)?;
    Ok(buffer)
}

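Note: the hunk above pre-sizes the buffer from the ZIP entry's declared size but clamps it, so corrupt or hostile metadata can never trigger a huge up-front allocation. The same defensive pattern in isolation (the file name and the 10 MB cap are illustrative):

```rust
use std::fs::File;
use std::io::{self, Read};

fn read_with_declared_size(path: &str, declared_size: u64) -> io::Result<String> {
    // Trust the declared size only up to a hard cap: a bogus header can then
    // waste at most 10 MB of pre-allocation, while honest sizes still avoid
    // repeated reallocation inside `read_to_string`.
    let capacity = usize::try_from(declared_size)
        .unwrap_or(usize::MAX)
        .min(10_000_000);
    let mut buffer = String::with_capacity(capacity);
    File::open(path)?.read_to_string(&mut buffer)?;
    Ok(buffer)
}

fn main() -> io::Result<()> {
    let contents = read_with_declared_size("Cargo.toml", 4096)?;
    println!("read {} bytes", contents.len());
    Ok(())
}
```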
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.6.0"
version = "0.6.1"
publish = false
authors = { workspace = true }
edition = { workspace = true }

crates/ruff_linter/resources/test/fixtures/fastapi/FAST003.py (new vendored file, 134 additions)
@@ -0,0 +1,134 @@
from fastapi import FastAPI

app = FastAPI()


# Errors
@app.get("/things/{thing_id}")
async def read_thing(query: str):
    return {"query": query}


@app.get("/books/isbn-{isbn}")
async def read_thing():
    ...


@app.get("/things/{thing_id:path}")
async def read_thing(query: str):
    return {"query": query}


@app.get("/things/{thing_id : path}")
async def read_thing(query: str):
    return {"query": query}


@app.get("/books/{author}/{title}")
async def read_thing(author: str):
    return {"author": author}


@app.get("/books/{author_name}/{title}")
async def read_thing():
    ...


@app.get("/books/{author}/{title}")
async def read_thing(author: str, title: str, /):
    return {"author": author, "title": title}


@app.get("/books/{author}/{title}/{page}")
async def read_thing(
    author: str,
    query: str,
): ...


@app.get("/books/{author}/{title}")
async def read_thing():
    ...


@app.get("/books/{author}/{title}")
async def read_thing(*, author: str):
    ...


@app.get("/books/{author}/{title}")
async def read_thing(hello, /, *, author: str):
    ...


@app.get("/things/{thing_id}")
async def read_thing(
    query: str,
):
    return {"query": query}


@app.get("/things/{thing_id}")
async def read_thing(
    query: str = "default",
):
    return {"query": query}


@app.get("/things/{thing_id}")
async def read_thing(
    *, query: str = "default",
):
    return {"query": query}


# OK
@app.get("/things/{thing_id}")
async def read_thing(thing_id: int, query: str):
    return {"thing_id": thing_id, "query": query}


@app.get("/books/isbn-{isbn}")
async def read_thing(isbn: str):
    return {"isbn": isbn}


@app.get("/things/{thing_id:path}")
async def read_thing(thing_id: str, query: str):
    return {"thing_id": thing_id, "query": query}


@app.get("/things/{thing_id : path}")
async def read_thing(thing_id: str, query: str):
    return {"thing_id": thing_id, "query": query}


@app.get("/books/{author}/{title}")
async def read_thing(author: str, title: str):
    return {"author": author, "title": title}


@app.get("/books/{author}/{title}")
async def read_thing(*, author: str, title: str):
    return {"author": author, "title": title}


@app.get("/books/{author}/{title:path}")
async def read_thing(*, author: str, title: str):
    return {"author": author, "title": title}


# Ignored
@app.get("/things/{thing-id}")
async def read_thing(query: str):
    return {"query": query}


@app.get("/things/{thing_id!r}")
async def read_thing(query: str):
    return {"query": query}


@app.get("/things/{thing_id=}")
async def read_thing(query: str):
    return {"query": query}
@@ -17,6 +17,11 @@ def test():
    1 in (1, 2)


def test2():
    1 in (1, 2)
    return


data = [x for x in [1, 2, 3] if x in (1, 2)]



@@ -66,3 +66,6 @@ def not_warnings_dot_deprecated(
def not_a_deprecated_function() -> None: ...

fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053

# see https://github.com/astral-sh/ruff/issues/12995
def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]): ...
@@ -1,2 +1,10 @@
import mod.CaMel as CM
from mod import CamelCase as CC


# OK depending on configured import convention
import xml.etree.ElementTree as ET
from xml.etree import ElementTree as ET

# Always an error (relative import)
from ..xml.eltree import ElementTree as ET

@@ -42,3 +42,9 @@ class Foo:
    @classmethod
    def graze(cls, x, y, z):
        pass


class Foo:
    @staticmethod
    def __new__(cls, x, y, z):  # OK, see https://docs.python.org/3/reference/datamodel.html#basic-customization
        pass

@@ -3,7 +3,7 @@ class Fruit:
    def list_fruits(cls) -> None:
        cls = "apple"  # PLW0642
        cls: Fruit = "apple"  # PLW0642
        cls += "orange"  # PLW0642
        cls += "orange"  # OK, augmented assignments are ignored
        *cls = "banana"  # PLW0642
        cls, blah = "apple", "orange"  # PLW0642
        blah, (cls, blah2) = "apple", ("orange", "banana")  # PLW0642
@@ -16,7 +16,7 @@ class Fruit:
    def print_color(self) -> None:
        self = "red"  # PLW0642
        self: Self = "red"  # PLW0642
        self += "blue"  # PLW0642
        self += "blue"  # OK, augmented assignments are ignored
        *self = "blue"  # PLW0642
        self, blah = "red", "blue"  # PLW0642
        blah, (self, blah2) = "apple", ("orange", "banana")  # PLW0642

@@ -59,3 +59,12 @@ def negative_cases():
    # See https://docs.python.org/3/howto/logging-cookbook.html#formatting-styles
    import logging
    logging.info("yet {another} non-f-string")

    # See https://fastapi.tiangolo.com/tutorial/path-params/
    from fastapi import FastAPI
    app = FastAPI()
    item_id = 42

    @app.get("/items/{item_id}")
    async def read_item(item_id):
        return {"item_id": item_id}

@@ -78,3 +78,13 @@ async def test():
async def test() -> str:
    vals = [str(val) for val in await async_func(1)]
    return ",".join(vals)


from fastapi import FastAPI

app = FastAPI()


@app.post("/count")
async def fastapi_route():  # Ok: FastApi routes can be async without actually using await
    return 1

crates/ruff_linter/resources/test/fixtures/ruff/RUF032.py (new vendored file, 120 additions)
@@ -0,0 +1,120 @@
import decimal

# Tests with fully qualified import
decimal.Decimal(0)

decimal.Decimal(0.0)  # Should error

decimal.Decimal("0.0")

decimal.Decimal(10)

decimal.Decimal(10.0)  # Should error

decimal.Decimal("10.0")

decimal.Decimal(-10)

decimal.Decimal(-10.0)  # Should error

decimal.Decimal("-10.0")

a = 10.0

decimal.Decimal(a)


# Tests with relative import
from decimal import Decimal


val = Decimal(0)

val = Decimal(0.0)  # Should error

val = Decimal("0.0")

val = Decimal(10)

val = Decimal(10.0)  # Should error

val = Decimal("10.0")

val = Decimal(-10)

val = Decimal(-10.0)  # Should error

val = Decimal("-10.0")

a = 10.0

val = Decimal(a)


# Tests with shadowed name
class Decimal():
    value: float | int | str

    def __init__(self, value: float | int | str) -> None:
        self.value = value


val = Decimal(0.0)

val = Decimal("0.0")

val = Decimal(10.0)

val = Decimal("10.0")

val = Decimal(-10.0)

val = Decimal("-10.0")

a = 10.0

val = Decimal(a)


# Retest with fully qualified import

val = decimal.Decimal(0.0)  # Should error

val = decimal.Decimal("0.0")

val = decimal.Decimal(10.0)  # Should error

val = decimal.Decimal("10.0")

val = decimal.Decimal(-10.0)  # Should error

val = decimal.Decimal("-10.0")

a = 10.0

val = decimal.Decimal(a)


class decimal():
    class Decimal():
        value: float | int | str

        def __init__(self, value: float | int | str) -> None:
            self.value = value


val = decimal.Decimal(0.0)

val = decimal.Decimal("0.0")

val = decimal.Decimal(10.0)

val = decimal.Decimal("10.0")

val = decimal.Decimal(-10.0)

val = decimal.Decimal("-10.0")

a = 10.0

val = decimal.Decimal(a)
@@ -1011,6 +1011,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
            if checker.enabled(Rule::UnnecessaryIterableAllocationForFirstElement) {
                ruff::rules::unnecessary_iterable_allocation_for_first_element(checker, expr);
            }
            if checker.enabled(Rule::DecimalFromFloatLiteral) {
                ruff::rules::decimal_from_float_literal_syntax(checker, call);
            }
            if checker.enabled(Rule::IntOnSlicedStr) {
                refurb::rules::int_on_sliced_str(checker, call);
            }

@@ -94,6 +94,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::FastApiNonAnnotatedDependency) {
                fastapi::rules::fastapi_non_annotated_dependency(checker, function_def);
            }
            if checker.enabled(Rule::FastApiUnusedPathParameter) {
                fastapi::rules::fastapi_unused_path_parameter(checker, function_def);
            }
            if checker.enabled(Rule::AmbiguousFunctionName) {
                if let Some(diagnostic) = pycodestyle::rules::ambiguous_function_name(name) {
                    checker.diagnostics.push(diagnostic);
@@ -263,8 +266,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            if checker.enabled(Rule::TooManyArguments) {
                pylint::rules::too_many_arguments(checker, function_def);
            }
            if checker.enabled(Rule::TooManyPositional) {
                pylint::rules::too_many_positional(checker, function_def);
            if checker.enabled(Rule::TooManyPositionalArguments) {
                pylint::rules::too_many_positional_arguments(checker, function_def);
            }
            if checker.enabled(Rule::TooManyReturnStatements) {
                if let Some(diagnostic) = pylint::rules::too_many_return_statements(
@@ -704,11 +707,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                }
                if checker.enabled(Rule::CamelcaseImportedAsAcronym) {
                    if let Some(diagnostic) = pep8_naming::rules::camelcase_imported_as_acronym(
                        name,
                        asname,
                        alias,
                        stmt,
                        &checker.settings.pep8_naming.ignore_names,
                        name, asname, alias, stmt, checker,
                    ) {
                        checker.diagnostics.push(diagnostic);
                    }
@@ -1023,7 +1022,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                        asname,
                        alias,
                        stmt,
                        &checker.settings.pep8_naming.ignore_names,
                        checker,
                    ) {
                        checker.diagnostics.push(diagnostic);
                    }
@@ -1113,9 +1112,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                }
            }
        Stmt::AugAssign(aug_assign @ ast::StmtAugAssign { target, .. }) => {
            if checker.enabled(Rule::SelfOrClsAssignment) {
                pylint::rules::self_or_cls_assignment(checker, target);
            }
            if checker.enabled(Rule::GlobalStatement) {
                if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
                    pylint::rules::global_statement(checker, id);

@@ -248,7 +248,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "R0914") => (RuleGroup::Preview, rules::pylint::rules::TooManyLocals),
        (Pylint, "R0915") => (RuleGroup::Stable, rules::pylint::rules::TooManyStatements),
        (Pylint, "R0916") => (RuleGroup::Preview, rules::pylint::rules::TooManyBooleanExpressions),
        (Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositional),
        (Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositionalArguments),
        (Pylint, "R1701") => (RuleGroup::Removed, rules::pylint::rules::RepeatedIsinstanceCalls),
        (Pylint, "R1702") => (RuleGroup::Preview, rules::pylint::rules::TooManyNestedBlocks),
        (Pylint, "R1704") => (RuleGroup::Stable, rules::pylint::rules::RedefinedArgumentFromLocal),
@@ -920,6 +920,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        // fastapi
        (FastApi, "001") => (RuleGroup::Preview, rules::fastapi::rules::FastApiRedundantResponseModel),
        (FastApi, "002") => (RuleGroup::Preview, rules::fastapi::rules::FastApiNonAnnotatedDependency),
        (FastApi, "003") => (RuleGroup::Preview, rules::fastapi::rules::FastApiUnusedPathParameter),

        // pydoclint
        (Pydoclint, "201") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringMissingReturns),
@@ -958,6 +959,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Ruff, "029") => (RuleGroup::Preview, rules::ruff::rules::UnusedAsync),
        (Ruff, "030") => (RuleGroup::Preview, rules::ruff::rules::AssertWithPrintMessage),
        (Ruff, "031") => (RuleGroup::Preview, rules::ruff::rules::IncorrectlyParenthesizedTupleInSubscript),
        (Ruff, "032") => (RuleGroup::Preview, rules::ruff::rules::DecimalFromFloatLiteral),
        (Ruff, "100") => (RuleGroup::Stable, rules::ruff::rules::UnusedNOQA),
        (Ruff, "101") => (RuleGroup::Stable, rules::ruff::rules::RedirectedNOQA),


@@ -4,7 +4,7 @@ use anyhow::{Context, Result};

use ruff_diagnostics::Edit;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Stmt};
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt};
use ruff_python_ast::{AnyNodeRef, ArgOrKeyword};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
@@ -282,6 +282,59 @@ pub(crate) fn add_argument(
    }
}

/// Generic function to add a (regular) parameter to a function definition.
pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &str) -> Edit {
    if let Some(last) = parameters
        .args
        .iter()
        .filter(|arg| arg.default.is_none())
        .last()
    {
        // Case 1: at least one regular parameter, so append after the last one.
        Edit::insertion(format!(", {parameter}"), last.end())
    } else if parameters.args.first().is_some() {
        // Case 2: no regular parameter without a default, but at least one with a default, so
        // add before the first.
        let pos = parameters.start();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let name = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Name)
            .expect("Unable to find name token");
        Edit::insertion(format!("{parameter}, "), name.start())
    } else if let Some(last) = parameters.posonlyargs.last() {
        // Case 3: no regular parameter, but a positional-only parameter exists, so add after that.
        // We take care to add it *after* the `/` separator.
        let pos = last.end();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let slash = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Slash)
            .expect("Unable to find `/` token");
        // Try to find a comma after the slash.
        let comma = tokenizer.find(|token| token.kind == SimpleTokenKind::Comma);
        if let Some(comma) = comma {
            Edit::insertion(format!(" {parameter},"), comma.start() + TextSize::from(1))
        } else {
            Edit::insertion(format!(", {parameter}"), slash.start())
        }
    } else if parameters.kwonlyargs.first().is_some() {
        // Case 4: no regular parameter, but a keyword-only parameter exists, so add the
        // parameter before that.
        // We need to backtrack to before the `*` separator.
        // We know there are no non-keyword-only params, so we can safely assume that the `*`
        // separator is the first token.
        let pos = parameters.start();
        let mut tokenizer = SimpleTokenizer::starts_at(pos, source);
        let star = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Star)
            .expect("Unable to find `*` token");
        Edit::insertion(format!("{parameter}, "), star.start())
    } else {
        // Case 5: no parameters at all, so add the parameter after the opening parenthesis.
        Edit::insertion(
            parameter.to_string(),
            parameters.start() + TextSize::from(1),
        )
    }
}

/// Safely adjust the indentation of the indented block at [`TextRange`].
///
/// The [`TextRange`] is assumed to represent an entire indented block, including the leading

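The five cases are easiest to see on concrete signatures. Below is a minimal sketch of the intended before-and-after behavior, using a hypothetical `thing_id` as the inserted parameter; the expected outputs mirror the FAST003 snapshots further down in this diff.

```python
# Hypothetical signatures illustrating where `add_parameter` inserts `thing_id`.
def f1(query): ...           # Case 1 -> def f1(query, thing_id): ...
def f2(query="q"): ...       # Case 2 -> def f2(thing_id, query="q"): ...
def f3(hello, /, *, q): ...  # Case 3 -> def f3(hello, /, thing_id, *, q): ...
def f4(*, query): ...        # Case 4 -> def f4(thing_id, *, query): ...
def f5(): ...                # Case 5 -> def f5(thing_id): ...
```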
@@ -15,6 +15,7 @@ mod tests {

    #[test_case(Rule::FastApiRedundantResponseModel, Path::new("FAST001.py"))]
    #[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002.py"))]
    #[test_case(Rule::FastApiUnusedPathParameter, Path::new("FAST003.py"))]
    fn rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
        let diagnostics = test_path(

@@ -0,0 +1,232 @@
use std::iter::Peekable;
use std::ops::Range;
use std::str::CharIndices;

use ruff_diagnostics::Fix;
use ruff_diagnostics::{Diagnostic, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
use ruff_python_semantic::Modules;
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_text_size::{Ranged, TextSize};

use crate::checkers::ast::Checker;
use crate::fix::edits::add_parameter;
use crate::rules::fastapi::rules::is_fastapi_route_decorator;

/// ## What it does
/// Identifies FastAPI routes that declare path parameters in the route path
/// that are not included in the function signature.
///
/// ## Why is this bad?
/// Path parameters are used to extract values from the URL path.
///
/// If a path parameter is declared in the route path but not in the function
/// signature, it will not be accessible in the function body, which is likely
/// a mistake.
///
/// If a path parameter is declared in the route path, but as a positional-only
/// argument in the function signature, it will also not be accessible in the
/// function body, as FastAPI will not inject the parameter.
///
/// ## Known problems
/// If the path parameter is _not_ a valid Python identifier (e.g., `user-id`, as
/// opposed to `user_id`), FastAPI will normalize it. However, this rule simply
/// ignores such path parameters, as FastAPI's normalization behavior is undocumented.
///
/// ## Example
///
/// ```python
/// from fastapi import FastAPI
///
/// app = FastAPI()
///
///
/// @app.get("/things/{thing_id}")
/// async def read_thing(query: str): ...
/// ```
///
/// Use instead:
///
/// ```python
/// from fastapi import FastAPI
///
/// app = FastAPI()
///
///
/// @app.get("/things/{thing_id}")
/// async def read_thing(thing_id: int, query: str): ...
/// ```
///
/// ## Fix safety
/// This rule's fix is marked as unsafe, as modifying a function signature can
/// change the behavior of the code.
#[violation]
pub struct FastApiUnusedPathParameter {
    arg_name: String,
    function_name: String,
    is_positional: bool,
}

impl Violation for FastApiUnusedPathParameter {
    const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

    #[derive_message_formats]
    fn message(&self) -> String {
        let Self {
            arg_name,
            function_name,
            is_positional,
        } = self;
        #[allow(clippy::if_not_else)]
        if !is_positional {
            format!("Parameter `{arg_name}` appears in route path, but not in `{function_name}` signature")
        } else {
            format!(
                "Parameter `{arg_name}` appears in route path, but only as a positional-only argument in `{function_name}` signature"
            )
        }
    }

    fn fix_title(&self) -> Option<String> {
        let Self {
            arg_name,
            is_positional,
            ..
        } = self;
        if *is_positional {
            None
        } else {
            Some(format!("Add `{arg_name}` to function signature"))
        }
    }
}

/// FAST003
pub(crate) fn fastapi_unused_path_parameter(
    checker: &mut Checker,
    function_def: &ast::StmtFunctionDef,
) {
    if !checker.semantic().seen_module(Modules::FASTAPI) {
        return;
    }

    // Get the route path from the decorator.
    let route_decorator = function_def
        .decorator_list
        .iter()
        .find_map(|decorator| is_fastapi_route_decorator(decorator, checker.semantic()));

    let Some(route_decorator) = route_decorator else {
        return;
    };

    let Some(path_arg) = route_decorator.arguments.args.first() else {
        return;
    };
    let diagnostic_range = path_arg.range();

    // We can't really handle anything other than string literals.
    let path = match path_arg.as_string_literal_expr() {
        Some(path_arg) => &path_arg.value,
        None => return,
    };

    // Extract the path parameters from the route path.
    let path_params = PathParamIterator::new(path.to_str());

    // Extract the arguments from the function signature.
    let named_args: Vec<_> = function_def
        .parameters
        .args
        .iter()
        .chain(function_def.parameters.kwonlyargs.iter())
        .map(|arg| arg.parameter.name.as_str())
        .collect();

    // Check if any of the path parameters are not in the function signature.
    let mut diagnostics = vec![];
    for (path_param, range) in path_params {
        // Ignore invalid identifiers (e.g., `user-id`, as opposed to `user_id`).
        if !is_identifier(path_param) {
            continue;
        }

        // If the path parameter is already in the function signature, we don't need to do anything.
        if named_args.contains(&path_param) {
            continue;
        }

        // Determine whether the path parameter is used as a positional-only argument. In this case,
        // the path parameter injection won't work, but we also can't fix it (yet), since we'd need
        // to make the parameter non-positional-only.
        let is_positional = function_def
            .parameters
            .posonlyargs
            .iter()
            .any(|arg| arg.parameter.name.as_str() == path_param);

        let mut diagnostic = Diagnostic::new(
            FastApiUnusedPathParameter {
                arg_name: path_param.to_string(),
                function_name: function_def.name.to_string(),
                is_positional,
            },
            #[allow(clippy::cast_possible_truncation)]
            diagnostic_range
                .add_start(TextSize::from(range.start as u32 + 1))
                .sub_end(TextSize::from((path.len() - range.end + 1) as u32)),
        );
        if !is_positional {
            diagnostic.set_fix(Fix::unsafe_edit(add_parameter(
                path_param,
                &function_def.parameters,
                checker.locator().contents(),
            )));
        }
        diagnostics.push(diagnostic);
    }

    checker.diagnostics.extend(diagnostics);
}

/// An iterator to extract parameters from FastAPI route paths.
///
/// The iterator yields tuples of the parameter name and the range of the parameter in the input,
/// inclusive of curly braces.
#[derive(Debug)]
struct PathParamIterator<'a> {
    input: &'a str,
    chars: Peekable<CharIndices<'a>>,
}

impl<'a> PathParamIterator<'a> {
    fn new(input: &'a str) -> Self {
        PathParamIterator {
            input,
            chars: input.char_indices().peekable(),
        }
    }
}

impl<'a> Iterator for PathParamIterator<'a> {
    type Item = (&'a str, Range<usize>);

    fn next(&mut self) -> Option<Self::Item> {
        while let Some((start, c)) = self.chars.next() {
            if c == '{' {
                if let Some((end, _)) = self.chars.by_ref().find(|&(_, ch)| ch == '}') {
                    let param_content = &self.input[start + 1..end];
                    // We ignore text after a colon, since those are path convertors.
                    // See also: https://fastapi.tiangolo.com/tutorial/path-params/?h=path#path-convertor
                    let param_name_end = param_content.find(':').unwrap_or(param_content.len());
                    let param_name = &param_content[..param_name_end].trim();

                    #[allow(clippy::range_plus_one)]
                    return Some((param_name, start..end + 1));
                }
            }
        }
        None
    }
}
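For intuition, here is a rough Python equivalent of the extraction that `PathParamIterator` performs — a sketch under the same rules (ranges are inclusive of the braces, and any `:convertor` suffix is stripped), not the implementation itself:

```python
def path_params(path: str) -> list[tuple[str, tuple[int, int]]]:
    """Extract `{param}` names and their brace-inclusive ranges from a route path."""
    params, i = [], 0
    while i < len(path):
        if path[i] == "{" and (end := path.find("}", i + 1)) != -1:
            content = path[i + 1 : end]
            # Text after a colon is a path convertor (e.g. `{file_path:path}`).
            params.append((content.split(":", 1)[0].strip(), (i, end + 1)))
            i = end + 1
        else:
            i += 1
    return params


assert path_params("/things/{thing_id:path}") == [("thing_id", (8, 23))]
```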
@@ -1,15 +1,20 @@
pub(crate) use fastapi_non_annotated_dependency::*;
pub(crate) use fastapi_redundant_response_model::*;
pub(crate) use fastapi_unused_path_parameter::*;

mod fastapi_non_annotated_dependency;
mod fastapi_redundant_response_model;
mod fastapi_unused_path_parameter;

use ruff_python_ast::{Decorator, ExprCall, StmtFunctionDef};
use ruff_python_ast as ast;
use ruff_python_semantic::analyze::typing::resolve_assignment;
use ruff_python_semantic::SemanticModel;

/// Returns `true` if the function is a FastAPI route.
pub(crate) fn is_fastapi_route(function_def: &StmtFunctionDef, semantic: &SemanticModel) -> bool {
pub(crate) fn is_fastapi_route(
    function_def: &ast::StmtFunctionDef,
    semantic: &SemanticModel,
) -> bool {
    return function_def
        .decorator_list
        .iter()
@@ -18,27 +23,29 @@ pub(crate) fn is_fastapi_route(function_def: &StmtFunctionDef, semantic: &Semant

/// Returns `true` if the decorator is indicative of a FastAPI route.
pub(crate) fn is_fastapi_route_decorator<'a>(
    decorator: &'a Decorator,
    decorator: &'a ast::Decorator,
    semantic: &'a SemanticModel,
) -> Option<&'a ExprCall> {
) -> Option<&'a ast::ExprCall> {
    let call = decorator.expression.as_call_expr()?;
    let decorator_method = call.func.as_attribute_expr()?;
    let method_name = &decorator_method.attr;
    is_fastapi_route_call(call, semantic).then_some(call)
}

pub(crate) fn is_fastapi_route_call(call_expr: &ast::ExprCall, semantic: &SemanticModel) -> bool {
    let ast::Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = &*call_expr.func else {
        return false;
    };

    if !matches!(
        method_name.as_str(),
        attr.as_str(),
        "get" | "post" | "put" | "delete" | "patch" | "options" | "head" | "trace"
    ) {
        return None;
        return false;
    }

    let qualified_name = resolve_assignment(&decorator_method.value, semantic)?;
    if matches!(
        qualified_name.segments(),
        ["fastapi", "FastAPI" | "APIRouter"]
    ) {
        Some(call)
    } else {
        None
    }
    resolve_assignment(value, semantic).is_some_and(|qualified_name| {
        matches!(
            qualified_name.segments(),
            ["fastapi", "FastAPI" | "APIRouter"]
        )
    })
}

@@ -0,0 +1,323 @@
---
source: crates/ruff_linter/src/rules/fastapi/mod.rs
---
FAST003.py:7:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
6 | # Errors
7 | @app.get("/things/{thing_id}")
| ^^^^^^^^^^ FAST003
8 | async def read_thing(query: str):
9 |     return {"query": query}
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
5 5 |
6 6 | # Errors
7 7 | @app.get("/things/{thing_id}")
8 |-async def read_thing(query: str):
8 |+async def read_thing(query: str, thing_id):
9 9 |     return {"query": query}
10 10 |
11 11 |

FAST003.py:12:23: FAST003 [*] Parameter `isbn` appears in route path, but not in `read_thing` signature
|
12 | @app.get("/books/isbn-{isbn}")
| ^^^^^^ FAST003
13 | async def read_thing():
14 |     ...
|
= help: Add `isbn` to function signature

ℹ Unsafe fix
10 10 |
11 11 |
12 12 | @app.get("/books/isbn-{isbn}")
13 |-async def read_thing():
13 |+async def read_thing(isbn):
14 14 |     ...
15 15 |
16 16 |

FAST003.py:17:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
17 | @app.get("/things/{thing_id:path}")
| ^^^^^^^^^^^^^^^ FAST003
18 | async def read_thing(query: str):
19 |     return {"query": query}
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
15 15 |
16 16 |
17 17 | @app.get("/things/{thing_id:path}")
18 |-async def read_thing(query: str):
18 |+async def read_thing(query: str, thing_id):
19 19 |     return {"query": query}
20 20 |
21 21 |

FAST003.py:22:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
22 | @app.get("/things/{thing_id : path}")
| ^^^^^^^^^^^^^^^^^ FAST003
23 | async def read_thing(query: str):
24 |     return {"query": query}
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
20 20 |
21 21 |
22 22 | @app.get("/things/{thing_id : path}")
23 |-async def read_thing(query: str):
23 |+async def read_thing(query: str, thing_id):
24 24 |     return {"query": query}
25 25 |
26 26 |

FAST003.py:27:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
27 | @app.get("/books/{author}/{title}")
| ^^^^^^^ FAST003
28 | async def read_thing(author: str):
29 |     return {"author": author}
|
= help: Add `title` to function signature

ℹ Unsafe fix
25 25 |
26 26 |
27 27 | @app.get("/books/{author}/{title}")
28 |-async def read_thing(author: str):
28 |+async def read_thing(author: str, title):
29 29 |     return {"author": author}
30 30 |
31 31 |

FAST003.py:32:18: FAST003 [*] Parameter `author_name` appears in route path, but not in `read_thing` signature
|
32 | @app.get("/books/{author_name}/{title}")
| ^^^^^^^^^^^^^ FAST003
33 | async def read_thing():
34 |     ...
|
= help: Add `author_name` to function signature

ℹ Unsafe fix
30 30 |
31 31 |
32 32 | @app.get("/books/{author_name}/{title}")
33 |-async def read_thing():
33 |+async def read_thing(author_name):
34 34 |     ...
35 35 |
36 36 |

FAST003.py:32:32: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
32 | @app.get("/books/{author_name}/{title}")
| ^^^^^^^ FAST003
33 | async def read_thing():
34 |     ...
|
= help: Add `title` to function signature

ℹ Unsafe fix
30 30 |
31 31 |
32 32 | @app.get("/books/{author_name}/{title}")
33 |-async def read_thing():
33 |+async def read_thing(title):
34 34 |     ...
35 35 |
36 36 |

FAST003.py:37:18: FAST003 Parameter `author` appears in route path, but only as a positional-only argument in `read_thing` signature
|
37 | @app.get("/books/{author}/{title}")
| ^^^^^^^^ FAST003
38 | async def read_thing(author: str, title: str, /):
39 |     return {"author": author, "title": title}
|

FAST003.py:37:27: FAST003 Parameter `title` appears in route path, but only as a positional-only argument in `read_thing` signature
|
37 | @app.get("/books/{author}/{title}")
| ^^^^^^^ FAST003
38 | async def read_thing(author: str, title: str, /):
39 |     return {"author": author, "title": title}
|

FAST003.py:42:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
42 | @app.get("/books/{author}/{title}/{page}")
| ^^^^^^^ FAST003
43 | async def read_thing(
44 |     author: str,
|
= help: Add `title` to function signature

ℹ Unsafe fix
42 42 | @app.get("/books/{author}/{title}/{page}")
43 43 | async def read_thing(
44 44 |     author: str,
45 |-    query: str,
45 |+    query: str, title,
46 46 | ): ...
47 47 |
48 48 |

FAST003.py:42:35: FAST003 [*] Parameter `page` appears in route path, but not in `read_thing` signature
|
42 | @app.get("/books/{author}/{title}/{page}")
| ^^^^^^ FAST003
43 | async def read_thing(
44 |     author: str,
|
= help: Add `page` to function signature

ℹ Unsafe fix
42 42 | @app.get("/books/{author}/{title}/{page}")
43 43 | async def read_thing(
44 44 |     author: str,
45 |-    query: str,
45 |+    query: str, page,
46 46 | ): ...
47 47 |
48 48 |

FAST003.py:49:18: FAST003 [*] Parameter `author` appears in route path, but not in `read_thing` signature
|
49 | @app.get("/books/{author}/{title}")
| ^^^^^^^^ FAST003
50 | async def read_thing():
51 |     ...
|
= help: Add `author` to function signature

ℹ Unsafe fix
47 47 |
48 48 |
49 49 | @app.get("/books/{author}/{title}")
50 |-async def read_thing():
50 |+async def read_thing(author):
51 51 |     ...
52 52 |
53 53 |

FAST003.py:49:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
49 | @app.get("/books/{author}/{title}")
| ^^^^^^^ FAST003
50 | async def read_thing():
51 |     ...
|
= help: Add `title` to function signature

ℹ Unsafe fix
47 47 |
48 48 |
49 49 | @app.get("/books/{author}/{title}")
50 |-async def read_thing():
50 |+async def read_thing(title):
51 51 |     ...
52 52 |
53 53 |

FAST003.py:54:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
54 | @app.get("/books/{author}/{title}")
| ^^^^^^^ FAST003
55 | async def read_thing(*, author: str):
56 |     ...
|
= help: Add `title` to function signature

ℹ Unsafe fix
52 52 |
53 53 |
54 54 | @app.get("/books/{author}/{title}")
55 |-async def read_thing(*, author: str):
55 |+async def read_thing(title, *, author: str):
56 56 |     ...
57 57 |
58 58 |

FAST003.py:59:27: FAST003 [*] Parameter `title` appears in route path, but not in `read_thing` signature
|
59 | @app.get("/books/{author}/{title}")
| ^^^^^^^ FAST003
60 | async def read_thing(hello, /, *, author: str):
61 |     ...
|
= help: Add `title` to function signature

ℹ Unsafe fix
57 57 |
58 58 |
59 59 | @app.get("/books/{author}/{title}")
60 |-async def read_thing(hello, /, *, author: str):
60 |+async def read_thing(hello, /, title, *, author: str):
61 61 |     ...
62 62 |
63 63 |

FAST003.py:64:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
64 | @app.get("/things/{thing_id}")
| ^^^^^^^^^^ FAST003
65 | async def read_thing(
66 |     query: str,
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
63 63 |
64 64 | @app.get("/things/{thing_id}")
65 65 | async def read_thing(
66 |-    query: str,
66 |+    query: str, thing_id,
67 67 | ):
68 68 |     return {"query": query}
69 69 |

FAST003.py:71:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
71 | @app.get("/things/{thing_id}")
| ^^^^^^^^^^ FAST003
72 | async def read_thing(
73 |     query: str = "default",
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
70 70 |
71 71 | @app.get("/things/{thing_id}")
72 72 | async def read_thing(
73 |-    query: str = "default",
73 |+    thing_id, query: str = "default",
74 74 | ):
75 75 |     return {"query": query}
76 76 |

FAST003.py:78:19: FAST003 [*] Parameter `thing_id` appears in route path, but not in `read_thing` signature
|
78 | @app.get("/things/{thing_id}")
| ^^^^^^^^^^ FAST003
79 | async def read_thing(
80 |     *, query: str = "default",
|
= help: Add `thing_id` to function signature

ℹ Unsafe fix
77 77 |
78 78 | @app.get("/things/{thing_id}")
79 79 | async def read_thing(
80 |-    *, query: str = "default",
80 |+    thing_id, *, query: str = "default",
81 81 | ):
82 82 |     return {"query": query}
83 83 |
@@ -32,7 +32,7 @@ use crate::rules::flake8_async::helpers::AsyncModule;
///
///
/// async def main():
///     with asyncio.timeout(2):
///     async with asyncio.timeout(2):
///         await long_running_task()
/// ```
///

@@ -22,14 +22,14 @@ use crate::rules::flake8_async::helpers::MethodName;
/// ## Example
/// ```python
/// async def func():
///     with asyncio.timeout(2):
///     async with asyncio.timeout(2):
///         do_something()
/// ```
///
/// Use instead:
/// ```python
/// async def func():
///     with asyncio.timeout(2):
///     async with asyncio.timeout(2):
///         do_something()
///         await awaitable()
/// ```

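The corrected doc examples matter because `asyncio.timeout()` (Python 3.11+) is an asynchronous context manager, so it can only be entered with `async with`:

```python
import asyncio


async def main():
    # `with asyncio.timeout(2):` fails at runtime; the context manager is async-only.
    async with asyncio.timeout(2):
        await asyncio.sleep(1)


asyncio.run(main())
```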
@@ -1,6 +1,7 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::Expr;
use ruff_python_ast::{Expr, Stmt};
use ruff_python_semantic::ScopeKind;
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
@@ -33,24 +34,34 @@ use super::super::helpers::at_last_top_level_expression_in_cell;
/// ## References
/// - [Python documentation: `assert` statement](https://docs.python.org/3/reference/simple_stmts.html#the-assert-statement)
#[violation]
pub struct UselessComparison;
pub struct UselessComparison {
    at: ComparisonLocationAt,
}

impl Violation for UselessComparison {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!(
            "Pointless comparison. Did you mean to assign a value? \
            Otherwise, prepend `assert` or remove it."
        )
        match self.at {
            ComparisonLocationAt::MiddleBody => format!(
                "Pointless comparison. Did you mean to assign a value? \
                Otherwise, prepend `assert` or remove it."
            ),
            ComparisonLocationAt::EndOfFunction => format!(
                "Pointless comparison at end of function scope. Did you mean \
                to return the expression result?"
            ),
        }
    }
}

/// B015
pub(crate) fn useless_comparison(checker: &mut Checker, expr: &Expr) {
    if expr.is_compare_expr() {
        let semantic = checker.semantic();

        if checker.source_type.is_ipynb()
            && at_last_top_level_expression_in_cell(
                checker.semantic(),
                semantic,
                checker.locator(),
                checker.cell_offsets(),
            )
@@ -58,8 +69,34 @@ pub(crate) fn useless_comparison(checker: &mut Checker, expr: &Expr) {
            return;
        }

        checker
            .diagnostics
            .push(Diagnostic::new(UselessComparison, expr.range()));
        if let ScopeKind::Function(func_def) = semantic.current_scope().kind {
            if func_def
                .body
                .last()
                .and_then(Stmt::as_expr_stmt)
                .is_some_and(|last_stmt| &*last_stmt.value == expr)
            {
                checker.diagnostics.push(Diagnostic::new(
                    UselessComparison {
                        at: ComparisonLocationAt::EndOfFunction,
                    },
                    expr.range(),
                ));
                return;
            }
        }

        checker.diagnostics.push(Diagnostic::new(
            UselessComparison {
                at: ComparisonLocationAt::MiddleBody,
            },
            expr.range(),
        ));
    }
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum ComparisonLocationAt {
    MiddleBody,
    EndOfFunction,
}

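The two `ComparisonLocationAt` variants correspond to situations like these (a hypothetical fixture):

```python
def mid_body():
    1 == 1  # B015 (middle of body): assign, assert, or remove
    print("continuing")


def end_of_function(foo):
    foo == 1  # B015 (last statement): did you mean `return foo == 1`?
```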
@@ -1,5 +1,6 @@
---
source: crates/ruff_linter/src/rules/flake8_bugbear/mod.rs
assertion_line: 74
---
B015.py:3:1: B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it.
|
@@ -19,7 +20,7 @@ B015.py:7:1: B015 Pointless comparison. Did you mean to assign a value? Otherwis
| ^^^^^^^^^^^ B015
|

B015.py:17:5: B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it.
B015.py:17:5: B015 Pointless comparison at end of function scope. Did you mean to return the expression result?
|
15 |     assert 1 in (1, 2)
16 |
@@ -27,11 +28,17 @@ B015.py:17:5: B015 Pointless comparison. Did you mean to assign a value? Otherwi
| ^^^^^^^^^^^ B015
|

B015.py:24:5: B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it.
B015.py:21:5: B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it.
|
23 | class TestClass:
24 |     1 == 1
20 | def test2():
21 |     1 in (1, 2)
| ^^^^^^^^^^^ B015
22 |     return
|

B015.py:29:5: B015 Pointless comparison. Did you mean to assign a value? Otherwise, prepend `assert` or remove it.
|
28 | class TestClass:
29 |     1 == 1
| ^^^^^^ B015
|



@@ -59,6 +59,10 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
        return;
    }

    if semantic.in_annotation() {
        return;
    }

    let length = match string {
        StringLike::String(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
        StringLike::Bytes(ast::ExprBytesLiteral { value, .. }) => value.len(),

@@ -152,6 +152,8 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 |
68 | fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
69 |
70 | # see https://github.com/astral-sh/ruff/issues/12995
|
= help: Replace with `...`

@@ -161,3 +163,6 @@ PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters
67 67 |
68 |-fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg"  # Error: PYI053
68 |+fbaz: str = ...  # Error: PYI053
69 69 |
70 70 | # see https://github.com/astral-sh/ruff/issues/12995
71 71 | def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]):...

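The new `semantic.in_annotation()` guard exempts long literals that appear inside annotations, which is exactly what the updated snapshot exercises. A hypothetical stub fragment:

```python
# In a `.pyi` stub:
import typing

fbaz: str = "a value literal longer than fifty charactersssssss"  # PYI053: replace with `...`

# No longer flagged: the long literal is part of the annotation, not a value.
def foo(bar: typing.Literal["a", "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]): ...
```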
@@ -42,7 +42,7 @@ use super::helpers::{
/// import pytest
///
///
/// @pytest.fixture
/// @pytest.fixture()
/// def my_fixture(): ...
/// ```
///
@@ -52,7 +52,7 @@ use super::helpers::{
/// import pytest
///
///
/// @pytest.fixture()
/// @pytest.fixture
/// def my_fixture(): ...
/// ```
///

@@ -300,7 +300,7 @@ pub(crate) fn typing_only_runtime_import(
    // Categorize the import, using coarse-grained categorization.
    let import_type = match categorize(
        &qualified_name.to_string(),
        0,
        qualified_name.is_unresolved_import(),
        &checker.settings.src,
        checker.package(),
        checker.settings.isort.detect_same_package,

@@ -344,6 +344,7 @@ pub(crate) fn unused_arguments(
) {
    function_type::FunctionType::Function => {
        if checker.enabled(Argumentable::Function.rule_code())
            && !function_type::is_stub(function_def, checker.semantic())
            && !visibility::is_overload(decorator_list, checker.semantic())
        {
            function(

@@ -32,33 +32,3 @@ ARG.py:13:12: ARG001 Unused function argument: `x`
| ^ ARG001
14 |     print("Hello, world!")
|

ARG.py:17:7: ARG001 Unused function argument: `self`
|
17 | def f(self, x):
| ^^^^ ARG001
18 |     ...
|

ARG.py:17:13: ARG001 Unused function argument: `x`
|
17 | def f(self, x):
| ^ ARG001
18 |     ...
|

ARG.py:21:7: ARG001 Unused function argument: `cls`
|
21 | def f(cls, x):
| ^^^ ARG001
22 |     ...
|

ARG.py:21:12: ARG001 Unused function argument: `x`
|
21 | def f(cls, x):
| ^ ARG001
22 |     ...
|



@@ -91,7 +91,7 @@ enum Reason<'a> {
#[allow(clippy::too_many_arguments)]
pub(crate) fn categorize<'a>(
    module_name: &str,
    level: u32,
    is_relative: bool,
    src: &[PathBuf],
    package: Option<&Path>,
    detect_same_package: bool,
@@ -103,14 +103,14 @@ pub(crate) fn categorize<'a>(
) -> &'a ImportSection {
    let module_base = module_name.split('.').next().unwrap();
    let (mut import_type, mut reason) = {
        if level == 0 && module_base == "__future__" {
        if !is_relative && module_base == "__future__" {
            (&ImportSection::Known(ImportType::Future), Reason::Future)
        } else if no_sections {
            (
                &ImportSection::Known(ImportType::FirstParty),
                Reason::NoSections,
            )
        } else if level > 0 {
        } else if is_relative {
            (
                &ImportSection::Known(ImportType::LocalFolder),
                Reason::NonZeroLevel,
@@ -132,7 +132,7 @@ pub(crate) fn categorize<'a>(
                &ImportSection::Known(ImportType::FirstParty),
                Reason::SourceMatch(src),
            )
        } else if level == 0 && module_name == "__main__" {
        } else if !is_relative && module_name == "__main__" {
            (
                &ImportSection::Known(ImportType::FirstParty),
                Reason::KnownFirstParty,
@@ -190,7 +190,7 @@ pub(crate) fn categorize_imports<'a>(
    for (alias, comments) in block.import {
        let import_type = categorize(
            &alias.module_name(),
            0,
            false,
            src,
            package,
            detect_same_package,
@@ -210,7 +210,7 @@ pub(crate) fn categorize_imports<'a>(
    for (import_from, aliases) in block.import_from {
        let classification = categorize(
            &import_from.module_name(),
            import_from.level,
            import_from.level > 0,
            src,
            package,
            detect_same_package,
@@ -230,7 +230,7 @@ pub(crate) fn categorize_imports<'a>(
    for ((import_from, alias), aliases) in block.import_from_as {
        let classification = categorize(
            &import_from.module_name(),
            import_from.level,
            import_from.level > 0,
            src,
            package,
            detect_same_package,
@@ -250,7 +250,7 @@ pub(crate) fn categorize_imports<'a>(
    for (import_from, comments) in block.import_from_star {
        let classification = categorize(
            &import_from.module_name(),
            import_from.level,
            import_from.level > 0,
            src,
            package,
            detect_same_package,

@@ -8,11 +8,12 @@ mod tests {
    use std::path::{Path, PathBuf};

    use anyhow::Result;
    use rustc_hash::FxHashMap;
    use test_case::test_case;

    use crate::registry::Rule;
    use crate::rules::pep8_naming;
    use crate::rules::pep8_naming::settings::IgnoreNames;
    use crate::rules::{flake8_import_conventions, pep8_naming};
    use crate::test::test_path;
    use crate::{assert_messages, settings};

@@ -87,6 +88,25 @@ mod tests {
        Ok(())
    }

    #[test]
    fn camelcase_imported_as_incorrect_convention() -> Result<()> {
        let diagnostics = test_path(
            Path::new("pep8_naming").join("N817.py").as_path(),
            &settings::LinterSettings {
                flake8_import_conventions: flake8_import_conventions::settings::Settings {
                    aliases: FxHashMap::from_iter([(
                        "xml.etree.ElementTree".to_string(),
                        "XET".to_string(),
                    )]),
                    ..Default::default()
                },
                ..settings::LinterSettings::for_rule(Rule::CamelcaseImportedAsAcronym)
            },
        )?;
        assert_messages!(diagnostics);
        Ok(())
    }

    #[test]
    fn classmethod_decorators() -> Result<()> {
        let diagnostics = test_path(

@@ -1,12 +1,11 @@
use ruff_python_ast::{Alias, Stmt};

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{Alias, Stmt};
use ruff_python_stdlib::str::{self};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::rules::pep8_naming::helpers;
use crate::rules::pep8_naming::settings::IgnoreNames;

/// ## What it does
/// Checks for `CamelCase` imports that are aliased as acronyms.
@@ -23,6 +22,9 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
/// Note that this rule is distinct from `camelcase-imported-as-constant`
/// to accommodate selective enforcement.
///
/// Also note that import aliases following an import convention according to the
/// [`lint.flake8-import-conventions.aliases`] option are allowed.
///
/// ## Example
/// ```python
/// from example import MyClassName as MCN
@@ -34,6 +36,9 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
/// ```
///
/// [PEP 8]: https://peps.python.org/pep-0008/
///
/// ## Options
/// - `lint.flake8-import-conventions.aliases`
#[violation]
pub struct CamelcaseImportedAsAcronym {
    name: String,
@@ -54,17 +59,25 @@ pub(crate) fn camelcase_imported_as_acronym(
    asname: &str,
    alias: &Alias,
    stmt: &Stmt,
    ignore_names: &IgnoreNames,
    checker: &Checker,
) -> Option<Diagnostic> {
    if helpers::is_camelcase(name)
        && !str::is_cased_lowercase(asname)
        && str::is_cased_uppercase(asname)
        && helpers::is_acronym(name, asname)
    {
        let ignore_names = &checker.settings.pep8_naming.ignore_names;

        // Ignore any explicitly-allowed names.
        if ignore_names.matches(name) || ignore_names.matches(asname) {
            return None;
        }

        // Ignore names that follow a community-agreed import convention.
        if is_ignored_because_of_import_convention(asname, stmt, alias, checker) {
            return None;
        }

        let mut diagnostic = Diagnostic::new(
            CamelcaseImportedAsAcronym {
                name: name.to_string(),
@@ -77,3 +90,34 @@ pub(crate) fn camelcase_imported_as_acronym(
    }
    None
}

fn is_ignored_because_of_import_convention(
    asname: &str,
    stmt: &Stmt,
    alias: &Alias,
    checker: &Checker,
) -> bool {
    let full_name = if let Some(import_from) = stmt.as_import_from_stmt() {
        // Never test relative imports for exclusion, because we can't resolve the full module name.
        let Some(module) = import_from.module.as_ref() else {
            return false;
        };

        if import_from.level != 0 {
            return false;
        }

        std::borrow::Cow::Owned(format!("{module}.{}", alias.name))
    } else {
        std::borrow::Cow::Borrowed(&*alias.name)
    };

    // Ignore names that follow a community-agreed import convention.
    checker
        .settings
        .flake8_import_conventions
        .aliases
        .get(&*full_name)
        .map(String::as_str)
        == Some(asname)
}

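In practice — assuming a convention that maps `xml.etree.ElementTree` to `ET`, as Ruff's default aliases do — the behavior splits roughly as follows:

```python
import xml.etree.ElementTree as ET  # OK: matches the configured convention

from example import MyClassName as MCN  # N817: no convention covers this alias

from ..xml.eltree import ElementTree as ET  # N817: relative imports are never exempt
```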
@@ -15,4 +15,9 @@ N817.py:2:17: N817 CamelCase `CamelCase` imported as acronym `CC`
| ^^^^^^^^^^^^^^^ N817
|


N817.py:10:26: N817 CamelCase `ElementTree` imported as acronym `ET`
|
9 | # Always an error (relative import)
10 | from ..xml.eltree import ElementTree as ET
| ^^^^^^^^^^^^^^^^^ N817
|

@@ -0,0 +1,41 @@
---
source: crates/ruff_linter/src/rules/pep8_naming/mod.rs
---
N817.py:1:8: N817 CamelCase `CaMel` imported as acronym `CM`
|
1 | import mod.CaMel as CM
| ^^^^^^^^^^^^^^^ N817
2 | from mod import CamelCase as CC
|

N817.py:2:17: N817 CamelCase `CamelCase` imported as acronym `CC`
|
1 | import mod.CaMel as CM
2 | from mod import CamelCase as CC
| ^^^^^^^^^^^^^^^ N817
|

N817.py:6:8: N817 CamelCase `ElementTree` imported as acronym `ET`
|
5 | # OK depending on configured import convention
6 | import xml.etree.ElementTree as ET
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ N817
7 | from xml.etree import ElementTree as ET
|

N817.py:7:23: N817 CamelCase `ElementTree` imported as acronym `ET`
|
5 | # OK depending on configured import convention
6 | import xml.etree.ElementTree as ET
7 | from xml.etree import ElementTree as ET
| ^^^^^^^^^^^^^^^^^ N817
8 |
9 | # Always an error (relative import)
|

N817.py:10:26: N817 CamelCase `ElementTree` imported as acronym `ET`
|
9 | # Always an error (relative import)
10 | from ..xml.eltree import ElementTree as ET
| ^^^^^^^^^^^^^^^^^ N817
|
@@ -15,21 +15,29 @@ use crate::settings::types::PythonVersion;
/// Exception handling via `try`-`except` blocks incurs some performance
/// overhead, regardless of whether an exception is raised.
///
/// When possible, refactor your code to put the entire loop into the
/// `try`-`except` block, rather than wrapping each iteration in a separate
/// `try`-`except` block.
/// Two techniques are available to optimize such code:
/// 1. Refactor your code to put the entire loop into the `try`-`except` block,
///    rather than wrapping each iteration in a separate `try`-`except` block.
/// 2. Use "Look Before You Leap" idioms that attempt to avoid exceptions
///    being raised in the first place, removing the need for `try`-`except`
///    blocks.
///
/// This rule is only enforced for Python versions prior to 3.11, which
/// introduced "zero cost" exception handling.
/// introduced "zero-cost" exception handling. However, note that even on
/// Python 3.11 and newer, refactoring your code to avoid exception handling in
/// tight loops can provide a significant speedup in some cases, as zero-cost
/// exception handling is only zero-cost in the "happy path" where no exception
/// is raised in the `try`-`except` block.
///
/// Note that, as with all `perflint` rules, this is only intended as a
/// micro-optimization, and will have a negligible impact on performance in
/// most cases.
/// As with all `perflint` rules, this is only intended as a
/// micro-optimization. In many cases, it will have a negligible impact on
/// performance.
///
/// ## Example
/// ```python
/// string_numbers: list[str] = ["1", "2", "three", "4", "5"]
///
/// # `try`/`except` that could be moved out of the loop:
/// int_numbers: list[int] = []
/// for num in string_numbers:
///     try:
@@ -37,6 +45,16 @@ use crate::settings::types::PythonVersion;
///     except ValueError as e:
///         print(f"Couldn't convert to integer: {e}")
///         break
///
/// # `try`/`except` used when "look before you leap" idioms could be used:
/// number_names: dict[int, str] = {1: "one", 3: "three", 4: "four"}
/// for number in range(5):
///     try:
///         name = number_names[number]
///     except KeyError:
///         continue
///     else:
///         print(f"The name of {number} is {name}")
/// ```
///
/// Use instead:
@@ -49,6 +67,12 @@ use crate::settings::types::PythonVersion;
///         int_numbers.append(int(num))
///     except ValueError as e:
///         print(f"Couldn't convert to integer: {e}")
///
/// number_names: dict[int, str] = {1: "one", 3: "three", 4: "four"}
/// for number in range(5):
///     name = number_names.get(number)
///     if name is not None:
///         print(f"The name of {number} is {name}")
/// ```
///
/// ## Options

@@ -34,6 +34,7 @@ use crate::registry::Rule;
///
/// ```python
/// class PhotoMetadata:
///
///     """Metadata about a photo."""
/// ```
///
@@ -125,6 +126,7 @@ impl AlwaysFixableViolation for OneBlankLineAfterClass {
///
/// ```python
/// class PhotoMetadata:
///
///     """Metadata about a photo."""
/// ```
///

@@ -218,6 +218,9 @@ impl Violation for UndocumentedPublicClass {
///         raise ValueError("Tried to greet an unhappy cat.")
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -305,6 +308,9 @@ impl Violation for UndocumentedPublicMethod {
///             raise FasterThanLightError from exc
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -402,6 +408,9 @@ impl Violation for UndocumentedPublicPackage {
///     print(cat)  # "Cat: Dusty"
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
@@ -502,6 +511,9 @@ impl Violation for UndocumentedPublicNestedClass {
///         self.population: int = population
/// ```
///
/// ## Options
/// - `lint.pydocstyle.ignore-decorators`
///
/// ## References
/// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/)

@@ -6,8 +6,7 @@ use std::collections::BTreeMap;

use ruff_diagnostics::{Applicability, Diagnostic, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
use ruff_python_ast::{Stmt, StmtImportFrom};
use ruff_python_ast::{self as ast, Stmt};
use ruff_python_semantic::{
    AnyImport, BindingKind, Exceptions, Imported, NodeId, Scope, SemanticModel, SubmoduleImport,
};
@@ -218,10 +217,11 @@ enum UnusedImportContext {
    Other,
}

fn is_first_party(qualified_name: &str, level: u32, checker: &Checker) -> bool {
fn is_first_party(import: &AnyImport, checker: &Checker) -> bool {
    let qualified_name = import.qualified_name();
    let category = isort::categorize(
        qualified_name,
        level,
        &qualified_name.to_string(),
        qualified_name.is_unresolved_import(),
        &checker.settings.src,
        checker.package(),
        checker.settings.isort.detect_same_package,
@@ -343,13 +343,6 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
    let in_except_handler =
        exceptions.intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
    let multiple = bindings.len() > 1;
    let level = match checker.semantic().statement(import_statement) {
        Stmt::Import(_) => 0,
        Stmt::ImportFrom(StmtImportFrom { level, .. }) => *level,
        _ => {
            continue;
        }
    };

    // pair each binding with context; divide them by how we want to fix them
    let (to_reexport, to_remove): (Vec<_>, Vec<_>) = bindings
@@ -357,9 +350,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
        .map(|binding| {
            let context = if in_except_handler {
                UnusedImportContext::ExceptHandler
            } else if in_init
                && is_first_party(&binding.import.qualified_name().to_string(), level, checker)
            {
            } else if in_init && is_first_party(&binding.import, checker) {
                UnusedImportContext::DunderInitFirstParty {
                    dunder_all_count: DunderAllCount::from(dunder_all_exprs.len()),
                    submodule_import: binding.import.is_submodule_import(),

@@ -123,7 +123,10 @@ mod tests {
    #[test_case(Rule::RedefinedLoopName, Path::new("redefined_loop_name.py"))]
    #[test_case(Rule::ReturnInInit, Path::new("return_in_init.py"))]
    #[test_case(Rule::TooManyArguments, Path::new("too_many_arguments.py"))]
    #[test_case(Rule::TooManyPositional, Path::new("too_many_positional.py"))]
    #[test_case(
        Rule::TooManyPositionalArguments,
        Path::new("too_many_positional_arguments.py")
    )]
    #[test_case(Rule::TooManyBranches, Path::new("too_many_branches.py"))]
    #[test_case(
        Rule::TooManyReturnStatements,
@@ -294,7 +297,7 @@ mod tests {
                max_positional_args: 4,
                ..pylint::settings::Settings::default()
            },
            ..LinterSettings::for_rule(Rule::TooManyPositional)
            ..LinterSettings::for_rule(Rule::TooManyPositionalArguments)
        },
    )?;
    assert_messages!(diagnostics);

@@ -91,13 +91,18 @@ pub(crate) fn bad_staticmethod_argument(
        return;
    };

    if matches!(self_or_cls.name.as_str(), "self" | "cls") {
        let diagnostic = Diagnostic::new(
            BadStaticmethodArgument {
                argument_name: self_or_cls.name.to_string(),
            },
            self_or_cls.range(),
        );
        diagnostics.push(diagnostic);
    match (name.as_str(), self_or_cls.name.as_str()) {
        ("__new__", "cls") => {
            return;
        }
        (_, "self" | "cls") => {}
        _ => return,
    }

    diagnostics.push(Diagnostic::new(
        BadStaticmethodArgument {
            argument_name: self_or_cls.name.to_string(),
        },
        self_or_cls.range(),
    ));
}

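The reworked match exempts a static `__new__` whose first argument is `cls`, while `self`/`cls` on other staticmethods are still flagged; a hypothetical fixture:

```python
class Example:
    @staticmethod
    def compute(self, x):  # still flagged: staticmethods take no `self`
        return x

    @staticmethod
    def __new__(cls, *args, **kwargs):  # now exempt: `__new__` conventionally takes `cls`
        return object.__new__(cls)
```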
@@ -79,7 +79,7 @@ pub(crate) use too_many_boolean_expressions::*;
pub(crate) use too_many_branches::*;
pub(crate) use too_many_locals::*;
pub(crate) use too_many_nested_blocks::*;
pub(crate) use too_many_positional::*;
pub(crate) use too_many_positional_arguments::*;
pub(crate) use too_many_public_methods::*;
pub(crate) use too_many_return_statements::*;
pub(crate) use too_many_statements::*;
@@ -182,7 +182,7 @@ mod too_many_boolean_expressions;
mod too_many_branches;
mod too_many_locals;
mod too_many_nested_blocks;
mod too_many_positional;
mod too_many_positional_arguments;
mod too_many_public_methods;
mod too_many_return_statements;
mod too_many_statements;

@@ -42,21 +42,24 @@ use crate::checkers::ast::Checker;
/// ## Options
/// - `lint.pylint.max-positional-args`
#[violation]
pub struct TooManyPositional {
pub struct TooManyPositionalArguments {
    c_pos: usize,
    max_pos: usize,
}

impl Violation for TooManyPositional {
impl Violation for TooManyPositionalArguments {
    #[derive_message_formats]
    fn message(&self) -> String {
        let TooManyPositional { c_pos, max_pos } = self;
        let TooManyPositionalArguments { c_pos, max_pos } = self;
        format!("Too many positional arguments ({c_pos}/{max_pos})")
    }
}

/// PLR0917
pub(crate) fn too_many_positional(checker: &mut Checker, function_def: &ast::StmtFunctionDef) {
pub(crate) fn too_many_positional_arguments(
    checker: &mut Checker,
    function_def: &ast::StmtFunctionDef,
) {
    let semantic = checker.semantic();

    // Count the number of positional arguments.
@@ -109,7 +112,7 @@ pub(crate) fn too_many_positional(checker: &mut Checker, function_def: &ast::Stm
    }

    checker.diagnostics.push(Diagnostic::new(
        TooManyPositional {
        TooManyPositionalArguments {
            c_pos: num_positional_args,
            max_pos: checker.settings.pylint.max_positional_args,
        },
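With the default `lint.pylint.max-positional-args` of 5 (visible in the snapshots below), the renamed rule fires exactly as before:

```python
def f(x, y, z, t, u, v, w, r):  # PLR0917: too many positional arguments (8/5)
    pass


def g(x, y, z, t, u, *, v=None, w=None, r=None):  # OK: extras are keyword-only
    pass
```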
@@ -1,28 +1,28 @@
---
source: crates/ruff_linter/src/rules/pylint/mod.rs
---
too_many_positional.py:1:5: PLR0917 Too many positional arguments (8/5)
too_many_positional_arguments.py:1:5: PLR0917 Too many positional arguments (8/5)
|
1 | def f(x, y, z, t, u, v, w, r):  # Too many positional arguments (8/5)
| ^ PLR0917
2 |     pass
|

too_many_positional.py:21:5: PLR0917 Too many positional arguments (6/5)
too_many_positional_arguments.py:21:5: PLR0917 Too many positional arguments (6/5)
|
21 | def f(x, y, z, /, u, v, w):  # Too many positional arguments (6/5)
| ^ PLR0917
22 |     pass
|

too_many_positional.py:29:5: PLR0917 Too many positional arguments (6/5)
too_many_positional_arguments.py:29:5: PLR0917 Too many positional arguments (6/5)
|
29 | def f(x, y, z, a, b, c, *, u, v, w):  # Too many positional arguments (6/5)
| ^ PLR0917
30 |     pass
|

too_many_positional.py:43:9: PLR0917 Too many positional arguments (6/5)
too_many_positional_arguments.py:43:9: PLR0917 Too many positional arguments (6/5)
|
41 |         pass
42 |
@@ -31,12 +31,10 @@ too_many_positional.py:43:9: PLR0917 Too many positional arguments (6/5)
44 |         pass
|

too_many_positional.py:47:9: PLR0917 Too many positional arguments (6/5)
too_many_positional_arguments.py:47:9: PLR0917 Too many positional arguments (6/5)
|
46 |     @staticmethod
47 |     def f(self, a, b, c, d, e):  # Too many positional arguments (6/5)
| ^ PLR0917
48 |         pass
|


@@ -8,7 +8,7 @@ self_or_cls_assignment.py:4:9: PLW0642 Reassigned `cls` variable in class method
 4 |         cls = "apple"  # PLW0642
   |         ^^^ PLW0642
 5 |         cls: Fruit = "apple"  # PLW0642
-6 |         cls += "orange"  # PLW0642
+6 |         cls += "orange"  # OK, augmented assignments are ignored
   |
 = help: Consider using a different variable name
 
@@ -18,26 +18,15 @@ self_or_cls_assignment.py:5:9: PLW0642 Reassigned `cls` variable in class method
 4 |         cls = "apple"  # PLW0642
 5 |         cls: Fruit = "apple"  # PLW0642
   |         ^^^ PLW0642
-6 |         cls += "orange"  # PLW0642
+6 |         cls += "orange"  # OK, augmented assignments are ignored
 7 |         *cls = "banana"  # PLW0642
   |
 = help: Consider using a different variable name
 
-self_or_cls_assignment.py:6:9: PLW0642 Reassigned `cls` variable in class method
-   |
-4 |         cls = "apple"  # PLW0642
-5 |         cls: Fruit = "apple"  # PLW0642
-6 |         cls += "orange"  # PLW0642
-   |         ^^^ PLW0642
-7 |         *cls = "banana"  # PLW0642
-8 |         cls, blah = "apple", "orange"  # PLW0642
-   |
-= help: Consider using a different variable name
-
 self_or_cls_assignment.py:7:10: PLW0642 Reassigned `cls` variable in class method
    |
 5 |         cls: Fruit = "apple"  # PLW0642
-6 |         cls += "orange"  # PLW0642
+6 |         cls += "orange"  # OK, augmented assignments are ignored
 7 |         *cls = "banana"  # PLW0642
   |          ^^^ PLW0642
 8 |         cls, blah = "apple", "orange"  # PLW0642
@@ -47,7 +36,7 @@ self_or_cls_assignment.py:7:10: PLW0642 Reassigned `cls` variable in class metho
 
 self_or_cls_assignment.py:8:9: PLW0642 Reassigned `cls` variable in class method
    |
-6 |         cls += "orange"  # PLW0642
+6 |         cls += "orange"  # OK, augmented assignments are ignored
 7 |         *cls = "banana"  # PLW0642
 8 |         cls, blah = "apple", "orange"  # PLW0642
   |         ^^^ PLW0642
@@ -94,7 +83,7 @@ self_or_cls_assignment.py:17:9: PLW0642 Reassigned `self` variable in instance m
 17 |         self = "red"  # PLW0642
    |         ^^^^ PLW0642
 18 |         self: Self = "red"  # PLW0642
-19 |         self += "blue"  # PLW0642
+19 |         self += "blue"  # OK, augmented assignments are ignored
    |
 = help: Consider using a different variable name
 
@@ -104,26 +93,15 @@ self_or_cls_assignment.py:18:9: PLW0642 Reassigned `self` variable in instance m
 17 |         self = "red"  # PLW0642
 18 |         self: Self = "red"  # PLW0642
    |         ^^^^ PLW0642
-19 |         self += "blue"  # PLW0642
+19 |         self += "blue"  # OK, augmented assignments are ignored
 20 |         *self = "blue"  # PLW0642
    |
 = help: Consider using a different variable name
 
-self_or_cls_assignment.py:19:9: PLW0642 Reassigned `self` variable in instance method
-   |
-17 |         self = "red"  # PLW0642
-18 |         self: Self = "red"  # PLW0642
-19 |         self += "blue"  # PLW0642
-   |         ^^^^ PLW0642
-20 |         *self = "blue"  # PLW0642
-21 |         self, blah = "red", "blue"  # PLW0642
-   |
-= help: Consider using a different variable name
-
 self_or_cls_assignment.py:20:10: PLW0642 Reassigned `self` variable in instance method
    |
 18 |         self: Self = "red"  # PLW0642
-19 |         self += "blue"  # PLW0642
+19 |         self += "blue"  # OK, augmented assignments are ignored
 20 |         *self = "blue"  # PLW0642
    |          ^^^^ PLW0642
 21 |         self, blah = "red", "blue"  # PLW0642
@@ -133,7 +111,7 @@ self_or_cls_assignment.py:20:10: PLW0642 Reassigned `self` variable in instance
 
 self_or_cls_assignment.py:21:9: PLW0642 Reassigned `self` variable in instance method
    |
-19 |         self += "blue"  # PLW0642
+19 |         self += "blue"  # OK, augmented assignments are ignored
 20 |         *self = "blue"  # PLW0642
 21 |         self, blah = "red", "blue"  # PLW0642
    |         ^^^^ PLW0642
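The snapshot changes above reflect the new PLW0642 behavior: plain and annotated rebindings of `self`/`cls` are still flagged, while augmented assignments are now exempt. A small sketch consistent with the diagnostics shown:

```python
class Fruit:
    @classmethod
    def list_fruits(cls) -> None:
        cls = "apple"    # PLW0642: rebinds `cls`
        cls += "orange"  # OK as of this change: augmented assignment is ignored

    def buy_fruit(self) -> None:
        self = "red"     # PLW0642: rebinds `self`
        self += "blue"   # OK: augmented assignment is ignored
```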
@@ -15,7 +15,7 @@ FURB177.py:5:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for curr
 3 3 |
 4 4 | # Errors
 5   |-_ = Path().resolve()
-  5 |+_ = pathlib.Path.cwd()
+  5 |+_ = Path.cwd()
 6 6 | _ = pathlib.Path().resolve()
 7 7 |
 8 8 | _ = Path("").resolve()
@@ -36,7 +36,7 @@ FURB177.py:6:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for curr
 4 4 | # Errors
 5 5 | _ = Path().resolve()
 6   |-_ = pathlib.Path().resolve()
-  6 |+_ = pathlib.Path.cwd()
+  6 |+_ = Path.cwd()
 7 7 |
 8 8 | _ = Path("").resolve()
 9 9 | _ = pathlib.Path("").resolve()
@@ -56,7 +56,7 @@ FURB177.py:8:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for curr
 6 6 | _ = pathlib.Path().resolve()
 7 7 |
 8   |-_ = Path("").resolve()
-  8 |+_ = pathlib.Path.cwd()
+  8 |+_ = Path.cwd()
 9 9 | _ = pathlib.Path("").resolve()
 10 10 |
 11 11 | _ = Path(".").resolve()
@@ -76,7 +76,7 @@ FURB177.py:9:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for curr
 7 7 |
 8 8 | _ = Path("").resolve()
 9   |-_ = pathlib.Path("").resolve()
-  9 |+_ = pathlib.Path.cwd()
+  9 |+_ = Path.cwd()
 10 10 |
 11 11 | _ = Path(".").resolve()
 12 12 | _ = pathlib.Path(".").resolve()
@@ -96,7 +96,7 @@ FURB177.py:11:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 9 9 | _ = pathlib.Path("").resolve()
 10 10 |
 11    |-_ = Path(".").resolve()
-   11 |+_ = pathlib.Path.cwd()
+   11 |+_ = Path.cwd()
 12 12 | _ = pathlib.Path(".").resolve()
 13 13 |
 14 14 | _ = Path("", **kwargs).resolve()
@@ -116,7 +116,7 @@ FURB177.py:12:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 10 10 |
 11 11 | _ = Path(".").resolve()
 12    |-_ = pathlib.Path(".").resolve()
-   12 |+_ = pathlib.Path.cwd()
+   12 |+_ = Path.cwd()
 13 13 |
 14 14 | _ = Path("", **kwargs).resolve()
 15 15 | _ = pathlib.Path("", **kwargs).resolve()
@@ -136,7 +136,7 @@ FURB177.py:14:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 12 12 | _ = pathlib.Path(".").resolve()
 13 13 |
 14    |-_ = Path("", **kwargs).resolve()
-   14 |+_ = pathlib.Path.cwd()
+   14 |+_ = Path.cwd()
 15 15 | _ = pathlib.Path("", **kwargs).resolve()
 16 16 |
 17 17 | _ = Path(".", **kwargs).resolve()
@@ -156,7 +156,7 @@ FURB177.py:15:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 13 13 |
 14 14 | _ = Path("", **kwargs).resolve()
 15    |-_ = pathlib.Path("", **kwargs).resolve()
-   15 |+_ = pathlib.Path.cwd()
+   15 |+_ = Path.cwd()
 16 16 |
 17 17 | _ = Path(".", **kwargs).resolve()
 18 18 | _ = pathlib.Path(".", **kwargs).resolve()
@@ -176,7 +176,7 @@ FURB177.py:17:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 15 15 | _ = pathlib.Path("", **kwargs).resolve()
 16 16 |
 17    |-_ = Path(".", **kwargs).resolve()
-   17 |+_ = pathlib.Path.cwd()
+   17 |+_ = Path.cwd()
 18 18 | _ = pathlib.Path(".", **kwargs).resolve()
 19 19 |
 20 20 | # OK
@@ -196,7 +196,7 @@ FURB177.py:18:5: FURB177 [*] Prefer `Path.cwd()` over `Path().resolve()` for cur
 16 16 |
 17 17 | _ = Path(".", **kwargs).resolve()
 18    |-_ = pathlib.Path(".", **kwargs).resolve()
-   18 |+_ = pathlib.Path.cwd()
+   18 |+_ = Path.cwd()
 19 19 |
 20 20 | # OK
 21 21 | _ = Path.cwd()
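The FURB177 fix in these snapshots changes from always emitting the fully qualified `pathlib.Path.cwd()` to emitting `Path.cwd()`, presumably by resolving the `Path` symbol through the fixture's existing imports (an inference from the diff; the fixture's import block is not shown here):

```python
from pathlib import Path

_ = Path().resolve()    # FURB177; fix now: `_ = Path.cwd()`
_ = Path("").resolve()  # FURB177; fix now: `_ = Path.cwd()`

_ = Path.cwd()          # OK
```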
@@ -57,6 +57,7 @@ mod tests {
     #[test_case(Rule::UnusedAsync, Path::new("RUF029.py"))]
     #[test_case(Rule::AssertWithPrintMessage, Path::new("RUF030.py"))]
     #[test_case(Rule::IncorrectlyParenthesizedTupleInSubscript, Path::new("RUF031.py"))]
+    #[test_case(Rule::DecimalFromFloatLiteral, Path::new("RUF032.py"))]
     #[test_case(Rule::RedirectedNOQA, Path::new("RUF101.py"))]
     fn rules(rule_code: Rule, path: &Path) -> Result<()> {
         let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
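The `rules` test harness above runs each fixture through the linter and snapshots its diagnostics under a `{noqa_code}_{file_name}` key. A guess at the kinds of cases `RUF032.py` would cover, based on the rule implementation that follows (the actual fixture contents are not shown in this diff):

```python
import decimal
from decimal import Decimal

Decimal(1.2345)           # RUF032: float literal argument
decimal.Decimal(-1.2345)  # RUF032: a unary op over a float literal still counts

Decimal("1.2345")         # OK: string literal
Decimal(10)               # OK: not a float literal
```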
@@ -0,0 +1,85 @@
+use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::{self as ast};
+use ruff_python_codegen::Stylist;
+use ruff_text_size::{Ranged, TextRange};
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks for `Decimal` calls passing a float literal.
+///
+/// ## Why is this bad?
+/// Float literals have limited precision that can lead to unexpected results.
+/// The `Decimal` class is designed to handle numbers with fixed-point precision,
+/// so a string literal should be used instead.
+///
+/// ## Example
+///
+/// ```python
+/// num = Decimal(1.2345)
+/// ```
+///
+/// Use instead:
+/// ```python
+/// num = Decimal("1.2345")
+/// ```
+///
+/// ## Fix Safety
+/// This rule's fix is marked as unsafe because it changes the underlying value
+/// of the `Decimal` instance that is constructed. This can lead to unexpected
+/// behavior if your program relies on the previous value (whether deliberately or not).
+#[violation]
+pub struct DecimalFromFloatLiteral;
+
+impl AlwaysFixableViolation for DecimalFromFloatLiteral {
+    #[derive_message_formats]
+    fn message(&self) -> String {
+        format!("`Decimal()` called with float literal argument")
+    }
+
+    fn fix_title(&self) -> String {
+        "Use a string literal instead".to_string()
+    }
+}
+
+/// RUF032: `Decimal()` called with float literal argument
+pub(crate) fn decimal_from_float_literal_syntax(checker: &mut Checker, call: &ast::ExprCall) {
+    let Some(arg) = call.arguments.args.first() else {
+        return;
+    };
+
+    if !is_arg_float_literal(arg) {
+        return;
+    }
+
+    if checker
+        .semantic()
+        .resolve_qualified_name(call.func.as_ref())
+        .is_some_and(|qualified_name| matches!(qualified_name.segments(), ["decimal", "Decimal"]))
+    {
+        let diagnostic =
+            Diagnostic::new(DecimalFromFloatLiteral, arg.range()).with_fix(fix_float_literal(
+                arg.range(),
+                &checker.generator().expr(arg),
+                checker.stylist(),
+            ));
+        checker.diagnostics.push(diagnostic);
+    }
+}
+
+fn is_arg_float_literal(arg: &ast::Expr) -> bool {
+    match arg {
+        ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
+            value: ast::Number::Float(_),
+            ..
+        }) => true,
+        ast::Expr::UnaryOp(ast::ExprUnaryOp { operand, .. }) => is_arg_float_literal(operand),
+        _ => false,
+    }
+}
+
+fn fix_float_literal(range: TextRange, float_literal: &str, stylist: &Stylist) -> Fix {
+    let content = format!("{quote}{float_literal}{quote}", quote = stylist.quote());
+    Fix::unsafe_edit(Edit::range_replacement(content, range))
+}
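The "Why is this bad?" section of the new rule can be observed directly in a REPL: the float literal is rounded to the nearest binary double before `Decimal` ever sees it, so the constructed value is not the decimal you wrote:

```python
from decimal import Decimal

# The float 1.2345 is stored as the nearest base-2 double; Decimal then
# faithfully captures that approximation, trailing digits and all:
print(Decimal(1.2345))    # prints a long tail of digits, not exactly 1.2345
print(Decimal("1.2345"))  # prints exactly 1.2345
```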
@@ -1,10 +1,10 @@
 use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_ast::{self as ast};
+use ruff_python_ast as ast;
 use ruff_python_literal::format::FormatSpec;
 use ruff_python_parser::parse_expression;
-use ruff_python_semantic::analyze::logging;
-use ruff_python_semantic::SemanticModel;
+use ruff_python_semantic::analyze::logging::is_logger_candidate;
+use ruff_python_semantic::{Modules, SemanticModel};
 use ruff_source_file::Locator;
 use ruff_text_size::{Ranged, TextRange};
 
@@ -12,6 +12,7 @@ use memchr::memchr2_iter;
 use rustc_hash::FxHashSet;
 
 use crate::checkers::ast::Checker;
+use crate::rules::fastapi::rules::is_fastapi_route_call;
 
 /// ## What it does
 /// Searches for strings that look like they were meant to be f-strings, but are missing an `f` prefix.
@@ -33,6 +34,8 @@ use crate::checkers::ast::Checker;
 /// 4. The string has no `{...}` expression sections, or uses invalid f-string syntax.
 /// 5. The string references variables that are not in scope, or it doesn't capture variables at all.
 /// 6. Any format specifiers in the potential f-string are invalid.
+/// 7. The string is part of a function call that is known to expect a template string rather than an
+///    evaluated f-string: for example, a [`logging`] call, a [`gettext`] call, or a [`fastAPI` path].
 ///
 /// ## Example
 ///
@@ -48,6 +51,10 @@ use crate::checkers::ast::Checker;
 /// day_of_week = "Tuesday"
 /// print(f"Hello {name}! It is {day_of_week} today!")
 /// ```
+///
+/// [`logging`]: https://docs.python.org/3/howto/logging-cookbook.html#using-particular-formatting-styles-throughout-your-application
+/// [`gettext`]: https://docs.python.org/3/library/gettext.html
+/// [`fastAPI` path]: https://fastapi.tiangolo.com/tutorial/path-params/
 #[violation]
 pub struct MissingFStringSyntax;
 
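Concretely, the criteria enumerated in the doc comment gate diagnostics like the following, mirroring the rule's own example:

```python
name = "Sarah"
day_of_week = "Tuesday"

print("Hello {name}! It is {day_of_week} today!")   # RUF027: likely missing an `f` prefix
print(f"Hello {name}! It is {day_of_week} today!")  # OK
```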
@@ -75,11 +82,25 @@ pub(crate) fn missing_fstring_syntax(checker: &mut Checker, literal: &ast::Strin
         }
     }
 
-    // We also want to avoid expressions that are intended to be translated.
-    if semantic.current_expressions().any(|expr| {
-        is_gettext(expr, semantic)
-            || is_logger_call(expr, semantic, &checker.settings.logger_objects)
-    }) {
+    let logger_objects = &checker.settings.logger_objects;
+    let fastapi_seen = semantic.seen_module(Modules::FASTAPI);
+
+    // We also want to avoid:
+    // - Expressions inside `gettext()` calls
+    // - Expressions passed to logging calls (since the `logging` module evaluates them lazily:
+    //   https://docs.python.org/3/howto/logging-cookbook.html#using-particular-formatting-styles-throughout-your-application)
+    // - `fastAPI` paths: https://fastapi.tiangolo.com/tutorial/path-params/
+    // - Expressions where a method is immediately called on the string literal
+    if semantic
+        .current_expressions()
+        .filter_map(ast::Expr::as_call_expr)
+        .any(|call_expr| {
+            is_method_call_on_literal(call_expr, literal)
+                || is_gettext(call_expr, semantic)
+                || is_logger_candidate(&call_expr.func, semantic, logger_objects)
+                || (fastapi_seen && is_fastapi_route_call(call_expr, semantic))
+        })
+    {
         return;
     }
 
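In Python terms, the rewritten check above skips literals that flow into calls known to consume template strings rather than evaluated f-strings. A sketch of the exempted shapes (the FastAPI case additionally requires that `fastapi` has been imported, per the `seen_module(Modules::FASTAPI)` guard):

```python
import logging
from gettext import gettext as _

user = "alice"

# Skipped: logging formats lazily, so `{user}` may be a deliberate template.
logging.info("User {user} logged in")

# Skipped: gettext must look up the literal template in the translation
# catalog; an f-string would defeat the lookup.
print(_("Hello {user}"))

# Skipped when FastAPI is in use: `{user}` is a path parameter, e.g.
# @app.get("/users/{user}")
```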
@@ -90,13 +111,6 @@ pub(crate) fn missing_fstring_syntax(checker: &mut Checker, literal: &ast::Strin
     }
 }
 
-fn is_logger_call(expr: &ast::Expr, semantic: &SemanticModel, logger_objects: &[String]) -> bool {
-    let ast::Expr::Call(ast::ExprCall { func, .. }) = expr else {
-        return false;
-    };
-    logging::is_logger_candidate(func, semantic, logger_objects)
-}
-
 /// Returns `true` if an expression appears to be a `gettext` call.
 ///
 /// We want to avoid statement expressions and assignments related to aliases
@@ -107,12 +121,9 @@ fn is_logger_call(expr: &ast::Expr, semantic: &SemanticModel, logger_objects: &[
 /// and replace the original string with its translated counterpart. If the
 /// string contains variable placeholders or formatting, it can complicate the
 /// translation process, lead to errors or incorrect translations.
-fn is_gettext(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
-    let ast::Expr::Call(ast::ExprCall { func, .. }) = expr else {
-        return false;
-    };
-
-    let short_circuit = match func.as_ref() {
+fn is_gettext(call_expr: &ast::ExprCall, semantic: &SemanticModel) -> bool {
+    let func = &*call_expr.func;
+    let short_circuit = match func {
         ast::Expr::Name(ast::ExprName { id, .. }) => {
             matches!(id.as_str(), "gettext" | "ngettext" | "_")
         }
@@ -136,6 +147,21 @@ fn is_gettext(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
     })
 }
 
+/// Return `true` if `call_expr` is a method call on an [`ast::ExprStringLiteral`]
+/// in which `literal` is one of the [`ast::StringLiteral`] parts.
+///
+/// For example: `expr` is a node representing the expression `"{foo}".format(foo="bar")`,
+/// and `literal` is the node representing the string literal `"{foo}"`.
+fn is_method_call_on_literal(call_expr: &ast::ExprCall, literal: &ast::StringLiteral) -> bool {
+    let ast::Expr::Attribute(ast::ExprAttribute { value, .. }) = &*call_expr.func else {
+        return false;
+    };
+    let ast::Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = &**value else {
+        return false;
+    };
+    value.as_slice().contains(literal)
+}
+
 /// Returns `true` if `literal` is likely an f-string with a missing `f` prefix.
 /// See [`MissingFStringSyntax`] for the validation criteria.
 fn should_be_fstring(
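The new helper exempts exactly the case its doc comment describes: a method invoked directly on the literal, where the braces are usually intentional. For instance:

```python
greeting = "{foo}".format(foo="bar")  # not flagged: method call on the literal itself
```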
@@ -158,55 +184,28 @@ fn should_be_fstring(
     };
 
     let mut arg_names = FxHashSet::default();
-    let mut last_expr: Option<&ast::Expr> = None;
-    for expr in semantic.current_expressions() {
-        match expr {
-            ast::Expr::Call(ast::ExprCall {
-                arguments: ast::Arguments { keywords, args, .. },
-                func,
-                ..
-            }) => {
-                if let ast::Expr::Attribute(ast::ExprAttribute { value, .. }) = func.as_ref() {
-                    match value.as_ref() {
-                        // if the first part of the attribute is the string literal,
-                        // we want to ignore this literal from the lint.
-                        // for example: `"{x}".some_method(...)`
-                        ast::Expr::StringLiteral(expr_literal)
-                            if expr_literal.value.as_slice().contains(literal) =>
-                        {
-                            return false;
-                        }
-                        // if the first part of the attribute was the expression we
-                        // just went over in the last iteration, then we also want to pass
-                        // this over in the lint.
-                        // for example: `some_func("{x}").some_method(...)`
-                        value if last_expr == Some(value) => {
-                            return false;
-                        }
-                        _ => {}
-                    }
-                }
-                for keyword in &**keywords {
-                    if let Some(ident) = keyword.arg.as_ref() {
-                        arg_names.insert(ident.as_str());
-                    }
-                }
-                for arg in &**args {
-                    if let ast::Expr::Name(ast::ExprName { id, .. }) = arg {
-                        arg_names.insert(id.as_str());
-                    }
-                }
+    for expr in semantic
+        .current_expressions()
+        .filter_map(ast::Expr::as_call_expr)
+    {
+        let ast::Arguments { keywords, args, .. } = &expr.arguments;
+        for keyword in &**keywords {
+            if let Some(ident) = keyword.arg.as_ref() {
+                arg_names.insert(&ident.id);
+            }
+        }
+        for arg in &**args {
+            if let ast::Expr::Name(ast::ExprName { id, .. }) = arg {
+                arg_names.insert(id);
+            }
-            _ => continue,
         }
-        last_expr.replace(expr);
     }
 
     for f_string in value.f_strings() {
         let mut has_name = false;
         for element in f_string.elements.expressions() {
             if let ast::Expr::Name(ast::ExprName { id, .. }) = element.expression.as_ref() {
-                if arg_names.contains(id.as_str()) {
+                if arg_names.contains(id) {
                     return false;
                 }
                 if semantic
 
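The simplified loop above still feeds the same heuristic: if a placeholder name in the candidate string also appears as an argument of an enclosing call, the string is assumed to be a template consumed by that call. A hypothetical illustration (`render` is not from the repository):

```python
def render(template: str, **kwargs: str) -> str:
    return template.format(**kwargs)

name = "world"

# Not flagged: `name` is passed as an argument of the enclosing call,
# so "Hello {name}" is treated as a template, not a forgotten f-string.
print(render("Hello {name}", name=name))
```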
@@ -3,6 +3,7 @@ pub(crate) use assert_with_print_message::*;
 pub(crate) use assignment_in_assert::*;
 pub(crate) use asyncio_dangling_task::*;
 pub(crate) use collection_literal_concatenation::*;
+pub(crate) use decimal_from_float_literal::*;
 pub(crate) use default_factory_kwarg::*;
 pub(crate) use explicit_f_string_type_conversion::*;
 pub(crate) use function_call_in_dataclass_default::*;
@@ -36,6 +37,7 @@ mod assignment_in_assert;
 mod asyncio_dangling_task;
 mod collection_literal_concatenation;
 mod confusables;
+mod decimal_from_float_literal;
 mod default_factory_kwarg;
 mod explicit_f_string_type_conversion;
 mod function_call_in_dataclass_default;
Some files were not shown because too many files have changed in this diff.