Compare commits
1 Commit
dhruv/rest...perf-node-

| Author | SHA1 | Date |
|---|---|---|
|  | 82f33db5e6 |  |
@@ -20,7 +20,7 @@
     "extensions": [
       "ms-python.python",
       "rust-lang.rust-analyzer",
-      "fill-labs.dependi",
+      "serayuzgur.crates",
       "tamasfe.even-better-toml",
       "Swellaby.vscode-rust-test-adapter",
       "charliermarsh.ruff"

.github/workflows/ci.yaml (2 changes, vendored)

@@ -37,7 +37,7 @@ jobs:
        with:
          fetch-depth: 0

-      - uses: tj-actions/changed-files@v45
+      - uses: tj-actions/changed-files@v44
        id: changed
        with:
          files_yaml: |
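The extraction truncates before the `files_yaml` value. For reference, `tj-actions/changed-files` takes named groups of glob patterns in this input; the group names and patterns below are hypothetical, not recovered from the workflow:

```yaml
files_yaml: |
  code:
    - '**/*.rs'
    - Cargo.toml
  docs:
    - 'docs/**'
```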
.github/workflows/publish-docs.yml (9 changes, vendored)

@@ -34,10 +34,10 @@ jobs:
      - name: "Set docs version"
        run: |
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
-          # if version is missing, use 'latest'
-          if [ -z "$version" ]; then
-            echo "Using 'latest' as version"
-            version="latest"
+          # if version is missing, exit with error
+          if [[ -z "$version" ]]; then
+            echo "Can't build docs without a version."
+            exit 1
          fi

          # Use version as display name for now
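A minimal shell sketch of the two behaviors above (editorial illustration, not part of the diff) — one side substitutes a default version, the other aborts the build:

```shell
version=""   # simulate a run where neither an announcement tag nor a ref is set

# One side: fall back to a default display version and continue.
if [ -z "$version" ]; then version="latest"; fi

# Other side: treat the missing version as a fatal error instead.
# if [[ -z "$version" ]]; then echo "Can't build docs without a version." >&2; exit 1; fi
```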
@@ -145,7 +145,6 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
        run: |
          branch_name="${{ env.branch_name }}"

          # auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
          # give the PR a few seconds to be created before trying to auto-merge it
          sleep 10

@@ -45,7 +45,7 @@ repos:
        )$

  - repo: https://github.com/crate-ci/typos
-    rev: v1.24.3
+    rev: v1.23.6
    hooks:
      - id: typos

@@ -59,7 +59,7 @@ repos:
    pass_filenames: false # This makes it a lot faster

  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.3
+    rev: v0.6.1
    hooks:
      - id: ruff-format
      - id: ruff
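Rev pins like the two above are usually refreshed with pre-commit itself rather than edited by hand; a typical local workflow (standard pre-commit commands, not part of this diff):

```shell
pre-commit autoupdate      # bump each hook's rev to its latest tag
pre-commit run --all-files # re-run the updated hooks locally
```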
CHANGELOG.md (56 changes)

@@ -1,61 +1,5 @@
 # Changelog

-## 0.6.3
-
-### Preview features
-
-- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
-- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
-- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
-
-### Rule changes
-
-- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
-
-### Bug fixes
-
-- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
-- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
-- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
-- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
-- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
-- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
-- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
-
-## 0.6.2
-
-### Preview features
-
-- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
-- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
-- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
-- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
-
-### Rule changes
-
-- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
-- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
-- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
-- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
-- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
-
-### Server
-
-- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
-
-### Bug fixes
-
-- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
-- \[`pylint`\] - Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
-
-### Documentation
-
-- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
-- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
-- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
-- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
-- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))

 ## 0.6.1

 This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
@@ -397,18 +397,12 @@ which makes it a good target for benchmarking.
 git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
 ```

-Install `hyperfine`:
-
-```shell
-cargo install hyperfine
-```
-
 To benchmark the release build:

 ```shell
 cargo build --release && hyperfine --warmup 10 \
-  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
-  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
+  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
+  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"

 Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
   Time (mean ± σ):     293.8 ms ±   3.2 ms    [User: 2384.6 ms, System: 90.3 ms]

@@ -427,7 +421,7 @@ To benchmark against the ecosystem's existing tools:

 ```shell
 hyperfine --ignore-failure --warmup 5 \
-  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
+  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
   "pyflakes crates/ruff_linter/resources/test/cpython" \
   "autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
   "pycodestyle crates/ruff_linter/resources/test/cpython" \

@@ -473,7 +467,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:

 ```shell
 cargo build --release && hyperfine --warmup 10 \
-  "./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
+  "./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
 ```

 You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the

@@ -530,8 +524,6 @@ You can run the benchmarks with
 cargo benchmark
 ```

-`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
-
 #### Benchmark-driven Development

 Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use

@@ -570,7 +562,7 @@ cargo install critcmp

 #### Tips

-- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
+- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
   to only run the lexer benchmarks.
 - Use `cargo bench -p ruff_benchmark -- --quiet` for a more cleaned up output (without statistical relevance)
 - Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)
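The hunk above references `cargo install critcmp`; a sketch of the usual Criterion baseline workflow it enables (the baseline names here are arbitrary):

```shell
cargo bench -p ruff_benchmark -- --save-baseline=main
# ...apply your changes, then:
cargo bench -p ruff_benchmark -- --save-baseline=feature
critcmp main feature
```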
Cargo.lock (45 changes, generated)

@@ -1256,9 +1256,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

 [[package]]
 name = "libc"
-version = "0.2.158"
+version = "0.2.157"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+checksum = "374af5f94e54fa97cf75e945cce8a6b201e88a1a07e688b47dfd2a59c66dbd86"

 [[package]]
 name = "libcst"

@@ -1803,9 +1803,9 @@ dependencies = [

 [[package]]
 name = "quick-junit"
-version = "0.5.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62ffd2f9a162cfae131bed6d9d1ed60adced33be340a94f96952897d7cb0c240"
+checksum = "cfc1a6a5406a114913df2df8507998c755311b55b78584bed5f6e88f6417c4d4"
 dependencies = [
  "chrono",
  "indexmap",

@@ -1818,18 +1818,18 @@ dependencies = [

 [[package]]
 name = "quick-xml"
-version = "0.36.1"
+version = "0.31.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
+checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
 dependencies = [
  "memchr",
 ]

 [[package]]
 name = "quote"
-version = "1.0.37"
+version = "1.0.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
+checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
 dependencies = [
  "proc-macro2",
 ]

@@ -1926,7 +1926,6 @@ dependencies = [
 "ruff_db",
 "ruff_index",
 "ruff_python_ast",
-"ruff_python_literal",
 "ruff_python_parser",
 "ruff_python_stdlib",
 "ruff_source_file",

@@ -1936,7 +1935,6 @@ dependencies = [
 "smallvec",
 "static_assertions",
 "tempfile",
 "thiserror",
 "tracing",
 "walkdir",
 "zip",

@@ -1952,6 +1950,7 @@ dependencies = [
 "libc",
 "lsp-server",
 "lsp-types",
 "red_knot_python_semantic",
 "red_knot_workspace",
 "ruff_db",
 "ruff_notebook",

@@ -1989,7 +1988,6 @@ dependencies = [
 "anyhow",
 "crossbeam",
 "notify",
 "rayon",
 "red_knot_python_semantic",
 "ruff_cache",
 "ruff_db",

@@ -1997,7 +1995,7 @@ dependencies = [
 "ruff_text_size",
 "rustc-hash 2.0.0",
 "salsa",
 "tempfile",
 "thiserror",
 "tracing",
 ]

@@ -2091,7 +2089,7 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.6.3"
+version = "0.6.1"
 dependencies = [
  "anyhow",
  "argfile",

@@ -2149,7 +2147,6 @@ dependencies = [
 "criterion",
 "mimalloc",
 "once_cell",
 "rayon",
 "red_knot_python_semantic",
 "red_knot_workspace",
 "ruff_db",

@@ -2284,7 +2281,7 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.6.3"
+version = "0.6.1"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",

@@ -2604,7 +2601,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.6.3"
+version = "0.6.1"
 dependencies = [
  "console_error_panic_hook",
  "console_log",

@@ -2831,9 +2828,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"

 [[package]]
 name = "serde"
-version = "1.0.209"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
+checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
 dependencies = [
  "serde_derive",
 ]

@@ -2851,9 +2848,9 @@ dependencies = [

 [[package]]
 name = "serde_derive"
-version = "1.0.209"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
+checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -2873,9 +2870,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.127"
+version = "1.0.125"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad"
+checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
 dependencies = [
  "itoa",
  "memchr",

@@ -3034,9 +3031,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"

 [[package]]
 name = "syn"
-version = "2.0.76"
+version = "2.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525"
+checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9"
 dependencies = [
  "proc-macro2",
  "quote",

@@ -102,7 +102,7 @@ pep440_rs = { version = "0.6.0", features = ["serde"] }
 pretty_assertions = "1.3.0"
 proc-macro2 = { version = "1.0.79" }
 pyproject-toml = { version = "0.9.0" }
-quick-junit = { version = "0.5.0" }
+quick-junit = { version = "0.4.0" }
 quote = { version = "1.0.23" }
 rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
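Lockfile bumps like the ones above are typically produced with `cargo update` rather than edited by hand; for example (illustrative, assuming the newer versions are the targets):

```shell
cargo update -p quick-junit --precise 0.5.0
cargo update -p serde --precise 1.0.209
```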
README.md (22 changes)

@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
 1. [Who's Using Ruff?](#whos-using-ruff)
 1. [License](#license)

-## Getting Started<a id="getting-started"></a>
+## Getting Started

 For more, see the [documentation](https://docs.astral.sh/ruff/).

@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.6.3/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.6.3/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.6.3
+  rev: v0.6.1
   hooks:
     # Run the linter.
     - id: ruff

@@ -195,7 +195,7 @@ jobs:
   - uses: chartboost/ruff-action@v1
 ```

-### Configuration<a id="configuration"></a>
+### Configuration

 Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
 [_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)

@@ -291,7 +291,7 @@ features that may change prior to stabilization.
 See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
 for more on the linting and formatting commands, respectively.

-## Rules<a id="rules"></a>
+## Rules

 <!-- Begin section: Rules -->

@@ -367,21 +367,21 @@ quality tools, including:

 For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).

-## Contributing<a id="contributing"></a>
+## Contributing

 Contributions are welcome and highly appreciated. To get started, check out the
 [**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).

 You can also join us on [**Discord**](https://discord.com/invite/astral-sh).

-## Support<a id="support"></a>
+## Support

 Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
 or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).

 You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).

-## Acknowledgements<a id="acknowledgements"></a>
+## Acknowledgements

 Ruff's linter draws on both the APIs and implementation details of many other
 tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),

@@ -405,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a

 Ruff is released under the MIT license.

-## Who's Using Ruff?<a id="whos-using-ruff"></a>
+## Who's Using Ruff?

 Ruff is used by a number of major open-source projects and companies, including:

@@ -524,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
 <a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
 ```

-## License<a id="license"></a>
+## License

 This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)
@@ -13,17 +13,12 @@ The CLI supports different verbosity levels.
 - `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is link to a specific action or state change.
 - `-vvv` activates `trace!` (only in debug builds) and shows tracing-spans: At this level, you're logging everything. Most of this is wasted, it's really slow, we dump e.g. the entire resolution graph. Only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area your investigating.

-## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
+## `RED_KNOT_LOG`

 By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
 The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
 or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

-The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
-By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
-but this can result in a confused logging output where messages from different threads are intertwined.
-To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
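As an editorial illustration of the paragraph above (hypothetical invocation; the directive syntax follows tracing-subscriber's `EnvFilter`):

```shell
# Show debug-level messages from one crate only, and check on a single thread
# so log lines from different files aren't interleaved.
RED_KNOT_LOG=red_knot_python_semantic=debug RAYON_NUM_THREADS=1 red_knot -vv
```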

 ### Examples

 #### Show all debug messages
@@ -7,12 +7,12 @@ use colored::Colorize;
 use crossbeam::channel as crossbeam_channel;
 use salsa::plumbing::ZalsaDatabase;

-use red_knot_python_semantic::SitePackages;
+use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
 use red_knot_server::run_server;
 use red_knot_workspace::db::RootDatabase;
+use red_knot_workspace::site_packages::VirtualEnvironment;
 use red_knot_workspace::watch;
 use red_knot_workspace::watch::WorkspaceWatcher;
-use red_knot_workspace::workspace::settings::Configuration;
 use red_knot_workspace::workspace::WorkspaceMetadata;
 use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
 use target_version::TargetVersion;

@@ -65,14 +65,15 @@ to resolve type information for the project's third-party dependencies.",
         value_name = "PATH",
         help = "Additional path to use as a module-resolution source (can be passed multiple times)"
     )]
-    extra_search_path: Option<Vec<SystemPathBuf>>,
+    extra_search_path: Vec<SystemPathBuf>,

     #[arg(
         long,
         help = "Python version to assume when resolving types",
-        value_name = "VERSION"
-    )]
-    target_version: Option<TargetVersion>,
+        default_value_t = TargetVersion::default(),
+        value_name="VERSION")
+    ]
+    target_version: TargetVersion,

     #[clap(flatten)]
     verbosity: Verbosity,

@@ -85,36 +86,6 @@ to resolve type information for the project's third-party dependencies.",
     watch: bool,
 }

-impl Args {
-    fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
-        let mut configuration = Configuration::default();
-
-        if let Some(target_version) = self.target_version {
-            configuration.target_version = Some(target_version.into());
-        }
-
-        if let Some(venv_path) = &self.venv_path {
-            configuration.search_paths.site_packages = Some(SitePackages::Derived {
-                venv_path: SystemPath::absolute(venv_path, cli_cwd),
-            });
-        }
-
-        if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
-            configuration.search_paths.custom_typeshed =
-                Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
-        }
-
-        if let Some(extra_search_paths) = &self.extra_search_path {
-            configuration.search_paths.extra_paths = extra_search_paths
-                .iter()
-                .map(|path| Some(SystemPath::absolute(path, cli_cwd)))
-                .collect();
-        }
-
-        configuration
-    }
-}

 #[derive(Debug, clap::Subcommand)]
 pub enum Command {
     /// Start the language server
@@ -144,13 +115,22 @@ pub fn main() -> ExitStatus {
 }

 fn run() -> anyhow::Result<ExitStatus> {
-    let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
+    let Args {
+        command,
+        current_directory,
+        custom_typeshed_dir,
+        extra_search_path: extra_paths,
+        venv_path,
+        target_version,
+        verbosity,
+        watch,
+    } = Args::parse_from(std::env::args().collect::<Vec<_>>());

-    if matches!(args.command, Some(Command::Server)) {
+    if matches!(command, Some(Command::Server)) {
         return run_server().map(|()| ExitStatus::Success);
     }

-    let verbosity = args.verbosity.level();
+    let verbosity = verbosity.level();
     countme::enable(verbosity.is_trace());
     let _guard = setup_tracing(verbosity)?;

@@ -166,12 +146,10 @@ fn run() -> anyhow::Result<ExitStatus> {
         })?
     };

-    let cwd = args
-        .current_directory
-        .as_ref()
+    let cwd = current_directory
         .map(|cwd| {
             if cwd.as_std_path().is_dir() {
-                Ok(SystemPath::absolute(cwd, &cli_base_path))
+                Ok(SystemPath::absolute(&cwd, &cli_base_path))
             } else {
                 Err(anyhow!(
                     "Provided current-directory path '{cwd}' is not a directory."

@@ -182,18 +160,33 @@ fn run() -> anyhow::Result<ExitStatus> {
         .unwrap_or_else(|| cli_base_path.clone());

     let system = OsSystem::new(cwd.clone());
-    let cli_configuration = args.to_configuration(&cwd);
-    let workspace_metadata = WorkspaceMetadata::from_path(
-        system.current_directory(),
-        &system,
-        Some(cli_configuration.clone()),
-    )?;
+    let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
+
+    // TODO: Verify the remaining search path settings eagerly.
+    let site_packages = venv_path
+        .map(|path| {
+            VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
+                .and_then(|venv| venv.site_packages_directories(&system))
+        })
+        .transpose()?
+        .unwrap_or_default();
+
+    // TODO: Respect the settings from the workspace metadata. when resolving the program settings.
+    let program_settings = ProgramSettings {
+        target_version: target_version.into(),
+        search_paths: SearchPathSettings {
+            extra_paths,
+            src_root: workspace_metadata.root().to_path_buf(),
+            custom_typeshed: custom_typeshed_dir,
+            site_packages,
+        },
+    };

     // TODO: Use the `program_settings` to compute the key for the database's persistent
     // cache and load the cache if it exists.
-    let mut db = RootDatabase::new(workspace_metadata, system)?;
+    let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;

-    let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
+    let (main_loop, main_loop_cancellation_token) = MainLoop::new();

     // Listen to Ctrl+C and abort the watch mode.
     let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,7 +198,7 @@ fn run() -> anyhow::Result<ExitStatus> {
         }
     })?;

-    let exit_status = if args.watch {
+    let exit_status = if watch {
         main_loop.watch(&mut db)?
     } else {
         main_loop.run(&mut db)

@@ -245,12 +238,10 @@ struct MainLoop {

     /// The file system watcher, if running in watch mode.
     watcher: Option<WorkspaceWatcher>,
-
-    cli_configuration: Configuration,
 }

 impl MainLoop {
-    fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
+    fn new() -> (Self, MainLoopCancellationToken) {
         let (sender, receiver) = crossbeam_channel::bounded(10);

         (

@@ -258,7 +249,6 @@ impl MainLoop {
             sender: sender.clone(),
             receiver,
             watcher: None,
-            cli_configuration,
         },
         MainLoopCancellationToken { sender },
     )

@@ -341,7 +331,7 @@ impl MainLoop {
             MainLoopMessage::ApplyChanges(changes) => {
                 revision += 1;
                 // Automatically cancels any pending queries and waits for them to complete.
-                db.apply_changes(changes, Some(&self.cli_configuration));
+                db.apply_changes(changes);
                 if let Some(watcher) = self.watcher.as_mut() {
                     watcher.update(db);
                 }
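The `Option<Vec<_>>` vs. `Vec<_>` distinction in the `extra_search_path` hunk above is mostly ergonomic: clap fills an absent repeated flag with `None` in the former case and an empty vector in the latter. A self-contained sketch (the flag name mirrors the diff; the rest is illustrative):

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Cli {
    /// Absent flag -> None; `--extra-search-path a --extra-search-path b` -> Some([a, b]).
    /// Declared as plain `Vec<String>` instead, an absent flag yields an empty vector.
    #[arg(long)]
    extra_search_path: Option<Vec<String>>,
}

fn main() {
    let cli = Cli::parse();
    println!("{:?}", cli.extra_search_path.unwrap_or_default());
}
```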
@@ -5,11 +5,12 @@ use std::time::Duration;

 use anyhow::{anyhow, Context};

-use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
+use red_knot_python_semantic::{
+    resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
+};
 use red_knot_workspace::db::RootDatabase;
 use red_knot_workspace::watch;
 use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
-use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
 use red_knot_workspace::workspace::WorkspaceMetadata;
 use ruff_db::files::{system_path_to_file, File, FileError};
 use ruff_db::source::source_text;

@@ -24,7 +25,7 @@ struct TestCase {
     /// We need to hold on to it in the test case or the temp files get deleted.
     _temp_dir: tempfile::TempDir,
     root_dir: SystemPathBuf,
-    configuration: Configuration,
+    search_path_settings: SearchPathSettings,
 }

 impl TestCase {

@@ -40,6 +41,10 @@ impl TestCase {
         &self.db
     }

+    fn db_mut(&mut self) -> &mut RootDatabase {
+        &mut self.db
+    }
+
     fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
         self.try_stop_watch(Duration::from_secs(10))
             .expect("Expected watch changes but observed none.")

@@ -100,20 +105,16 @@ impl TestCase {
         Some(all_events)
     }

-    fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
-        self.db.apply_changes(changes, Some(&self.configuration));
-    }
-
     fn update_search_path_settings(
         &mut self,
-        configuration: SearchPathConfiguration,
+        f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
     ) -> anyhow::Result<()> {
         let program = Program::get(self.db());

-        self.configuration.search_paths = configuration.clone();
-        let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
+        let new_settings = f(&self.search_path_settings);

-        program.update_search_paths(&mut self.db, &new_settings)?;
+        program.update_search_paths(&mut self.db, new_settings.clone())?;
+        self.search_path_settings = new_settings;

         if let Some(watcher) = &mut self.watcher {
             watcher.update(&self.db);
@@ -178,14 +179,17 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
 where
     F: SetupFiles,
 {
-    setup_with_search_paths(setup_files, |_root, _workspace_path| {
-        SearchPathConfiguration::default()
+    setup_with_search_paths(setup_files, |_root, workspace_path| SearchPathSettings {
+        extra_paths: vec![],
+        src_root: workspace_path.to_path_buf(),
+        custom_typeshed: None,
+        site_packages: vec![],
     })
 }

 fn setup_with_search_paths<F>(
     setup_files: F,
-    create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathConfiguration,
+    create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
 ) -> anyhow::Result<TestCase>
 where
     F: SetupFiles,

@@ -217,34 +221,25 @@ where

     let system = OsSystem::new(&workspace_path);

-    let search_paths = create_search_paths(&root_path, &workspace_path);
+    let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
+    let search_path_settings = create_search_paths(&root_path, workspace.root());

-    for path in search_paths
+    for path in search_path_settings
         .extra_paths
         .iter()
-        .flatten()
-        .chain(search_paths.custom_typeshed.iter())
-        .chain(search_paths.site_packages.iter().flat_map(|site_packages| {
-            if let SitePackages::Known(path) = site_packages {
-                path.as_slice()
-            } else {
-                &[]
-            }
-        }))
+        .chain(search_path_settings.site_packages.iter())
+        .chain(search_path_settings.custom_typeshed.iter())
     {
         std::fs::create_dir_all(path.as_std_path())
             .with_context(|| format!("Failed to create search path '{path}'"))?;
     }

-    let configuration = Configuration {
-        target_version: Some(PythonVersion::PY312),
-        search_paths,
-    };
+    let settings = ProgramSettings {
+        target_version: PythonVersion::default(),
+        search_paths: search_path_settings.clone(),
+    };

-    let workspace =
-        WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
-
-    let db = RootDatabase::new(workspace, system)?;
+    let db = RootDatabase::new(workspace, settings, system)?;

     let (sender, receiver) = crossbeam::channel::unbounded();
     let watcher = directory_watcher(move |events| sender.send(events).unwrap())

@@ -259,7 +254,7 @@ where
         watcher: Some(watcher),
         _temp_dir: temp_dir,
         root_dir: root_path,
-        configuration,
+        search_path_settings,
     };

     // Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -312,7 +307,7 @@ fn new_file() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     let foo = case.system_file(&foo_path).expect("foo.py to exist.");

@@ -335,7 +330,7 @@ fn new_ignored_file() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(case.system_file(&foo_path).is_ok());
     assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);

@@ -359,7 +354,7 @@ fn changed_file() -> anyhow::Result<()> {

     assert!(!changes.is_empty());

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
     assert_eq!(&case.collect_package_files(&foo_path), &[foo]);

@@ -382,7 +377,7 @@ fn deleted_file() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(!foo.exists(case.db()));
     assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

@@ -414,7 +409,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(!foo.exists(case.db()));
     assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);

@@ -446,7 +441,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     let foo_in_workspace = case.system_file(&foo_in_workspace_path)?;

@@ -474,7 +469,7 @@ fn rename_file() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(!foo.exists(case.db()));

@@ -515,7 +510,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     let init_file = case
         .system_file(sub_new_path.join("__init__.py"))

@@ -566,7 +561,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     // `import sub.a` should no longer resolve
     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());

@@ -620,7 +615,7 @@ fn directory_renamed() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     // `import sub.a` should no longer resolve
     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());

@@ -685,7 +680,7 @@ fn directory_deleted() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     // `import sub.a` should no longer resolve
     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -699,13 +694,15 @@ fn directory_deleted() -> anyhow::Result<()> {

 #[test]
 fn search_path() -> anyhow::Result<()> {
-    let mut case = setup_with_search_paths(
-        [("bar.py", "import sub.a")],
-        |root_path, _workspace_path| SearchPathConfiguration {
-            site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
-            ..SearchPathConfiguration::default()
-        },
-    )?;
+    let mut case =
+        setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
+            SearchPathSettings {
+                extra_paths: vec![],
+                src_root: workspace_path.to_path_buf(),
+                custom_typeshed: None,
+                site_packages: vec![root_path.join("site_packages")],
+            }
+        })?;

     let site_packages = case.root_path().join("site_packages");

@@ -718,7 +715,7 @@ fn search_path() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
     assert_eq!(

@@ -739,9 +736,9 @@ fn add_search_path() -> anyhow::Result<()> {
     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());

     // Register site-packages as a search path.
-    case.update_search_path_settings(SearchPathConfiguration {
-        site_packages: Some(SitePackages::Known(vec![site_packages.clone()])),
-        ..SearchPathConfiguration::default()
+    case.update_search_path_settings(|settings| SearchPathSettings {
+        site_packages: vec![site_packages.clone()],
+        ..settings.clone()
     })
     .expect("Search path settings to be valid");

@@ -749,7 +746,7 @@ fn add_search_path() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());

@@ -758,19 +755,21 @@ fn add_search_path() -> anyhow::Result<()> {

 #[test]
 fn remove_search_path() -> anyhow::Result<()> {
-    let mut case = setup_with_search_paths(
-        [("bar.py", "import sub.a")],
-        |root_path, _workspace_path| SearchPathConfiguration {
-            site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
-            ..SearchPathConfiguration::default()
-        },
-    )?;
+    let mut case =
+        setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
+            SearchPathSettings {
+                extra_paths: vec![],
+                src_root: workspace_path.to_path_buf(),
+                custom_typeshed: None,
+                site_packages: vec![root_path.join("site_packages")],
+            }
+        })?;

     // Remove site packages from the search path settings.
     let site_packages = case.root_path().join("site_packages");
-    case.update_search_path_settings(SearchPathConfiguration {
-        site_packages: None,
-        ..SearchPathConfiguration::default()
+    case.update_search_path_settings(|settings| SearchPathSettings {
+        site_packages: vec![],
+        ..settings.clone()
     })
     .expect("Search path settings to be valid");

@@ -783,48 +782,6 @@ fn remove_search_path() -> anyhow::Result<()> {
     Ok(())
 }

-#[test]
-fn changed_versions_file() -> anyhow::Result<()> {
-    let mut case = setup_with_search_paths(
-        |root_path: &SystemPath, workspace_path: &SystemPath| {
-            std::fs::write(workspace_path.join("bar.py").as_std_path(), "import sub.a")?;
-            std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
-            std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
-            std::fs::write(
-                root_path.join("typeshed/stdlib/os.pyi").as_std_path(),
-                "# not important",
-            )?;
-
-            Ok(())
-        },
-        |root_path, _workspace_path| SearchPathConfiguration {
-            custom_typeshed: Some(root_path.join("typeshed")),
-            ..SearchPathConfiguration::default()
-        },
-    )?;
-
-    // Unset the custom typeshed directory.
-    assert_eq!(
-        resolve_module(case.db(), ModuleName::new("os").unwrap()),
-        None
-    );
-
-    std::fs::write(
-        case.root_path()
-            .join("typeshed/stdlib/VERSIONS")
-            .as_std_path(),
-        "os: 3.0-",
-    )?;
-
-    let changes = case.stop_watch();
-
-    case.apply_changes(changes);
-
-    assert!(resolve_module(case.db(), ModuleName::new("os").unwrap()).is_some());
-
-    Ok(())
-}

 /// Watch a workspace that contains two files where one file is a hardlink to another.
 ///
 /// Setup:
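The hard-link fixture described in the doc comment above can be reproduced from a shell (POSIX `ln`, shown purely for illustration):

```shell
echo "print('Version 1')" > foo.py
ln foo.py bar.py                     # bar.py shares foo.py's inode
echo "print('Version 2')" > foo.py   # both paths now read 'Version 2'
```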
@@ -871,7 +828,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");

@@ -942,7 +899,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");

@@ -981,7 +938,7 @@ mod unix {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(
         foo.permissions(case.db()),

@@ -1066,7 +1023,7 @@ mod unix {

     let changes = case.take_watch_changes();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(
         source_text(case.db(), baz.file()).as_str(),

@@ -1079,7 +1036,7 @@ mod unix {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(
         source_text(case.db(), baz.file()).as_str(),

@@ -1150,7 +1107,7 @@ mod unix {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     // The file watcher is guaranteed to emit one event for the changed file, but it isn't specified
     // if the event is emitted for the "original" or linked path because both paths are watched.

@@ -1219,11 +1176,11 @@ mod unix {

             Ok(())
         },
-        |_root, workspace| SearchPathConfiguration {
-            site_packages: Some(SitePackages::Known(vec![
-                workspace.join(".venv/lib/python3.12/site-packages")
-            ])),
-            ..SearchPathConfiguration::default()
+        |_root, workspace| SearchPathSettings {
+            extra_paths: vec![],
+            src_root: workspace.to_path_buf(),
+            custom_typeshed: None,
+            site_packages: vec![workspace.join(".venv/lib/python3.12/site-packages")],
         },
     )?;

@@ -1258,7 +1215,7 @@ mod unix {

     let changes = case.stop_watch();

-    case.apply_changes(changes);
+    case.db_mut().apply_changes(changes);

     assert_eq!(
         source_text(case.db(), baz_original_file).as_str(),
@@ -17,7 +17,6 @@ ruff_python_ast = { workspace = true }
 ruff_python_stdlib = { workspace = true }
 ruff_source_file = { workspace = true }
 ruff_text_size = { workspace = true }
-ruff_python_literal = { workspace = true }

 anyhow = { workspace = true }
 bitflags = { workspace = true }

@@ -27,7 +26,6 @@ countme = { workspace = true }
 once_cell = { workspace = true }
 ordermap = { workspace = true }
 salsa = { workspace = true }
 thiserror = { workspace = true }
 tracing = { workspace = true }
 rustc-hash = { workspace = true }
 hashbrown = { workspace = true }
@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
     /// which the `AstNodeRef` belongs.
     ///
     /// ## Safety
     ///
     /// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
     /// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
     /// the invariant `node belongs to parsed` is upheld.

     pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
         Self {
             _parsed: parsed,
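`AstNodeRef` keeps the owning `ParsedModule` alive next to a pointer into it, which is what makes the documented safety invariant enforceable. A self-contained toy of the same owner-plus-pointer pattern (this is not ruff's actual implementation):

```rust
use std::sync::Arc;

/// Holds an owner together with a raw pointer into it, so the pointee
/// cannot be freed while this struct exists.
struct OwnedRef {
    _owner: Arc<String>, // keeps the allocation alive
    ptr: *const u8,      // points into `_owner`'s buffer
}

impl OwnedRef {
    /// Safety: `ptr` must point into `owner`'s buffer — the same
    /// "node belongs to parsed" invariant described above.
    unsafe fn new(owner: Arc<String>, ptr: *const u8) -> Self {
        Self { _owner: owner, ptr }
    }
}

fn main() {
    let owner = Arc::new(String::from("hello"));
    let ptr = owner.as_ptr();
    let r = unsafe { OwnedRef::new(Arc::clone(&owner), ptr) };
    drop(owner); // fine: `r._owner` still keeps the buffer alive
    println!("{}", unsafe { *r.ptr } as char); // prints 'h'
}
```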
@@ -5,7 +5,7 @@ use rustc_hash::FxHasher;
 pub use db::Db;
 pub use module_name::ModuleName;
 pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
-pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
+pub use program::{Program, ProgramSettings, SearchPathSettings};
 pub use python_version::PythonVersion;
 pub use semantic_model::{HasTy, SemanticModel};

@@ -19,8 +19,6 @@ mod program;
 mod python_version;
 pub mod semantic_index;
 mod semantic_model;
-pub(crate) mod site_packages;
 pub mod types;

 type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;
 type FxOrderMap<K, V> = ordermap::map::OrderMap<K, V, BuildHasherDefault<FxHasher>>;

@@ -13,6 +13,7 @@ use resolver::SearchPathIterator;
 mod module;
 mod path;
 mod resolver;
+mod state;
 mod typeshed;

 #[cfg(test)]

@@ -9,11 +9,11 @@ use ruff_db::files::{system_path_to_file, vendored_path_to_file, File, FileError
 use ruff_db::system::{System, SystemPath, SystemPathBuf};
 use ruff_db::vendored::{VendoredPath, VendoredPathBuf};

-use super::typeshed::{typeshed_versions, TypeshedVersionsParseError, TypeshedVersionsQueryResult};
 use crate::db::Db;
 use crate::module_name::ModuleName;
-use crate::module_resolver::resolver::ResolverContext;
-use crate::site_packages::SitePackagesDiscoveryError;
+
+use super::state::ResolverState;
+use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult};

 /// A path that points to a Python module.
 ///
@@ -60,7 +60,7 @@ impl ModulePath {
     }

     #[must_use]
-    pub(super) fn is_directory(&self, resolver: &ResolverContext) -> bool {
+    pub(crate) fn is_directory(&self, resolver: &ResolverState) -> bool {
         let ModulePath {
             search_path,
             relative_path,

@@ -74,7 +74,7 @@ impl ModulePath {
                 == Err(FileError::IsADirectory)
             }
             SearchPathInner::StandardLibraryCustom(stdlib_root) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => false,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => {

@@ -84,7 +84,7 @@ impl ModulePath {
                 }
             }
             SearchPathInner::StandardLibraryVendored(stdlib_root) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(None, relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => false,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => resolver

@@ -96,7 +96,7 @@ impl ModulePath {
     }

     #[must_use]
-    pub(super) fn is_regular_package(&self, resolver: &ResolverContext) -> bool {
+    pub(crate) fn is_regular_package(&self, resolver: &ResolverState) -> bool {
         let ModulePath {
             search_path,
             relative_path,

@@ -113,7 +113,7 @@ impl ModulePath {
                 .is_ok()
             }
             SearchPathInner::StandardLibraryCustom(search_path) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(Some(search_path), relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => false,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(

@@ -124,7 +124,7 @@ impl ModulePath {
                 }
             }
             SearchPathInner::StandardLibraryVendored(search_path) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(None, relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => false,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => resolver

@@ -136,7 +136,7 @@ impl ModulePath {
     }

     #[must_use]
-    pub(super) fn to_file(&self, resolver: &ResolverContext) -> Option<File> {
+    pub(crate) fn to_file(&self, resolver: &ResolverState) -> Option<File> {
         let db = resolver.db.upcast();
         let ModulePath {
             search_path,

@@ -150,7 +150,7 @@ impl ModulePath {
             system_path_to_file(db, search_path.join(relative_path)).ok()
             }
             SearchPathInner::StandardLibraryCustom(stdlib_root) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => None,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => {

@@ -159,7 +159,7 @@ impl ModulePath {
                 }
             }
             SearchPathInner::StandardLibraryVendored(stdlib_root) => {
-                match query_stdlib_version(relative_path, resolver) {
+                match query_stdlib_version(None, relative_path, resolver) {
                     TypeshedVersionsQueryResult::DoesNotExist => None,
                     TypeshedVersionsQueryResult::Exists
                     | TypeshedVersionsQueryResult::MaybeExists => {
@@ -273,15 +273,19 @@ fn stdlib_path_to_module_name(relative_path: &Utf8Path) -> Option<ModuleName> {

 #[must_use]
 fn query_stdlib_version(
+    custom_stdlib_root: Option<&SystemPath>,
     relative_path: &Utf8Path,
-    context: &ResolverContext,
+    resolver: &ResolverState,
 ) -> TypeshedVersionsQueryResult {
     let Some(module_name) = stdlib_path_to_module_name(relative_path) else {
         return TypeshedVersionsQueryResult::DoesNotExist;
     };
-    let ResolverContext { db, target_version } = context;
-
-    typeshed_versions(*db).query_module(&module_name, *target_version)
+    let ResolverState {
+        db,
+        typeshed_versions,
+        target_version,
+    } = resolver;
+    typeshed_versions.query_module(*db, &module_name, custom_stdlib_root, *target_version)
 }

 /// Enumeration describing the various ways in which validation of a search path might fail.

@@ -289,7 +293,7 @@ fn query_stdlib_version(
 /// If validation fails for a search path derived from the user settings,
 /// a message must be displayed to the user,
 /// as type checking cannot be done reliably in these circumstances.
-#[derive(Debug)]
+#[derive(Debug, PartialEq, Eq)]
 pub(crate) enum SearchPathValidationError {
     /// The path provided by the user was not a directory
     NotADirectory(SystemPathBuf),

@@ -300,20 +304,18 @@ pub(crate) enum SearchPathValidationError {
     NoStdlibSubdirectory(SystemPathBuf),

     /// The typeshed path provided by the user is a directory,
-    /// but `stdlib/VERSIONS` could not be read.
+    /// but no `stdlib/VERSIONS` file exists.
     /// (This is only relevant for stdlib search paths.)
-    FailedToReadVersionsFile {
-        path: SystemPathBuf,
-        error: std::io::Error,
-    },
+    NoVersionsFile(SystemPathBuf),
+
+    /// `stdlib/VERSIONS` is a directory.
+    /// (This is only relevant for stdlib search paths.)
+    VersionsIsADirectory(SystemPathBuf),

     /// The path provided by the user is a directory,
     /// and a `stdlib/VERSIONS` file exists, but it fails to parse.
     /// (This is only relevant for stdlib search paths.)
     VersionsParseError(TypeshedVersionsParseError),
-
-    /// Failed to discover the site-packages for the configured virtual environment.
-    SitePackagesDiscovery(SitePackagesDiscoveryError),
 }

 impl fmt::Display for SearchPathValidationError {
@@ -323,16 +325,9 @@ impl fmt::Display for SearchPathValidationError {
             Self::NoStdlibSubdirectory(path) => {
                 write!(f, "The directory at {path} has no `stdlib/` subdirectory")
             }
-            Self::FailedToReadVersionsFile { path, error } => {
-                write!(
-                    f,
-                    "Failed to read the custom typeshed versions file '{path}': {error}"
-                )
-            }
+            Self::NoVersionsFile(path) => write!(f, "Expected a file at {path}/stdlib/VERSIONS"),
+            Self::VersionsIsADirectory(path) => write!(f, "{path}/stdlib/VERSIONS is a directory."),
             Self::VersionsParseError(underlying_error) => underlying_error.fmt(f),
-            SearchPathValidationError::SitePackagesDiscovery(error) => {
-                write!(f, "Failed to discover the site-packages directory: {error}")
-            }
         }
     }
 }

@@ -347,18 +342,6 @@ impl std::error::Error for SearchPathValidationError {
     }
 }

-impl From<TypeshedVersionsParseError> for SearchPathValidationError {
-    fn from(value: TypeshedVersionsParseError) -> Self {
-        Self::VersionsParseError(value)
-    }
-}
-
-impl From<SitePackagesDiscoveryError> for SearchPathValidationError {
-    fn from(value: SitePackagesDiscoveryError) -> Self {
-        Self::SitePackagesDiscovery(value)
-    }
-}

 type SearchPathResult<T> = Result<T, SearchPathValidationError>;

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

@@ -401,10 +384,11 @@ pub(crate) struct SearchPath(Arc<SearchPathInner>);

 impl SearchPath {
     fn directory_path(system: &dyn System, root: SystemPathBuf) -> SearchPathResult<SystemPathBuf> {
-        if system.is_directory(&root) {
-            Ok(root)
+        let canonicalized = system.canonicalize_path(&root).unwrap_or(root);
+        if system.is_directory(&canonicalized) {
+            Ok(canonicalized)
         } else {
-            Err(SearchPathValidationError::NotADirectory(root))
+            Err(SearchPathValidationError::NotADirectory(canonicalized))
         }
     }
@@ -423,22 +407,32 @@ impl SearchPath {
     }

     /// Create a new standard-library search path pointing to a custom directory on disk
-    pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: &SystemPath) -> SearchPathResult<Self> {
+    pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: SystemPathBuf) -> SearchPathResult<Self> {
         let system = db.system();
-        if !system.is_directory(typeshed) {
+        if !system.is_directory(&typeshed) {
             return Err(SearchPathValidationError::NotADirectory(
                 typeshed.to_path_buf(),
             ));
         }

         let stdlib =
             Self::directory_path(system, typeshed.join("stdlib")).map_err(|err| match err {
-                SearchPathValidationError::NotADirectory(_) => {
-                    SearchPathValidationError::NoStdlibSubdirectory(typeshed.to_path_buf())
+                SearchPathValidationError::NotADirectory(path) => {
+                    SearchPathValidationError::NoStdlibSubdirectory(path)
                 }
                 err => err,
             })?;

+        let typeshed_versions =
+            system_path_to_file(db.upcast(), stdlib.join("VERSIONS")).map_err(|err| match err {
+                FileError::NotFound => SearchPathValidationError::NoVersionsFile(typeshed),
+                FileError::IsADirectory => {
+                    SearchPathValidationError::VersionsIsADirectory(typeshed)
+                }
+            })?;
+        super::typeshed::parse_typeshed_versions(db, typeshed_versions)
+            .as_ref()
+            .map_err(|validation_error| {
+                SearchPathValidationError::VersionsParseError(validation_error.clone())
+            })?;
         Ok(Self(Arc::new(SearchPathInner::StandardLibraryCustom(
             stdlib,
         ))))
@@ -629,10 +623,10 @@ mod tests {
|
||||
use ruff_db::Db;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
use super::*;
|
||||
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
|
||||
use crate::python_version::PythonVersion;
|
||||
|
||||
impl ModulePath {
|
||||
#[must_use]
|
||||
@@ -644,6 +638,15 @@ mod tests {
|
||||
}
|
||||
|
||||
impl SearchPath {
|
||||
#[must_use]
|
||||
pub(crate) fn is_stdlib_search_path(&self) -> bool {
|
||||
matches!(
|
||||
&*self.0,
|
||||
SearchPathInner::StandardLibraryCustom(_)
|
||||
| SearchPathInner::StandardLibraryVendored(_)
|
||||
)
|
||||
}
|
||||
|
||||
fn join(&self, component: &str) -> ModulePath {
|
||||
self.to_module_path().join(component)
|
||||
}
|
||||
@@ -658,7 +661,7 @@ mod tests {
|
||||
.build();
|
||||
|
||||
assert_eq!(
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.with_py_extension(),
|
||||
@@ -666,7 +669,7 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
|
||||
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.join("foo")
|
||||
.with_pyi_extension(),
|
||||
@@ -777,7 +780,7 @@ mod tests {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.push("bar.py");
|
||||
@@ -789,7 +792,7 @@ mod tests {
|
||||
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
|
||||
.unwrap()
|
||||
.to_module_path()
|
||||
.push("bar.rs");
|
||||
@@ -821,7 +824,7 @@ mod tests {
|
||||
.with_custom_typeshed(MockedTypeshed::default())
|
||||
.build();
|
||||
|
||||
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
|
||||
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
|
||||
|
||||
// Must have a `.pyi` extension or no extension:
|
||||
let bad_absolute_path = SystemPath::new("foo/stdlib/x.py");
|
||||
@@ -869,7 +872,8 @@ mod tests {
|
||||
.with_custom_typeshed(typeshed)
|
||||
.with_target_version(target_version)
|
||||
.build();
|
||||
let stdlib = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
|
||||
let stdlib =
|
||||
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
|
||||
(db, stdlib)
|
||||
}
|
||||
|
||||
@@ -894,7 +898,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let asyncio_regular_package = stdlib_path.join("asyncio");
|
||||
assert!(asyncio_regular_package.is_directory(&resolver));
|
||||
@@ -922,7 +926,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let xml_namespace_package = stdlib_path.join("xml");
|
||||
assert!(xml_namespace_package.is_directory(&resolver));
|
||||
@@ -944,7 +948,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let functools_module = stdlib_path.join("functools.pyi");
|
||||
assert!(functools_module.to_file(&resolver).is_some());
|
||||
@@ -960,7 +964,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let collections_regular_package = stdlib_path.join("collections");
|
||||
assert_eq!(collections_regular_package.to_file(&resolver), None);
|
||||
@@ -976,7 +980,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let importlib_namespace_package = stdlib_path.join("importlib");
|
||||
assert_eq!(importlib_namespace_package.to_file(&resolver), None);
|
||||
@@ -997,7 +1001,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY38);
|
||||
|
||||
let non_existent = stdlib_path.join("doesnt_even_exist");
|
||||
assert_eq!(non_existent.to_file(&resolver), None);
|
||||
@@ -1025,7 +1029,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// Since we've set the target version to Py39,
|
||||
// `collections` should now exist as a directory, according to VERSIONS...
|
||||
@@ -1054,7 +1058,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// The `importlib` directory now also exists
|
||||
let importlib_namespace_package = stdlib_path.join("importlib");
|
||||
@@ -1078,7 +1082,7 @@ mod tests {
|
||||
};
|
||||
|
||||
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
|
||||
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
|
||||
let resolver = ResolverState::new(&db, PythonVersion::PY39);
|
||||
|
||||
// The `xml` package no longer exists on py39:
|
||||
let xml_namespace_package = stdlib_path.join("xml");
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use std::borrow::Cow;
|
||||
use std::iter::FusedIterator;
|
||||
use std::ops::Deref;
|
||||
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
|
||||
use ruff_db::files::{File, FilePath, FileRootKind};
|
||||
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};
|
||||
use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::VendoredPath;
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::{Program, SearchPathSettings};
|
||||
|
||||
use super::module::{Module, ModuleKind};
|
||||
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::typeshed::{vendored_typeshed_versions, TypeshedVersions};
|
||||
use crate::site_packages::VirtualEnvironment;
|
||||
use crate::{Program, PythonVersion, SearchPathSettings, SitePackages};
|
||||
use super::state::ResolverState;
|
||||
|
||||
/// Resolves a module name to a module.
|
||||
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
|
||||
@@ -122,7 +122,7 @@ pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
|
||||
Program::get(db).search_paths(db).iter(db)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
#[derive(Debug, PartialEq, Eq, Default)]
|
||||
pub(crate) struct SearchPaths {
|
||||
/// Search paths that have been statically determined purely from reading Ruff's configuration settings.
|
||||
/// These shouldn't ever change unless the config settings themselves change.
|
||||
@@ -135,8 +135,6 @@ pub(crate) struct SearchPaths {
|
||||
/// in terms of module-resolution priority until we've discovered the editable installs
|
||||
/// for the first `site-packages` path
|
||||
site_packages: Vec<SearchPath>,
|
||||
|
||||
typeshed_versions: ResolvedTypeshedVersions,
|
||||
}
|
||||
|
||||
impl SearchPaths {
|
||||
@@ -148,14 +146,8 @@ impl SearchPaths {
|
||||
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
|
||||
pub(crate) fn from_settings(
|
||||
db: &dyn Db,
|
||||
settings: &SearchPathSettings,
|
||||
settings: SearchPathSettings,
|
||||
) -> Result<Self, SearchPathValidationError> {
|
||||
fn canonicalize(path: &SystemPath, system: &dyn System) -> SystemPathBuf {
|
||||
system
|
||||
.canonicalize_path(path)
|
||||
.unwrap_or_else(|_| path.to_path_buf())
|
||||
}
|
||||
|
||||
let SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root,
|
||||
@@ -169,65 +161,45 @@ impl SearchPaths {
|
||||
let mut static_paths = vec![];
|
||||
|
||||
for path in extra_paths {
|
||||
let path = canonicalize(path, system);
|
||||
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
|
||||
tracing::debug!("Adding extra search-path '{path}'");
|
||||
tracing::debug!("Adding static extra search-path '{path}'");
|
||||
|
||||
static_paths.push(SearchPath::extra(system, path)?);
|
||||
let search_path = SearchPath::extra(system, path)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
static_paths.push(search_path);
|
||||
}
|
||||
|
||||
tracing::debug!("Adding first-party search path '{src_root}'");
|
||||
static_paths.push(SearchPath::first_party(system, src_root.to_path_buf())?);
|
||||
static_paths.push(SearchPath::first_party(system, src_root)?);
|
||||
|
||||
let (typeshed_versions, stdlib_path) = if let Some(custom_typeshed) = custom_typeshed {
|
||||
let custom_typeshed = canonicalize(custom_typeshed, system);
|
||||
static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
|
||||
tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
|
||||
|
||||
let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
&custom_typeshed,
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
|
||||
let versions_path = custom_typeshed.join("stdlib/VERSIONS");
|
||||
|
||||
let versions_content = system.read_to_string(&versions_path).map_err(|error| {
|
||||
SearchPathValidationError::FailedToReadVersionsFile {
|
||||
path: versions_path,
|
||||
error,
|
||||
}
|
||||
})?;
|
||||
|
||||
let parsed: TypeshedVersions = versions_content.parse()?;
|
||||
|
||||
let search_path = SearchPath::custom_stdlib(db, &custom_typeshed)?;
|
||||
|
||||
(ResolvedTypeshedVersions::Custom(parsed), search_path)
|
||||
search_path
|
||||
} else {
|
||||
tracing::debug!("Using vendored stdlib");
|
||||
(
|
||||
ResolvedTypeshedVersions::Vendored(vendored_typeshed_versions()),
|
||||
SearchPath::vendored_stdlib(),
|
||||
)
|
||||
};
|
||||
|
||||
static_paths.push(stdlib_path);
|
||||
|
||||
let site_packages_paths = match site_packages_paths {
|
||||
SitePackages::Derived { venv_path } => VirtualEnvironment::new(venv_path, system)
|
||||
.and_then(|venv| venv.site_packages_directories(system))?,
|
||||
SitePackages::Known(paths) => paths
|
||||
.iter()
|
||||
.map(|path| canonicalize(path, system))
|
||||
.collect(),
|
||||
};
|
||||
SearchPath::vendored_stdlib()
|
||||
});
|
||||
|
||||
let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
|
||||
|
||||
for path in site_packages_paths {
|
||||
tracing::debug!("Adding site-packages search path '{path}'");
|
||||
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
|
||||
site_packages.push(SearchPath::site_packages(system, path)?);
|
||||
let search_path = SearchPath::site_packages(system, path)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
site_packages.push(search_path);
|
||||
}
|
||||
|
||||
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
|
||||
@@ -252,48 +224,16 @@ impl SearchPaths {
|
||||
Ok(SearchPaths {
|
||||
static_paths,
|
||||
site_packages,
|
||||
typeshed_versions,
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
|
||||
pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
|
||||
SearchPathIterator {
|
||||
db,
|
||||
static_paths: self.static_paths.iter(),
|
||||
dynamic_paths: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn custom_stdlib(&self) -> Option<&SystemPath> {
|
||||
self.static_paths.iter().find_map(|search_path| {
|
||||
if search_path.is_standard_library() {
|
||||
search_path.as_system_path()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn typeshed_versions(&self) -> &TypeshedVersions {
|
||||
&self.typeshed_versions
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum ResolvedTypeshedVersions {
|
||||
Vendored(&'static TypeshedVersions),
|
||||
Custom(TypeshedVersions),
|
||||
}
|
||||
|
||||
impl Deref for ResolvedTypeshedVersions {
|
||||
type Target = TypeshedVersions;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
match self {
|
||||
ResolvedTypeshedVersions::Vendored(versions) => versions,
|
||||
ResolvedTypeshedVersions::Custom(versions) => versions,
|
||||
}
|
||||
}
|
||||
}
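The `Deref` impl above lets callers treat either variant as a plain `&TypeshedVersions`, whether the data is vendored (borrowed) or custom (owned). A minimal, self-contained sketch of that borrowed-or-owned idiom, using `&'static str`/`String` as stand-ins for the two payloads:

use std::ops::Deref;

// Stand-in for `ResolvedTypeshedVersions`: one variant borrows static data,
// the other owns parsed data, and both expose the same `&str` view.
enum Resolved {
    Vendored(&'static str),
    Custom(String),
}

impl Deref for Resolved {
    type Target = str;

    fn deref(&self) -> &str {
        match self {
            Resolved::Vendored(s) => s,
            Resolved::Custom(s) => s,
        }
    }
}

fn main() {
    let vendored = Resolved::Vendored("3.8-");
    let custom = Resolved::Custom(String::from("3.12-"));
    // Either variant can be used where `&str` methods are expected.
    assert_eq!(vendored.len(), 4);
    assert!(custom.starts_with("3.12"));
}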

/// Collect all dynamic search paths. For each `site-packages` path:
@@ -311,7 +251,6 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
    let SearchPaths {
        static_paths,
        site_packages,
        typeshed_versions: _,
    } = Program::get(db).search_paths(db);

    let mut dynamic_paths = Vec::new();
@@ -376,16 +315,12 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
        let installations = all_pth_files.iter().flat_map(PthFile::items);

        for installation in installations {
            let installation = system
                .canonicalize_path(&installation)
                .unwrap_or(installation);

            if existing_paths.insert(Cow::Owned(installation.clone())) {
                match SearchPath::editable(system, installation.clone()) {
                match SearchPath::editable(system, installation) {
                    Ok(search_path) => {
                        tracing::debug!(
                            "Adding editable installation to module resolution path {path}",
                            path = installation
                            path = search_path.as_system_path().unwrap()
                        );
                        dynamic_paths.push(search_path);
                    }
@@ -547,7 +482,7 @@ struct ModuleNameIngredient<'db> {
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
    let program = Program::get(db);
    let target_version = program.target_version(db);
    let resolver_state = ResolverContext::new(db, target_version);
    let resolver_state = ResolverState::new(db, target_version);
    let is_builtin_module =
        ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());

@@ -610,7 +545,7 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
fn resolve_package<'a, 'db, I>(
    module_search_path: &SearchPath,
    components: I,
    resolver_state: &ResolverContext<'db>,
    resolver_state: &ResolverState<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
    I: Iterator<Item = &'a str>,
@@ -692,21 +627,6 @@ impl PackageKind {
    }
}

pub(super) struct ResolverContext<'db> {
    pub(super) db: &'db dyn Db,
    pub(super) target_version: PythonVersion,
}

impl<'db> ResolverContext<'db> {
    pub(super) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
        Self { db, target_version }
    }

    pub(super) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::files::{system_path_to_file, File, FilePath};
@@ -861,7 +781,7 @@ mod tests {
            "Search path for {module_name} was unexpectedly {search_path:?}"
        );
        assert!(
            search_path.is_standard_library(),
            search_path.is_stdlib_search_path(),
            "Expected a stdlib search path, but got {search_path:?}"
        );
    }
@@ -957,7 +877,7 @@ mod tests {
            "Search path for {module_name} was unexpectedly {search_path:?}"
        );
        assert!(
            search_path.is_standard_library(),
            search_path.is_stdlib_search_path(),
            "Expected a stdlib search path, but got {search_path:?}"
        );
    }
@@ -1274,13 +1194,13 @@ mod tests {

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version: PythonVersion::PY38,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(custom_typeshed.clone()),
                    site_packages: SitePackages::Known(vec![site_packages]),
                    site_packages: vec![site_packages],
                },
            },
        )
@@ -1779,16 +1699,13 @@ not_a_directory

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: SystemPathBuf::from("/src"),
                    custom_typeshed: None,
                    site_packages: SitePackages::Known(vec![
                        venv_site_packages,
                        system_site_packages,
                    ]),
                    site_packages: vec![venv_site_packages, system_site_packages],
                },
            },
        )

25
crates/red_knot_python_semantic/src/module_resolver/state.rs
Normal file
@@ -0,0 +1,25 @@
use ruff_db::vendored::VendoredFileSystem;

use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::python_version::PythonVersion;

pub(crate) struct ResolverState<'db> {
    pub(crate) db: &'db dyn Db,
    pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
    pub(crate) target_version: PythonVersion,
}

impl<'db> ResolverState<'db> {
    pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
        Self {
            db,
            typeshed_versions: LazyTypeshedVersions::new(),
            target_version,
        }
    }

    pub(crate) fn vendored(&self) -> &VendoredFileSystem {
        self.db.vendored()
    }
}
@@ -4,7 +4,7 @@ use ruff_db::vendored::VendoredPathBuf;
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};
use crate::ProgramSettings;

/// A test case for the module resolver.
///
@@ -179,7 +179,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            first_party_files,
            site_packages_files,
        } = self;

        TestCaseBuilder {
            typeshed_option: typeshed,
            target_version,
@@ -196,7 +195,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
            site_packages,
            target_version,
        } = self.with_custom_typeshed(MockedTypeshed::default()).build();

        TestCase {
            db,
            src,
@@ -225,13 +223,13 @@ impl TestCaseBuilder<MockedTypeshed> {

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: Some(typeshed.clone()),
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                    site_packages: vec![site_packages.clone()],
                },
            },
        )
@@ -281,11 +279,13 @@ impl TestCaseBuilder<VendoredTypeshed> {

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version,
                search_paths: SearchPathSettings {
                    site_packages: SitePackages::Known(vec![site_packages.clone()]),
                    ..SearchPathSettings::new(src.clone())
                    extra_paths: vec![],
                    src_root: src.clone(),
                    custom_typeshed: None,
                    site_packages: vec![site_packages.clone()],
                },
            },
        )

@@ -1,6 +1,6 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
    typeshed_versions, vendored_typeshed_versions, TypeshedVersions, TypeshedVersionsParseError,
    parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
    TypeshedVersionsQueryResult,
};

@@ -1,3 +1,4 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
@@ -5,12 +6,78 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;

use once_cell::sync::Lazy;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;

use ruff_db::files::{system_path_to_file, File};

use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};
use crate::python_version::PythonVersion;

#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);

impl<'db> LazyTypeshedVersions<'db> {
    #[must_use]
    pub(crate) fn new() -> Self {
        Self(OnceCell::new())
    }

    /// Query whether a module exists at runtime in the stdlib on a certain Python version.
    ///
    /// Simply probing whether a file exists in typeshed is insufficient for this question,
    /// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
    /// will still be available (either in a custom typeshed dir or in our vendored copy)
    /// even if the user specified Python 3.8 as the target version.
    ///
    /// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
    /// as to whether the module exists on the specified target version. However, VERSIONS does not
    /// provide comprehensive information on all submodules, meaning that this method sometimes
    /// returns [`TypeshedVersionsQueryResult::MaybeExists`].
    /// See [`TypeshedVersionsQueryResult`] for more details.
    #[must_use]
    pub(crate) fn query_module(
        &self,
        db: &'db dyn Db,
        module: &ModuleName,
        stdlib_root: Option<&SystemPath>,
        target_version: PythonVersion,
    ) -> TypeshedVersionsQueryResult {
        let versions = self.0.get_or_init(|| {
            let versions_path = if let Some(system_path) = stdlib_root {
                system_path.join("VERSIONS")
            } else {
                return &VENDORED_VERSIONS;
            };
            let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
                todo!(
                    "Still need to figure out how to handle VERSIONS files being deleted \
                    from custom typeshed directories! Expected a file to exist at {versions_path}"
                )
            };
            // TODO(Alex/Micha): If VERSIONS is invalid,
            // this should invalidate not just the specific module resolution we're currently attempting,
            // but all type inference that depends on any standard-library types.
            // Unwrapping here is not correct...
            parse_typeshed_versions(db, versions_file).as_ref().unwrap()
        });
        versions.query_module(module, target_version)
    }
}
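`LazyTypeshedVersions` leans on `OnceCell::get_or_init`: the closure runs at most once, and every later query reuses the cached reference. A minimal, self-contained sketch of that caching idiom (the length computation here is a stand-in for the real `parse_typeshed_versions` query):

use std::cell::OnceCell;

// Stand-in for the lazily computed data; the real code caches a
// `&TypeshedVersions` obtained from a salsa query.
struct LazyLen<'a>(OnceCell<usize>, &'a str);

impl<'a> LazyLen<'a> {
    fn get(&self) -> usize {
        // The closure runs only on the first call; subsequent calls
        // return the cached value without re-running it.
        *self.0.get_or_init(|| self.1.len())
    }
}

fn main() {
    let lazy = LazyLen(OnceCell::new(), "asyncio: 3.4-");
    assert_eq!(lazy.get(), 13);
    assert_eq!(lazy.get(), 13); // served from the cache
}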

#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
    db: &dyn Db,
    versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
    // TODO: Handle IO errors
    let file_content = versions_file
        .read_to_string(db.upcast())
        .unwrap_or_default();
    file_content.parse()
}

static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
    TypeshedVersions::from_str(
@@ -21,14 +88,6 @@ static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
    .unwrap()
});

pub(crate) fn vendored_typeshed_versions() -> &'static TypeshedVersions {
    &VENDORED_VERSIONS
}

pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
    Program::get(db).search_paths(db).typeshed_versions()
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
    line_number: Option<NonZeroU16>,
@@ -115,7 +174,7 @@ impl TypeshedVersions {
    }

    #[must_use]
    pub(in crate::module_resolver) fn query_module(
    fn query_module(
        &self,
        module: &ModuleName,
        target_version: PythonVersion,
@@ -145,7 +204,7 @@ impl TypeshedVersions {
    }
}

/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {

@@ -1,4 +1,4 @@
use ruff_python_ast::{AnyNodeRef, NodeKind};
use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
use ruff_text_size::{Ranged, TextRange};

/// Compact key for a node for use in a hash map.
@@ -11,7 +11,19 @@ pub(super) struct NodeKey {
}

impl NodeKey {
    pub(super) fn from_node<'a, N>(node: N) -> Self
    #[inline]
    pub(super) fn from_node<'a, N>(node: &N) -> Self
    where
        N: AstNode,
    {
        NodeKey {
            kind: node.kind(),
            range: node.range(),
        }
    }

    #[inline]
    pub(super) fn from_ref<'a, N>(node: N) -> Self
    where
        N: Into<AnyNodeRef<'a>>,
    {

@@ -3,7 +3,7 @@ use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_db::system::SystemPathBuf;

use crate::module_resolver::SearchPaths;
use crate::Db;
@@ -12,12 +12,13 @@ use crate::Db;
pub struct Program {
    pub target_version: PythonVersion,

    #[default]
    #[return_ref]
    pub(crate) search_paths: SearchPaths,
}

impl Program {
    pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
        let ProgramSettings {
            target_version,
            search_paths,
@@ -28,15 +29,16 @@ impl Program {
        let search_paths = SearchPaths::from_settings(db, search_paths)
            .with_context(|| "Invalid search path settings")?;

        Ok(Program::builder(settings.target_version, search_paths)
        Ok(Program::builder(settings.target_version)
            .durability(Durability::HIGH)
            .search_paths(search_paths)
            .new(db))
    }

    pub fn update_search_paths(
        self,
        &self,
        db: &mut dyn Db,
        search_path_settings: &SearchPathSettings,
        search_path_settings: SearchPathSettings,
    ) -> anyhow::Result<()> {
        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

@@ -47,20 +49,16 @@ impl Program {

        Ok(())
    }

    pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
        self.search_paths(db).custom_stdlib()
    }
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -76,25 +74,5 @@ pub struct SearchPathSettings {
    pub custom_typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: SitePackages,
}

impl SearchPathSettings {
    pub fn new(src_root: SystemPathBuf) -> Self {
        Self {
            src_root,
            extra_paths: vec![],
            custom_typeshed: None,
            site_packages: SitePackages::Known(vec![]),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
    Derived {
        venv_path: SystemPathBuf,
    },
    /// Resolved site packages paths
    Known(Vec<SystemPathBuf>),
    pub site_packages: Vec<SystemPathBuf>,
}
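For illustration only, a sketch of how a caller might populate the two `SitePackages` variants removed above: either hand the resolver a virtual environment to probe, or pass already-resolved directories. This assumes the crate's `SitePackages` and `SystemPathBuf` are in scope (`SystemPathBuf::from` is used the same way elsewhere in this diff), and the paths are hypothetical:

fn example_site_packages() -> (SitePackages, SitePackages) {
    // Let the resolver derive site-packages from a virtual environment...
    let derived = SitePackages::Derived {
        venv_path: SystemPathBuf::from("/project/.venv"),
    };
    // ...or hand it pre-resolved directories directly.
    let known = SitePackages::Known(vec![SystemPathBuf::from(
        "/project/.venv/lib/python3.12/site-packages",
    )]);
    (derived, known)
}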

@@ -575,7 +575,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
    let index = semantic_index(&db, file);
    let global_table = symbol_table(&db, global_scope(&db, file));

    assert_eq!(names(&global_table), vec!["str", "int", "f"]);
    assert_eq!(names(&global_table), vec!["f", "str", "int"]);

    let [(function_scope_id, _function_scope)] = index
        .child_scopes(FileScopeId::global())
@@ -666,7 +666,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
    fn comprehension_scope() {
        let TestCase { db, file } = test_case(
            "
[x for x, y in iter1]
[x for x in iter1]
",
        );

@@ -690,22 +690,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):

        let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

        assert_eq!(names(&comprehension_symbol_table), vec!["x", "y"]);

        let use_def = index.use_def_map(comprehension_scope_id);
        for name in ["x", "y"] {
            let definition = use_def
                .first_public_definition(
                    comprehension_symbol_table
                        .symbol_id_by_name(name)
                        .expect("symbol exists"),
                )
                .unwrap();
            assert!(matches!(
                definition.node(&db),
                DefinitionKind::Comprehension(_)
            ));
        }
        assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
    }

    /// Test case to validate that the `x` variable used in the comprehension is referencing the
@@ -745,8 +730,8 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
        let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
            panic!("expected generator definition")
        };
        let target = comprehension.target();
        let name = target.id().as_str();
        let ast::Comprehension { target, .. } = comprehension.node();
        let name = target.as_name_expr().unwrap().id().as_str();

        assert_eq!(name, "x");
        assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
@@ -805,56 +790,6 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
        assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
    }

    #[test]
    fn with_item_definition() {
        let TestCase { db, file } = test_case(
            "
with item1 as x, item2 as y:
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["item1", "x", "item2", "y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let Some(definition) = use_def.first_public_definition(
                global_table.symbol_id_by_name(name).expect("symbol exists"),
            ) else {
                panic!("Expected with item definition for {name}");
            };
            assert!(matches!(definition.node(&db), DefinitionKind::WithItem(_)));
        }
    }

    #[test]
    fn with_item_unpacked_definition() {
        let TestCase { db, file } = test_case(
            "
with context() as (x, y):
    pass
",
        );

        let index = semantic_index(&db, file);
        let global_table = index.symbol_table(FileScopeId::global());

        assert_eq!(names(&global_table), vec!["context", "x", "y"]);

        let use_def = index.use_def_map(FileScopeId::global());
        for name in ["x", "y"] {
            let Some(definition) = use_def.first_public_definition(
                global_table.symbol_id_by_name(name).expect("symbol exists"),
            ) else {
                panic!("Expected with item definition for {name}");
            };
            assert!(matches!(definition.node(&db), DefinitionKind::WithItem(_)));
        }
    }

    #[test]
    fn dupes() {
        let TestCase { db, file } = test_case(
@@ -1088,7 +1023,7 @@ def x():
    }

    #[test]
    fn match_stmt() {
    fn match_stmt_symbols() {
        let TestCase { db, file } = test_case(
            "
match subject:
@@ -1102,124 +1037,12 @@ match subject:
",
        );

        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);
        let global_table = symbol_table(&db, global_scope(&db, file));

        assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
        assert_eq!(
            names(&global_table),
            vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
            vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
        );

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [
            ("a", 0),
            ("b", 0),
            ("c", 1),
            ("d", 2),
            ("e", 0),
            ("f", 1),
            ("g", 0),
            ("h", 1),
            ("i", 0),
            ("j", 1),
            ("k", 0),
            ("l", 1),
        ] {
            let definition = use_def
                .first_public_definition(
                    global_table.symbol_id_by_name(name).expect("symbol exists"),
                )
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = definition.node(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn nested_match_case() {
        let TestCase { db, file } = test_case(
            "
match 1:
    case first:
        match 2:
            case second:
                pass
",
        );

        let global_scope_id = global_scope(&db, file);
        let global_table = symbol_table(&db, global_scope_id);

        assert_eq!(names(&global_table), vec!["first", "second"]);

        let use_def = use_def_map(&db, global_scope_id);
        for (name, expected_index) in [("first", 0), ("second", 0)] {
            let definition = use_def
                .first_public_definition(
                    global_table.symbol_id_by_name(name).expect("symbol exists"),
                )
                .expect("Expected with item definition for {name}");
            if let DefinitionKind::MatchPattern(pattern) = definition.node(&db) {
                assert_eq!(pattern.index(), expected_index);
            } else {
                panic!("Expected match pattern definition for {name}");
            }
        }
    }

    #[test]
    fn for_loops_single_assignment() {
        let TestCase { db, file } = test_case("for x in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x"]);

        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();

        assert!(matches!(definition.node(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_simple_unpacking() {
        let TestCase { db, file } = test_case("for (x, y) in a: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["a", "x", "y"]);

        let use_def = use_def_map(&db, scope);
        let x_definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("x").unwrap())
            .unwrap();
        let y_definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("y").unwrap())
            .unwrap();

        assert!(matches!(x_definition.node(&db), DefinitionKind::For(_)));
        assert!(matches!(y_definition.node(&db), DefinitionKind::For(_)));
    }

    #[test]
    fn for_loops_complex_unpacking() {
let TestCase { db, file } = test_case("for [((a,) b), (c, d)] in e: pass");
        let scope = global_scope(&db, file);
        let global_table = symbol_table(&db, scope);

        assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);

        let use_def = use_def_map(&db, scope);
        let definition = use_def
            .first_public_definition(global_table.symbol_id_by_name("a").unwrap())
            .unwrap();

        assert!(matches!(definition.node(&db), DefinitionKind::For(_)));
    }
}

@@ -197,12 +197,14 @@ pub(crate) mod node_key {
    pub(crate) struct ExpressionNodeKey(NodeKey);

    impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
        #[inline]
        fn from(value: ast::ExpressionRef<'_>) -> Self {
            Self(NodeKey::from_node(value))
            Self(NodeKey::from_ref(value))
        }
    }

    impl From<&ast::Expr> for ExpressionNodeKey {
        #[inline]
        fn from(value: &ast::Expr) -> Self {
            Self(NodeKey::from_node(value))
        }

@@ -15,7 +15,7 @@ use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
    AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
    DefinitionNodeRef, ForStmtDefinitionNodeRef, ImportFromDefinitionNodeRef,
    DefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
@@ -26,8 +26,6 @@ use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;

use super::definition::{MatchPatternDefinitionNodeRef, WithItemDefinitionNodeRef};

pub(super) struct SemanticIndexBuilder<'db> {
    // Builder state
    db: &'db dyn Db,
@@ -36,8 +34,6 @@ pub(super) struct SemanticIndexBuilder<'db> {
    scope_stack: Vec<FileScopeId>,
    /// The assignment we're currently visiting.
    current_assignment: Option<CurrentAssignment<'db>>,
    /// The match case we're currently visiting.
    current_match_case: Option<CurrentMatchCase<'db>>,
    /// Flow states at each `break` in the current loop.
    loop_break_states: Vec<FlowSnapshot>,

@@ -61,7 +57,6 @@ impl<'db> SemanticIndexBuilder<'db> {
            module: parsed,
            scope_stack: Vec::new(),
            current_assignment: None,
            current_match_case: None,
            loop_break_states: vec![],

            scopes: IndexVec::new(),
@@ -285,7 +280,6 @@ impl<'db> SemanticIndexBuilder<'db> {

        // The `iter` of the first generator is evaluated in the outer scope, while all subsequent
        // nodes are evaluated in the inner scope.
        self.add_standalone_expression(&generator.iter);
        self.visit_expr(&generator.iter);
        self.push_scope(scope);

@@ -301,7 +295,6 @@ impl<'db> SemanticIndexBuilder<'db> {
        }

        for generator in generators_iter {
            self.add_standalone_expression(&generator.iter);
            self.visit_expr(&generator.iter);

            self.current_assignment = Some(CurrentAssignment::Comprehension {
@@ -397,6 +390,20 @@ where
                self.visit_decorator(decorator);
            }

            let symbol = self
                .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
            self.add_definition(symbol, function_def);

            // The default value of the parameters needs to be evaluated in the
            // enclosing scope.
            for default in function_def
                .parameters
                .iter_non_variadic_params()
                .filter_map(|param| param.default.as_deref())
            {
                self.visit_expr(default);
            }

            self.with_type_params(
                NodeWithScopeRef::FunctionTypeParameters(function_def),
                function_def.type_params.as_deref(),
@@ -417,21 +424,6 @@ where
                    builder.pop_scope()
                },
            );
            // The default value of the parameters needs to be evaluated in the
            // enclosing scope.
            for default in function_def
                .parameters
                .iter_non_variadic_params()
                .filter_map(|param| param.default.as_deref())
            {
                self.visit_expr(default);
            }
            // The symbol for the function name itself has to be evaluated
            // at the end to match the runtime evaluation of parameter defaults
            // and return-type annotations.
            let symbol = self
                .add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
            self.add_definition(symbol, function_def);
        }
        ast::Stmt::ClassDef(class) => {
            for decorator in &class.decorator_list {
@@ -569,53 +561,9 @@ where
                    self.flow_merge(break_state);
                }
            }
            ast::Stmt::With(ast::StmtWith { items, body, .. }) => {
                for item in items {
                    self.visit_expr(&item.context_expr);
                    if let Some(optional_vars) = item.optional_vars.as_deref() {
                        self.add_standalone_expression(&item.context_expr);
                        self.current_assignment = Some(item.into());
                        self.visit_expr(optional_vars);
                        self.current_assignment = None;
                    }
                }
                self.visit_body(body);
            }
            ast::Stmt::Break(_) => {
                self.loop_break_states.push(self.flow_snapshot());
            }

            ast::Stmt::For(
                for_stmt @ ast::StmtFor {
                    range: _,
                    is_async: _,
                    target,
                    iter,
                    body,
                    orelse,
                },
            ) => {
                // TODO add control flow similar to `ast::Stmt::While` above
                self.add_standalone_expression(iter);
                self.visit_expr(iter);
                debug_assert!(self.current_assignment.is_none());
                self.current_assignment = Some(for_stmt.into());
                self.visit_expr(target);
                self.current_assignment = None;
                self.visit_body(body);
                self.visit_body(orelse);
            }
            ast::Stmt::Match(ast::StmtMatch {
                subject,
                cases,
                range: _,
            }) => {
                self.add_standalone_expression(subject);
                self.visit_expr(subject);
                for case in cases {
                    self.visit_match_case(case);
                }
            }
            _ => {
                walk_stmt(self, stmt);
            }
@@ -662,15 +610,6 @@ where
                Some(CurrentAssignment::AugAssign(aug_assign)) => {
                    self.add_definition(symbol, aug_assign);
                }
                Some(CurrentAssignment::For(node)) => {
                    self.add_definition(
                        symbol,
                        ForStmtDefinitionNodeRef {
                            iterable: &node.iter,
                            target: name_node,
                        },
                    );
                }
                Some(CurrentAssignment::Named(named)) => {
                    // TODO(dhruvmanila): If the current scope is a comprehension, then the
                    // named expression is implicitly nonlocal. This is yet to be
@@ -680,20 +619,7 @@ where
                Some(CurrentAssignment::Comprehension { node, first }) => {
                    self.add_definition(
                        symbol,
                        ComprehensionDefinitionNodeRef {
                            iterable: &node.iter,
                            target: name_node,
                            first,
                        },
                    );
                }
                Some(CurrentAssignment::WithItem(with_item)) => {
                    self.add_definition(
                        symbol,
                        WithItemDefinitionNodeRef {
                            node: with_item,
                            target: name_node,
                        },
                        ComprehensionDefinitionNodeRef { node, first },
                    );
                }
                None => {}
@@ -709,11 +635,11 @@ where
            }
            ast::Expr::Named(node) => {
                debug_assert!(self.current_assignment.is_none());
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.value);
                self.current_assignment = Some(node.into());
                // TODO walrus in comprehensions is implicitly nonlocal
                self.visit_expr(&node.target);
                self.current_assignment = None;
                self.visit_expr(&node.value);
            }
            ast::Expr::Lambda(lambda) => {
                if let Some(parameters) = &lambda.parameters {
@@ -814,7 +740,7 @@ where
        }
    }

    fn visit_parameters(&mut self, parameters: &'ast ast::Parameters) {
    fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
        // Intentionally avoid walking default expressions, as we handle them in the enclosing
        // scope.
        for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
@@ -822,58 +748,23 @@ where
        }
    }

    fn visit_match_case(&mut self, match_case: &'ast ast::MatchCase) {
        debug_assert!(self.current_match_case.is_none());
        self.current_match_case = Some(CurrentMatchCase::new(&match_case.pattern));
        self.visit_pattern(&match_case.pattern);
        self.current_match_case = None;

        if let Some(expr) = &match_case.guard {
            self.visit_expr(expr);
        }
        self.visit_body(&match_case.body);
    }

    fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
        if let ast::Pattern::MatchStar(ast::PatternMatchStar {
            name: Some(name),
            range: _,
        }) = pattern
        {
            let symbol = self.add_or_update_symbol(name.id().clone(), SymbolFlags::IS_DEFINED);
            let state = self.current_match_case.as_ref().unwrap();
            self.add_definition(
                symbol,
                MatchPatternDefinitionNodeRef {
                    pattern: state.pattern,
                    identifier: name,
                    index: state.index,
                },
            );
        }

        walk_pattern(self, pattern);

        if let ast::Pattern::MatchAs(ast::PatternMatchAs {
            name: Some(name), ..
        })
        | ast::Pattern::MatchStar(ast::PatternMatchStar {
            name: Some(name),
            range: _,
        })
        | ast::Pattern::MatchMapping(ast::PatternMatchMapping {
            rest: Some(name), ..
        }) = pattern
        {
            let symbol = self.add_or_update_symbol(name.id().clone(), SymbolFlags::IS_DEFINED);
            let state = self.current_match_case.as_ref().unwrap();
            self.add_definition(
                symbol,
                MatchPatternDefinitionNodeRef {
                    pattern: state.pattern,
                    identifier: name,
                    index: state.index,
                },
            );
            // TODO(dhruvmanila): Add definition
            self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
        }

        self.current_match_case.as_mut().unwrap().index += 1;
        walk_pattern(self, pattern);
    }
}

@@ -882,13 +773,11 @@ enum CurrentAssignment<'a> {
    Assign(&'a ast::StmtAssign),
    AnnAssign(&'a ast::StmtAnnAssign),
    AugAssign(&'a ast::StmtAugAssign),
    For(&'a ast::StmtFor),
    Named(&'a ast::ExprNamed),
    Comprehension {
        node: &'a ast::Comprehension,
        first: bool,
    },
    WithItem(&'a ast::WithItem),
}

impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
@@ -909,44 +798,8 @@ impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
    }
}

impl<'a> From<&'a ast::StmtFor> for CurrentAssignment<'a> {
    fn from(value: &'a ast::StmtFor) -> Self {
        Self::For(value)
    }
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
    fn from(value: &'a ast::ExprNamed) -> Self {
        Self::Named(value)
    }
}

impl<'a> From<&'a ast::WithItem> for CurrentAssignment<'a> {
    fn from(value: &'a ast::WithItem) -> Self {
        Self::WithItem(value)
    }
}

struct CurrentMatchCase<'a> {
    /// The pattern that's part of the current match case.
    pattern: &'a ast::Pattern,

    /// The index of the sub-pattern that's being currently visited within the pattern.
    ///
    /// For example:
    /// ```py
    /// match subject:
    ///     case a as b: ...
    ///     case [a, b]: ...
    ///     case a | b: ...
    /// ```
    ///
    /// In all of the above cases, the index would be 0 for `a` and 1 for `b`.
    index: u32,
}

impl<'a> CurrentMatchCase<'a> {
    fn new(pattern: &'a ast::Pattern) -> Self {
        Self { pattern, index: 0 }
    }
}
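To make the `index` bookkeeping above concrete: `visit_pattern` bumps the counter once per sub-pattern at the same level, so in `case a | b:` the identifier `a` is recorded with index 0 and `b` with index 1. A minimal, self-contained sketch of that counting discipline (the flat name list is a hypothetical stand-in for walking the real pattern AST):

use std::collections::HashMap;

// Record each sub-pattern name with the current index, then bump the
// counter, mirroring `self.current_match_case.as_mut().unwrap().index += 1`.
fn record_indices(names: &[&str]) -> HashMap<String, u32> {
    let mut index = 0;
    let mut out = HashMap::new();
    for name in names {
        out.insert((*name).to_string(), index);
        index += 1;
    }
    out
}

fn main() {
    let indices = record_indices(&["a", "b"]);
    assert_eq!(indices["a"], 0);
    assert_eq!(indices["b"], 1);
}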
|
||||
|
||||
@@ -39,7 +39,6 @@ impl<'db> Definition<'db> {
|
||||
pub(crate) enum DefinitionNodeRef<'a> {
|
||||
Import(&'a ast::Alias),
|
||||
ImportFrom(ImportFromDefinitionNodeRef<'a>),
|
||||
For(ForStmtDefinitionNodeRef<'a>),
|
||||
Function(&'a ast::StmtFunctionDef),
|
||||
Class(&'a ast::StmtClassDef),
|
||||
NamedExpression(&'a ast::ExprNamed),
|
||||
@@ -48,8 +47,6 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
AugmentedAssignment(&'a ast::StmtAugAssign),
|
||||
Comprehension(ComprehensionDefinitionNodeRef<'a>),
|
||||
Parameter(ast::AnyParameterRef<'a>),
|
||||
WithItem(WithItemDefinitionNodeRef<'a>),
|
||||
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
@@ -94,24 +91,12 @@ impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
|
||||
Self::For(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
|
||||
Self::Assignment(node_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<WithItemDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: WithItemDefinitionNodeRef<'a>) -> Self {
|
||||
Self::WithItem(node_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
|
||||
Self::Comprehension(node)
|
||||
@@ -124,12 +109,6 @@ impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
|
||||
Self::MatchPattern(node)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
@@ -142,36 +121,12 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct WithItemDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::WithItem,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
|
||||
pub(crate) iterable: &'a ast::Expr,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
|
||||
pub(crate) iterable: &'a ast::Expr,
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
pub(crate) node: &'a ast::Comprehension,
|
||||
pub(crate) first: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
|
||||
/// The outermost pattern node in which the identifier being defined occurs.
|
||||
pub(crate) pattern: &'a ast::Pattern,
|
||||
/// The identifier being defined.
|
||||
pub(crate) identifier: &'a ast::Identifier,
|
||||
/// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
|
||||
/// order.
|
||||
pub(crate) index: u32,
|
||||
}
|
||||
|
||||
impl DefinitionNodeRef<'_> {
    #[allow(unsafe_code)]
    pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
@@ -206,21 +161,12 @@ impl DefinitionNodeRef<'_> {
            DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
                DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
            }
            DefinitionNodeRef::For(ForStmtDefinitionNodeRef { iterable, target }) => {
                DefinitionKind::For(ForStmtDefinitionKind {
                    iterable: AstNodeRef::new(parsed.clone(), iterable),
                    target: AstNodeRef::new(parsed, target),
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
                DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                    node: AstNodeRef::new(parsed, node),
                    first,
                })
            }
            DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
                iterable,
                target,
                first,
            }) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
                iterable: AstNodeRef::new(parsed.clone(), iterable),
                target: AstNodeRef::new(parsed, target),
                first,
            }),
            DefinitionNodeRef::Parameter(parameter) => match parameter {
                ast::AnyParameterRef::Variadic(parameter) => {
                    DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
@@ -229,21 +175,6 @@ impl DefinitionNodeRef<'_> {
                    DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
                }
            },
            DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef { node, target }) => {
                DefinitionKind::WithItem(WithItemDefinitionKind {
                    node: AstNodeRef::new(parsed.clone(), node),
                    target: AstNodeRef::new(parsed, target),
                })
            }
            DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
                pattern,
                identifier,
                index,
            }) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
                pattern: AstNodeRef::new(parsed.clone(), pattern),
                identifier: AstNodeRef::new(parsed, identifier),
                index,
            }),
        }
    }

@@ -262,19 +193,11 @@ impl DefinitionNodeRef<'_> {
            }) => target.into(),
            Self::AnnotatedAssignment(node) => node.into(),
            Self::AugmentedAssignment(node) => node.into(),
            Self::For(ForStmtDefinitionNodeRef {
                iterable: _,
                target,
            }) => target.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
            Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
            Self::Parameter(node) => match node {
                ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
                ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
            },
            Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
            Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
                identifier.into()
            }
        }
    }
}
@@ -289,46 +212,20 @@ pub enum DefinitionKind {
    Assignment(AssignmentDefinitionKind),
    AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
    AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
    For(ForStmtDefinitionKind),
    Comprehension(ComprehensionDefinitionKind),
    Parameter(AstNodeRef<ast::Parameter>),
    ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
    WithItem(WithItemDefinitionKind),
    MatchPattern(MatchPatternDefinitionKind),
}

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
    pattern: AstNodeRef<ast::Pattern>,
    identifier: AstNodeRef<ast::Identifier>,
    index: u32,
}

impl MatchPatternDefinitionKind {
    pub(crate) fn pattern(&self) -> &ast::Pattern {
        self.pattern.node()
    }

    pub(crate) fn index(&self) -> u32 {
        self.index
    }
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
    node: AstNodeRef<ast::Comprehension>,
    first: bool,
}

impl ComprehensionDefinitionKind {
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    pub(crate) fn node(&self) -> &ast::Comprehension {
        self.node.node()
    }

    pub(crate) fn is_first(&self) -> bool {
@@ -353,6 +250,7 @@ impl ImportFromDefinitionKind {
    }

#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct AssignmentDefinitionKind {
    assignment: AstNodeRef<ast::StmtAssign>,
    target: AstNodeRef<ast::ExprName>,
@@ -368,38 +266,6 @@ impl AssignmentDefinitionKind {
    }
}

#[derive(Clone, Debug)]
pub struct WithItemDefinitionKind {
    node: AstNodeRef<ast::WithItem>,
    target: AstNodeRef<ast::ExprName>,
}

impl WithItemDefinitionKind {
    pub(crate) fn node(&self) -> &ast::WithItem {
        self.node.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
    iterable: AstNodeRef<ast::Expr>,
    target: AstNodeRef<ast::ExprName>,
}

impl ForStmtDefinitionKind {
    pub(crate) fn iterable(&self) -> &ast::Expr {
        self.iterable.node()
    }

    pub(crate) fn target(&self) -> &ast::ExprName {
        self.target.node()
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct DefinitionNodeKey(NodeKey);

@@ -445,9 +311,9 @@ impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
    }
}

impl From<&ast::StmtFor> for DefinitionNodeKey {
    fn from(value: &ast::StmtFor) -> Self {
        Self(NodeKey::from_node(value))
impl From<&ast::Comprehension> for DefinitionNodeKey {
    fn from(node: &ast::Comprehension) -> Self {
        Self(NodeKey::from_node(node))
    }
}

@@ -462,9 +328,3 @@ impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
        Self(NodeKey::from_node(node))
    }
}

impl From<&ast::Identifier> for DefinitionNodeKey {
    fn from(identifier: &ast::Identifier) -> Self {
        Self(NodeKey::from_node(identifier))
    }
}

@@ -149,10 +149,6 @@ impl FileScopeId {
        FileScopeId::from_u32(0)
    }

    pub fn is_global(self) -> bool {
        self == FileScopeId::global()
    }

    pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
        let index = semantic_index(db, file);
        index.scope_ids_by_scope[self]

@@ -184,9 +184,14 @@ mod tests {

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: SystemPathBuf::from("/src"),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        )?;


@@ -1,9 +1,8 @@

use ruff_db::files::File;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;

use crate::builtins::builtins_scope;
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{
    global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
@@ -15,8 +14,7 @@ use crate::{Db, FxOrderSet};
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
pub(crate) use self::infer::{
    infer_deferred_types, infer_definition_types, infer_expression_types, infer_scope_types,
    TypeInference,
    infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
};

mod builder;
@@ -90,24 +88,6 @@ pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -
    inference.definition_ty(definition)
}

/// Infer the type of a (possibly deferred) sub-expression of a [`Definition`].
///
/// ## Panics
/// If the given expression is not a sub-expression of the given [`Definition`].
pub(crate) fn definition_expression_ty<'db>(
    db: &'db dyn Db,
    definition: Definition<'db>,
    expression: &ast::Expr,
) -> Type<'db> {
    let expr_id = expression.scoped_ast_id(db, definition.scope(db));
    let inference = infer_definition_types(db, definition);
    if let Some(ty) = inference.try_expression_ty(expr_id) {
        ty
    } else {
        infer_deferred_types(db, definition).expression_ty(expr_id)
    }
}

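The try-then-fall-back shape of `definition_expression_ty` is a plain two-level lookup. A standalone sketch of the same control flow, with `HashMap`s standing in for the salsa queries (an assumption for illustration only):

```rust
use std::collections::HashMap;

// Toy stand-ins for the two inference results; in the real code these come
// from `infer_definition_types` and `infer_deferred_types`.
fn expression_ty(
    primary: &HashMap<u32, &'static str>,
    deferred: &HashMap<u32, &'static str>,
    expr_id: u32,
) -> &'static str {
    // Prefer the eagerly inferred type; fall back to the deferred pass only
    // when the first pass skipped this expression.
    if let Some(ty) = primary.get(&expr_id).copied() {
        ty
    } else {
        deferred
            .get(&expr_id)
            .copied()
            .expect("expression must be a sub-expression of the definition")
    }
}

fn main() {
    let primary = HashMap::from([(0, "int")]);
    let deferred = HashMap::from([(1, "str")]);
    assert_eq!(expression_ty(&primary, &deferred, 0), "int");
    assert_eq!(expression_ty(&primary, &deferred, 1), "str");
}
```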
/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
///
/// Will return a union if there is more than one definition, or at least one plus an unbound
@@ -152,9 +132,9 @@ pub(crate) fn definitions_ty<'db>(
    );
    let mut all_types = unbound_ty.into_iter().chain(def_types);

    let first = all_types
        .next()
        .expect("definitions_ty should never be called with zero definitions and no unbound_ty.");
    let Some(first) = all_types.next() else {
        panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
    };

    if let Some(second) = all_types.next() {
        let mut builder = UnionBuilder::new(db);
@@ -171,7 +151,7 @@ pub(crate) fn definitions_ty<'db>(
}

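The zero/one/many split in `definitions_ty` is the usual pattern for folding an iterator into a union lazily: a builder is only allocated once a second element shows up. A toy model of that shape (hypothetical `Ty` type, not the crate's):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Int(i64),
    Union(Vec<Ty>),
}

fn union_of(mut types: impl Iterator<Item = Ty>) -> Option<Ty> {
    // Zero definitions and no unbound type is a caller bug in the real code
    // (it panics); here we surface it as `None`.
    let first = types.next()?;
    match types.next() {
        // A single type needs no union at all.
        None => Some(first),
        Some(second) => {
            // Only pay for the collection once we know a union is needed.
            let mut elements = vec![first, second];
            elements.extend(types);
            Some(Ty::Union(elements))
        }
    }
}

fn main() {
    assert_eq!(union_of([Ty::Int(1)].into_iter()), Some(Ty::Int(1)));
    assert_eq!(
        union_of([Ty::Int(1), Ty::Int(2)].into_iter()),
        Some(Ty::Union(vec![Ty::Int(1), Ty::Int(2)]))
    );
}
```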
/// Unique ID for a type.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum Type<'db> {
    /// the dynamic type: a statically-unknown set of values
    Any,
@@ -201,13 +181,6 @@ pub enum Type<'db> {
    IntLiteral(i64),
    /// A boolean literal, either `True` or `False`.
    BooleanLiteral(bool),
    /// A string literal
    StringLiteral(StringLiteralType<'db>),
    /// A string known to originate only from literal values, but whose value is not known (unlike
    /// `StringLiteral` above).
    LiteralString,
    /// A bytes literal
    BytesLiteral(BytesLiteralType<'db>),
    // TODO protocols, callable types, overloads, generics, type vars
}

@@ -216,82 +189,14 @@ impl<'db> Type<'db> {
        matches!(self, Type::Unbound)
    }

    pub const fn is_unknown(&self) -> bool {
        matches!(self, Type::Unknown)
    }

    pub const fn is_never(&self) -> bool {
        matches!(self, Type::Never)
    }

    pub const fn into_class_type(self) -> Option<ClassType<'db>> {
        match self {
            Type::Class(class_type) => Some(class_type),
            _ => None,
        }
    }

    pub fn expect_class(self) -> ClassType<'db> {
        self.into_class_type()
            .expect("Expected a Type::Class variant")
    }

    pub const fn into_module_type(self) -> Option<File> {
        match self {
            Type::Module(file) => Some(file),
            _ => None,
        }
    }

    pub fn expect_module(self) -> File {
        self.into_module_type()
            .expect("Expected a Type::Module variant")
    }

    pub const fn into_union_type(self) -> Option<UnionType<'db>> {
        match self {
            Type::Union(union_type) => Some(union_type),
            _ => None,
        }
    }

    pub fn expect_union(self) -> UnionType<'db> {
        self.into_union_type()
            .expect("Expected a Type::Union variant")
    }

    pub const fn into_intersection_type(self) -> Option<IntersectionType<'db>> {
        match self {
            Type::Intersection(intersection_type) => Some(intersection_type),
            _ => None,
        }
    }

    pub fn expect_intersection(self) -> IntersectionType<'db> {
        self.into_intersection_type()
            .expect("Expected a Type::Intersection variant")
    }

    pub const fn into_function_type(self) -> Option<FunctionType<'db>> {
        match self {
            Type::Function(function_type) => Some(function_type),
            _ => None,
        }
    }

    pub fn expect_function(self) -> FunctionType<'db> {
        self.into_function_type()
            .expect("Expected a Type::Function variant")
    }

    pub const fn into_int_literal_type(self) -> Option<i64> {
        match self {
            Type::IntLiteral(value) => Some(value),
            _ => None,
        }
    }

    pub fn expect_int_literal(self) -> i64 {
        self.into_int_literal_type()
            .expect("Expected a Type::IntLiteral variant")
    }

    pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
        match self {
            Type::Unbound => true,
@@ -331,7 +236,7 @@ impl<'db> Type<'db> {
    /// us to explicitly consider whether to handle an error or propagate
    /// it up the call stack.
    #[must_use]
    pub fn member(&self, db: &'db dyn Db, name: &ast::name::Name) -> Type<'db> {
    pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
        match self {
            Type::Any => Type::Any,
            Type::Never => {
@@ -371,52 +276,11 @@ impl<'db> Type<'db> {
                Type::Unknown
            }
            Type::BooleanLiteral(_) => Type::Unknown,
            Type::StringLiteral(_) => {
                // TODO defer to `typing.LiteralString`/`builtins.str` methods
                // from typeshed's stubs
                Type::Unknown
            }
            Type::LiteralString => {
                // TODO defer to `typing.LiteralString`/`builtins.str` methods
                // from typeshed's stubs
                Type::Unknown
            }
            Type::BytesLiteral(_) => {
                // TODO defer to Type::Instance(<bytes from typeshed>).member
                Type::Unknown
            }
        }
    }

    /// Return the type resulting from calling an object of this type.
    ///
    /// Returns `None` if `self` is not a callable type.
    #[must_use]
    pub fn call(&self, db: &'db dyn Db) -> Option<Type<'db>> {
        match self {
            Type::Function(function_type) => Some(function_type.return_type(db)),

            // TODO annotated return type on `__new__` or metaclass `__call__`
            Type::Class(class) => Some(Type::Instance(*class)),

            // TODO: handle classes which implement the Callable protocol
            Type::Instance(_instance_ty) => Some(Type::Unknown),

            // `Any` is callable, and its return type is also `Any`.
            Type::Any => Some(Type::Any),

            Type::Unknown => Some(Type::Unknown),

            // TODO: union and intersection types, if they reduce to `Callable`
            Type::Union(_) => Some(Type::Unknown),
            Type::Intersection(_) => Some(Type::Unknown),

            _ => None,
        }
    }

    #[must_use]
    pub fn to_instance(&self) -> Type<'db> {
    pub fn instance(&self) -> Type<'db> {
        match self {
            Type::Any => Type::Any,
            Type::Unknown => Type::Unknown,
@@ -429,10 +293,7 @@ impl<'db> Type<'db> {
#[salsa::interned]
pub struct FunctionType<'db> {
    /// name of the function at definition
    #[return_ref]
    pub name: ast::name::Name,

    definition: Definition<'db>,
    pub name: Name,

    /// types of all decorators on this function
    decorators: Vec<Type<'db>>,
@@ -442,69 +303,24 @@ impl<'db> FunctionType<'db> {
    pub fn has_decorator(self, db: &dyn Db, decorator: Type<'_>) -> bool {
        self.decorators(db).contains(&decorator)
    }

    /// inferred return type for this function
    pub fn return_type(&self, db: &'db dyn Db) -> Type<'db> {
        let definition = self.definition(db);
        let DefinitionKind::Function(function_stmt_node) = definition.node(db) else {
            panic!("Function type definition must have `DefinitionKind::Function`")
        };

        // TODO if a function `bar` is decorated by `foo`,
        // where `foo` is annotated as returning a type `X` that is a subtype of `Callable`,
        // we need to infer the return type from `X`'s return annotation
        // rather than from `bar`'s return annotation
        // in order to determine the type that `bar` returns
        if !function_stmt_node.decorator_list.is_empty() {
            return Type::Unknown;
        }

        function_stmt_node
            .returns
            .as_ref()
            .map(|returns| {
                if function_stmt_node.is_async {
                    // TODO: generic `types.CoroutineType`!
                    Type::Unknown
                } else {
                    definition_expression_ty(db, definition, returns.as_ref())
                }
            })
            .unwrap_or(Type::Unknown)
    }
}

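The decorator guard in `return_type` reduces to: any decorator makes the return type unknown; otherwise use the annotation when present (and punt on `async`). A condensed, self-contained restatement with toy types (not the crate's):

```rust
#[derive(Debug, PartialEq)]
enum Ty {
    Unknown,
    Named(&'static str),
}

fn return_type(decorators: &[&str], is_async: bool, annotation: Option<&'static str>) -> Ty {
    // Any decorator may replace the function wholesale, so give up.
    if !decorators.is_empty() {
        return Ty::Unknown;
    }
    match annotation {
        // An async function really returns a coroutine; modelled as Unknown here.
        Some(_) if is_async => Ty::Unknown,
        Some(name) => Ty::Named(name),
        None => Ty::Unknown,
    }
}

fn main() {
    assert_eq!(return_type(&[], false, Some("int")), Ty::Named("int"));
    assert_eq!(return_type(&["cache"], false, Some("int")), Ty::Unknown);
    assert_eq!(return_type(&[], true, Some("int")), Ty::Unknown);
}
```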
#[salsa::interned]
pub struct ClassType<'db> {
    /// Name of the class at definition
    #[return_ref]
    pub name: ast::name::Name,
    pub name: Name,

    definition: Definition<'db>,
    /// Types of all class bases
    bases: Vec<Type<'db>>,

    body_scope: ScopeId<'db>,
}

impl<'db> ClassType<'db> {
    /// Return an iterator over the types of this class's bases.
    ///
    /// # Panics:
    /// If `definition` is not a `DefinitionKind::Class`.
    pub fn bases(&self, db: &'db dyn Db) -> impl Iterator<Item = Type<'db>> {
        let definition = self.definition(db);
        let DefinitionKind::Class(class_stmt_node) = definition.node(db) else {
            panic!("Class type definition must have DefinitionKind::Class");
        };
        class_stmt_node
            .bases()
            .iter()
            .map(move |base_expr| definition_expression_ty(db, definition, base_expr))
    }

    /// Returns the class member of this class named `name`.
    ///
    /// The member resolves to a member of the class itself or any of its bases.
    pub fn class_member(self, db: &'db dyn Db, name: &ast::name::Name) -> Type<'db> {
    pub fn class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
        let member = self.own_class_member(db, name);
        if !member.is_unbound() {
            return member;
@@ -514,12 +330,12 @@ impl<'db> ClassType<'db> {
    }

    /// Returns the inferred type of the class member named `name`.
    pub fn own_class_member(self, db: &'db dyn Db, name: &ast::name::Name) -> Type<'db> {
    pub fn own_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
        let scope = self.body_scope(db);
        symbol_ty_by_name(db, scope, name)
    }

    pub fn inherited_class_member(self, db: &'db dyn Db, name: &ast::name::Name) -> Type<'db> {
    pub fn inherited_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
        for base in self.bases(db) {
            let member = base.member(db, name);
            if !member.is_unbound() {
@@ -534,7 +350,6 @@ impl<'db> ClassType<'db> {
#[salsa::interned]
pub struct UnionType<'db> {
    /// The union type includes values in any of these types.
    #[return_ref]
    elements: FxOrderSet<Type<'db>>,
}

@@ -547,7 +362,6 @@ impl<'db> UnionType<'db> {
#[salsa::interned]
pub struct IntersectionType<'db> {
    /// The intersection type includes only values in all of these types.
    #[return_ref]
    positive: FxOrderSet<Type<'db>>,

    /// The intersection type does not include any value in any of these types.
@@ -555,22 +369,9 @@ pub struct IntersectionType<'db> {
    /// Negation types aren't expressible in annotations, and are most likely to arise from type
    /// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
    /// directly in intersections rather than as a separate type.
    #[return_ref]
    negative: FxOrderSet<Type<'db>>,
}

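A quick way to see why the `negative` set earns its keep: narrowing like `if not isinstance(x, int)` intersects a type with "not int". A toy membership check over positive and negative parts, with predicate functions standing in for types (illustrative only):

```rust
// A value inhabits the intersection iff it inhabits every positive type
// and none of the negative types.
fn in_intersection(
    value: i64,
    positive: &[fn(i64) -> bool],
    negative: &[fn(i64) -> bool],
) -> bool {
    positive.iter().all(|ty| ty(value)) && !negative.iter().any(|ty| ty(value))
}

fn main() {
    let positive_int: fn(i64) -> bool = |v| v > 0;
    let exactly_seven: fn(i64) -> bool = |v| v == 7;
    // Roughly "positive int & ~Literal[7]", the shape narrowing produces.
    assert!(in_intersection(3, &[positive_int], &[exactly_seven]));
    assert!(!in_intersection(7, &[positive_int], &[exactly_seven]));
}
```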
#[salsa::interned]
pub struct StringLiteralType<'db> {
    #[return_ref]
    value: Box<str>,
}

#[salsa::interned]
pub struct BytesLiteralType<'db> {
    #[return_ref]
    value: Box<[u8]>,
}

#[cfg(test)]
mod tests {
    use anyhow::Context;
@@ -591,9 +392,14 @@ mod tests {

        Program::from_settings(
            &db,
            &ProgramSettings {
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
                search_paths: SearchPathSettings {
                    extra_paths: Vec::new(),
                    src_root: SystemPathBuf::from("/src"),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        )
        .expect("Valid search path settings");
@@ -619,7 +425,7 @@ mod tests {
        let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;

        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Cannot resolve import 'bar'."]);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);

        Ok(())
    }
@@ -632,7 +438,7 @@ mod tests {
        .unwrap();
        let foo = system_path_to_file(&db, "src/foo.py").unwrap();
        let diagnostics = super::check_types(&db, foo);
        assert_diagnostic_messages(&diagnostics, &["Cannot resolve import 'bar'."]);
        assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
    }

    #[test]
@@ -644,9 +450,15 @@ mod tests {

        let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_diagnostic_messages(&b_file_diagnostics, &["Module 'a' has no member 'thing'"]);
        assert_diagnostic_messages(
            &b_file_diagnostics,
            &["Could not resolve import of 'thing' from 'a'"],
        );
    }

    #[ignore = "\
    A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
    despite the symbol existing in the symbol table for `a.py`"]
    #[test]
    fn resolved_import_of_symbol_from_unresolved_import() {
        let mut db = setup_db();
@@ -659,7 +471,10 @@ mod tests {

        let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
        let a_file_diagnostics = super::check_types(&db, a_file);
        assert_diagnostic_messages(&a_file_diagnostics, &["Cannot resolve import 'foo'."]);
        assert_diagnostic_messages(
            &a_file_diagnostics,
            &["Import 'foo' could not be resolved."],
        );

        // Importing the unresolved import into a second first-party file should not trigger
        // an additional "unresolved import" violation
@@ -667,25 +482,4 @@ mod tests {
        let b_file_diagnostics = super::check_types(&db, b_file);
        assert_eq!(&*b_file_diagnostics, &[]);
    }

    #[test]
    fn invalid_callable() {
        let mut db = setup_db();

        db.write_dedented(
            "src/a.py",
            "
            nonsense = 123
            x = nonsense()
            ",
        )
        .unwrap();

        let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
        let a_file_diagnostics = super::check_types(&db, a_file);
        assert_diagnostic_messages(
            &a_file_diagnostics,
            &["Object of type 'Literal[123]' is not callable"],
        );
    }
}

@@ -25,12 +25,8 @@
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//!   eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].

use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use ordermap::set::MutableValues;

use super::builtins_symbol_ty_by_name;

pub(crate) struct UnionBuilder<'db> {
    elements: FxOrderSet<Type<'db>>,
@@ -49,7 +45,7 @@ impl<'db> UnionBuilder<'db> {
    pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
        match ty {
            Type::Union(union) => {
                self.elements.extend(union.elements(self.db));
                self.elements.extend(&union.elements(self.db));
            }
            Type::Never => {}
            _ => {
@@ -60,36 +56,11 @@ impl<'db> UnionBuilder<'db> {
        self
    }

    /// Performs the following normalizations:
    /// - Replaces `Literal[True,False]` with `bool`.
    /// - TODO For enums `E` with members `X1`,...,`Xn`, replaces
    ///   `Literal[E.X1,...,E.Xn]` with `E`.
    fn simplify(&mut self) {
        if let Some(true_index) = self.elements.get_index_of(&Type::BooleanLiteral(true)) {
            if self.elements.contains(&Type::BooleanLiteral(false)) {
                *self.elements.get_index_mut2(true_index).unwrap() =
                    builtins_symbol_ty_by_name(self.db, "bool");
                self.elements.remove(&Type::BooleanLiteral(false));
            }
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
    pub(crate) fn build(self) -> Type<'db> {
        match self.elements.len() {
            0 => Type::Never,
            1 => self.elements[0],
            _ => {
                self.simplify();

                match self.elements.len() {
                    0 => Type::Never,
                    1 => self.elements[0],
                    _ => {
                        self.elements.shrink_to_fit();
                        Type::Union(UnionType::new(self.db, self.elements))
                    }
                }
            }
            _ => Type::Union(UnionType::new(self.db, self.elements)),
        }
    }
}
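The `simplify` step keeps the union's element order stable while collapsing `Literal[True] | Literal[False]` into `bool` in place. A standalone sketch of the same in-place rewrite over a plain `Vec` (the real code uses an `FxOrderSet`):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Bool, // stands in for builtins.bool
    BooleanLiteral(bool),
    Int(i64),
}

fn simplify(elements: &mut Vec<Ty>) {
    // Replace `Literal[True]` with `bool` in place (keeping its position),
    // then drop `Literal[False]` entirely.
    if let Some(true_index) = elements.iter().position(|t| *t == Ty::BooleanLiteral(true)) {
        if elements.contains(&Ty::BooleanLiteral(false)) {
            elements[true_index] = Ty::Bool;
            elements.retain(|t| *t != Ty::BooleanLiteral(false));
        }
    }
}

fn main() {
    let mut elements = vec![Ty::BooleanLiteral(true), Ty::Int(17), Ty::BooleanLiteral(false)];
    simplify(&mut elements);
    // `bool` takes `Literal[True]`'s slot; `Literal[17]` keeps its position.
    assert_eq!(elements, vec![Ty::Bool, Ty::Int(17)]);
}
```

This mirrors the `build_union_bool` test further down, which expects `bool | Literal[17]`.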
@@ -276,48 +247,25 @@ impl<'db> InnerIntersectionBuilder<'db> {
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use crate::db::tests::TestDb;
    use crate::program::{Program, SearchPathSettings};
    use crate::python_version::PythonVersion;
    use crate::types::builtins_symbol_ty_by_name;
    use crate::ProgramSettings;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    fn setup_db() -> TestDb {
        TestDb::new()
    }

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().copied().collect()
            self.elements(db).into_iter().collect()
        }
    }

    fn setup_db() -> TestDb {
        let db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let union = UnionBuilder::new(&db)
            .add(t0)
            .add(t1)
            .build()
            .expect_union();
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }
@@ -348,34 +296,6 @@ mod tests {
        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_bool() {
        let db = setup_db();
        let bool_ty = builtins_symbol_ty_by_name(&db, "bool");

        let t0 = Type::BooleanLiteral(true);
        let t1 = Type::BooleanLiteral(true);
        let t2 = Type::BooleanLiteral(false);
        let t3 = Type::IntLiteral(17);

        let union = UnionBuilder::new(&db)
            .add(t0)
            .add(t1)
            .add(t3)
            .build()
            .expect_union();
        assert_eq!(union.elements_vec(&db), &[t0, t3]);
        let union = UnionBuilder::new(&db)
            .add(t0)
            .add(t1)
            .add(t2)
            .add(t3)
            .build()
            .expect_union();

        assert_eq!(union.elements_vec(&db), &[bool_ty, t3]);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
@@ -383,22 +303,20 @@ mod tests {
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let union = UnionBuilder::new(&db)
            .add(u1)
            .add(t2)
            .build()
            .expect_union();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().copied().collect()
            self.positive(db).into_iter().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().copied().collect()
            self.negative(db).into_iter().collect()
        }
    }

@@ -407,14 +325,16 @@ mod tests {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let intersection = IntersectionBuilder::new(&db)
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
            .expect_intersection();
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(intersection.pos_vec(&db), &[ta]);
        assert_eq!(intersection.neg_vec(&db), &[t0]);
        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
    }

    #[test]
@@ -427,14 +347,16 @@ mod tests {
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let intersection = IntersectionBuilder::new(&db)
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
            .expect_intersection();
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
        assert_eq!(intersection.neg_vec(&db), &[t1]);
        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
    }

    #[test]
@@ -447,14 +369,16 @@ mod tests {
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let intersection = IntersectionBuilder::new(&db)
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
            .expect_intersection();
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
        assert_eq!(intersection.neg_vec(&db), &[ta]);
        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
    }

    #[test]
@@ -465,11 +389,13 @@ mod tests {
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

        let union = IntersectionBuilder::new(&db)
        let Type::Union(union) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
            .expect_union();
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };

@@ -2,20 +2,13 @@

use std::fmt::{Display, Formatter};

use ruff_python_ast::str::Quote;
use ruff_python_literal::escape::AsciiEscape;

use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderMap};
use crate::Db;

impl<'db> Type<'db> {
    pub fn display(&'db self, db: &'db dyn Db) -> DisplayType<'db> {
        DisplayType { ty: self, db }
    }

    fn representation(&'db self, db: &'db dyn Db) -> DisplayRepresentation<'db> {
        DisplayRepresentation { db, ty: self }
    }
}

#[derive(Copy, Clone)]
@@ -25,39 +18,6 @@ pub struct DisplayType<'db> {
}

impl Display for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let representation = self.ty.representation(self.db);
        if matches!(
            self.ty,
            Type::IntLiteral(_)
                | Type::BooleanLiteral(_)
                | Type::StringLiteral(_)
                | Type::BytesLiteral(_)
                | Type::Class(_)
                | Type::Function(_)
        ) {
            write!(f, "Literal[{representation}]",)
        } else {
            representation.fmt(f)
        }
    }
}

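The split between `DisplayType` and the inner representation is the standard "wrap or pass through" formatting trick: the wrapper decides whether to print `Literal[...]`, the inner type prints the bare value. A self-contained miniature of the same idea:

```rust
use std::fmt::{self, Display, Formatter};

enum Ty {
    Any,
    IntLiteral(i64),
}

struct DisplayTy<'a>(&'a Ty);

impl Display for DisplayTy<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match self.0 {
            // Literal types get wrapped; everything else prints bare.
            Ty::IntLiteral(n) => write!(f, "Literal[{n}]"),
            Ty::Any => f.write_str("Any"),
        }
    }
}

fn main() {
    assert_eq!(DisplayTy(&Ty::IntLiteral(3)).to_string(), "Literal[3]");
    assert_eq!(DisplayTy(&Ty::Any).to_string(), "Any");
}
```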
impl std::fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

/// Writes the string representation of a type, which is the value displayed either as
/// `Literal[<repr>]` or `Literal[<repr1>, <repr2>]` for literal types or as `<repr>` for
/// non literals
struct DisplayRepresentation<'db> {
    ty: &'db Type<'db>,
    db: &'db dyn Db,
}

impl std::fmt::Display for DisplayRepresentation<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self.ty {
            Type::Any => f.write_str("Any"),
@@ -69,27 +29,25 @@ impl std::fmt::Display for DisplayRepresentation<'_> {
                write!(f, "<module '{:?}'>", file.path(self.db))
            }
            // TODO functions and classes should display using a fully qualified name
            Type::Class(class) => f.write_str(class.name(self.db)),
            Type::Instance(class) => f.write_str(class.name(self.db)),
            Type::Function(function) => f.write_str(function.name(self.db)),
            Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
            Type::Instance(class) => f.write_str(&class.name(self.db)),
            Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
            Type::Union(union) => union.display(self.db).fmt(f),
            Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
            Type::IntLiteral(n) => write!(f, "{n}"),
            Type::BooleanLiteral(boolean) => f.write_str(if *boolean { "True" } else { "False" }),
            Type::StringLiteral(string) => {
                write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
            }
            Type::LiteralString => f.write_str("LiteralString"),
            Type::BytesLiteral(bytes) => {
                let escape =
                    AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);

                escape.bytes_repr().write(f)
            Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
            Type::BooleanLiteral(boolean) => {
                write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
            }
        }
    }
}

impl std::fmt::Debug for DisplayType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl<'db> UnionType<'db> {
    fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
        DisplayUnionType { db, ty: self }
@@ -103,58 +61,45 @@ struct DisplayUnionType<'db> {

impl Display for DisplayUnionType<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let elements = self.ty.elements(self.db);
        let union = self.ty;

        // Group literal types by kind.
        let mut grouped_literals = FxOrderMap::default();

        for element in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
                grouped_literals
                    .entry(literal_kind)
                    .or_insert_with(Vec::new)
                    .push(*element);
            }
        }
        let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
            .elements(self.db)
            .iter()
            .copied()
            .partition(|ty| matches!(ty, Type::IntLiteral(_)));

        let mut first = true;

        // Print all types, but write all literals together (while preserving their position).
        for ty in elements {
            if let Ok(literal_kind) = LiteralTypeKind::try_from(*ty) {
                let Some(mut literals) = grouped_literals.remove(&literal_kind) else {
                    continue;
                };

                if !first {
                    f.write_str(" | ")?;
                };

                f.write_str("Literal[")?;

                if literal_kind == LiteralTypeKind::IntLiteral {
                    literals.sort_unstable_by_key(|ty| ty.expect_int_literal());
                }

                for (i, literal_ty) in literals.iter().enumerate() {
                    if i > 0 {
                        f.write_str(", ")?;
        if !int_literals.is_empty() {
            f.write_str("Literal[")?;
            let mut nums: Vec<_> = int_literals
                .into_iter()
                .filter_map(|ty| {
                    if let Type::IntLiteral(n) = ty {
                        Some(n)
                    } else {
                        None
                    }
                    literal_ty.representation(self.db).fmt(f)?;
                }
                f.write_str("]")?;
            } else {
                })
                .collect();
            nums.sort_unstable();
            for num in nums {
                if !first {
                    f.write_str(" | ")?;
                };

                ty.display(self.db).fmt(f)?;
                f.write_str(", ")?;
            }
                write!(f, "{num}")?;
                first = false;
            }

            first = false;
            f.write_str("]")?;
        }

        debug_assert!(grouped_literals.is_empty());
        for ty in other_types {
            if !first {
                f.write_str(" | ")?;
            };
            first = false;
            write!(f, "{}", ty.display(self.db))?;
        }

        Ok(())
    }
@@ -166,30 +111,6 @@ impl std::fmt::Debug for DisplayUnionType<'_> {
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
enum LiteralTypeKind {
    Class,
    Function,
    IntLiteral,
    StringLiteral,
    BytesLiteral,
}

impl TryFrom<Type<'_>> for LiteralTypeKind {
    type Error = ();

    fn try_from(value: Type<'_>) -> Result<Self, Self::Error> {
        match value {
            Type::Class(_) => Ok(Self::Class),
            Type::Function(_) => Ok(Self::Function),
            Type::IntLiteral(_) => Ok(Self::IntLiteral),
            Type::StringLiteral(_) => Ok(Self::StringLiteral),
            Type::BytesLiteral(_) => Ok(Self::BytesLiteral),
            _ => Err(()),
        }
    }
}

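Grouping union members by `LiteralTypeKind` before printing is what turns `Literal[-1] | Literal[0] | Literal[1]` into `Literal[-1, 0, 1]`. A standalone sketch of the grouping pass, using a `BTreeMap` where the real code uses an insertion-ordered map (so ordering here is by kind, a simplification):

```rust
use std::collections::BTreeMap;

#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
enum Kind {
    Int,
    Str,
}

fn group(literals: &[(Kind, &str)]) -> String {
    // Bucket each literal's printed form under its kind...
    let mut grouped: BTreeMap<Kind, Vec<&str>> = BTreeMap::new();
    for &(kind, repr) in literals {
        grouped.entry(kind).or_default().push(repr);
    }
    // ...then emit one `Literal[...]` per bucket. (The real display also
    // sorts int literals numerically before printing.)
    grouped
        .values()
        .map(|reprs| format!("Literal[{}]", reprs.join(", ")))
        .collect::<Vec<_>>()
        .join(" | ")
}

fn main() {
    let input = [(Kind::Int, "-1"), (Kind::Str, "\"A\""), (Kind::Int, "0"), (Kind::Int, "1")];
    assert_eq!(group(&input), "Literal[-1, 0, 1] | Literal[\"A\"]");
}
```

The `test_condense_literal_display_by_type` test below exercises exactly this condensed form.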
impl<'db> IntersectionType<'db> {
    fn display(&'db self, db: &'db dyn Db) -> DisplayIntersectionType<'db> {
        DisplayIntersectionType { db, ty: self }
@@ -229,91 +150,3 @@ impl std::fmt::Debug for DisplayIntersectionType<'_> {
        std::fmt::Display::fmt(self, f)
    }
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

    use crate::db::tests::TestDb;
    use crate::types::{
        global_symbol_ty_by_name, BytesLiteralType, StringLiteralType, Type, UnionBuilder,
    };
    use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};

    fn setup_db() -> TestDb {
        let db = TestDb::new();

        let src_root = SystemPathBuf::from("/src");
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            &ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings::new(src_root),
            },
        )
        .expect("Valid search path settings");

        db
    }

    #[test]
    fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
        let mut db = setup_db();

        db.write_dedented(
            "src/main.py",
            "
            def foo(x: int) -> int:
                return x + 1

            def bar(s: str) -> str:
                return s

            class A: ...
            class B: ...
            ",
        )?;
        let mod_file = system_path_to_file(&db, "src/main.py").expect("Expected file to exist.");

        let vec: Vec<Type<'_>> = vec![
            Type::Unknown,
            Type::IntLiteral(-1),
            global_symbol_ty_by_name(&db, mod_file, "A"),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("A"))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([0]))),
            Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([7]))),
            Type::IntLiteral(0),
            Type::IntLiteral(1),
            Type::StringLiteral(StringLiteralType::new(&db, Box::from("B"))),
            global_symbol_ty_by_name(&db, mod_file, "foo"),
            global_symbol_ty_by_name(&db, mod_file, "bar"),
            global_symbol_ty_by_name(&db, mod_file, "B"),
            Type::BooleanLiteral(true),
            Type::None,
        ];
        let builder = vec.iter().fold(UnionBuilder::new(&db), |builder, literal| {
            builder.add(*literal)
        });
        let union = builder.build().expect_union();
        let display = format!("{}", union.display(&db));
        assert_eq!(
            display,
            concat!(
                "Unknown | ",
                "Literal[-1, 0, 1] | ",
                "Literal[A, B] | ",
                "Literal[\"A\", \"B\"] | ",
                "Literal[b\"\\x00\", b\"\\x07\"] | ",
                "Literal[foo, bar] | ",
                "Literal[True] | ",
                "None"
            )
        );
        Ok(())
    }
}

File diff suppressed because it is too large
@@ -1 +1 @@
23d867efb2df6de5600f64656f1aa8a83e06109e
1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f

@@ -1,7 +1,7 @@
import sys
import typing_extensions
from typing import Any, ClassVar, Generic, Literal, TypedDict, overload
from typing_extensions import Self, Unpack
from typing_extensions import Unpack

PyCF_ONLY_AST: Literal[1024]
PyCF_TYPE_COMMENTS: Literal[4096]
@@ -34,9 +34,6 @@ class AST:
    if sys.version_info >= (3, 13):
        _field_types: ClassVar[dict[str, Any]]

    if sys.version_info >= (3, 14):
        def __replace__(self) -> Self: ...

class mod(AST): ...
class type_ignore(AST): ...

@@ -47,9 +44,6 @@ class TypeIgnore(type_ignore):
    tag: str
    def __init__(self, lineno: int, tag: str) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ...

class FunctionType(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("argtypes", "returns")
@@ -63,9 +57,6 @@ class FunctionType(mod):
    else:
        def __init__(self, argtypes: list[expr], returns: expr) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ...

class Module(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "type_ignores")
@@ -76,9 +67,6 @@ class Module(mod):
    else:
        def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ...

class Interactive(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body",)
@@ -88,18 +76,12 @@ class Interactive(mod):
    else:
        def __init__(self, body: list[stmt]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, body: list[stmt] = ...) -> Self: ...

class Expression(mod):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body",)
    body: expr
    def __init__(self, body: expr) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, body: expr = ...) -> Self: ...

class stmt(AST):
    lineno: int
    col_offset: int
@@ -107,9 +89,6 @@ class stmt(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...

class FunctionDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
@@ -173,19 +152,6 @@ class FunctionDef(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            name: _Identifier = ...,
            args: arguments = ...,
            body: list[stmt] = ...,
            decorator_list: list[expr] = ...,
            returns: expr | None = ...,
            type_comment: str | None = ...,
            type_params: list[type_param] = ...,
        ) -> Self: ...

class AsyncFunctionDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
@@ -249,19 +215,6 @@ class AsyncFunctionDef(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            name: _Identifier = ...,
            args: arguments = ...,
            body: list[stmt],
            decorator_list: list[expr],
            returns: expr | None,
            type_comment: str | None,
            type_params: list[type_param],
        ) -> Self: ...

class ClassDef(stmt):
    if sys.version_info >= (3, 12):
        __match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
@@ -307,28 +260,12 @@ class ClassDef(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            name: _Identifier,
            bases: list[expr],
            keywords: list[keyword],
            body: list[stmt],
            decorator_list: list[expr],
            type_params: list[type_param],
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class Return(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr | None
    def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Delete(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets",)
@@ -338,9 +275,6 @@ class Delete(stmt):
    else:
        def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Assign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("targets", "value", "type_comment")
@@ -361,11 +295,6 @@ class Assign(stmt):
            self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class AugAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "op", "value")
@@ -376,16 +305,6 @@ class AugAssign(stmt):
        self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: Name | Attribute | Subscript = ...,
            op: operator = ...,
            value: expr = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AnnAssign(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "annotation", "value", "simple")
@@ -413,17 +332,6 @@ class AnnAssign(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: Name | Attribute | Subscript = ...,
            annotation: expr = ...,
            value: expr | None = ...,
            simple: int = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class For(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -453,18 +361,6 @@ class For(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: expr = ...,
            iter: expr = ...,
            body: list[stmt] = ...,
            orelse: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AsyncFor(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -494,18 +390,6 @@ class AsyncFor(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            target: expr = ...,
            iter: expr = ...,
            body: list[stmt] = ...,
            orelse: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class While(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -519,9 +403,6 @@ class While(stmt):
    else:
        def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> Self: ...

class If(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -535,11 +416,6 @@ class If(stmt):
    else:
        def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class With(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -559,16 +435,6 @@ class With(stmt):
        self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class AsyncWith(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("items", "body", "type_comment")
@@ -588,16 +454,6 @@ class AsyncWith(stmt):
        self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            items: list[withitem] = ...,
            body: list[stmt] = ...,
            type_comment: str | None = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

class Raise(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("exc", "cause")
@@ -605,9 +461,6 @@ class Raise(stmt):
    cause: expr | None
    def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Try(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -634,17 +487,6 @@ class Try(stmt):
            **kwargs: Unpack[_Attributes],
        ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self,
            *,
            body: list[stmt] = ...,
            handlers: list[ExceptHandler] = ...,
            orelse: list[stmt] = ...,
            finalbody: list[stmt] = ...,
            **kwargs: Unpack[_Attributes],
        ) -> Self: ...

if sys.version_info >= (3, 11):
    class TryStar(stmt):
        __match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -671,17 +513,6 @@ if sys.version_info >= (3, 11):
                **kwargs: Unpack[_Attributes],
            ) -> None: ...

        if sys.version_info >= (3, 14):
            def __replace__(
                self,
                *,
                body: list[stmt] = ...,
                handlers: list[ExceptHandler] = ...,
                orelse: list[stmt] = ...,
                finalbody: list[stmt] = ...,
                **kwargs: Unpack[_Attributes],
            ) -> Self: ...

class Assert(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "msg")
@@ -689,9 +520,6 @@ class Assert(stmt):
    msg: expr | None
    def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, test: expr, msg: expr | None, **kwargs: Unpack[_Attributes]) -> Self: ...

class Import(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -701,9 +529,6 @@ class Import(stmt):
    else:
        def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ImportFrom(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("module", "names", "level")
@@ -725,11 +550,6 @@ class ImportFrom(stmt):
        self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes]
    ) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Global(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -739,9 +559,6 @@ class Global(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> Self: ...

class Nonlocal(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("names",)
@@ -751,18 +568,12 @@ class Nonlocal(stmt):
    else:
        def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Expr(stmt):
    if sys.version_info >= (3, 10):
        __match_args__ = ("value",)
    value: expr
    def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
@@ -774,9 +585,6 @@ class expr(AST):
    end_col_offset: int | None
    def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...

class BoolOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "values")
@@ -787,9 +595,6 @@ class BoolOp(expr):
    else:
        def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class BinOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("left", "op", "right")
@@ -798,11 +603,6 @@ class BinOp(expr):
    right: expr
    def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class UnaryOp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("op", "operand")
@@ -810,9 +610,6 @@ class UnaryOp(expr):
    operand: expr
    def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class Lambda(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("args", "body")
@@ -820,9 +617,6 @@ class Lambda(expr):
    body: expr
    def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class IfExp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("test", "body", "orelse")
@@ -831,11 +625,6 @@ class IfExp(expr):
    orelse: expr
    def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Dict(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("keys", "values")
@@ -846,11 +635,6 @@ class Dict(expr):
    else:
        def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class Set(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elts",)
@@ -860,9 +644,6 @@ class Set(expr):
    else:
        def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...

class ListComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -873,11 +654,6 @@ class ListComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class SetComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("elt", "generators")
@@ -888,11 +664,6 @@ class SetComp(expr):
    else:
        def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class DictComp(expr):
    if sys.version_info >= (3, 10):
        __match_args__ = ("key", "value", "generators")
@@ -906,11 +677,6 @@ class DictComp(expr):
    else:
        def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...

    if sys.version_info >= (3, 14):
        def __replace__(
            self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
        ) -> Self: ...

class GeneratorExp(expr):
    if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elt", "generators")
|
||||
@@ -921,38 +687,24 @@ class GeneratorExp(expr):
|
||||
else:
|
||||
def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class Await(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Yield(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr | None
|
||||
def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class YieldFrom(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Compare(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("left", "ops", "comparators")
|
||||
@@ -966,11 +718,6 @@ class Compare(expr):
|
||||
else:
|
||||
def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class Call(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("func", "args", "keywords")
|
||||
@@ -984,11 +731,6 @@ class Call(expr):
|
||||
else:
|
||||
def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class FormattedValue(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "conversion", "format_spec")
|
||||
@@ -997,11 +739,6 @@ class FormattedValue(expr):
|
||||
format_spec: expr | None
|
||||
def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class JoinedStr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("values",)
|
||||
@@ -1011,9 +748,6 @@ class JoinedStr(expr):
|
||||
else:
|
||||
def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Constant(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "kind")
|
||||
@@ -1026,9 +760,6 @@ class Constant(expr):
|
||||
|
||||
def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: Any = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class NamedExpr(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("target", "value")
|
||||
@@ -1036,9 +767,6 @@ class NamedExpr(expr):
|
||||
value: expr
|
||||
def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Attribute(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "attr", "ctx")
|
||||
@@ -1047,11 +775,6 @@ class Attribute(expr):
|
||||
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
|
||||
def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
_Slice: typing_extensions.TypeAlias = expr
|
||||
_SliceAttributes: typing_extensions.TypeAlias = _Attributes
|
||||
@@ -1071,16 +794,6 @@ class Slice(_Slice):
|
||||
self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
lower: expr | None = ...,
|
||||
upper: expr | None = ...,
|
||||
step: expr | None = ...,
|
||||
**kwargs: Unpack[_SliceAttributes],
|
||||
) -> Self: ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
class ExtSlice(slice):
|
||||
dims: list[slice]
|
||||
@@ -1098,11 +811,6 @@ class Subscript(expr):
|
||||
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
|
||||
def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, value: expr = ..., slice: _Slice = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class Starred(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("value", "ctx")
|
||||
@@ -1110,9 +818,6 @@ class Starred(expr):
|
||||
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
|
||||
def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Name(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("id", "ctx")
|
||||
@@ -1120,9 +825,6 @@ class Name(expr):
|
||||
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
|
||||
def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class List(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
@@ -1133,9 +835,6 @@ class List(expr):
|
||||
else:
|
||||
def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class Tuple(expr):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("elts", "ctx")
|
||||
@@ -1148,9 +847,6 @@ class Tuple(expr):
|
||||
else:
|
||||
def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class expr_context(AST): ...
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
@@ -1214,9 +910,6 @@ class comprehension(AST):
|
||||
else:
|
||||
def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ...
|
||||
|
||||
class excepthandler(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -1224,11 +917,6 @@ class excepthandler(AST):
|
||||
end_col_offset: int | None
|
||||
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ...
|
||||
) -> Self: ...
|
||||
|
||||
class ExceptHandler(excepthandler):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("type", "name", "body")
|
||||
@@ -1249,16 +937,6 @@ class ExceptHandler(excepthandler):
|
||||
self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
type: expr | None = ...,
|
||||
name: _Identifier | None = ...,
|
||||
body: list[stmt] = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class arguments(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
|
||||
@@ -1317,19 +995,6 @@ class arguments(AST):
|
||||
defaults: list[expr],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
posonlyargs: list[arg] = ...,
|
||||
args: list[arg] = ...,
|
||||
vararg: arg | None = ...,
|
||||
kwonlyargs: list[arg] = ...,
|
||||
kw_defaults: list[expr | None] = ...,
|
||||
kwarg: arg | None = ...,
|
||||
defaults: list[expr] = ...,
|
||||
) -> Self: ...
|
||||
|
||||
class arg(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -1344,16 +1009,6 @@ class arg(AST):
|
||||
self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
arg: _Identifier = ...,
|
||||
annotation: expr | None = ...,
|
||||
type_comment: str | None = ...,
|
||||
**kwargs: Unpack[_Attributes],
|
||||
) -> Self: ...
|
||||
|
||||
class keyword(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -1368,9 +1023,6 @@ class keyword(AST):
|
||||
@overload
|
||||
def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class alias(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -1382,9 +1034,6 @@ class alias(AST):
|
||||
asname: _Identifier | None
|
||||
def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
|
||||
|
||||
class withitem(AST):
|
||||
if sys.version_info >= (3, 10):
|
||||
__match_args__ = ("context_expr", "optional_vars")
|
||||
@@ -1392,9 +1041,6 @@ class withitem(AST):
|
||||
optional_vars: expr | None
|
||||
def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ...
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
class Match(stmt):
|
||||
__match_args__ = ("subject", "cases")
|
||||
@@ -1405,11 +1051,6 @@ if sys.version_info >= (3, 10):
|
||||
else:
|
||||
def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]
|
||||
) -> Self: ...
|
||||
|
||||
class pattern(AST):
|
||||
lineno: int
|
||||
col_offset: int
|
||||
@@ -1417,11 +1058,6 @@ if sys.version_info >= (3, 10):
|
||||
end_col_offset: int
|
||||
def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ...
|
||||
) -> Self: ...
|
||||
|
||||
# Without the alias, Pyright complains variables named pattern are recursively defined
|
||||
_Pattern: typing_extensions.TypeAlias = pattern
|
||||
|
||||
@@ -1438,25 +1074,16 @@ if sys.version_info >= (3, 10):
|
||||
@overload
|
||||
def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, pattern: _Pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ...
|
||||
|
||||
class MatchValue(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: expr
|
||||
def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
class MatchSingleton(pattern):
|
||||
__match_args__ = ("value",)
|
||||
value: Literal[True, False] | None
|
||||
def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
class MatchSequence(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
@@ -1465,17 +1092,11 @@ if sys.version_info >= (3, 10):
|
||||
else:
|
||||
def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
class MatchStar(pattern):
|
||||
__match_args__ = ("name",)
|
||||
name: _Identifier | None
|
||||
def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
class MatchMapping(pattern):
|
||||
__match_args__ = ("keys", "patterns", "rest")
|
||||
keys: list[expr]
|
||||
@@ -1498,16 +1119,6 @@ if sys.version_info >= (3, 10):
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
keys: list[expr] = ...,
|
||||
patterns: list[pattern] = ...,
|
||||
rest: _Identifier | None = ...,
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> Self: ...
|
||||
|
||||
class MatchClass(pattern):
|
||||
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
|
||||
cls: expr
|
||||
@@ -1533,17 +1144,6 @@ if sys.version_info >= (3, 10):
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
cls: expr = ...,
|
||||
patterns: list[pattern] = ...,
|
||||
kwd_attrs: list[_Identifier] = ...,
|
||||
kwd_patterns: list[pattern] = ...,
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> Self: ...
|
||||
|
||||
class MatchAs(pattern):
|
||||
__match_args__ = ("pattern", "name")
|
||||
pattern: _Pattern | None
|
||||
@@ -1552,11 +1152,6 @@ if sys.version_info >= (3, 10):
|
||||
self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]
|
||||
) -> Self: ...
|
||||
|
||||
class MatchOr(pattern):
|
||||
__match_args__ = ("patterns",)
|
||||
patterns: list[pattern]
|
||||
@@ -1565,9 +1160,6 @@ if sys.version_info >= (3, 10):
|
||||
else:
|
||||
def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
if sys.version_info >= (3, 12):
|
||||
class type_param(AST):
|
||||
lineno: int
|
||||
@@ -1576,9 +1168,6 @@ if sys.version_info >= (3, 12):
|
||||
end_col_offset: int
|
||||
def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ...
|
||||
|
||||
class TypeVar(type_param):
|
||||
if sys.version_info >= (3, 13):
|
||||
__match_args__ = ("name", "bound", "default_value")
|
||||
@@ -1598,16 +1187,6 @@ if sys.version_info >= (3, 12):
|
||||
else:
|
||||
def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: _Identifier = ...,
|
||||
bound: expr | None = ...,
|
||||
default_value: expr | None = ...,
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> Self: ...
|
||||
|
||||
class ParamSpec(type_param):
|
||||
if sys.version_info >= (3, 13):
|
||||
__match_args__ = ("name", "default_value")
|
||||
@@ -1622,11 +1201,6 @@ if sys.version_info >= (3, 12):
|
||||
else:
|
||||
def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
|
||||
) -> Self: ...
|
||||
|
||||
class TypeVarTuple(type_param):
|
||||
if sys.version_info >= (3, 13):
|
||||
__match_args__ = ("name", "default_value")
|
||||
@@ -1641,11 +1215,6 @@ if sys.version_info >= (3, 12):
|
||||
else:
|
||||
def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
|
||||
) -> Self: ...
|
||||
|
||||
class TypeAlias(stmt):
|
||||
__match_args__ = ("name", "type_params", "value")
|
||||
name: Name
|
||||
@@ -1664,13 +1233,3 @@ if sys.version_info >= (3, 12):
|
||||
def __init__(
|
||||
self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]]
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info >= (3, 14):
|
||||
def __replace__(
|
||||
self,
|
||||
*,
|
||||
name: Name = ...,
|
||||
type_params: list[type_param] = ...,
|
||||
value: expr = ...,
|
||||
**kwargs: Unpack[_Attributes[int]],
|
||||
) -> Self: ...
|
||||
|
||||
@@ -2,7 +2,7 @@ import sys
from _typeshed import SupportsGetItem
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
from typing_extensions import ParamSpec, TypeAlias, TypeIs, TypeVarTuple, Unpack
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack

_R = TypeVar("_R")
_T = TypeVar("_T")
@@ -145,7 +145,3 @@ if sys.version_info >= (3, 11):
def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...

def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...

if sys.version_info >= (3, 14):
def is_none(a: object, /) -> TypeIs[None]: ...
def is_not_none(a: _T | None, /) -> TypeIs[_T]: ...

@@ -28,9 +28,9 @@ S_IFDIR: Final = 0o040000

# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: Final[int]
S_IFPORT: Final[int]
S_IFWHT: Final[int]
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int

S_ISUID: Final = 0o4000
S_ISGID: Final = 0o2000
@@ -79,9 +79,9 @@ def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...

if sys.platform == "win32":
IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C
IO_REPARSE_TAG_MOUNT_POINT: Final = 0xA0000003
IO_REPARSE_TAG_APPEXECLINK: Final = 0x8000001B
IO_REPARSE_TAG_SYMLINK: int
IO_REPARSE_TAG_MOUNT_POINT: int
IO_REPARSE_TAG_APPEXECLINK: int

if sys.platform == "win32":
FILE_ATTRIBUTE_ARCHIVE: Final = 32

@@ -1,4 +1,3 @@
import signal
import sys
from _typeshed import structseq
from collections.abc import Callable
@@ -17,39 +16,16 @@ class LockType:
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release(self) -> None: ...
def locked(self) -> bool: ...
def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release_lock(self) -> None: ...
def locked_lock(self) -> bool: ...
def __enter__(self) -> bool: ...
def __exit__(
self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
) -> None: ...

if sys.version_info >= (3, 13):
@final
class _ThreadHandle:
ident: int

def join(self, timeout: float | None = None, /) -> None: ...
def is_done(self) -> bool: ...
def _set_done(self) -> None: ...

def start_joinable_thread(
function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True
) -> _ThreadHandle: ...
lock = LockType

@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...

if sys.version_info >= (3, 10):
def interrupt_main(signum: signal.Signals = ..., /) -> None: ...

else:
def interrupt_main() -> None: ...

def interrupt_main() -> None: ...
def exit() -> NoReturn: ...
def allocate_lock() -> LockType: ...
def get_ident() -> int: ...

@@ -106,8 +106,8 @@ EXCEPTION: Final = 8
READABLE: Final = 2
WRITABLE: Final = 4

TCL_VERSION: Final[str]
TK_VERSION: Final[str]
TCL_VERSION: str
TK_VERSION: str

@final
class TkttType:

@@ -33,8 +33,7 @@ from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet,
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import CellType, CodeType, TracebackType

# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping}
# are imported from collections.abc in builtins.pyi
# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi
from typing import ( # noqa: Y022
IO,
Any,
@@ -968,9 +967,7 @@ class tuple(Sequence[_T_co]):
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...

# Doesn't exist at runtime, but deleting this breaks mypy and pyright. See:
# https://github.com/python/typeshed/issues/7580
# https://github.com/python/mypy/issues/8240
# Doesn't exist at runtime, but deleting this breaks mypy. See #2999
@final
@type_check_only
class function:
@@ -1087,8 +1084,7 @@ class dict(MutableMapping[_KT, _VT]):
def keys(self) -> dict_keys[_KT, _VT]: ...
def values(self) -> dict_values[_KT, _VT]: ...
def items(self) -> dict_items[_KT, _VT]: ...
# Signature of `dict.fromkeys` should be kept identical to
# `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`
# Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`
# TODO: the true signature of `dict.fromkeys` is not expressible in the current type system.
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
@classmethod

@@ -1,11 +1,10 @@
from collections.abc import Callable
from typing import IO, Any, Final
from typing_extensions import LiteralString

__all__ = ["Cmd"]

PROMPT: Final = "(Cmd) "
IDENTCHARS: Final[LiteralString] # Too big to be `Literal`
IDENTCHARS: str # Too big to be `Literal`

class Cmd:
prompt: str

@@ -345,15 +345,15 @@ class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]):
# but they are not exposed anywhere)
# pyright doesn't have a specific error code for subclassing error!
@final
class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[_KT_co]: ...

@final
class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...

@final
class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[_VT_co]: ...

class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
@@ -475,8 +475,7 @@ class ChainMap(MutableMapping[_KT, _VT]):
def pop(self, key: _KT, default: _T) -> _VT | _T: ...
def copy(self) -> Self: ...
__copy__ = copy
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime,
# so the signature should be kept in line with `dict.fromkeys`.
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`.
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ...

@@ -1,13 +1,12 @@
import sys
from typing import Final

if sys.platform != "win32":
class _Method: ...
METHOD_CRYPT: Final[_Method]
METHOD_MD5: Final[_Method]
METHOD_SHA256: Final[_Method]
METHOD_SHA512: Final[_Method]
METHOD_BLOWFISH: Final[_Method]
METHOD_CRYPT: _Method
METHOD_MD5: _Method
METHOD_SHA256: _Method
METHOD_SHA512: _Method
METHOD_BLOWFISH: _Method
methods: list[_Method]
def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ...
def crypt(word: str, salt: str | _Method | None = None) -> str: ...

@@ -185,8 +185,3 @@ if sys.version_info >= (3, 12):
c_time_t: type[c_int32 | c_int64] # alias for one or the other at runtime

class py_object(_CanCastTo, _SimpleCData[_T]): ...

if sys.version_info >= (3, 14):
class c_float_complex(_SimpleCData[complex]): ...
class c_double_complex(_SimpleCData[complex]): ...
class c_longdouble_complex(_SimpleCData[complex]): ...

@@ -229,17 +229,18 @@ if sys.version_info >= (3, 9):
else:
class _InitVarMeta(type):
# Not used, instead `InitVar.__class_getitem__` is called.
# pyright (not unreasonably) thinks this is an invalid use of InitVar.
def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm]
# pyright ignore is needed because pyright (not unreasonably) thinks this
# is an invalid use of InitVar.
def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore

class InitVar(Generic[_T], metaclass=_InitVarMeta):
type: Type[_T]
def __init__(self, type: Type[_T]) -> None: ...
if sys.version_info >= (3, 9):
@overload
def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore[reportInvalidTypeForm]
def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore
@overload
def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm]
def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore

if sys.version_info >= (3, 12):
def make_dataclass(

@@ -265,12 +265,12 @@ class datetime(date):
def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = ...) -> Self: ...

@classmethod
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)")
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.UTC)")
def utcfromtimestamp(cls, t: float, /) -> Self: ...
@classmethod
def now(cls, tz: _TzInfo | None = None) -> Self: ...
@classmethod
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)")
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.UTC)")
def utcnow(cls) -> Self: ...
@classmethod
def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ...

@@ -30,7 +30,6 @@ _CommandT = TypeVar("_CommandT", bound=Command)
_Ts = TypeVarTuple("_Ts")

class Command:
dry_run: Literal[0, 1] # Exposed from __getattr__. Same as Distribution.dry_run
distribution: Distribution
# Any to work around variance issues
sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]

@@ -1,4 +1,4 @@
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar, Literal
from typing_extensions import TypeAlias

from ..cmd import Command
@@ -22,7 +22,7 @@ class SilentReporter(_Reporter):
) -> None: ...
def system_message(self, level, message, *children, **kwargs): ...

HAS_DOCUTILS: Final[bool]
HAS_DOCUTILS: bool

class check(Command):
description: str

@@ -1,12 +1,12 @@
from _typeshed import StrOrBytesPath
from collections.abc import Sequence
from re import Pattern
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar, Literal

from ..ccompiler import CCompiler
from ..cmd import Command

LANG_EXT: Final[dict[str, str]]
LANG_EXT: dict[str, str]

class config(Command):
description: str

@@ -1,16 +1,11 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar

from ..cmd import Command

HAS_USER_SITE: Final[bool]

SCHEME_KEYS: Final[tuple[Literal["purelib"], Literal["platlib"], Literal["headers"], Literal["scripts"], Literal["data"]]]
INSTALL_SCHEMES: Final[dict[str, dict[str, str]]]

if sys.version_info < (3, 10):
WINDOWS_SCHEME: Final[dict[str, str]]
HAS_USER_SITE: bool
SCHEME_KEYS: tuple[str, ...]
INSTALL_SCHEMES: dict[str, dict[Any, Any]]

class install(Command):
description: str

@@ -1,8 +1,8 @@
from typing import Any, ClassVar, Final
from typing import Any, ClassVar

from ..cmd import Command

PYTHON_SOURCE_EXTENSION: Final = ".py"
PYTHON_SOURCE_EXTENSION: str

class install_lib(Command):
description: str

@@ -3,9 +3,9 @@ from collections.abc import Mapping
from distutils.cmd import Command as Command
from distutils.dist import Distribution as Distribution
from distutils.extension import Extension as Extension
from typing import Any, Final, Literal
from typing import Any, Literal

USAGE: Final[str]
USAGE: str

def gen_usage(script_name: StrOrBytesPath) -> str: ...

@@ -1,20 +1,20 @@
from distutils.unixccompiler import UnixCCompiler
from distutils.version import LooseVersion
from re import Pattern
from typing import Final, Literal
from typing import Literal

def get_msvcr() -> list[str] | None: ...

class CygwinCCompiler(UnixCCompiler): ...
class Mingw32CCompiler(CygwinCCompiler): ...

CONFIG_H_OK: Final = "ok"
CONFIG_H_NOTOK: Final = "not ok"
CONFIG_H_UNCERTAIN: Final = "uncertain"
CONFIG_H_OK: str
CONFIG_H_NOTOK: str
CONFIG_H_UNCERTAIN: str

def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ...

RE_VERSION: Final[Pattern[bytes]]
RE_VERSION: Pattern[bytes]

def get_versions() -> tuple[LooseVersion | None, ...]: ...
def is_cygwingcc() -> bool: ...

@@ -1,3 +1 @@
from typing import Final

DEBUG: Final[str | None]
DEBUG: bool | None

@@ -88,9 +88,9 @@ class Distribution:
display_options: ClassVar[_OptionsList]
display_option_names: ClassVar[list[str]]
negative_opt: ClassVar[dict[str, str]]
verbose: Literal[0, 1]
dry_run: Literal[0, 1]
help: Literal[0, 1]
verbose: int
dry_run: int
help: int
command_packages: list[str] | None
script_name: str | None
script_args: list[str] | None

@@ -1,15 +1,15 @@
from collections.abc import Iterable, Mapping
from re import Pattern
from typing import Any, Final, overload
from typing import Any, overload
from typing_extensions import TypeAlias

_Option: TypeAlias = tuple[str, str | None, str]
_GR: TypeAlias = tuple[list[str], OptionDummy]

longopt_pat: Final = r"[a-zA-Z](?:[a-zA-Z0-9-]*)"
longopt_re: Final[Pattern[str]]
neg_alias_re: Final[Pattern[str]]
longopt_xlate: Final[dict[int, int]]
longopt_pat: str
longopt_re: Pattern[str]
neg_alias_re: Pattern[str]
longopt_xlate: dict[int, int]

class FancyGetopt:
def __init__(self, option_table: list[_Option] | None = None) -> None: ...
@@ -25,7 +25,7 @@ def fancy_getopt(
options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None
) -> list[str] | _GR: ...

WS_TRANS: Final[dict[int, str]]
WS_TRANS: dict[int, str]

def wrap_text(text: str, width: int) -> list[str]: ...
def translate_longopt(opt: str) -> str: ...

@@ -1,10 +1,10 @@
from typing import Any, Final
from typing import Any

DEBUG: Final = 1
INFO: Final = 2
WARN: Final = 3
ERROR: Final = 4
FATAL: Final = 5
DEBUG: int
INFO: int
WARN: int
ERROR: int
FATAL: int

class Log:
def __init__(self, threshold: int = 3) -> None: ...

@@ -1,15 +1,15 @@
import sys
from collections.abc import Mapping
from distutils.ccompiler import CCompiler
from typing import Final, Literal, overload
from typing import Literal, overload
from typing_extensions import deprecated

PREFIX: Final[str]
EXEC_PREFIX: Final[str]
BASE_PREFIX: Final[str]
BASE_EXEC_PREFIX: Final[str]
project_base: Final[str]
python_build: Final[bool]
PREFIX: str
EXEC_PREFIX: str
BASE_PREFIX: str
BASE_EXEC_PREFIX: str
project_base: str
python_build: bool

def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ...
@overload

@@ -50,8 +50,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]):
def get_payload(self, i: None = None, *, decode: Literal[True]) -> _EncodedPayloadType | Any: ...
@overload # not multipart, IDEM but w/o kwarg
def get_payload(self, i: None, decode: Literal[True]) -> _EncodedPayloadType | Any: ...
# If `charset=None` and payload supports both `encode` AND `decode`,
# then an invalid payload could be passed, but this is unlikely
# If `charset=None` and payload supports both `encode` AND `decode`, then an invalid payload could be passed, but this is unlikely
# Not[_SupportsEncodeToPayload]
@overload
def set_payload(
@@ -147,11 +146,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]):
class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]):
def __init__(self, policy: Policy | None = None) -> None: ...
def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT] | None: ...
def attach(self, payload: Self) -> None: ... # type: ignore[override]
# The attachments are created via type(self) in the attach method. It's theoretically
# possible to sneak other attachment types into a MIMEPart instance, but could cause
# unforeseen consequences.
def iter_attachments(self) -> Iterator[Self]: ...
def iter_attachments(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: ...
def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: ...
def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ...
def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ...

@@ -86,7 +86,7 @@ class FTP:
def makeport(self) -> socket: ...
def makepasv(self) -> tuple[str, int]: ...
def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ...
# In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers
# In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers.
def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ...
def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ...
def retrbinary(

@@ -1,12 +1,12 @@
from collections.abc import Callable, Iterator
from re import Pattern
from typing import Any, Final
from typing import Any

ESCAPE: Final[Pattern[str]]
ESCAPE_ASCII: Final[Pattern[str]]
HAS_UTF8: Final[Pattern[bytes]]
ESCAPE_DCT: Final[dict[str, str]]
INFINITY: Final[float]
ESCAPE: Pattern[str]
ESCAPE_ASCII: Pattern[str]
HAS_UTF8: Pattern[bytes]
ESCAPE_DCT: dict[str, str]
INFINITY: float

def py_encode_basestring(s: str) -> str: ... # undocumented
def py_encode_basestring_ascii(s: str) -> str: ... # undocumented

@@ -61,9 +61,6 @@ __all__ = [
if sys.version_info >= (3, 11):
__all__ += ["call"]

if sys.version_info >= (3, 14):
__all__ += ["is_none", "is_not_none"]

__lt__ = lt
__le__ = le
__eq__ = eq

@@ -365,9 +365,7 @@ class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, flo
if sys.version_info >= (3, 12) and sys.platform == "win32":
@property
@deprecated(
"""\
Use st_birthtime instead to retrieve the file creation time. \
In the future, this property will contain the last metadata change time."""
"Use st_birthtime instead to retrieve the file creation time. In the future, this property will contain the last metadata change time."
)
def st_ctime(self) -> float: ...
else:

@@ -67,6 +67,5 @@ class POP3_SSL(POP3):
timeout: float = ...,
context: ssl.SSLContext | None = None,
) -> None: ...
# "context" is actually the last argument,
# but that breaks LSP and it doesn't really matter because all the arguments are ignored
# "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored
def stls(self, context: Any = None, keyfile: Any = None, certfile: Any = None) -> NoReturn: ...

@@ -15,9 +15,9 @@ class ExpatError(Exception):
offset: int

error = ExpatError
XML_PARAM_ENTITY_PARSING_NEVER: Final = 0
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: Final = 1
XML_PARAM_ENTITY_PARSING_ALWAYS: Final = 2
XML_PARAM_ENTITY_PARSING_NEVER: int
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: int
XML_PARAM_ENTITY_PARSING_ALWAYS: int

_Model: TypeAlias = tuple[int, int, str | None, tuple[Any, ...]]

@@ -1,51 +1,49 @@
import sys
from typing import Final
from typing_extensions import LiteralString

codes: dict[str, int]
messages: dict[int, str]

XML_ERROR_ABORTED: Final[LiteralString]
XML_ERROR_ASYNC_ENTITY: Final[LiteralString]
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: Final[LiteralString]
XML_ERROR_BAD_CHAR_REF: Final[LiteralString]
XML_ERROR_BINARY_ENTITY_REF: Final[LiteralString]
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: Final[LiteralString]
XML_ERROR_DUPLICATE_ATTRIBUTE: Final[LiteralString]
XML_ERROR_ENTITY_DECLARED_IN_PE: Final[LiteralString]
XML_ERROR_EXTERNAL_ENTITY_HANDLING: Final[LiteralString]
XML_ERROR_FEATURE_REQUIRES_XML_DTD: Final[LiteralString]
XML_ERROR_FINISHED: Final[LiteralString]
XML_ERROR_INCOMPLETE_PE: Final[LiteralString]
XML_ERROR_INCORRECT_ENCODING: Final[LiteralString]
XML_ERROR_INVALID_TOKEN: Final[LiteralString]
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: Final[LiteralString]
XML_ERROR_MISPLACED_XML_PI: Final[LiteralString]
XML_ERROR_NOT_STANDALONE: Final[LiteralString]
XML_ERROR_NOT_SUSPENDED: Final[LiteralString]
XML_ERROR_NO_ELEMENTS: Final[LiteralString]
XML_ERROR_NO_MEMORY: Final[LiteralString]
XML_ERROR_PARAM_ENTITY_REF: Final[LiteralString]
XML_ERROR_PARTIAL_CHAR: Final[LiteralString]
XML_ERROR_PUBLICID: Final[LiteralString]
XML_ERROR_RECURSIVE_ENTITY_REF: Final[LiteralString]
XML_ERROR_SUSPENDED: Final[LiteralString]
XML_ERROR_SUSPEND_PE: Final[LiteralString]
XML_ERROR_SYNTAX: Final[LiteralString]
XML_ERROR_TAG_MISMATCH: Final[LiteralString]
XML_ERROR_TEXT_DECL: Final[LiteralString]
XML_ERROR_UNBOUND_PREFIX: Final[LiteralString]
XML_ERROR_UNCLOSED_CDATA_SECTION: Final[LiteralString]
XML_ERROR_UNCLOSED_TOKEN: Final[LiteralString]
XML_ERROR_UNDECLARING_PREFIX: Final[LiteralString]
XML_ERROR_UNDEFINED_ENTITY: Final[LiteralString]
XML_ERROR_UNEXPECTED_STATE: Final[LiteralString]
XML_ERROR_UNKNOWN_ENCODING: Final[LiteralString]
XML_ERROR_XML_DECL: Final[LiteralString]
XML_ERROR_ABORTED: str
XML_ERROR_ASYNC_ENTITY: str
XML_ERROR_ATTRIBUTE_EXTERNAL_ENTITY_REF: str
XML_ERROR_BAD_CHAR_REF: str
XML_ERROR_BINARY_ENTITY_REF: str
XML_ERROR_CANT_CHANGE_FEATURE_ONCE_PARSING: str
XML_ERROR_DUPLICATE_ATTRIBUTE: str
XML_ERROR_ENTITY_DECLARED_IN_PE: str
XML_ERROR_EXTERNAL_ENTITY_HANDLING: str
XML_ERROR_FEATURE_REQUIRES_XML_DTD: str
XML_ERROR_FINISHED: str
XML_ERROR_INCOMPLETE_PE: str
XML_ERROR_INCORRECT_ENCODING: str
XML_ERROR_INVALID_TOKEN: str
XML_ERROR_JUNK_AFTER_DOC_ELEMENT: str
XML_ERROR_MISPLACED_XML_PI: str
XML_ERROR_NOT_STANDALONE: str
XML_ERROR_NOT_SUSPENDED: str
XML_ERROR_NO_ELEMENTS: str
XML_ERROR_NO_MEMORY: str
XML_ERROR_PARAM_ENTITY_REF: str
XML_ERROR_PARTIAL_CHAR: str
XML_ERROR_PUBLICID: str
XML_ERROR_RECURSIVE_ENTITY_REF: str
XML_ERROR_SUSPENDED: str
XML_ERROR_SUSPEND_PE: str
XML_ERROR_SYNTAX: str
XML_ERROR_TAG_MISMATCH: str
XML_ERROR_TEXT_DECL: str
XML_ERROR_UNBOUND_PREFIX: str
XML_ERROR_UNCLOSED_CDATA_SECTION: str
XML_ERROR_UNCLOSED_TOKEN: str
XML_ERROR_UNDECLARING_PREFIX: str
XML_ERROR_UNDEFINED_ENTITY: str
XML_ERROR_UNEXPECTED_STATE: str
XML_ERROR_UNKNOWN_ENCODING: str
XML_ERROR_XML_DECL: str
if sys.version_info >= (3, 11):
XML_ERROR_RESERVED_PREFIX_XML: Final[LiteralString]
XML_ERROR_RESERVED_PREFIX_XMLNS: Final[LiteralString]
XML_ERROR_RESERVED_NAMESPACE_URI: Final[LiteralString]
XML_ERROR_INVALID_ARGUMENT: Final[LiteralString]
XML_ERROR_NO_BUFFER: Final[LiteralString]
XML_ERROR_AMPLIFICATION_LIMIT_BREACH: Final[LiteralString]
XML_ERROR_RESERVED_PREFIX_XML: str
XML_ERROR_RESERVED_PREFIX_XMLNS: str
XML_ERROR_RESERVED_NAMESPACE_URI: str
XML_ERROR_INVALID_ARGUMENT: str
XML_ERROR_NO_BUFFER: str
XML_ERROR_AMPLIFICATION_LIMIT_BREACH: str

@@ -1,13 +1,11 @@
from typing import Final
XML_CTYPE_ANY: int
XML_CTYPE_CHOICE: int
XML_CTYPE_EMPTY: int
XML_CTYPE_MIXED: int
XML_CTYPE_NAME: int
XML_CTYPE_SEQ: int

XML_CTYPE_ANY: Final = 2
XML_CTYPE_EMPTY: Final = 1
XML_CTYPE_MIXED: Final = 3
XML_CTYPE_NAME: Final = 4
XML_CTYPE_CHOICE: Final = 5
XML_CTYPE_SEQ: Final = 6

XML_CQUANT_NONE: Final = 0
XML_CQUANT_OPT: Final = 1
XML_CQUANT_REP: Final = 2
XML_CQUANT_PLUS: Final = 3
XML_CQUANT_NONE: int
XML_CQUANT_OPT: int
XML_CQUANT_PLUS: int
XML_CQUANT_REP: int

@@ -3,9 +3,8 @@ import types
from _socket import _Address, _RetAddress
from _typeshed import ReadableBuffer
from collections.abc import Callable
from io import BufferedIOBase
from socket import socket as _socket
from typing import Any, ClassVar
from typing import Any, BinaryIO, ClassVar
from typing_extensions import Self, TypeAlias

__all__ = [
@@ -159,11 +158,11 @@ class StreamRequestHandler(BaseRequestHandler):
timeout: ClassVar[float | None] # undocumented
disable_nagle_algorithm: ClassVar[bool] # undocumented
connection: Any # undocumented
rfile: BufferedIOBase
wfile: BufferedIOBase
rfile: BinaryIO
wfile: BinaryIO

class DatagramRequestHandler(BaseRequestHandler):
packet: bytes # undocumented
packet: _socket # undocumented
socket: _socket # undocumented
rfile: BufferedIOBase
wfile: BufferedIOBase
rfile: BinaryIO
wfile: BinaryIO

@@ -1,12 +1,12 @@
from typing import Final

# These are not actually bools. See #4669
NO: Final[bool]
YES: Final[bool]
TRUE: Final[bool]
FALSE: Final[bool]
ON: Final[bool]
OFF: Final[bool]
NO: bool
YES: bool
TRUE: bool
FALSE: bool
ON: bool
OFF: bool
N: Final = "n"
S: Final = "s"
W: Final = "w"

@@ -2,12 +2,12 @@ import sys
from _typeshed import Incomplete
from collections.abc import Mapping
from tkinter import Widget
from typing import Any, Final
from typing import Any

if sys.version_info >= (3, 9):
__all__ = ["Dialog"]

DIALOG_ICON: Final = "questhead"
DIALOG_ICON: str

class Dialog(Widget):
widgetName: str

@@ -1,6 +1,6 @@
import sys
from tkinter.commondialog import Dialog
from typing import ClassVar, Final
from typing import ClassVar

if sys.version_info >= (3, 9):
__all__ = [
@@ -14,22 +14,22 @@ if sys.version_info >= (3, 9):
"askretrycancel",
]

ERROR: Final = "error"
INFO: Final = "info"
QUESTION: Final = "question"
WARNING: Final = "warning"
ABORTRETRYIGNORE: Final = "abortretryignore"
OK: Final = "ok"
OKCANCEL: Final = "okcancel"
RETRYCANCEL: Final = "retrycancel"
YESNO: Final = "yesno"
YESNOCANCEL: Final = "yesnocancel"
ABORT: Final = "abort"
RETRY: Final = "retry"
IGNORE: Final = "ignore"
CANCEL: Final = "cancel"
YES: Final = "yes"
NO: Final = "no"
ERROR: str
INFO: str
QUESTION: str
WARNING: str
ABORTRETRYIGNORE: str
OK: str
OKCANCEL: str
RETRYCANCEL: str
YESNO: str
YESNOCANCEL: str
ABORT: str
RETRY: str
IGNORE: str
CANCEL: str
YES: str
NO: str

class Message(Dialog):
command: ClassVar[str]

@@ -556,9 +556,7 @@ class Notebook(Widget):
sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty
padding: _Padding = ...,
text: str = ...,
# `image` is a sequence of an image name, followed by zero or more
# (sequences of one or more state names followed by an image name)
image=...,
image=..., # Sequence of an image name, followed by zero or more (sequences of one or more state names followed by an image name)
compound: tkinter._Compound = ...,
underline: int = ...,
) -> None: ...

@@ -2,7 +2,7 @@
# ruff: noqa: F811
# TODO: The collections import is required, otherwise mypy crashes.
# https://github.com/python/mypy/issues/16744
import collections # noqa: F401 # pyright: ignore[reportUnusedImport]
import collections # noqa: F401 # pyright: ignore
import sys
import typing_extensions
from _collections_abc import dict_items, dict_keys, dict_values
@@ -800,12 +800,18 @@ class IO(Iterator[AnyStr]):
def writable(self) -> bool: ...
@abstractmethod
@overload
def write(self: IO[str], s: str, /) -> int: ...
@abstractmethod
@overload
def write(self: IO[bytes], s: ReadableBuffer, /) -> int: ...
@abstractmethod
@overload
def write(self, s: AnyStr, /) -> int: ...
@abstractmethod
@overload
def writelines(self: IO[str], lines: Iterable[str], /) -> None: ...
@abstractmethod
@overload
def writelines(self: IO[bytes], lines: Iterable[ReadableBuffer], /) -> None: ...
@abstractmethod
@overload

@@ -2,52 +2,36 @@ import sys
import unittest.case
import unittest.result
import unittest.suite
from _typeshed import SupportsFlush, SupportsWrite
from _typeshed import Incomplete
from collections.abc import Callable, Iterable
from typing import Any, Generic, Protocol, TypeVar
from typing_extensions import Never, TypeAlias
from typing import TextIO
from typing_extensions import TypeAlias

_ResultClassType: TypeAlias = Callable[[_TextTestStream, bool, int], TextTestResult]
_ResultClassType: TypeAlias = Callable[[TextIO, bool, int], unittest.result.TestResult]

class _SupportsWriteAndFlush(SupportsWrite[str], SupportsFlush, Protocol): ...

# All methods used by unittest.runner.TextTestResult's stream
class _TextTestStream(_SupportsWriteAndFlush, Protocol):
def writeln(self, arg: str | None = None) -> str: ...

# _WritelnDecorator should have all the same attrs as its stream param.
# But that's not feasible to do Generically
# We can expand the attributes if requested
class _WritelnDecorator(_TextTestStream):
def __init__(self, stream: _TextTestStream) -> None: ...
def __getattr__(self, attr: str) -> Any: ... # Any attribute from the stream type passed to __init__
# These attributes are prevented by __getattr__
stream: Never
__getstate__: Never

_StreamT = TypeVar("_StreamT", bound=_TextTestStream, default=_WritelnDecorator)

class TextTestResult(unittest.result.TestResult, Generic[_StreamT]):
class TextTestResult(unittest.result.TestResult):
descriptions: bool # undocumented
dots: bool # undocumented
separator1: str
separator2: str
showAll: bool # undocumented
stream: _StreamT # undocumented
stream: TextIO # undocumented
if sys.version_info >= (3, 12):
durations: unittest.result._DurationsType | None
def __init__(
self, stream: _StreamT, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None
self, stream: TextIO, descriptions: bool, verbosity: int, *, durations: unittest.result._DurationsType | None = None
) -> None: ...
else:
def __init__(self, stream: _StreamT, descriptions: bool, verbosity: int) -> None: ...
def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ...

def getDescription(self, test: unittest.case.TestCase) -> str: ...
def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ...

class TextTestRunner:
resultclass: _ResultClassType
stream: _WritelnDecorator
# TODO: add `_WritelnDecorator` type
# stream: _WritelnDecorator
stream: Incomplete
descriptions: bool
verbosity: int
failfast: bool
@@ -59,7 +43,7 @@ class TextTestRunner:
durations: unittest.result._DurationsType | None
def __init__(
self,
stream: _SupportsWriteAndFlush | None = None,
stream: TextIO | None = None,
descriptions: bool = True,
verbosity: int = 1,
failfast: bool = False,
@@ -73,7 +57,7 @@ class TextTestRunner:
else:
def __init__(
self,
stream: _SupportsWriteAndFlush | None = None,
stream: TextIO | None = None,
descriptions: bool = True,
verbosity: int = 1,
failfast: bool = False,
@@ -84,5 +68,5 @@ class TextTestRunner:
tb_locals: bool = False,
) -> None: ...

def _makeResult(self) -> TextTestResult: ...
def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> TextTestResult: ...
def _makeResult(self) -> unittest.result.TestResult: ...
def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ...

@@ -304,7 +304,6 @@ else:

    class Path:
        root: CompleteDirs
        at: str
        def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ...
        @property
        def name(self) -> str: ...

@@ -31,7 +31,6 @@ if sys.version_info >= (3, 12):

    class Path:
        root: CompleteDirs
        at: str
        def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = "") -> None: ...
        @property
        def name(self) -> str: ...

@@ -11,6 +11,7 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
ruff_db = { workspace = true }
ruff_notebook = { workspace = true }

@@ -3,11 +3,11 @@
use std::num::NonZeroUsize;
use std::panic::PanicInfo;

use lsp_server::Message;
use lsp_server as lsp;
use lsp_types as types;
use lsp_types::{
    ClientCapabilities, DiagnosticOptions, DiagnosticServerCapabilities, MessageType,
    ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
    Url,
    ClientCapabilities, DiagnosticOptions, NotebookCellSelector, NotebookDocumentSyncOptions,
    NotebookSelector, TextDocumentSyncCapability, TextDocumentSyncOptions,
};

use self::connection::{Connection, ConnectionInitializer};
@@ -74,7 +74,7 @@ impl Server {
            init_params.client_info.as_ref(),
        );

        let mut workspace_for_url = |url: Url| {
        let mut workspace_for_url = |url: lsp_types::Url| {
            let Some(workspace_settings) = workspace_settings.as_mut() else {
                return (url, ClientSettings::default());
            };
@@ -93,18 +93,13 @@ impl Server {
            }).collect())
            .or_else(|| {
                tracing::warn!("No workspace(s) were provided during initialization. Using the current working directory as a default workspace...");
                let uri = Url::from_file_path(std::env::current_dir().ok()?).ok()?;
                let uri = types::Url::from_file_path(std::env::current_dir().ok()?).ok()?;
                Some(vec![workspace_for_url(uri)])
            })
            .ok_or_else(|| {
                anyhow::anyhow!("Failed to get the current working directory while creating a default workspace.")
            })?;

        if workspaces.len() > 1 {
            // TODO(dhruvmanila): Support multi-root workspaces
            anyhow::bail!("Multi-root workspaces are not supported yet");
        }

        Ok(Self {
            connection,
            worker_threads,
@@ -154,7 +149,7 @@ impl Server {
            try_show_message(
                "The Ruff language server exited with a panic. See the logs for more details."
                    .to_string(),
                MessageType::ERROR,
                lsp_types::MessageType::ERROR,
            )
            .ok();
        }));
@@ -187,9 +182,9 @@ impl Server {
                    break;
                }
                let task = match msg {
                    Message::Request(req) => api::request(req),
                    Message::Notification(notification) => api::notification(notification),
                    Message::Response(response) => scheduler.response(response),
                    lsp::Message::Request(req) => api::request(req),
                    lsp::Message::Notification(notification) => api::notification(notification),
                    lsp::Message::Response(response) => scheduler.response(response),
                };
                scheduler.dispatch(task);
            }
@@ -211,17 +206,28 @@ impl Server {
            .unwrap_or_default()
    }

    fn server_capabilities(position_encoding: PositionEncoding) -> ServerCapabilities {
        ServerCapabilities {
    fn server_capabilities(position_encoding: PositionEncoding) -> types::ServerCapabilities {
        types::ServerCapabilities {
            position_encoding: Some(position_encoding.into()),
            diagnostic_provider: Some(DiagnosticServerCapabilities::Options(DiagnosticOptions {
                identifier: Some(crate::DIAGNOSTIC_NAME.into()),
                ..Default::default()
            diagnostic_provider: Some(types::DiagnosticServerCapabilities::Options(
                DiagnosticOptions {
                    identifier: Some(crate::DIAGNOSTIC_NAME.into()),
                    ..Default::default()
                },
            )),
            notebook_document_sync: Some(types::OneOf::Left(NotebookDocumentSyncOptions {
                save: Some(false),
                notebook_selector: [NotebookSelector::ByCells {
                    notebook: None,
                    cells: vec![NotebookCellSelector {
                        language: "python".to_string(),
                    }],
                }]
                .to_vec(),
            })),
            text_document_sync: Some(TextDocumentSyncCapability::Options(
                TextDocumentSyncOptions {
                    open_close: Some(true),
                    change: Some(TextDocumentSyncKind::INCREMENTAL),
                    ..Default::default()
                },
            )),

@@ -1,15 +1,13 @@
use crate::{server::schedule::Task, session::Session, system::url_to_system_path};
use lsp_server as server;

use crate::server::schedule::Task;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};

mod diagnostics;
mod notifications;
mod requests;
mod traits;

use notifications as notification;
use red_knot_workspace::db::RootDatabase;
use requests as request;

use self::traits::{NotificationHandler, RequestHandler};
@@ -45,7 +43,6 @@ pub(super) fn notification<'a>(notif: server::Notification) -> Task<'a> {
    match notif.method.as_str() {
        notification::DidCloseTextDocumentHandler::METHOD => local_notification_task::<notification::DidCloseTextDocumentHandler>(notif),
        notification::DidOpenTextDocumentHandler::METHOD => local_notification_task::<notification::DidOpenTextDocumentHandler>(notif),
        notification::DidChangeTextDocumentHandler::METHOD => local_notification_task::<notification::DidChangeTextDocumentHandler>(notif),
        notification::DidOpenNotebookHandler::METHOD => {
            local_notification_task::<notification::DidOpenNotebookHandler>(notif)
        }
@@ -85,18 +82,12 @@ fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
    Ok(Task::background(schedule, move |session: &Session| {
        let url = R::document_url(&params).into_owned();

        let Ok(path) = url_to_any_system_path(&url) else {
        let Ok(path) = url_to_system_path(&url) else {
            return Box::new(|_, _| {});
        };
        let db = match path {
            AnySystemPath::System(path) => {
                match session.workspace_db_for_path(path.as_std_path()) {
                    Some(db) => db.snapshot(),
                    None => session.default_workspace_db().snapshot(),
                }
            }
            AnySystemPath::SystemVirtual(_) => session.default_workspace_db().snapshot(),
        };
        let db = session
            .workspace_db_for_path(path.as_std_path())
            .map(RootDatabase::snapshot);

        let Some(snapshot) = session.take_snapshot(url) else {
            return Box::new(|_, _| {});

@@ -1,11 +1,9 @@
mod did_change;
mod did_close;
mod did_close_notebook;
mod did_open;
mod did_open_notebook;
mod set_trace;

pub(super) use did_change::DidChangeTextDocumentHandler;
pub(super) use did_close::DidCloseTextDocumentHandler;
pub(super) use did_close_notebook::DidCloseNotebookHandler;
pub(super) use did_open::DidOpenTextDocumentHandler;

@@ -1,55 +0,0 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidChangeTextDocument;
use lsp_types::DidChangeTextDocumentParams;

use red_knot_workspace::watch::ChangeEvent;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};

pub(crate) struct DidChangeTextDocumentHandler;

impl NotificationHandler for DidChangeTextDocumentHandler {
    type NotificationType = DidChangeTextDocument;
}

impl SyncNotificationHandler for DidChangeTextDocumentHandler {
    fn run(
        session: &mut Session,
        _notifier: Notifier,
        _requester: &mut Requester,
        params: DidChangeTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
            return Ok(());
        };

        let key = session.key_from_url(params.text_document.uri);

        session
            .update_text_document(&key, params.content_changes, params.text_document.version)
            .with_failure_code(ErrorCode::InternalError)?;

        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::file_content_changed(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.apply_changes(vec![ChangeEvent::ChangedVirtual(virtual_path)], None);
            }
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

        Ok(())
    }
}
@@ -1,7 +1,8 @@
use lsp_server::ErrorCode;
use lsp_types::notification::DidCloseTextDocument;
use lsp_types::DidCloseTextDocumentParams;
use red_knot_workspace::watch::ChangeEvent;

use ruff_db::files::File;

use crate::server::api::diagnostics::clear_diagnostics;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -9,7 +10,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::system::url_to_system_path;

pub(crate) struct DidCloseTextDocumentHandler;

@@ -24,7 +25,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
        _requester: &mut Requester,
        params: DidCloseTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
        let Ok(path) = url_to_system_path(&params.text_document.uri) else {
            return Ok(());
        };

@@ -33,9 +34,8 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
            .close_document(&key)
            .with_failure_code(ErrorCode::InternalError)?;

        if let AnySystemPath::SystemVirtual(virtual_path) = path {
            let db = session.default_workspace_db_mut();
            db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            File::sync_path(db, &path);
        }

        clear_diagnostics(key.url(), &notifier)?;

@@ -1,14 +1,14 @@
use lsp_types::notification::DidCloseNotebookDocument;
use lsp_types::DidCloseNotebookDocumentParams;

use red_knot_workspace::watch::ChangeEvent;
use ruff_db::files::File;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::system::url_to_system_path;

pub(crate) struct DidCloseNotebookHandler;

@@ -23,7 +23,7 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
        _requester: &mut Requester,
        params: DidCloseNotebookDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
        let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
            return Ok(());
        };

@@ -32,9 +32,8 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
            .close_document(&key)
            .with_failure_code(lsp_server::ErrorCode::InternalError)?;

        if let AnySystemPath::SystemVirtual(virtual_path) = path {
            let db = session.default_workspace_db_mut();
            db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            File::sync_path(db, &path);
        }

        Ok(())

@@ -1,14 +1,13 @@
use lsp_types::notification::DidOpenTextDocument;
use lsp_types::DidOpenTextDocumentParams;

use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;
use ruff_db::files::system_path_to_file;

use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::system::url_to_system_path;
use crate::TextDocument;

pub(crate) struct DidOpenTextDocumentHandler;
@@ -24,25 +23,17 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler {
        _requester: &mut Requester,
        params: DidOpenTextDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.text_document.uri) else {
        let Ok(path) = url_to_system_path(&params.text_document.uri) else {
            return Ok(());
        };

        let document = TextDocument::new(params.text_document.text, params.text_document.version);
        session.open_text_document(params.text_document.uri, document);

        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::Opened(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.files().virtual_file(db, &virtual_path);
            }
        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            // TODO(dhruvmanila): Store the `file` in `DocumentController`
            let file = system_path_to_file(db, &path).unwrap();
            file.sync(db);
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

@@ -2,8 +2,7 @@ use lsp_server::ErrorCode;
use lsp_types::notification::DidOpenNotebookDocument;
use lsp_types::DidOpenNotebookDocumentParams;

use red_knot_workspace::watch::ChangeEvent;
use ruff_db::Db;
use ruff_db::files::system_path_to_file;

use crate::edit::NotebookDocument;
use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler};
@@ -11,7 +10,7 @@ use crate::server::api::LSPResult;
use crate::server::client::{Notifier, Requester};
use crate::server::Result;
use crate::session::Session;
use crate::system::{url_to_any_system_path, AnySystemPath};
use crate::system::url_to_system_path;

pub(crate) struct DidOpenNotebookHandler;

@@ -26,7 +25,7 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
        _requester: &mut Requester,
        params: DidOpenNotebookDocumentParams,
    ) -> Result<()> {
        let Ok(path) = url_to_any_system_path(&params.notebook_document.uri) else {
        let Ok(path) = url_to_system_path(&params.notebook_document.uri) else {
            return Ok(());
        };

@@ -39,18 +38,10 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
            .with_failure_code(ErrorCode::InternalError)?;
        session.open_notebook_document(params.notebook_document.uri.clone(), notebook);

        match path {
            AnySystemPath::System(path) => {
                let db = match session.workspace_db_for_path_mut(path.as_std_path()) {
                    Some(db) => db,
                    None => session.default_workspace_db_mut(),
                };
                db.apply_changes(vec![ChangeEvent::Opened(path)], None);
            }
            AnySystemPath::SystemVirtual(virtual_path) => {
                let db = session.default_workspace_db_mut();
                db.files().virtual_file(db, &virtual_path);
            }
        if let Some(db) = session.workspace_db_for_path_mut(path.as_std_path()) {
            // TODO(dhruvmanila): Store the `file` in `DocumentController`
            let file = system_path_to_file(db, &path).unwrap();
            file.sync(db);
        }

        // TODO(dhruvmanila): Publish diagnostics if the client doesn't support pull diagnostics

@@ -26,11 +26,13 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {

    fn run_with_snapshot(
        snapshot: DocumentSnapshot,
        db: RootDatabase,
        db: Option<RootDatabase>,
        _notifier: Notifier,
        _params: DocumentDiagnosticParams,
    ) -> Result<DocumentDiagnosticReportResult> {
        let diagnostics = compute_diagnostics(&snapshot, &db);
        let diagnostics = db
            .map(|db| compute_diagnostics(&snapshot, &db))
            .unwrap_or_default();

        Ok(DocumentDiagnosticReportResult::Report(
            DocumentDiagnosticReport::Full(RelatedFullDocumentDiagnosticReport {
@@ -46,19 +48,10 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {

fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Diagnostic> {
    let Some(file) = snapshot.file(db) else {
        tracing::info!(
            "No file found for snapshot for '{}'",
            snapshot.query().file_url()
        );
        return vec![];
    };

    let diagnostics = match db.check_file(file) {
        Ok(diagnostics) => diagnostics,
        Err(cancelled) => {
            tracing::info!("Diagnostics computation {cancelled}");
            return vec![];
        }
    let Ok(diagnostics) = db.check_file(file) else {
        return vec![];
    };

    diagnostics
@@ -72,12 +65,12 @@ fn to_lsp_diagnostic(message: &str) -> Diagnostic {
    let words = message.split(':').collect::<Vec<_>>();

    let (range, message) = match words.as_slice() {
        [_, _, line, column, message] | [_, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default().saturating_sub(1);
        [_filename, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default();
            let column = column.parse::<u32>().unwrap_or_default();
            (
                Range::new(
                    Position::new(line, column.saturating_sub(1)),
                    Position::new(line.saturating_sub(1), column.saturating_sub(1)),
                    Position::new(line, column),
                ),
                message.trim(),

@@ -34,7 +34,7 @@ pub(super) trait BackgroundDocumentRequestHandler: RequestHandler {

    fn run_with_snapshot(
        snapshot: DocumentSnapshot,
        db: RootDatabase,
        db: Option<RootDatabase>,
        notifier: Notifier,
        params: <<Self as RequestHandler>::RequestType as Request>::Params,
    ) -> super::Result<<<Self as RequestHandler>::RequestType as Request>::Result>;

@@ -6,16 +6,16 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;

use anyhow::anyhow;
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};
use lsp_types::{ClientCapabilities, Url};

use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::SystemPath;
use ruff_db::Db;

use crate::edit::{DocumentKey, DocumentVersion, NotebookDocument};
use crate::system::{url_to_any_system_path, AnySystemPath, LSPSystem};
use crate::edit::{DocumentKey, NotebookDocument};
use crate::system::{url_to_system_path, LSPSystem};
use crate::{PositionEncoding, TextDocument};

pub(crate) use self::capabilities::ResolvedClientCapabilities;
@@ -67,10 +67,19 @@ impl Session {
                .ok_or_else(|| anyhow!("Workspace path is not a valid UTF-8 path: {:?}", path))?;
            let system = LSPSystem::new(index.clone());

            let metadata = WorkspaceMetadata::from_path(system_path, &system)?;
            // TODO(dhruvmanila): Get the values from the client settings
            let metadata = WorkspaceMetadata::from_path(system_path, &system, None)?;
            let program_settings = ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: system_path.to_path_buf(),
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            };
            // TODO(micha): Handle the case where the program settings are incorrect more gracefully.
            workspaces.insert(path, RootDatabase::new(metadata, system)?);
            workspaces.insert(path, RootDatabase::new(metadata, program_settings, system)?);
        }

        Ok(Self {
@@ -83,12 +92,6 @@ impl Session {
        })
    }

    // TODO(dhruvmanila): Ideally, we should have a single method for `workspace_db_for_path_mut`
    // and `default_workspace_db_mut` but the borrow checker doesn't allow that.
    // https://github.com/astral-sh/ruff/pull/13041#discussion_r1726725437

    /// Returns a reference to the workspace [`RootDatabase`] corresponding to the given path, if
    /// any.
    pub(crate) fn workspace_db_for_path(&self, path: impl AsRef<Path>) -> Option<&RootDatabase> {
        self.workspaces
            .range(..=path.as_ref().to_path_buf())
@@ -96,8 +99,6 @@ impl Session {
            .map(|(_, db)| db)
    }

    /// Returns a mutable reference to the workspace [`RootDatabase`] corresponding to the given
    /// path, if any.
    pub(crate) fn workspace_db_for_path_mut(
        &mut self,
        path: impl AsRef<Path>,
@@ -108,19 +109,6 @@ impl Session {
            .map(|(_, db)| db)
    }

    /// Returns a reference to the default workspace [`RootDatabase`]. The default workspace is the
    /// minimum root path in the workspace map.
    pub(crate) fn default_workspace_db(&self) -> &RootDatabase {
        // SAFETY: Currently, red knot only supports a single workspace.
        self.workspaces.values().next().unwrap()
    }

    /// Returns a mutable reference to the default workspace [`RootDatabase`].
    pub(crate) fn default_workspace_db_mut(&mut self) -> &mut RootDatabase {
        // SAFETY: Currently, red knot only supports a single workspace.
        self.workspaces.values_mut().next().unwrap()
    }

    pub fn key_from_url(&self, url: Url) -> DocumentKey {
        self.index().key_from_url(url)
    }
@@ -147,20 +135,6 @@ impl Session {
        self.index_mut().open_text_document(url, document);
    }

    /// Updates a text document at the associated `key`.
    ///
    /// The document key must point to a text document, or this will throw an error.
    pub(crate) fn update_text_document(
        &mut self,
        key: &DocumentKey,
        content_changes: Vec<TextDocumentContentChangeEvent>,
        new_version: DocumentVersion,
    ) -> crate::Result<()> {
        let position_encoding = self.position_encoding;
        self.index_mut()
            .update_text_document(key, content_changes, new_version, position_encoding)
    }

    /// De-registers a document, specified by its key.
    /// Calling this multiple times for the same document is a logic error.
    pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> {
@@ -247,7 +221,6 @@ impl Drop for MutIndexGuard<'_> {

/// An immutable snapshot of `Session` that references
/// a specific document.
#[derive(Debug)]
pub struct DocumentSnapshot {
    resolved_client_capabilities: Arc<ResolvedClientCapabilities>,
    document_ref: index::DocumentQuery,
@@ -268,12 +241,7 @@ impl DocumentSnapshot {
    }

    pub(crate) fn file(&self, db: &RootDatabase) -> Option<File> {
        match url_to_any_system_path(self.document_ref.file_url()).ok()? {
            AnySystemPath::System(path) => system_path_to_file(db, path).ok(),
            AnySystemPath::SystemVirtual(virtual_path) => db
                .files()
                .try_virtual_file(&virtual_path)
                .map(|virtual_file| virtual_file.file()),
        }
        let path = url_to_system_path(self.document_ref.file_url()).ok()?;
        system_path_to_file(db, path).ok()
    }
}

@@ -8,40 +8,27 @@ use ruff_db::file_revision::FileRevision;
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
use ruff_db::system::{
    DirectoryEntry, FileType, Metadata, OsSystem, Result, System, SystemPath, SystemPathBuf,
    SystemVirtualPath, SystemVirtualPathBuf,
    SystemVirtualPath,
};
use ruff_notebook::{Notebook, NotebookError};

use crate::session::index::Index;
use crate::DocumentQuery;

/// Converts the given [`Url`] to an [`AnySystemPath`].
///
/// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`]. Otherwise, the
/// URL is converted to a [`SystemVirtualPathBuf`].
/// Converts the given [`Url`] to a [`SystemPathBuf`].
///
/// This fails in the following cases:
/// * The URL scheme is not `file`.
/// * The URL cannot be converted to a file path (refer to [`Url::to_file_path`]).
/// * If the URL is not a valid UTF-8 string.
pub(crate) fn url_to_any_system_path(url: &Url) -> std::result::Result<AnySystemPath, ()> {
pub(crate) fn url_to_system_path(url: &Url) -> std::result::Result<SystemPathBuf, ()> {
    if url.scheme() == "file" {
        Ok(AnySystemPath::System(
            SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?,
        ))
        Ok(SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?)
    } else {
        Ok(AnySystemPath::SystemVirtual(
            SystemVirtualPath::new(url.as_str()).to_path_buf(),
        ))
        Err(())
    }
}

/// Represents either a [`SystemPath`] or a [`SystemVirtualPath`].
#[derive(Debug)]
pub(crate) enum AnySystemPath {
    System(SystemPathBuf),
    SystemVirtual(SystemVirtualPathBuf),
}

#[derive(Debug)]
pub(crate) struct LSPSystem {
    /// A read-only copy of the index where the server stores all the open documents and settings.
@@ -157,6 +144,19 @@ impl System for LSPSystem {
        }
    }

    fn virtual_path_metadata(&self, path: &SystemVirtualPath) -> Result<Metadata> {
        // Virtual paths only exist in the LSP system, so we don't need to check the OS system.
        let document = self
            .system_virtual_path_to_document_ref(path)?
            .ok_or_else(|| virtual_path_not_found(path))?;

        Ok(Metadata::new(
            document_revision(&document),
            None,
            FileType::File,
        ))
    }

    fn read_virtual_path_to_string(&self, path: &SystemVirtualPath) -> Result<String> {
        let document = self
            .system_virtual_path_to_document_ref(path)?

@@ -3,8 +3,8 @@ use std::any::Any;
use js_sys::Error;
use wasm_bindgen::prelude::*;

use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
@@ -41,17 +41,16 @@ impl Workspace {
    #[wasm_bindgen(constructor)]
    pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> {
        let system = WasmSystem::new(SystemPath::new(root));
        let workspace = WorkspaceMetadata::from_path(
            SystemPath::new(root),
            &system,
            Some(Configuration {
                target_version: Some(settings.target_version.into()),
                ..Configuration::default()
            }),
        )
        .map_err(into_error)?;
        let workspace =
            WorkspaceMetadata::from_path(SystemPath::new(root), &system).map_err(into_error)?;

        let db = RootDatabase::new(workspace, system.clone()).map_err(into_error)?;
        let program_settings = ProgramSettings {
            target_version: settings.target_version.into(),
            search_paths: SearchPathSettings::default(),
        };

        let db =
            RootDatabase::new(workspace, program_settings, system.clone()).map_err(into_error)?;

        Ok(Self { db, system })
    }
@@ -234,6 +233,13 @@ impl System for WasmSystem {
        Notebook::from_source_code(&content)
    }

    fn virtual_path_metadata(
        &self,
        _path: &SystemVirtualPath,
    ) -> ruff_db::system::Result<Metadata> {
        Err(not_found())
    }

    fn read_virtual_path_to_string(
        &self,
        _path: &SystemVirtualPath,

@@ -11,14 +11,14 @@ fn check() {
    };
    let mut workspace = Workspace::new("/", &settings).expect("Workspace to be created");

    workspace
    let test = workspace
        .open_file("test.py", "import random22\n")
        .expect("File to be opened");

    let result = workspace.check().expect("Check to succeed");
    let result = workspace.check_file(&test).expect("Check to succeed");

    assert_eq!(
        result,
        vec!["/test.py:1:8: Cannot resolve import 'random22'."]
        vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
    );
}

@@ -22,14 +22,13 @@ ruff_text_size = { workspace = true }
anyhow = { workspace = true }
crossbeam = { workspace = true }
notify = { workspace = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"] }
tempfile = { workspace = true }
ruff_db = { workspace = true, features = ["testing"]}

[lints]
workspace = true

@@ -1,2 +0,0 @@
x = 0
(x := x + 1)
@@ -1,3 +0,0 @@
x = 0
if x := x + 1:
    pass
@@ -1,11 +0,0 @@
def bool(x) -> bool:
    return True


class MyClass: ...


def MyClass() -> MyClass: ...


def x(self) -> x: ...
@@ -1,2 +0,0 @@
def bool(x=bool):
    return x
@@ -1,3 +0,0 @@
match x:
    case [1, 0] if x := x[:0]:
        y = 1
@@ -1,14 +1,15 @@
use std::panic::RefUnwindSafe;
use std::sync::Arc;

use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};

use red_knot_python_semantic::{vendored_typeshed_stubs, Db as SemanticDb, Program};
use red_knot_python_semantic::{
    vendored_typeshed_stubs, Db as SemanticDb, Program, ProgramSettings,
};
use ruff_db::files::{File, Files};
use ruff_db::system::System;
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
use salsa::plumbing::ZalsaDatabase;
use salsa::{Cancelled, Event};

use crate::workspace::{check_file, Workspace, WorkspaceMetadata};

@@ -26,7 +27,11 @@ pub struct RootDatabase {
}

impl RootDatabase {
    pub fn new<S>(workspace: WorkspaceMetadata, system: S) -> anyhow::Result<Self>
    pub fn new<S>(
        workspace: WorkspaceMetadata,
        settings: ProgramSettings,
        system: S,
    ) -> anyhow::Result<Self>
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
@@ -37,11 +42,11 @@ impl RootDatabase {
            system: Arc::new(system),
        };

        let workspace = Workspace::from_metadata(&db, workspace);
        // Initialize the `Program` singleton
        Program::from_settings(&db, workspace.settings().program())?;

        db.workspace = Some(Workspace::from_metadata(&db, workspace));
        Program::from_settings(&db, settings)?;

        db.workspace = Some(workspace);
        Ok(db)
    }

@@ -56,8 +61,6 @@ impl RootDatabase {
    }

    pub fn check_file(&self, file: File) -> Result<Vec<String>, Cancelled> {
        let _span = tracing::debug_span!("check_file", file=%file.path(self)).entered();

        self.with_db(|db| check_file(db, file))
    }

@@ -157,9 +160,8 @@ impl Db for RootDatabase {}

#[cfg(test)]
pub(crate) mod tests {
    use std::sync::Arc;

    use salsa::Event;
    use std::sync::Arc;

    use red_knot_python_semantic::{vendored_typeshed_stubs, Db as SemanticDb};
    use ruff_db::files::Files;

@@ -1,33 +1,22 @@
use red_knot_python_semantic::Program;
use rustc_hash::FxHashSet;

use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState;
use ruff_db::system::SystemPath;
use ruff_db::Db;
use rustc_hash::FxHashSet;

use crate::db::RootDatabase;
use crate::watch;
use crate::watch::{CreatedKind, DeletedKind};
use crate::workspace::settings::Configuration;
use crate::workspace::WorkspaceMetadata;

impl RootDatabase {
    #[tracing::instrument(level = "debug", skip(self, changes, base_configuration))]
    pub fn apply_changes(
        &mut self,
        changes: Vec<watch::ChangeEvent>,
        base_configuration: Option<&Configuration>,
    ) {
    #[tracing::instrument(level = "debug", skip(self, changes))]
    pub fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
        let workspace = self.workspace();
        let workspace_path = workspace.root(self).to_path_buf();
        let program = Program::get(self);
        let custom_stdlib_versions_path = program
            .custom_stdlib_search_path(self)
            .map(|path| path.join("VERSIONS"));

        let mut workspace_change = false;
        // Changes to a custom stdlib path's VERSIONS
        let mut custom_stdlib_change = false;
        // Packages that need reloading
        let mut changed_packages = FxHashSet::default();
        // Paths that were added
@@ -50,7 +39,7 @@ impl RootDatabase {
        };

        for change in changes {
            if let Some(path) = change.system_path() {
            if let Some(path) = change.path() {
                if matches!(
                    path.file_name(),
                    Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
@@ -65,15 +54,10 @@ impl RootDatabase {

                    continue;
                }

                if Some(path) == custom_stdlib_versions_path.as_deref() {
                    custom_stdlib_change = true;
                }
            }

            match change {
                watch::ChangeEvent::Changed { path, kind: _ }
                | watch::ChangeEvent::Opened(path) => sync_path(self, &path),
                watch::ChangeEvent::Changed { path, kind: _ } => sync_path(self, &path),

                watch::ChangeEvent::Created { kind, path } => {
                    match kind {
@@ -116,13 +100,7 @@ impl RootDatabase {
                    } else {
                        sync_recursively(self, &path);

                        if custom_stdlib_versions_path
                            .as_ref()
                            .is_some_and(|versions_path| versions_path.starts_with(&path))
                        {
                            custom_stdlib_change = true;
                        }

                        // TODO: Remove after converting `package.files()` to a salsa query.
                        if let Some(package) = workspace.package(self, &path) {
                            changed_packages.insert(package);
                        } else {
@@ -131,17 +109,6 @@ impl RootDatabase {
                    }
                }

                watch::ChangeEvent::CreatedVirtual(path)
                | watch::ChangeEvent::ChangedVirtual(path) => {
                    File::sync_virtual_path(self, &path);
                }

                watch::ChangeEvent::DeletedVirtual(path) => {
                    if let Some(virtual_file) = self.files().try_virtual_file(&path) {
                        virtual_file.close(self);
                    }
                }

                watch::ChangeEvent::Rescan => {
                    workspace_change = true;
                    Files::sync_all(self);
@@ -151,11 +118,7 @@ impl RootDatabase {
        }

        if workspace_change {
            match WorkspaceMetadata::from_path(
                &workspace_path,
                self.system(),
                base_configuration.cloned(),
            ) {
            match WorkspaceMetadata::from_path(&workspace_path, self.system()) {
                Ok(metadata) => {
                    tracing::debug!("Reloading workspace after structural change.");
                    // TODO: Handle changes in the program settings.
@@ -167,11 +130,6 @@ impl RootDatabase {
            }

            return;
        } else if custom_stdlib_change {
            let search_paths = workspace.search_path_settings(self).clone();
            if let Err(error) = program.update_search_paths(self, &search_paths) {
                tracing::error!("Failed to set the new search paths: {error}");
            }
        }

        let mut added_paths = added_paths.into_iter().filter(|path| {

Some files were not shown because too many files have changed in this diff.