# Compare commits

`0.5.7...perf-node-` (140 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 82f33db5e6 | |
| | f873d2ac12 | |
| | ecd9e6a650 | |
| | 785c39927b | |
| | a35cdbb275 | |
| | 0c98b5949c | |
| | e5f37a8254 | |
| | 5c5dfc11f0 | |
| | 678045e1aa | |
| | dedefd73da | |
| | 37a60460ed | |
| | 0bd258a370 | |
| | 9baab8672a | |
| | c65e3310d5 | |
| | 38c19fb96e | |
| | abb4cdbf3d | |
| | fc811f5168 | |
| | 1a8f29ea41 | |
| | aefaddeae7 | |
| | df09045176 | |
| | 049cda2ff3 | |
| | 358792f2c9 | |
| | e6d5a7af37 | |
| | f5bff82e70 | |
| | ab44152eb5 | |
| | f4c8c7eb70 | |
| | 65de8f2c9b | |
| | e6226436fd | |
| | 0345d46759 | |
| | 4d0d3b00cb | |
| | 2be1c4ff04 | |
| | edd86d5603 | |
| | 78ad7959ca | |
| | d72ecd6ded | |
| | 8617a508bd | |
| | c88bd4e884 | |
| | fbcda90316 | |
| | 169d4390cb | |
| | 80ade591df | |
| | 4881d32c80 | |
| | 81a2220ce1 | |
| | 900e98b584 | |
| | f9d8189670 | |
| | 52ba94191a | |
| | 96802d6a7f | |
| | dd0a7ec73e | |
| | 25f5ae44c4 | |
| | 251efe5c41 | |
| | 6359e55383 | |
| | a9847af6e8 | |
| | d61d75d4fa | |
| | 499c0bd875 | |
| | 4cb30b598f | |
| | aba0d83c11 | |
| | c319414e54 | |
| | ef1f6d98a0 | |
| | b850b812de | |
| | a87b27c075 | |
| | 9b73532b11 | |
| | d8debb7a36 | |
| | bd4a947b29 | |
| | f121f8b31b | |
| | 80efb865e9 | |
| | 52d27befe8 | |
| | 6ed06afd28 | |
| | b9da31610a | |
| | ac7b1770e2 | |
| | e4c2859c0f | |
| | 6dcd743111 | |
| | 73160dc8b6 | |
| | 15aa5a6d57 | |
| | 33512a4249 | |
| | d8ebb03591 | |
| | 2e211c5c22 | |
| | 9fd8aaaf29 | |
| | d110bd4e60 | |
| | eb9c7ae869 | |
| | 7defc0d136 | |
| | 45f459bafd | |
| | 99e946a005 | |
| | 78a7ac0722 | |
| | fa2f3f9f2f | |
| | 3898d737d8 | |
| | c487149b7d | |
| | bebed67bf1 | |
| | 3ddcad64f5 | |
| | 05c35b6975 | |
| | 7fc39ad624 | |
| | 2520ebb145 | |
| | 89c8b49027 | |
| | e05953a991 | |
| | d0ac38f9d3 | |
| | ff53db3d99 | |
| | 899a52390b | |
| | 82a3e69b8a | |
| | 7027344dfc | |
| | fb9f0c448f | |
| | 75131c6f4a | |
| | 4b9ddc4a06 | |
| | 99dc208b00 | |
| | 540023262e | |
| | 2ea79572ae | |
| | aa0db338d9 | |
| | a99a45868c | |
| | fabf19fdc9 | |
| | 59f712a566 | |
| | 1d080465de | |
| | 3481e16cdf | |
| | d7e9280e1e | |
| | f237d36d2f | |
| | 12f22b1fdd | |
| | 47d05ee9ea | |
| | 9caec36b59 | |
| | cb364780b3 | |
| | 71b8bf211f | |
| | 109b9cc4f9 | |
| | 5d02627794 | |
| | 65444bb00e | |
| | 8822a79b4d | |
| | 2df4d23113 | |
| | 603b62607a | |
| | 2b71fc4510 | |
| | 1b78d872ec | |
| | feba5031dc | |
| | 0c2b88f224 | |
| | cf1a57df5a | |
| | 597c5f9124 | |
| | 69e1c567d4 | |
| | 37b9bac403 | |
| | 83db48d316 | |
| | c4e651921b | |
| | b595346213 | |
| | 253474b312 | |
| | a176679b24 | |
| | 1f51048fa4 | |
| | 2abfab0f9b | |
| | 64f1f3468d | |
| | ffaa35eafe | |
| | c906b0183b | |
| | bc5b9b81dd | |
`.github/renovate.json5` (vendored, 16 changes)
```diff
@@ -14,12 +14,26 @@
     rangeStrategy: "update-lockfile",
   },
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
   pip_requirements: {
     fileMatch: ["^docs/requirements.*\\.txt$"],
     // The default for this package manager is to run on all requirements.txt files:
     // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
     // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
     // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
     // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
     // a "negative glob pattern".
     // See:
     // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
     // - https://docs.renovatebot.com/configuration-options/#ignorepaths
     // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
     ignorePaths: ["!docs/requirements*.txt"]
   },
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
     fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {
```
`.github/workflows/ci.yaml` (vendored, 7 changes)
```diff
@@ -142,6 +142,13 @@ jobs:
 
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
           RUSTDOCFLAGS: "-D warnings"
+      # Use --document-private-items so that all our doc comments are kept in
+      # sync, not just public items. Eventually we should do this for all
+      # crates; for now add crates here as they are warning-clean to prevent
+      # regression.
+      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
+        env:
+          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
+          RUSTDOCFLAGS: "-D warnings"
```
```diff
@@ -14,6 +14,9 @@ MD041: false
 # MD013/line-length
 MD013: false
 
+# MD014/commands-show-output
+MD014: false
+
 # MD024/no-duplicate-heading
 MD024:
   # Allow when nested under different parents e.g. CHANGELOG.md
```
```diff
@@ -6,6 +6,8 @@ exclude: |
     crates/red_knot_workspace/resources/.*|
     crates/ruff_linter/resources/.*|
     crates/ruff_linter/src/rules/.*/snapshots/.*|
    crates/ruff_notebook/resources/.*|
    crates/ruff_server/resources/.*|
    crates/ruff/resources/.*|
    crates/ruff_python_formatter/resources/.*|
    crates/ruff_python_formatter/tests/snapshots/.*|
@@ -15,7 +17,7 @@ exclude: |
 
 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.18
+    rev: v0.19
     hooks:
       - id: validate-pyproject
 
@@ -57,7 +59,7 @@ repos:
         pass_filenames: false # This makes it a lot faster
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.6
+    rev: v0.6.1
     hooks:
       - id: ruff-format
       - id: ruff
```
@@ -1,5 +1,43 @@

# Breaking Changes

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

  ```toml
  [tool.ruff.lint.per-file-ignores]
  "*.ipynb" = ["E501"] # disable line-too-long in notebooks
  ```

  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
  Notebook files and not format them:

  ```toml
  [tool.ruff.format]
  exclude = ["*.ipynb"]
  ```

  And, conversely, the following would only format Jupyter Notebook files and not lint them:

  ```toml
  [tool.ruff.lint]
  exclude = ["*.ipynb"]
  ```

  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

  ```toml
  [tool.ruff]
  extend-exclude = ["*.ipynb"]
  ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
`CHANGELOG.md` (115 changes)
@@ -1,5 +1,120 @@

# Changelog

## 0.6.1

This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6, Ruff changed its
behavior to lint and format Jupyter notebooks by default; however, due to an oversight, these files
were still excluded by default if Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).

### Preview features

- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))

### Rule changes

- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))

### Server

- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))

### Other changes

- \[`flake8-naming`\] Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))

## 0.6.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!

### Breaking changes

See also the "Remapped rules" section, which may result in disabled rules.

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

### Deprecations

The following rules are now deprecated:

- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)

### Remapped rules

The following rules have been remapped to new rule codes:

- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`

### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)

The following behaviors have been stabilized:

- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.

The following fixes have been stabilized:

- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)

### Preview features

- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))

### Rule changes

- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))

### Server

- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))

### Bug fixes

- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))

## 0.5.7

### Preview features
```diff
@@ -2,35 +2,6 @@
 
 Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
 
-- [The Basics](#the-basics)
-  - [Prerequisites](#prerequisites)
-  - [Development](#development)
-  - [Project Structure](#project-structure)
-  - [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
-    - [Rule naming convention](#rule-naming-convention)
-    - [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
-  - [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
-- [MkDocs](#mkdocs)
-- [Release Process](#release-process)
-  - [Creating a new release](#creating-a-new-release)
-- [Ecosystem CI](#ecosystem-ci)
-- [Benchmarking and Profiling](#benchmarking-and-profiling)
-  - [CPython Benchmark](#cpython-benchmark)
-  - [Microbenchmarks](#microbenchmarks)
-    - [Benchmark-driven Development](#benchmark-driven-development)
-    - [PR Summary](#pr-summary)
-    - [Tips](#tips)
-  - [Profiling Projects](#profiling-projects)
-    - [Linux](#linux)
-    - [Mac](#mac)
-- [`cargo dev`](#cargo-dev)
-- [Subsystems](#subsystems)
-  - [Compilation Pipeline](#compilation-pipeline)
-  - [Import Categorization](#import-categorization)
-    - [Project root](#project-root)
-    - [Package root](#package-root)
-    - [Import categorization](#import-categorization-1)
 
 ## The Basics
 
 Ruff welcomes contributions in the form of pull requests.
```
```diff
@@ -333,22 +304,34 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
 ### Creating a new release
 
 1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
 
 1. Run `./scripts/release.sh`; this command will:
 
    - Generate a temporary virtual environment with `rooster`
    - Generate a changelog entry in `CHANGELOG.md`
    - Update versions in `pyproject.toml` and `Cargo.toml`
    - Update references to versions in the `README.md` and documentation
    - Display contributors for the release
 
 1. The changelog should then be editorialized for consistency
 
    - Often labels will be missing from pull requests; they will need to be manually organized into the proper section
    - Changes should be edited to be user-facing descriptions, avoiding internal details
 
 1. Highlight any breaking changes in `BREAKING_CHANGES.md`
 
 1. Run `cargo check`. This should update the lock file with new versions.
 
 1. Create a pull request with the changelog and version updates
 
 1. Merge the PR
 
 1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
 
    - The new version number (without starting `v`)
 
 1. The release workflow will do the following:
 
    1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
       uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
       make sure you're [re-running all the failed
@@ -359,14 +342,25 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
    1. Attach artifacts to draft GitHub release
    1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
       downstream jobs manually if needed.
 
 1. Verify the GitHub release:
 
    1. The Changelog should match the content of `CHANGELOG.md`
-   1. Append the contributors from the `bump.sh` script
+   1. Append the contributors from the `scripts/release.sh` script
 
 1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
 
    1. One can determine if an update is needed when
       `git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
    1. Once run successfully, you should follow the link in the output to create a PR.
 
-1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
+1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
+   [`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
+   the release instructions in those repositories. `ruff-lsp` should always be updated
+   before `ruff-vscode`.
+
+   This step is generally not required for a patch release, but should always be done
+   for a minor release.
 
 ## Ecosystem CI
```
```diff
@@ -389,7 +383,7 @@ We have several ways of benchmarking and profiling Ruff:
 - Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
 - Profiling the linter on either the microbenchmarks or entire projects
 
-> [!NOTE]
+> **Note**
 > When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
 > applications, like web browsers). You may also want to switch your CPU to a "performance"
 > mode, if it exists, especially when benchmarking short-lived processes.
```
```diff
@@ -911,9 +905,5 @@ There are three ways in which an import can be categorized as "first-party":
    the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
    file `foo.py`.
 
-By default, `src` is set to the project root. In the above example, we'd want to set
-`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
-as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
-unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
-the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
-explicitly.
+By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
+This ensures that Ruff supports both flat and "src" layouts out of the box.
```
`Cargo.lock` (generated, 248 changes)
```diff
@@ -95,9 +95,9 @@ dependencies = [
 
 [[package]]
 name = "anstyle"
-version = "1.0.6"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
 
 [[package]]
 name = "anstyle-parse"
@@ -228,9 +228,9 @@ dependencies = [
 
 [[package]]
 name = "camino"
-version = "1.1.7"
+version = "1.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
+checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
 
 [[package]]
 name = "cast"
@@ -270,6 +270,12 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
 
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
 [[package]]
 name = "chic"
 version = "1.2.2"
@@ -288,7 +294,7 @@ dependencies = [
  "android-tzdata",
  "iana-time-zone",
  "num-traits",
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -320,9 +326,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.13"
+version = "4.5.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc"
+checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -330,9 +336,9 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.5.13"
+version = "4.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99"
+checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
 dependencies = [
  "anstream",
  "anstyle",
@@ -395,7 +401,7 @@ version = "3.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2f8c93eb5f77c9050c7750e14f13ef1033a40a0aac70c6371535b6763a01438c"
 dependencies = [
- "nix",
+ "nix 0.28.0",
  "terminfo",
  "thiserror",
  "which",
@@ -612,12 +618,12 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
 
 [[package]]
 name = "ctrlc"
-version = "3.4.4"
+version = "3.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
+checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
 dependencies = [
- "nix",
- "windows-sys 0.52.0",
+ "nix 0.29.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -820,14 +826,14 @@ dependencies = [
 
 [[package]]
 name = "filetime"
-version = "0.2.23"
+version = "0.2.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
+checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall",
- "windows-sys 0.52.0",
+ "libredox",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -1047,9 +1053,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
 dependencies = [
  "equivalent",
  "hashbrown",
@@ -1143,9 +1149,9 @@ dependencies = [
 
 [[package]]
 name = "is-macro"
-version = "0.3.5"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a85abdc13717906baccb5a1e435556ce0df215f242892f721dff62bf25288f"
+checksum = "2069faacbe981460232f880d26bf3c7634e322d49053aa48c27e3ae642f728f1"
 dependencies = [
  "Inflector",
  "proc-macro2",
@@ -1215,9 +1221,9 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
 
 [[package]]
 name = "js-sys"
-version = "0.3.69"
+version = "0.3.70"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
+checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a"
 dependencies = [
  "wasm-bindgen",
 ]
@@ -1250,9 +1256,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.155"
+version = "0.2.157"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
+checksum = "374af5f94e54fa97cf75e945cce8a6b201e88a1a07e688b47dfd2a59c66dbd86"
 
 [[package]]
 name = "libcst"
@@ -1297,6 +1303,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
 dependencies = [
  "bitflags 2.6.0",
  "libc",
+ "redox_syscall 0.5.3",
 ]
 
 [[package]]
@@ -1387,6 +1394,16 @@ dependencies = [
  "libmimalloc-sys",
 ]
 
+[[package]]
+name = "minicov"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c71e683cd655513b99affab7d317deb690528255a0d5f717f1024093c12b169"
+dependencies = [
+ "cc",
+ "walkdir",
+]
+
 [[package]]
 name = "minimal-lexical"
 version = "0.2.1"
@@ -1437,7 +1454,19 @@ checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
 dependencies = [
  "bitflags 2.6.0",
  "cfg-if",
- "cfg_aliases",
+ "cfg_aliases 0.1.1",
  "libc",
 ]
 
+[[package]]
+name = "nix"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
+dependencies = [
+ "bitflags 2.6.0",
+ "cfg-if",
+ "cfg_aliases 0.2.1",
+ "libc",
+]
+
@@ -1524,9 +1553,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
 
 [[package]]
 name = "ordermap"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c81974681ab4f0cc9fe49cad56f821d1cc67a08cd2caa9b5d58b0adaa5dd36d"
+checksum = "61d7d835be600a7ac71b24e39c92fe6fad9e818b3c71bfc379e3ba65e327d77f"
 dependencies = [
  "indexmap",
 ]
@@ -1564,7 +1593,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall",
+ "redox_syscall 0.4.1",
  "smallvec",
  "windows-targets 0.48.5",
 ]
@@ -1899,9 +1928,12 @@ dependencies = [
  "ruff_python_ast",
  "ruff_python_parser",
  "ruff_python_stdlib",
  "ruff_source_file",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
  "smallvec",
  "static_assertions",
  "tempfile",
  "tracing",
  "walkdir",
@@ -1918,9 +1950,9 @@ dependencies = [
  "libc",
  "lsp-server",
  "lsp-types",
- "red_knot_python_semantic",
+ "red_knot_workspace",
  "ruff_db",
  "ruff_linter",
  "ruff_notebook",
  "ruff_python_ast",
  "ruff_source_file",
@@ -1941,6 +1973,7 @@ dependencies = [
  "console_log",
  "js-sys",
  "log",
  "red_knot_python_semantic",
+ "red_knot_workspace",
  "ruff_db",
  "ruff_notebook",
@@ -1959,6 +1992,7 @@ dependencies = [
  "ruff_cache",
  "ruff_db",
  "ruff_python_ast",
  "ruff_text_size",
  "rustc-hash 2.0.0",
  "salsa",
  "thiserror",
@@ -1974,6 +2008,15 @@ dependencies = [
  "bitflags 1.3.2",
 ]
 
+[[package]]
+name = "redox_syscall"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+dependencies = [
+ "bitflags 2.6.0",
+]
+
 [[package]]
 name = "redox_users"
 version = "0.4.5"
@@ -2046,7 +2089,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.5.7"
+version = "0.6.1"
 dependencies = [
  "anyhow",
  "argfile",
@@ -2104,6 +2147,7 @@ dependencies = [
  "criterion",
  "mimalloc",
  "once_cell",
  "red_knot_python_semantic",
  "red_knot_workspace",
  "ruff_db",
  "ruff_linter",
@@ -2153,7 +2197,10 @@ dependencies = [
  "rustc-hash 2.0.0",
  "salsa",
  "tempfile",
  "thiserror",
  "tracing",
  "tracing-subscriber",
  "tracing-tree",
  "web-time",
  "zip",
 ]
@@ -2234,7 +2281,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.5.7"
+version = "0.6.1"
 dependencies = [
  "aho-corasick",
  "annotate-snippets 0.9.2",
@@ -2554,7 +2601,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.5.7"
+version = "0.6.1"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
@@ -2694,7 +2741,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
 [[package]]
 name = "salsa"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "append-only-vec",
  "arc-swap",
@@ -2714,12 +2761,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.1.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 
 [[package]]
 name = "salsa-macros"
 version = "0.18.0"
-source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -2781,9 +2828,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 
 [[package]]
 name = "serde"
-version = "1.0.204"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
+checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
 dependencies = [
  "serde_derive",
 ]
@@ -2801,9 +2848,9 @@ dependencies = [
 
 [[package]]
 name = "serde_derive"
-version = "1.0.204"
+version = "1.0.208"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
+checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2823,9 +2870,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.122"
+version = "1.0.125"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
+checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
 dependencies = [
  "itoa",
  "memchr",
@@ -2855,9 +2902,9 @@ dependencies = [
 
 [[package]]
 name = "serde_test"
-version = "1.0.176"
+version = "1.0.177"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab"
+checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed"
 dependencies = [
  "serde",
 ]
@@ -2984,9 +3031,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 
 [[package]]
 name = "syn"
-version = "2.0.72"
+version = "2.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
+checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3006,15 +3053,15 @@ dependencies = [
 
 [[package]]
 name = "tempfile"
-version = "3.11.0"
+version = "3.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53"
+checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
 dependencies = [
  "cfg-if",
  "fastrand",
  "once_cell",
  "rustix",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -3390,9 +3437,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
 
 [[package]]
 name = "ureq"
-version = "2.10.0"
+version = "2.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72139d247e5f97a3eff96229a7ae85ead5328a39efe76f8bf5a06313d505b6ea"
+checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
 dependencies = [
  "base64",
  "flate2",
@@ -3508,19 +3555,20 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
+checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
 dependencies = [
  "cfg-if",
  "once_cell",
  "wasm-bindgen-macro",
 ]
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
+checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
 dependencies = [
  "bumpalo",
  "log",
@@ -3533,9 +3581,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.42"
+version = "0.4.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
+checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed"
 dependencies = [
  "cfg-if",
  "js-sys",
@@ -3545,9 +3593,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
+checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -3555,9 +3603,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
+checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3568,18 +3616,19 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.92"
+version = "0.2.93"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
+checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
 
 [[package]]
 name = "wasm-bindgen-test"
-version = "0.3.42"
+version = "0.3.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9bf62a58e0780af3e852044583deee40983e5886da43a271dd772379987667b"
+checksum = "68497a05fb21143a08a7d24fc81763384a3072ee43c44e86aad1744d6adef9d9"
 dependencies = [
  "console_error_panic_hook",
  "js-sys",
+ "minicov",
  "scoped-tls",
  "wasm-bindgen",
  "wasm-bindgen-futures",
@@ -3588,9 +3637,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-test-macro"
-version = "0.3.42"
+version = "0.3.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7f89739351a2e03cb94beb799d47fb2cac01759b40ec441f7de39b00cbf7ef0"
+checksum = "4b8220be1fa9e4c889b30fd207d4906657e7e90b12e0e6b0c8b8d8709f5de021"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3684,7 +3733,7 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -3702,7 +3751,16 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
@@ -3722,18 +3780,18 @@ dependencies = [
 
 [[package]]
 name = "windows-targets"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
- "windows_aarch64_gnullvm 0.52.5",
- "windows_aarch64_msvc 0.52.5",
- "windows_i686_gnu 0.52.5",
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
  "windows_i686_gnullvm",
- "windows_i686_msvc 0.52.5",
- "windows_x86_64_gnu 0.52.5",
- "windows_x86_64_gnullvm 0.52.5",
- "windows_x86_64_msvc 0.52.5",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
 ]
 
 [[package]]
@@ -3744,9 +3802,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
 [[package]]
 name = "windows_aarch64_msvc"
@@ -3756,9 +3814,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
 [[package]]
 name = "windows_i686_gnu"
@@ -3768,15 +3826,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
 
 [[package]]
 name = "windows_i686_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
 [[package]]
 name = "windows_i686_msvc"
@@ -3786,9 +3844,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
 [[package]]
 name = "windows_x86_64_gnu"
@@ -3798,9 +3856,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -3810,9 +3868,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
 [[package]]
 name = "windows_x86_64_msvc"
@@ -3822,9 +3880,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
 name = "winnow"
```
```diff
@@ -108,7 +108,7 @@ rand = { version = "0.8.5" }
 rayon = { version = "1.10.0" }
 regex = { version = "1.10.2" }
 rustc-hash = { version = "2.0.0" }
-salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
 schemars = { version = "0.8.16" }
 seahash = { version = "4.1.0" }
 serde = { version = "1.0.197", features = ["derive"] }
```
````diff
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.6.1/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.6.1/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.5.7
+  rev: v0.6.1
   hooks:
     # Run the linter.
     - id: ruff
````
@@ -72,6 +72,26 @@

runs or when restoring from a persistent cache. This can be confusing for users who
don't understand why a specific lint violation isn't raised. Instead, change your
query to return the failure as part of the query's result or use a Salsa accumulator.
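For readers unfamiliar with that pattern, here is a minimal, hypothetical sketch in plain Rust (no Salsa macros; `Diagnostic`, `Parsed`, and `parse_query` are illustrative names, not ruff APIs). Because the failures live inside the memoized result, a cache hit replays them exactly like a fresh run:

```rust
/// Illustrative diagnostic type; not a ruff API.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Diagnostic {
    pub message: String,
}

/// A query result that carries its failures with it. A cached hit returns
/// the same `diagnostics`, so consumers see them on every run, not only on
/// the first (uncached) execution where a log line would have fired.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Parsed {
    pub lines: Vec<String>,
    pub diagnostics: Vec<Diagnostic>,
}

pub fn parse_query(source: &str) -> Parsed {
    let mut diagnostics = Vec::new();
    let lines: Vec<String> = source.lines().map(str::to_owned).collect();
    for (i, line) in lines.iter().enumerate() {
        if line.len() > 88 {
            diagnostics.push(Diagnostic {
                message: format!("line {} is too long ({} characters)", i + 1, line.len()),
            });
        }
    }
    Parsed { lines, diagnostics }
}
```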
## Tracing in tests

You can use `ruff_db::testing::setup_logging` or `ruff_db::testing::setup_logging_with_filter` to set up logging in tests.

```rust
use ruff_db::testing::setup_logging;

#[test]
fn test() {
    let _logging = setup_logging();

    tracing::info!("This message will be printed to stderr");
}
```

Note: Most test runners capture stderr and only show its output when a test fails.

Note also that `setup_logging` only sets up logging for the current thread because [`set_global_default`](https://docs.rs/tracing/latest/tracing/subscriber/fn.set_global_default.html) can only be called **once**.
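A minimal sketch of the per-thread mechanism that makes this work, using only public `tracing` and `tracing-subscriber` APIs (the helper name `thread_local_logging` is illustrative): `tracing::subscriber::set_default` installs a default for the current thread only and returns a guard, unlike `set_global_default`, which may only succeed once per process.

```rust
use tracing::subscriber::DefaultGuard;
use tracing_subscriber::FmtSubscriber;

/// Install a subscriber for the current thread only. Dropping the returned
/// guard restores the previous default, so parallel test threads don't race.
fn thread_local_logging() -> DefaultGuard {
    let subscriber = FmtSubscriber::builder()
        .with_writer(std::io::stderr) // matches the stderr behavior described above
        .finish();
    // Unlike `set_global_default`, this can be called once per thread.
    tracing::subscriber::set_default(subscriber)
}
```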
## Release builds

`trace!` events are removed in release builds.
```diff
@@ -5,8 +5,8 @@ use colored::Colorize;
 use std::fmt;
 use std::fs::File;
 use std::io::BufWriter;
-use tracing::log::LevelFilter;
 use tracing::{Event, Subscriber};
+use tracing_subscriber::filter::LevelFilter;
 use tracing_subscriber::fmt::format::Writer;
 use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
 use tracing_subscriber::registry::LookupSpan;
@@ -60,10 +60,10 @@ pub(crate) enum VerbosityLevel {
 impl VerbosityLevel {
     const fn level_filter(self) -> LevelFilter {
         match self {
-            VerbosityLevel::Default => LevelFilter::Warn,
-            VerbosityLevel::Verbose => LevelFilter::Info,
-            VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
-            VerbosityLevel::Trace => LevelFilter::Trace,
+            VerbosityLevel::Default => LevelFilter::WARN,
+            VerbosityLevel::Verbose => LevelFilter::INFO,
+            VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
+            VerbosityLevel::Trace => LevelFilter::TRACE,
         }
     }
@@ -88,7 +88,7 @@ pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuar
     match level {
         VerbosityLevel::Default => {
             // Show warning traces
-            EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
+            EnvFilter::default().add_directive(LevelFilter::WARN.into())
         }
         level => {
             let level_filter = level.level_filter();
```
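The constant rename in this hunk is mechanical: the `log` crate's `LevelFilter` (re-exported as `tracing::log::LevelFilter` when `tracing`'s `log` feature is enabled, as the old import implies) is an enum with Title-case variants, while `tracing_subscriber::filter::LevelFilter` exposes SCREAMING_CASE associated constants. A hedged sketch of the one-to-one mapping:

```rust
use tracing_subscriber::filter::LevelFilter;

/// Illustrative only: maps the old `log`-style filter to the
/// `tracing-subscriber` equivalent used after this change.
fn to_subscriber_filter(level: tracing::log::LevelFilter) -> LevelFilter {
    match level {
        tracing::log::LevelFilter::Off => LevelFilter::OFF,
        tracing::log::LevelFilter::Error => LevelFilter::ERROR,
        tracing::log::LevelFilter::Warn => LevelFilter::WARN,
        tracing::log::LevelFilter::Info => LevelFilter::INFO,
        tracing::log::LevelFilter::Debug => LevelFilter::DEBUG,
        tracing::log::LevelFilter::Trace => LevelFilter::TRACE,
    }
}
```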
```diff
@@ -7,13 +7,13 @@ use colored::Colorize;
 use crossbeam::channel as crossbeam_channel;
 use salsa::plumbing::ZalsaDatabase;
 
+use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
 use red_knot_server::run_server;
 use red_knot_workspace::db::RootDatabase;
-use red_knot_workspace::site_packages::site_packages_dirs_of_venv;
+use red_knot_workspace::site_packages::VirtualEnvironment;
 use red_knot_workspace::watch;
 use red_knot_workspace::watch::WorkspaceWatcher;
 use red_knot_workspace::workspace::WorkspaceMetadata;
-use ruff_db::program::{ProgramSettings, SearchPathSettings};
 use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
 use target_version::TargetVersion;
@@ -164,16 +164,9 @@ fn run() -> anyhow::Result<ExitStatus> {
 
     // TODO: Verify the remaining search path settings eagerly.
     let site_packages = venv_path
-        .map(|venv_path| {
-            let venv_path = SystemPath::absolute(venv_path, &cli_base_path);
-
-            if system.is_directory(&venv_path) {
-                Ok(site_packages_dirs_of_venv(&venv_path, &system)?)
-            } else {
-                Err(anyhow!(
-                    "Provided venv-path {venv_path} is not a directory!"
-                ))
-            }
+        .map(|path| {
+            VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
+                .and_then(|venv| venv.site_packages_directories(&system))
         })
         .transpose()?
         .unwrap_or_default();
@@ -191,7 +184,7 @@ fn run() -> anyhow::Result<ExitStatus> {
 
     // TODO: Use the `program_settings` to compute the key for the database's persistent
     // cache and load the cache if it exists.
-    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
+    let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
 
     let (main_loop, main_loop_cancellation_token) = MainLoop::new();
```
```diff
@@ -13,22 +13,36 @@ pub enum TargetVersion {
     Py313,
 }
 
-impl std::fmt::Display for TargetVersion {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        ruff_db::program::TargetVersion::from(*self).fmt(f)
-    }
-}
-
-impl From<TargetVersion> for ruff_db::program::TargetVersion {
-    fn from(value: TargetVersion) -> Self {
-        match value {
-            TargetVersion::Py37 => Self::Py37,
-            TargetVersion::Py38 => Self::Py38,
-            TargetVersion::Py39 => Self::Py39,
-            TargetVersion::Py310 => Self::Py310,
-            TargetVersion::Py311 => Self::Py311,
-            TargetVersion::Py312 => Self::Py312,
-            TargetVersion::Py313 => Self::Py313,
+impl TargetVersion {
+    const fn as_str(self) -> &'static str {
+        match self {
+            Self::Py37 => "py37",
+            Self::Py38 => "py38",
+            Self::Py39 => "py39",
+            Self::Py310 => "py310",
+            Self::Py311 => "py311",
+            Self::Py312 => "py312",
+            Self::Py313 => "py313",
+        }
+    }
+}
+
+impl std::fmt::Display for TargetVersion {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str(self.as_str())
+    }
+}
+
+impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
+    fn from(value: TargetVersion) -> Self {
+        match value {
+            TargetVersion::Py37 => Self::PY37,
+            TargetVersion::Py38 => Self::PY38,
+            TargetVersion::Py39 => Self::PY39,
+            TargetVersion::Py310 => Self::PY310,
+            TargetVersion::Py311 => Self::PY311,
+            TargetVersion::Py312 => Self::PY312,
+            TargetVersion::Py313 => Self::PY313,
         }
     }
 }
```
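A short, hypothetical usage sketch of the refactored conversions above (it assumes the `TargetVersion` enum and the impls shown in the diff are in scope):

```rust
#[test]
fn target_version_conversions() {
    let version = TargetVersion::Py312;
    // `Display` now goes through `as_str` instead of converting to
    // `ruff_db::program::TargetVersion` first.
    assert_eq!(version.to_string(), "py312");
    // The CLI-facing enum converts into the semantic model's version type.
    let _python = red_knot_python_semantic::PythonVersion::from(version);
}
```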
@@ -4,15 +4,15 @@ use std::io::Write;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use salsa::Setter;
|
||||
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName};
|
||||
use red_knot_python_semantic::{
|
||||
resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
|
||||
};
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
|
||||
use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
use ruff_db::program::{Program, ProgramSettings, SearchPathSettings, TargetVersion};
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::Upcast;
|
||||
@@ -25,6 +25,7 @@ struct TestCase {
     /// We need to hold on to it in the test case or the temp files get deleted.
     _temp_dir: tempfile::TempDir,
     root_dir: SystemPathBuf,
+    search_path_settings: SearchPathSettings,
 }

 impl TestCase {
@@ -107,24 +108,25 @@ impl TestCase {
     fn update_search_path_settings(
         &mut self,
         f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
-    ) {
+    ) -> anyhow::Result<()> {
         let program = Program::get(self.db());
-        let search_path_settings = program.search_paths(self.db());
-
-        let new_settings = f(search_path_settings);
+        let new_settings = f(&self.search_path_settings);

-        program.set_search_paths(&mut self.db).to(new_settings);
+        program.update_search_paths(&mut self.db, new_settings.clone())?;
+        self.search_path_settings = new_settings;

         if let Some(watcher) = &mut self.watcher {
             watcher.update(&self.db);
             assert!(!watcher.has_errored_paths());
         }
+
+        Ok(())
     }

     fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
         let package = self.db().workspace().package(self.db(), path).unwrap();
         let files = package.files(self.db());
         let files = files.read();
         let mut collected: Vec<_> = files.into_iter().collect();
         collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
         collected
@@ -220,24 +222,24 @@ where
         let system = OsSystem::new(&workspace_path);

         let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
-        let search_paths = create_search_paths(&root_path, workspace.root());
+        let search_path_settings = create_search_paths(&root_path, workspace.root());

-        for path in search_paths
+        for path in search_path_settings
             .extra_paths
             .iter()
-            .chain(search_paths.site_packages.iter())
-            .chain(search_paths.custom_typeshed.iter())
+            .chain(search_path_settings.site_packages.iter())
+            .chain(search_path_settings.custom_typeshed.iter())
         {
             std::fs::create_dir_all(path.as_std_path())
                 .with_context(|| format!("Failed to create search path '{path}'"))?;
         }

         let settings = ProgramSettings {
-            target_version: TargetVersion::default(),
-            search_paths,
+            target_version: PythonVersion::default(),
+            search_paths: search_path_settings.clone(),
         };

-        let db = RootDatabase::new(workspace, settings, system);
+        let db = RootDatabase::new(workspace, settings, system)?;

         let (sender, receiver) = crossbeam::channel::unbounded();
         let watcher = directory_watcher(move |events| sender.send(events).unwrap())
@@ -252,6 +254,7 @@ where
             watcher: Some(watcher),
             _temp_dir: temp_dir,
             root_dir: root_path,
+            search_path_settings,
         };

         // Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -736,7 +739,8 @@ fn add_search_path() -> anyhow::Result<()> {
     case.update_search_path_settings(|settings| SearchPathSettings {
         site_packages: vec![site_packages.clone()],
         ..settings.clone()
-    });
+    })
+    .expect("Search path settings to be valid");

     std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;

@@ -766,7 +770,8 @@ fn remove_search_path() -> anyhow::Result<()> {
     case.update_search_path_settings(|settings| SearchPathSettings {
         site_packages: vec![],
         ..settings.clone()
-    });
+    })
+    .expect("Search path settings to be valid");

     std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;

@@ -15,8 +15,10 @@ ruff_db = { workspace = true }
 ruff_index = { workspace = true }
 ruff_python_ast = { workspace = true }
+ruff_python_stdlib = { workspace = true }
 ruff_source_file = { workspace = true }
 ruff_text_size = { workspace = true }

+anyhow = { workspace = true }
 bitflags = { workspace = true }
 camino = { workspace = true }
 compact_str = { workspace = true }
@@ -27,6 +29,8 @@ salsa = { workspace = true }
 tracing = { workspace = true }
 rustc-hash = { workspace = true }
 hashbrown = { workspace = true }
+smallvec = { workspace = true }
+static_assertions = { workspace = true }

 [build-dependencies]
 path-slash = { workspace = true }
@@ -34,12 +38,14 @@ walkdir = { workspace = true }
 zip = { workspace = true, features = ["zstd", "deflate"] }

 [dev-dependencies]
+ruff_db = { workspace = true, features = ["os", "testing"] }
+ruff_python_parser = { workspace = true }
+
 anyhow = { workspace = true }
 insta = { workspace = true }
 tempfile = { workspace = true }
 walkdir = { workspace = true }
 zip = { workspace = true }
-ruff_python_parser = { workspace = true }

 [lints]
 workspace = true

@@ -1,15 +1,18 @@
+use ruff_db::files::File;
 use ruff_db::{Db as SourceDb, Upcast};

 /// Database giving access to semantic information about a Python program.
 #[salsa::db]
-pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
+pub trait Db: SourceDb + Upcast<dyn SourceDb> {
+    fn is_file_open(&self, file: File) -> bool;
+}

 #[cfg(test)]
 pub(crate) mod tests {
     use std::sync::Arc;

     use crate::module_resolver::vendored_typeshed_stubs;
-    use ruff_db::files::Files;
+    use ruff_db::files::{File, Files};
     use ruff_db::system::{DbWithTestSystem, System, TestSystem};
     use ruff_db::vendored::VendoredFileSystem;
     use ruff_db::{Db as SourceDb, Upcast};
@@ -91,7 +94,11 @@ pub(crate) mod tests {
     }

     #[salsa::db]
-    impl Db for TestDb {}
+    impl Db for TestDb {
+        fn is_file_open(&self, file: File) -> bool {
+            !file.path(self).is_vendored_path()
+        }
+    }

     #[salsa::db]
     impl salsa::Database for TestDb {

@@ -5,6 +5,8 @@ use rustc_hash::FxHasher;
 pub use db::Db;
 pub use module_name::ModuleName;
 pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
+pub use program::{Program, ProgramSettings, SearchPathSettings};
+pub use python_version::PythonVersion;
 pub use semantic_model::{HasTy, SemanticModel};

 pub mod ast_node_ref;
@@ -13,6 +15,8 @@ mod db;
 mod module_name;
 mod module_resolver;
 mod node_key;
+mod program;
+mod python_version;
 pub mod semantic_index;
 mod semantic_model;
 pub mod types;

@@ -168,6 +168,24 @@ impl ModuleName {
         };
         Some(Self(name))
     }
+
+    /// Extend `self` with the components of `other`
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use red_knot_python_semantic::ModuleName;
+    ///
+    /// let mut module_name = ModuleName::new_static("foo").unwrap();
+    /// module_name.extend(&ModuleName::new_static("bar").unwrap());
+    /// assert_eq!(&module_name, "foo.bar");
+    /// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
+    /// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
+    /// ```
+    pub fn extend(&mut self, other: &ModuleName) {
+        self.0.push('.');
+        self.0.push_str(other);
+    }
 }

 impl Deref for ModuleName {

@@ -2,11 +2,13 @@ use std::iter::FusedIterator;

 pub(crate) use module::Module;
 pub use resolver::resolve_module;
+pub(crate) use resolver::{file_to_module, SearchPaths};
 use ruff_db::system::SystemPath;
 pub use typeshed::vendored_typeshed_stubs;

+use crate::module_resolver::resolver::search_paths;
 use crate::Db;
-use resolver::{module_resolution_settings, SearchPathIterator};
+use resolver::SearchPathIterator;

 mod module;
 mod path;
@@ -20,7 +22,7 @@ mod testing;
 /// Returns an iterator over all search paths pointing to a system path
 pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
     SystemModuleSearchPathsIter {
-        inner: module_resolution_settings(db).search_paths(db),
+        inner: search_paths(db),
     }
 }

@@ -77,3 +77,9 @@ pub enum ModuleKind {
     /// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
     Package,
 }
+
+impl ModuleKind {
+    pub const fn is_package(self) -> bool {
+        matches!(self, ModuleKind::Package)
+    }
+}

@@ -620,14 +620,13 @@ impl PartialEq<SearchPath> for VendoredPathBuf {

 #[cfg(test)]
 mod tests {
-    use ruff_db::program::TargetVersion;
     use ruff_db::Db;

     use crate::db::tests::TestDb;
-
-    use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};

     use super::*;
+    use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
+    use crate::python_version::PythonVersion;

     impl ModulePath {
         #[must_use]
@@ -867,7 +866,7 @@ mod tests {

     fn typeshed_test_case(
         typeshed: MockedTypeshed,
-        target_version: TargetVersion,
+        target_version: PythonVersion,
     ) -> (TestDb, SearchPath) {
         let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
             .with_custom_typeshed(typeshed)
@@ -879,11 +878,11 @@ mod tests {
     }

     fn py38_typeshed_test_case(typeshed: MockedTypeshed) -> (TestDb, SearchPath) {
-        typeshed_test_case(typeshed, TargetVersion::Py38)
+        typeshed_test_case(typeshed, PythonVersion::PY38)
     }

     fn py39_typeshed_test_case(typeshed: MockedTypeshed) -> (TestDb, SearchPath) {
-        typeshed_test_case(typeshed, TargetVersion::Py39)
+        typeshed_test_case(typeshed, PythonVersion::PY39)
     }

     #[test]
@@ -899,7 +898,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let asyncio_regular_package = stdlib_path.join("asyncio");
         assert!(asyncio_regular_package.is_directory(&resolver));
@@ -927,7 +926,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let xml_namespace_package = stdlib_path.join("xml");
         assert!(xml_namespace_package.is_directory(&resolver));
@@ -949,7 +948,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let functools_module = stdlib_path.join("functools.pyi");
         assert!(functools_module.to_file(&resolver).is_some());
@@ -965,7 +964,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let collections_regular_package = stdlib_path.join("collections");
         assert_eq!(collections_regular_package.to_file(&resolver), None);
@@ -981,7 +980,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let importlib_namespace_package = stdlib_path.join("importlib");
         assert_eq!(importlib_namespace_package.to_file(&resolver), None);
@@ -1002,7 +1001,7 @@ mod tests {
         };

         let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py38);
+        let resolver = ResolverState::new(&db, PythonVersion::PY38);

         let non_existent = stdlib_path.join("doesnt_even_exist");
         assert_eq!(non_existent.to_file(&resolver), None);
@@ -1030,7 +1029,7 @@ mod tests {
         };

         let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py39);
+        let resolver = ResolverState::new(&db, PythonVersion::PY39);

         // Since we've set the target version to Py39,
         // `collections` should now exist as a directory, according to VERSIONS...
@@ -1059,7 +1058,7 @@ mod tests {
         };

         let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py39);
+        let resolver = ResolverState::new(&db, PythonVersion::PY39);

         // The `importlib` directory now also exists
         let importlib_namespace_package = stdlib_path.join("importlib");
@@ -1083,7 +1082,7 @@ mod tests {
         };

         let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
-        let resolver = ResolverState::new(&db, TargetVersion::Py39);
+        let resolver = ResolverState::new(&db, PythonVersion::PY39);

         // The `xml` package no longer exists on py39:
         let xml_namespace_package = stdlib_path.join("xml");

@@ -1,14 +1,15 @@
 use std::borrow::Cow;
 use std::iter::FusedIterator;

-use ruff_db::files::{File, FilePath, FileRootKind};
-use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
-use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
-use ruff_db::vendored::VendoredPath;
 use rustc_hash::{FxBuildHasher, FxHashSet};

+use ruff_db::files::{File, FilePath, FileRootKind};
+use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
+use ruff_db::vendored::VendoredPath;
+
 use crate::db::Db;
 use crate::module_name::ModuleName;
+use crate::{Program, SearchPathSettings};

 use super::module::{Module, ModuleKind};
 use super::path::{ModulePath, SearchPath, SearchPathValidationError};
@@ -40,7 +41,7 @@ pub(crate) fn resolve_module_query<'db>(

     let module = Module::new(name.clone(), kind, search_path, module_file);

-    tracing::debug!(
+    tracing::trace!(
         "Resolved module '{name}' to '{path}'.",
         path = module_file.path(db)
     );
@@ -84,9 +85,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
         FilePath::SystemVirtual(_) => return None,
     };

-    let settings = module_resolution_settings(db);
-
-    let mut search_paths = settings.search_paths(db);
+    let mut search_paths = search_paths(db);

     let module_name = loop {
         let candidate = search_paths.next()?;
@@ -119,92 +118,122 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
     }
 }

-/// Validate and normalize the raw settings given by the user
-/// into settings we can use for module resolution
-///
-/// This method also implements the typing spec's [module resolution order].
-///
-/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
-fn try_resolve_module_resolution_settings(
-    db: &dyn Db,
-) -> Result<ModuleResolutionSettings, SearchPathValidationError> {
-    let program = Program::get(db.upcast());
-
-    let SearchPathSettings {
-        extra_paths,
-        src_root,
-        custom_typeshed,
-        site_packages,
-    } = program.search_paths(db.upcast());
-
-    if !extra_paths.is_empty() {
-        tracing::info!("Extra search paths: {extra_paths:?}");
-    }
-
-    if let Some(custom_typeshed) = custom_typeshed {
-        tracing::info!("Custom typeshed directory: {custom_typeshed}");
-    }
-
-    if !site_packages.is_empty() {
-        tracing::info!("Site-packages directories: {site_packages:?}");
-    }
-
-    let system = db.system();
-    let files = db.files();
-
-    let mut static_search_paths = vec![];
-
-    for path in extra_paths {
-        files.try_add_root(db.upcast(), path, FileRootKind::LibrarySearchPath);
-        static_search_paths.push(SearchPath::extra(system, path.clone())?);
-    }
-
-    static_search_paths.push(SearchPath::first_party(system, src_root.clone())?);
-
-    static_search_paths.push(if let Some(custom_typeshed) = custom_typeshed.as_ref() {
-        files.try_add_root(
-            db.upcast(),
-            custom_typeshed,
-            FileRootKind::LibrarySearchPath,
-        );
-        SearchPath::custom_stdlib(db, custom_typeshed.clone())?
-    } else {
-        SearchPath::vendored_stdlib()
-    });
-
-    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
-
-    let target_version = program.target_version(db.upcast());
-    tracing::info!("Target version: {target_version}");
-
-    // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
-    // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
-    // as module resolution paths simultaneously.)
-    //
-    // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
-    // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
-    let mut seen_paths =
-        FxHashSet::with_capacity_and_hasher(static_search_paths.len(), FxBuildHasher);
-
-    static_search_paths.retain(|path| {
-        if let Some(path) = path.as_system_path() {
-            seen_paths.insert(path.to_path_buf())
-        } else {
-            true
-        }
-    });
-
-    Ok(ModuleResolutionSettings {
-        target_version,
-        static_search_paths,
-        site_packages_paths: site_packages.to_owned(),
-    })
-}
+pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
+    Program::get(db).search_paths(db).iter(db)
+}

-#[salsa::tracked(return_ref)]
-pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
-    // TODO proper error handling if this returns an error:
-    try_resolve_module_resolution_settings(db).unwrap()
-}
+#[derive(Debug, PartialEq, Eq, Default)]
+pub(crate) struct SearchPaths {
+    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
+    /// These shouldn't ever change unless the config settings themselves change.
+    static_paths: Vec<SearchPath>,
+
+    /// site-packages paths are not included in the above field:
+    /// if there are multiple site-packages paths, editable installations can appear
+    /// *between* the site-packages paths on `sys.path` at runtime.
+    /// That means we can't know where a second or third `site-packages` path should sit
+    /// in terms of module-resolution priority until we've discovered the editable installs
+    /// for the first `site-packages` path
+    site_packages: Vec<SearchPath>,
+}
+
+impl SearchPaths {
+    /// Validate and normalize the raw settings given by the user
+    /// into settings we can use for module resolution
+    ///
+    /// This method also implements the typing spec's [module resolution order].
+    ///
+    /// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
+    pub(crate) fn from_settings(
+        db: &dyn Db,
+        settings: SearchPathSettings,
+    ) -> Result<Self, SearchPathValidationError> {
+        let SearchPathSettings {
+            extra_paths,
+            src_root,
+            custom_typeshed,
+            site_packages: site_packages_paths,
+        } = settings;
+
+        let system = db.system();
+        let files = db.files();
+
+        let mut static_paths = vec![];
+
+        for path in extra_paths {
+            tracing::debug!("Adding static extra search-path '{path}'");
+
+            let search_path = SearchPath::extra(system, path)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            static_paths.push(search_path);
+        }
+
+        tracing::debug!("Adding first-party search path '{src_root}'");
+        static_paths.push(SearchPath::first_party(system, src_root)?);
+
+        static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
+            tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
+
+            let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            search_path
+        } else {
+            SearchPath::vendored_stdlib()
+        });
+
+        let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
+
+        for path in site_packages_paths {
+            tracing::debug!("Adding site-packages search path '{path}'");
+            let search_path = SearchPath::site_packages(system, path)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            site_packages.push(search_path);
+        }
+
+        // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
+
+        // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
+        // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
+        // as module resolution paths simultaneously.)
+        //
+        // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
+        //
+        // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
+        let mut seen_paths = FxHashSet::with_capacity_and_hasher(static_paths.len(), FxBuildHasher);
+
+        static_paths.retain(|path| {
+            if let Some(path) = path.as_system_path() {
+                seen_paths.insert(path.to_path_buf())
+            } else {
+                true
+            }
+        });
+
+        Ok(SearchPaths {
+            static_paths,
+            site_packages,
+        })
+    }
+
+    pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
+        SearchPathIterator {
+            db,
+            static_paths: self.static_paths.iter(),
+            dynamic_paths: None,
+        }
+    }
+}

 /// Collect all dynamic search paths. For each `site-packages` path:
@@ -217,19 +246,20 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
 /// module-resolution priority.
 #[salsa::tracked(return_ref)]
 pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
-    let ModuleResolutionSettings {
-        target_version: _,
-        static_search_paths,
-        site_packages_paths,
-    } = module_resolution_settings(db);
+    tracing::debug!("Resolving dynamic module resolution paths");
+
+    let SearchPaths {
+        static_paths,
+        site_packages,
+    } = Program::get(db).search_paths(db);

     let mut dynamic_paths = Vec::new();

-    if site_packages_paths.is_empty() {
+    if site_packages.is_empty() {
         return dynamic_paths;
     }

-    let mut existing_paths: FxHashSet<_> = static_search_paths
+    let mut existing_paths: FxHashSet<_> = static_paths
         .iter()
         .filter_map(|path| path.as_system_path())
         .map(Cow::Borrowed)
@@ -238,15 +268,19 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
     let files = db.files();
     let system = db.system();

-    for site_packages_dir in site_packages_paths {
+    for site_packages_search_path in site_packages {
+        let site_packages_dir = site_packages_search_path
+            .as_system_path()
+            .expect("Expected site package path to be a system path");
+
         if !existing_paths.insert(Cow::Borrowed(site_packages_dir)) {
             continue;
         }
-        let site_packages_root = files.try_add_root(
-            db.upcast(),
-            site_packages_dir,
-            FileRootKind::LibrarySearchPath,
-        );
+
+        let site_packages_root = files
+            .root(db.upcast(), site_packages_dir)
+            .expect("Site-package root to have been created.");

         // This query needs to be re-executed each time a `.pth` file
         // is added, modified or removed from the `site-packages` directory.
         // However, we don't use Salsa queries to read the source text of `.pth` files;
@@ -254,8 +288,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
         // site-package directory's revision.
         site_packages_root.revision(db.upcast());

-        dynamic_paths
-            .push(SearchPath::site_packages(system, site_packages_dir.to_owned()).unwrap());
+        dynamic_paths.push(site_packages_search_path.clone());

         // As well as modules installed directly into `site-packages`,
         // the directory may also contain `.pth` files.
@@ -263,22 +296,38 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
         // containing a (relative or absolute) path.
         // Each of these paths may point to an editable install of a package,
        // so should be considered an additional search path.
-        let Ok(pth_file_iterator) = PthFileIterator::new(db, site_packages_dir) else {
-            continue;
+        let pth_file_iterator = match PthFileIterator::new(db, site_packages_dir) {
+            Ok(iterator) => iterator,
+            Err(error) => {
+                tracing::warn!(
+                    "Failed to search for editable installation in {site_packages_dir}: {error}"
+                );
+                continue;
+            }
         };

         // The Python documentation specifies that `.pth` files in `site-packages`
         // are processed in alphabetical order, so collecting and then sorting is necessary.
         // https://docs.python.org/3/library/site.html#module-site
         let mut all_pth_files: Vec<PthFile> = pth_file_iterator.collect();
-        all_pth_files.sort_by(|a, b| a.path.cmp(&b.path));
+        all_pth_files.sort_unstable_by(|a, b| a.path.cmp(&b.path));

-        for pth_file in &all_pth_files {
-            for installation in pth_file.editable_installations() {
-                if existing_paths.insert(Cow::Owned(
-                    installation.as_system_path().unwrap().to_path_buf(),
-                )) {
-                    dynamic_paths.push(installation);
+        let installations = all_pth_files.iter().flat_map(PthFile::items);
+
+        for installation in installations {
+            if existing_paths.insert(Cow::Owned(installation.clone())) {
+                match SearchPath::editable(system, installation) {
+                    Ok(search_path) => {
+                        tracing::debug!(
+                            "Adding editable installation to module resolution path {path}",
+                            path = search_path.as_system_path().unwrap()
+                        );
+                        dynamic_paths.push(search_path);
+                    }
+
+                    Err(error) => {
+                        tracing::debug!("Skipping editable installation: {error}");
+                    }
                 }
             }
         }
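To make the `.pth` handling concrete: `PthFile::items` now yields raw candidate paths, and validation moves into this loop, which logs and skips anything `SearchPath::editable` rejects instead of silently dropping it. A hypothetical `.pth` file and its effect (paths invented for illustration):

    # /venv/site-packages/_my_pkg.pth, processed alphabetically with its siblings
    /repos/my_pkg/src      <- absolute line: yielded as-is
    ../outside             <- relative line: absolutized against the site-packages directory

Each yielded line is absolutized with `SystemPath::absolute`, deduplicated against paths already seen, and then either added as an editable search path or skipped with a debug log.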
@@ -324,7 +373,6 @@ impl<'db> FusedIterator for SearchPathIterator<'db> {}
 /// One or more lines in a `.pth` file may be a (relative or absolute)
 /// path that represents an editable installation of a package.
 struct PthFile<'db> {
     system: &'db dyn System,
     path: SystemPathBuf,
     contents: String,
     site_packages: &'db SystemPath,
@@ -333,9 +381,8 @@ struct PthFile<'db> {
 impl<'db> PthFile<'db> {
     /// Yield paths in this `.pth` file that appear to represent editable installations,
     /// and should therefore be added as module-resolution search paths.
-    fn editable_installations(&'db self) -> impl Iterator<Item = SearchPath> + 'db {
+    fn items(&'db self) -> impl Iterator<Item = SystemPathBuf> + 'db {
         let PthFile {
             system,
             path: _,
             contents,
             site_packages,
@@ -354,8 +401,8 @@ impl<'db> PthFile<'db> {
             {
                 return None;
             }
-            let possible_editable_install = SystemPath::absolute(line, site_packages);
-            SearchPath::editable(*system, possible_editable_install).ok()
+
+            Some(SystemPath::absolute(line, site_packages))
         })
     }
 }
@@ -404,12 +451,15 @@ impl<'db> Iterator for PthFileIterator<'db> {
                 continue;
             }

-            let Ok(contents) = db.system().read_to_string(&path) else {
-                continue;
+            let contents = match system.read_to_string(&path) {
+                Ok(contents) => contents,
+                Err(error) => {
+                    tracing::warn!("Failed to read .pth file '{path}': {error}");
+                    continue;
+                }
             };

             return Some(PthFile {
                 system,
                 path,
                 contents,
                 site_packages,
@@ -418,38 +468,6 @@ impl<'db> Iterator for PthFileIterator<'db> {
     }
 }

-/// Validated and normalized module-resolution settings.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct ModuleResolutionSettings {
-    target_version: TargetVersion,
-
-    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
-    /// These shouldn't ever change unless the config settings themselves change.
-    static_search_paths: Vec<SearchPath>,
-
-    /// site-packages paths are not included in the above field:
-    /// if there are multiple site-packages paths, editable installations can appear
-    /// *between* the site-packages paths on `sys.path` at runtime.
-    /// That means we can't know where a second or third `site-packages` path should sit
-    /// in terms of module-resolution priority until we've discovered the editable installs
-    /// for the first `site-packages` path
-    site_packages_paths: Vec<SystemPathBuf>,
-}
-
-impl ModuleResolutionSettings {
-    fn target_version(&self) -> TargetVersion {
-        self.target_version
-    }
-
-    pub(crate) fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
-        SearchPathIterator {
-            db,
-            static_paths: self.static_search_paths.iter(),
-            dynamic_paths: None,
-        }
-    }
-}
-
 /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
 ///
 /// This is needed because Salsa requires that all query arguments are salsa ingredients.
@@ -462,14 +480,13 @@ struct ModuleNameIngredient<'db> {
 /// Given a module name and a list of search paths in which to lookup modules,
 /// attempt to resolve the module name
 fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
-    let resolver_settings = module_resolution_settings(db);
-    let target_version = resolver_settings.target_version();
+    let program = Program::get(db);
+    let target_version = program.target_version(db);
     let resolver_state = ResolverState::new(db, target_version);
-    let (_, minor_version) = target_version.as_tuple();
     let is_builtin_module =
-        ruff_python_stdlib::sys::is_builtin_module(minor_version, name.as_str());
+        ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());

-    for search_path in resolver_settings.search_paths(db) {
+    for search_path in search_paths(db) {
         // When a builtin module is imported, standard module resolution is bypassed:
         // the module name always resolves to the stdlib module,
         // even if there's a module of the same name in the first-party root
@@ -623,6 +640,8 @@ mod tests {
     use crate::module_name::ModuleName;
     use crate::module_resolver::module::ModuleKind;
     use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
+    use crate::ProgramSettings;
+    use crate::PythonVersion;

     use super::*;

@@ -677,7 +696,7 @@ mod tests {
     let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
         .with_src_files(SRC)
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let builtins_module_name = ModuleName::new_static("builtins").unwrap();
@@ -695,7 +714,7 @@ mod tests {

     let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let functools_module_name = ModuleName::new_static("functools").unwrap();
@@ -748,7 +767,7 @@ mod tests {

     let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let existing_modules = create_module_names(&["asyncio", "functools", "xml.etree"]);
@@ -793,7 +812,7 @@ mod tests {

     let TestCase { db, .. } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let nonexisting_modules = create_module_names(&[
@@ -837,7 +856,7 @@ mod tests {

     let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py39)
+        .with_target_version(PythonVersion::PY39)
         .build();

     let existing_modules = create_module_names(&[
@@ -879,7 +898,7 @@ mod tests {

     let TestCase { db, .. } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py39)
+        .with_target_version(PythonVersion::PY39)
         .build();

     let nonexisting_modules = create_module_names(&["importlib", "xml", "xml.etree"]);
@@ -903,7 +922,7 @@ mod tests {
     let TestCase { db, src, .. } = TestCaseBuilder::new()
         .with_src_files(SRC)
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let functools_module_name = ModuleName::new_static("functools").unwrap();
@@ -927,7 +946,7 @@ mod tests {
 fn stdlib_uses_vendored_typeshed_when_no_custom_typeshed_supplied() {
     let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
         .with_vendored_typeshed()
-        .with_target_version(TargetVersion::default())
+        .with_target_version(PythonVersion::default())
         .build();

     let pydoc_data_topics_name = ModuleName::new_static("pydoc_data.topics").unwrap();
@@ -1143,7 +1162,7 @@ mod tests {
 fn symlink() -> anyhow::Result<()> {
     use anyhow::Context;

-    use ruff_db::program::Program;
+    use crate::program::Program;
     use ruff_db::system::{OsSystem, SystemPath};

     use crate::db::tests::TestDb;
@@ -1173,14 +1192,19 @@ mod tests {
     std::fs::write(foo.as_std_path(), "")?;
     std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

-    let search_paths = SearchPathSettings {
-        extra_paths: vec![],
-        src_root: src.clone(),
-        custom_typeshed: Some(custom_typeshed.clone()),
-        site_packages: vec![site_packages],
-    };
-
-    Program::new(&db, TargetVersion::Py38, search_paths);
+    Program::from_settings(
+        &db,
+        ProgramSettings {
+            target_version: PythonVersion::PY38,
+            search_paths: SearchPathSettings {
+                extra_paths: vec![],
+                src_root: src.clone(),
+                custom_typeshed: Some(custom_typeshed.clone()),
+                site_packages: vec![site_packages],
+            },
+        },
+    )
+    .context("Invalid program settings")?;

     let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
     let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();
@@ -1214,7 +1238,7 @@ mod tests {
 fn deleting_an_unrelated_file_doesnt_change_module_resolution() {
     let TestCase { mut db, src, .. } = TestCaseBuilder::new()
         .with_src_files(&[("foo.py", "x = 1"), ("bar.py", "x = 2")])
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let foo_module_name = ModuleName::new_static("foo").unwrap();
@@ -1302,7 +1326,7 @@ mod tests {
         ..
     } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let functools_module_name = ModuleName::new_static("functools").unwrap();
@@ -1350,7 +1374,7 @@ mod tests {
         ..
     } = TestCaseBuilder::new()
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let functools_module_name = ModuleName::new_static("functools").unwrap();
@@ -1390,7 +1414,7 @@ mod tests {
     } = TestCaseBuilder::new()
         .with_src_files(SRC)
         .with_custom_typeshed(TYPESHED)
-        .with_target_version(TargetVersion::Py38)
+        .with_target_version(PythonVersion::PY38)
         .build();

     let functools_module_name = ModuleName::new_static("functools").unwrap();
@@ -1644,8 +1668,7 @@ not_a_directory
         .with_site_packages_files(&[("_foo.pth", "/src")])
         .build();

-    let search_paths: Vec<&SearchPath> =
-        module_resolution_settings(&db).search_paths(&db).collect();
+    let search_paths: Vec<&SearchPath> = search_paths(&db).collect();

     assert!(search_paths.contains(
         &&SearchPath::first_party(db.system(), SystemPathBuf::from("/src")).unwrap()
@@ -1674,16 +1697,19 @@ not_a_directory
     ])
     .unwrap();

-    Program::new(
+    Program::from_settings(
         &db,
-        TargetVersion::default(),
-        SearchPathSettings {
-            extra_paths: vec![],
-            src_root: SystemPathBuf::from("/src"),
-            custom_typeshed: None,
-            site_packages: vec![venv_site_packages, system_site_packages],
+        ProgramSettings {
+            target_version: PythonVersion::default(),
+            search_paths: SearchPathSettings {
+                extra_paths: vec![],
+                src_root: SystemPathBuf::from("/src"),
+                custom_typeshed: None,
+                site_packages: vec![venv_site_packages, system_site_packages],
+            },
         },
-    );
+    )
+    .expect("Valid program settings");

     // The editable installs discovered from the `.pth` file in the first `site-packages` directory
     // take precedence over the second `site-packages` directory...
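As a worked illustration of that precedence (directory names are hypothetical, mirroring the test setup): with `/src` as the first-party root and an editable install recorded in a `.pth` file inside the first site-packages directory, the effective resolution order would be

    /src                      (first-party)
    <stdlib>                  (vendored or custom typeshed)
    /venv/site-packages       (first site-packages path)
    /editable/install/src     (from the .pth file in /venv/site-packages)
    /system/site-packages     (second site-packages path)

which is exactly why the dynamic paths cannot be computed until the `.pth` files of each earlier site-packages directory have been read.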
@@ -1,17 +1,17 @@
-use ruff_db::program::TargetVersion;
 use ruff_db::vendored::VendoredFileSystem;

 use super::typeshed::LazyTypeshedVersions;
 use crate::db::Db;
+use crate::python_version::PythonVersion;

 pub(crate) struct ResolverState<'db> {
     pub(crate) db: &'db dyn Db,
     pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
-    pub(crate) target_version: TargetVersion,
+    pub(crate) target_version: PythonVersion,
 }

 impl<'db> ResolverState<'db> {
-    pub(crate) fn new(db: &'db dyn Db, target_version: TargetVersion) -> Self {
+    pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
         Self {
             db,
             typeshed_versions: LazyTypeshedVersions::new(),

@@ -1,8 +1,10 @@
-use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
 use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
 use ruff_db::vendored::VendoredPathBuf;

 use crate::db::tests::TestDb;
+use crate::program::{Program, SearchPathSettings};
+use crate::python_version::PythonVersion;
+use crate::ProgramSettings;

 /// A test case for the module resolver.
 ///
@@ -16,7 +18,7 @@ pub(crate) struct TestCase<T> {
     // so this is a single directory instead of a `Vec` of directories,
     // like it is in `ruff_db::Program`.
     pub(crate) site_packages: SystemPathBuf,
-    pub(crate) target_version: TargetVersion,
+    pub(crate) target_version: PythonVersion,
 }

 /// A `(file_name, file_contents)` tuple
@@ -98,7 +100,7 @@ pub(crate) struct UnspecifiedTypeshed;
 /// to `()`.
 pub(crate) struct TestCaseBuilder<T> {
     typeshed_option: T,
-    target_version: TargetVersion,
+    target_version: PythonVersion,
     first_party_files: Vec<FileSpec>,
     site_packages_files: Vec<FileSpec>,
 }
@@ -117,7 +119,7 @@ impl<T> TestCaseBuilder<T> {
     }

     /// Specify the target Python version the module resolver should assume
-    pub(crate) fn with_target_version(mut self, target_version: TargetVersion) -> Self {
+    pub(crate) fn with_target_version(mut self, target_version: PythonVersion) -> Self {
         self.target_version = target_version;
         self
     }
@@ -144,7 +146,7 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
     pub(crate) fn new() -> TestCaseBuilder<UnspecifiedTypeshed> {
         Self {
             typeshed_option: UnspecifiedTypeshed,
-            target_version: TargetVersion::default(),
+            target_version: PythonVersion::default(),
             first_party_files: vec![],
             site_packages_files: vec![],
         }
@@ -219,16 +221,19 @@ impl TestCaseBuilder<MockedTypeshed> {
         let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
         let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

-        Program::new(
+        Program::from_settings(
             &db,
-            target_version,
-            SearchPathSettings {
-                extra_paths: vec![],
-                src_root: src.clone(),
-                custom_typeshed: Some(typeshed.clone()),
-                site_packages: vec![site_packages.clone()],
+            ProgramSettings {
+                target_version,
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: src.clone(),
+                    custom_typeshed: Some(typeshed.clone()),
+                    site_packages: vec![site_packages.clone()],
+                },
             },
-        );
+        )
+        .expect("Valid program settings");

         TestCase {
             db,
@@ -272,16 +277,19 @@ impl TestCaseBuilder<VendoredTypeshed> {
         Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
         let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

-        Program::new(
+        Program::from_settings(
             &db,
-            target_version,
-            SearchPathSettings {
-                extra_paths: vec![],
-                src_root: src.clone(),
-                custom_typeshed: None,
-                site_packages: vec![site_packages.clone()],
+            ProgramSettings {
+                target_version,
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: src.clone(),
+                    custom_typeshed: None,
+                    site_packages: vec![site_packages.clone()],
+                },
             },
-        );
+        )
+        .expect("Valid search path settings");

         TestCase {
             db,

@@ -6,16 +6,15 @@ use std::ops::{RangeFrom, RangeInclusive};
 use std::str::FromStr;

 use once_cell::sync::Lazy;
-use ruff_db::program::TargetVersion;
 use ruff_db::system::SystemPath;
 use rustc_hash::FxHashMap;

 use ruff_db::files::{system_path_to_file, File};

-use super::vendored::vendored_typeshed_stubs;
 use crate::db::Db;
 use crate::module_name::ModuleName;

+use super::vendored::vendored_typeshed_stubs;
+use crate::python_version::PythonVersion;

 #[derive(Debug)]
 pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
@@ -44,7 +43,7 @@ impl<'db> LazyTypeshedVersions<'db> {
         db: &'db dyn Db,
         module: &ModuleName,
         stdlib_root: Option<&SystemPath>,
-        target_version: TargetVersion,
+        target_version: PythonVersion,
     ) -> TypeshedVersionsQueryResult {
         let versions = self.0.get_or_init(|| {
             let versions_path = if let Some(system_path) = stdlib_root {
@@ -64,7 +63,7 @@ impl<'db> LazyTypeshedVersions<'db> {
             // Unwrapping here is not correct...
             parse_typeshed_versions(db, versions_file).as_ref().unwrap()
         });
-        versions.query_module(module, PyVersion::from(target_version))
+        versions.query_module(module, target_version)
     }
 }

@@ -178,7 +177,7 @@ impl TypeshedVersions {
     fn query_module(
         &self,
         module: &ModuleName,
-        target_version: PyVersion,
+        target_version: PythonVersion,
     ) -> TypeshedVersionsQueryResult {
         if let Some(range) = self.exact(module) {
             if range.contains(target_version) {
@@ -323,13 +322,13 @@ impl fmt::Display for TypeshedVersions {

 #[derive(Debug, Clone, Eq, PartialEq, Hash)]
 enum PyVersionRange {
-    AvailableFrom(RangeFrom<PyVersion>),
-    AvailableWithin(RangeInclusive<PyVersion>),
+    AvailableFrom(RangeFrom<PythonVersion>),
+    AvailableWithin(RangeInclusive<PythonVersion>),
 }

 impl PyVersionRange {
     #[must_use]
-    fn contains(&self, version: PyVersion) -> bool {
+    fn contains(&self, version: PythonVersion) -> bool {
         match self {
             Self::AvailableFrom(inner) => inner.contains(&version),
             Self::AvailableWithin(inner) => inner.contains(&version),
@@ -343,9 +342,14 @@ impl FromStr for PyVersionRange {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         let mut parts = s.split('-').map(str::trim);
         match (parts.next(), parts.next(), parts.next()) {
-            (Some(lower), Some(""), None) => Ok(Self::AvailableFrom((lower.parse()?)..)),
+            (Some(lower), Some(""), None) => {
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                Ok(Self::AvailableFrom(lower..))
+            }
             (Some(lower), Some(upper), None) => {
-                Ok(Self::AvailableWithin((lower.parse()?)..=(upper.parse()?)))
+                let lower = PythonVersion::from_versions_file_string(lower)?;
+                let upper = PythonVersion::from_versions_file_string(upper)?;
+                Ok(Self::AvailableWithin(lower..=upper))
             }
             _ => Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfHyphens),
         }
@@ -363,74 +367,20 @@ impl fmt::Display for PyVersionRange {
     }
 }

-#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
-struct PyVersion {
-    major: u8,
-    minor: u8,
-}
-
-impl FromStr for PyVersion {
-    type Err = TypeshedVersionsParseErrorKind;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
+impl PythonVersion {
+    fn from_versions_file_string(s: &str) -> Result<Self, TypeshedVersionsParseErrorKind> {
         let mut parts = s.split('.').map(str::trim);
         let (Some(major), Some(minor), None) = (parts.next(), parts.next(), parts.next()) else {
             return Err(TypeshedVersionsParseErrorKind::UnexpectedNumberOfPeriods(
                 s.to_string(),
             ));
         };
-        let major = match u8::from_str(major) {
-            Ok(major) => major,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        let minor = match u8::from_str(minor) {
-            Ok(minor) => minor,
-            Err(err) => {
-                return Err(TypeshedVersionsParseErrorKind::IntegerParsingFailure {
-                    version: s.to_string(),
-                    err,
-                })
-            }
-        };
-        Ok(Self { major, minor })
-    }
-}
-
-impl fmt::Display for PyVersion {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let PyVersion { major, minor } = self;
-        write!(f, "{major}.{minor}")
-    }
-}
-
-impl From<TargetVersion> for PyVersion {
-    fn from(value: TargetVersion) -> Self {
-        match value {
-            TargetVersion::Py37 => PyVersion { major: 3, minor: 7 },
-            TargetVersion::Py38 => PyVersion { major: 3, minor: 8 },
-            TargetVersion::Py39 => PyVersion { major: 3, minor: 9 },
-            TargetVersion::Py310 => PyVersion {
-                major: 3,
-                minor: 10,
-            },
-            TargetVersion::Py311 => PyVersion {
-                major: 3,
-                minor: 11,
-            },
-            TargetVersion::Py312 => PyVersion {
-                major: 3,
-                minor: 12,
-            },
-            TargetVersion::Py313 => PyVersion {
-                major: 3,
-                minor: 13,
-            },
-        }
+        PythonVersion::try_from((major, minor)).map_err(|int_parse_error| {
+            TypeshedVersionsParseErrorKind::IntegerParsingFailure {
+                version: s.to_string(),
+                err: int_parse_error,
+            }
+        })
     }
 }

@@ -440,7 +390,6 @@ mod tests {
     use std::path::Path;

     use insta::assert_snapshot;
-    use ruff_db::program::TargetVersion;

     use super::*;

@@ -478,27 +427,27 @@ mod tests {

     assert!(versions.contains_exact(&asyncio));
     assert_eq!(
-        versions.query_module(&asyncio, TargetVersion::Py310.into()),
+        versions.query_module(&asyncio, PythonVersion::PY310),
         TypeshedVersionsQueryResult::Exists
     );

     assert!(versions.contains_exact(&asyncio_staggered));
     assert_eq!(
-        versions.query_module(&asyncio_staggered, TargetVersion::Py38.into()),
+        versions.query_module(&asyncio_staggered, PythonVersion::PY38),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        versions.query_module(&asyncio_staggered, TargetVersion::Py37.into()),
+        versions.query_module(&asyncio_staggered, PythonVersion::PY37),
         TypeshedVersionsQueryResult::DoesNotExist
     );

     assert!(versions.contains_exact(&audioop));
     assert_eq!(
-        versions.query_module(&audioop, TargetVersion::Py312.into()),
+        versions.query_module(&audioop, PythonVersion::PY312),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        versions.query_module(&audioop, TargetVersion::Py313.into()),
+        versions.query_module(&audioop, PythonVersion::PY313),
         TypeshedVersionsQueryResult::DoesNotExist
     );
 }
@@ -590,15 +539,15 @@ foo: 3.8- # trailing comment

     assert!(parsed_versions.contains_exact(&bar));
     assert_eq!(
-        parsed_versions.query_module(&bar, TargetVersion::Py37.into()),
+        parsed_versions.query_module(&bar, PythonVersion::PY37),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar, TargetVersion::Py310.into()),
+        parsed_versions.query_module(&bar, PythonVersion::PY310),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar, TargetVersion::Py311.into()),
+        parsed_versions.query_module(&bar, PythonVersion::PY311),
         TypeshedVersionsQueryResult::DoesNotExist
     );
 }
@@ -610,15 +559,15 @@ foo: 3.8- # trailing comment

     assert!(parsed_versions.contains_exact(&foo));
     assert_eq!(
-        parsed_versions.query_module(&foo, TargetVersion::Py37.into()),
+        parsed_versions.query_module(&foo, PythonVersion::PY37),
         TypeshedVersionsQueryResult::DoesNotExist
     );
     assert_eq!(
-        parsed_versions.query_module(&foo, TargetVersion::Py38.into()),
+        parsed_versions.query_module(&foo, PythonVersion::PY38),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        parsed_versions.query_module(&foo, TargetVersion::Py311.into()),
+        parsed_versions.query_module(&foo, PythonVersion::PY311),
         TypeshedVersionsQueryResult::Exists
     );
 }
@@ -630,15 +579,15 @@ foo: 3.8- # trailing comment

     assert!(parsed_versions.contains_exact(&bar_baz));
     assert_eq!(
-        parsed_versions.query_module(&bar_baz, TargetVersion::Py37.into()),
+        parsed_versions.query_module(&bar_baz, PythonVersion::PY37),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar_baz, TargetVersion::Py39.into()),
+        parsed_versions.query_module(&bar_baz, PythonVersion::PY39),
         TypeshedVersionsQueryResult::Exists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar_baz, TargetVersion::Py310.into()),
+        parsed_versions.query_module(&bar_baz, PythonVersion::PY310),
         TypeshedVersionsQueryResult::DoesNotExist
     );
 }
@@ -650,15 +599,15 @@ foo: 3.8- # trailing comment

     assert!(!parsed_versions.contains_exact(&bar_eggs));
     assert_eq!(
-        parsed_versions.query_module(&bar_eggs, TargetVersion::Py37.into()),
+        parsed_versions.query_module(&bar_eggs, PythonVersion::PY37),
         TypeshedVersionsQueryResult::MaybeExists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar_eggs, TargetVersion::Py310.into()),
+        parsed_versions.query_module(&bar_eggs, PythonVersion::PY310),
         TypeshedVersionsQueryResult::MaybeExists
     );
     assert_eq!(
-        parsed_versions.query_module(&bar_eggs, TargetVersion::Py311.into()),
+        parsed_versions.query_module(&bar_eggs, PythonVersion::PY311),
         TypeshedVersionsQueryResult::DoesNotExist
     );
 }
@@ -670,11 +619,11 @@ foo: 3.8- # trailing comment

     assert!(!parsed_versions.contains_exact(&spam));
     assert_eq!(
-        parsed_versions.query_module(&spam, TargetVersion::Py37.into()),
+        parsed_versions.query_module(&spam, PythonVersion::PY37),
         TypeshedVersionsQueryResult::DoesNotExist
     );
     assert_eq!(
-        parsed_versions.query_module(&spam, TargetVersion::Py313.into()),
+        parsed_versions.query_module(&spam, PythonVersion::PY313),
         TypeshedVersionsQueryResult::DoesNotExist
     );
 }

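For reference, the typeshed `VERSIONS` syntax that `PyVersionRange::from_str` parses in the hunks above, as exercised by these tests: one module per line, followed by either an open-ended or an inclusive version range, with optional trailing comments. For example (illustrative lines in the style of the test fixtures):

    asyncio: 3.8-        -> AvailableFrom(PY38..)
    audioop: 3.8-3.12    -> AvailableWithin(PY38..=PY312)

A query for a module whose inclusive range ends at 3.12, made at `PythonVersion::PY313`, falls outside the range and reports `DoesNotExist`, matching the assertions above.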
@@ -1,4 +1,4 @@
-use ruff_python_ast::{AnyNodeRef, NodeKind};
+use ruff_python_ast::{AnyNodeRef, AstNode, NodeKind};
 use ruff_text_size::{Ranged, TextRange};

 /// Compact key for a node for use in a hash map.
@@ -11,7 +11,19 @@ pub(super) struct NodeKey {
 }

 impl NodeKey {
-    pub(super) fn from_node<'a, N>(node: N) -> Self
+    #[inline]
+    pub(super) fn from_node<'a, N>(node: &N) -> Self
+    where
+        N: AstNode,
+    {
+        NodeKey {
+            kind: node.kind(),
+            range: node.range(),
+        }
+    }
+
+    #[inline]
+    pub(super) fn from_ref<'a, N>(node: N) -> Self
     where
         N: Into<AnyNodeRef<'a>>,
     {
crates/red_knot_python_semantic/src/program.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use crate::python_version::PythonVersion;
use anyhow::Context;
use salsa::Durability;
use salsa::Setter;

use ruff_db::system::SystemPathBuf;

use crate::module_resolver::SearchPaths;
use crate::Db;

#[salsa::input(singleton)]
pub struct Program {
pub target_version: PythonVersion,

#[default]
#[return_ref]
pub(crate) search_paths: SearchPaths,
}

impl Program {
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
let ProgramSettings {
target_version,
search_paths,
} = settings;

tracing::info!("Target version: Python {target_version}");

let search_paths = SearchPaths::from_settings(db, search_paths)
.with_context(|| "Invalid search path settings")?;

// `settings` was destructured above, so use the moved-out `target_version` binding.
Ok(Program::builder(target_version)
.durability(Durability::HIGH)
.search_paths(search_paths)
.new(db))
}

pub fn update_search_paths(
&self,
db: &mut dyn Db,
search_path_settings: SearchPathSettings,
) -> anyhow::Result<()> {
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;

if self.search_paths(db) != &search_paths {
tracing::debug!("Update search paths");
self.set_search_paths(db).to(search_paths);
}

Ok(())
}
}

#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
pub target_version: PythonVersion,
pub search_paths: SearchPathSettings,
}

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct SearchPathSettings {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
pub extra_paths: Vec<SystemPathBuf>,

/// The root of the workspace, used for finding first-party modules.
pub src_root: SystemPathBuf,

/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fall back to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary.
pub custom_typeshed: Option<SystemPathBuf>,

/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: Vec<SystemPathBuf>,
}
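The field docs above imply a priority order for module resolution. A rough `sys.path`-style analogy, offered as a hedged sketch only (the resolver consults these paths itself and never builds a real `sys.path`):

```python
def search_order(extra_paths, src_root, custom_typeshed, site_packages):
    # Highest priority first, mirroring the field docs above.
    return [
        *extra_paths,                              # like MYPYPATH / pyright's stubPath
        src_root,                                  # first-party modules
        custom_typeshed or "<vendored typeshed>",  # stdlib stubs
        *site_packages,                            # third-party packages
    ]
```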
crates/red_knot_python_semantic/src/python_version.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use std::fmt;

/// Representation of a Python version.
///
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct PythonVersion {
pub major: u8,
pub minor: u8,
}

impl PythonVersion {
pub const PY37: PythonVersion = PythonVersion { major: 3, minor: 7 };
pub const PY38: PythonVersion = PythonVersion { major: 3, minor: 8 };
pub const PY39: PythonVersion = PythonVersion { major: 3, minor: 9 };
pub const PY310: PythonVersion = PythonVersion {
major: 3,
minor: 10,
};
pub const PY311: PythonVersion = PythonVersion {
major: 3,
minor: 11,
};
pub const PY312: PythonVersion = PythonVersion {
major: 3,
minor: 12,
};
pub const PY313: PythonVersion = PythonVersion {
major: 3,
minor: 13,
};

pub fn free_threaded_build_available(self) -> bool {
self >= PythonVersion::PY313
}
}

impl Default for PythonVersion {
fn default() -> Self {
Self::PY38
}
}

impl TryFrom<(&str, &str)> for PythonVersion {
type Error = std::num::ParseIntError;

fn try_from(value: (&str, &str)) -> Result<Self, Self::Error> {
let (major, minor) = value;
Ok(Self {
major: major.parse()?,
minor: minor.parse()?,
})
}
}

impl fmt::Display for PythonVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let PythonVersion { major, minor } = self;
write!(f, "{major}.{minor}")
}
}
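The `PY313` cutoff in `free_threaded_build_available` matches CPython: free-threaded ("no-GIL", PEP 703) builds first shipped with 3.13. A hedged check from the Python side, assuming `sys._is_gil_enabled` is present only on such builds:

```python
import sys

# Free-threaded CPython builds exist only from 3.13 onwards (PEP 703).
if sys.version_info >= (3, 13) and hasattr(sys, "_is_gil_enabled"):
    print("GIL currently enabled:", sys._is_gil_enabled())
else:
    print("no free-threaded build available for this version")
```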
@@ -16,10 +16,9 @@ use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::Db;

pub(crate) use self::use_def::UseDefMap;

pub mod ast_ids;
mod builder;
pub mod definition;
@@ -27,6 +26,8 @@ pub mod expression;
pub mod symbol;
mod use_def;

pub(crate) use self::use_def::{DefinitionWithConstraints, DefinitionWithConstraintsIterator};

type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;

/// Returns the semantic index for `file`.
@@ -89,8 +90,6 @@ pub(crate) struct SemanticIndex<'db> {
scopes: IndexVec<FileScopeId, Scope>,

/// Map expressions to their corresponding scope.
/// We can't use [`ExpressionId`] here, because the challenge is how to get from
/// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,

/// Map from a node creating a definition to its definition.
@@ -118,7 +117,7 @@ pub(crate) struct SemanticIndex<'db> {
impl<'db> SemanticIndex<'db> {
/// Returns the symbol table for a specific scope.
///
/// Use the Salsa cached [`symbol_table`] query if you only need the
/// Use the Salsa cached [`symbol_table()`] query if you only need the
/// symbol table for a single scope.
pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
self.symbol_tables[scope_id].clone()
@@ -126,7 +125,7 @@ impl<'db> SemanticIndex<'db> {

/// Returns the use-def map for a specific scope.
///
/// Use the Salsa cached [`use_def_map`] query if you only need the
/// Use the Salsa cached [`use_def_map()`] query if you only need the
/// use-def map for a single scope.
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
self.use_def_maps[scope_id].clone()
@@ -155,6 +154,10 @@ impl<'db> SemanticIndex<'db> {
&self.scopes[id]
}

pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
self.scope_ids_by_scope.iter().copied()
}

/// Returns the id of the parent scope.
pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
let scope = self.scope(scope_id);
@@ -309,14 +312,32 @@ mod tests {
use ruff_db::parsed::parsed_module;
use ruff_db::system::DbWithTestSystem;
use ruff_python_ast as ast;
use ruff_text_size::{Ranged, TextRange};

use crate::db::tests::TestDb;
use crate::semantic_index::ast_ids::HasScopedUseId;
use crate::semantic_index::definition::DefinitionKind;
use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::semantic_index::symbol::{
FileScopeId, Scope, ScopeKind, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::Db;

impl UseDefMap<'_> {
fn first_public_definition(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
self.public_definitions(symbol)
.next()
.map(|constrained_definition| constrained_definition.definition)
}

fn first_use_definition(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
self.use_definitions(use_id)
.next()
.map(|constrained_definition| constrained_definition.definition)
}
}

struct TestCase {
db: TestDb,
file: File,
@@ -375,9 +396,7 @@ mod tests {
let foo = global_table.symbol_id_by_name("foo").unwrap();

let use_def = use_def_map(&db, scope);
let [definition] = use_def.public_definitions(foo) else {
panic!("expected one definition");
};
let definition = use_def.first_public_definition(foo).unwrap();
assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
}

@@ -412,13 +431,13 @@ mod tests {
);

let use_def = use_def_map(&db, scope);
let [definition] = use_def.public_definitions(
global_table
.symbol_id_by_name("foo")
.expect("symbol to exist"),
) else {
panic!("expected one definition");
};
let definition = use_def
.first_public_definition(
global_table
.symbol_id_by_name("foo")
.expect("symbol to exist"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::ImportFrom(_)
@@ -439,17 +458,34 @@ mod tests {
"a symbol used but not defined in a scope should have only the used flag"
);
let use_def = use_def_map(&db, scope);
let [definition] =
use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
else {
panic!("expected one definition");
};
let definition = use_def
.first_public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
));
}

#[test]
fn augmented_assignment() {
let TestCase { db, file } = test_case("x += 1");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);

assert_eq!(names(&global_table), vec!["x"]);

let use_def = use_def_map(&db, scope);
let definition = use_def
.first_public_definition(global_table.symbol_id_by_name("x").unwrap())
.unwrap();

assert!(matches!(
definition.node(&db),
DefinitionKind::AugmentedAssignment(_)
));
}

#[test]
fn class_scope() {
let TestCase { db, file } = test_case(
@@ -478,11 +514,9 @@ y = 2
assert_eq!(names(&class_table), vec!["x"]);

let use_def = index.use_def_map(class_scope_id);
let [definition] =
use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
else {
panic!("expected one definition");
};
let definition = use_def
.first_public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
@@ -516,19 +550,246 @@ y = 2
assert_eq!(names(&function_table), vec!["x"]);

let use_def = index.use_def_map(function_scope_id);
let [definition] = use_def.public_definitions(
function_table
.symbol_id_by_name("x")
.expect("symbol exists"),
) else {
panic!("expected one definition");
};
let definition = use_def
.first_public_definition(
function_table
.symbol_id_by_name("x")
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::Assignment(_)
));
}

#[test]
fn function_parameter_symbols() {
let TestCase { db, file } = test_case(
"
def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
pass
",
);

let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file));

assert_eq!(names(&global_table), vec!["f", "str", "int"]);

let [(function_scope_id, _function_scope)] = index
.child_scopes(FileScopeId::global())
.collect::<Vec<_>>()[..]
else {
panic!("Expected a function scope")
};

let function_table = index.symbol_table(function_scope_id);
assert_eq!(
names(&function_table),
vec!["a", "b", "c", "args", "d", "kwargs"],
);

let use_def = index.use_def_map(function_scope_id);
for name in ["a", "b", "c", "d"] {
let definition = use_def
.first_public_definition(
function_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
definition.node(&db),
DefinitionKind::ParameterWithDefault(_)
));
}
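// `*args` and `**kwargs` are variadic parameters, so they are expected to map
// to `DefinitionKind::Parameter` rather than `ParameterWithDefault` below.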
for name in ["args", "kwargs"] {
|
||||
let definition = use_def
|
||||
.first_public_definition(
|
||||
function_table
|
||||
.symbol_id_by_name(name)
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn lambda_parameter_symbols() {
|
||||
let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert!(names(&global_table).is_empty());
|
||||
|
||||
let [(lambda_scope_id, _lambda_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("Expected a lambda scope")
|
||||
};
|
||||
|
||||
let lambda_table = index.symbol_table(lambda_scope_id);
|
||||
assert_eq!(
|
||||
names(&lambda_table),
|
||||
vec!["a", "b", "c", "args", "d", "kwargs"],
|
||||
);
|
||||
|
||||
let use_def = index.use_def_map(lambda_scope_id);
|
||||
for name in ["a", "b", "c", "d"] {
|
||||
let definition = use_def
|
||||
.first_public_definition(
|
||||
lambda_table.symbol_id_by_name(name).expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::ParameterWithDefault(_)
|
||||
));
|
||||
}
|
||||
for name in ["args", "kwargs"] {
|
||||
let definition = use_def
|
||||
.first_public_definition(
|
||||
lambda_table.symbol_id_by_name(name).expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Parameter(_)));
|
||||
}
|
||||
}
|
||||
|
||||
/// Test case to validate that the comprehension scope is correctly identified and that the target
|
||||
/// variable is defined only in the comprehension scope and not in the global scope.
|
||||
#[test]
|
||||
fn comprehension_scope() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[x for x in iter1]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&global_table), vec!["iter1"]);
|
||||
|
||||
let [(comprehension_scope_id, comprehension_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
comprehension_scope_id.to_scope_id(&db, file).name(&db),
|
||||
"<listcomp>"
|
||||
);
|
||||
|
||||
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
|
||||
}
|
||||
|
||||
/// Test case to validate that the `x` variable used in the comprehension is referencing the
|
||||
/// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
|
||||
#[test]
|
||||
fn multiple_generators() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[x for x in iter1 for x in iter2]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let [(comprehension_scope_id, _)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
let use_def = index.use_def_map(comprehension_scope_id);
|
||||
|
||||
let module = parsed_module(&db, file).syntax();
|
||||
let element = module.body[0]
|
||||
.as_expr_stmt()
|
||||
.unwrap()
|
||||
.value
|
||||
.as_list_comp_expr()
|
||||
.unwrap()
|
||||
.elt
|
||||
.as_name_expr()
|
||||
.unwrap();
|
||||
let element_use_id =
|
||||
element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
|
||||
|
||||
let definition = use_def.first_use_definition(element_use_id).unwrap();
|
||||
let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
|
||||
panic!("expected generator definition")
|
||||
};
|
||||
let ast::Comprehension { target, .. } = comprehension.node();
|
||||
let name = target.as_name_expr().unwrap().id().as_str();
|
||||
|
||||
assert_eq!(name, "x");
|
||||
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
|
||||
}
|
||||
|
||||
/// Test case to validate that the nested comprehension creates a new scope which is a child of
|
||||
/// the outer comprehension scope and the variables are correctly defined in the respective
|
||||
/// scopes.
|
||||
#[test]
|
||||
fn nested_generators() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[{x for x in iter2} for y in iter1]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&global_table), vec!["iter1"]);
|
||||
|
||||
let [(comprehension_scope_id, comprehension_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
comprehension_scope_id.to_scope_id(&db, file).name(&db),
|
||||
"<listcomp>"
|
||||
);
|
||||
|
||||
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);
|
||||
|
||||
let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
|
||||
.child_scopes(comprehension_scope_id)
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one inner generator scope")
|
||||
};
|
||||
|
||||
assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
inner_comprehension_scope_id
|
||||
.to_scope_id(&db, file)
|
||||
.name(&db),
|
||||
"<setcomp>"
|
||||
);
|
||||
|
||||
let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dupes() {
|
||||
let TestCase { db, file } = test_case(
|
||||
@@ -562,13 +823,13 @@ def func():
|
||||
assert_eq!(names(&func2_table), vec!["y"]);
|
||||
|
||||
let use_def = index.use_def_map(FileScopeId::global());
|
||||
let [definition] = use_def.public_definitions(
|
||||
global_table
|
||||
.symbol_id_by_name("func")
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def
|
||||
.first_public_definition(
|
||||
global_table
|
||||
.symbol_id_by_name("func")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
|
||||
}
|
||||
|
||||
@@ -669,9 +930,7 @@ class C[T]:
|
||||
};
|
||||
let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.use_definitions(x_use_id) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def.first_use_definition(x_use_id).unwrap();
|
||||
let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
|
||||
panic!("should be an assignment definition")
|
||||
};
|
||||
@@ -762,4 +1021,28 @@ def x():
|
||||
vec!["bar", "foo", "Test", "<module>"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn match_stmt_symbols() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
match subject:
|
||||
case a: ...
|
||||
case [b, c, *d]: ...
|
||||
case e as f: ...
|
||||
case {'x': g, **h}: ...
|
||||
case Foo(i, z=j): ...
|
||||
case k | l: ...
|
||||
case _: ...
|
||||
",
|
||||
);
|
||||
|
||||
let global_table = symbol_table(&db, global_scope(&db, file));
|
||||
|
||||
assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
|
||||
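// Note the ordering below: `visit_pattern` records the `as`/`rest` binding
// before walking the sub-patterns, so `f` precedes `e` and `h` precedes `g`.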
assert_eq!(
names(&global_table),
vec!["subject", "a", "b", "c", "d", "f", "e", "h", "g", "Foo", "i", "j", "k", "l"]
);
}
}

@@ -26,9 +26,9 @@ use crate::Db;
/// ```
#[derive(Debug)]
pub(crate) struct AstIds {
/// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
/// Maps expressions to their expression id.
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
/// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
/// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}

@@ -197,12 +197,14 @@ pub(crate) mod node_key {
pub(crate) struct ExpressionNodeKey(NodeKey);

impl From<ast::ExpressionRef<'_>> for ExpressionNodeKey {
#[inline]
fn from(value: ast::ExpressionRef<'_>) -> Self {
Self(NodeKey::from_node(value))
Self(NodeKey::from_ref(value))
}
}

impl From<&ast::Expr> for ExpressionNodeKey {
#[inline]
fn from(value: &ast::Expr) -> Self {
Self(NodeKey::from_node(value))
}

@@ -7,14 +7,15 @@ use ruff_db::parsed::ParsedModule;
use ruff_index::IndexVec;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
use ruff_python_ast::AnyParameterRef;

use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
AssignmentDefinitionNodeRef, Definition, DefinitionNodeKey, DefinitionNodeRef,
ImportFromDefinitionNodeRef,
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
DefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
@@ -155,7 +156,7 @@ impl<'db> SemanticIndexBuilder<'db> {
self.current_use_def_map_mut().restore(state);
}

fn flow_merge(&mut self, state: &FlowSnapshot) {
fn flow_merge(&mut self, state: FlowSnapshot) {
self.current_use_def_map_mut().merge(state);
}

@@ -174,7 +175,7 @@ impl<'db> SemanticIndexBuilder<'db> {
symbol: ScopedSymbolId,
definition_node: impl Into<DefinitionNodeRef<'a>>,
) -> Definition<'db> {
let definition_node = definition_node.into();
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
let definition = Definition::new(
self.db,
self.file,
@@ -195,9 +196,16 @@ impl<'db> SemanticIndexBuilder<'db> {
definition
}

fn add_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
let expression = self.add_standalone_expression(constraint_node);
self.current_use_def_map_mut().record_constraint(expression);

expression
}
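// E.g. for `if x is not None: ...`, the builder records `x is not None` both
// as a standalone expression and as a constraint on the definitions visible
// in the body (see the `ast::Stmt::If` handling below).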

/// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
/// standalone (type narrowing tests, RHS of an assignment.)
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) {
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
let expression = Expression::new(
self.db,
self.file,
@@ -210,6 +218,7 @@ impl<'db> SemanticIndexBuilder<'db> {
);
self.expressions_by_node
.insert(expression_node.into(), expression);
expression
}

fn with_type_params(
@@ -258,6 +267,66 @@ impl<'db> SemanticIndexBuilder<'db> {
nested_scope
}

/// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
/// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
///
/// [`Comprehension`]: ast::Comprehension
fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
let mut generators_iter = generators.iter();

let Some(generator) = generators_iter.next() else {
unreachable!("Expression must contain at least one generator");
};

// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
// nodes are evaluated in the inner scope.
self.visit_expr(&generator.iter);
self.push_scope(scope);

self.current_assignment = Some(CurrentAssignment::Comprehension {
node: generator,
first: true,
});
self.visit_expr(&generator.target);
self.current_assignment = None;

for expr in &generator.ifs {
self.visit_expr(expr);
}

for generator in generators_iter {
self.visit_expr(&generator.iter);

self.current_assignment = Some(CurrentAssignment::Comprehension {
node: generator,
first: false,
});
self.visit_expr(&generator.target);
self.current_assignment = None;

for expr in &generator.ifs {
self.visit_expr(expr);
}
}
}
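// Hedged Python-level illustration of the rule above: in
// `[cell for row in matrix for cell in row]`, `matrix` (the first `iter`) is
// looked up in the enclosing scope, while `row` in the second generator's
// `iter` resolves inside the comprehension scope.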

fn declare_parameter(&mut self, parameter: AnyParameterRef) {
let symbol =
self.add_or_update_symbol(parameter.name().id().clone(), SymbolFlags::IS_DEFINED);

let definition = self.add_definition(symbol, parameter);

if let AnyParameterRef::NonVariadic(with_default) = parameter {
// Insert a mapping from the parameter to the same definition.
// This ensures that calling `HasTy::ty` on the inner parameter returns
// a valid type (and doesn't panic)
self.definitions_by_node.insert(
DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
definition,
);
}
}

pub(super) fn build(mut self) -> SemanticIndex<'db> {
let module = self.module;
self.visit_body(module.suite());
@@ -325,6 +394,16 @@ where
.add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, function_def);

// The default value of the parameters needs to be evaluated in the
// enclosing scope.
for default in function_def
.parameters
.iter_non_variadic_params()
.filter_map(|param| param.default.as_deref())
{
self.visit_expr(default);
}
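// Hedged Python-level illustration of the loop above: in
// `def f(x=make_default()): ...`, the call `make_default()` runs once, when
// the `def` statement executes, and resolves names in the enclosing scope
// rather than the function body's scope.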

self.with_type_params(
NodeWithScopeRef::FunctionTypeParameters(function_def),
function_def.type_params.as_deref(),
@@ -335,6 +414,12 @@ where
}

builder.push_scope(NodeWithScopeRef::Function(function_def));

// Add symbols and definitions for the parameters to the function scope.
for parameter in &*function_def.parameters {
builder.declare_parameter(parameter);
}

builder.visit_body(&function_def.body);
builder.pop_scope()
},
@@ -410,9 +495,24 @@ where
self.visit_expr(&node.target);
self.current_assignment = None;
}
ast::Stmt::AugAssign(
aug_assign @ ast::StmtAugAssign {
range: _,
target,
op: _,
value,
},
) => {
debug_assert!(self.current_assignment.is_none());
self.visit_expr(value);
self.current_assignment = Some(aug_assign.into());
self.visit_expr(target);
self.current_assignment = None;
}
ast::Stmt::If(node) => {
self.visit_expr(&node.test);
let pre_if = self.flow_snapshot();
self.add_constraint(&node.test);
self.visit_body(&node.body);
let mut post_clauses: Vec<FlowSnapshot> = vec![];
for clause in &node.elif_else_clauses {
@@ -425,7 +525,7 @@ where
self.visit_elif_else_clause(clause);
}
for post_clause_state in post_clauses {
self.flow_merge(&post_clause_state);
self.flow_merge(post_clause_state);
}
let has_else = node
.elif_else_clauses
@@ -434,7 +534,7 @@ where
if !has_else {
// if there's no else clause, then it's possible we took none of the branches,
// and the pre_if state can reach here
self.flow_merge(&pre_if);
self.flow_merge(pre_if);
}
}
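// Hedged Python-level illustration of the no-`else` merge above:
//     x = 0
//     if cond:
//         x = 1
//     use(x)  # both `x = 0` and `x = 1` remain visible definitions here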
ast::Stmt::While(node) => {
@@ -452,13 +552,13 @@ where

// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(&pre_loop);
self.flow_merge(pre_loop);
self.visit_body(&node.orelse);

// Breaking out of a while loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(&break_state);
self.flow_merge(break_state);
}
}
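// Hedged Python-level illustration: `break` jumps past the `else` suite, so
// definitions made before the `break` reach the code after the loop without
// flowing through `else`:
//     while cond:
//         x = 1
//         break
//     else:
//         x = 2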
ast::Stmt::Break(_) => {
@@ -476,14 +576,22 @@ where
self.current_ast_ids().record_expression(expr);

match expr {
ast::Expr::Name(name_node) => {
let ast::ExprName { id, ctx, .. } = name_node;
let flags = match ctx {
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
let mut flags = match ctx {
ast::ExprContext::Load => SymbolFlags::IS_USED,
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
ast::ExprContext::Invalid => SymbolFlags::empty(),
};
if matches!(
self.current_assignment,
Some(CurrentAssignment::AugAssign(_))
) && !ctx.is_invalid()
{
// For augmented assignment, the target expression is also used, so we should
// record that as a use.
flags |= SymbolFlags::IS_USED;
}
let symbol = self.add_or_update_symbol(id.clone(), flags);
if flags.contains(SymbolFlags::IS_DEFINED) {
match self.current_assignment {
@@ -499,9 +607,21 @@ where
Some(CurrentAssignment::AnnAssign(ann_assign)) => {
self.add_definition(symbol, ann_assign);
}
Some(CurrentAssignment::AugAssign(aug_assign)) => {
self.add_definition(symbol, aug_assign);
}
Some(CurrentAssignment::Named(named)) => {
// TODO(dhruvmanila): If the current scope is a comprehension, then the
// named expression is implicitly nonlocal. This is yet to be
// implemented.
self.add_definition(symbol, named);
}
Some(CurrentAssignment::Comprehension { node, first }) => {
self.add_definition(
symbol,
ComprehensionDefinitionNodeRef { node, first },
);
}
None => {}
}
}
@@ -523,11 +643,26 @@ where
}
ast::Expr::Lambda(lambda) => {
if let Some(parameters) = &lambda.parameters {
// The default value of the parameters needs to be evaluated in the
// enclosing scope.
for default in parameters
.iter_non_variadic_params()
.filter_map(|param| param.default.as_deref())
{
self.visit_expr(default);
}
self.visit_parameters(parameters);
}
self.push_scope(NodeWithScopeRef::Lambda(lambda));

// Add symbols and definitions for the parameters to the lambda scope.
if let Some(parameters) = &lambda.parameters {
for parameter in &**parameters {
self.declare_parameter(parameter);
}
}

self.visit_expr(lambda.body.as_ref());
self.pop_scope();
}
ast::Expr::If(ast::ExprIf {
body, test, orelse, ..
@@ -541,12 +676,95 @@ where
let post_body = self.flow_snapshot();
self.flow_restore(pre_if);
self.visit_expr(orelse);
self.flow_merge(&post_body);
self.flow_merge(post_body);
}
ast::Expr::ListComp(
list_comprehension @ ast::ExprListComp {
elt, generators, ..
},
) => {
self.visit_generators(
NodeWithScopeRef::ListComprehension(list_comprehension),
generators,
);
self.visit_expr(elt);
}
ast::Expr::SetComp(
set_comprehension @ ast::ExprSetComp {
elt, generators, ..
},
) => {
self.visit_generators(
NodeWithScopeRef::SetComprehension(set_comprehension),
generators,
);
self.visit_expr(elt);
}
ast::Expr::Generator(
generator @ ast::ExprGenerator {
elt, generators, ..
},
) => {
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
self.visit_expr(elt);
}
ast::Expr::DictComp(
dict_comprehension @ ast::ExprDictComp {
key,
value,
generators,
..
},
) => {
self.visit_generators(
NodeWithScopeRef::DictComprehension(dict_comprehension),
generators,
);
self.visit_expr(key);
self.visit_expr(value);
}
_ => {
walk_expr(self, expr);
}
}

if matches!(
expr,
ast::Expr::Lambda(_)
| ast::Expr::ListComp(_)
| ast::Expr::SetComp(_)
| ast::Expr::Generator(_)
| ast::Expr::DictComp(_)
) {
self.pop_scope();
}
}

fn visit_parameters(&mut self, parameters: &'ast ruff_python_ast::Parameters) {
// Intentionally avoid walking default expressions, as we handle them in the enclosing
// scope.
for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
self.visit_parameter(parameter);
}
}

fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
if let ast::Pattern::MatchAs(ast::PatternMatchAs {
name: Some(name), ..
})
| ast::Pattern::MatchStar(ast::PatternMatchStar {
name: Some(name),
range: _,
})
| ast::Pattern::MatchMapping(ast::PatternMatchMapping {
rest: Some(name), ..
}) = pattern
{
// TODO(dhruvmanila): Add definition
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
}

walk_pattern(self, pattern);
}
}

@@ -554,7 +772,12 @@ where
enum CurrentAssignment<'a> {
Assign(&'a ast::StmtAssign),
AnnAssign(&'a ast::StmtAnnAssign),
AugAssign(&'a ast::StmtAugAssign),
Named(&'a ast::ExprNamed),
Comprehension {
node: &'a ast::Comprehension,
first: bool,
},
}

impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
@@ -569,6 +792,12 @@ impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
}
}

impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtAugAssign) -> Self {
Self::AugAssign(value)
}
}

impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
fn from(value: &'a ast::ExprNamed) -> Self {
Self::Named(value)

@@ -44,6 +44,9 @@ pub(crate) enum DefinitionNodeRef<'a> {
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
AugmentedAssignment(&'a ast::StmtAugAssign),
Comprehension(ComprehensionDefinitionNodeRef<'a>),
Parameter(ast::AnyParameterRef<'a>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -70,6 +73,12 @@ impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
}
}

impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtAugAssign) -> Self {
Self::AugmentedAssignment(node)
}
}

impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
fn from(node_ref: &'a ast::Alias) -> Self {
Self::Import(node_ref)
@@ -88,6 +97,18 @@ impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
}
}

impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
Self::Comprehension(node)
}
}

impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ast::AnyParameterRef<'a>) -> Self {
Self::Parameter(node)
}
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
@@ -100,6 +121,12 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::Comprehension,
pub(crate) first: bool,
}

impl DefinitionNodeRef<'_> {
#[allow(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
@@ -131,6 +158,23 @@ impl DefinitionNodeRef<'_> {
DefinitionNodeRef::AnnotatedAssignment(assign) => {
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
}
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
}
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
DefinitionKind::Comprehension(ComprehensionDefinitionKind {
node: AstNodeRef::new(parsed, node),
first,
})
}
DefinitionNodeRef::Parameter(parameter) => match parameter {
ast::AnyParameterRef::Variadic(parameter) => {
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
}
ast::AnyParameterRef::NonVariadic(parameter) => {
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
}
},
}
}

@@ -148,6 +192,12 @@ impl DefinitionNodeRef<'_> {
target,
}) => target.into(),
Self::AnnotatedAssignment(node) => node.into(),
Self::AugmentedAssignment(node) => node.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
Self::Parameter(node) => match node {
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
},
}
}
}
@@ -161,6 +211,26 @@ pub enum DefinitionKind {
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
Comprehension(ComprehensionDefinitionKind),
Parameter(AstNodeRef<ast::Parameter>),
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
node: AstNodeRef<ast::Comprehension>,
first: bool,
}

impl ComprehensionDefinitionKind {
pub(crate) fn node(&self) -> &ast::Comprehension {
self.node.node()
}

pub(crate) fn is_first(&self) -> bool {
self.first
}
}

#[derive(Clone, Debug)]
@@ -190,6 +260,10 @@ impl AssignmentDefinitionKind {
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
self.assignment.node()
}

pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
@@ -230,3 +304,27 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
fn from(node: &ast::StmtAugAssign) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::Comprehension> for DefinitionNodeKey {
fn from(node: &ast::Comprehension) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::Parameter> for DefinitionNodeKey {
fn from(node: &ast::Parameter) -> Self {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
fn from(node: &ast::ParameterWithDefault) -> Self {
Self(NodeKey::from_node(node))
}
}

@@ -21,7 +21,7 @@ pub(crate) struct Expression<'db> {
/// The expression node.
#[no_eq]
#[return_ref]
pub(crate) node: AstNodeRef<ast::Expr>,
pub(crate) node_ref: AstNodeRef<ast::Expr>,

#[no_eq]
count: countme::Count<Expression<'static>>,

@@ -114,6 +114,10 @@ impl<'db> ScopeId<'db> {
NodeWithScopeKind::ClassTypeParameters(_)
| NodeWithScopeKind::FunctionTypeParameters(_)
| NodeWithScopeKind::Function(_)
| NodeWithScopeKind::ListComprehension(_)
| NodeWithScopeKind::SetComprehension(_)
| NodeWithScopeKind::DictComprehension(_)
| NodeWithScopeKind::GeneratorExpression(_)
)
}

@@ -127,6 +131,10 @@ impl<'db> ScopeId<'db> {
NodeWithScopeKind::Function(function)
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
NodeWithScopeKind::Lambda(_) => "<lambda>",
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
}
}
}
@@ -170,6 +178,13 @@ pub enum ScopeKind {
Annotation,
Class,
Function,
Comprehension,
}

impl ScopeKind {
pub const fn is_comprehension(self) -> bool {
matches!(self, ScopeKind::Comprehension)
}
}

/// Symbol table for a specific [`Scope`].
@@ -300,6 +315,10 @@ pub(crate) enum NodeWithScopeRef<'a> {
Lambda(&'a ast::ExprLambda),
FunctionTypeParameters(&'a ast::StmtFunctionDef),
ClassTypeParameters(&'a ast::StmtClassDef),
ListComprehension(&'a ast::ExprListComp),
SetComprehension(&'a ast::ExprSetComp),
DictComprehension(&'a ast::ExprDictComp),
GeneratorExpression(&'a ast::ExprGenerator),
}

impl NodeWithScopeRef<'_> {
@@ -326,6 +345,18 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
}
}
}

@@ -337,6 +368,10 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
NodeWithScopeRef::FunctionTypeParameters(_)
| NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
NodeWithScopeRef::ListComprehension(_)
| NodeWithScopeRef::SetComprehension(_)
| NodeWithScopeRef::DictComprehension(_)
| NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
}
}

@@ -356,6 +391,18 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
}
}
}
}
@@ -369,6 +416,10 @@ pub enum NodeWithScopeKind {
Function(AstNodeRef<ast::StmtFunctionDef>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
Lambda(AstNodeRef<ast::ExprLambda>),
ListComprehension(AstNodeRef<ast::ExprListComp>),
SetComprehension(AstNodeRef<ast::ExprSetComp>),
DictComprehension(AstNodeRef<ast::ExprDictComp>),
GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -379,4 +430,8 @@ pub(crate) enum NodeWithScopeKey {
Function(NodeKey),
FunctionTypeParameters(NodeKey),
Lambda(NodeKey),
ListComprehension(NodeKey),
SetComprehension(NodeKey),
DictComprehension(NodeKey),
GeneratorExpression(NodeKey),
}

@@ -1,4 +1,5 @@
//! Build a map from each use of a symbol to the definitions visible from that use.
//! Build a map from each use of a symbol to the definitions visible from that use, and the
//! type-narrowing constraints that apply to each definition.
//!
//! Let's take this code sample:
//!
@@ -6,7 +7,7 @@
//! x = 1
//! x = 2
//! y = x
//! if flag:
//! if y is not None:
//! x = 3
//! else:
//! x = 4
@@ -34,8 +35,8 @@
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//!
//! The other case we need to handle is when a symbol is referenced from a different scope (the
//! most obvious example of this is an import). We call this "public" use of a symbol. So the other
//! Another case we need to handle is when a symbol is referenced from a different scope (the most
//! obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//!
@@ -53,42 +54,55 @@
//! start.)
//!
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope.
//! visible at the end of the scope; effectively we have an implicit "use" of every symbol at the
//! end of the scope.
//!
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
//! We also need to know, for a given definition of a symbol, what type-narrowing constraints apply
//! to it. For instance, in this code sample:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! y = x
//! ```
//!
//! At the use of `x` in `y = x`, the visible definition of `x` is `1 if flag else None`, which
//! would infer as the type `Literal[1] | None`. But the constraint `x is not None` dominates this
//! use, which means we can rule out the possibility that `x` is `None` here, which should give us
//! the type `Literal[1]` for this use.
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
//! visible definitions at that use, or at the end of the scope for that symbol.
//! visible definitions at that use, or at the end of the scope for that symbol, with a list of the
//! dominating constraints for each of those definitions.
//!
//! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
//! actually store these "list of visible definitions" as a vector of [`Definition`] IDs. Instead,
//! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
//! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
//! this representation is that it requires that the definitions visible at any given use of a
//! symbol are stored sequentially in `all_definitions`.
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, the values in `definitions_by_use` and `public_definitions` are a [`SymbolState`]
//! struct which uses bit-sets to track definitions and constraints in terms of
//! [`ScopedDefinitionId`] and [`ScopedConstraintId`], which are indices into the `all_definitions`
//! and `all_constraints` indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: it might be unbound in the
//! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
//! that has a definition for the symbol, leaving us with one visible definition, but still also
//! the "unbound" possibility, since we might not have taken the `if` branch.)
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
//! dramatically increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
//! unnecessarily increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
//! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
//! additional visible "definition", which is the unbound state. If this flag is `false`, it means
//! we've eliminated the possibility of unbound: every path we've followed includes a definition
//! for this symbol.
//! state, and constraints are irrelevant to it, we can represent it more efficiently: we use the
//! `may_be_unbound` boolean on the [`SymbolState`] struct. If this flag is `true`, it means the
//! symbol/use really has one additional visible "definition", which is the unbound state. If this
//! flag is `false`, it means we've eliminated the possibility of unbound: every path we've
//! followed includes a definition for this symbol.
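//!
//! For example, a hedged sketch of the "may be unbound" case:
//!
//! ```python
//! if flag:
//!     x = 1
//! print(x)  # one visible definition (`x = 1`) plus the unbound possibility
//! ```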
//!
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
|
||||
//! as they are encountered by the
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
|
||||
//! constraint as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
|
||||
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
|
||||
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
|
||||
//! definitions for each symbol as the public definitions of that symbol.
|
||||
//! each symbol, the builder tracks the `SymbolState` for that symbol. When we hit a use of a
|
||||
//! symbol, it records the current state for that symbol for that use. When we reach the end of the
|
||||
//! scope, it records the state for each symbol as the public definitions of that symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
@@ -98,10 +112,11 @@
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
|
||||
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
|
||||
//! snapshot.
|
||||
//! all symbols, which we'll need later. Then we record `flag` as a possible constraint on the
|
||||
//! currently visible definition (`x = 2`), and go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` (constrained by `flag`) as the sole visible definition of `x`. At the
|
||||
//! end of the `if` body, we take another snapshot of the currently-visible definitions; we'll call
|
||||
//! this the post-if-body snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
@@ -125,98 +140,142 @@
 //! (In the future we may have some other questions we want to answer as well, such as "is this
 //! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
 //! for each [`Definition`] which is flipped to true when we record that definition for a use.)
+use self::symbol_state::{
+    ConstraintIdIterator, DefinitionIdWithConstraintsIterator, ScopedConstraintId,
+    ScopedDefinitionId, SymbolState,
+};
 use crate::semantic_index::ast_ids::ScopedUseId;
 use crate::semantic_index::definition::Definition;
+use crate::semantic_index::expression::Expression;
 use crate::semantic_index::symbol::ScopedSymbolId;
 use ruff_index::IndexVec;
-use std::ops::Range;
 
-/// All definitions that can reach a given use of a name.
+mod bitset;
+mod symbol_state;
+
+/// Applicable definitions and constraints for every use of a name.
 #[derive(Debug, PartialEq, Eq)]
 pub(crate) struct UseDefMap<'db> {
-    // TODO store constraints with definitions for type narrowing
-    /// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
-    all_definitions: Vec<Definition<'db>>,
+    /// Array of [`Definition`] in this scope.
+    all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
 
-    /// Definitions that can reach a [`ScopedUseId`].
-    definitions_by_use: IndexVec<ScopedUseId, Definitions>,
+    /// Array of constraints (as [`Expression`]) in this scope.
+    all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,
 
-    /// Definitions of each symbol visible at end of scope.
-    public_definitions: IndexVec<ScopedSymbolId, Definitions>,
+    /// [`SymbolState`] visible at a [`ScopedUseId`].
+    definitions_by_use: IndexVec<ScopedUseId, SymbolState>,
+
+    /// [`SymbolState`] visible at end of scope for each symbol.
+    public_definitions: IndexVec<ScopedSymbolId, SymbolState>,
 }
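// Shape of the new map for a scope like `x = 1 if flag else None; if x is not None: ...`
// (illustration only, not part of this diff; IDs assumed):
//
//   all_definitions: [def0 = `x = 1 if flag else None`, def1 = `x = 2 if flag else None`]
//   all_constraints: [c0 = `x is not None`]
//   definitions_by_use[u]: SymbolState { visible_definitions: {def0, def1},
//                                        constraints: [{c0}, {}], may_be_unbound: false }
//   public_definitions[sym]: the same kind of state, as of end-of-scope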
 
 impl<'db> UseDefMap<'db> {
-    pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
-        &self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
+    pub(crate) fn use_definitions(
+        &self,
+        use_id: ScopedUseId,
+    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
+        DefinitionWithConstraintsIterator {
+            all_definitions: &self.all_definitions,
+            all_constraints: &self.all_constraints,
+            inner: self.definitions_by_use[use_id].visible_definitions(),
+        }
     }
 
     pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
-        self.definitions_by_use[use_id].may_be_unbound
+        self.definitions_by_use[use_id].may_be_unbound()
     }
 
-    pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
-        &self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
+    pub(crate) fn public_definitions(
+        &self,
+        symbol: ScopedSymbolId,
+    ) -> DefinitionWithConstraintsIterator<'_, 'db> {
+        DefinitionWithConstraintsIterator {
+            all_definitions: &self.all_definitions,
+            all_constraints: &self.all_constraints,
+            inner: self.public_definitions[symbol].visible_definitions(),
+        }
     }
 
     pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
-        self.public_definitions[symbol].may_be_unbound
+        self.public_definitions[symbol].may_be_unbound()
    }
 }
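// Illustrative sketch (not part of this diff): how a consumer walks the visible
// definitions for a use, together with the narrowing constraints recorded for each
// one. The real consumer is `definitions_ty` in `crate::types`, later in this diff.
fn _example_walk<'db>(map: &UseDefMap<'db>, use_id: ScopedUseId) {
    for DefinitionWithConstraints {
        definition,
        constraints,
    } in map.use_definitions(use_id)
    {
        let _ = definition; // a `Definition<'db>`
        for _constraint in constraints {
            // each item is an `Expression<'db>` that narrows this particular definition
        }
    }
}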
|
||||
/// Definitions visible for a symbol at a particular use (or end-of-scope).
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
struct Definitions {
|
||||
/// [`Range`] in `all_definitions` of the visible definition IDs.
|
||||
definitions_range: Range<usize>,
|
||||
/// Is the symbol possibly unbound at this point?
|
||||
may_be_unbound: bool,
|
||||
}
|
||||
|
||||
impl Definitions {
|
||||
/// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
|
||||
fn unbound() -> Self {
|
||||
Self {
|
||||
definitions_range: Range::default(),
|
||||
may_be_unbound: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Definitions {
|
||||
fn default() -> Self {
|
||||
Definitions::unbound()
|
||||
}
|
||||
}
|
||||
|
||||
/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct FlowSnapshot {
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct DefinitionWithConstraintsIterator<'map, 'db> {
|
||||
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
|
||||
all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
|
||||
inner: DefinitionIdWithConstraintsIterator<'map>,
|
||||
}
|
||||
|
||||
impl<'map, 'db> Iterator for DefinitionWithConstraintsIterator<'map, 'db> {
|
||||
type Item = DefinitionWithConstraints<'map, 'db>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.inner
|
||||
.next()
|
||||
.map(|def_id_with_constraints| DefinitionWithConstraints {
|
||||
definition: self.all_definitions[def_id_with_constraints.definition],
|
||||
constraints: ConstraintsIterator {
|
||||
all_constraints: self.all_constraints,
|
||||
constraint_ids: def_id_with_constraints.constraint_ids,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for DefinitionWithConstraintsIterator<'_, '_> {}
|
||||
|
||||
pub(crate) struct DefinitionWithConstraints<'map, 'db> {
|
||||
pub(crate) definition: Definition<'db>,
|
||||
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
|
||||
}
|
||||
|
||||
pub(crate) struct ConstraintsIterator<'map, 'db> {
|
||||
all_constraints: &'map IndexVec<ScopedConstraintId, Expression<'db>>,
|
||||
constraint_ids: ConstraintIdIterator<'map>,
|
||||
}
|
||||
|
||||
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
|
||||
type Item = Expression<'db>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.constraint_ids
|
||||
.next()
|
||||
.map(|constraint_id| self.all_constraints[constraint_id])
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}
|
||||
|
||||
/// A snapshot of the definitions and constraints state at a particular point in control flow.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct FlowSnapshot {
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct UseDefMapBuilder<'db> {
|
||||
/// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
|
||||
all_definitions: Vec<Definition<'db>>,
|
||||
/// Append-only array of [`Definition`]; None is unbound.
|
||||
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
|
||||
|
||||
/// Append-only array of constraints (as [`Expression`]).
|
||||
all_constraints: IndexVec<ScopedConstraintId, Expression<'db>>,
|
||||
|
||||
/// Visible definitions at each so-far-recorded use.
|
||||
definitions_by_use: IndexVec<ScopedUseId, Definitions>,
|
||||
definitions_by_use: IndexVec<ScopedUseId, SymbolState>,
|
||||
|
||||
/// Currently visible definitions for each symbol.
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, SymbolState>,
|
||||
}
|
||||
|
||||
impl<'db> UseDefMapBuilder<'db> {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
all_definitions: Vec::new(),
|
||||
definitions_by_use: IndexVec::new(),
|
||||
definitions_by_symbol: IndexVec::new(),
|
||||
}
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
|
||||
let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
|
||||
let new_symbol = self.definitions_by_symbol.push(SymbolState::unbound());
|
||||
debug_assert_eq!(symbol, new_symbol);
|
||||
}
|
||||
|
||||
@@ -227,13 +286,15 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
) {
|
||||
// We have a new definition of a symbol; this replaces any previous definitions in this
|
||||
// path.
|
||||
let def_idx = self.all_definitions.len();
|
||||
self.all_definitions.push(definition);
|
||||
self.definitions_by_symbol[symbol] = Definitions {
|
||||
#[allow(clippy::range_plus_one)]
|
||||
definitions_range: def_idx..(def_idx + 1),
|
||||
may_be_unbound: false,
|
||||
};
|
||||
let def_id = self.all_definitions.push(definition);
|
||||
self.definitions_by_symbol[symbol] = SymbolState::with(def_id);
|
||||
}
|
||||
|
||||
pub(super) fn record_constraint(&mut self, constraint: Expression<'db>) {
|
||||
let constraint_id = self.all_constraints.push(constraint);
|
||||
for definitions in &mut self.definitions_by_symbol {
|
||||
definitions.add_constraint(constraint_id);
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
|
||||
@@ -265,15 +326,15 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
|
||||
// If the snapshot we are restoring is missing some symbols we've recorded since, we need
|
||||
// to fill them in so the symbol IDs continue to line up. Since they don't exist in the
|
||||
// snapshot, the correct state to fill them in with is "unbound", the default.
|
||||
// snapshot, the correct state to fill them in with is "unbound".
|
||||
self.definitions_by_symbol
|
||||
.resize(num_symbols, Definitions::unbound());
|
||||
.resize(num_symbols, SymbolState::unbound());
|
||||
}
|
||||
|
||||
/// Merge the given snapshot into the current state, reflecting that we might have taken either
|
||||
/// path to get here. The new visible-definitions state for each symbol should include
|
||||
/// definitions from both the prior state and the snapshot.
|
||||
pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
|
||||
pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
|
||||
// The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
|
||||
// two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
|
||||
// one or the other of the ranges to the end of `all_definitions` so as to make them
|
||||
@@ -287,66 +348,26 @@ impl<'db> UseDefMapBuilder<'db> {
|
||||
// greater than the number of known symbols in a previously-taken snapshot.
|
||||
debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());
|
||||
|
||||
for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
|
||||
let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
|
||||
// Symbol not present in snapshot, so it's unbound from that path.
|
||||
current.may_be_unbound = true;
|
||||
continue;
|
||||
};
|
||||
|
||||
// If the symbol can be unbound in either predecessor, it can be unbound post-merge.
|
||||
current.may_be_unbound |= snapshot.may_be_unbound;
|
||||
|
||||
// Merge the definition ranges.
|
||||
let current = &mut current.definitions_range;
|
||||
let snapshot = &snapshot.definitions_range;
|
||||
|
||||
// We never create reversed ranges.
|
||||
debug_assert!(current.end >= current.start);
|
||||
debug_assert!(snapshot.end >= snapshot.start);
|
||||
|
||||
if current == snapshot {
|
||||
// Ranges already identical, nothing to do.
|
||||
} else if snapshot.is_empty() {
|
||||
// Merging from an empty range; nothing to do.
|
||||
} else if (*current).is_empty() {
|
||||
// Merging to an empty range; just use the incoming range.
|
||||
*current = snapshot.clone();
|
||||
} else if snapshot.end >= current.start && snapshot.start <= current.end {
|
||||
// Ranges are adjacent or overlapping, merge them in-place.
|
||||
*current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
|
||||
} else if current.end == self.all_definitions.len() {
|
||||
// Ranges are not adjacent or overlapping, `current` is at the end of
|
||||
// `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
|
||||
// and can be merged into one range.
|
||||
self.all_definitions.extend_from_within(snapshot.clone());
|
||||
current.end = self.all_definitions.len();
|
||||
} else if snapshot.end == self.all_definitions.len() {
|
||||
// Ranges are not adjacent or overlapping, `snapshot` is at the end of
|
||||
// `all_definitions`, we need to copy `current` to the end so they are adjacent and
|
||||
// can be merged into one range.
|
||||
self.all_definitions.extend_from_within(current.clone());
|
||||
current.start = snapshot.start;
|
||||
current.end = self.all_definitions.len();
|
||||
let mut snapshot_definitions_iter = snapshot.definitions_by_symbol.into_iter();
|
||||
for current in &mut self.definitions_by_symbol {
|
||||
if let Some(snapshot) = snapshot_definitions_iter.next() {
|
||||
current.merge(snapshot);
|
||||
} else {
|
||||
// Ranges are not adjacent and neither one is at the end of `all_definitions`, we
|
||||
// have to copy both to the end so they are adjacent and we can merge them.
|
||||
let start = self.all_definitions.len();
|
||||
self.all_definitions.extend_from_within(current.clone());
|
||||
self.all_definitions.extend_from_within(snapshot.clone());
|
||||
current.start = start;
|
||||
current.end = self.all_definitions.len();
|
||||
// Symbol not present in snapshot, so it's unbound from that path.
|
||||
current.add_unbound();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> UseDefMap<'db> {
|
||||
self.all_definitions.shrink_to_fit();
|
||||
self.all_constraints.shrink_to_fit();
|
||||
self.definitions_by_symbol.shrink_to_fit();
|
||||
self.definitions_by_use.shrink_to_fit();
|
||||
|
||||
UseDefMap {
|
||||
all_definitions: self.all_definitions,
|
||||
all_constraints: self.all_constraints,
|
||||
definitions_by_use: self.definitions_by_use,
|
||||
public_definitions: self.definitions_by_symbol,
|
||||
}
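// Hypothetical driver (not in this diff): the snapshot/constraint choreography the
// SemanticIndexBuilder is expected to follow for an `if`/`else`. The `snapshot` and
// `restore` method names are assumed from context; `record_constraint` and `merge`
// do appear in this diff.
//
//     fn visit_if(builder: &mut UseDefMapBuilder, test: Expression) {
//         let pre_if = builder.snapshot();        // assumed API
//         builder.record_constraint(test);        // `test` now constrains visible definitions
//         // ... visit the `if` body ...
//         let post_if_body = builder.snapshot();  // assumed API
//         builder.restore(pre_if);                // assumed API: `else` starts from pre-if state
//         // ... visit the `else` body ...
//         builder.merge(post_if_body);            // either branch may have run
//     }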
|
||||
|
||||
@@ -0,0 +1,228 @@
|
||||
/// Ordered set of `u32`.
|
||||
///
|
||||
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
|
||||
/// blocks for larger values.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(super) enum BitSet<const B: usize> {
|
||||
/// Bit-set (in 64-bit blocks) for the first 64 * B entries.
|
||||
Inline([u64; B]),
|
||||
|
||||
/// Overflow beyond 64 * B.
|
||||
Heap(Vec<u64>),
|
||||
}
|
||||
|
||||
impl<const B: usize> Default for BitSet<B> {
|
||||
fn default() -> Self {
|
||||
// B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
|
||||
// truncating casts to u32 below safe. This would be better as a const assertion, but
|
||||
// that's not possible on stable with const generic params. (B should never really be
|
||||
// anywhere close to this large.)
|
||||
assert!(B * 64 < (u32::MAX as usize));
|
||||
// This implementation requires usize >= 32 bits.
|
||||
static_assertions::const_assert!(usize::BITS >= 32);
|
||||
Self::Inline([0; B])
|
||||
}
|
||||
}
|
||||
|
||||
impl<const B: usize> BitSet<B> {
|
||||
/// Create and return a new [`BitSet`] with a single `value` inserted.
|
||||
pub(super) fn with(value: u32) -> Self {
|
||||
let mut bitset = Self::default();
|
||||
bitset.insert(value);
|
||||
bitset
|
||||
}
|
||||
|
||||
/// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
|
||||
fn resize(&mut self, value: u32) {
|
||||
let num_blocks_needed = (value / 64) + 1;
|
||||
match self {
|
||||
Self::Inline(blocks) => {
|
||||
let mut vec = blocks.to_vec();
|
||||
vec.resize(num_blocks_needed as usize, 0);
|
||||
*self = Self::Heap(vec);
|
||||
}
|
||||
Self::Heap(vec) => {
|
||||
vec.resize(num_blocks_needed as usize, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn blocks_mut(&mut self) -> &mut [u64] {
|
||||
match self {
|
||||
Self::Inline(blocks) => blocks.as_mut_slice(),
|
||||
Self::Heap(blocks) => blocks.as_mut_slice(),
|
||||
}
|
||||
}
|
||||
|
||||
fn blocks(&self) -> &[u64] {
|
||||
match self {
|
||||
Self::Inline(blocks) => blocks.as_slice(),
|
||||
Self::Heap(blocks) => blocks.as_slice(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Insert a value into the [`BitSet`].
|
||||
///
|
||||
/// Return true if the value was newly inserted, false if already present.
|
||||
pub(super) fn insert(&mut self, value: u32) -> bool {
|
||||
let value_usize = value as usize;
|
||||
let (block, index) = (value_usize / 64, value_usize % 64);
|
||||
if block >= self.blocks().len() {
|
||||
self.resize(value);
|
||||
}
|
||||
let blocks = self.blocks_mut();
|
||||
let missing = blocks[block] & (1 << index) == 0;
|
||||
blocks[block] |= 1 << index;
|
||||
missing
|
||||
}
|
||||
|
||||
/// Intersect in-place with another [`BitSet`].
|
||||
pub(super) fn intersect(&mut self, other: &BitSet<B>) {
|
||||
let my_blocks = self.blocks_mut();
|
||||
let other_blocks = other.blocks();
|
||||
let min_len = my_blocks.len().min(other_blocks.len());
|
||||
for i in 0..min_len {
|
||||
my_blocks[i] &= other_blocks[i];
|
||||
}
|
||||
for block in my_blocks.iter_mut().skip(min_len) {
|
||||
*block = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an iterator over the values (in ascending order) in this [`BitSet`].
|
||||
pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
|
||||
let blocks = self.blocks();
|
||||
BitSetIterator {
|
||||
blocks,
|
||||
current_block_index: 0,
|
||||
current_block: blocks[0],
|
||||
}
|
||||
}
|
||||
}
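// Worked example (illustration, not part of this diff) of the block/index arithmetic
// used by `insert` and `resize`: a value `v` occupies bit `v % 64` of block `v / 64`.
// So inserting 140 into a `BitSet::<1>` needs block 140 / 64 = 2 (bit 140 % 64 = 12),
// i.e. three blocks in total, and the one-block inline form spills to the heap --
// exactly what the `iter_overflow` test below exercises.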
|
||||
|
||||
/// Iterator over values in a [`BitSet`].
|
||||
#[derive(Debug)]
|
||||
pub(super) struct BitSetIterator<'a, const B: usize> {
|
||||
/// The blocks we are iterating over.
|
||||
blocks: &'a [u64],
|
||||
|
||||
/// The index of the block we are currently iterating through.
|
||||
current_block_index: usize,
|
||||
|
||||
/// The block we are currently iterating through (and zeroing as we go.)
|
||||
current_block: u64,
|
||||
}
|
||||
|
||||
impl<const B: usize> Iterator for BitSetIterator<'_, B> {
|
||||
type Item = u32;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
while self.current_block == 0 {
|
||||
if self.current_block_index + 1 >= self.blocks.len() {
|
||||
return None;
|
||||
}
|
||||
self.current_block_index += 1;
|
||||
self.current_block = self.blocks[self.current_block_index];
|
||||
}
|
||||
let lowest_bit_set = self.current_block.trailing_zeros();
|
||||
// reset the lowest set bit, without a data dependency on `lowest_bit_set`
|
||||
self.current_block &= self.current_block.wrapping_sub(1);
|
||||
// SAFETY: `lowest_bit_set` is at most 63. In the inline case `current_block_index` is at most
// `B - 1` and we assert above that `B * 64 < u32::MAX`; in the heap case, blocks exist only for
// inserted `u32` values. Either way `64 * current_block_index` plus the bit index fits in a u32.
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
Some(lowest_bit_set + (64 * self.current_block_index) as u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}
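// Standalone illustration (not part of this diff) of the bit trick in `next` above:
// `x & (x - 1)` clears the lowest set bit, so repeated `trailing_zeros` + clear
// yields the set bit indices in ascending order.
//
//     let mut block: u64 = 0b1011_0100;       // bits 2, 4, 5, 7 set
//     let mut out = Vec::new();
//     while block != 0 {
//         out.push(block.trailing_zeros());   // index of lowest set bit
//         block &= block.wrapping_sub(1);     // clear that bit
//     }
//     assert_eq!(out, [2, 4, 5, 7]);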
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::BitSet;
|
||||
|
||||
fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
|
||||
assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn iter() {
|
||||
let mut b = BitSet::<1>::with(3);
|
||||
b.insert(27);
|
||||
b.insert(6);
|
||||
assert!(matches!(b, BitSet::Inline(_)));
|
||||
assert_bitset(&b, &[3, 6, 27]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn iter_overflow() {
|
||||
let mut b = BitSet::<1>::with(140);
|
||||
b.insert(100);
|
||||
b.insert(129);
|
||||
assert!(matches!(b, BitSet::Heap(_)));
|
||||
assert_bitset(&b, &[100, 129, 140]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(23);
|
||||
b2.insert(5);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_mixed_1() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(89);
|
||||
b2.insert(5);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_mixed_2() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(23);
|
||||
b2.insert(89);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_heap() {
|
||||
let mut b1 = BitSet::<1>::with(4);
|
||||
let mut b2 = BitSet::<1>::with(4);
|
||||
b1.insert(89);
|
||||
b2.insert(90);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[4]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn intersect_heap_2() {
|
||||
let mut b1 = BitSet::<1>::with(89);
|
||||
let mut b2 = BitSet::<1>::with(89);
|
||||
b1.insert(91);
|
||||
b2.insert(90);
|
||||
|
||||
b1.intersect(&b2);
|
||||
assert_bitset(&b1, &[89]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_blocks() {
|
||||
let mut b = BitSet::<2>::with(120);
|
||||
b.insert(45);
|
||||
assert!(matches!(b, BitSet::Inline(_)));
|
||||
assert_bitset(&b, &[45, 120]);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,374 @@
|
||||
//! Track visible definitions of a symbol, and applicable constraints per definition.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between definitions and constraints, not just a single
//! set of currently dominating constraints (where "dominating" means "control flow must have
//! passed through it to reach this point"), because we can have dominating constraints that apply
//! to some definitions but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//!     if flag2:
//!         x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! definition of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each definition, an index into a list of dominating constraints,
//! either, because we can have definitions which are still visible, but subject to constraints
//! that are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//!     x = 1 if flag2 else None
//!     assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` definition, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of definitions and constraints when
//! needed.
use super::bitset::{BitSet, BitSetIterator};
|
||||
use ruff_index::newtype_index;
|
||||
use smallvec::SmallVec;
|
||||
|
||||
/// A newtype-index for a definition in a particular scope.
|
||||
#[newtype_index]
|
||||
pub(super) struct ScopedDefinitionId;
|
||||
|
||||
/// A newtype-index for a constraint expression in a particular scope.
|
||||
#[newtype_index]
|
||||
pub(super) struct ScopedConstraintId;
|
||||
|
||||
/// Can reference this * 64 total definitions inline; more will fall back to the heap.
|
||||
const INLINE_DEFINITION_BLOCKS: usize = 3;
|
||||
|
||||
/// A [`BitSet`] of [`ScopedDefinitionId`], representing visible definitions of a symbol in a scope.
|
||||
type Definitions = BitSet<INLINE_DEFINITION_BLOCKS>;
|
||||
type DefinitionsIterator<'a> = BitSetIterator<'a, INLINE_DEFINITION_BLOCKS>;
|
||||
|
||||
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
|
||||
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
|
||||
|
||||
/// Can keep inline this many visible definitions per symbol at a given time; more will go to heap.
|
||||
const INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL: usize = 4;
|
||||
|
||||
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per visible definition.
|
||||
type InlineConstraintArray =
|
||||
[BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_VISIBLE_DEFINITIONS_PER_SYMBOL];
|
||||
type Constraints = SmallVec<InlineConstraintArray>;
|
||||
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
|
||||
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
|
||||
|
||||
/// Visible definitions and narrowing constraints for a single symbol at some point in control flow.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct SymbolState {
|
||||
/// [`BitSet`]: which [`ScopedDefinitionId`] are visible for this symbol?
|
||||
visible_definitions: Definitions,
|
||||
|
||||
/// For each definition, which [`ScopedConstraintId`] apply?
|
||||
///
|
||||
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
|
||||
/// definition in `visible_definitions`.
|
||||
constraints: Constraints,
|
||||
|
||||
/// Could the symbol be unbound at this point?
|
||||
may_be_unbound: bool,
|
||||
}
|
||||
|
||||
/// A single [`ScopedDefinitionId`] with an iterator of its applicable [`ScopedConstraintId`].
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DefinitionIdWithConstraints<'a> {
|
||||
pub(super) definition: ScopedDefinitionId,
|
||||
pub(super) constraint_ids: ConstraintIdIterator<'a>,
|
||||
}
|
||||
|
||||
impl SymbolState {
|
||||
/// Return a new [`SymbolState`] representing an unbound symbol.
|
||||
pub(super) fn unbound() -> Self {
|
||||
Self {
|
||||
visible_definitions: Definitions::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a new [`SymbolState`] representing a symbol with a single visible definition.
|
||||
pub(super) fn with(definition_id: ScopedDefinitionId) -> Self {
|
||||
let mut constraints = Constraints::with_capacity(1);
|
||||
constraints.push(BitSet::default());
|
||||
Self {
|
||||
visible_definitions: Definitions::with(definition_id.into()),
|
||||
constraints,
|
||||
may_be_unbound: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add Unbound as a possibility for this symbol.
|
||||
pub(super) fn add_unbound(&mut self) {
|
||||
self.may_be_unbound = true;
|
||||
}
|
||||
|
||||
/// Add given constraint to all currently-visible definitions.
|
||||
pub(super) fn add_constraint(&mut self, constraint_id: ScopedConstraintId) {
|
||||
for bitset in &mut self.constraints {
|
||||
bitset.insert(constraint_id.into());
|
||||
}
|
||||
}
|
||||
|
||||
/// Merge another [`SymbolState`] into this one.
|
||||
pub(super) fn merge(&mut self, b: SymbolState) {
|
||||
let mut a = Self {
|
||||
visible_definitions: Definitions::default(),
|
||||
constraints: Constraints::default(),
|
||||
may_be_unbound: self.may_be_unbound || b.may_be_unbound,
|
||||
};
|
||||
std::mem::swap(&mut a, self);
|
||||
let mut a_defs_iter = a.visible_definitions.iter();
|
||||
let mut b_defs_iter = b.visible_definitions.iter();
|
||||
let mut a_constraints_iter = a.constraints.into_iter();
|
||||
let mut b_constraints_iter = b.constraints.into_iter();
|
||||
|
||||
let mut opt_a_def: Option<u32> = a_defs_iter.next();
|
||||
let mut opt_b_def: Option<u32> = b_defs_iter.next();
|
||||
|
||||
// Iterate through the definitions from `a` and `b`, always processing the lower definition
|
||||
// ID first, and pushing each definition onto the merged `SymbolState` with its
|
||||
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
|
||||
// of the constraints from the two paths; a constraint that applies from only one possible
|
||||
// path is irrelevant.
|
||||
|
||||
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
|
||||
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
|
||||
merged.visible_definitions.insert(def);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
|
||||
// bitsets (`::unbound`) or one definition and one constraint bitset (`::with`), and
|
||||
// `::merge` always pushes one definition and one constraint bitset together (just
|
||||
// below), so the number of definitions and the number of constraint bitsets can never
|
||||
// get out of sync.
|
||||
let constraints = constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
merged.constraints.push(constraints);
|
||||
};
|
||||
|
||||
loop {
|
||||
match (opt_a_def, opt_b_def) {
|
||||
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
|
||||
std::cmp::Ordering::Less => {
|
||||
// Next definition ID is only in `a`, push it to `self` and advance `a`.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Greater => {
|
||||
// Next definition ID is only in `b`, push it to `self` and advance `b`.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
std::cmp::Ordering::Equal => {
|
||||
// Next definition is in both; push to `self` and intersect constraints.
|
||||
push(a_def, &mut b_constraints_iter, self);
|
||||
// SAFETY: we only ever create SymbolState with either no definitions and
|
||||
// no constraint bitsets (`::unbound`) or one definition and one constraint
|
||||
// bitset (`::with`), and `::merge` always pushes one definition and one
|
||||
// constraint bitset together (just below), so the number of definitions
|
||||
// and the number of constraint bitsets can never get out of sync.
|
||||
let a_constraints = a_constraints_iter
|
||||
.next()
|
||||
.expect("definitions and constraints length mismatch");
|
||||
// If the same definition is visible through both paths, any constraint
|
||||
// that applies on only one path is irrelevant to the resulting type from
|
||||
// unioning the two paths, so we intersect the constraints.
|
||||
self.constraints
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.intersect(&a_constraints);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
},
|
||||
(Some(a_def), None) => {
|
||||
// We've exhausted `b`, just push the def from `a` and move on to the next.
|
||||
push(a_def, &mut a_constraints_iter, self);
|
||||
opt_a_def = a_defs_iter.next();
|
||||
}
|
||||
(None, Some(b_def)) => {
|
||||
// We've exhausted `a`, just push the def from `b` and move on to the next.
|
||||
push(b_def, &mut b_constraints_iter, self);
|
||||
opt_b_def = b_defs_iter.next();
|
||||
}
|
||||
(None, None) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get iterator over visible definitions with constraints.
|
||||
pub(super) fn visible_definitions(&self) -> DefinitionIdWithConstraintsIterator {
|
||||
DefinitionIdWithConstraintsIterator {
|
||||
definitions: self.visible_definitions.iter(),
|
||||
constraints: self.constraints.iter(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Could the symbol be unbound?
|
||||
pub(super) fn may_be_unbound(&self) -> bool {
|
||||
self.may_be_unbound
|
||||
}
|
||||
}
|
||||
|
||||
/// The default state of a symbol (if we've seen no definitions of it) is unbound.
|
||||
impl Default for SymbolState {
|
||||
fn default() -> Self {
|
||||
SymbolState::unbound()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct DefinitionIdWithConstraintsIterator<'a> {
|
||||
definitions: DefinitionsIterator<'a>,
|
||||
constraints: ConstraintsIterator<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for DefinitionIdWithConstraintsIterator<'a> {
|
||||
type Item = DefinitionIdWithConstraints<'a>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match (self.definitions.next(), self.constraints.next()) {
|
||||
(None, None) => None,
|
||||
(Some(def), Some(constraints)) => Some(DefinitionIdWithConstraints {
|
||||
definition: ScopedDefinitionId::from_u32(def),
|
||||
constraint_ids: ConstraintIdIterator {
|
||||
wrapped: constraints.iter(),
|
||||
},
|
||||
}),
|
||||
// SAFETY: see above.
|
||||
_ => unreachable!("definitions and constraints length mismatch"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for DefinitionIdWithConstraintsIterator<'_> {}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct ConstraintIdIterator<'a> {
|
||||
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
|
||||
}
|
||||
|
||||
impl Iterator for ConstraintIdIterator<'_> {
|
||||
type Item = ScopedConstraintId;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.wrapped.next().map(ScopedConstraintId::from_u32)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
|
||||
|
||||
impl SymbolState {
|
||||
pub(crate) fn assert(&self, may_be_unbound: bool, expected: &[&str]) {
|
||||
assert_eq!(self.may_be_unbound(), may_be_unbound);
|
||||
let actual = self
|
||||
.visible_definitions()
|
||||
.map(|def_id_with_constraints| {
|
||||
format!(
|
||||
"{}<{}>",
|
||||
def_id_with_constraints.definition.as_u32(),
|
||||
def_id_with_constraints
|
||||
.constraint_ids
|
||||
.map(ScopedConstraintId::as_u32)
|
||||
.map(|idx| idx.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unbound() {
|
||||
let cd = SymbolState::unbound();
|
||||
|
||||
cd.assert(true, &[]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn with() {
|
||||
let cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
|
||||
cd.assert(false, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_unbound() {
|
||||
let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd.add_unbound();
|
||||
|
||||
cd.assert(true, &["0<>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_constraint() {
|
||||
let mut cd = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
cd.assert(false, &["0<0>"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn merge() {
|
||||
// merging the same definition with the same constraint keeps the constraint
|
||||
let mut cd0a = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd0a.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
let mut cd0b = SymbolState::with(ScopedDefinitionId::from_u32(0));
|
||||
cd0b.add_constraint(ScopedConstraintId::from_u32(0));
|
||||
|
||||
cd0a.merge(cd0b);
|
||||
let mut cd0 = cd0a;
|
||||
cd0.assert(false, &["0<0>"]);
|
||||
|
||||
// merging the same definition with differing constraints drops all constraints
|
||||
let mut cd1a = SymbolState::with(ScopedDefinitionId::from_u32(1));
|
||||
cd1a.add_constraint(ScopedConstraintId::from_u32(1));
|
||||
|
||||
let mut cd1b = SymbolState::with(ScopedDefinitionId::from_u32(1));
|
||||
cd1b.add_constraint(ScopedConstraintId::from_u32(2));
|
||||
|
||||
cd1a.merge(cd1b);
|
||||
let cd1 = cd1a;
|
||||
cd1.assert(false, &["1<>"]);
|
||||
|
||||
// merging a constrained definition with unbound keeps both
|
||||
let mut cd2a = SymbolState::with(ScopedDefinitionId::from_u32(2));
|
||||
cd2a.add_constraint(ScopedConstraintId::from_u32(3));
|
||||
|
||||
let cd2b = SymbolState::unbound();
|
||||
|
||||
cd2a.merge(cd2b);
|
||||
let cd2 = cd2a;
|
||||
cd2.assert(true, &["2<3>"]);
|
||||
|
||||
// merging different definitions keeps them each with their existing constraints
|
||||
cd0.merge(cd2);
|
||||
let cd = cd0;
|
||||
cd.assert(true, &["0<0>", "2<3>"]);
|
||||
}
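
    // Illustrative extra test (not part of this diff): how the states above model the
    // first example in the module docs. Definition 0 = `x = 1 if flag else None`,
    // definition 1 = `x = 2 if flag else None`, constraint 0 = `x is not None`.
    #[test]
    fn merge_keeps_constraints_per_definition() {
        // x = 1 if flag else None
        let mut state = SymbolState::with(ScopedDefinitionId::from_u32(0));
        // if x is not None:
        state.add_constraint(ScopedConstraintId::from_u32(0));
        // snapshot before `if flag2:` -- the branch may not be taken
        let pre_branch = state.clone();
        //     x = 2 if flag else None  (replaces definition 0 on this path)
        state = SymbolState::with(ScopedDefinitionId::from_u32(1));
        // merge the two paths: definition 0 keeps constraint 0, definition 1 never had it
        state.merge(pre_branch);
        state.assert(false, &["0<0>", "1<>"]);
    }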
|
||||
}
|
||||
@@ -1,6 +1,8 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::files::{File, FilePath};
|
||||
use ruff_db::source::line_index;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
|
||||
use ruff_python_ast::{Expr, ExpressionRef};
|
||||
use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
@@ -25,6 +27,14 @@ impl<'db> SemanticModel<'db> {
|
||||
self.db
|
||||
}
|
||||
|
||||
pub fn file_path(&self) -> &FilePath {
|
||||
self.file.path(self.db)
|
||||
}
|
||||
|
||||
pub fn line_index(&self) -> LineIndex {
|
||||
line_index(self.db.upcast(), self.file)
|
||||
}
|
||||
|
||||
pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
|
||||
resolve_module(self.db, module_name)
|
||||
}
|
||||
@@ -137,62 +147,61 @@ impl HasTy for ast::Expr {
|
||||
}
|
||||
}
|
||||
|
||||
impl HasTy for ast::StmtFunctionDef {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
}
|
||||
macro_rules! impl_definition_has_ty {
|
||||
($ty: ty) => {
|
||||
impl HasTy for $ty {
|
||||
#[inline]
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl HasTy for StmtClassDef {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasTy for ast::Alias {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
}
|
||||
}
|
||||
impl_definition_has_ty!(ast::StmtFunctionDef);
|
||||
impl_definition_has_ty!(ast::StmtClassDef);
|
||||
impl_definition_has_ty!(ast::Alias);
|
||||
impl_definition_has_ty!(ast::Parameter);
|
||||
impl_definition_has_ty!(ast::ParameterWithDefault);
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::program::{Program, SearchPathSettings, TargetVersion};
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::types::Type;
|
||||
use crate::{HasTy, SemanticModel};
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
Program::new(
|
||||
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
db.write_files(files)?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
TargetVersion::Py38,
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
);
|
||||
)?;
|
||||
|
||||
db
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
|
||||
|
||||
db.write_file("/src/foo.py", "def test(): pass")?;
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
@@ -208,9 +217,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn class_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
|
||||
|
||||
db.write_file("/src/foo.py", "class Test: pass")?;
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
@@ -226,12 +234,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn alias_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
let db = setup_db([
|
||||
("/src/foo.py", "class Test: pass"),
|
||||
("/src/bar.py", "from foo import Test"),
|
||||
])?;
|
||||
|
||||
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, bar);
|
||||
|
||||
@@ -4,13 +4,38 @@ use ruff_python_ast::name::Name;
|
||||
use crate::builtins::builtins_scope;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
|
||||
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
|
||||
use crate::semantic_index::{
|
||||
global_scope, semantic_index, symbol_table, use_def_map, DefinitionWithConstraints,
|
||||
DefinitionWithConstraintsIterator,
|
||||
};
|
||||
use crate::types::narrow::narrowing_constraint;
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
|
||||
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
|
||||
pub(crate) use self::infer::{
|
||||
infer_definition_types, infer_expression_types, infer_scope_types, TypeInference,
|
||||
};
|
||||
|
||||
mod builder;
|
||||
mod diagnostic;
|
||||
mod display;
|
||||
mod infer;
|
||||
mod narrow;
|
||||
|
||||
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};
|
||||
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
|
||||
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
|
||||
|
||||
let index = semantic_index(db, file);
|
||||
let mut diagnostics = TypeCheckDiagnostics::new();
|
||||
|
||||
for scope_id in index.scope_ids() {
|
||||
let result = infer_scope_types(db, scope_id);
|
||||
diagnostics.extend(result.diagnostics());
|
||||
}
|
||||
|
||||
diagnostics
|
||||
}
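// Hypothetical caller (not in this diff): print every diagnostic for a file, using
// `iter()` and `message()` exactly as the tests below do.
//
//     fn report(db: &dyn Db, file: File) {
//         for diagnostic in check_types(db, file).iter() {
//             eprintln!("{}", diagnostic.message());
//         }
//     }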
|
||||
|
||||
/// Infer the public type of a symbol (its type as seen from outside its scope).
|
||||
pub(crate) fn symbol_ty<'db>(
|
||||
@@ -80,10 +105,31 @@ pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -
|
||||
/// provide an `unbound_ty`.
|
||||
pub(crate) fn definitions_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
definitions: &[Definition<'db>],
|
||||
definitions_with_constraints: DefinitionWithConstraintsIterator<'_, 'db>,
|
||||
unbound_ty: Option<Type<'db>>,
|
||||
) -> Type<'db> {
|
||||
let def_types = definitions.iter().map(|def| definition_ty(db, *def));
|
||||
let def_types = definitions_with_constraints.map(
|
||||
|DefinitionWithConstraints {
|
||||
definition,
|
||||
constraints,
|
||||
}| {
|
||||
let mut constraint_tys =
|
||||
constraints.filter_map(|test| narrowing_constraint(db, test, definition));
|
||||
let definition_ty = definition_ty(db, definition);
|
||||
if let Some(first_constraint_ty) = constraint_tys.next() {
|
||||
let mut builder = IntersectionBuilder::new(db);
|
||||
builder = builder
|
||||
.add_positive(definition_ty)
|
||||
.add_positive(first_constraint_ty);
|
||||
for constraint_ty in constraint_tys {
|
||||
builder = builder.add_positive(constraint_ty);
|
||||
}
|
||||
builder.build()
|
||||
} else {
|
||||
definition_ty
|
||||
}
|
||||
},
|
||||
);
|
||||
let mut all_types = unbound_ty.into_iter().chain(def_types);
|
||||
|
||||
let Some(first) = all_types.next() else {
|
||||
@@ -91,14 +137,14 @@ pub(crate) fn definitions_ty<'db>(
|
||||
};
|
||||
|
||||
if let Some(second) = all_types.next() {
|
||||
let mut builder = UnionTypeBuilder::new(db);
|
||||
let mut builder = UnionBuilder::new(db);
|
||||
builder = builder.add(first).add(second);
|
||||
|
||||
for variant in all_types {
|
||||
builder = builder.add(variant);
|
||||
}
|
||||
|
||||
Type::Union(builder.build())
|
||||
builder.build()
|
||||
} else {
|
||||
first
|
||||
}
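// Illustration (not in this diff) of what the constraint handling above computes for
// `x = 1 if flag else None` narrowed by `x is not None`: each visible definition's type
// is intersected with its applicable narrowing constraints before the union is built.
// `int_or_none` and `none_ty` are assumed stand-ins for real types.
//
//     let narrowed = IntersectionBuilder::new(db)
//         .add_positive(int_or_none)  // the definition's type, `int | None`
//         .add_negative(none_ty)      // "not None", per the narrowing constraint
//         .build();                   // simplifies toward `int`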
|
||||
@@ -111,13 +157,13 @@ pub enum Type<'db> {
|
||||
Any,
|
||||
/// the empty set of values
|
||||
Never,
|
||||
/// unknown type (no annotation)
|
||||
/// unknown type (either no annotation, or some kind of type error)
|
||||
/// equivalent to Any, or possibly to object in strict mode
|
||||
Unknown,
|
||||
/// name does not exist or is not bound to any value (this represents an error, but with some
|
||||
/// leniency options it could be silently resolved to Unknown in some cases)
|
||||
Unbound,
|
||||
/// the None object (TODO remove this in favor of Instance(types.NoneType)
|
||||
/// the None object -- TODO remove this in favor of Instance(types.NoneType)
|
||||
None,
|
||||
/// a specific function object
|
||||
Function(FunctionType<'db>),
|
||||
@@ -127,8 +173,11 @@ pub enum Type<'db> {
|
||||
Class(ClassType<'db>),
|
||||
/// the set of Python objects with the given class in their __class__'s method resolution order
|
||||
Instance(ClassType<'db>),
|
||||
/// the set of objects in any of the types in the union
|
||||
Union(UnionType<'db>),
|
||||
/// the set of objects in all of the types in the intersection
|
||||
Intersection(IntersectionType<'db>),
|
||||
/// An integer literal
|
||||
IntLiteral(i64),
|
||||
/// A boolean literal, either `True` or `False`.
|
||||
BooleanLiteral(bool),
|
||||
@@ -144,34 +193,83 @@ impl<'db> Type<'db> {
|
||||
matches!(self, Type::Unknown)
|
||||
}
|
||||
|
||||
pub const fn is_never(&self) -> bool {
|
||||
matches!(self, Type::Never)
|
||||
}
|
||||
|
||||
pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Unbound => true,
|
||||
Type::Union(union) => union.contains(db, Type::Unbound),
|
||||
// Unbound can't appear in an intersection, because an intersection with Unbound
|
||||
// simplifies to just Unbound.
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
|
||||
match self {
|
||||
Type::Unbound => replacement,
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.into_iter()
|
||||
.fold(UnionBuilder::new(db), |builder, ty| {
|
||||
builder.add(ty.replace_unbound_with(db, replacement))
|
||||
})
|
||||
.build(),
|
||||
ty => *ty,
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a member access of a type.
|
||||
///
|
||||
/// For example, if `foo` is `Type::Instance(<Bar>)`,
|
||||
/// `foo.member(&db, "baz")` returns the type of `baz` attributes
|
||||
/// as accessed from instances of the `Bar` class.
|
||||
///
|
||||
/// TODO: use of this method currently requires manually checking
|
||||
/// whether the returned type is `Unknown`/`Unbound`
|
||||
/// (or a union with `Unknown`/`Unbound`) in many places.
|
||||
/// Ideally we'd use a more type-safe pattern, such as returning
|
||||
/// an `Option` or a `Result` from this method, which would force
|
||||
/// us to explicitly consider whether to handle an error or propagate
|
||||
/// it up the call stack.
|
||||
#[must_use]
|
||||
pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
|
||||
match self {
|
||||
Type::Any => Type::Any,
|
||||
Type::Never => todo!("attribute lookup on Never type"),
|
||||
Type::Never => {
|
||||
// TODO: attribute lookup on Never type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Unknown => Type::Unknown,
|
||||
Type::Unbound => Type::Unbound,
|
||||
Type::None => todo!("attribute lookup on None type"),
|
||||
Type::Function(_) => todo!("attribute lookup on Function type"),
|
||||
Type::None => {
|
||||
// TODO: attribute lookup on None type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Function(_) => {
|
||||
// TODO: attribute lookup on function type
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
|
||||
Type::Class(class) => class.class_member(db, name),
|
||||
Type::Instance(_) => {
|
||||
// TODO MRO? get_own_instance_member, get_instance_member
|
||||
todo!("attribute lookup on Instance type")
|
||||
Type::Unknown
|
||||
}
|
||||
Type::Union(union) => Type::Union(
|
||||
union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.fold(UnionTypeBuilder::new(db), |builder, element_ty| {
|
||||
builder.add(element_ty.member(db, name))
|
||||
})
|
||||
.build(),
|
||||
),
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.fold(UnionBuilder::new(db), |builder, element_ty| {
|
||||
builder.add(element_ty.member(db, name))
|
||||
})
|
||||
.build(),
|
||||
Type::Intersection(_) => {
|
||||
// TODO perform the get_member on each type in the intersection
|
||||
// TODO return the intersection of those results
|
||||
todo!("attribute lookup on Intersection type")
|
||||
Type::Unknown
|
||||
}
|
||||
Type::IntLiteral(_) => {
|
||||
// TODO raise error
|
||||
@@ -251,7 +349,7 @@ impl<'db> ClassType<'db> {
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct UnionType<'db> {
|
||||
/// the union type includes values in any of these types
|
||||
/// The union type includes values in any of these types.
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
}
|
||||
|
||||
@@ -261,48 +359,127 @@ impl<'db> UnionType<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
struct UnionTypeBuilder<'db> {
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl<'db> UnionTypeBuilder<'db> {
|
||||
fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
elements: FxOrderSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a type to this union.
|
||||
fn add(mut self, ty: Type<'db>) -> Self {
|
||||
match ty {
|
||||
Type::Union(union) => {
|
||||
self.elements.extend(&union.elements(self.db));
|
||||
}
|
||||
_ => {
|
||||
self.elements.insert(ty);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
fn build(self) -> UnionType<'db> {
|
||||
UnionType::new(self.db, self.elements)
|
||||
}
|
||||
}
|
||||
|
||||
// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
|
||||
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
|
||||
// have to represent it as a single-element intersection if it did) in exchange for better
|
||||
// efficiency in the within-intersection case.
|
||||
#[salsa::interned]
|
||||
pub struct IntersectionType<'db> {
|
||||
// the intersection type includes only values in all of these types
|
||||
/// The intersection type includes only values in all of these types.
|
||||
positive: FxOrderSet<Type<'db>>,
|
||||
// the intersection type does not include any value in any of these types
|
||||
|
||||
/// The intersection type does not include any value in any of these types.
|
||||
///
|
||||
/// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
/// directly in intersections rather than as a separate type.
|
||||
negative: FxOrderSet<Type<'db>>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Context;
|
||||
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
|
||||
use super::TypeCheckDiagnostics;
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
db.memory_file_system()
|
||||
.create_directory_all("/src")
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
fn assert_diagnostic_messages(diagnostics: &TypeCheckDiagnostics, expected: &[&str]) {
|
||||
let messages: Vec<&str> = diagnostics
|
||||
.iter()
|
||||
.map(|diagnostic| diagnostic.message())
|
||||
.collect();
|
||||
assert_eq!(&messages, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unresolved_import_statement() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_file("src/foo.py", "import bar\n")
|
||||
.context("Failed to write foo.py")?;
|
||||
|
||||
let foo = system_path_to_file(&db, "src/foo.py").context("Failed to resolve foo.py")?;
|
||||
|
||||
let diagnostics = super::check_types(&db, foo);
|
||||
assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unresolved_import_from_statement() {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_file("src/foo.py", "from bar import baz\n")
|
||||
.unwrap();
|
||||
let foo = system_path_to_file(&db, "src/foo.py").unwrap();
|
||||
let diagnostics = super::check_types(&db, foo);
|
||||
assert_diagnostic_messages(&diagnostics, &["Import 'bar' could not be resolved."]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unresolved_import_from_resolved_module() {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([("/src/a.py", ""), ("/src/b.py", "from a import thing")])
|
||||
.unwrap();
|
||||
|
||||
let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
|
||||
let b_file_diagnostics = super::check_types(&db, b_file);
|
||||
assert_diagnostic_messages(
|
||||
&b_file_diagnostics,
|
||||
&["Could not resolve import of 'thing' from 'a'"],
|
||||
);
|
||||
}
|
||||
|
||||
#[ignore = "\
|
||||
A spurious second 'Unresolved import' diagnostic message is emitted on `b.py`, \
|
||||
despite the symbol existing in the symbol table for `a.py`"]
|
||||
#[test]
|
||||
fn resolved_import_of_symbol_from_unresolved_import() {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
("/src/a.py", "import foo as foo"),
|
||||
("/src/b.py", "from a import foo"),
|
||||
])
|
||||
.unwrap();
|
||||
|
||||
let a_file = system_path_to_file(&db, "/src/a.py").unwrap();
|
||||
let a_file_diagnostics = super::check_types(&db, a_file);
|
||||
assert_diagnostic_messages(
|
||||
&a_file_diagnostics,
|
||||
&["Import 'foo' could not be resolved."],
|
||||
);
|
||||
|
||||
// Importing the unresolved import into a second first-party file should not trigger
|
||||
// an additional "unresolved import" violation
|
||||
let b_file = system_path_to_file(&db, "/src/b.py").unwrap();
|
||||
let b_file_diagnostics = super::check_types(&db, b_file);
|
||||
assert_eq!(&*b_file_diagnostics, &[]);
|
||||
}
|
||||
}
|
||||
|
||||
471
crates/red_knot_python_semantic/src/types/builder.rs
Normal file
@@ -0,0 +1,471 @@
|
||||
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
//!   have a standalone negation type so there's no other representation for this.
//! * The same type should never appear more than once in a union or intersection. (This should
//!   be expanded to cover subtyping -- see below -- but for now we only implement it for type
//!   identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//!   than a union-of-intersections. Unions cannot contain other unions (the inner union just
//!   flattens into the outer one), intersections cannot contain other intersections (also
//!   flattens), and intersections cannot contain unions (the intersection distributes over the
//!   union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//!   subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//!   eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{IntersectionType, Type, UnionType};
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
pub(crate) struct UnionBuilder<'db> {
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl<'db> UnionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
elements: FxOrderSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a type to this union.
|
||||
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
|
||||
match ty {
|
||||
Type::Union(union) => {
|
||||
self.elements.extend(&union.elements(self.db));
|
||||
}
|
||||
Type::Never => {}
|
||||
_ => {
|
||||
self.elements.insert(ty);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn build(self) -> Type<'db> {
|
||||
match self.elements.len() {
|
||||
0 => Type::Never,
|
||||
1 => self.elements[0],
|
||||
_ => Type::Union(UnionType::new(self.db, self.elements)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct IntersectionBuilder<'db> {
|
||||
// Really this builds a union-of-intersections, because we always keep our set-theoretic types
|
||||
// in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
|
||||
// just a single intersection in this vector, and we are building a single intersection type,
|
||||
// but if a union is added to the intersection, we'll distribute ourselves over that union and
|
||||
// create a union of intersections.
|
||||
intersections: Vec<InnerIntersectionBuilder<'db>>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl<'db> IntersectionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![InnerIntersectionBuilder::new()],
|
||||
}
|
||||
}
|
||||
|
||||
fn empty(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
    pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF), we distribute the union and get `(T1 & T2 & T3) | (T1 & T2 & T4)`.
            // If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)` and we add
            // `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 & T2 & T6) |
            // (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    pub(crate) fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            let mut builder = UnionBuilder::new(self.db);
            for inner in self.intersections {
                builder = builder.add(inner.build(self.db));
            }
            builder.build()
        }
    }
}

#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    positive: FxOrderSet<Type<'db>>,
    negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            Type::Never => {}
            Type::Unbound => {}
            _ => {
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }

    fn simplify(&mut self) {
        // TODO this should be generalized based on subtyping, for now we just handle a few cases

        // Never is a subtype of all types
        if self.positive.contains(&Type::Never) {
            self.positive.retain(Type::is_never);
            self.negative.clear();
        }

        if self.positive.contains(&Type::Unbound) {
            self.positive.retain(Type::is_unbound);
            self.negative.clear();
        }

        // None intersects only with object
        for pos in &self.positive {
            if let Type::Instance(_) = pos {
                // could be `object` type
            } else {
                self.negative.remove(&Type::None);
                break;
            }
        }
    }

    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use crate::db::tests::TestDb;

    fn setup_db() -> TestDb {
        TestDb::new()
    }

    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().collect()
        }
    }

    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }

    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

        assert_eq!(ty, t0);
    }

    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }

    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().collect()
        }
    }

    #[test]
    fn build_intersection() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
    }

    #[test]
    fn build_intersection_flatten_positive() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
    }

    #[test]
    fn build_intersection_flatten_negative() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
    }

    #[test]
    fn intersection_distributes_over_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

        let Type::Union(union) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);
        assert_eq!(i1.pos_vec(&db), &[ta, t1]);
    }

    #[test]
    fn build_intersection_self_negation() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::None)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_negative_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::Never)
            .build();

        assert_eq!(ty, Type::None);
    }

    #[test]
    fn build_intersection_simplify_positive_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_positive(Type::Never)
            .build();

        assert_eq!(ty, Type::Never);
    }

    #[test]
    fn build_intersection_simplify_positive_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::Unbound);
    }

    #[test]
    fn build_intersection_simplify_negative_unbound() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::Unbound)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }

    #[test]
    fn build_intersection_simplify_negative_none() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_negative(Type::None)
            .add_positive(Type::IntLiteral(1))
            .build();

        assert_eq!(ty, Type::IntLiteral(1));
    }
}
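The DNF invariant is easiest to see in action. The following is a minimal sketch that condenses the `intersection_distributes_over_union` unit test in the new file above; it reuses that test module's `setup_db` helper and is illustrative, not additional shipped code:

```rust
let db = setup_db();
// `0 | 1` as a union type.
let zero_or_one = UnionBuilder::new(&db)
    .add(Type::IntLiteral(0))
    .add(Type::IntLiteral(1))
    .build();
// `Any & (0 | 1)` is not in DNF, so the builder distributes the intersection
// over the union: `build()` returns `(Any & 0) | (Any & 1)`, i.e. a
// `Type::Union` whose elements are two `Type::Intersection`s.
let ty = IntersectionBuilder::new(&db)
    .add_positive(Type::Any)
    .add_positive(zero_or_one)
    .build();
assert!(matches!(ty, Type::Union(_)));
```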
crates/red_knot_python_semantic/src/types/diagnostic.rs (new file, 111 lines)
@@ -0,0 +1,111 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
    // TODO: Don't use string keys for rules
    pub(super) rule: String,
    pub(super) message: String,
    pub(super) range: TextRange,
    pub(super) file: File,
}

impl TypeCheckDiagnostic {
    pub fn rule(&self) -> &str {
        &self.rule
    }

    pub fn message(&self) -> &str {
        &self.message
    }

    pub fn file(&self) -> File {
        self.file
    }
}

impl Ranged for TypeCheckDiagnostic {
    fn range(&self) -> TextRange {
        self.range
    }
}

/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// while moving from `infer_expression` to `check_file`. We could consider making
/// [`TypeCheckDiagnostic`] a Salsa struct so that it is arena-allocated (once the Tables
/// refactor is done). Using a Salsa struct has the downside that it leaks the Salsa
/// dependency into the diagnostics, and each Salsa struct comes with some overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
    inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}

impl TypeCheckDiagnostics {
    pub fn new() -> Self {
        Self { inner: Vec::new() }
    }

    pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
        self.inner.push(Arc::new(diagnostic));
    }

    pub(crate) fn shrink_to_fit(&mut self) {
        self.inner.shrink_to_fit();
    }
}

impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
        self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
    }
}

impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner.extend(iter);
    }
}

impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
    fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
        self.inner
            .extend(iter.into_iter().map(std::sync::Arc::clone));
    }
}

impl std::fmt::Debug for TypeCheckDiagnostics {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.inner.fmt(f)
    }
}

impl Deref for TypeCheckDiagnostics {
    type Target = [std::sync::Arc<TypeCheckDiagnostic>];

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl IntoIterator for TypeCheckDiagnostics {
    type Item = Arc<TypeCheckDiagnostic>;
    type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.into_iter()
    }
}

impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
    type Item = &'a Arc<TypeCheckDiagnostic>;
    type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;

    fn into_iter(self) -> Self::IntoIter {
        self.inner.iter()
    }
}
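A hedged sketch of the aggregation pattern this `Arc` wrapping is meant to support: a `check_file`-style query can fold many per-expression collections into one by cloning pointers rather than the underlying rule and message strings. The `collect_file_diagnostics` function and its iterator argument are illustrative assumptions, not part of this diff:

```rust
fn collect_file_diagnostics<'a>(
    per_expression: impl IntoIterator<Item = &'a TypeCheckDiagnostics>,
) -> TypeCheckDiagnostics {
    let mut all = TypeCheckDiagnostics::new();
    for diagnostics in per_expression {
        // Uses the `Extend<&Arc<TypeCheckDiagnostic>>` impl above: extending
        // bumps reference counts instead of deep-copying each diagnostic.
        all.extend(diagnostics);
    }
    all.shrink_to_fit();
    all
}
```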
(File diff suppressed because it is too large)
crates/red_knot_python_semantic/src/types/narrow.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type, TypeInference};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use std::sync::Arc;

/// Return the type constraint that `test` (if true) would place on `definition`, if any.
///
/// For example, if we have this code:
///
/// ```python
/// y = 1 if flag else None
/// x = 1 if flag else None
/// if x is not None:
///     ...
/// ```
///
/// The `test` expression `x is not None` places the constraint "not None" on the definition of
/// `x`, so in that case we'd return `Some(Type::Intersection(negative=[Type::None]))`.
///
/// But if we called this with the same `test` expression and the `definition` of `y`, no
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
    definition: Definition<'db>,
) -> Option<Type<'db>> {
    all_narrowing_constraints(db, test)
        .get(&definition.symbol(db))
        .copied()
}

#[salsa::tracked(return_ref)]
fn all_narrowing_constraints<'db>(
    db: &'db dyn Db,
    test: Expression<'db>,
) -> NarrowingConstraints<'db> {
    NarrowingConstraintsBuilder::new(db, test).finish()
}

type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;

struct NarrowingConstraintsBuilder<'db> {
    db: &'db dyn Db,
    expression: Expression<'db>,
    constraints: NarrowingConstraints<'db>,
}

impl<'db> NarrowingConstraintsBuilder<'db> {
    fn new(db: &'db dyn Db, expression: Expression<'db>) -> Self {
        Self {
            db,
            expression,
            constraints: NarrowingConstraints::default(),
        }
    }

    fn finish(mut self) -> NarrowingConstraints<'db> {
        if let ast::Expr::Compare(expr_compare) = self.expression.node_ref(self.db).node() {
            self.add_expr_compare(expr_compare);
        }
        // TODO other test expression kinds

        self.constraints.shrink_to_fit();
        self.constraints
    }

    fn symbols(&self) -> Arc<SymbolTable> {
        symbol_table(self.db, self.scope())
    }

    fn scope(&self) -> ScopeId<'db> {
        self.expression.scope(self.db)
    }

    fn inference(&self) -> &'db TypeInference<'db> {
        infer_expression_types(self.db, self.expression)
    }

    fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare) {
        let ast::ExprCompare {
            range: _,
            left,
            ops,
            comparators,
        } = expr_compare;

        if let ast::Expr::Name(ast::ExprName {
            range: _,
            id,
            ctx: _,
        }) = left.as_ref()
        {
            // SAFETY: we should always have a symbol for every Name node.
            let symbol = self.symbols().symbol_id_by_name(id).unwrap();
            let scope = self.scope();
            let inference = self.inference();
            for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
                let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
                if matches!(op, ast::CmpOp::IsNot) {
                    let ty = IntersectionBuilder::new(self.db)
                        .add_negative(comp_ty)
                        .build();
                    self.constraints.insert(symbol, ty);
                };
                // TODO other comparison types
            }
        }
    }
}
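For context, a hypothetical caller (not part of this diff) would apply the returned constraint by intersecting it with the type previously inferred for the definition; `previous_ty` here is an assumed stand-in for that type:

```rust
let narrowed_ty = match narrowing_constraint(db, test, definition) {
    // For `x is not None` the constraint is the negative intersection `~None`;
    // intersecting it with the previous type performs the narrowing.
    Some(constraint) => IntersectionBuilder::new(db)
        .add_positive(previous_ty)
        .add_positive(constraint)
        .build(),
    None => previous_ty,
};
```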
@@ -1 +1 @@
-4ef2d66663fc080fefa379e6ae5fc45d4f8b54eb
+1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f

@@ -753,9 +753,11 @@ class Constant(expr):
     __match_args__ = ("value", "kind")
     value: Any  # None, str, bytes, bool, int, float, complex, Ellipsis
     kind: str | None
-    # Aliases for value, for backwards compatibility
-    s: Any
-    n: int | float | complex
+    if sys.version_info < (3, 14):
+        # Aliases for value, for backwards compatibility
+        s: Any
+        n: int | float | complex

     def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

 class NamedExpr(expr):

@@ -1,13 +1,12 @@
 import sys
 from abc import abstractmethod
 from types import MappingProxyType
-from typing import (  # noqa: Y022,Y038,Y057
+from typing import (  # noqa: Y022,Y038
     AbstractSet as Set,
     AsyncGenerator as AsyncGenerator,
     AsyncIterable as AsyncIterable,
     AsyncIterator as AsyncIterator,
     Awaitable as Awaitable,
-    ByteString as ByteString,
     Callable as Callable,
     Collection as Collection,
     Container as Container,
@@ -59,8 +58,12 @@ __all__ = [
     "ValuesView",
     "Sequence",
     "MutableSequence",
-    "ByteString",
 ]

+if sys.version_info < (3, 14):
+    from typing import ByteString as ByteString  # noqa: Y057
+
+    __all__ += ["ByteString"]
+
 if sys.version_info >= (3, 12):
     __all__ += ["Buffer"]

@@ -51,8 +51,8 @@ class _CDataMeta(type):
     # By default mypy complains about the following two methods, because strictly speaking cls
     # might not be a Type[_CT]. However this can never actually happen, because the only class that
     # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
-    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]
-    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]
+    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
+    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

 class _CData(metaclass=_CDataMeta):
     _b_base_: int

@@ -357,7 +357,17 @@ class Action(_AttributeHolder):

 if sys.version_info >= (3, 12):
     class BooleanOptionalAction(Action):
-        if sys.version_info >= (3, 13):
+        if sys.version_info >= (3, 14):
+            def __init__(
+                self,
+                option_strings: Sequence[str],
+                dest: str,
+                default: bool | None = None,
+                required: bool = False,
+                help: str | None = None,
+                deprecated: bool = False,
+            ) -> None: ...
+        elif sys.version_info >= (3, 13):
             @overload
             def __init__(
                 self,

@@ -10,27 +10,28 @@ class _ABC(type):
     if sys.version_info >= (3, 9):
         def __init__(cls, *args: Unused) -> None: ...

-@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
-class Num(Constant, metaclass=_ABC):
-    value: int | float | complex
+if sys.version_info < (3, 14):
+    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
+    class Num(Constant, metaclass=_ABC):
+        value: int | float | complex

-@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
-class Str(Constant, metaclass=_ABC):
-    value: str
-    # Aliases for value, for backwards compatibility
-    s: str
+    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
+    class Str(Constant, metaclass=_ABC):
+        value: str
+        # Aliases for value, for backwards compatibility
+        s: str

-@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
-class Bytes(Constant, metaclass=_ABC):
-    value: bytes
-    # Aliases for value, for backwards compatibility
-    s: bytes
+    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
+    class Bytes(Constant, metaclass=_ABC):
+        value: bytes
+        # Aliases for value, for backwards compatibility
+        s: bytes

-@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
-class NameConstant(Constant, metaclass=_ABC): ...
+    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
+    class NameConstant(Constant, metaclass=_ABC): ...

-@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
-class Ellipsis(Constant, metaclass=_ABC): ...
+    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
+    class Ellipsis(Constant, metaclass=_ABC): ...

 if sys.version_info >= (3, 9):
     class slice(AST): ...

@@ -151,13 +151,13 @@ if sys.version_info >= (3, 10):
     @overload
     def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ...  # type: ignore[overload-overlap]
     @overload
-    def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...  # type: ignore[overload-overlap]
+    def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[overload-overlap]
+    def gather(
        coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: bool
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[overload-overlap]
+    def gather(
         coro_or_future1: _FutureLike[_T1],
         coro_or_future2: _FutureLike[_T2],
         coro_or_future3: _FutureLike[_T3],
@@ -166,7 +166,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[overload-overlap]
+    def gather(
         coro_or_future1: _FutureLike[_T1],
         coro_or_future2: _FutureLike[_T2],
         coro_or_future3: _FutureLike[_T3],
@@ -176,7 +176,7 @@ if sys.version_info >= (3, 10):
         return_exceptions: bool,
     ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
     @overload
-    def gather(  # type: ignore[overload-overlap]
+    def gather(
         coro_or_future1: _FutureLike[_T1],
         coro_or_future2: _FutureLike[_T2],
         coro_or_future3: _FutureLike[_T3],
@@ -189,7 +189,7 @@ if sys.version_info >= (3, 10):
         tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
     ]: ...
     @overload
-    def gather(  # type: ignore[overload-overlap]
+    def gather(
         coro_or_future1: _FutureLike[_T1],
         coro_or_future2: _FutureLike[_T2],
         coro_or_future3: _FutureLike[_T3],

@@ -159,7 +159,7 @@ if sys.platform != "win32":

     class _UnixSelectorEventLoop(BaseSelectorEventLoop):
         if sys.version_info >= (3, 13):
-            async def create_unix_server(  # type: ignore[override]
+            async def create_unix_server(
                 self,
                 protocol_factory: _ProtocolFactory,
                 path: StrPath | None = None,

@@ -1744,7 +1744,7 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
 # without creating many false-positive errors (see #7578).
 # Instead, we special-case the most common examples of this: bool and literal integers.
 @overload
-def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ...  # type: ignore[overload-overlap]
+def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ...
 @overload
 def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ...
 @overload
@@ -1752,9 +1752,8 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A

 # The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object`
 # (A "SupportsDunderDict" protocol doesn't work)
-# Use a type: ignore to make complaints about overlapping overloads go away
 @overload
-def vars(object: type, /) -> types.MappingProxyType[str, Any]: ...  # type: ignore[overload-overlap]
+def vars(object: type, /) -> types.MappingProxyType[str, Any]: ...
 @overload
 def vars(object: Any = ..., /) -> dict[str, Any]: ...

@@ -55,6 +55,7 @@ class AbstractAsyncContextManager(Protocol[_T_co, _ExitT_co]):
     ) -> _ExitT_co: ...

 class ContextDecorator:
+    def _recreate_cm(self) -> Self: ...
     def __call__(self, func: _F) -> _F: ...

 class _GeneratorContextManager(AbstractContextManager[_T_co, bool | None], ContextDecorator):
@@ -80,6 +81,7 @@ if sys.version_info >= (3, 10):
     _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]])

     class AsyncContextDecorator:
+        def _recreate_cm(self) -> Self: ...
         def __call__(self, func: _AF) -> _AF: ...

     class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator):

@@ -1,12 +1,5 @@
 import sys
-from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL, Structure, Union
-from ctypes import DEFAULT_MODE as DEFAULT_MODE, cdll as cdll, pydll as pydll, pythonapi as pythonapi
-
-if sys.version_info >= (3, 12):
-    from _ctypes import SIZEOF_TIME_T as SIZEOF_TIME_T
-
-if sys.platform == "win32":
-    from ctypes import oledll as oledll, windll as windll
+from ctypes import Structure, Union

 # At runtime, the native endianness is an alias for Structure,
 # while the other is a subclass with a metaclass added in.

@@ -5,7 +5,7 @@ from _typeshed import DataclassInstance
 from builtins import type as Type  # alias to avoid name clashes with fields named "type"
 from collections.abc import Callable, Iterable, Mapping
 from typing import Any, Generic, Literal, Protocol, TypeVar, overload
-from typing_extensions import TypeAlias, TypeIs
+from typing_extensions import Never, TypeAlias, TypeIs

 if sys.version_info >= (3, 9):
     from types import GenericAlias
@@ -213,6 +213,10 @@ else:
     ) -> Any: ...

 def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ...
+
+# HACK: `obj: Never` typing matches if object argument is using `Any` type.
+@overload
+def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ...  # type: ignore[narrowed-type-not-subtype]  # pyright: ignore[reportGeneralTypeIssues]
 @overload
 def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ...
 @overload
@@ -1,6 +1,26 @@
 from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
 from abc import abstractmethod
 from collections.abc import Callable, Iterable
+from distutils.command.bdist import bdist
+from distutils.command.bdist_dumb import bdist_dumb
+from distutils.command.bdist_rpm import bdist_rpm
+from distutils.command.build import build
+from distutils.command.build_clib import build_clib
+from distutils.command.build_ext import build_ext
+from distutils.command.build_py import build_py
+from distutils.command.build_scripts import build_scripts
+from distutils.command.check import check
+from distutils.command.clean import clean
+from distutils.command.config import config
+from distutils.command.install import install
+from distutils.command.install_data import install_data
+from distutils.command.install_egg_info import install_egg_info
+from distutils.command.install_headers import install_headers
+from distutils.command.install_lib import install_lib
+from distutils.command.install_scripts import install_scripts
+from distutils.command.register import register
+from distutils.command.sdist import sdist
+from distutils.command.upload import upload
 from distutils.dist import Distribution
 from distutils.file_util import _BytesPathT, _StrPathT
 from typing import Any, ClassVar, Literal, TypeVar, overload
@@ -28,8 +48,108 @@ class Command:
     def ensure_dirname(self, option: str) -> None: ...
     def get_command_name(self) -> str: ...
     def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
+    # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
+    @overload
+    def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ...
+    @overload
+    def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ...
+    @overload
+    def get_finalized_command(self, command: Literal["bdist_rpm"], create: bool | Literal[0, 1] = 1) -> bdist_rpm: ...
+    @overload
+    def get_finalized_command(self, command: Literal["build"], create: bool | Literal[0, 1] = 1) -> build: ...
+    @overload
+    def get_finalized_command(self, command: Literal["build_clib"], create: bool | Literal[0, 1] = 1) -> build_clib: ...
+    @overload
+    def get_finalized_command(self, command: Literal["build_ext"], create: bool | Literal[0, 1] = 1) -> build_ext: ...
+    @overload
+    def get_finalized_command(self, command: Literal["build_py"], create: bool | Literal[0, 1] = 1) -> build_py: ...
+    @overload
+    def get_finalized_command(self, command: Literal["build_scripts"], create: bool | Literal[0, 1] = 1) -> build_scripts: ...
+    @overload
+    def get_finalized_command(self, command: Literal["check"], create: bool | Literal[0, 1] = 1) -> check: ...
+    @overload
+    def get_finalized_command(self, command: Literal["clean"], create: bool | Literal[0, 1] = 1) -> clean: ...
+    @overload
+    def get_finalized_command(self, command: Literal["config"], create: bool | Literal[0, 1] = 1) -> config: ...
+    @overload
+    def get_finalized_command(self, command: Literal["install"], create: bool | Literal[0, 1] = 1) -> install: ...
+    @overload
+    def get_finalized_command(self, command: Literal["install_data"], create: bool | Literal[0, 1] = 1) -> install_data: ...
+    @overload
+    def get_finalized_command(
+        self, command: Literal["install_egg_info"], create: bool | Literal[0, 1] = 1
+    ) -> install_egg_info: ...
+    @overload
+    def get_finalized_command(self, command: Literal["install_headers"], create: bool | Literal[0, 1] = 1) -> install_headers: ...
+    @overload
+    def get_finalized_command(self, command: Literal["install_lib"], create: bool | Literal[0, 1] = 1) -> install_lib: ...
+    @overload
+    def get_finalized_command(self, command: Literal["install_scripts"], create: bool | Literal[0, 1] = 1) -> install_scripts: ...
+    @overload
+    def get_finalized_command(self, command: Literal["register"], create: bool | Literal[0, 1] = 1) -> register: ...
+    @overload
+    def get_finalized_command(self, command: Literal["sdist"], create: bool | Literal[0, 1] = 1) -> sdist: ...
+    @overload
+    def get_finalized_command(self, command: Literal["upload"], create: bool | Literal[0, 1] = 1) -> upload: ...
+    @overload
     def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
+    @overload
+    def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["bdist_dumb"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> bdist_dumb: ...
+    @overload
+    def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist_rpm: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["build_clib"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> build_clib: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_ext: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_py: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["build_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> build_scripts: ...
+    @overload
+    def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool | Literal[0, 1] = 0) -> check: ...
+    @overload
+    def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool | Literal[0, 1] = 0) -> clean: ...
+    @overload
+    def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool | Literal[0, 1] = 0) -> config: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool | Literal[0, 1] = 0) -> install: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_data"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> install_data: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_egg_info"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> install_egg_info: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_headers"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> install_headers: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_lib"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> install_lib: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
+    ) -> install_scripts: ...
+    @overload
+    def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool | Literal[0, 1] = 0) -> register: ...
+    @overload
+    def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> sdist: ...
+    @overload
+    def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool | Literal[0, 1] = 0) -> upload: ...
+    @overload
     def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
     @overload
     def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ...

@@ -0,0 +1,48 @@
import sys

from . import (
    bdist,
    bdist_dumb,
    bdist_rpm,
    build,
    build_clib,
    build_ext,
    build_py,
    build_scripts,
    check,
    clean,
    install,
    install_data,
    install_headers,
    install_lib,
    install_scripts,
    register,
    sdist,
    upload,
)

__all__ = [
    "build",
    "build_py",
    "build_ext",
    "build_clib",
    "build_scripts",
    "clean",
    "install",
    "install_lib",
    "install_headers",
    "install_scripts",
    "install_data",
    "sdist",
    "register",
    "bdist",
    "bdist_dumb",
    "bdist_rpm",
    "check",
    "upload",
]

if sys.version_info < (3, 10):
    from . import bdist_wininst

    __all__ += ["bdist_wininst"]

@@ -1,6 +1,26 @@
 from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
 from collections.abc import Iterable, MutableMapping
 from distutils.cmd import Command
+from distutils.command.bdist import bdist
+from distutils.command.bdist_dumb import bdist_dumb
+from distutils.command.bdist_rpm import bdist_rpm
+from distutils.command.build import build
+from distutils.command.build_clib import build_clib
+from distutils.command.build_ext import build_ext
+from distutils.command.build_py import build_py
+from distutils.command.build_scripts import build_scripts
+from distutils.command.check import check
+from distutils.command.clean import clean
+from distutils.command.config import config
+from distutils.command.install import install
+from distutils.command.install_data import install_data
+from distutils.command.install_egg_info import install_egg_info
+from distutils.command.install_headers import install_headers
+from distutils.command.install_lib import install_lib
+from distutils.command.install_scripts import install_scripts
+from distutils.command.register import register
+from distutils.command.sdist import sdist
+from distutils.command.upload import upload
 from re import Pattern
 from typing import IO, ClassVar, Literal, TypeVar, overload
 from typing_extensions import TypeAlias
@@ -63,10 +83,6 @@ class Distribution:
     def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ...
     def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
     def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
-    @overload
-    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
-    @overload
-    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
     global_options: ClassVar[_OptionsList]
     common_usage: ClassVar[str]
     display_options: ClassVar[_OptionsList]
@@ -108,8 +124,137 @@ class Distribution:
     def print_commands(self) -> None: ...
     def get_command_list(self): ...
     def get_command_packages(self): ...
+    # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
+    @overload
+    def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ...
+    @overload
+    def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ...
+    @overload
+    def get_command_obj(self, command: Literal["bdist_rpm"], create: Literal[1, True] = 1) -> bdist_rpm: ...
+    @overload
+    def get_command_obj(self, command: Literal["build"], create: Literal[1, True] = 1) -> build: ...
+    @overload
+    def get_command_obj(self, command: Literal["build_clib"], create: Literal[1, True] = 1) -> build_clib: ...
+    @overload
+    def get_command_obj(self, command: Literal["build_ext"], create: Literal[1, True] = 1) -> build_ext: ...
+    @overload
+    def get_command_obj(self, command: Literal["build_py"], create: Literal[1, True] = 1) -> build_py: ...
+    @overload
+    def get_command_obj(self, command: Literal["build_scripts"], create: Literal[1, True] = 1) -> build_scripts: ...
+    @overload
+    def get_command_obj(self, command: Literal["check"], create: Literal[1, True] = 1) -> check: ...
+    @overload
+    def get_command_obj(self, command: Literal["clean"], create: Literal[1, True] = 1) -> clean: ...
+    @overload
+    def get_command_obj(self, command: Literal["config"], create: Literal[1, True] = 1) -> config: ...
+    @overload
+    def get_command_obj(self, command: Literal["install"], create: Literal[1, True] = 1) -> install: ...
+    @overload
+    def get_command_obj(self, command: Literal["install_data"], create: Literal[1, True] = 1) -> install_data: ...
+    @overload
+    def get_command_obj(self, command: Literal["install_egg_info"], create: Literal[1, True] = 1) -> install_egg_info: ...
+    @overload
+    def get_command_obj(self, command: Literal["install_headers"], create: Literal[1, True] = 1) -> install_headers: ...
+    @overload
+    def get_command_obj(self, command: Literal["install_lib"], create: Literal[1, True] = 1) -> install_lib: ...
+    @overload
+    def get_command_obj(self, command: Literal["install_scripts"], create: Literal[1, True] = 1) -> install_scripts: ...
+    @overload
+    def get_command_obj(self, command: Literal["register"], create: Literal[1, True] = 1) -> register: ...
+    @overload
+    def get_command_obj(self, command: Literal["sdist"], create: Literal[1, True] = 1) -> sdist: ...
+    @overload
+    def get_command_obj(self, command: Literal["upload"], create: Literal[1, True] = 1) -> upload: ...
+    @overload
+    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
+    # Not replicating the overloads for "Command | None", user may use "isinstance"
+    @overload
+    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
+    @overload
+    def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ...
+    @overload
+    def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ...
+    @overload
+    def get_command_class(self, command: Literal["bdist_rpm"]) -> type[bdist_rpm]: ...
+    @overload
+    def get_command_class(self, command: Literal["build"]) -> type[build]: ...
+    @overload
+    def get_command_class(self, command: Literal["build_clib"]) -> type[build_clib]: ...
+    @overload
+    def get_command_class(self, command: Literal["build_ext"]) -> type[build_ext]: ...
+    @overload
+    def get_command_class(self, command: Literal["build_py"]) -> type[build_py]: ...
+    @overload
+    def get_command_class(self, command: Literal["build_scripts"]) -> type[build_scripts]: ...
+    @overload
+    def get_command_class(self, command: Literal["check"]) -> type[check]: ...
+    @overload
+    def get_command_class(self, command: Literal["clean"]) -> type[clean]: ...
+    @overload
+    def get_command_class(self, command: Literal["config"]) -> type[config]: ...
+    @overload
+    def get_command_class(self, command: Literal["install"]) -> type[install]: ...
+    @overload
+    def get_command_class(self, command: Literal["install_data"]) -> type[install_data]: ...
+    @overload
+    def get_command_class(self, command: Literal["install_egg_info"]) -> type[install_egg_info]: ...
+    @overload
+    def get_command_class(self, command: Literal["install_headers"]) -> type[install_headers]: ...
+    @overload
+    def get_command_class(self, command: Literal["install_lib"]) -> type[install_lib]: ...
+    @overload
+    def get_command_class(self, command: Literal["install_scripts"]) -> type[install_scripts]: ...
+    @overload
+    def get_command_class(self, command: Literal["register"]) -> type[register]: ...
+    @overload
+    def get_command_class(self, command: Literal["sdist"]) -> type[sdist]: ...
+    @overload
+    def get_command_class(self, command: Literal["upload"]) -> type[upload]: ...
+    @overload
+    def get_command_class(self, command: str) -> type[Command]: ...
+    @overload
+    def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ...
+    @overload
+    def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ...
+    @overload
+    def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool = False) -> bdist_rpm: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool = False) -> build: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_clib"], reinit_subcommands: bool = False) -> build_clib: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool = False) -> build_ext: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool = False) -> build_py: ...
+    @overload
+    def reinitialize_command(self, command: Literal["build_scripts"], reinit_subcommands: bool = False) -> build_scripts: ...
+    @overload
+    def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool = False) -> check: ...
+    @overload
+    def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool = False) -> clean: ...
+    @overload
+    def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool = False) -> config: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool = False) -> install: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install_data"], reinit_subcommands: bool = False) -> install_data: ...
+    @overload
+    def reinitialize_command(
+        self, command: Literal["install_egg_info"], reinit_subcommands: bool = False
+    ) -> install_egg_info: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install_headers"], reinit_subcommands: bool = False) -> install_headers: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install_lib"], reinit_subcommands: bool = False) -> install_lib: ...
+    @overload
+    def reinitialize_command(self, command: Literal["install_scripts"], reinit_subcommands: bool = False) -> install_scripts: ...
+    @overload
+    def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool = False) -> register: ...
+    @overload
+    def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool = False) -> sdist: ...
+    @overload
+    def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool = False) -> upload: ...
     @overload
     def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
     @overload
     def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...

@@ -66,7 +66,10 @@ def mktime_tz(data: _PDTZ) -> int: ...
 def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ...
 def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ...

-if sys.version_info >= (3, 12):
+if sys.version_info >= (3, 14):
+    def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...
+
+elif sys.version_info >= (3, 12):
     @overload
     def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...
     @overload

@@ -17,13 +17,24 @@ def cmpfiles(
 ) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ...

 class dircmp(Generic[AnyStr]):
-    def __init__(
-        self,
-        a: GenericPath[AnyStr],
-        b: GenericPath[AnyStr],
-        ignore: Sequence[AnyStr] | None = None,
-        hide: Sequence[AnyStr] | None = None,
-    ) -> None: ...
+    if sys.version_info >= (3, 13):
+        def __init__(
+            self,
+            a: GenericPath[AnyStr],
+            b: GenericPath[AnyStr],
+            ignore: Sequence[AnyStr] | None = None,
+            hide: Sequence[AnyStr] | None = None,
+            *,
+            shallow: bool = True,
+        ) -> None: ...
+    else:
+        def __init__(
+            self,
+            a: GenericPath[AnyStr],
+            b: GenericPath[AnyStr],
+            ignore: Sequence[AnyStr] | None = None,
+            hide: Sequence[AnyStr] | None = None,
+        ) -> None: ...
     left: AnyStr
     right: AnyStr
     hide: Sequence[AnyStr]

@@ -155,7 +155,7 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12):
     @property
     def names(self) -> set[str]: ...
     @overload
-    def select(self) -> Self: ...  # type: ignore[misc]
+    def select(self) -> Self: ...
     @overload
     def select(
         self,
@@ -277,7 +277,7 @@ if sys.version_info >= (3, 12):

 elif sys.version_info >= (3, 10):
     @overload
-    def entry_points() -> SelectableGroups: ...  # type: ignore[overload-overlap]
+    def entry_points() -> SelectableGroups: ...
     @overload
     def entry_points(
         *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ...

@@ -6,7 +6,7 @@ from ..pytree import Node

 class FixUnicode(fixer_base.BaseFix):
     BM_compatible: ClassVar[Literal[True]]
-    PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]]  # type: ignore[name-defined]  # Name "STRING" is not defined
+    PATTERN: ClassVar[str]
     unicode_literals: bool
     def start_tree(self, tree: Node, filename: StrPath) -> None: ...
     def transform(self, node, results): ...

@@ -55,10 +55,9 @@ __all__ = [
|
||||
"setLogRecordFactory",
|
||||
"lastResort",
|
||||
"raiseExceptions",
|
||||
"warn",
|
||||
]
|
||||
|
||||
if sys.version_info < (3, 13):
|
||||
__all__ += ["warn"]
|
||||
if sys.version_info >= (3, 11):
|
||||
__all__ += ["getLevelNamesMapping"]
|
||||
if sys.version_info >= (3, 12):
|
||||
@@ -157,17 +156,16 @@ class Logger(Filterer):
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
if sys.version_info < (3, 13):
|
||||
def warn(
|
||||
self,
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
|
||||
@deprecated("Deprecated; use warning() instead.")
|
||||
def warn(
|
||||
self,
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
def error(
|
||||
self,
|
||||
msg: object,
|
||||
@@ -412,18 +410,17 @@ class LoggerAdapter(Generic[_L]):
|
||||
extra: Mapping[str, object] | None = None,
|
||||
**kwargs: object,
|
||||
) -> None: ...
|
||||
if sys.version_info < (3, 13):
|
||||
def warn(
|
||||
self,
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
**kwargs: object,
|
||||
) -> None: ...
|
||||
|
||||
@deprecated("Deprecated; use warning() instead.")
|
||||
def warn(
|
||||
self,
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
**kwargs: object,
|
||||
) -> None: ...
|
||||
def error(
|
||||
self,
|
||||
msg: object,
|
||||
@@ -523,17 +520,15 @@ def warning(
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
|
||||
if sys.version_info < (3, 13):
|
||||
def warn(
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
|
||||
@deprecated("Deprecated; use warning() instead.")
|
||||
def warn(
|
||||
msg: object,
|
||||
*args: object,
|
||||
exc_info: _ExcInfoType = None,
|
||||
stack_info: bool = False,
|
||||
stacklevel: int = 1,
|
||||
extra: Mapping[str, object] | None = None,
|
||||
) -> None: ...
|
||||
def error(
|
||||
msg: object,
|
||||
*args: object,
|
||||
|
||||
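In the three logging hunks above, the `sys.version_info < (3, 13)` gates around `warn()` are replaced with `@deprecated` markers: the stubs now flag every call on every version instead of hiding the method on 3.13+. A small sketch of what a type checker reports (the logger name is arbitrary):

```python
import logging

logger = logging.getLogger("app")
logger.warn("disk low")     # flagged: "Deprecated; use warning() instead."
logger.warning("disk low")  # the supported spelling
```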
@@ -73,7 +73,7 @@ def copy(obj: _CT) -> _CT: ...
@overload
def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ...
@overload
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ...  # type: ignore
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ...
@overload
def synchronized(
    obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None

@@ -115,12 +115,12 @@ class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generi
class SynchronizedString(SynchronizedArray[bytes]):
    @overload  # type: ignore[override]
    def __getitem__(self, i: slice) -> bytes: ...
    @overload  # type: ignore[override]
    @overload
    def __getitem__(self, i: int) -> bytes: ...
    @overload  # type: ignore[override]
    def __setitem__(self, i: slice, value: bytes) -> None: ...
    @overload  # type: ignore[override]
    def __setitem__(self, i: int, value: bytes) -> None: ...  # type: ignore[override]
    @overload
    def __setitem__(self, i: int, value: bytes) -> None: ...
    def __getslice__(self, start: int, stop: int) -> bytes: ...  # type: ignore[override]
    def __setslice__(self, start: int, stop: int, values: bytes) -> None: ...  # type: ignore[override]

@@ -159,6 +159,20 @@ class Path(PurePath):
    def lchmod(self, mode: int) -> None: ...
    def lstat(self) -> stat_result: ...
    def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ...

    if sys.version_info >= (3, 14):
        def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> None: ...
        def copytree(
            self,
            target: StrPath,
            *,
            follow_symlinks: bool = True,
            preserve_metadata: bool = False,
            dirs_exist_ok: bool = False,
            ignore: Callable[[Self], bool] | None = None,
            on_error: Callable[[OSError], object] | None = None,
        ) -> None: ...

    # Adapted from builtins.open
    # Text mode: always returns a TextIOWrapper
    # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this.

@@ -232,10 +246,18 @@ class Path(PurePath):
    if sys.version_info >= (3, 9):
        def readlink(self) -> Self: ...

    def rename(self, target: str | PurePath) -> Self: ...
    def replace(self, target: str | PurePath) -> Self: ...
    if sys.version_info >= (3, 10):
        def rename(self, target: StrPath) -> Self: ...
        def replace(self, target: StrPath) -> Self: ...
    else:
        def rename(self, target: str | PurePath) -> Self: ...
        def replace(self, target: str | PurePath) -> Self: ...

    def resolve(self, strict: bool = False) -> Self: ...
    def rmdir(self) -> None: ...
    if sys.version_info >= (3, 14):
        def delete(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ...

    def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ...
    if sys.version_info >= (3, 10):
        def hardlink_to(self, target: StrOrBytesPath) -> None: ...

@@ -266,6 +288,9 @@ class Path(PurePath):
        self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ...
    ) -> Iterator[tuple[Self, list[str], list[str]]]: ...

    if sys.version_info >= (3, 14):
        def rmtree(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ...

class PosixPath(Path, PurePosixPath): ...
class WindowsPath(Path, PureWindowsPath): ...
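The pathlib hunks above add provisional 3.14-only stubs: `Path.copy()`, `Path.copytree()`, `Path.delete()`, and `Path.rmtree()`. A hedged sketch of how they would be called per these signatures, guarded so it only runs where the API exists (the paths are hypothetical):

```python
import sys
from pathlib import Path

if sys.version_info >= (3, 14):
    src = Path("assets")
    dst = Path("assets_backup")
    src.copytree(dst, dirs_exist_ok=True)         # recursive copy, typed above
    (src / "notes.txt").copy(dst / "notes.txt")   # single-file copy
    dst.rmtree(ignore_errors=True)                # recursive delete, typed above
```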
@@ -84,7 +84,7 @@ class Pdb(Bdb, Cmd):
    def _runscript(self, filename: str) -> None: ...

    if sys.version_info >= (3, 13):
        def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...  # type: ignore[override]
        def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...

    def do_commands(self, arg: str) -> bool | None: ...
    def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ...

@@ -1,7 +1,7 @@
import sys
from collections.abc import Callable, Iterable
from typing import Final
from typing_extensions import TypeAlias
from typing_extensions import TypeAlias, deprecated

if sys.platform != "win32":
    __all__ = ["openpty", "fork", "spawn"]

@@ -13,7 +13,12 @@ if sys.platform != "win32":

    CHILD: Final = 0
    def openpty() -> tuple[int, int]: ...
    def master_open() -> tuple[int, str]: ...  # deprecated, use openpty()
    def slave_open(tty_name: str) -> int: ...  # deprecated, use openpty()

    if sys.version_info < (3, 14):
        @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead")
        def master_open() -> tuple[int, str]: ...
        @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead")
        def slave_open(tty_name: str) -> int: ...

    def fork() -> tuple[int, int]: ...
    def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...
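The pty hunks above replace the `# deprecated` comments on `master_open()`/`slave_open()` with real `@deprecated` markers, gated to disappear in 3.14. The replacement call returns both ends of the pseudo-terminal at once (POSIX only):

```python
import os
import pty

master_fd, slave_fd = pty.openpty()  # replaces master_open() + slave_open()
os.write(slave_fd, b"hello")
print(os.read(master_fd, 5))         # b'hello'
os.close(master_fd)
os.close(slave_fd)
```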
@@ -74,7 +74,7 @@ class Match(Generic[AnyStr]):
    @overload
    def expand(self: Match[str], template: str) -> str: ...
    @overload
    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...  # type: ignore[overload-overlap]
    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...
    @overload
    def expand(self, template: AnyStr) -> AnyStr: ...
    # group() returns "AnyStr" or "AnyStr | None", depending on the pattern.

@@ -124,19 +124,21 @@ class Pattern(Generic[AnyStr]):
    @overload
    def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
    @overload
    def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload
    def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
    @overload
    def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload
    def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def fullmatch(
        self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize
    ) -> Match[bytes] | None: ...
    @overload
    def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload

@@ -155,13 +157,15 @@ class Pattern(Generic[AnyStr]):
    @overload
    def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ...
    @overload
    def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ...  # type: ignore[overload-overlap]
    def finditer(
        self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize
    ) -> Iterator[Match[bytes]]: ...
    @overload
    def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ...
    @overload
    def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ...
    @overload
    def sub(  # type: ignore[overload-overlap]
    def sub(
        self: Pattern[bytes],
        repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
        string: ReadableBuffer,

@@ -172,7 +176,7 @@ class Pattern(Generic[AnyStr]):
    @overload
    def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ...
    @overload
    def subn(  # type: ignore[overload-overlap]
    def subn(
        self: Pattern[bytes],
        repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
        string: ReadableBuffer,
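The re hunks above drop `# type: ignore[overload-overlap]` from the `Pattern[bytes]` overloads, reflowing a few signatures across lines in the process. In practice the paired overloads dispatch like this:

```python
import re

p_str = re.compile(r"\d+")       # Pattern[str]
p_bytes = re.compile(rb"\d+")    # Pattern[bytes]

m = p_str.search("order 42")     # Match[str] | None
b = p_bytes.search(b"order 42")  # Match[bytes] | None
if m and b:
    print(m.group(), b.group())  # 42 b'42'
```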
@@ -29,7 +29,10 @@ def DateFromTicks(ticks: float) -> Date: ...
def TimeFromTicks(ticks: float) -> Time: ...
def TimestampFromTicks(ticks: float) -> Timestamp: ...

version_info: tuple[int, int, int]
if sys.version_info < (3, 14):
    # Deprecated in 3.12, removed in 3.14.
    version_info: tuple[int, int, int]

sqlite_version_info: tuple[int, int, int]
Binary = memoryview

@@ -90,7 +93,10 @@ SQLITE_UPDATE: Final[int]
adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]]
converters: dict[str, _Converter]
sqlite_version: str
version: str

if sys.version_info < (3, 14):
    # Deprecated in 3.12, removed in 3.14.
    version: str

if sys.version_info >= (3, 11):
    SQLITE_ABORT: Final[int]
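The sqlite3 hunks above move `version` and `version_info` behind a `sys.version_info < (3, 14)` gate, since both were deprecated in 3.12 and removed in 3.14. The SQLite library version remains available unconditionally:

```python
import sqlite3

print(sqlite3.sqlite_version)       # version of the SQLite library, e.g. "3.45.1"
print(sqlite3.sqlite_version_info)  # the same as a tuple, e.g. (3, 45, 1)
```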
@@ -2,6 +2,7 @@ import sys
from _collections_abc import dict_keys
from collections.abc import Sequence
from typing import Any
from typing_extensions import deprecated

__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]

@@ -51,7 +52,9 @@ class Function(SymbolTable):
    def get_nonlocals(self) -> tuple[str, ...]: ...

class Class(SymbolTable):
    def get_methods(self) -> tuple[str, ...]: ...
    if sys.version_info < (3, 16):
        @deprecated("deprecated in Python 3.14, will be removed in Python 3.16")
        def get_methods(self) -> tuple[str, ...]: ...

class Symbol:
    def __init__(

@@ -423,7 +423,7 @@ class TarInfo:
    name: str
    path: str
    size: int
    mtime: int
    mtime: int | float
    chksum: int
    devmajor: int
    devminor: int

@@ -463,7 +463,7 @@ class TemporaryDirectory(Generic[AnyStr]):

# The overloads overlap, but they should still work fine.
@overload
def mkstemp(  # type: ignore[overload-overlap]
def mkstemp(
    suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False
) -> tuple[int, str]: ...
@overload

@@ -473,7 +473,7 @@ def mkstemp(

# The overloads overlap, but they should still work fine.
@overload
def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...  # type: ignore[overload-overlap]
def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...
@overload
def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ...
def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ...

@@ -2148,11 +2148,12 @@ class Listbox(Widget, XView, YView):
        selectborderwidth: _ScreenUnits = 0,
        selectforeground: str = ...,
        # from listbox man page: "The value of the [selectmode] option may be
        # arbitrary, but the default bindings expect it to be ..."
        # arbitrary, but the default bindings expect it to be either single,
        # browse, multiple, or extended"
        #
        # I have never seen anyone setting this to something else than what
        # "the default bindings expect", but let's support it anyway.
        selectmode: str = "browse",
        selectmode: str | Literal["single", "browse", "multiple", "extended"] = "browse",  # noqa: Y051
        setgrid: bool = False,
        state: Literal["normal", "disabled"] = "normal",
        takefocus: _TakeFocusValue = "",

@@ -2187,7 +2188,7 @@ class Listbox(Widget, XView, YView):
        selectbackground: str = ...,
        selectborderwidth: _ScreenUnits = ...,
        selectforeground: str = ...,
        selectmode: str = ...,
        selectmode: str | Literal["single", "browse", "multiple", "extended"] = ...,  # noqa: Y051
        setgrid: bool = ...,
        state: Literal["normal", "disabled"] = ...,
        takefocus: _TakeFocusValue = ...,

@@ -2907,6 +2908,9 @@ class Scrollbar(Widget):
    def set(self, first: float | str, last: float | str) -> None: ...

_TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc
_WhatToCount: TypeAlias = Literal[
    "chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels"
]

class Text(Widget, XView, YView):
    def __init__(

@@ -3021,7 +3025,27 @@ class Text(Widget, XView, YView):
    config = configure
    def bbox(self, index: _TextIndex) -> tuple[int, int, int, int] | None: ...  # type: ignore[override]
    def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ...
    def count(self, index1, index2, *args): ...  # TODO
    @overload
    def count(self, index1: _TextIndex, index2: _TextIndex) -> tuple[int] | None: ...
    @overload
    def count(self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], /) -> tuple[int] | None: ...
    @overload
    def count(self, index1: _TextIndex, index2: _TextIndex, arg1: Literal["update"], arg2: _WhatToCount, /) -> int | None: ...
    @overload
    def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: Literal["update"], /) -> int | None: ...
    @overload
    def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /) -> tuple[int, int]: ...
    @overload
    def count(
        self,
        index1: _TextIndex,
        index2: _TextIndex,
        arg1: _WhatToCount | Literal["update"],
        arg2: _WhatToCount | Literal["update"],
        arg3: _WhatToCount | Literal["update"],
        /,
        *args: _WhatToCount | Literal["update"],
    ) -> tuple[int, ...]: ...
    @overload
    def debug(self, boolean: None = None) -> bool: ...
    @overload
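The `Text.count()` overloads above encode Tk's rules: a single counting option yields a one-element tuple (or None), two options yield a pair, and combining an option with "update" changes the shape to a bare int. A rough sketch (needs a running Tk display; exact numbers depend on the widget contents):

```python
import tkinter

root = tkinter.Tk()
text = tkinter.Text(root)
text.insert("1.0", "one\ntwo\nthree")

print(text.count("1.0", "end", "chars"))            # e.g. (14,)
print(text.count("1.0", "end", "lines", "chars"))   # e.g. (3, 14)
print(text.count("1.0", "end", "update", "chars"))  # e.g. 14
root.destroy()
```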
@@ -3564,7 +3588,7 @@ class Spinbox(Widget, XView):
    def scan_dragto(self, x): ...
    def selection(self, *args) -> tuple[int, ...]: ...
    def selection_adjust(self, index): ...
    def selection_clear(self): ...
    def selection_clear(self): ...  # type: ignore[override]
    def selection_element(self, element: Incomplete | None = None): ...
    def selection_from(self, index: int) -> None: ...
    def selection_present(self) -> None: ...

@@ -1040,7 +1040,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
    @overload
    def heading(self, column: str | int, option: str) -> Any: ...
    @overload
    def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...  # type: ignore[overload-overlap]
    def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...
    @overload
    def heading(
        self,

@@ -1052,7 +1052,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
        anchor: tkinter._Anchor = ...,
        command: str | Callable[[], object] = ...,
    ) -> None: ...
    def identify(self, component, x, y): ...  # Internal Method. Leave untyped
    # Internal Method. Leave untyped:
    def identify(self, component, x, y): ...  # type: ignore[override]
    def identify_row(self, y: int) -> str: ...
    def identify_column(self, x: int) -> str: ...
    def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ...

@@ -1084,7 +1085,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
    @overload
    def item(self, item: str | int, option: str) -> Any: ...
    @overload
    def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...  # type: ignore[overload-overlap]
    def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...
    @overload
    def item(
        self,

@@ -338,7 +338,7 @@ class TPen:
    def isvisible(self) -> bool: ...
    # Note: signatures 1 and 2 overlap unsafely when no arguments are provided
    @overload
    def pen(self) -> _PenState: ...  # type: ignore[overload-overlap]
    def pen(self) -> _PenState: ...
    @overload
    def pen(
        self,

@@ -384,7 +384,7 @@ class RawTurtle(TPen, TNavigator):
    def shape(self, name: str) -> None: ...
    # Unsafely overlaps when no arguments are provided
    @overload
    def shapesize(self) -> tuple[float, float, float]: ...  # type: ignore[overload-overlap]
    def shapesize(self) -> tuple[float, float, float]: ...
    @overload
    def shapesize(
        self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None

@@ -395,7 +395,7 @@ class RawTurtle(TPen, TNavigator):
    def shearfactor(self, shear: float) -> None: ...
    # Unsafely overlaps when no arguments are provided
    @overload
    def shapetransform(self) -> tuple[float, float, float, float]: ...  # type: ignore[overload-overlap]
    def shapetransform(self) -> tuple[float, float, float, float]: ...
    @overload
    def shapetransform(
        self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None

@@ -622,7 +622,7 @@ def isvisible() -> bool: ...

# Note: signatures 1 and 2 overlap unsafely when no arguments are provided
@overload
def pen() -> _PenState: ...  # type: ignore[overload-overlap]
def pen() -> _PenState: ...
@overload
def pen(
    pen: _PenState | None = None,

@@ -661,7 +661,7 @@ if sys.version_info >= (3, 12):

# Unsafely overlaps when no arguments are provided
@overload
def shapesize() -> tuple[float, float, float]: ...  # type: ignore[overload-overlap]
def shapesize() -> tuple[float, float, float]: ...
@overload
def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ...
@overload

@@ -671,7 +671,7 @@ def shearfactor(shear: float) -> None: ...

# Unsafely overlaps when no arguments are provided
@overload
def shapetransform() -> tuple[float, float, float, float]: ...  # type: ignore[overload-overlap]
def shapetransform() -> tuple[float, float, float, float]: ...
@overload
def shapetransform(
    t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None

@@ -305,9 +305,9 @@ class MappingProxyType(Mapping[_KT, _VT_co]):
    def values(self) -> ValuesView[_VT_co]: ...
    def items(self) -> ItemsView[_KT, _VT_co]: ...
    @overload
    def get(self, key: _KT, /) -> _VT_co | None: ...  # type: ignore[override]
    def get(self, key: _KT, /) -> _VT_co | None: ...
    @overload
    def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ...  # type: ignore[override]
    def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ...
    if sys.version_info >= (3, 9):
        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
    def __reversed__(self) -> Iterator[_KT]: ...
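The `MappingProxyType.get()` hunk above drops two `# type: ignore[override]` comments that are no longer needed. The two overloads behave like `dict.get`, while the view itself stays read-only:

```python
from types import MappingProxyType

proxy = MappingProxyType({"a": 1})
print(proxy.get("a"))     # 1
print(proxy.get("b", 0))  # 0 (second overload, with a default)
# proxy["b"] = 2          # would raise TypeError: the view is read-only
```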
@@ -583,7 +583,7 @@ _P = ParamSpec("_P")

# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable
@overload
def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...  # type: ignore[overload-overlap]
def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...
@overload
def coroutine(func: _Fn) -> _Fn: ...

@@ -846,7 +846,8 @@ class TextIO(IO[str]):
    @abstractmethod
    def __enter__(self) -> TextIO: ...

ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview
if sys.version_info < (3, 14):
    ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview

# Functions

@@ -299,7 +299,7 @@ class _patcher:
    # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock],
    # but that's impossible with the current type system.
    @overload
    def __call__(  # type: ignore[overload-overlap]
    def __call__(
        self,
        target: str,
        new: _T,

@@ -198,13 +198,13 @@ else:

# Requires an iterable of length 6
@overload
def urlunparse(components: Iterable[None]) -> Literal[b""]: ...
def urlunparse(components: Iterable[None]) -> Literal[b""]: ...  # type: ignore[overload-overlap]
@overload
def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ...

# Requires an iterable of length 5
@overload
def urlunsplit(components: Iterable[None]) -> Literal[b""]: ...
def urlunsplit(components: Iterable[None]) -> Literal[b""]: ...  # type: ignore[overload-overlap]
@overload
def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ...
def unwrap(url: str) -> str: ...

@@ -79,6 +79,7 @@ else:
    def pathname2url(pathname: str) -> str: ...

def getproxies() -> dict[str, str]: ...
def getproxies_environment() -> dict[str, str]: ...
def parse_http_list(s: str) -> list[str]: ...
def parse_keqv_list(l: list[str]) -> dict[str, str]: ...

@@ -1,4 +1,4 @@
from typing import Any
from typing import Any, Final

from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation

@@ -17,22 +17,22 @@ class Node:
    NOTATION_NODE: int

# ExceptionCode
INDEX_SIZE_ERR: int
DOMSTRING_SIZE_ERR: int
HIERARCHY_REQUEST_ERR: int
WRONG_DOCUMENT_ERR: int
INVALID_CHARACTER_ERR: int
NO_DATA_ALLOWED_ERR: int
NO_MODIFICATION_ALLOWED_ERR: int
NOT_FOUND_ERR: int
NOT_SUPPORTED_ERR: int
INUSE_ATTRIBUTE_ERR: int
INVALID_STATE_ERR: int
SYNTAX_ERR: int
INVALID_MODIFICATION_ERR: int
NAMESPACE_ERR: int
INVALID_ACCESS_ERR: int
VALIDATION_ERR: int
INDEX_SIZE_ERR: Final[int]
DOMSTRING_SIZE_ERR: Final[int]
HIERARCHY_REQUEST_ERR: Final[int]
WRONG_DOCUMENT_ERR: Final[int]
INVALID_CHARACTER_ERR: Final[int]
NO_DATA_ALLOWED_ERR: Final[int]
NO_MODIFICATION_ALLOWED_ERR: Final[int]
NOT_FOUND_ERR: Final[int]
NOT_SUPPORTED_ERR: Final[int]
INUSE_ATTRIBUTE_ERR: Final[int]
INVALID_STATE_ERR: Final[int]
SYNTAX_ERR: Final[int]
INVALID_MODIFICATION_ERR: Final[int]
NAMESPACE_ERR: Final[int]
INVALID_ACCESS_ERR: Final[int]
VALIDATION_ERR: Final[int]

class DOMException(Exception):
    code: int

@@ -62,8 +62,8 @@ class UserDataHandler:
    NODE_DELETED: int
    NODE_RENAMED: int

XML_NAMESPACE: str
XMLNS_NAMESPACE: str
XHTML_NAMESPACE: str
EMPTY_NAMESPACE: None
EMPTY_PREFIX: None
XML_NAMESPACE: Final[str]
XMLNS_NAMESPACE: Final[str]
XHTML_NAMESPACE: Final[str]
EMPTY_NAMESPACE: Final[None]
EMPTY_PREFIX: Final[None]

@@ -1,14 +1,15 @@
import sys
from _typeshed import FileDescriptorOrPath
from collections.abc import Callable
from typing import Final
from xml.etree.ElementTree import Element

XINCLUDE: str
XINCLUDE_INCLUDE: str
XINCLUDE_FALLBACK: str
XINCLUDE: Final[str]
XINCLUDE_INCLUDE: Final[str]
XINCLUDE_FALLBACK: Final[str]

if sys.version_info >= (3, 9):
    DEFAULT_MAX_INCLUSION_DEPTH: int
    DEFAULT_MAX_INCLUSION_DEPTH: Final = 6

class FatalIncludeError(SyntaxError): ...

@@ -2,7 +2,7 @@ import sys
from _collections_abc import dict_keys
from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite
from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence
from typing import Any, Literal, SupportsIndex, TypeVar, overload
from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload
from typing_extensions import TypeAlias, TypeGuard, deprecated

__all__ = [

@@ -41,7 +41,7 @@ _FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead
_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes]
_FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str]

VERSION: str
VERSION: Final[str]

class ParseError(SyntaxError):
    code: int

@@ -94,6 +94,20 @@ class ZipExtFile(io.BufferedIOBase):
class _Writer(Protocol):
    def write(self, s: str, /) -> object: ...

class _ZipReadable(Protocol):
    def seek(self, offset: int, whence: int = 0, /) -> int: ...
    def read(self, n: int = -1, /) -> bytes: ...

class _ZipTellable(Protocol):
    def tell(self) -> int: ...

class _ZipReadableTellable(_ZipReadable, _ZipTellable, Protocol): ...

class _ZipWritable(Protocol):
    def flush(self) -> None: ...
    def close(self) -> None: ...
    def write(self, b: bytes, /) -> int: ...

class ZipFile:
    filename: str | None
    debug: int

@@ -106,24 +120,50 @@ class ZipFile:
    compresslevel: int | None  # undocumented
    mode: _ZipFileMode  # undocumented
    pwd: bytes | None  # undocumented
    # metadata_encoding is new in 3.11
    if sys.version_info >= (3, 11):
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],
            mode: _ZipFileMode = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: str | None = None,
        ) -> None: ...
        # metadata_encoding is only allowed for read mode
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadable,
            mode: Literal["r"] = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: str | None,
            metadata_encoding: str | None = None,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],
            mode: _ZipFileMode = "r",
            file: StrPath | _ZipWritable,
            mode: Literal["w", "x"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: None = None,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadableTellable,
            mode: Literal["a"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,

@@ -132,6 +172,7 @@ class ZipFile:
            metadata_encoding: None = None,
        ) -> None: ...
    else:
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],

@@ -142,6 +183,39 @@ class ZipFile:
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadable,
            mode: Literal["r"] = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipWritable,
            mode: Literal["w", "x"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadableTellable,
            mode: Literal["a"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...

    def __enter__(self) -> Self: ...
    def __exit__(
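The zipfile hunks above introduce `_ZipReadable`, `_ZipTellable`, and `_ZipWritable` protocols so that each `ZipFile` mode only demands the stream methods it actually uses: "r" needs seek/read, "w"/"x" need write/flush/close, and "a" additionally needs tell. Any object satisfying the protocol is accepted, not just a full `IO[bytes]`:

```python
import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:   # "w" mode: a writable stream suffices
    zf.writestr("hello.txt", "hi")

with zipfile.ZipFile(io.BytesIO(buf.getvalue()), "r") as zf:  # "r": seek + read
    print(zf.read("hello.txt"))         # b'hi'
```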
@@ -11,9 +11,9 @@ repository = { workspace = true }
license = { workspace = true }

[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }
ruff_db = { workspace = true }
ruff_linter = { workspace = true }
ruff_notebook = { workspace = true }
ruff_python_ast = { workspace = true }
ruff_source_file = { workspace = true }

@@ -1,5 +1,6 @@
use anyhow::Ok;
use lsp_types::NotebookCellKind;
use ruff_notebook::CellMetadata;
use rustc_hash::{FxBuildHasher, FxHashMap};

use crate::{PositionEncoding, TextDocument};

@@ -65,7 +66,7 @@ impl NotebookDocument {
                NotebookCellKind::Code => ruff_notebook::Cell::Code(ruff_notebook::CodeCell {
                    execution_count: None,
                    id: None,
                    metadata: serde_json::Value::Null,
                    metadata: CellMetadata::default(),
                    outputs: vec![],
                    source: ruff_notebook::SourceValue::String(
                        cell.document.contents().to_string(),

@@ -75,7 +76,7 @@ impl NotebookDocument {
                ruff_notebook::Cell::Markdown(ruff_notebook::MarkdownCell {
                    attachments: None,
                    id: None,
                    metadata: serde_json::Value::Null,
                    metadata: CellMetadata::default(),
                    source: ruff_notebook::SourceValue::String(
                        cell.document.contents().to_string(),
                    ),

@@ -2,8 +2,9 @@ use std::borrow::Cow;

use lsp_types::request::DocumentDiagnosticRequest;
use lsp_types::{
    Diagnostic, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
    FullDocumentDiagnosticReport, Range, RelatedFullDocumentDiagnosticReport, Url,
    Diagnostic, DiagnosticSeverity, DocumentDiagnosticParams, DocumentDiagnosticReport,
    DocumentDiagnosticReportResult, FullDocumentDiagnosticReport, Position, Range,
    RelatedFullDocumentDiagnosticReport, Url,
};

use red_knot_workspace::db::RootDatabase;

@@ -56,16 +57,37 @@ fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Di
    diagnostics
        .as_slice()
        .iter()
        .map(|message| Diagnostic {
            range: Range::default(),
            severity: None,
            tags: None,
            code: None,
            code_description: None,
            source: Some("red-knot".into()),
            message: message.to_string(),
            related_information: None,
            data: None,
        })
        .map(|message| to_lsp_diagnostic(message))
        .collect()
}

fn to_lsp_diagnostic(message: &str) -> Diagnostic {
    let words = message.split(':').collect::<Vec<_>>();

    let (range, message) = match words.as_slice() {
        [_filename, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default();
            let column = column.parse::<u32>().unwrap_or_default();
            (
                Range::new(
                    Position::new(line.saturating_sub(1), column.saturating_sub(1)),
                    Position::new(line, column),
                ),
                message.trim(),
            )
        }
        _ => (Range::default(), message),
    };

    Diagnostic {
        range,
        severity: Some(DiagnosticSeverity::ERROR),
        tags: None,
        code: None,
        code_description: None,
        source: Some("red-knot".into()),
        message: message.to_string(),
        related_information: None,
        data: None,
    }
}
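The new `to_lsp_diagnostic` helper above recovers a position from red-knot's plain `file:line:column: message` strings, converting the 1-based line/column to a 0-based LSP start position and falling back to a default range when the message has a different shape. The same parsing, sketched in Python purely for illustration (the helper name and message format are taken from this diff and the wasm test further down):

```python
def parse_diagnostic(message: str) -> tuple[tuple[int, int], str]:
    # Mirrors to_lsp_diagnostic: "file:line:col: text" -> (0-based start, text)
    parts = message.split(":")
    if len(parts) == 4:
        _filename, line, column, text = parts
        row = max(int(line) - 1, 0)
        col = max(int(column) - 1, 0)
        return (row, col), text.strip()
    return (0, 0), message

print(parse_diagnostic("/test.py:1:8: Import 'random22' could not be resolved."))
# ((0, 7), "Import 'random22' could not be resolved.")
```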
@@ -8,10 +8,10 @@ use std::sync::Arc;
use anyhow::anyhow;
use lsp_types::{ClientCapabilities, Url};

use red_knot_python_semantic::{ProgramSettings, PythonVersion, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::program::{ProgramSettings, SearchPathSettings, TargetVersion};
use ruff_db::system::SystemPath;

use crate::edit::{DocumentKey, NotebookDocument};

@@ -70,7 +70,7 @@ impl Session {
            let metadata = WorkspaceMetadata::from_path(system_path, &system)?;
            // TODO(dhruvmanila): Get the values from the client settings
            let program_settings = ProgramSettings {
                target_version: TargetVersion::default(),
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: vec![],
                    src_root: system_path.to_path_buf(),

@@ -78,7 +78,8 @@ impl Session {
                    custom_typeshed: None,
                },
            };
            workspaces.insert(path, RootDatabase::new(metadata, program_settings, system));
            // TODO(micha): Handle the case where the program settings are incorrect more gracefully.
            workspaces.insert(path, RootDatabase::new(metadata, program_settings, system)?);
        }

        Ok(Self {

@@ -1,5 +1,4 @@
use lsp_types::ClientCapabilities;
use ruff_linter::display_settings;

#[derive(Debug, Clone, PartialEq, Eq, Default)]
#[allow(clippy::struct_excessive_bools)]

@@ -66,20 +65,3 @@ impl ResolvedClientCapabilities {
        }
    }
}

impl std::fmt::Display for ResolvedClientCapabilities {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        display_settings! {
            formatter = f,
            namespace = "capabilities",
            fields = [
                self.code_action_deferred_edit_resolution,
                self.apply_edit,
                self.document_changes,
                self.workspace_refresh,
                self.pull_diagnostics,
            ]
        };
        Ok(())
    }
}

@@ -278,18 +278,6 @@ impl DocumentQuery {
        }
    }

    /// Generate a source kind used by the linter.
    pub(crate) fn make_source_kind(&self) -> ruff_linter::source_kind::SourceKind {
        match self {
            Self::Text { document, .. } => {
                ruff_linter::source_kind::SourceKind::Python(document.contents().to_string())
            }
            Self::Notebook { notebook, .. } => {
                ruff_linter::source_kind::SourceKind::IpyNotebook(notebook.make_ruff_notebook())
            }
        }
    }

    /// Attempts to access the underlying notebook document that this query is selecting.
    pub fn as_notebook(&self) -> Option<&NotebookDocument> {
        match self {

@@ -19,6 +19,7 @@ doctest = false
default = ["console_error_panic_hook"]

[dependencies]
red_knot_python_semantic = { workspace = true }
red_knot_workspace = { workspace = true }

ruff_db = { workspace = true }

@@ -3,10 +3,10 @@ use std::any::Any;
use js_sys::Error;
use wasm_bindgen::prelude::*;

use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File};
use ruff_db::program::{ProgramSettings, SearchPathSettings};
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
use ruff_db::system::{
    DirectoryEntry, MemoryFileSystem, Metadata, System, SystemPath, SystemPathBuf,

@@ -49,7 +49,8 @@ impl Workspace {
            search_paths: SearchPathSettings::default(),
        };

        let db = RootDatabase::new(workspace, program_settings, system.clone());
        let db =
            RootDatabase::new(workspace, program_settings, system.clone()).map_err(into_error)?;

        Ok(Self { db, system })
    }

@@ -108,7 +109,7 @@ impl Workspace {
    pub fn check_file(&self, file_id: &FileHandle) -> Result<Vec<String>, Error> {
        let result = self.db.check_file(file_id.file).map_err(into_error)?;

        Ok(result.to_vec())
        Ok(result.clone())
    }

    /// Checks all open files

@@ -184,16 +185,16 @@ pub enum TargetVersion {
    Py313,
}

impl From<TargetVersion> for ruff_db::program::TargetVersion {
impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
    fn from(value: TargetVersion) -> Self {
        match value {
            TargetVersion::Py37 => Self::Py37,
            TargetVersion::Py38 => Self::Py38,
            TargetVersion::Py39 => Self::Py39,
            TargetVersion::Py310 => Self::Py310,
            TargetVersion::Py311 => Self::Py311,
            TargetVersion::Py312 => Self::Py312,
            TargetVersion::Py313 => Self::Py313,
            TargetVersion::Py37 => Self::PY37,
            TargetVersion::Py38 => Self::PY38,
            TargetVersion::Py39 => Self::PY39,
            TargetVersion::Py310 => Self::PY310,
            TargetVersion::Py311 => Self::PY311,
            TargetVersion::Py312 => Self::PY312,
            TargetVersion::Py313 => Self::PY313,
        }
    }
}

@@ -17,5 +17,8 @@ fn check() {

    let result = workspace.check_file(&test).expect("Check to succeed");

    assert_eq!(result, vec!["Unresolved import 'random22'"]);
    assert_eq!(
        result,
        vec!["/test.py:1:8: Import 'random22' could not be resolved.",]
    );
}

@@ -17,6 +17,7 @@ red_knot_python_semantic = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }

anyhow = { workspace = true }
crossbeam = { workspace = true }

@@ -27,6 +28,7 @@ thiserror = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
ruff_db = { workspace = true, features = ["testing"]}

[lints]
workspace = true

@@ -1 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
@@ -1 +0,0 @@
/Users/alexw/.pyenv/versions/3.12.4/bin/python3.12
@@ -1 +0,0 @@
python
@@ -1 +0,0 @@
python
@@ -1 +0,0 @@
import _virtualenv