Compare commits


1 Commit

Author: Carl Meyer
SHA1: bccc33c6fe
Message: [WIP] working version of SSA-style use-def map
Date: 2024-08-14 17:10:11 -07:00
527 changed files with 5319 additions and 24664 deletions


@@ -20,7 +20,7 @@
"extensions": [
"ms-python.python",
"rust-lang.rust-analyzer",
"fill-labs.dependi",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"Swellaby.vscode-rust-test-adapter",
"charliermarsh.ruff"


@@ -37,7 +37,7 @@ jobs:
with:
fetch-depth: 0
- uses: tj-actions/changed-files@v45
- uses: tj-actions/changed-files@v44
id: changed
with:
files_yaml: |


@@ -34,10 +34,10 @@ jobs:
- name: "Set docs version"
run: |
version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
# if version is missing, use 'latest'
if [ -z "$version" ]; then
echo "Using 'latest' as version"
version="latest"
# if version is missing, exit with error
if [[ -z "$version" ]]; then
echo "Can't build docs without a version."
exit 1
fi
# Use version as display name for now
@@ -145,7 +145,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
run: |
branch_name="${{ env.branch_name }}"
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
# give the PR a few seconds to be created before trying to auto-merge it
sleep 10


@@ -6,8 +6,6 @@ exclude: |
crates/red_knot_workspace/resources/.*|
crates/ruff_linter/resources/.*|
crates/ruff_linter/src/rules/.*/snapshots/.*|
crates/ruff_notebook/resources/.*|
crates/ruff_server/resources/.*|
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*|
crates/ruff_python_formatter/tests/snapshots/.*|
@@ -17,7 +15,7 @@ exclude: |
repos:
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.19
rev: v0.18
hooks:
- id: validate-pyproject
@@ -45,7 +43,7 @@ repos:
)$
- repo: https://github.com/crate-ci/typos
rev: v1.24.5
rev: v1.23.6
hooks:
- id: typos
@@ -59,7 +57,7 @@ repos:
pass_filenames: false # This makes it a lot faster
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.5
rev: v0.5.7
hooks:
- id: ruff-format
- id: ruff


@@ -1,43 +1,5 @@
# Breaking Changes
## 0.6.0
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):
```toml
[tool.ruff.lint.per-file-ignores]
"*.ipynb" = ["E501"] # disable line-too-long in notebooks
```
If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
section-specific `exclude` option to do so. For example, the following would only lint Jupyter
Notebook files and not format them:
```toml
[tool.ruff.format]
exclude = ["*.ipynb"]
```
And, conversely, the following would only format Jupyter Notebook files and not lint them:
```toml
[tool.ruff.lint]
exclude = ["*.ipynb"]
```
You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:
```toml
[tool.ruff]
extend-exclude = ["*.ipynb"]
```
## 0.5.0
- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)


@@ -1,235 +1,5 @@
# Changelog
## 0.6.5
### Preview features
- \[`pydoclint`\] Ignore `DOC201` when function name is "`__new__`" ([#13300](https://github.com/astral-sh/ruff/pull/13300))
- \[`refurb`\] Implement `slice-to-remove-prefix-or-suffix` (`FURB188`) ([#13256](https://github.com/astral-sh/ruff/pull/13256))
### Rule changes
- \[`eradicate`\] Ignore script-comments with multiple end-tags (`ERA001`) ([#13283](https://github.com/astral-sh/ruff/pull/13283))
- \[`pyflakes`\] Improve error message for `UndefinedName` when a builtin was added in a newer version than specified in Ruff config (`F821`) ([#13293](https://github.com/astral-sh/ruff/pull/13293))
### Server
- Add support for extensionless Python files for server ([#13326](https://github.com/astral-sh/ruff/pull/13326))
- Fix configuration inheritance for configurations specified in the LSP settings ([#13285](https://github.com/astral-sh/ruff/pull/13285))
### Bug fixes
- \[`ruff`\] Handle unary operators in `decimal-from-float-literal` (`RUF032`) ([#13275](https://github.com/astral-sh/ruff/pull/13275))
### CLI
- Only include rules with diagnostics in SARIF metadata ([#13268](https://github.com/astral-sh/ruff/pull/13268))
### Playground
- Add "Copy as pyproject.toml/ruff.toml" and "Paste from TOML" ([#13328](https://github.com/astral-sh/ruff/pull/13328))
- Fix errors not shown for restored snippet on page load ([#13262](https://github.com/astral-sh/ruff/pull/13262))
## 0.6.4
### Preview features
- \[`flake8-builtins`\] Use dynamic builtins list based on Python version ([#13172](https://github.com/astral-sh/ruff/pull/13172))
- \[`pydoclint`\] Permit yielding `None` in `DOC402` and `DOC403` ([#13148](https://github.com/astral-sh/ruff/pull/13148))
- \[`pylint`\] Update diagnostic message for `PLW3201` ([#13194](https://github.com/astral-sh/ruff/pull/13194))
- \[`ruff`\] Implement `post-init-default` (`RUF033`) ([#13192](https://github.com/astral-sh/ruff/pull/13192))
- \[`ruff`\] Implement useless if-else (`RUF034`) ([#13218](https://github.com/astral-sh/ruff/pull/13218))
### Rule changes
- \[`flake8-pyi`\] Respect `pep8_naming.classmethod-decorators` settings when determining if a method is a classmethod in `custom-type-var-return-type` (`PYI019`) ([#13162](https://github.com/astral-sh/ruff/pull/13162))
- \[`flake8-pyi`\] Teach various rules that annotations might be stringized ([#12951](https://github.com/astral-sh/ruff/pull/12951))
- \[`pylint`\] Avoid `no-self-use` for `attrs`-style validators ([#13166](https://github.com/astral-sh/ruff/pull/13166))
- \[`pylint`\] Recurse into subscript subexpressions when searching for list/dict lookups (`PLR1733`, `PLR1736`) ([#13186](https://github.com/astral-sh/ruff/pull/13186))
- \[`pyupgrade`\] Detect `aiofiles.open` calls in `UP015` ([#13173](https://github.com/astral-sh/ruff/pull/13173))
- \[`pyupgrade`\] Mark `sys.version_info[0] < 3` and similar comparisons as outdated (`UP036`) ([#13175](https://github.com/astral-sh/ruff/pull/13175))
### CLI
- Enrich messages of SARIF results ([#13180](https://github.com/astral-sh/ruff/pull/13180))
- Handle singular case for incompatible rules warning in `ruff format` output ([#13212](https://github.com/astral-sh/ruff/pull/13212))
### Bug fixes
- \[`pydocstyle`\] Improve heuristics for detecting Google-style docstrings ([#13142](https://github.com/astral-sh/ruff/pull/13142))
- \[`refurb`\] Treat `sep` arguments with effects as unsafe removals (`FURB105`) ([#13165](https://github.com/astral-sh/ruff/pull/13165))
## 0.6.3
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with `dbm.sqlite3` (`SIM115`) ([#13104](https://github.com/astral-sh/ruff/pull/13104))
- \[`pycodestyle`\] Disable `E741` in stub files (`.pyi`) ([#13119](https://github.com/astral-sh/ruff/pull/13119))
- \[`pydoclint`\] Avoid `DOC201` on explicit returns in functions that only return `None` ([#13064](https://github.com/astral-sh/ruff/pull/13064))
### Rule changes
- \[`flake8-async`\] Disable check for `asyncio` before Python 3.11 (`ASYNC109`) ([#13023](https://github.com/astral-sh/ruff/pull/13023))
### Bug fixes
- \[`FastAPI`\] Avoid introducing invalid syntax in fix for `fast-api-non-annotated-dependency` (`FAST002`) ([#13133](https://github.com/astral-sh/ruff/pull/13133))
- \[`flake8-implicit-str-concat`\] Normalize octals before merging concatenated strings in `single-line-implicit-string-concatenation` (`ISC001`) ([#13118](https://github.com/astral-sh/ruff/pull/13118))
- \[`flake8-pytest-style`\] Improve help message for `pytest-incorrect-mark-parentheses-style` (`PT023`) ([#13092](https://github.com/astral-sh/ruff/pull/13092))
- \[`pylint`\] Avoid autofix for calls that aren't `min` or `max` as starred expression (`PLW3301`) ([#13089](https://github.com/astral-sh/ruff/pull/13089))
- \[`ruff`\] Add `datetime.time`, `datetime.tzinfo`, and `datetime.timezone` as immutable function calls (`RUF009`) ([#13109](https://github.com/astral-sh/ruff/pull/13109))
- \[`ruff`\] Extend comment deletion for `RUF100` to include trailing text from `noqa` directives while preserving any following comments on the same line, if any ([#13105](https://github.com/astral-sh/ruff/pull/13105))
- Fix dark theme on initial page load for the Ruff playground ([#13077](https://github.com/astral-sh/ruff/pull/13077))
## 0.6.2
### Preview features
- \[`flake8-simplify`\] Extend `open-file-with-context-handler` to work with other standard-library IO modules (`SIM115`) ([#12959](https://github.com/astral-sh/ruff/pull/12959))
- \[`ruff`\] Avoid `unused-async` for functions with FastAPI route decorator (`RUF029`) ([#12938](https://github.com/astral-sh/ruff/pull/12938))
- \[`ruff`\] Ignore `fstring-missing-syntax` (`RUF027`) for `fastAPI` paths ([#12939](https://github.com/astral-sh/ruff/pull/12939))
- \[`ruff`\] Implement check for Decimal called with a float literal (RUF032) ([#12909](https://github.com/astral-sh/ruff/pull/12909))
### Rule changes
- \[`flake8-bugbear`\] Update diagnostic message when expression is at the end of function (`B015`) ([#12944](https://github.com/astral-sh/ruff/pull/12944))
- \[`flake8-pyi`\] Skip type annotations in `string-or-bytes-too-long` (`PYI053`) ([#13002](https://github.com/astral-sh/ruff/pull/13002))
- \[`flake8-type-checking`\] Always recognise relative imports as first-party ([#12994](https://github.com/astral-sh/ruff/pull/12994))
- \[`flake8-unused-arguments`\] Ignore unused arguments on stub functions (`ARG001`) ([#12966](https://github.com/astral-sh/ruff/pull/12966))
- \[`pylint`\] Ignore augmented assignment for `self-cls-assignment` (`PLW0642`) ([#12957](https://github.com/astral-sh/ruff/pull/12957))
### Server
- Show full context in error log messages ([#13029](https://github.com/astral-sh/ruff/pull/13029))
### Bug fixes
- \[`pep8-naming`\] Don't flag `from` imports following conventional import names (`N817`) ([#12946](https://github.com/astral-sh/ruff/pull/12946))
- \[`pylint`\] - Allow `__new__` methods to have `cls` as their first argument even if decorated with `@staticmethod` for `bad-staticmethod-argument` (`PLW0211`) ([#12958](https://github.com/astral-sh/ruff/pull/12958))
### Documentation
- Add `hyperfine` installation instructions; update `hyperfine` code samples ([#13034](https://github.com/astral-sh/ruff/pull/13034))
- Expand note to use Ruff with other language server in Kate ([#12806](https://github.com/astral-sh/ruff/pull/12806))
- Update example for `PT001` as per the new default behavior ([#13019](https://github.com/astral-sh/ruff/pull/13019))
- \[`perflint`\] Improve docs for `try-except-in-loop` (`PERF203`) ([#12947](https://github.com/astral-sh/ruff/pull/12947))
- \[`pydocstyle`\] Add reference to `lint.pydocstyle.ignore-decorators` setting to rule docs ([#12996](https://github.com/astral-sh/ruff/pull/12996))
## 0.6.1
This is a hotfix release to address an issue with `ruff-pre-commit`. In v0.6,
Ruff changed its behavior to lint and format Jupyter notebooks by default;
however, due to an oversight, these files were still excluded by default if
Ruff was run via pre-commit, leading to inconsistent behavior.
This has [now been fixed](https://github.com/astral-sh/ruff-pre-commit/pull/96).
### Preview features
- \[`fastapi`\] Implement `fast-api-unused-path-parameter` (`FAST003`) ([#12638](https://github.com/astral-sh/ruff/pull/12638))
### Rule changes
- \[`pylint`\] Rename `too-many-positional` to `too-many-positional-arguments` (`R0917`) ([#12905](https://github.com/astral-sh/ruff/pull/12905))
### Server
- Fix crash when applying "fix-all" code-action to notebook cells ([#12929](https://github.com/astral-sh/ruff/pull/12929))
### Other changes
- \[`flake8-naming`\]: Respect import conventions (`N817`) ([#12922](https://github.com/astral-sh/ruff/pull/12922))
## 0.6.0
Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!
### Breaking changes
See also, the "Remapped rules" section which may result in disabled rules.
- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).
### Deprecations
The following rules are now deprecated:
- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)
### Remapped rules
The following rules have been remapped to new rule codes:
- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`
### Stabilization
The following rules have been stabilized and are no longer in preview:
- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)
The following behaviors have been stabilized:
- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.
The following fixes have been stabilized:
- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)
### Preview features
- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))
### Rule changes
- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))
### Server
- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))
### Bug fixes
- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))
### Other changes
- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))
## 0.5.7
### Preview features


@@ -2,6 +2,35 @@
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
- [The Basics](#the-basics)
- [Prerequisites](#prerequisites)
- [Development](#development)
- [Project Structure](#project-structure)
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- [Rule naming convention](#rule-naming-convention)
- [Rule testing: fixtures and snapshots](#rule-testing-fixtures-and-snapshots)
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
- [Creating a new release](#creating-a-new-release)
- [Ecosystem CI](#ecosystem-ci)
- [Benchmarking and Profiling](#benchmarking-and-profiling)
- [CPython Benchmark](#cpython-benchmark)
- [Microbenchmarks](#microbenchmarks)
- [Benchmark-driven Development](#benchmark-driven-development)
- [PR Summary](#pr-summary)
- [Tips](#tips)
- [Profiling Projects](#profiling-projects)
- [Linux](#linux)
- [Mac](#mac)
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
- [Compilation Pipeline](#compilation-pipeline)
- [Import Categorization](#import-categorization)
- [Project root](#project-root)
- [Package root](#package-root)
- [Import categorization](#import-categorization-1)
## The Basics
Ruff welcomes contributions in the form of pull requests.
@@ -304,34 +333,22 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
### Creating a new release
1. Install `uv`: `curl -LsSf https://astral.sh/uv/install.sh | sh`
1. Run `./scripts/release.sh`; this command will:
- Generate a temporary virtual environment with `rooster`
- Generate a changelog entry in `CHANGELOG.md`
- Update versions in `pyproject.toml` and `Cargo.toml`
- Update references to versions in the `README.md` and documentation
- Display contributors for the release
1. The changelog should then be editorialized for consistency
- Often labels will be missing from pull requests, so they will need to be manually organized into the proper section
- Changes should be edited to be user-facing descriptions, avoiding internal details
1. Highlight any breaking changes in `BREAKING_CHANGES.md`
1. Run `cargo check`. This should update the lock file with new versions.
1. Create a pull request with the changelog and version updates
1. Merge the PR
1. Run the [release workflow](https://github.com/astral-sh/ruff/actions/workflows/release.yml) with:
- The new version number (without starting `v`)
1. The release workflow will do the following:
1. Build all the assets. If this fails (even though we tested in step 4), we haven't tagged or
uploaded anything, so you can restart after pushing a fix. If you just need to rerun the build,
make sure you're [re-running all the failed
@@ -342,25 +359,14 @@ even patch releases may contain [non-backwards-compatible changes](https://semve
1. Attach artifacts to draft GitHub release
1. Trigger downstream repositories. This can fail non-catastrophically, as we can run any
downstream jobs manually if needed.
1. Verify the GitHub release:
1. The Changelog should match the content of `CHANGELOG.md`
1. Append the contributors from the `scripts/release.sh` script
1. Append the contributors from the `bump.sh` script
1. If needed, [update the schemastore](https://github.com/astral-sh/ruff/blob/main/scripts/update_schemastore.py).
1. One can determine if an update is needed when
`git diff old-version-tag new-version-tag -- ruff.schema.json` returns a non-empty diff.
1. Once run successfully, you should follow the link in the output to create a PR.
1. If needed, update the [`ruff-lsp`](https://github.com/astral-sh/ruff-lsp) and
[`ruff-vscode`](https://github.com/astral-sh/ruff-vscode) repositories and follow
the release instructions in those repositories. `ruff-lsp` should always be updated
before `ruff-vscode`.
This step is generally not required for a patch release, but should always be done
for a minor release.
1. If needed, update the `ruff-lsp` and `ruff-vscode` repositories.
## Ecosystem CI
@@ -383,7 +389,7 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects
> **Note**
> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.
@@ -397,18 +403,12 @@ which makes it a good target for benchmarking.
git clone --branch 3.10 https://github.com/python/cpython.git crates/ruff_linter/resources/test/cpython
```
Install `hyperfine`:
```shell
cargo install hyperfine
```
To benchmark the release build:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ -e"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ -e"
Benchmark 1: ./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache
Time (mean ± σ): 293.8 ms ± 3.2 ms [User: 2384.6 ms, System: 90.3 ms]
@@ -427,7 +427,7 @@ To benchmark against the ecosystem's existing tools:
```shell
hyperfine --ignore-failure --warmup 5 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache" \
"pyflakes crates/ruff_linter/resources/test/cpython" \
"autoflake --recursive --expand-star-imports --remove-all-unused-imports --remove-unused-variables --remove-duplicate-keys resources/test/cpython" \
"pycodestyle crates/ruff_linter/resources/test/cpython" \
@@ -473,7 +473,7 @@ To benchmark a subset of rules, e.g. `LineTooLong` and `DocLineTooLong`:
```shell
cargo build --release && hyperfine --warmup 10 \
"./target/release/ruff check ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
"./target/release/ruff ./crates/ruff_linter/resources/test/cpython/ --no-cache -e --select W505,E501"
```
You can run `poetry install` from `./scripts/benchmarks` to create a working environment for the
@@ -530,8 +530,6 @@ You can run the benchmarks with
cargo benchmark
```
`cargo benchmark` is an alias for `cargo bench -p ruff_benchmark --bench linter --bench formatter --`
#### Benchmark-driven Development
Ruff uses [Criterion.rs](https://bheisler.github.io/criterion.rs/book/) for benchmarks. You can use
@@ -570,7 +568,7 @@ cargo install critcmp
#### Tips
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo bench -p ruff_benchmark lexer`
- Use `cargo bench -p ruff_benchmark <filter>` to only run specific benchmarks. For example: `cargo benchmark lexer`
to only run the lexer benchmarks.
- Use `cargo bench -p ruff_benchmark -- --quiet` for cleaner output (without statistical relevance)
- Use `cargo bench -p ruff_benchmark -- --quick` to get faster results (more prone to noise)

Cargo.lock (generated)

@@ -194,15 +194,6 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "block-buffer"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
dependencies = [
"generic-array",
]
[[package]]
name = "bstr"
version = "1.10.0"
@@ -237,9 +228,9 @@ dependencies = [
[[package]]
name = "camino"
version = "1.1.9"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
[[package]]
name = "cast"
@@ -279,12 +270,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chic"
version = "1.2.2"
@@ -335,9 +320,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.16"
version = "4.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019"
checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc"
dependencies = [
"clap_builder",
"clap_derive",
@@ -410,7 +395,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f8c93eb5f77c9050c7750e14f13ef1033a40a0aac70c6371535b6763a01438c"
dependencies = [
"nix 0.28.0",
"nix",
"terminfo",
"thiserror",
"which",
@@ -520,15 +505,6 @@ dependencies = [
"rustc-hash 1.1.0",
]
[[package]]
name = "cpufeatures"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51e852e6dc9a5bed1fae92dd2375037bf2b768725bf3be87811edee3249d09ad"
dependencies = [
"libc",
]
[[package]]
name = "crc32fast"
version = "1.4.0"
@@ -634,24 +610,14 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "ctrlc"
version = "3.4.5"
version = "3.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
dependencies = [
"nix 0.29.0",
"windows-sys 0.59.0",
"nix",
"windows-sys 0.52.0",
]
[[package]]
@@ -722,16 +688,6 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "digest"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
]
[[package]]
name = "dirs"
version = "4.0.0"
@@ -917,16 +873,6 @@ dependencies = [
"libc",
]
[[package]]
name = "generic-array"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "getopts"
version = "0.2.21"
@@ -1101,9 +1047,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.4.0"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
dependencies = [
"equivalent",
"hashbrown",
@@ -1160,8 +1106,6 @@ dependencies = [
"globset",
"lazy_static",
"linked-hash-map",
"pest",
"pest_derive",
"regex",
"serde",
"similar",
@@ -1271,9 +1215,9 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
[[package]]
name = "js-sys"
version = "0.3.70"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a"
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
dependencies = [
"wasm-bindgen",
]
@@ -1306,9 +1250,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.158"
version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]]
name = "libcst"
@@ -1444,16 +1388,6 @@ dependencies = [
"libmimalloc-sys",
]
[[package]]
name = "minicov"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c71e683cd655513b99affab7d317deb690528255a0d5f717f1024093c12b169"
dependencies = [
"cc",
"walkdir",
]
[[package]]
name = "minimal-lexical"
version = "0.2.1"
@@ -1504,19 +1438,7 @@ checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
"cfg_aliases 0.1.1",
"libc",
]
[[package]]
name = "nix"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
"cfg_aliases 0.2.1",
"cfg_aliases",
"libc",
]
@@ -1603,9 +1525,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordermap"
version = "0.5.2"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61d7d835be600a7ac71b24e39c92fe6fad9e818b3c71bfc379e3ba65e327d77f"
checksum = "8c81974681ab4f0cc9fe49cad56f821d1cc67a08cd2caa9b5d58b0adaa5dd36d"
dependencies = [
"indexmap",
]
@@ -1757,51 +1679,6 @@ version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "pest"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95"
dependencies = [
"memchr",
"thiserror",
"ucd-trie",
]
[[package]]
name = "pest_derive"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a"
dependencies = [
"pest",
"pest_generator",
]
[[package]]
name = "pest_generator"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183"
dependencies = [
"pest",
"pest_meta",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pest_meta"
version = "2.7.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f"
dependencies = [
"once_cell",
"pest",
"sha2",
]
[[package]]
name = "phf"
version = "0.11.2"
@@ -1898,9 +1775,9 @@ dependencies = [
[[package]]
name = "quick-junit"
version = "0.5.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62ffd2f9a162cfae131bed6d9d1ed60adced33be340a94f96952897d7cb0c240"
checksum = "cfc1a6a5406a114913df2df8507998c755311b55b78584bed5f6e88f6417c4d4"
dependencies = [
"chrono",
"indexmap",
@@ -1913,18 +1790,18 @@ dependencies = [
[[package]]
name = "quick-xml"
version = "0.36.1"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96a05e2e8efddfa51a84ca47cec303fac86c8541b686d37cac5efc0e094417bc"
checksum = "1004a344b30a54e2ee58d66a71b32d2db2feb0a31f9a2d302bf0536f15de2a33"
dependencies = [
"memchr",
]
[[package]]
name = "quote"
version = "1.0.37"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
@@ -2021,7 +1898,6 @@ dependencies = [
"ruff_db",
"ruff_index",
"ruff_python_ast",
"ruff_python_literal",
"ruff_python_parser",
"ruff_python_stdlib",
"ruff_source_file",
@@ -2029,10 +1905,7 @@ dependencies = [
"rustc-hash 2.0.0",
"salsa",
"smallvec",
"static_assertions",
"tempfile",
"test-case",
"thiserror",
"tracing",
"walkdir",
"zip",
@@ -2048,8 +1921,10 @@ dependencies = [
"libc",
"lsp-server",
"lsp-types",
"red_knot_python_semantic",
"red_knot_workspace",
"ruff_db",
"ruff_linter",
"ruff_notebook",
"ruff_python_ast",
"ruff_source_file",
@@ -2085,7 +1960,6 @@ dependencies = [
"anyhow",
"crossbeam",
"notify",
"rayon",
"red_knot_python_semantic",
"ruff_cache",
"ruff_db",
@@ -2093,7 +1967,7 @@ dependencies = [
"ruff_text_size",
"rustc-hash 2.0.0",
"salsa",
"tempfile",
"thiserror",
"tracing",
]
@@ -2187,7 +2061,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.6.5"
version = "0.5.7"
dependencies = [
"anyhow",
"argfile",
@@ -2245,7 +2119,6 @@ dependencies = [
"criterion",
"mimalloc",
"once_cell",
"rayon",
"red_knot_python_semantic",
"red_knot_workspace",
"ruff_db",
@@ -2380,7 +2253,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.6.5"
version = "0.5.7"
dependencies = [
"aho-corasick",
"annotate-snippets 0.9.2",
@@ -2700,7 +2573,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.6.5"
version = "0.5.7"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -2840,7 +2713,7 @@ checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "salsa"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
dependencies = [
"append-only-vec",
"arc-swap",
@@ -2860,12 +2733,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.1.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
[[package]]
name = "salsa-macros"
version = "0.18.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=f608ff8b24f07706492027199f51132244034f29#f608ff8b24f07706492027199f51132244034f29"
source = "git+https://github.com/MichaReiser/salsa.git?tag=red-knot-0.0.1#ece083e15b79f155f9e4368ec1318cec9a08d88b"
dependencies = [
"heck",
"proc-macro2",
@@ -2927,9 +2800,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "serde"
version = "1.0.209"
version = "1.0.206"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
dependencies = [
"serde_derive",
]
@@ -2947,9 +2820,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.209"
version = "1.0.206"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
dependencies = [
"proc-macro2",
"quote",
@@ -2969,9 +2842,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.127"
version = "1.0.124"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad"
checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
dependencies = [
"itoa",
"memchr",
@@ -3031,17 +2904,6 @@ dependencies = [
"syn",
]
[[package]]
name = "sha2"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sharded-slab"
version = "0.1.7"
@@ -3141,9 +3003,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
[[package]]
name = "syn"
version = "2.0.76"
version = "2.0.74"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525"
checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
dependencies = [
"proc-macro2",
"quote",
@@ -3442,18 +3304,6 @@ version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
name = "typenum"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ucd-trie"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"
[[package]]
name = "unic-char-property"
version = "0.9.0"
@@ -3677,20 +3527,19 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.93"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
dependencies = [
"cfg-if",
"once_cell",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.93"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
dependencies = [
"bumpalo",
"log",
@@ -3703,9 +3552,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.43"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed"
checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0"
dependencies = [
"cfg-if",
"js-sys",
@@ -3715,9 +3564,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.93"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -3725,9 +3574,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.93"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
@@ -3738,19 +3587,18 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.93"
version = "0.2.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "wasm-bindgen-test"
version = "0.3.43"
version = "0.3.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68497a05fb21143a08a7d24fc81763384a3072ee43c44e86aad1744d6adef9d9"
checksum = "d9bf62a58e0780af3e852044583deee40983e5886da43a271dd772379987667b"
dependencies = [
"console_error_panic_hook",
"js-sys",
"minicov",
"scoped-tls",
"wasm-bindgen",
"wasm-bindgen-futures",
@@ -3759,9 +3607,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.43"
version = "0.3.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b8220be1fa9e4c889b30fd207d4906657e7e90b12e0e6b0c8b8d8709f5de021"
checksum = "b7f89739351a2e03cb94beb799d47fb2cac01759b40ec441f7de39b00cbf7ef0"
dependencies = [
"proc-macro2",
"quote",


@@ -102,13 +102,13 @@ pep440_rs = { version = "0.6.0", features = ["serde"] }
pretty_assertions = "1.3.0"
proc-macro2 = { version = "1.0.79" }
pyproject-toml = { version = "0.9.0" }
quick-junit = { version = "0.5.0" }
quick-junit = { version = "0.4.0" }
quote = { version = "1.0.23" }
rand = { version = "0.8.5" }
rayon = { version = "1.10.0" }
regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "f608ff8b24f07706492027199f51132244034f29" }
salsa = { git = "https://github.com/MichaReiser/salsa.git", tag = "red-knot-0.0.1" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }


@@ -110,7 +110,7 @@ For more, see the [documentation](https://docs.astral.sh/ruff/).
1. [Who's Using Ruff?](#whos-using-ruff)
1. [License](#license)
## Getting Started<a id="getting-started"></a>
## Getting Started
For more, see the [documentation](https://docs.astral.sh/ruff/).
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.6.5/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.6.5/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.6.5
rev: v0.5.7
hooks:
# Run the linter.
- id: ruff
@@ -195,7 +195,7 @@ jobs:
- uses: chartboost/ruff-action@v1
```
### Configuration<a id="configuration"></a>
### Configuration
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
@@ -291,7 +291,7 @@ features that may change prior to stabilization.
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
for more on the linting and formatting commands, respectively.
## Rules<a id="rules"></a>
## Rules
<!-- Begin section: Rules -->
@@ -367,21 +367,21 @@ quality tools, including:
For a complete enumeration of the supported rules, see [_Rules_](https://docs.astral.sh/ruff/rules/).
## Contributing<a id="contributing"></a>
## Contributing
Contributions are welcome and highly appreciated. To get started, check out the
[**contributing guidelines**](https://docs.astral.sh/ruff/contributing/).
You can also join us on [**Discord**](https://discord.com/invite/astral-sh).
## Support<a id="support"></a>
## Support
Having trouble? Check out the existing issues on [**GitHub**](https://github.com/astral-sh/ruff/issues),
or feel free to [**open a new one**](https://github.com/astral-sh/ruff/issues/new).
You can also ask for help on [**Discord**](https://discord.com/invite/astral-sh).
## Acknowledgements<a id="acknowledgements"></a>
## Acknowledgements
Ruff's linter draws on both the APIs and implementation details of many other
tools in the Python ecosystem, especially [Flake8](https://github.com/PyCQA/flake8), [Pyflakes](https://github.com/PyCQA/pyflakes),
@@ -405,7 +405,7 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/a
Ruff is released under the MIT license.
## Who's Using Ruff?<a id="whos-using-ruff"></a>
## Who's Using Ruff?
Ruff is used by a number of major open-source projects and companies, including:
@@ -524,7 +524,7 @@ If you're using Ruff, consider adding the Ruff badge to your project's `README.m
<a href="https://github.com/astral-sh/ruff"><img src="https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json" alt="Ruff" style="max-width:100%;"></a>
```
## License<a id="license"></a>
## License
This repository is licensed under the [MIT License](https://github.com/astral-sh/ruff/blob/main/LICENSE)


@@ -13,17 +13,12 @@ The CLI supports different verbosity levels.
- `-vv` activates `debug!` and timestamps: This should be enough information to get to the bottom of bug reports. When you're processing many packages or files, you'll get pages and pages of output, but each line is linked to a specific action or state change.
- `-vvv` activates `trace!` (only in debug builds) and shows tracing-spans: At this level, you're logging everything. Most of this is wasted: it's really slow, and we dump e.g. the entire resolution graph. It's only useful to developers, and you almost certainly want to use `RED_KNOT_LOG` to filter it down to the area you're investigating.
## Better logging with `RED_KNOT_LOG` and `RAYON_NUM_THREADS`
## `RED_KNOT_LOG`
By default, the CLI shows messages from the `ruff` and `red_knot` crates. Tracing messages from other crates are not shown.
The `RED_KNOT_LOG` environment variable allows you to customize which messages are shown by specifying one
or more [filter directives](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).
The `RAYON_NUM_THREADS` environment variable, meanwhile, can be used to control the level of concurrency red-knot uses.
By default, red-knot will attempt to parallelize its work so that multiple files are checked simultaneously,
but this can result in confusing log output where messages from different threads are interleaved.
To switch off concurrency entirely and have more readable logs, use `RAYON_NUM_THREADS=1`.
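As a minimal sketch of how the verbosity flag and these environment variables might be combined, consider the following; the `red_knot` binary name and the `red_knot_python_semantic` log target are illustrative assumptions rather than invocations taken from this diff:
```shell
# Illustrative only: binary name and log target are assumptions.
# Debug-level messages with timestamps, run on a single thread so log lines
# from different files are not interleaved.
RAYON_NUM_THREADS=1 red_knot -vv

# Use a filter directive to narrow the output to a single crate.
RED_KNOT_LOG=red_knot_python_semantic=debug red_knot
```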
### Examples
#### Show all debug messages


@@ -5,8 +5,8 @@ use colored::Colorize;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use tracing::log::LevelFilter;
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::fmt::format::Writer;
use tracing_subscriber::fmt::{FmtContext, FormatEvent, FormatFields};
use tracing_subscriber::registry::LookupSpan;
@@ -60,10 +60,10 @@ pub(crate) enum VerbosityLevel {
impl VerbosityLevel {
const fn level_filter(self) -> LevelFilter {
match self {
VerbosityLevel::Default => LevelFilter::WARN,
VerbosityLevel::Verbose => LevelFilter::INFO,
VerbosityLevel::ExtraVerbose => LevelFilter::DEBUG,
VerbosityLevel::Trace => LevelFilter::TRACE,
VerbosityLevel::Default => LevelFilter::Warn,
VerbosityLevel::Verbose => LevelFilter::Info,
VerbosityLevel::ExtraVerbose => LevelFilter::Debug,
VerbosityLevel::Trace => LevelFilter::Trace,
}
}
@@ -88,7 +88,7 @@ pub(crate) fn setup_tracing(level: VerbosityLevel) -> anyhow::Result<TracingGuar
match level {
VerbosityLevel::Default => {
// Show warning traces
EnvFilter::default().add_directive(LevelFilter::WARN.into())
EnvFilter::default().add_directive(tracing::level_filters::LevelFilter::WARN.into())
}
level => {
let level_filter = level.level_filter();


@@ -7,12 +7,12 @@ use colored::Colorize;
use crossbeam::channel as crossbeam_channel;
use salsa::plumbing::ZalsaDatabase;
use red_knot_python_semantic::SitePackages;
use red_knot_python_semantic::{ProgramSettings, SearchPathSettings};
use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::site_packages::VirtualEnvironment;
use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use target_version::TargetVersion;
@@ -65,14 +65,15 @@ to resolve type information for the project's third-party dependencies.",
value_name = "PATH",
help = "Additional path to use as a module-resolution source (can be passed multiple times)"
)]
extra_search_path: Option<Vec<SystemPathBuf>>,
extra_search_path: Vec<SystemPathBuf>,
#[arg(
long,
help = "Python version to assume when resolving types",
value_name = "VERSION"
)]
target_version: Option<TargetVersion>,
default_value_t = TargetVersion::default(),
value_name="VERSION")
]
target_version: TargetVersion,
#[clap(flatten)]
verbosity: Verbosity,
@@ -85,36 +86,6 @@ to resolve type information for the project's third-party dependencies.",
watch: bool,
}
impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();
if let Some(target_version) = self.target_version {
configuration.target_version = Some(target_version.into());
}
if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}
if let Some(custom_typeshed_dir) = &self.custom_typeshed_dir {
configuration.search_paths.custom_typeshed =
Some(SystemPath::absolute(custom_typeshed_dir, cli_cwd));
}
if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}
configuration
}
}
#[derive(Debug, clap::Subcommand)]
pub enum Command {
/// Start the language server
@@ -144,13 +115,22 @@ pub fn main() -> ExitStatus {
}
fn run() -> anyhow::Result<ExitStatus> {
let args = Args::parse_from(std::env::args().collect::<Vec<_>>());
let Args {
command,
current_directory,
custom_typeshed_dir,
extra_search_path: extra_paths,
venv_path,
target_version,
verbosity,
watch,
} = Args::parse_from(std::env::args().collect::<Vec<_>>());
if matches!(args.command, Some(Command::Server)) {
if matches!(command, Some(Command::Server)) {
return run_server().map(|()| ExitStatus::Success);
}
let verbosity = args.verbosity.level();
let verbosity = verbosity.level();
countme::enable(verbosity.is_trace());
let _guard = setup_tracing(verbosity)?;
@@ -166,12 +146,10 @@ fn run() -> anyhow::Result<ExitStatus> {
})?
};
let cwd = args
.current_directory
.as_ref()
let cwd = current_directory
.map(|cwd| {
if cwd.as_std_path().is_dir() {
Ok(SystemPath::absolute(cwd, &cli_base_path))
Ok(SystemPath::absolute(&cwd, &cli_base_path))
} else {
Err(anyhow!(
"Provided current-directory path '{cwd}' is not a directory."
@@ -182,18 +160,33 @@ fn run() -> anyhow::Result<ExitStatus> {
.unwrap_or_else(|| cli_base_path.clone());
let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::from_path(
system.current_directory(),
&system,
Some(cli_configuration.clone()),
)?;
let workspace_metadata = WorkspaceMetadata::from_path(system.current_directory(), &system)?;
// TODO: Verify the remaining search path settings eagerly.
let site_packages = venv_path
.map(|path| {
VirtualEnvironment::new(path, &OsSystem::new(cli_base_path))
.and_then(|venv| venv.site_packages_directories(&system))
})
.transpose()?
.unwrap_or_default();
// TODO: Respect the settings from the workspace metadata. when resolving the program settings.
let program_settings = ProgramSettings {
target_version: target_version.into(),
search_paths: SearchPathSettings {
extra_paths,
src_root: workspace_metadata.root().to_path_buf(),
custom_typeshed: custom_typeshed_dir,
site_packages,
},
};
// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, system)?;
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -205,7 +198,7 @@ fn run() -> anyhow::Result<ExitStatus> {
}
})?;
let exit_status = if args.watch {
let exit_status = if watch {
main_loop.watch(&mut db)?
} else {
main_loop.run(&mut db)
@@ -245,12 +238,10 @@ struct MainLoop {
/// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>,
cli_configuration: Configuration,
}
impl MainLoop {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
fn new() -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);
(
@@ -258,7 +249,6 @@ impl MainLoop {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
},
MainLoopCancellationToken { sender },
)
@@ -341,7 +331,7 @@ impl MainLoop {
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_configuration));
db.apply_changes(changes);
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}


@@ -5,11 +5,12 @@ use std::time::Duration;
use anyhow::{anyhow, Context};
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
use red_knot_python_semantic::{
resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
};
use red_knot_workspace::db::RootDatabase;
use red_knot_workspace::watch;
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::files::{system_path_to_file, File, FileError};
use ruff_db::source::source_text;
@@ -24,7 +25,7 @@ struct TestCase {
/// We need to hold on to it in the test case or the temp files get deleted.
_temp_dir: tempfile::TempDir,
root_dir: SystemPathBuf,
configuration: Configuration,
search_path_settings: SearchPathSettings,
}
impl TestCase {
@@ -40,6 +41,10 @@ impl TestCase {
&self.db
}
fn db_mut(&mut self) -> &mut RootDatabase {
&mut self.db
}
fn stop_watch(&mut self) -> Vec<watch::ChangeEvent> {
self.try_stop_watch(Duration::from_secs(10))
.expect("Expected watch changes but observed none.")
@@ -100,20 +105,16 @@ impl TestCase {
Some(all_events)
}
fn apply_changes(&mut self, changes: Vec<watch::ChangeEvent>) {
self.db.apply_changes(changes, Some(&self.configuration));
}
fn update_search_path_settings(
&mut self,
configuration: SearchPathConfiguration,
f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
) -> anyhow::Result<()> {
let program = Program::get(self.db());
self.configuration.search_paths = configuration.clone();
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
let new_settings = f(&self.search_path_settings);
program.update_search_paths(&mut self.db, &new_settings)?;
program.update_search_paths(&mut self.db, new_settings.clone())?;
self.search_path_settings = new_settings;
if let Some(watcher) = &mut self.watcher {
watcher.update(&self.db);
@@ -126,6 +127,7 @@ impl TestCase {
fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
let package = self.db().workspace().package(self.db(), path).unwrap();
let files = package.files(self.db());
let files = files.read();
let mut collected: Vec<_> = files.into_iter().collect();
collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
collected
@@ -178,14 +180,17 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
where
F: SetupFiles,
{
setup_with_search_paths(setup_files, |_root, _workspace_path| {
SearchPathConfiguration::default()
setup_with_search_paths(setup_files, |_root, workspace_path| SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![],
})
}
fn setup_with_search_paths<F>(
setup_files: F,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathConfiguration,
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathSettings,
) -> anyhow::Result<TestCase>
where
F: SetupFiles,
@@ -217,34 +222,25 @@ where
let system = OsSystem::new(&workspace_path);
let search_paths = create_search_paths(&root_path, &workspace_path);
let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
let search_path_settings = create_search_paths(&root_path, workspace.root());
for path in search_paths
for path in search_path_settings
.extra_paths
.iter()
.flatten()
.chain(search_paths.custom_typeshed.iter())
.chain(search_paths.site_packages.iter().flat_map(|site_packages| {
if let SitePackages::Known(path) = site_packages {
path.as_slice()
} else {
&[]
}
}))
.chain(search_path_settings.site_packages.iter())
.chain(search_path_settings.custom_typeshed.iter())
{
std::fs::create_dir_all(path.as_std_path())
.with_context(|| format!("Failed to create search path '{path}'"))?;
}
let configuration = Configuration {
target_version: Some(PythonVersion::PY312),
search_paths,
let settings = ProgramSettings {
target_version: PythonVersion::default(),
search_paths: search_path_settings.clone(),
};
let workspace =
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
let db = RootDatabase::new(workspace, system)?;
let db = RootDatabase::new(workspace, settings, system)?;
let (sender, receiver) = crossbeam::channel::unbounded();
let watcher = directory_watcher(move |events| sender.send(events).unwrap())
@@ -259,7 +255,7 @@ where
watcher: Some(watcher),
_temp_dir: temp_dir,
root_dir: root_path,
configuration,
search_path_settings,
};
// Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -312,7 +308,7 @@ fn new_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let foo = case.system_file(&foo_path).expect("foo.py to exist.");
@@ -335,7 +331,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(case.system_file(&foo_path).is_ok());
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]);
@@ -359,7 +355,7 @@ fn changed_file() -> anyhow::Result<()> {
assert!(!changes.is_empty());
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
assert_eq!(&case.collect_package_files(&foo_path), &[foo]);
@@ -382,7 +378,7 @@ fn deleted_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -414,7 +410,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]);
@@ -446,7 +442,7 @@ fn move_file_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let foo_in_workspace = case.system_file(&foo_in_workspace_path)?;
@@ -474,7 +470,7 @@ fn rename_file() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(!foo.exists(case.db()));
@@ -515,7 +511,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
let init_file = case
.system_file(sub_new_path.join("__init__.py"))
@@ -566,7 +562,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -620,7 +616,7 @@ fn directory_renamed() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -685,7 +681,7 @@ fn directory_deleted() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// `import sub.a` should no longer resolve
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("sub.a").unwrap()).is_none());
@@ -699,13 +695,15 @@ fn directory_deleted() -> anyhow::Result<()> {
#[test]
fn search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
let site_packages = case.root_path().join("site_packages");
@@ -718,7 +716,7 @@ fn search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
assert_eq!(
@@ -739,9 +737,9 @@ fn add_search_path() -> anyhow::Result<()> {
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_none());
// Register site-packages as a search path.
case.update_search_path_settings(SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![site_packages.clone()])),
..SearchPathConfiguration::default()
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![site_packages.clone()],
..settings.clone()
})
.expect("Search path settings to be valid");
@@ -749,7 +747,7 @@ fn add_search_path() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert!(resolve_module(case.db().upcast(), ModuleName::new_static("a").unwrap()).is_some());
@@ -758,19 +756,21 @@ fn add_search_path() -> anyhow::Result<()> {
#[test]
fn remove_search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
[("bar.py", "import sub.a")],
|root_path, _workspace_path| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default()
},
)?;
let mut case =
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, workspace_path| {
SearchPathSettings {
extra_paths: vec![],
src_root: workspace_path.to_path_buf(),
custom_typeshed: None,
site_packages: vec![root_path.join("site_packages")],
}
})?;
// Remove site packages from the search path settings.
let site_packages = case.root_path().join("site_packages");
case.update_search_path_settings(SearchPathConfiguration {
site_packages: None,
..SearchPathConfiguration::default()
case.update_search_path_settings(|settings| SearchPathSettings {
site_packages: vec![],
..settings.clone()
})
.expect("Search path settings to be valid");
@@ -783,48 +783,6 @@ fn remove_search_path() -> anyhow::Result<()> {
Ok(())
}
#[test]
fn changed_versions_file() -> anyhow::Result<()> {
let mut case = setup_with_search_paths(
|root_path: &SystemPath, workspace_path: &SystemPath| {
std::fs::write(workspace_path.join("bar.py").as_std_path(), "import sub.a")?;
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
std::fs::write(
root_path.join("typeshed/stdlib/os.pyi").as_std_path(),
"# not important",
)?;
Ok(())
},
|root_path, _workspace_path| SearchPathConfiguration {
custom_typeshed: Some(root_path.join("typeshed")),
..SearchPathConfiguration::default()
},
)?;
// `os` isn't listed in the custom typeshed's VERSIONS file yet, so it shouldn't resolve.
assert_eq!(
resolve_module(case.db(), ModuleName::new("os").unwrap()),
None
);
std::fs::write(
case.root_path()
.join("typeshed/stdlib/VERSIONS")
.as_std_path(),
"os: 3.0-",
)?;
let changes = case.stop_watch();
case.apply_changes(changes);
assert!(resolve_module(case.db(), ModuleName::new("os").unwrap()).is_some());
Ok(())
}
/// Watch a workspace that contains two files where one file is a hardlink to another.
///
/// Setup:
@@ -871,7 +829,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
@@ -942,7 +900,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");
@@ -981,7 +939,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
foo.permissions(case.db()),
@@ -1066,7 +1024,7 @@ mod unix {
let changes = case.take_watch_changes();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1079,7 +1037,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz.file()).as_str(),
@@ -1150,7 +1108,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
// The file watcher is guaranteed to emit one event for the changed file, but it isn't specified
// if the event is emitted for the "original" or linked path because both paths are watched.
@@ -1219,11 +1177,11 @@ mod unix {
Ok(())
},
|_root, workspace| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![
workspace.join(".venv/lib/python3.12/site-packages")
])),
..SearchPathConfiguration::default()
|_root, workspace| SearchPathSettings {
extra_paths: vec![],
src_root: workspace.to_path_buf(),
custom_typeshed: None,
site_packages: vec![workspace.join(".venv/lib/python3.12/site-packages")],
},
)?;
@@ -1258,7 +1216,7 @@ mod unix {
let changes = case.stop_watch();
case.apply_changes(changes);
case.db_mut().apply_changes(changes);
assert_eq!(
source_text(case.db(), baz_original_file).as_str(),

View File

@@ -17,7 +17,6 @@ ruff_python_ast = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
ruff_python_literal = { workspace = true }
anyhow = { workspace = true }
bitflags = { workspace = true }
@@ -27,13 +26,10 @@ countme = { workspace = true }
once_cell = { workspace = true }
ordermap = { workspace = true }
salsa = { workspace = true }
thiserror = { workspace = true }
smallvec = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
[build-dependencies]
path-slash = { workspace = true }

View File

@@ -31,10 +31,10 @@ impl<T> AstNodeRef<T> {
/// which the `AstNodeRef` belongs.
///
/// ## Safety
///
/// Dereferencing the `node` can result in undefined behavior if `parsed` isn't the
/// [`ParsedModule`] to which `node` belongs. It's the caller's responsibility to ensure that
/// the invariant `node belongs to parsed` is upheld.
pub(super) unsafe fn new(parsed: ParsedModule, node: &T) -> Self {
Self {
_parsed: parsed,

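As a rough illustration of the safety contract above, here is a minimal sketch of a correct construction, where the node reference is borrowed from the very `ParsedModule` that is passed in. The helper name `first_statement_ref` is hypothetical, and `new` is only callable from within the semantic-index module, so this is a sketch rather than reusable API surface:

use ruff_python_ast as ast;

// Illustrative sketch only: `stmt` is borrowed from `parsed`, so the
// "node belongs to parsed" invariant documented above holds.
fn first_statement_ref(parsed: &ParsedModule) -> Option<AstNodeRef<ast::Stmt>> {
    let stmt = parsed.syntax().body.first()?;
    // SAFETY: `stmt` is a reference into `parsed`, the same module handed to `new`.
    Some(unsafe { AstNodeRef::new(parsed.clone(), stmt) })
}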
View File

@@ -0,0 +1,16 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::Db;
/// Salsa query to get the builtins scope.
///
/// Can return None if a custom typeshed is used that is missing `builtins.pyi`.
#[salsa::tracked]
pub(crate) fn builtins_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
let builtins_name =
ModuleName::new_static("builtins").expect("Expected 'builtins' to be a valid module name");
let builtins_file = resolve_module(db, builtins_name)?.file();
Some(global_scope(db, builtins_file))
}
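A minimal usage sketch for the query above: because it is `#[salsa::tracked]`, repeated calls are memoized, and callers must tolerate the `None` case rather than assuming builtins are always resolvable. `infer_builtin_symbol` is a hypothetical caller, not part of this change:

fn infer_builtin_symbol(db: &dyn Db, name: &str) -> Option<ScopeId<'_>> {
    let scope = builtins_scope(db)?;
    // Resolve `name` inside the builtins global scope here; with a custom typeshed
    // that lacks `builtins.pyi`, builtin names are simply treated as unresolved.
    let _ = name;
    Some(scope)
}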

View File

@@ -1,18 +1,15 @@
use ruff_db::files::File;
use ruff_db::{Db as SourceDb, Upcast};
/// Database giving access to semantic information about a Python program.
#[salsa::db]
pub trait Db: SourceDb + Upcast<dyn SourceDb> {
fn is_file_open(&self, file: File) -> bool;
}
pub trait Db: SourceDb + Upcast<dyn SourceDb> {}
#[cfg(test)]
pub(crate) mod tests {
use std::sync::Arc;
use crate::module_resolver::vendored_typeshed_stubs;
use ruff_db::files::{File, Files};
use ruff_db::files::Files;
use ruff_db::system::{DbWithTestSystem, System, TestSystem};
use ruff_db::vendored::VendoredFileSystem;
use ruff_db::{Db as SourceDb, Upcast};
@@ -94,11 +91,7 @@ pub(crate) mod tests {
}
#[salsa::db]
impl Db for TestDb {
fn is_file_open(&self, file: File) -> bool {
!file.path(self).is_vendored_path()
}
}
impl Db for TestDb {}
#[salsa::db]
impl salsa::Database for TestDb {

View File

@@ -5,11 +5,12 @@ use rustc_hash::FxHasher;
pub use db::Db;
pub use module_name::ModuleName;
pub use module_resolver::{resolve_module, system_module_search_paths, vendored_typeshed_stubs};
pub use program::{Program, ProgramSettings, SearchPathSettings, SitePackages};
pub use program::{Program, ProgramSettings, SearchPathSettings};
pub use python_version::PythonVersion;
pub use semantic_model::{HasTy, SemanticModel};
pub mod ast_node_ref;
mod builtins;
mod db;
mod module_name;
mod module_resolver;
@@ -18,8 +19,6 @@ mod program;
mod python_version;
pub mod semantic_index;
mod semantic_model;
pub(crate) mod site_packages;
mod stdlib;
pub mod types;
type FxOrderSet<V> = ordermap::set::OrderSet<V, BuildHasherDefault<FxHasher>>;

View File

@@ -168,24 +168,6 @@ impl ModuleName {
};
Some(Self(name))
}
/// Extend `self` with the components of `other`
///
/// # Examples
///
/// ```
/// use red_knot_python_semantic::ModuleName;
///
/// let mut module_name = ModuleName::new_static("foo").unwrap();
/// module_name.extend(&ModuleName::new_static("bar").unwrap());
/// assert_eq!(&module_name, "foo.bar");
/// module_name.extend(&ModuleName::new_static("baz.eggs.ham").unwrap());
/// assert_eq!(&module_name, "foo.bar.baz.eggs.ham");
/// ```
pub fn extend(&mut self, other: &ModuleName) {
self.0.push('.');
self.0.push_str(other);
}
}
impl Deref for ModuleName {

View File

@@ -2,7 +2,7 @@ use std::iter::FusedIterator;
pub(crate) use module::Module;
pub use resolver::resolve_module;
pub(crate) use resolver::{file_to_module, SearchPaths};
pub(crate) use resolver::SearchPaths;
use ruff_db::system::SystemPath;
pub use typeshed::vendored_typeshed_stubs;
@@ -13,6 +13,7 @@ use resolver::SearchPathIterator;
mod module;
mod path;
mod resolver;
mod state;
mod typeshed;
#[cfg(test)]

View File

@@ -77,9 +77,3 @@ pub enum ModuleKind {
/// A python package (`foo/__init__.py` or `foo/__init__.pyi`)
Package,
}
impl ModuleKind {
pub const fn is_package(self) -> bool {
matches!(self, ModuleKind::Package)
}
}

View File

@@ -9,11 +9,11 @@ use ruff_db::files::{system_path_to_file, vendored_path_to_file, File, FileError
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredPath, VendoredPathBuf};
use super::typeshed::{typeshed_versions, TypeshedVersionsParseError, TypeshedVersionsQueryResult};
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::resolver::ResolverContext;
use crate::site_packages::SitePackagesDiscoveryError;
use super::state::ResolverState;
use super::typeshed::{TypeshedVersionsParseError, TypeshedVersionsQueryResult};
/// A path that points to a Python module.
///
@@ -59,12 +59,8 @@ impl ModulePath {
self.relative_path.push(component);
}
pub(crate) fn pop(&mut self) -> bool {
self.relative_path.pop()
}
#[must_use]
pub(super) fn is_directory(&self, resolver: &ResolverContext) -> bool {
pub(crate) fn is_directory(&self, resolver: &ResolverState) -> bool {
let ModulePath {
search_path,
relative_path,
@@ -78,7 +74,7 @@ impl ModulePath {
== Err(FileError::IsADirectory)
}
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -88,7 +84,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(None, relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => resolver
@@ -100,7 +96,7 @@ impl ModulePath {
}
#[must_use]
pub(super) fn is_regular_package(&self, resolver: &ResolverContext) -> bool {
pub(crate) fn is_regular_package(&self, resolver: &ResolverState) -> bool {
let ModulePath {
search_path,
relative_path,
@@ -117,7 +113,7 @@ impl ModulePath {
.is_ok()
}
SearchPathInner::StandardLibraryCustom(search_path) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(Some(search_path), relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => system_path_to_file(
@@ -128,7 +124,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(search_path) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(None, relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => false,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => resolver
@@ -140,7 +136,7 @@ impl ModulePath {
}
#[must_use]
pub(super) fn to_file(&self, resolver: &ResolverContext) -> Option<File> {
pub(crate) fn to_file(&self, resolver: &ResolverState) -> Option<File> {
let db = resolver.db.upcast();
let ModulePath {
search_path,
@@ -154,7 +150,7 @@ impl ModulePath {
system_path_to_file(db, search_path.join(relative_path)).ok()
}
SearchPathInner::StandardLibraryCustom(stdlib_root) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(Some(stdlib_root), relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => None,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -163,7 +159,7 @@ impl ModulePath {
}
}
SearchPathInner::StandardLibraryVendored(stdlib_root) => {
match query_stdlib_version(relative_path, resolver) {
match query_stdlib_version(None, relative_path, resolver) {
TypeshedVersionsQueryResult::DoesNotExist => None,
TypeshedVersionsQueryResult::Exists
| TypeshedVersionsQueryResult::MaybeExists => {
@@ -277,15 +273,19 @@ fn stdlib_path_to_module_name(relative_path: &Utf8Path) -> Option<ModuleName> {
#[must_use]
fn query_stdlib_version(
custom_stdlib_root: Option<&SystemPath>,
relative_path: &Utf8Path,
context: &ResolverContext,
resolver: &ResolverState,
) -> TypeshedVersionsQueryResult {
let Some(module_name) = stdlib_path_to_module_name(relative_path) else {
return TypeshedVersionsQueryResult::DoesNotExist;
};
let ResolverContext { db, target_version } = context;
typeshed_versions(*db).query_module(&module_name, *target_version)
let ResolverState {
db,
typeshed_versions,
target_version,
} = resolver;
typeshed_versions.query_module(*db, &module_name, custom_stdlib_root, *target_version)
}
/// Enumeration describing the various ways in which validation of a search path might fail.
@@ -293,7 +293,7 @@ fn query_stdlib_version(
/// If validation fails for a search path derived from the user settings,
/// a message must be displayed to the user,
/// as type checking cannot be done reliably in these circumstances.
#[derive(Debug)]
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum SearchPathValidationError {
/// The path provided by the user was not a directory
NotADirectory(SystemPathBuf),
@@ -304,20 +304,18 @@ pub(crate) enum SearchPathValidationError {
NoStdlibSubdirectory(SystemPathBuf),
/// The typeshed path provided by the user is a directory,
/// but `stdlib/VERSIONS` could not be read.
/// but no `stdlib/VERSIONS` file exists.
/// (This is only relevant for stdlib search paths.)
FailedToReadVersionsFile {
path: SystemPathBuf,
error: std::io::Error,
},
NoVersionsFile(SystemPathBuf),
/// `stdlib/VERSIONS` is a directory.
/// (This is only relevant for stdlib search paths.)
VersionsIsADirectory(SystemPathBuf),
/// The path provided by the user is a directory,
/// and a `stdlib/VERSIONS` file exists, but it fails to parse.
/// (This is only relevant for stdlib search paths.)
VersionsParseError(TypeshedVersionsParseError),
/// Failed to discover the site-packages for the configured virtual environment.
SitePackagesDiscovery(SitePackagesDiscoveryError),
}
impl fmt::Display for SearchPathValidationError {
@@ -327,16 +325,9 @@ impl fmt::Display for SearchPathValidationError {
Self::NoStdlibSubdirectory(path) => {
write!(f, "The directory at {path} has no `stdlib/` subdirectory")
}
Self::FailedToReadVersionsFile { path, error } => {
write!(
f,
"Failed to read the custom typeshed versions file '{path}': {error}"
)
}
Self::NoVersionsFile(path) => write!(f, "Expected a file at {path}/stdlib/VERSIONS"),
Self::VersionsIsADirectory(path) => write!(f, "{path}/stdlib/VERSIONS is a directory."),
Self::VersionsParseError(underlying_error) => underlying_error.fmt(f),
SearchPathValidationError::SitePackagesDiscovery(error) => {
write!(f, "Failed to discover the site-packages directory: {error}")
}
}
}
}
@@ -351,18 +342,6 @@ impl std::error::Error for SearchPathValidationError {
}
}
impl From<TypeshedVersionsParseError> for SearchPathValidationError {
fn from(value: TypeshedVersionsParseError) -> Self {
Self::VersionsParseError(value)
}
}
impl From<SitePackagesDiscoveryError> for SearchPathValidationError {
fn from(value: SitePackagesDiscoveryError) -> Self {
Self::SitePackagesDiscovery(value)
}
}
type SearchPathResult<T> = Result<T, SearchPathValidationError>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -405,10 +384,11 @@ pub(crate) struct SearchPath(Arc<SearchPathInner>);
impl SearchPath {
fn directory_path(system: &dyn System, root: SystemPathBuf) -> SearchPathResult<SystemPathBuf> {
if system.is_directory(&root) {
Ok(root)
let canonicalized = system.canonicalize_path(&root).unwrap_or(root);
if system.is_directory(&canonicalized) {
Ok(canonicalized)
} else {
Err(SearchPathValidationError::NotADirectory(root))
Err(SearchPathValidationError::NotADirectory(canonicalized))
}
}
@@ -427,22 +407,32 @@ impl SearchPath {
}
/// Create a new standard-library search path pointing to a custom directory on disk
pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: &SystemPath) -> SearchPathResult<Self> {
pub(crate) fn custom_stdlib(db: &dyn Db, typeshed: SystemPathBuf) -> SearchPathResult<Self> {
let system = db.system();
if !system.is_directory(typeshed) {
if !system.is_directory(&typeshed) {
return Err(SearchPathValidationError::NotADirectory(
typeshed.to_path_buf(),
));
}
let stdlib =
Self::directory_path(system, typeshed.join("stdlib")).map_err(|err| match err {
SearchPathValidationError::NotADirectory(_) => {
SearchPathValidationError::NoStdlibSubdirectory(typeshed.to_path_buf())
SearchPathValidationError::NotADirectory(path) => {
SearchPathValidationError::NoStdlibSubdirectory(path)
}
err => err,
})?;
let typeshed_versions =
system_path_to_file(db.upcast(), stdlib.join("VERSIONS")).map_err(|err| match err {
FileError::NotFound => SearchPathValidationError::NoVersionsFile(typeshed),
FileError::IsADirectory => {
SearchPathValidationError::VersionsIsADirectory(typeshed)
}
})?;
super::typeshed::parse_typeshed_versions(db, typeshed_versions)
.as_ref()
.map_err(|validation_error| {
SearchPathValidationError::VersionsParseError(validation_error.clone())
})?;
Ok(Self(Arc::new(SearchPathInner::StandardLibraryCustom(
stdlib,
))))
@@ -633,10 +623,10 @@ mod tests {
use ruff_db::Db;
use crate::db::tests::TestDb;
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
use crate::python_version::PythonVersion;
use super::*;
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
use crate::python_version::PythonVersion;
impl ModulePath {
#[must_use]
@@ -648,6 +638,15 @@ mod tests {
}
impl SearchPath {
#[must_use]
pub(crate) fn is_stdlib_search_path(&self) -> bool {
matches!(
&*self.0,
SearchPathInner::StandardLibraryCustom(_)
| SearchPathInner::StandardLibraryVendored(_)
)
}
fn join(&self, component: &str) -> ModulePath {
self.to_module_path().join(component)
}
@@ -662,7 +661,7 @@ mod tests {
.build();
assert_eq!(
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
.unwrap()
.to_module_path()
.with_py_extension(),
@@ -670,7 +669,7 @@ mod tests {
);
assert_eq!(
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
&SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
.unwrap()
.join("foo")
.with_pyi_extension(),
@@ -781,7 +780,7 @@ mod tests {
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
.with_custom_typeshed(MockedTypeshed::default())
.build();
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
.unwrap()
.to_module_path()
.push("bar.py");
@@ -793,7 +792,7 @@ mod tests {
let TestCase { db, stdlib, .. } = TestCaseBuilder::new()
.with_custom_typeshed(MockedTypeshed::default())
.build();
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap())
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf())
.unwrap()
.to_module_path()
.push("bar.rs");
@@ -825,7 +824,7 @@ mod tests {
.with_custom_typeshed(MockedTypeshed::default())
.build();
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
let root = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
// Must have a `.pyi` extension or no extension:
let bad_absolute_path = SystemPath::new("foo/stdlib/x.py");
@@ -873,7 +872,8 @@ mod tests {
.with_custom_typeshed(typeshed)
.with_target_version(target_version)
.build();
let stdlib = SearchPath::custom_stdlib(&db, stdlib.parent().unwrap()).unwrap();
let stdlib =
SearchPath::custom_stdlib(&db, stdlib.parent().unwrap().to_path_buf()).unwrap();
(db, stdlib)
}
@@ -898,7 +898,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let asyncio_regular_package = stdlib_path.join("asyncio");
assert!(asyncio_regular_package.is_directory(&resolver));
@@ -926,7 +926,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let xml_namespace_package = stdlib_path.join("xml");
assert!(xml_namespace_package.is_directory(&resolver));
@@ -948,7 +948,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let functools_module = stdlib_path.join("functools.pyi");
assert!(functools_module.to_file(&resolver).is_some());
@@ -964,7 +964,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let collections_regular_package = stdlib_path.join("collections");
assert_eq!(collections_regular_package.to_file(&resolver), None);
@@ -980,7 +980,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let importlib_namespace_package = stdlib_path.join("importlib");
assert_eq!(importlib_namespace_package.to_file(&resolver), None);
@@ -1001,7 +1001,7 @@ mod tests {
};
let (db, stdlib_path) = py38_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY38);
let resolver = ResolverState::new(&db, PythonVersion::PY38);
let non_existent = stdlib_path.join("doesnt_even_exist");
assert_eq!(non_existent.to_file(&resolver), None);
@@ -1029,7 +1029,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
// Since we've set the target version to Py39,
// `collections` should now exist as a directory, according to VERSIONS...
@@ -1058,7 +1058,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
// The `importlib` directory now also exists
let importlib_namespace_package = stdlib_path.join("importlib");
@@ -1082,7 +1082,7 @@ mod tests {
};
let (db, stdlib_path) = py39_typeshed_test_case(TYPESHED);
let resolver = ResolverContext::new(&db, PythonVersion::PY39);
let resolver = ResolverState::new(&db, PythonVersion::PY39);
// The `xml` package no longer exists on py39:
let xml_namespace_package = stdlib_path.join("xml");

View File

@@ -1,19 +1,19 @@
use rustc_hash::{FxBuildHasher, FxHashSet};
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::ops::Deref;
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_db::files::{File, FilePath, FileRootKind};
use ruff_db::system::{DirectoryEntry, System, SystemPath, SystemPathBuf};
use ruff_db::vendored::{VendoredFileSystem, VendoredPath};
use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
use ruff_db::vendored::VendoredPath;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, SearchPathSettings};
use super::module::{Module, ModuleKind};
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::module_resolver::typeshed::{vendored_typeshed_versions, TypeshedVersions};
use crate::site_packages::VirtualEnvironment;
use crate::{Program, PythonVersion, SearchPathSettings, SitePackages};
use super::state::ResolverState;
/// Resolves a module name to a module.
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
@@ -41,7 +41,7 @@ pub(crate) fn resolve_module_query<'db>(
let module = Module::new(name.clone(), kind, search_path, module_file);
tracing::trace!(
tracing::debug!(
"Resolved module '{name}' to '{path}'.",
path = module_file.path(db)
);
@@ -122,7 +122,7 @@ pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
Program::get(db).search_paths(db).iter(db)
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Default)]
pub(crate) struct SearchPaths {
/// Search paths that have been statically determined purely from reading Ruff's configuration settings.
/// These shouldn't ever change unless the config settings themselves change.
@@ -135,8 +135,6 @@ pub(crate) struct SearchPaths {
/// in terms of module-resolution priority until we've discovered the editable installs
/// for the first `site-packages` path
site_packages: Vec<SearchPath>,
typeshed_versions: ResolvedTypeshedVersions,
}
impl SearchPaths {
@@ -148,14 +146,8 @@ impl SearchPaths {
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
pub(crate) fn from_settings(
db: &dyn Db,
settings: &SearchPathSettings,
settings: SearchPathSettings,
) -> Result<Self, SearchPathValidationError> {
fn canonicalize(path: &SystemPath, system: &dyn System) -> SystemPathBuf {
system
.canonicalize_path(path)
.unwrap_or_else(|_| path.to_path_buf())
}
let SearchPathSettings {
extra_paths,
src_root,
@@ -169,65 +161,45 @@ impl SearchPaths {
let mut static_paths = vec![];
for path in extra_paths {
let path = canonicalize(path, system);
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
tracing::debug!("Adding extra search-path '{path}'");
static_paths.push(SearchPath::extra(system, path)?);
}
tracing::debug!("Adding first-party search path '{src_root}'");
static_paths.push(SearchPath::first_party(system, src_root.to_path_buf())?);
let (typeshed_versions, stdlib_path) = if let Some(custom_typeshed) = custom_typeshed {
let custom_typeshed = canonicalize(custom_typeshed, system);
tracing::debug!("Adding custom-stdlib search path '{custom_typeshed}'");
tracing::debug!("Adding static extra search-path '{path}'");
let search_path = SearchPath::extra(system, path)?;
files.try_add_root(
db.upcast(),
&custom_typeshed,
search_path.as_system_path().unwrap(),
FileRootKind::LibrarySearchPath,
);
static_paths.push(search_path);
}
let versions_path = custom_typeshed.join("stdlib/VERSIONS");
tracing::debug!("Adding static search path '{src_root}'");
static_paths.push(SearchPath::first_party(system, src_root)?);
let versions_content = system.read_to_string(&versions_path).map_err(|error| {
SearchPathValidationError::FailedToReadVersionsFile {
path: versions_path,
error,
}
})?;
static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
let parsed: TypeshedVersions = versions_content.parse()?;
let search_path = SearchPath::custom_stdlib(db, &custom_typeshed)?;
(ResolvedTypeshedVersions::Custom(parsed), search_path)
let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
files.try_add_root(
db.upcast(),
search_path.as_system_path().unwrap(),
FileRootKind::LibrarySearchPath,
);
search_path
} else {
tracing::debug!("Using vendored stdlib");
(
ResolvedTypeshedVersions::Vendored(vendored_typeshed_versions()),
SearchPath::vendored_stdlib(),
)
};
static_paths.push(stdlib_path);
let site_packages_paths = match site_packages_paths {
SitePackages::Derived { venv_path } => VirtualEnvironment::new(venv_path, system)
.and_then(|venv| venv.site_packages_directories(system))?,
SitePackages::Known(paths) => paths
.iter()
.map(|path| canonicalize(path, system))
.collect(),
};
SearchPath::vendored_stdlib()
});
let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
for path in site_packages_paths {
tracing::debug!("Adding site-packages search path '{path}'");
files.try_add_root(db.upcast(), &path, FileRootKind::LibrarySearchPath);
site_packages.push(SearchPath::site_packages(system, path)?);
tracing::debug!("Adding site-package path '{path}'");
let search_path = SearchPath::site_packages(system, path)?;
files.try_add_root(
db.upcast(),
search_path.as_system_path().unwrap(),
FileRootKind::LibrarySearchPath,
);
site_packages.push(search_path);
}
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
@@ -252,48 +224,16 @@ impl SearchPaths {
Ok(SearchPaths {
static_paths,
site_packages,
typeshed_versions,
})
}
pub(super) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
SearchPathIterator {
db,
static_paths: self.static_paths.iter(),
dynamic_paths: None,
}
}
pub(crate) fn custom_stdlib(&self) -> Option<&SystemPath> {
self.static_paths.iter().find_map(|search_path| {
if search_path.is_standard_library() {
search_path.as_system_path()
} else {
None
}
})
}
pub(super) fn typeshed_versions(&self) -> &TypeshedVersions {
&self.typeshed_versions
}
}
#[derive(Debug, PartialEq, Eq)]
enum ResolvedTypeshedVersions {
Vendored(&'static TypeshedVersions),
Custom(TypeshedVersions),
}
impl Deref for ResolvedTypeshedVersions {
type Target = TypeshedVersions;
fn deref(&self) -> &Self::Target {
match self {
ResolvedTypeshedVersions::Vendored(versions) => versions,
ResolvedTypeshedVersions::Custom(versions) => versions,
}
}
}
/// Collect all dynamic search paths. For each `site-packages` path:
@@ -311,7 +251,6 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
let SearchPaths {
static_paths,
site_packages,
typeshed_versions: _,
} = Program::get(db).search_paths(db);
let mut dynamic_paths = Vec::new();
@@ -376,16 +315,12 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
let installations = all_pth_files.iter().flat_map(PthFile::items);
for installation in installations {
let installation = system
.canonicalize_path(&installation)
.unwrap_or(installation);
if existing_paths.insert(Cow::Owned(installation.clone())) {
match SearchPath::editable(system, installation.clone()) {
match SearchPath::editable(system, installation) {
Ok(search_path) => {
tracing::debug!(
"Adding editable installation to module resolution path {path}",
path = installation
path = search_path.as_system_path().unwrap()
);
dynamic_paths.push(search_path);
}
@@ -547,7 +482,7 @@ struct ModuleNameIngredient<'db> {
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
let program = Program::get(db);
let target_version = program.target_version(db);
let resolver_state = ResolverContext::new(db, target_version);
let resolver_state = ResolverState::new(db, target_version);
let is_builtin_module =
ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());
@@ -569,16 +504,24 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
package_path.push(module_name);
// Check for a regular package first (highest priority)
package_path.push("__init__");
if let Some(regular_package) = resolve_file_module(&package_path, &resolver_state) {
return Some((search_path.clone(), regular_package, ModuleKind::Package));
// Must be a `__init__.pyi` or `__init__.py` or it isn't a package.
let kind = if package_path.is_directory(&resolver_state) {
package_path.push("__init__");
ModuleKind::Package
} else {
ModuleKind::Module
};
// TODO Implement full https://peps.python.org/pep-0561/#type-checker-module-resolution-order resolution
if let Some(stub) = package_path.with_pyi_extension().to_file(&resolver_state) {
return Some((search_path.clone(), stub, kind));
}
// Check for a file module next
package_path.pop();
if let Some(file_module) = resolve_file_module(&package_path, &resolver_state) {
return Some((search_path.clone(), file_module, ModuleKind::Module));
if let Some(module) = package_path
.with_py_extension()
.and_then(|path| path.to_file(&resolver_state))
{
return Some((search_path.clone(), module, kind));
}
// For regular packages, don't search the next search path. All files of that
@@ -599,27 +542,10 @@ fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, Mod
None
}
/// If `module` exists on disk with either a `.pyi` or `.py` extension,
/// return the [`File`] corresponding to that path.
///
/// `.pyi` files take priority, as they always have priority when
/// resolving modules.
fn resolve_file_module(module: &ModulePath, resolver_state: &ResolverContext) -> Option<File> {
// Stubs have precedence over source files
module
.with_pyi_extension()
.to_file(resolver_state)
.or_else(|| {
module
.with_py_extension()
.and_then(|path| path.to_file(resolver_state))
})
}
fn resolve_package<'a, 'db, I>(
module_search_path: &SearchPath,
components: I,
resolver_state: &ResolverContext<'db>,
resolver_state: &ResolverState<'db>,
) -> Result<ResolvedPackage, PackageKind>
where
I: Iterator<Item = &'a str>,
@@ -642,10 +568,7 @@ where
if is_regular_package {
in_namespace_package = false;
} else if package_path.is_directory(resolver_state)
// Pure modules hide namespace packages with the same name
&& resolve_file_module(&package_path, resolver_state).is_none()
{
} else if package_path.is_directory(resolver_state) {
// A directory without an `__init__.py` is a namespace package; continue with the next folder.
in_namespace_package = true;
} else if in_namespace_package {
@@ -704,21 +627,6 @@ impl PackageKind {
}
}
pub(super) struct ResolverContext<'db> {
pub(super) db: &'db dyn Db,
pub(super) target_version: PythonVersion,
}
impl<'db> ResolverContext<'db> {
pub(super) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
Self { db, target_version }
}
pub(super) fn vendored(&self) -> &VendoredFileSystem {
self.db.vendored()
}
}
#[cfg(test)]
mod tests {
use ruff_db::files::{system_path_to_file, File, FilePath};
@@ -873,7 +781,7 @@ mod tests {
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_standard_library(),
search_path.is_stdlib_search_path(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
@@ -969,7 +877,7 @@ mod tests {
"Search path for {module_name} was unexpectedly {search_path:?}"
);
assert!(
search_path.is_standard_library(),
search_path.is_stdlib_search_path(),
"Expected a stdlib search path, but got {search_path:?}"
);
}
@@ -1103,25 +1011,6 @@ mod tests {
);
}
#[test]
fn single_file_takes_priority_over_namespace_package() {
//const SRC: &[FileSpec] = &[("foo.py", "x = 1")];
const SRC: &[FileSpec] = &[("foo.py", "x = 1"), ("foo/bar.py", "x = 2")];
let TestCase { db, src, .. } = TestCaseBuilder::new().with_src_files(SRC).build();
let foo_module_name = ModuleName::new_static("foo").unwrap();
let foo_bar_module_name = ModuleName::new_static("foo.bar").unwrap();
// `foo.py` takes priority over the `foo` namespace package
let foo_module = resolve_module(&db, foo_module_name.clone()).unwrap();
assert_eq!(foo_module.file().path(&db), &src.join("foo.py"));
// `foo.bar` isn't recognised as a module
let foo_bar_module = resolve_module(&db, foo_bar_module_name.clone());
assert_eq!(foo_bar_module, None);
}
#[test]
fn typing_stub_over_module() {
const SRC: &[FileSpec] = &[("foo.py", "print('Hello, world!')"), ("foo.pyi", "x: int")];
@@ -1305,13 +1194,13 @@ mod tests {
Program::from_settings(
&db,
&ProgramSettings {
ProgramSettings {
target_version: PythonVersion::PY38,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: Some(custom_typeshed.clone()),
site_packages: SitePackages::Known(vec![site_packages]),
site_packages: vec![site_packages],
},
},
)
@@ -1810,16 +1699,13 @@ not_a_directory
Program::from_settings(
&db,
&ProgramSettings {
ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: SystemPathBuf::from("/src"),
custom_typeshed: None,
site_packages: SitePackages::Known(vec![
venv_site_packages,
system_site_packages,
]),
site_packages: vec![venv_site_packages, system_site_packages],
},
},
)

View File

@@ -0,0 +1,25 @@
use ruff_db::vendored::VendoredFileSystem;
use super::typeshed::LazyTypeshedVersions;
use crate::db::Db;
use crate::python_version::PythonVersion;
pub(crate) struct ResolverState<'db> {
pub(crate) db: &'db dyn Db,
pub(crate) typeshed_versions: LazyTypeshedVersions<'db>,
pub(crate) target_version: PythonVersion,
}
impl<'db> ResolverState<'db> {
pub(crate) fn new(db: &'db dyn Db, target_version: PythonVersion) -> Self {
Self {
db,
typeshed_versions: LazyTypeshedVersions::new(),
target_version,
}
}
pub(crate) fn vendored(&self) -> &VendoredFileSystem {
self.db.vendored()
}
}

View File

@@ -4,7 +4,7 @@ use ruff_db::vendored::VendoredPathBuf;
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::{ProgramSettings, SitePackages};
use crate::ProgramSettings;
/// A test case for the module resolver.
///
@@ -179,7 +179,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
first_party_files,
site_packages_files,
} = self;
TestCaseBuilder {
typeshed_option: typeshed,
target_version,
@@ -196,7 +195,6 @@ impl TestCaseBuilder<UnspecifiedTypeshed> {
site_packages,
target_version,
} = self.with_custom_typeshed(MockedTypeshed::default()).build();
TestCase {
db,
src,
@@ -225,13 +223,13 @@ impl TestCaseBuilder<MockedTypeshed> {
Program::from_settings(
&db,
&ProgramSettings {
ProgramSettings {
target_version,
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: Some(typeshed.clone()),
site_packages: SitePackages::Known(vec![site_packages.clone()]),
site_packages: vec![site_packages.clone()],
},
},
)
@@ -281,11 +279,13 @@ impl TestCaseBuilder<VendoredTypeshed> {
Program::from_settings(
&db,
&ProgramSettings {
ProgramSettings {
target_version,
search_paths: SearchPathSettings {
site_packages: SitePackages::Known(vec![site_packages.clone()]),
..SearchPathSettings::new(src.clone())
extra_paths: vec![],
src_root: src.clone(),
custom_typeshed: None,
site_packages: vec![site_packages.clone()],
},
},
)

View File

@@ -1,6 +1,6 @@
pub use self::vendored::vendored_typeshed_stubs;
pub(super) use self::versions::{
typeshed_versions, vendored_typeshed_versions, TypeshedVersions, TypeshedVersionsParseError,
parse_typeshed_versions, LazyTypeshedVersions, TypeshedVersionsParseError,
TypeshedVersionsQueryResult,
};

View File

@@ -1,3 +1,4 @@
use std::cell::OnceCell;
use std::collections::BTreeMap;
use std::fmt;
use std::num::{NonZeroU16, NonZeroUsize};
@@ -5,12 +6,78 @@ use std::ops::{RangeFrom, RangeInclusive};
use std::str::FromStr;
use once_cell::sync::Lazy;
use ruff_db::system::SystemPath;
use rustc_hash::FxHashMap;
use ruff_db::files::{system_path_to_file, File};
use super::vendored::vendored_typeshed_stubs;
use crate::db::Db;
use crate::module_name::ModuleName;
use crate::{Program, PythonVersion};
use crate::python_version::PythonVersion;
#[derive(Debug)]
pub(crate) struct LazyTypeshedVersions<'db>(OnceCell<&'db TypeshedVersions>);
impl<'db> LazyTypeshedVersions<'db> {
#[must_use]
pub(crate) fn new() -> Self {
Self(OnceCell::new())
}
/// Query whether a module exists at runtime in the stdlib on a certain Python version.
///
/// Simply probing whether a file exists in typeshed is insufficient for this question,
/// as a module in the stdlib may have been added in Python 3.10, but the typeshed stub
/// will still be available (either in a custom typeshed dir or in our vendored copy)
/// even if the user specified Python 3.8 as the target version.
///
/// For top-level modules and packages, the VERSIONS file can always provide an unambiguous answer
/// as to whether the module exists on the specified target version. However, VERSIONS does not
/// provide comprehensive information on all submodules, meaning that this method sometimes
/// returns [`TypeshedVersionsQueryResult::MaybeExists`].
/// See [`TypeshedVersionsQueryResult`] for more details.
#[must_use]
pub(crate) fn query_module(
&self,
db: &'db dyn Db,
module: &ModuleName,
stdlib_root: Option<&SystemPath>,
target_version: PythonVersion,
) -> TypeshedVersionsQueryResult {
let versions = self.0.get_or_init(|| {
let versions_path = if let Some(system_path) = stdlib_root {
system_path.join("VERSIONS")
} else {
return &VENDORED_VERSIONS;
};
let Ok(versions_file) = system_path_to_file(db.upcast(), &versions_path) else {
todo!(
"Still need to figure out how to handle VERSIONS files being deleted \
from custom typeshed directories! Expected a file to exist at {versions_path}"
)
};
// TODO(Alex/Micha): If VERSIONS is invalid,
// this should invalidate not just the specific module resolution we're currently attempting,
// but all type inference that depends on any standard-library types.
// Unwrapping here is not correct...
parse_typeshed_versions(db, versions_file).as_ref().unwrap()
});
versions.query_module(module, target_version)
}
}
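A sketch of the query semantics described in the doc comment above, using a hypothetical VERSIONS snippet; the expectations below are assumptions for illustration (and `query_module` on `TypeshedVersions` is module-private), not assertions about this change:

fn versions_query_sketch() {
    // Hypothetical VERSIONS contents for this example.
    let versions: TypeshedVersions = "asyncio: 3.8-\n"
        .parse()
        .expect("example VERSIONS snippet should parse");

    // A top-level module listed in VERSIONS gets an unambiguous answer.
    let asyncio = ModuleName::new_static("asyncio").unwrap();
    assert_eq!(
        versions.query_module(&asyncio, PythonVersion::PY38),
        TypeshedVersionsQueryResult::Exists
    );

    // A submodule that VERSIONS doesn't list can only be answered conservatively:
    // the stub may or may not exist for the requested target version.
    let futures = ModuleName::new_static("asyncio.futures").unwrap();
    assert_eq!(
        versions.query_module(&futures, PythonVersion::PY38),
        TypeshedVersionsQueryResult::MaybeExists
    );
}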
#[salsa::tracked(return_ref)]
pub(crate) fn parse_typeshed_versions(
db: &dyn Db,
versions_file: File,
) -> Result<TypeshedVersions, TypeshedVersionsParseError> {
// TODO: Handle IO errors
let file_content = versions_file
.read_to_string(db.upcast())
.unwrap_or_default();
file_content.parse()
}
static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
TypeshedVersions::from_str(
@@ -21,14 +88,6 @@ static VENDORED_VERSIONS: Lazy<TypeshedVersions> = Lazy::new(|| {
.unwrap()
});
pub(crate) fn vendored_typeshed_versions() -> &'static TypeshedVersions {
&VENDORED_VERSIONS
}
pub(crate) fn typeshed_versions(db: &dyn Db) -> &TypeshedVersions {
Program::get(db).search_paths(db).typeshed_versions()
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct TypeshedVersionsParseError {
line_number: Option<NonZeroU16>,
@@ -115,7 +174,7 @@ impl TypeshedVersions {
}
#[must_use]
pub(in crate::module_resolver) fn query_module(
fn query_module(
&self,
module: &ModuleName,
target_version: PythonVersion,
@@ -145,7 +204,7 @@ impl TypeshedVersions {
}
}
/// Possible answers [`TypeshedVersions::query_module()`] could give to the question:
/// Possible answers [`LazyTypeshedVersions::query_module()`] could give to the question:
/// "Does this module exist in the stdlib at runtime on a certain target version?"
#[derive(Debug, Copy, PartialEq, Eq, Clone, Hash)]
pub(crate) enum TypeshedVersionsQueryResult {

View File

@@ -3,7 +3,7 @@ use anyhow::Context;
use salsa::Durability;
use salsa::Setter;
use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_db::system::SystemPathBuf;
use crate::module_resolver::SearchPaths;
use crate::Db;
@@ -12,31 +12,33 @@ use crate::Db;
pub struct Program {
pub target_version: PythonVersion,
#[default]
#[return_ref]
pub(crate) search_paths: SearchPaths,
}
impl Program {
pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> {
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
let ProgramSettings {
target_version,
search_paths,
} = settings;
tracing::info!("Target version: Python {target_version}");
tracing::info!("Target version: {target_version}");
let search_paths = SearchPaths::from_settings(db, search_paths)
.with_context(|| "Invalid search path settings")?;
Ok(Program::builder(settings.target_version, search_paths)
Ok(Program::builder(settings.target_version)
.durability(Durability::HIGH)
.search_paths(search_paths)
.new(db))
}
pub fn update_search_paths(
self,
&self,
db: &mut dyn Db,
search_path_settings: &SearchPathSettings,
search_path_settings: SearchPathSettings,
) -> anyhow::Result<()> {
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
@@ -47,20 +49,16 @@ impl Program {
Ok(())
}
pub fn custom_stdlib_search_path(self, db: &dyn Db) -> Option<&SystemPath> {
self.search_paths(db).custom_stdlib()
}
}
#[derive(Clone, Debug, Eq, PartialEq)]
#[derive(Debug, Eq, PartialEq)]
pub struct ProgramSettings {
pub target_version: PythonVersion,
pub search_paths: SearchPathSettings,
}
/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct SearchPathSettings {
/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -76,25 +74,5 @@ pub struct SearchPathSettings {
pub custom_typeshed: Option<SystemPathBuf>,
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub site_packages: SitePackages,
}
impl SearchPathSettings {
pub fn new(src_root: SystemPathBuf) -> Self {
Self {
src_root,
extra_paths: vec![],
custom_typeshed: None,
site_packages: SitePackages::Known(vec![]),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SitePackages {
Derived {
venv_path: SystemPathBuf,
},
/// Resolved site packages paths
Known(Vec<SystemPathBuf>),
pub site_packages: Vec<SystemPathBuf>,
}

View File

@@ -16,21 +16,17 @@ use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::Db;
pub(crate) use self::use_def::UseDefMap;
pub mod ast_ids;
mod builder;
pub(crate) mod constraint;
pub mod definition;
pub mod expression;
pub mod symbol;
mod use_def;
pub(crate) use self::use_def::{
BindingWithConstraints, BindingWithConstraintsIterator, DeclarationsIterator,
};
type SymbolMap = hashbrown::HashMap<ScopedSymbolId, (), ()>;
/// Returns the semantic index for `file`.
@@ -130,7 +126,7 @@ impl<'db> SemanticIndex<'db> {
///
/// Use the Salsa cached [`use_def_map()`] query if you only need the
/// use-def map for a single scope.
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap<'db>> {
self.use_def_maps[scope_id].clone()
}
@@ -157,10 +153,6 @@ impl<'db> SemanticIndex<'db> {
&self.scopes[id]
}
pub(crate) fn scope_ids(&self) -> impl Iterator<Item = ScopeId> {
self.scope_ids_by_scope.iter().copied()
}
/// Returns the id of the parent scope.
pub(crate) fn parent_scope_id(&self, scope_id: FileScopeId) -> Option<FileScopeId> {
let scope = self.scope(scope_id);
@@ -318,29 +310,12 @@ mod tests {
use ruff_text_size::{Ranged, TextRange};
use crate::db::tests::TestDb;
use crate::semantic_index::ast_ids::{HasScopedUseId, ScopedUseId};
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::semantic_index::symbol::{
FileScopeId, Scope, ScopeKind, ScopedSymbolId, SymbolTable,
};
use crate::semantic_index::use_def::UseDefMap;
use crate::semantic_index::ast_ids::HasScopedUseId;
use crate::semantic_index::definition::{DefinitionKind, DefinitionNode};
use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
use crate::Db;
impl UseDefMap<'_> {
fn first_public_binding(&self, symbol: ScopedSymbolId) -> Option<Definition<'_>> {
self.public_bindings(symbol)
.next()
.map(|constrained_binding| constrained_binding.binding)
}
fn first_binding_at_use(&self, use_id: ScopedUseId) -> Option<Definition<'_>> {
self.bindings_at_use(use_id)
.next()
.map(|constrained_binding| constrained_binding.binding)
}
}
struct TestCase {
db: TestDb,
file: File,
@@ -399,8 +374,11 @@ mod tests {
let foo = global_table.symbol_id_by_name("foo").unwrap();
let use_def = use_def_map(&db, scope);
let binding = use_def.first_public_binding(foo).unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Import(_)));
let definition = use_def.public_definition(foo).unwrap();
assert!(matches!(
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::Import(_))
));
}
#[test]
@@ -429,19 +407,22 @@ mod tests {
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { symbol.is_bound() && !symbol.is_used() }),
.is_some_and(|symbol| { symbol.is_defined() && !symbol.is_used() }),
"symbols that are defined get the defined flag"
);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(
let definition = use_def
.public_definition(
global_table
.symbol_id_by_name("foo")
.expect("symbol to exist"),
)
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::ImportFrom(_)));
assert!(matches!(
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::ImportFrom(_))
));
}
#[test]
@@ -454,32 +435,16 @@ mod tests {
assert!(
global_table
.symbol_by_name("foo")
.is_some_and(|symbol| { !symbol.is_bound() && symbol.is_used() }),
"a symbol used but not bound in a scope should have only the used flag"
.is_some_and(|symbol| { !symbol.is_defined() && symbol.is_used() }),
"a symbol used but not defined in a scope should have only the used flag"
);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").expect("symbol exists"))
let definition = use_def
.public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
}
#[test]
fn augmented_assignment() {
let TestCase { db, file } = test_case("x += 1");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(names(&global_table), vec!["x"]);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
assert!(matches!(
binding.kind(&db),
DefinitionKind::AugmentedAssignment(_)
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::Assignment(_))
));
}
@@ -511,10 +476,13 @@ y = 2
assert_eq!(names(&class_table), vec!["x"]);
let use_def = index.use_def_map(class_scope_id);
let binding = use_def
.first_public_binding(class_table.symbol_id_by_name("x").expect("symbol exists"))
let definition = use_def
.public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
assert!(matches!(
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::Assignment(_))
));
}
#[test]
@@ -544,107 +512,17 @@ y = 2
assert_eq!(names(&function_table), vec!["x"]);
let use_def = index.use_def_map(function_scope_id);
let binding = use_def
.first_public_binding(
let definition = use_def
.public_definition(
function_table
.symbol_id_by_name("x")
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Assignment(_)));
}
#[test]
fn function_parameter_symbols() {
let TestCase { db, file } = test_case(
"
def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
pass
",
);
let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file));
assert_eq!(names(&global_table), vec!["str", "int", "f"]);
let [(function_scope_id, _function_scope)] = index
.child_scopes(FileScopeId::global())
.collect::<Vec<_>>()[..]
else {
panic!("Expected a function scope")
};
let function_table = index.symbol_table(function_scope_id);
assert_eq!(
names(&function_table),
vec!["a", "b", "c", "args", "d", "kwargs"],
);
let use_def = index.use_def_map(function_scope_id);
for name in ["a", "b", "c", "d"] {
let binding = use_def
.first_public_binding(
function_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
binding.kind(&db),
DefinitionKind::ParameterWithDefault(_)
));
}
for name in ["args", "kwargs"] {
let binding = use_def
.first_public_binding(
function_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
}
}
#[test]
fn lambda_parameter_symbols() {
let TestCase { db, file } = test_case("lambda a, b, c=1, *args, d=2, **kwargs: None");
let index = semantic_index(&db, file);
let global_table = symbol_table(&db, global_scope(&db, file));
assert!(names(&global_table).is_empty());
let [(lambda_scope_id, _lambda_scope)] = index
.child_scopes(FileScopeId::global())
.collect::<Vec<_>>()[..]
else {
panic!("Expected a lambda scope")
};
let lambda_table = index.symbol_table(lambda_scope_id);
assert_eq!(
names(&lambda_table),
vec!["a", "b", "c", "args", "d", "kwargs"],
);
let use_def = index.use_def_map(lambda_scope_id);
for name in ["a", "b", "c", "d"] {
let binding = use_def
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
.unwrap();
assert!(matches!(
binding.kind(&db),
DefinitionKind::ParameterWithDefault(_)
));
}
for name in ["args", "kwargs"] {
let binding = use_def
.first_public_binding(lambda_table.symbol_id_by_name(name).expect("symbol exists"))
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Parameter(_)));
}
assert!(matches!(
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::Assignment(_))
));
}
/// Test case to validate that the comprehension scope is correctly identified and that the target
@@ -653,7 +531,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
fn comprehension_scope() {
let TestCase { db, file } = test_case(
"
[x for x, y in iter1]
[x for x in iter1]
",
);
@@ -677,22 +555,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
assert_eq!(names(&comprehension_symbol_table), vec!["x", "y"]);
let use_def = index.use_def_map(comprehension_scope_id);
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(
comprehension_symbol_table
.symbol_id_by_name(name)
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(
binding.kind(&db),
DefinitionKind::Comprehension(_)
));
}
assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
}
/// Test case to validate that the `x` variable used in the comprehension is referencing the
@@ -728,12 +591,14 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
let element_use_id =
element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
let binding = use_def.first_binding_at_use(element_use_id).unwrap();
let DefinitionKind::Comprehension(comprehension) = binding.kind(&db) else {
let definition = use_def.definition_for_use(element_use_id).unwrap();
let DefinitionKind::Node(DefinitionNode::Comprehension(comprehension)) =
definition.kind(&db)
else {
panic!("expected generator definition")
};
let target = comprehension.target();
let name = target.id().as_str();
let ast::Comprehension { target, .. } = comprehension.node();
let name = target.as_name_expr().unwrap().id().as_str();
assert_eq!(name, "x");
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
@@ -743,7 +608,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
/// the outer comprehension scope and the variables are correctly defined in the respective
/// scopes.
#[test]
fn nested_generators() {
fn nested_comprehensions() {
let TestCase { db, file } = test_case(
"
[{x for x in iter2} for y in iter1]
@@ -776,7 +641,7 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
.child_scopes(comprehension_scope_id)
.collect::<Vec<_>>()[..]
else {
panic!("expected one inner generator scope")
panic!("expected one inner comprehension scope")
};
assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
@@ -792,52 +657,6 @@ def f(a: str, /, b: str, c: int = 1, *args, d: int = 2, **kwargs):
assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
}
#[test]
fn with_item_definition() {
let TestCase { db, file } = test_case(
"
with item1 as x, item2 as y:
pass
",
);
let index = semantic_index(&db, file);
let global_table = index.symbol_table(FileScopeId::global());
assert_eq!(names(&global_table), vec!["item1", "x", "item2", "y"]);
let use_def = index.use_def_map(FileScopeId::global());
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
}
}
#[test]
fn with_item_unpacked_definition() {
let TestCase { db, file } = test_case(
"
with context() as (x, y):
pass
",
);
let index = semantic_index(&db, file);
let global_table = index.symbol_table(FileScopeId::global());
assert_eq!(names(&global_table), vec!["context", "x", "y"]);
let use_def = index.use_def_map(FileScopeId::global());
for name in ["x", "y"] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
assert!(matches!(binding.kind(&db), DefinitionKind::WithItem(_)));
}
}
#[test]
fn dupes() {
let TestCase { db, file } = test_case(
@@ -871,14 +690,17 @@ def func():
assert_eq!(names(&func2_table), vec!["y"]);
let use_def = index.use_def_map(FileScopeId::global());
let binding = use_def
.first_public_binding(
let definition = use_def
.public_definition(
global_table
.symbol_id_by_name("func")
.expect("symbol exists"),
)
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::Function(_)));
assert!(matches!(
definition.kind(&db),
DefinitionKind::Node(DefinitionNode::Function(_))
));
}
#[test]
@@ -946,7 +768,7 @@ class C[T]:
assert!(
ann_table
.symbol_by_name("T")
.is_some_and(|s| s.is_bound() && !s.is_used()),
.is_some_and(|s| s.is_defined() && !s.is_used()),
"type parameters are defined by the scope that introduces them"
);
@@ -978,8 +800,9 @@ class C[T]:
};
let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
let use_def = use_def_map(&db, scope);
let binding = use_def.first_binding_at_use(x_use_id).unwrap();
let DefinitionKind::Assignment(assignment) = binding.kind(&db) else {
let definition = use_def.definition_for_use(x_use_id).unwrap();
let DefinitionKind::Node(DefinitionNode::Assignment(assignment)) = definition.kind(&db)
else {
panic!("should be an assignment definition")
};
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
@@ -1069,136 +892,4 @@ def x():
vec!["bar", "foo", "Test", "<module>"]
);
}
#[test]
fn match_stmt() {
let TestCase { db, file } = test_case(
"
match subject:
case a: ...
case [b, c, *d]: ...
case e as f: ...
case {'x': g, **h}: ...
case Foo(i, z=j): ...
case k | l: ...
case _: ...
",
);
let global_scope_id = global_scope(&db, file);
let global_table = symbol_table(&db, global_scope_id);
assert!(global_table.symbol_by_name("Foo").unwrap().is_used());
assert_eq!(
names(&global_table),
vec!["subject", "a", "b", "c", "d", "e", "f", "g", "h", "Foo", "i", "j", "k", "l"]
);
let use_def = use_def_map(&db, global_scope_id);
for (name, expected_index) in [
("a", 0),
("b", 0),
("c", 1),
("d", 2),
("e", 0),
("f", 1),
("g", 0),
("h", 1),
("i", 0),
("j", 1),
("k", 0),
("l", 1),
] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
assert_eq!(pattern.index(), expected_index);
} else {
panic!("Expected match pattern definition for {name}");
}
}
}
#[test]
fn nested_match_case() {
let TestCase { db, file } = test_case(
"
match 1:
case first:
match 2:
case second:
pass
",
);
let global_scope_id = global_scope(&db, file);
let global_table = symbol_table(&db, global_scope_id);
assert_eq!(names(&global_table), vec!["first", "second"]);
let use_def = use_def_map(&db, global_scope_id);
for (name, expected_index) in [("first", 0), ("second", 0)] {
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name(name).expect("symbol exists"))
.expect("Expected with item definition for {name}");
if let DefinitionKind::MatchPattern(pattern) = binding.kind(&db) {
assert_eq!(pattern.index(), expected_index);
} else {
panic!("Expected match pattern definition for {name}");
}
}
}
#[test]
fn for_loops_single_assignment() {
let TestCase { db, file } = test_case("for x in a: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x"]);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
}
#[test]
fn for_loops_simple_unpacking() {
let TestCase { db, file } = test_case("for (x, y) in a: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["a", "x", "y"]);
let use_def = use_def_map(&db, scope);
let x_binding = use_def
.first_public_binding(global_table.symbol_id_by_name("x").unwrap())
.unwrap();
let y_binding = use_def
.first_public_binding(global_table.symbol_id_by_name("y").unwrap())
.unwrap();
assert!(matches!(x_binding.kind(&db), DefinitionKind::For(_)));
assert!(matches!(y_binding.kind(&db), DefinitionKind::For(_)));
}
#[test]
fn for_loops_complex_unpacking() {
let TestCase { db, file } = test_case("for [((a,) b), (c, d)] in e: pass");
let scope = global_scope(&db, file);
let global_table = symbol_table(&db, scope);
assert_eq!(&names(&global_table), &["e", "a", "b", "c", "d"]);
let use_def = use_def_map(&db, scope);
let binding = use_def
.first_public_binding(global_table.symbol_id_by_name("a").unwrap())
.unwrap();
assert!(matches!(binding.kind(&db), DefinitionKind::For(_)));
}
}

View File

@@ -7,31 +7,24 @@ use ruff_db::parsed::ParsedModule;
use ruff_index::IndexVec;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::{walk_expr, walk_pattern, walk_stmt, Visitor};
use ruff_python_ast::AnyParameterRef;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
DefinitionNodeRef, ForStmtDefinitionNodeRef, ImportFromDefinitionNodeRef,
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionKind,
DefinitionNodeKey, DefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId,
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
SymbolTableBuilder,
};
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
use crate::semantic_index::use_def::{BasicBlockId, UseDefMapBuilder};
use crate::semantic_index::SemanticIndex;
use crate::Db;
use super::constraint::{Constraint, PatternConstraint};
use super::definition::{
DefinitionCategory, ExceptHandlerDefinitionNodeRef, MatchPatternDefinitionNodeRef,
WithItemDefinitionNodeRef,
};
pub(super) struct SemanticIndexBuilder<'db> {
// Builder state
db: &'db dyn Db,
@@ -40,10 +33,8 @@ pub(super) struct SemanticIndexBuilder<'db> {
scope_stack: Vec<FileScopeId>,
/// The assignment we're currently visiting.
current_assignment: Option<CurrentAssignment<'db>>,
/// The match case we're currently visiting.
current_match_case: Option<CurrentMatchCase<'db>>,
/// Flow states at each `break` in the current loop.
loop_break_states: Vec<FlowSnapshot>,
/// Basic block ending at each `break` in the current loop.
loop_breaks: Vec<BasicBlockId>,
// Semantic Index fields
scopes: IndexVec<FileScopeId, Scope>,
@@ -65,8 +56,7 @@ impl<'db> SemanticIndexBuilder<'db> {
module: parsed,
scope_stack: Vec::new(),
current_assignment: None,
current_match_case: None,
loop_break_states: vec![],
loop_breaks: vec![],
scopes: IndexVec::new(),
symbol_tables: IndexVec::new(),
@@ -108,7 +98,8 @@ impl<'db> SemanticIndexBuilder<'db> {
let file_scope_id = self.scopes.push(scope);
self.symbol_tables.push(SymbolTableBuilder::new());
self.use_def_maps.push(UseDefMapBuilder::new());
self.use_def_maps
.push(UseDefMapBuilder::new(self.db, self.file, file_scope_id));
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
#[allow(unsafe_code)]
@@ -142,47 +133,50 @@ impl<'db> SemanticIndexBuilder<'db> {
&mut self.symbol_tables[scope_id]
}
fn current_use_def_map_mut(&mut self) -> &mut UseDefMapBuilder<'db> {
fn current_use_def_map(&mut self) -> &mut UseDefMapBuilder<'db> {
let scope_id = self.current_scope();
&mut self.use_def_maps[scope_id]
}
fn current_use_def_map(&self) -> &UseDefMapBuilder<'db> {
let scope_id = self.current_scope();
&self.use_def_maps[scope_id]
}
fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
let scope_id = self.current_scope();
&mut self.ast_ids[scope_id]
}
fn flow_snapshot(&self) -> FlowSnapshot {
self.current_use_def_map().snapshot()
/// Start a new basic block and return the previous block's ID.
fn next_block(&mut self) -> BasicBlockId {
self.current_use_def_map().next_block(/* sealed */ true)
}
fn flow_restore(&mut self, state: FlowSnapshot) {
self.current_use_def_map_mut().restore(state);
/// Start a new unsealed basic block and return the previous block's ID.
fn next_block_unsealed(&mut self) -> BasicBlockId {
self.current_use_def_map().next_block(/* sealed */ false)
}
fn flow_merge(&mut self, state: FlowSnapshot) {
self.current_use_def_map_mut().merge(state);
/// Seal an unsealed basic block.
fn seal_block(&mut self) {
self.current_use_def_map().seal_current_block();
}
fn add_symbol(&mut self, name: Name) -> ScopedSymbolId {
let (symbol_id, added) = self.current_symbol_table().add_symbol(name);
if added {
self.current_use_def_map_mut().add_symbol(symbol_id);
}
symbol_id
/// Start a new basic block with the given block as predecessor.
fn new_block_from(&mut self, predecessor: BasicBlockId) {
self.current_use_def_map()
.new_block_from(predecessor, /* sealed */ true);
}
fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
self.current_symbol_table().mark_symbol_bound(id);
/// Add a predecessor to the current block.
fn merge_block(&mut self, predecessor: BasicBlockId) {
self.current_use_def_map().merge_block(predecessor);
}
fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
self.current_symbol_table().mark_symbol_used(id);
/// Add predecessors to the current block.
fn merge_blocks(&mut self, predecessors: Vec<BasicBlockId>) {
self.current_use_def_map().merge_blocks(predecessors);
}
fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
let symbol_table = self.current_symbol_table();
symbol_table.add_or_update_symbol(name, flags)
}
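// How these primitives compose (a sketch mirroring the `if`/`else` expression visitor
// below; the call sequence is illustrative, not a specification):
//
//     let pre_if = self.next_block();        // block ending just before the `if`
//     /* visit the `if` body */
//     let post_body = self.next_block();     // block ending at the end of the body
//     self.new_block_from(pre_if);           // the `else` branch starts from pre_if
//     /* visit the `else` body */
//     self.next_block_unsealed();            // join block; predecessors still arriving
//     self.merge_block(post_body);
//     self.seal_block();                     // all predecessors known; phis can resolve
//
// "Sealed" is assumed here to carry the usual on-the-fly SSA-construction meaning: all
// predecessors of the block are known, so phi operands can be filled in immediately
// rather than deferred until `seal_current_block`.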
fn add_definition<'a>(
@@ -191,76 +185,27 @@ impl<'db> SemanticIndexBuilder<'db> {
definition_node: impl Into<DefinitionNodeRef<'a>>,
) -> Definition<'db> {
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
#[allow(unsafe_code)]
// SAFETY: `definition_node` is guaranteed to be a child of `self.module`
let kind = unsafe { definition_node.into_owned(self.module.clone()) };
let category = kind.category();
let definition = Definition::new(
self.db,
self.file,
self.current_scope(),
symbol,
kind,
#[allow(unsafe_code)]
DefinitionKind::Node(unsafe { definition_node.into_owned(self.module.clone()) }),
countme::Count::default(),
);
let existing_definition = self
.definitions_by_node
self.definitions_by_node
.insert(definition_node.key(), definition);
debug_assert_eq!(existing_definition, None);
if category.is_binding() {
self.mark_symbol_bound(symbol);
}
let use_def = self.current_use_def_map_mut();
match category {
DefinitionCategory::DeclarationAndBinding => {
use_def.record_declaration_and_binding(symbol, definition);
}
DefinitionCategory::Declaration => use_def.record_declaration(symbol, definition),
DefinitionCategory::Binding => use_def.record_binding(symbol, definition),
}
self.current_use_def_map()
.record_definition(symbol, definition);
definition
}
fn add_expression_constraint(&mut self, constraint_node: &ast::Expr) -> Expression<'db> {
let expression = self.add_standalone_expression(constraint_node);
self.current_use_def_map_mut()
.record_constraint(Constraint::Expression(expression));
expression
}
fn add_pattern_constraint(
&mut self,
subject: &ast::Expr,
pattern: &ast::Pattern,
) -> PatternConstraint<'db> {
#[allow(unsafe_code)]
let (subject, pattern) = unsafe {
(
AstNodeRef::new(self.module.clone(), subject),
AstNodeRef::new(self.module.clone(), pattern),
)
};
let pattern_constraint = PatternConstraint::new(
self.db,
self.file,
self.current_scope(),
subject,
pattern,
countme::Count::default(),
);
self.current_use_def_map_mut()
.record_constraint(Constraint::Pattern(pattern_constraint));
pattern_constraint
}
/// Record an expression that needs to be a Salsa ingredient, because we need to infer its type
/// standalone (type narrowing tests, RHS of an assignment.)
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) -> Expression<'db> {
fn add_standalone_expression(&mut self, expression_node: &ast::Expr) {
let expression = Expression::new(
self.db,
self.file,
@@ -273,7 +218,6 @@ impl<'db> SemanticIndexBuilder<'db> {
);
self.expressions_by_node
.insert(expression_node.into(), expression);
expression
}
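// For example (illustrative), in `x = len(y)` the right-hand side is recorded as a
// standalone `Expression` so its type can be inferred on its own and then attached to
// the definition of `x`; `if` tests recorded for narrowing use the same mechanism.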
fn with_type_params(
@@ -302,13 +246,10 @@ impl<'db> SemanticIndexBuilder<'db> {
..
}) => (name, &None, default),
};
let symbol = self.add_symbol(name.id.clone());
// TODO create Definition for PEP 695 typevars
// note that the "bound" on the typevar is a totally different thing than whether
// or not a name is "bound" by a typevar declaration; the latter is always true.
self.mark_symbol_bound(symbol);
if let Some(bounds) = bound {
self.visit_expr(bounds);
// TODO create Definition for typevars
self.add_or_update_symbol(name.id.clone(), SymbolFlags::IS_DEFINED);
if let Some(bound) = bound {
self.visit_expr(bound);
}
if let Some(default) = default {
self.visit_expr(default);
@@ -325,23 +266,11 @@ impl<'db> SemanticIndexBuilder<'db> {
nested_scope
}
/// This method does several things:
/// - It pushes a new scope onto the stack for visiting
/// a list/dict/set comprehension or generator expression
/// - Inside that scope, it visits a list of [`Comprehension`] nodes,
/// assumed to be the "generators" that compose a comprehension
/// (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`).
/// - Inside that scope, it also calls a closure for visiting the outer `elt`
/// of a list/dict/set comprehension or generator expression
/// - It then pops the new scope off the stack
/// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
/// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
///
/// [`Comprehension`]: ast::Comprehension
fn with_generators_scope(
&mut self,
scope: NodeWithScopeRef,
generators: &'db [ast::Comprehension],
visit_outer_elt: impl FnOnce(&mut Self),
) {
fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
let mut generators_iter = generators.iter();
let Some(generator) = generators_iter.next() else {
@@ -350,7 +279,6 @@ impl<'db> SemanticIndexBuilder<'db> {
// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
// nodes are evaluated in the inner scope.
self.add_standalone_expression(&generator.iter);
self.visit_expr(&generator.iter);
self.push_scope(scope);
@@ -366,7 +294,6 @@ impl<'db> SemanticIndexBuilder<'db> {
}
for generator in generators_iter {
self.add_standalone_expression(&generator.iter);
self.visit_expr(&generator.iter);
self.current_assignment = Some(CurrentAssignment::Comprehension {
@@ -380,26 +307,6 @@ impl<'db> SemanticIndexBuilder<'db> {
self.visit_expr(expr);
}
}
visit_outer_elt(self);
self.pop_scope();
}
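// Scoping illustration (example source only, not from this change): in
//
//     [x + y for x in iter1 for y in x]
//
// `iter1` is evaluated in the enclosing scope, while the targets `x`/`y` and every
// subsequent iterable are evaluated inside the comprehension's own scope; that is why
// only the first generator's `iter` is visited before `push_scope` above.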
fn declare_parameter(&mut self, parameter: AnyParameterRef) {
let symbol = self.add_symbol(parameter.name().id().clone());
let definition = self.add_definition(symbol, parameter);
if let AnyParameterRef::NonVariadic(with_default) = parameter {
// Insert a mapping from the parameter to the same definition.
// This ensures that calling `HasTy::ty` on the inner parameter returns
// a valid type (and doesn't panic)
let existing_definition = self.definitions_by_node.insert(
DefinitionNodeRef::from(AnyParameterRef::Variadic(&with_default.parameter)).key(),
definition,
);
debug_assert_eq!(existing_definition, None);
}
}
pub(super) fn build(mut self) -> SemanticIndex<'db> {
@@ -465,6 +372,10 @@ where
self.visit_decorator(decorator);
}
let symbol = self
.add_or_update_symbol(function_def.name.id.clone(), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, function_def);
self.with_type_params(
NodeWithScopeRef::FunctionTypeParameters(function_def),
function_def.type_params.as_deref(),
@@ -475,37 +386,18 @@ where
}
builder.push_scope(NodeWithScopeRef::Function(function_def));
// Add symbols and definitions for the parameters to the function scope.
for parameter in &*function_def.parameters {
builder.declare_parameter(parameter);
}
builder.visit_body(&function_def.body);
builder.pop_scope()
},
);
// The default values of the parameters need to be evaluated in the
// enclosing scope.
for default in function_def
.parameters
.iter_non_variadic_params()
.filter_map(|param| param.default.as_deref())
{
self.visit_expr(default);
}
// The symbol for the function name itself has to be evaluated
// at the end to match the runtime evaluation of parameter defaults
// and return-type annotations.
let symbol = self.add_symbol(function_def.name.id.clone());
self.add_definition(symbol, function_def);
}
ast::Stmt::ClassDef(class) => {
for decorator in &class.decorator_list {
self.visit_decorator(decorator);
}
let symbol = self.add_symbol(class.name.id.clone());
let symbol =
self.add_or_update_symbol(class.name.id.clone(), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, class);
self.with_type_params(
@@ -531,7 +423,7 @@ where
Name::new(alias.name.id.split('.').next().unwrap())
};
let symbol = self.add_symbol(symbol_name);
let symbol = self.add_or_update_symbol(symbol_name, SymbolFlags::IS_DEFINED);
self.add_definition(symbol, alias);
}
}
@@ -543,7 +435,8 @@ where
&alias.name.id
};
let symbol = self.add_symbol(symbol_name.clone());
let symbol =
self.add_or_update_symbol(symbol_name.clone(), SymbolFlags::IS_DEFINED);
self.add_definition(symbol, ImportFromDefinitionNodeRef { node, alias_index });
}
}
@@ -559,6 +452,7 @@ where
}
ast::Stmt::AnnAssign(node) => {
debug_assert!(self.current_assignment.is_none());
// TODO deferred annotation visiting
self.visit_expr(&node.annotation);
if let Some(value) = &node.value {
self.visit_expr(value);
@@ -567,38 +461,21 @@ where
self.visit_expr(&node.target);
self.current_assignment = None;
}
ast::Stmt::AugAssign(
aug_assign @ ast::StmtAugAssign {
range: _,
target,
op: _,
value,
},
) => {
debug_assert!(self.current_assignment.is_none());
self.visit_expr(value);
self.current_assignment = Some(aug_assign.into());
self.visit_expr(target);
self.current_assignment = None;
}
ast::Stmt::If(node) => {
self.visit_expr(&node.test);
let pre_if = self.flow_snapshot();
self.add_expression_constraint(&node.test);
let pre_if = self.next_block();
self.visit_body(&node.body);
let mut post_clauses: Vec<FlowSnapshot> = vec![];
let mut post_clauses: Vec<BasicBlockId> = vec![];
for clause in &node.elif_else_clauses {
// snapshot after every block except the last; the last one will just become
// the state that we merge the other snapshots into
post_clauses.push(self.flow_snapshot());
post_clauses.push(self.next_block());
// we can only take an elif/else branch if none of the previous ones were
// taken, so the block entry state is always `pre_if`
self.flow_restore(pre_if.clone());
self.new_block_from(pre_if);
self.visit_elif_else_clause(clause);
}
for post_clause_state in post_clauses {
self.flow_merge(post_clause_state);
}
self.next_block_unsealed();
let has_else = node
.elif_else_clauses
.last()
@@ -606,173 +483,39 @@ where
if !has_else {
// if there's no else clause, then it's possible we took none of the branches,
// and the pre_if state can reach here
self.flow_merge(pre_if);
self.merge_block(pre_if);
}
self.merge_blocks(post_clauses);
self.seal_block();
}
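// For intuition (example source only):
//
//     if flag:
//         x = 1
//     elif other:
//         x = 2
//     # no else
//
// each clause ends in its own block (collected in `post_clauses`), and because there
// is no `else`, `pre_if` is merged in as well: after the statement, `x` may be bound
// by either branch or not bound at all, which is exactly what a phi at the join point
// has to capture.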
ast::Stmt::While(ast::StmtWhile {
test,
body,
orelse,
range: _,
}) => {
self.visit_expr(test);
ast::Stmt::While(node) => {
self.visit_expr(&node.test);
let pre_loop = self.flow_snapshot();
let pre_loop = self.next_block();
// Save aside any break states from an outer loop
let saved_break_states = std::mem::take(&mut self.loop_break_states);
// TODO: definitions created inside the body should be fully visible
// to other statements/expressions inside the body --Alex/Carl
self.visit_body(body);
let saved_break_states = std::mem::take(&mut self.loop_breaks);
self.visit_body(&node.body);
// Get the break states from the body of this loop, and restore the saved outer
// ones.
let break_states =
std::mem::replace(&mut self.loop_break_states, saved_break_states);
let break_states = std::mem::replace(&mut self.loop_breaks, saved_break_states);
// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.visit_body(orelse);
self.next_block_unsealed();
self.merge_block(pre_loop);
self.seal_block();
self.visit_body(&node.orelse);
// Breaking out of a while loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(break_state);
}
}
ast::Stmt::With(ast::StmtWith { items, body, .. }) => {
for item in items {
self.visit_expr(&item.context_expr);
if let Some(optional_vars) = item.optional_vars.as_deref() {
self.add_standalone_expression(&item.context_expr);
self.current_assignment = Some(item.into());
self.visit_expr(optional_vars);
self.current_assignment = None;
}
}
self.visit_body(body);
self.next_block_unsealed();
self.merge_blocks(break_states);
self.seal_block();
}
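// Illustration (example source only):
//
//     while cond:
//         x = 1
//         break
//     else:
//         x = 2
//
// the block ending at `break` is stashed in `loop_breaks` and merged only after the
// `else` body, since breaking out skips the `else` clause; the pre-loop block is
// merged before `else` because the body may never execute.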
ast::Stmt::Break(_) => {
self.loop_break_states.push(self.flow_snapshot());
}
ast::Stmt::For(
for_stmt @ ast::StmtFor {
range: _,
is_async: _,
target,
iter,
body,
orelse,
},
) => {
self.add_standalone_expression(iter);
self.visit_expr(iter);
let pre_loop = self.flow_snapshot();
let saved_break_states = std::mem::take(&mut self.loop_break_states);
debug_assert!(self.current_assignment.is_none());
self.current_assignment = Some(for_stmt.into());
self.visit_expr(target);
self.current_assignment = None;
// TODO: Definitions created by loop variables
// (and definitions created inside the body)
// are fully visible to other statements/expressions inside the body --Alex/Carl
self.visit_body(body);
let break_states =
std::mem::replace(&mut self.loop_break_states, saved_break_states);
// We may execute the `else` clause without ever executing the body, so merge in
// the pre-loop state before visiting `else`.
self.flow_merge(pre_loop);
self.visit_body(orelse);
// Breaking out of a `for` loop bypasses the `else` clause, so merge in the break
// states after visiting `else`.
for break_state in break_states {
self.flow_merge(break_state);
}
}
ast::Stmt::Match(ast::StmtMatch {
subject,
cases,
range: _,
}) => {
self.add_standalone_expression(subject);
self.visit_expr(subject);
let after_subject = self.flow_snapshot();
let Some((first, remaining)) = cases.split_first() else {
return;
};
self.add_pattern_constraint(subject, &first.pattern);
self.visit_match_case(first);
let mut post_case_snapshots = vec![];
for case in remaining {
post_case_snapshots.push(self.flow_snapshot());
self.flow_restore(after_subject.clone());
self.add_pattern_constraint(subject, &case.pattern);
self.visit_match_case(case);
}
for post_clause_state in post_case_snapshots {
self.flow_merge(post_clause_state);
}
if !cases
.last()
.is_some_and(|case| case.guard.is_none() && case.pattern.is_wildcard())
{
self.flow_merge(after_subject);
}
}
ast::Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
is_star,
range: _,
}) => {
self.visit_body(body);
for except_handler in handlers {
let ast::ExceptHandler::ExceptHandler(except_handler) = except_handler;
let ast::ExceptHandlerExceptHandler {
name: symbol_name,
type_: handled_exceptions,
body: handler_body,
range: _,
} = except_handler;
if let Some(handled_exceptions) = handled_exceptions {
self.visit_expr(handled_exceptions);
}
// If `handled_exceptions` above was `None`, it's something like `except as e:`,
// which is invalid syntax. However, it's still pretty obvious here that the user
// *wanted* `e` to be bound, so we should still create a definition here nonetheless.
if let Some(symbol_name) = symbol_name {
let symbol = self.add_symbol(symbol_name.id.clone());
self.add_definition(
symbol,
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler: except_handler,
is_star: *is_star,
}),
);
}
self.visit_body(handler_body);
}
self.visit_body(orelse);
self.visit_body(finalbody);
let block_id = self.next_block();
self.loop_breaks.push(block_id);
}
_ => {
walk_stmt(self, stmt);
@@ -787,18 +530,14 @@ where
match expr {
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
let (is_use, is_definition) = match (ctx, self.current_assignment) {
(ast::ExprContext::Store, Some(CurrentAssignment::AugAssign(_))) => {
// For augmented assignment, the target expression is also used.
(true, true)
}
(ast::ExprContext::Load, _) => (true, false),
(ast::ExprContext::Store, _) => (false, true),
(ast::ExprContext::Del, _) => (false, true),
(ast::ExprContext::Invalid, _) => (false, false),
let flags = match ctx {
ast::ExprContext::Load => SymbolFlags::IS_USED,
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
ast::ExprContext::Del => SymbolFlags::IS_DEFINED,
ast::ExprContext::Invalid => SymbolFlags::empty(),
};
let symbol = self.add_symbol(id.clone());
if is_definition {
let symbol = self.add_or_update_symbol(id.clone(), flags);
if flags.contains(SymbolFlags::IS_DEFINED) {
match self.current_assignment {
Some(CurrentAssignment::Assign(assignment)) => {
self.add_definition(
@@ -812,19 +551,6 @@ where
Some(CurrentAssignment::AnnAssign(ann_assign)) => {
self.add_definition(symbol, ann_assign);
}
Some(CurrentAssignment::AugAssign(aug_assign)) => {
self.add_definition(symbol, aug_assign);
}
Some(CurrentAssignment::For(node)) => {
self.add_definition(
symbol,
ForStmtDefinitionNodeRef {
iterable: &node.iter,
target: name_node,
is_async: node.is_async,
},
);
}
Some(CurrentAssignment::Named(named)) => {
// TODO(dhruvmanila): If the current scope is a comprehension, then the
// named expression is implicitly nonlocal. This is yet to be
@@ -834,66 +560,34 @@ where
Some(CurrentAssignment::Comprehension { node, first }) => {
self.add_definition(
symbol,
ComprehensionDefinitionNodeRef {
iterable: &node.iter,
target: name_node,
first,
is_async: node.is_async,
},
);
}
Some(CurrentAssignment::WithItem(with_item)) => {
self.add_definition(
symbol,
WithItemDefinitionNodeRef {
node: with_item,
target: name_node,
},
ComprehensionDefinitionNodeRef { node, first },
);
}
None => {}
}
}
if is_use {
self.mark_symbol_used(symbol);
if flags.contains(SymbolFlags::IS_USED) {
let use_id = self.current_ast_ids().record_use(expr);
self.current_use_def_map_mut().record_use(symbol, use_id);
self.current_use_def_map().record_use(symbol, use_id);
}
walk_expr(self, expr);
}
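// Concretely (example source only): visiting `x = y` records a use of `y` (a `Load`
// context) in the current block and a definition of `x` (a `Store` context) routed
// through `CurrentAssignment::Assign`; for augmented assignment (`x += 1`) the target
// counts as both a use and a definition, per the match above.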
ast::Expr::Named(node) => {
debug_assert!(self.current_assignment.is_none());
// TODO walrus in comprehensions is implicitly nonlocal
self.visit_expr(&node.value);
self.current_assignment = Some(node.into());
// TODO walrus in comprehensions is implicitly nonlocal
self.visit_expr(&node.target);
self.current_assignment = None;
self.visit_expr(&node.value);
}
ast::Expr::Lambda(lambda) => {
if let Some(parameters) = &lambda.parameters {
// The default values of the parameters need to be evaluated in the
// enclosing scope.
for default in parameters
.iter_non_variadic_params()
.filter_map(|param| param.default.as_deref())
{
self.visit_expr(default);
}
self.visit_parameters(parameters);
}
self.push_scope(NodeWithScopeRef::Lambda(lambda));
// Add symbols and definitions for the parameters to the lambda scope.
if let Some(parameters) = &lambda.parameters {
for parameter in &**parameters {
self.declare_parameter(parameter);
}
}
self.visit_expr(lambda.body.as_ref());
self.pop_scope();
}
ast::Expr::If(ast::ExprIf {
body, test, orelse, ..
@@ -902,45 +596,44 @@ where
// AST inspection, so we can't simplify here, need to record test expression for
// later checking)
self.visit_expr(test);
let pre_if = self.flow_snapshot();
let pre_if = self.next_block();
self.visit_expr(body);
let post_body = self.flow_snapshot();
self.flow_restore(pre_if);
let post_body = self.next_block();
self.new_block_from(pre_if);
self.visit_expr(orelse);
self.flow_merge(post_body);
self.next_block_unsealed();
self.merge_block(post_body);
self.seal_block();
}
ast::Expr::ListComp(
list_comprehension @ ast::ExprListComp {
elt, generators, ..
},
) => {
self.with_generators_scope(
self.visit_generators(
NodeWithScopeRef::ListComprehension(list_comprehension),
generators,
|builder| builder.visit_expr(elt),
);
self.visit_expr(elt);
}
ast::Expr::SetComp(
set_comprehension @ ast::ExprSetComp {
elt, generators, ..
},
) => {
self.with_generators_scope(
self.visit_generators(
NodeWithScopeRef::SetComprehension(set_comprehension),
generators,
|builder| builder.visit_expr(elt),
);
self.visit_expr(elt);
}
ast::Expr::Generator(
generator @ ast::ExprGenerator {
elt, generators, ..
},
) => {
self.with_generators_scope(
NodeWithScopeRef::GeneratorExpression(generator),
generators,
|builder| builder.visit_expr(elt),
);
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
self.visit_expr(elt);
}
ast::Expr::DictComp(
dict_comprehension @ ast::ExprDictComp {
@@ -950,96 +643,40 @@ where
..
},
) => {
self.with_generators_scope(
self.visit_generators(
NodeWithScopeRef::DictComprehension(dict_comprehension),
generators,
|builder| {
builder.visit_expr(key);
builder.visit_expr(value);
},
);
self.visit_expr(key);
self.visit_expr(value);
}
_ => {
walk_expr(self, expr);
}
}
}
fn visit_parameters(&mut self, parameters: &'ast ast::Parameters) {
// Intentionally avoid walking default expressions, as we handle them in the enclosing
// scope.
for parameter in parameters.iter().map(ast::AnyParameterRef::as_parameter) {
self.visit_parameter(parameter);
if matches!(
expr,
ast::Expr::Lambda(_)
| ast::Expr::ListComp(_)
| ast::Expr::SetComp(_)
| ast::Expr::Generator(_)
| ast::Expr::DictComp(_)
) {
self.pop_scope();
}
}
fn visit_match_case(&mut self, match_case: &'ast ast::MatchCase) {
debug_assert!(self.current_match_case.is_none());
self.current_match_case = Some(CurrentMatchCase::new(&match_case.pattern));
self.visit_pattern(&match_case.pattern);
self.current_match_case = None;
if let Some(expr) = &match_case.guard {
self.visit_expr(expr);
}
self.visit_body(&match_case.body);
}
fn visit_pattern(&mut self, pattern: &'ast ast::Pattern) {
if let ast::Pattern::MatchStar(ast::PatternMatchStar {
name: Some(name),
range: _,
}) = pattern
{
let symbol = self.add_symbol(name.id().clone());
let state = self.current_match_case.as_ref().unwrap();
self.add_definition(
symbol,
MatchPatternDefinitionNodeRef {
pattern: state.pattern,
identifier: name,
index: state.index,
},
);
}
walk_pattern(self, pattern);
if let ast::Pattern::MatchAs(ast::PatternMatchAs {
name: Some(name), ..
})
| ast::Pattern::MatchMapping(ast::PatternMatchMapping {
rest: Some(name), ..
}) = pattern
{
let symbol = self.add_symbol(name.id().clone());
let state = self.current_match_case.as_ref().unwrap();
self.add_definition(
symbol,
MatchPatternDefinitionNodeRef {
pattern: state.pattern,
identifier: name,
index: state.index,
},
);
}
self.current_match_case.as_mut().unwrap().index += 1;
}
}
#[derive(Copy, Clone, Debug)]
enum CurrentAssignment<'a> {
Assign(&'a ast::StmtAssign),
AnnAssign(&'a ast::StmtAnnAssign),
AugAssign(&'a ast::StmtAugAssign),
For(&'a ast::StmtFor),
Named(&'a ast::ExprNamed),
Comprehension {
node: &'a ast::Comprehension,
first: bool,
},
WithItem(&'a ast::WithItem),
}
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
@@ -1054,50 +691,8 @@ impl<'a> From<&'a ast::StmtAnnAssign> for CurrentAssignment<'a> {
}
}
impl<'a> From<&'a ast::StmtAugAssign> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtAugAssign) -> Self {
Self::AugAssign(value)
}
}
impl<'a> From<&'a ast::StmtFor> for CurrentAssignment<'a> {
fn from(value: &'a ast::StmtFor) -> Self {
Self::For(value)
}
}
impl<'a> From<&'a ast::ExprNamed> for CurrentAssignment<'a> {
fn from(value: &'a ast::ExprNamed) -> Self {
Self::Named(value)
}
}
impl<'a> From<&'a ast::WithItem> for CurrentAssignment<'a> {
fn from(value: &'a ast::WithItem) -> Self {
Self::WithItem(value)
}
}
struct CurrentMatchCase<'a> {
/// The pattern that's part of the current match case.
pattern: &'a ast::Pattern,
/// The index of the sub-pattern that's being currently visited within the pattern.
///
/// For example:
/// ```py
/// match subject:
/// case a as b: ...
/// case [a, b]: ...
/// case a | b: ...
/// ```
///
/// In all of the above cases, the index would be 0 for `a` and 1 for `b`.
index: u32,
}
impl<'a> CurrentMatchCase<'a> {
fn new(pattern: &'a ast::Pattern) -> Self {
Self { pattern, index: 0 }
}
}

View File

@@ -1,39 +0,0 @@
use ruff_db::files::File;
use ruff_python_ast as ast;
use crate::ast_node_ref::AstNodeRef;
use crate::db::Db;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{FileScopeId, ScopeId};
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum Constraint<'db> {
Expression(Expression<'db>),
Pattern(PatternConstraint<'db>),
}
#[salsa::tracked]
pub(crate) struct PatternConstraint<'db> {
#[id]
pub(crate) file: File,
#[id]
pub(crate) file_scope: FileScopeId,
#[no_eq]
#[return_ref]
pub(crate) subject: AstNodeRef<ast::Expr>,
#[no_eq]
#[return_ref]
pub(crate) pattern: AstNodeRef<ast::Pattern>,
#[no_eq]
count: countme::Count<PatternConstraint<'static>>,
}
impl<'db> PatternConstraint<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
}

View File

@@ -1,5 +1,6 @@
use ruff_db::files::File;
use ruff_db::parsed::ParsedModule;
use ruff_index::newtype_index;
use ruff_python_ast as ast;
use crate::ast_node_ref::AstNodeRef;
@@ -8,7 +9,7 @@ use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
use crate::Db;
#[salsa::tracked]
pub struct Definition<'db> {
pub(crate) struct Definition<'db> {
/// The file in which the definition occurs.
#[id]
pub(crate) file: File,
@@ -33,36 +34,34 @@ impl<'db> Definition<'db> {
pub(crate) fn scope(self, db: &'db dyn Db) -> ScopeId<'db> {
self.file_scope(db).to_scope_id(db, self.file(db))
}
pub(crate) fn category(self, db: &'db dyn Db) -> DefinitionCategory {
self.kind(db).category()
}
pub(crate) fn is_declaration(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_declaration()
}
pub(crate) fn is_binding(self, db: &'db dyn Db) -> bool {
self.kind(db).category().is_binding()
}
}
#[derive(Clone, Debug)]
pub(crate) enum DefinitionKind {
/// Inserted at control-flow merge points, if multiple definitions can reach the merge point.
///
/// Operands are not kept inline, since it's not possible to construct cyclically-referential
/// Salsa tracked structs; they are kept instead in the
/// [`UseDefMap`](super::use_def::UseDefMap).
Phi(ScopedPhiId),
/// An assignment to the symbol.
Node(DefinitionNode),
}
#[newtype_index]
pub(crate) struct ScopedPhiId;
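// A phi stands for "one of several definitions can reach this point". Example source
// (illustrative only):
//
//     if flag:
//         x = 1
//     else:
//         x = "a"
//     y = x
//
// the use of `x` on the last line resolves to a `Phi` definition whose operands are
// the two assignments above; the operands live in the scope's `UseDefMap` rather than
// inline, because Salsa tracked structs cannot refer to each other cyclically.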
#[derive(Copy, Clone, Debug)]
pub(crate) enum DefinitionNodeRef<'a> {
Import(&'a ast::Alias),
ImportFrom(ImportFromDefinitionNodeRef<'a>),
For(ForStmtDefinitionNodeRef<'a>),
Function(&'a ast::StmtFunctionDef),
Class(&'a ast::StmtClassDef),
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
AugmentedAssignment(&'a ast::StmtAugAssign),
Comprehension(ComprehensionDefinitionNodeRef<'a>),
Parameter(ast::AnyParameterRef<'a>),
WithItem(WithItemDefinitionNodeRef<'a>),
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
}
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -89,12 +88,6 @@ impl<'a> From<&'a ast::StmtAnnAssign> for DefinitionNodeRef<'a> {
}
}
impl<'a> From<&'a ast::StmtAugAssign> for DefinitionNodeRef<'a> {
fn from(node: &'a ast::StmtAugAssign) -> Self {
Self::AugmentedAssignment(node)
}
}
impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
fn from(node_ref: &'a ast::Alias) -> Self {
Self::Import(node_ref)
@@ -107,42 +100,18 @@ impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
}
}
impl<'a> From<ForStmtDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(value: ForStmtDefinitionNodeRef<'a>) -> Self {
Self::For(value)
}
}
impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: AssignmentDefinitionNodeRef<'a>) -> Self {
Self::Assignment(node_ref)
}
}
impl<'a> From<WithItemDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node_ref: WithItemDefinitionNodeRef<'a>) -> Self {
Self::WithItem(node_ref)
}
}
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
Self::Comprehension(node)
}
}
impl<'a> From<ast::AnyParameterRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ast::AnyParameterRef<'a>) -> Self {
Self::Parameter(node)
}
}
impl<'a> From<MatchPatternDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: MatchPatternDefinitionNodeRef<'a>) -> Self {
Self::MatchPattern(node)
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
@@ -155,128 +124,49 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct WithItemDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::WithItem,
pub(crate) target: &'a ast::ExprName,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ForStmtDefinitionNodeRef<'a> {
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ExceptHandlerDefinitionNodeRef<'a> {
pub(crate) handler: &'a ast::ExceptHandlerExceptHandler,
pub(crate) is_star: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
pub(crate) iterable: &'a ast::Expr,
pub(crate) target: &'a ast::ExprName,
pub(crate) node: &'a ast::Comprehension,
pub(crate) first: bool,
pub(crate) is_async: bool,
}
#[derive(Copy, Clone, Debug)]
pub(crate) struct MatchPatternDefinitionNodeRef<'a> {
/// The outermost pattern node in which the identifier being defined occurs.
pub(crate) pattern: &'a ast::Pattern,
/// The identifier being defined.
pub(crate) identifier: &'a ast::Identifier,
/// The index of the identifier in the pattern when visiting the `pattern` node in evaluation
/// order.
pub(crate) index: u32,
}
impl DefinitionNodeRef<'_> {
#[allow(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionNode {
match self {
DefinitionNodeRef::Import(alias) => {
DefinitionKind::Import(AstNodeRef::new(parsed, alias))
DefinitionNode::Import(AstNodeRef::new(parsed, alias))
}
DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
DefinitionKind::ImportFrom(ImportFromDefinitionKind {
DefinitionNode::ImportFrom(ImportFromDefinitionNode {
node: AstNodeRef::new(parsed, node),
alias_index,
})
}
DefinitionNodeRef::Function(function) => {
DefinitionKind::Function(AstNodeRef::new(parsed, function))
DefinitionNode::Function(AstNodeRef::new(parsed, function))
}
DefinitionNodeRef::Class(class) => {
DefinitionKind::Class(AstNodeRef::new(parsed, class))
DefinitionNode::Class(AstNodeRef::new(parsed, class))
}
DefinitionNodeRef::NamedExpression(named) => {
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
DefinitionNode::NamedExpression(AstNodeRef::new(parsed, named))
}
DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
DefinitionKind::Assignment(AssignmentDefinitionKind {
DefinitionNode::Assignment(AssignmentDefinitionNode {
assignment: AstNodeRef::new(parsed.clone(), assignment),
target: AstNodeRef::new(parsed, target),
})
}
DefinitionNodeRef::AnnotatedAssignment(assign) => {
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
DefinitionNode::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
}
DefinitionNodeRef::AugmentedAssignment(augmented_assignment) => {
DefinitionKind::AugmentedAssignment(AstNodeRef::new(parsed, augmented_assignment))
}
DefinitionNodeRef::For(ForStmtDefinitionNodeRef {
iterable,
target,
is_async,
}) => DefinitionKind::For(ForStmtDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
is_async,
}),
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef {
iterable,
target,
first,
is_async,
}) => DefinitionKind::Comprehension(ComprehensionDefinitionKind {
iterable: AstNodeRef::new(parsed.clone(), iterable),
target: AstNodeRef::new(parsed, target),
first,
is_async,
}),
DefinitionNodeRef::Parameter(parameter) => match parameter {
ast::AnyParameterRef::Variadic(parameter) => {
DefinitionKind::Parameter(AstNodeRef::new(parsed, parameter))
}
ast::AnyParameterRef::NonVariadic(parameter) => {
DefinitionKind::ParameterWithDefault(AstNodeRef::new(parsed, parameter))
}
},
DefinitionNodeRef::WithItem(WithItemDefinitionNodeRef { node, target }) => {
DefinitionKind::WithItem(WithItemDefinitionKind {
node: AstNodeRef::new(parsed.clone(), node),
target: AstNodeRef::new(parsed, target),
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
DefinitionNode::Comprehension(ComprehensionDefinitionNode {
node: AstNodeRef::new(parsed, node),
first,
})
}
DefinitionNodeRef::MatchPattern(MatchPatternDefinitionNodeRef {
pattern,
identifier,
index,
}) => DefinitionKind::MatchPattern(MatchPatternDefinitionKind {
pattern: AstNodeRef::new(parsed.clone(), pattern),
identifier: AstNodeRef::new(parsed, identifier),
index,
}),
DefinitionNodeRef::ExceptHandler(ExceptHandlerDefinitionNodeRef {
handler,
is_star,
}) => DefinitionKind::ExceptHandler(ExceptHandlerDefinitionKind {
handler: AstNodeRef::new(parsed.clone(), handler),
is_star,
}),
}
}
@@ -294,177 +184,46 @@ impl DefinitionNodeRef<'_> {
target,
}) => target.into(),
Self::AnnotatedAssignment(node) => node.into(),
Self::AugmentedAssignment(node) => node.into(),
Self::For(ForStmtDefinitionNodeRef {
iterable: _,
target,
is_async: _,
}) => target.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { target, .. }) => target.into(),
Self::Parameter(node) => match node {
ast::AnyParameterRef::Variadic(parameter) => parameter.into(),
ast::AnyParameterRef::NonVariadic(parameter) => parameter.into(),
},
Self::WithItem(WithItemDefinitionNodeRef { node: _, target }) => target.into(),
Self::MatchPattern(MatchPatternDefinitionNodeRef { identifier, .. }) => {
identifier.into()
}
Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
}
}
}
#[derive(Clone, Copy, Debug)]
pub(crate) enum DefinitionCategory {
/// A Definition which binds a value to a name (e.g. `x = 1`).
Binding,
/// A Definition which declares the upper-bound of acceptable types for this name (`x: int`).
Declaration,
/// A Definition which both declares a type and binds a value (e.g. `x: int = 1`).
DeclarationAndBinding,
}
impl DefinitionCategory {
/// True if this definition establishes a "declared type" for the symbol.
///
/// If so, any assignments reached by this definition are in error if they assign a value of a
/// type not assignable to the declared type.
///
/// Annotations establish a declared type. So do function and class definitions.
pub(crate) fn is_declaration(self) -> bool {
matches!(
self,
DefinitionCategory::Declaration | DefinitionCategory::DeclarationAndBinding
)
}
/// True if this definition assigns a value to the symbol.
///
/// False only for annotated assignments without a RHS.
pub(crate) fn is_binding(self) -> bool {
matches!(
self,
DefinitionCategory::Binding | DefinitionCategory::DeclarationAndBinding
)
}
}
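// Rough mapping between source forms and categories (illustrative):
//
//     x = 1          # Binding
//     x: int         # Declaration (no value is bound)
//     x: int = 1     # DeclarationAndBinding
//     def f(): ...   # DeclarationAndBinding (functions and classes declare and bind)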
#[derive(Clone, Debug)]
pub enum DefinitionKind {
pub enum DefinitionNode {
Import(AstNodeRef<ast::Alias>),
ImportFrom(ImportFromDefinitionKind),
ImportFrom(ImportFromDefinitionNode),
Function(AstNodeRef<ast::StmtFunctionDef>),
Class(AstNodeRef<ast::StmtClassDef>),
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind),
Assignment(AssignmentDefinitionNode),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
AugmentedAssignment(AstNodeRef<ast::StmtAugAssign>),
For(ForStmtDefinitionKind),
Comprehension(ComprehensionDefinitionKind),
Parameter(AstNodeRef<ast::Parameter>),
ParameterWithDefault(AstNodeRef<ast::ParameterWithDefault>),
WithItem(WithItemDefinitionKind),
MatchPattern(MatchPatternDefinitionKind),
ExceptHandler(ExceptHandlerDefinitionKind),
}
impl DefinitionKind {
pub(crate) fn category(&self) -> DefinitionCategory {
match self {
// functions and classes always bind a value, and we always consider them declarations
DefinitionKind::Function(_) | DefinitionKind::Class(_) => {
DefinitionCategory::DeclarationAndBinding
}
// a parameter always binds a value, but is only a declaration if annotated
DefinitionKind::Parameter(parameter) => {
if parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// presence of a default is irrelevant, same logic as for a no-default parameter
DefinitionKind::ParameterWithDefault(parameter_with_default) => {
if parameter_with_default.parameter.annotation.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Binding
}
}
// annotated assignment is always a declaration, only a binding if there is a RHS
DefinitionKind::AnnotatedAssignment(ann_assign) => {
if ann_assign.value.is_some() {
DefinitionCategory::DeclarationAndBinding
} else {
DefinitionCategory::Declaration
}
}
// all of these bind values without declaring a type
DefinitionKind::Import(_)
| DefinitionKind::ImportFrom(_)
| DefinitionKind::NamedExpression(_)
| DefinitionKind::Assignment(_)
| DefinitionKind::AugmentedAssignment(_)
| DefinitionKind::For(_)
| DefinitionKind::Comprehension(_)
| DefinitionKind::WithItem(_)
| DefinitionKind::MatchPattern(_)
| DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding,
}
}
Comprehension(ComprehensionDefinitionNode),
}
#[derive(Clone, Debug)]
#[allow(dead_code)]
pub struct MatchPatternDefinitionKind {
pattern: AstNodeRef<ast::Pattern>,
identifier: AstNodeRef<ast::Identifier>,
index: u32,
}
impl MatchPatternDefinitionKind {
pub(crate) fn pattern(&self) -> &ast::Pattern {
self.pattern.node()
}
pub(crate) fn index(&self) -> u32 {
self.index
}
}
#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
pub struct ComprehensionDefinitionNode {
node: AstNodeRef<ast::Comprehension>,
first: bool,
is_async: bool,
}
impl ComprehensionDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
impl ComprehensionDefinitionNode {
pub(crate) fn node(&self) -> &ast::Comprehension {
self.node.node()
}
pub(crate) fn is_first(&self) -> bool {
self.first
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
pub struct ImportFromDefinitionKind {
pub struct ImportFromDefinitionNode {
node: AstNodeRef<ast::StmtImportFrom>,
alias_index: usize,
}
impl ImportFromDefinitionKind {
impl ImportFromDefinitionNode {
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
self.node.node()
}
@@ -475,76 +234,16 @@ impl ImportFromDefinitionKind {
}
#[derive(Clone, Debug)]
pub struct AssignmentDefinitionKind {
#[allow(dead_code)]
pub struct AssignmentDefinitionNode {
assignment: AstNodeRef<ast::StmtAssign>,
target: AstNodeRef<ast::ExprName>,
}
impl AssignmentDefinitionKind {
impl AssignmentDefinitionNode {
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
self.assignment.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}
#[derive(Clone, Debug)]
pub struct WithItemDefinitionKind {
node: AstNodeRef<ast::WithItem>,
target: AstNodeRef<ast::ExprName>,
}
impl WithItemDefinitionKind {
pub(crate) fn node(&self) -> &ast::WithItem {
self.node.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
}
#[derive(Clone, Debug)]
pub struct ForStmtDefinitionKind {
iterable: AstNodeRef<ast::Expr>,
target: AstNodeRef<ast::ExprName>,
is_async: bool,
}
impl ForStmtDefinitionKind {
pub(crate) fn iterable(&self) -> &ast::Expr {
self.iterable.node()
}
pub(crate) fn target(&self) -> &ast::ExprName {
self.target.node()
}
pub(crate) fn is_async(&self) -> bool {
self.is_async
}
}
#[derive(Clone, Debug)]
pub struct ExceptHandlerDefinitionKind {
handler: AstNodeRef<ast::ExceptHandlerExceptHandler>,
is_star: bool,
}
impl ExceptHandlerDefinitionKind {
pub(crate) fn node(&self) -> &ast::ExceptHandlerExceptHandler {
self.handler.node()
}
pub(crate) fn handled_exceptions(&self) -> Option<&ast::Expr> {
self.node().type_.as_deref()
}
pub(crate) fn is_star(&self) -> bool {
self.is_star
}
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
@@ -586,38 +285,8 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
}
}
impl From<&ast::StmtAugAssign> for DefinitionNodeKey {
fn from(node: &ast::StmtAugAssign) -> Self {
impl From<&ast::Comprehension> for DefinitionNodeKey {
fn from(node: &ast::Comprehension) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::StmtFor> for DefinitionNodeKey {
fn from(value: &ast::StmtFor) -> Self {
Self(NodeKey::from_node(value))
}
}
impl From<&ast::Parameter> for DefinitionNodeKey {
fn from(node: &ast::Parameter) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::ParameterWithDefault> for DefinitionNodeKey {
fn from(node: &ast::ParameterWithDefault) -> Self {
Self(NodeKey::from_node(node))
}
}
impl From<&ast::Identifier> for DefinitionNodeKey {
fn from(identifier: &ast::Identifier) -> Self {
Self(NodeKey::from_node(identifier))
}
}
impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
fn from(handler: &ast::ExceptHandlerExceptHandler) -> Self {
Self(NodeKey::from_node(handler))
}
}

View File

@@ -21,7 +21,7 @@ pub(crate) struct Expression<'db> {
/// The expression node.
#[no_eq]
#[return_ref]
pub(crate) node_ref: AstNodeRef<ast::Expr>,
pub(crate) node: AstNodeRef<ast::Expr>,
#[no_eq]
count: countme::Count<Expression<'static>>,

View File

@@ -44,16 +44,16 @@ impl Symbol {
}
/// Is the symbol defined in its containing scope?
pub fn is_bound(&self) -> bool {
self.flags.contains(SymbolFlags::IS_BOUND)
pub fn is_defined(&self) -> bool {
self.flags.contains(SymbolFlags::IS_DEFINED)
}
}
bitflags! {
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct SymbolFlags: u8 {
pub(super) struct SymbolFlags: u8 {
const IS_USED = 1 << 0;
const IS_BOUND = 1 << 1;
const IS_DEFINED = 1 << 1;
/// TODO: This flag is not yet set by anything
const MARKED_GLOBAL = 1 << 2;
/// TODO: This flag is not yet set by anything
@@ -149,10 +149,6 @@ impl FileScopeId {
FileScopeId::from_u32(0)
}
pub fn is_global(self) -> bool {
self == FileScopeId::global()
}
pub fn to_scope_id(self, db: &dyn Db, file: File) -> ScopeId<'_> {
let index = semantic_index(db, file);
index.scope_ids_by_scope[self]
@@ -272,7 +268,11 @@ impl SymbolTableBuilder {
}
}
pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
pub(super) fn add_or_update_symbol(
&mut self,
name: Name,
flags: SymbolFlags,
) -> ScopedSymbolId {
let hash = SymbolTable::hash_name(&name);
let entry = self
.table
@@ -281,27 +281,25 @@ impl SymbolTableBuilder {
.from_hash(hash, |id| self.table.symbols[*id].name() == &name);
match entry {
RawEntryMut::Occupied(entry) => (*entry.key(), false),
RawEntryMut::Occupied(entry) => {
let symbol = &mut self.table.symbols[*entry.key()];
symbol.insert_flags(flags);
*entry.key()
}
RawEntryMut::Vacant(entry) => {
let symbol = Symbol::new(name);
let mut symbol = Symbol::new(name);
symbol.insert_flags(flags);
let id = self.table.symbols.push(symbol);
entry.insert_with_hasher(hash, id, (), |id| {
SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
});
(id, true)
id
}
}
}
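For orientation, here is a minimal self-contained sketch of what `add_or_update_symbol` does: intern a name to a stable id, OR-ing new flags into an existing entry. It uses a plain `HashMap` keyed by owned `String`s and a `u8` flags field (simplified, hypothetical names, not the crate's types); the real code above uses hashbrown's raw-entry API so the name is hashed once and never cloned unnecessarily.

```rust
use std::collections::HashMap;

// Flag bits mirroring SymbolFlags above (simplified to a plain u8).
const IS_USED: u8 = 1 << 0;
const IS_DEFINED: u8 = 1 << 1;

#[derive(Default)]
struct SymbolTable {
    flags: Vec<u8>,                      // per symbol id
    ids_by_name: HashMap<String, usize>, // name -> symbol id
}

impl SymbolTable {
    fn add_or_update_symbol(&mut self, name: &str, flags: u8) -> usize {
        match self.ids_by_name.get(name).copied() {
            Some(id) => {
                self.flags[id] |= flags; // existing symbol: merge flags
                id
            }
            None => {
                let id = self.flags.len(); // new symbol: next id
                self.flags.push(flags);
                self.ids_by_name.insert(name.to_string(), id);
                id
            }
        }
    }
}

fn main() {
    let mut table = SymbolTable::default();
    let a = table.add_or_update_symbol("x", IS_DEFINED);
    let b = table.add_or_update_symbol("x", IS_USED);
    assert_eq!(a, b); // same id both times
    assert_eq!(table.flags[a], IS_DEFINED | IS_USED);
    println!("x -> id {a}, flags {:b}", table.flags[a]);
}
```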
pub(super) fn mark_symbol_bound(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_BOUND);
}
pub(super) fn mark_symbol_used(&mut self, id: ScopedSymbolId) {
self.table.symbols[id].insert_flags(SymbolFlags::IS_USED);
}
pub(super) fn finish(mut self) -> SymbolTable {
self.table.shrink_to_fit();
self.table

View File

@@ -1,79 +1,4 @@
//! First, some terminology:
//!
//! * A "binding" gives a new value to a variable. This includes many different Python statements
//! (assignment statements of course, but also imports, `def` and `class` statements, `as`
//! clauses in `with` and `except` statements, match patterns, and others) and even one
//! expression kind (named expressions). It notably does not include annotated assignment
//! statements without a right-hand side value; these do not assign any new value to the
//! variable. We consider function parameters to be bindings as well, since (from the perspective
//! of the function's internal scope), a function parameter begins the scope bound to a value.
//!
//! * A "declaration" establishes an upper bound type for the values that a variable may be
//! permitted to take on. Annotated assignment statements (with or without an RHS value) are
//! declarations; annotated function parameters are also declarations. We consider `def` and
//! `class` statements to also be declarations, so as to prohibit accidentally shadowing them.
//!
//! Annotated assignments with a right-hand side, and annotated function parameters, are both
//! bindings and declarations.
//!
//! We use [`Definition`] as the universal term (and Salsa tracked struct) encompassing both
//! bindings and declarations. (This sacrifices a bit of type safety in exchange for improved
//! performance via fewer Salsa tracked structs and queries, since most declarations -- typed
//! parameters and annotated assignments with RHS -- are both bindings and declarations.)
//!
//! At any given use of a variable, we can ask about both its "declared type" and its "inferred
//! type". These may be different, but the inferred type must always be assignable to the declared
//! type; that is, the declared type is always wider, and the inferred type may be more precise. If
//! we see an invalid assignment, we emit a diagnostic and abandon our inferred type, deferring to
//! the declared type (this allows an explicit annotation to override bad inference, without a
//! cast), maintaining the invariant.
//!
//! The **inferred type** represents the most precise type we believe encompasses all possible
//! values for the variable at a given use. It is based on a union of the bindings which can reach
//! that use through some control flow path, and the narrowing constraints that control flow must
//! have passed through between the binding and the use. For example, in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! use(x)
//! ```
//!
//! For the use of `x` on the third line, the inferred type should be `Literal[1]`. This is based
//! on the binding on the first line, which assigns the type `Literal[1] | None`, and the narrowing
//! constraint on the second line, which rules out the type `None`, since control flow must pass
//! through this constraint to reach the use in question.
//!
//! The **declared type** represents the code author's declaration (usually through a type
//! annotation) that a given variable should not be assigned any type outside the declared type. In
//! our model, declared types are also control-flow-sensitive; we allow the code author to
//! explicitly re-declare the same variable with a different type. So for a given binding of a
//! variable, we will want to ask which declarations of that variable can reach that binding, in
//! order to determine whether the binding is permitted, or should be a type error. For example:
//!
//! ```python
//! from pathlib import Path
//! def f(path: str):
//! path: Path = Path(path)
//! ```
//!
//! In this function, the initial declared type of `path` is `str`, meaning that the assignment
//! `path = Path(path)` would be a type error, since it assigns to `path` a value whose type is not
//! assignable to `str`. This is the purpose of declared types: they prevent accidental assignment
//! of the wrong type to a variable.
//!
//! But in some cases it is useful to "shadow" or "re-declare" a variable with a new type, and we
//! permit this, as long as it is done with an explicit re-annotation. So `path: Path =
//! Path(path)`, with the explicit `: Path` annotation, is permitted.
//!
//! The general rule is that whatever declaration(s) can reach a given binding determine the
//! validity of that binding. If there is a path in which the symbol is not declared, that is a
//! declaration of `Unknown`. If multiple declarations can reach a binding, we union them, but by
//! default we also issue a type error, since this implicit union of declared types may hide an
//! error.
//!
//! To support type inference, we build a map from each use of a symbol to the bindings live at
//! that use, and the type narrowing constraints that apply to each binding.
//! Build a map from each use of a symbol to the definitions visible from that use.
//!
//! Let's take this code sample:
//!
@@ -88,488 +13,366 @@
//! z = x
//! ```
//!
//! In this snippet, we have four bindings of `x` (the statements assigning `1`, `2`, `3`, and `4`
//! to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first binding of `x`
//! does not reach any use, because it's immediately replaced by the second binding, before any use
//! happens. (A linter could thus flag the statement `x = 1` as likely superfluous.)
//! In this snippet, we have four definitions of `x` (the statements assigning `1`, `2`, `3`,
//! and `4` to it), and two uses of `x` (the `y = x` and `z = x` assignments). The first
//! [`Definition`] of `x` is never visible to any use, because it's immediately replaced by the
//! second definition, before any use happens. (A linter could thus flag the statement `x = 1`
//! as likely superfluous.)
//!
//! The first use of `x` has one live binding: the assignment `x = 2`.
//! The first use of `x` has one definition visible to it: the assignment `x = 2`.
//!
//! Things get a bit more complex when we have branches. We will definitely take either the `if` or
//! the `else` branch. Thus, the second use of `x` has two live bindings: `x = 3` and `x = 4`. The
//! `x = 2` assignment is no longer visible, because it must be replaced by either `x = 3` or `x =
//! 4`, no matter which branch was taken. We don't know which branch was taken, so we must consider
//! both bindings as live, which means eventually we would (in type inference) look at these two
//! bindings and infer a type of `Literal[3, 4]` -- the union of `Literal[3]` and `Literal[4]` --
//! for the second use of `x`.
//! the `else` branch. Thus, the second use of `x` has two definitions visible to it: `x = 3` and
//! `x = 4`. The `x = 2` definition is no longer visible, because it must be replaced by either `x
//! = 3` or `x = 4`, no matter which branch was taken. We don't know which branch was taken, so we
//! must consider both definitions as visible, which means eventually we would (in type inference)
//! look at these two definitions and infer a type of `Literal[3, 4]` -- the union of `Literal[3]`
//! and `Literal[4]` -- for the second use of `x`.
//!
//! So that's one question our use-def map needs to answer: given a specific use of a symbol, which
//! binding(s) can reach that use. In [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number
//! all uses (that means a `Name` node with `Load` context) so we have a `ScopedUseId` to
//! efficiently represent each use.
//! definition(s) is/are visible from that use. In
//! [`AstIds`](crate::semantic_index::ast_ids::AstIds) we number all uses (that means a `Name` node
//! with `Load` context) so we have a `ScopedUseId` to efficiently represent each use.
//!
//! We also need to know, for a given definition of a symbol, what type narrowing constraints apply
//! to it. For instance, in this code sample:
//! The other case we need to handle is when a symbol is referenced from a different scope (the
//! most obvious example of this is an import). We call this "public" use of a symbol. So the other
//! question we need to be able to answer is, what are the publicly-visible definitions of each
//! symbol?
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! use(x)
//! ```
//! Technically, public use of a symbol could also occur from any point in control flow of the
//! scope where the symbol is defined (via inline imports and import cycles, in the case of an
//! import, or via a function call partway through the local scope that ends up using a symbol from
//! the scope via a global or nonlocal reference.) But modeling this fully accurately requires
//! whole-program analysis that isn't tractable for an efficient incremental compiler, since it
//! means a given symbol could have a different type every place it's referenced throughout the
//! program, depending on the shape of arbitrarily-sized call/import graphs. So we follow other
//! Python type-checkers in making the simplifying assumption that usually the scope will finish
//! execution before its symbols are made visible to other scopes; for instance, most imports will
//! import from a complete module, not a partially-executed module. (We may want to get a little
//! smarter than this in the future, in particular for closures, but for now this is where we
//! start.)
//!
//! At the use of `x`, the live binding of `x` is `1 if flag else None`, which would infer as the
//! type `Literal[1] | None`. But the constraint `x is not None` dominates this use, which means we
//! can rule out the possibility that `x` is `None` here, which should give us the type
//! `Literal[1]` for this use.
//! So this means that the publicly-visible definitions of a symbol are the definitions still
//! visible at the end of the scope.
//!
//! For declared types, we need to be able to answer the question "given a binding to a symbol,
//! which declarations of that symbol can reach the binding?" This allows us to emit a diagnostic
//! if the binding is attempting to bind a value of a type that is not assignable to the declared
//! type for that symbol, at that point in control flow.
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` map
//! indexed by [`ScopedSymbolId`]. The values in each are the visible definition of a symbol at
//! that use, or at the end of the scope.
//!
//! We also need to know, given a declaration of a symbol, what the inferred type of that symbol is
//! at that point. This allows us to emit a diagnostic in a case like `x = "foo"; x: int`. The
//! binding `x = "foo"` occurs before the declaration `x: int`, so according to our
//! control-flow-sensitive interpretation of declarations, the assignment is not an error. But the
//! declaration is an error, since it would violate the "inferred type must be assignable to
//! declared type" rule.
//!
//! Another case we need to handle is when a symbol is referenced from a different scope (for
//! example, an import or a nonlocal reference). We call this "public" use of a symbol. For public
//! use of a symbol, we prefer the declared type, if there are any declarations of that symbol; if
//! not, we fall back to the inferred type. So we also need to know which declarations and bindings
//! can reach the end of the scope.
//!
//! Technically, public use of a symbol could occur from any point in control flow of the scope
//! where the symbol is defined (via inline imports and import cycles, in the case of an import, or
//! via a function call partway through the local scope that ends up using a symbol from the scope
//! via a global or nonlocal reference.) But modeling this fully accurately requires whole-program
//! analysis that isn't tractable for an efficient analysis, since it means a given symbol could
//! have a different type every place it's referenced throughout the program, depending on the
//! shape of arbitrarily-sized call/import graphs. So we follow other Python type checkers in
//! making the simplifying assumption that usually the scope will finish execution before its
//! symbols are made visible to other scopes; for instance, most imports will import from a
//! complete module, not a partially-executed module. (We may want to get a little smarter than
//! this in the future for some closures, but for now this is where we start.)
//!
//! The data structure we build to answer these questions is the `UseDefMap`. It has a
//! `bindings_by_use` vector of [`SymbolBindings`] indexed by [`ScopedUseId`], a
//! `declarations_by_binding` vector of [`SymbolDeclarations`] indexed by [`ScopedDefinitionId`], a
//! `bindings_by_declaration` vector of [`SymbolBindings`] indexed by [`ScopedDefinitionId`], and
//! `public_bindings` and `public_definitions` vectors indexed by [`ScopedSymbolId`]. The values in
//! each of these vectors are (in principle) a list of live bindings at that use/definition, or at
//! the end of the scope for that symbol, with a list of the dominating constraints for each
//! binding.
//!
//! In order to avoid vectors-of-vectors-of-vectors and all the allocations that would entail, we
//! don't actually store these "list of visible definitions" as a vector of [`Definition`].
//! Instead, [`SymbolBindings`] and [`SymbolDeclarations`] are structs which use bit-sets to track
//! definitions (and constraints, in the case of bindings) in terms of [`ScopedDefinitionId`] and
//! [`ScopedConstraintId`], which are indices into the `all_definitions` and `all_constraints`
//! indexvecs in the [`UseDefMap`].
//!
//! There is another special kind of possible "definition" for a symbol: there might be a path from
//! the scope entry to a given use in which the symbol is never bound.
//!
//! The simplest way to model "unbound" would be as a "binding" itself: the initial "binding" for
//! each symbol in a scope. But actually modeling it this way would unnecessarily increase the
//! number of [`Definition`]s that Salsa must track. Since "unbound" is special in that all symbols
//! share it, and it doesn't have any additional per-symbol state, and constraints are irrelevant
//! to it, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
//! [`SymbolBindings`] struct. If this flag is `true` for a use of a symbol, it means the symbol
//! has a path to the use in which it is never bound. If this flag is `false`, it means we've
//! eliminated the possibility of unbound: every control flow path to the use includes a binding
//! for this symbol.
//!
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use, definition, and
//! constraint as they are encountered by the
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
//! each symbol, the builder tracks the `SymbolState` (`SymbolBindings` and `SymbolDeclarations`)
//! for that symbol. When we hit a use or definition of a symbol, we record the necessary parts of
//! the current state for that symbol that we need for that use or definition. When we reach the
//! end of the scope, it records the state for each symbol as the public definitions of that
//! symbol.
//!
//! Let's walk through the above example. Initially we record for `x` that it has no bindings, and
//! may be unbound. When we see `x = 1`, we record that as the sole live binding of `x`, and flip
//! `may_be_unbound` to `false`. Then we see `x = 2`, and we replace `x = 1` as the sole live
//! binding of `x`. When we get to `y = x`, we record that the live bindings for that use of `x`
//! are just the `x = 2` definition.
//!
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
//! happen regardless. Then we take a pre-branch snapshot of the current state for all symbols,
//! which we'll need later. Then we record `flag` as a possible constraint on the current binding
//! (`x = 2`), and go ahead and visit the `if` body. When we see `x = 3`, it replaces `x = 2`
//! (constrained by `flag`) as the sole live binding of `x`. At the end of the `if` body, we take
//! another snapshot of the current symbol state; we'll call this the post-if-body snapshot.
//!
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole binding for `x`
//! again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the sole live binding
//! of `x`.
//!
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
//! (reflecting the end-of-else state, with `x = 4` as the only live binding) with our post-if-body
//! snapshot (which has `x = 3` as the only live binding). The result of this merge is that we now
//! have two live bindings of `x`: `x = 3` and `x = 4`.
//!
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
//! visits a `StmtIf` node.
use self::symbol_state::{
BindingIdWithConstraintsIterator, ConstraintIdIterator, DeclarationIdIterator,
ScopedConstraintId, ScopedDefinitionId, SymbolBindings, SymbolDeclarations, SymbolState,
};
//! Rather than tracking multiple live definitions per symbol, we insert a Phi definition at
//! control-flow join points to merge the definition visible from each incoming path. This means
//! that at any given point a symbol has at most one visible definition (`None` when it may be
//! undefined). (This is analogous to static-single-assignment, or SSA, form, and
//! in fact we use the algorithm from [Simple and efficient construction of static single
//! assignment form](https://dl.acm.org/doi/10.1007/978-3-642-37051-9_6) here.)
use crate::semantic_index::ast_ids::ScopedUseId;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::ScopedSymbolId;
use ruff_index::IndexVec;
use rustc_hash::FxHashMap;
use crate::semantic_index::definition::{Definition, DefinitionKind, ScopedPhiId};
use crate::semantic_index::symbol::{FileScopeId, ScopedSymbolId};
use crate::Db;
use ruff_db::files::File;
use ruff_index::{newtype_index, IndexVec};
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use super::constraint::Constraint;
/// Number of basic block predecessors we store inline.
const PREDECESSORS: usize = 2;
mod bitset;
mod symbol_state;
/// Input operands (definitions) for a Phi definition. None means not defined.
// TODO would like to use SmallVec here but can't due to lifetime invariance issue.
type PhiOperands<'db> = Vec<Option<Definition<'db>>>;
/// Applicable definitions and constraints for every use of a name.
/// Definition for each use of a name.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct UseDefMap<'db> {
/// Array of [`Definition`] in this scope.
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
// TODO store constraints with definitions for type narrowing
/// Definition that reaches each [`ScopedUseId`].
definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,
/// Array of [`Constraint`] in this scope.
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// [`SymbolBindings`] reaching a [`ScopedUseId`].
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// [`SymbolBindings`] or [`SymbolDeclarations`] reaching a given [`Definition`].
/// Definition of each symbol visible at end of scope.
///
/// If the definition is a binding (only) -- `x = 1` for example -- then we need
/// [`SymbolDeclarations`] to know whether this binding is permitted by the live declarations.
///
/// If the definition is a declaration (only) -- `x: int` for example -- then we need
/// [`SymbolBindings`] to know whether this declaration is consistent with the previously
/// inferred type.
///
/// If the definition is both a declaration and a binding -- `x: int = 1` for example -- then
/// we don't actually need anything here, all we'll need to validate is that our own RHS is a
/// valid assignment to our own annotation.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// Sparse, because it only includes symbols defined in the scope.
public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>>,
/// [`SymbolState`] visible at end of scope for each symbol.
public_symbols: IndexVec<ScopedSymbolId, SymbolState>,
/// Operands for each Phi definition in this scope.
phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,
}
impl<'db> UseDefMap<'db> {
pub(crate) fn bindings_at_use(
&self,
use_id: ScopedUseId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(&self.bindings_by_use[use_id])
/// Return the dominating definition for a given use of a name; `None` means the name may be undefined at that use.
pub(crate) fn definition_for_use(&self, use_id: ScopedUseId) -> Option<Definition<'db>> {
self.definitions_by_use[use_id]
}
pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
self.bindings_by_use[use_id].may_be_unbound()
/// Return the definition visible at end of scope for a symbol.
///
/// Return None if the symbol is never defined in the scope.
pub(crate) fn public_definition(&self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
self.public_definitions.get(&symbol_id).copied()
}
pub(crate) fn public_bindings(
&self,
symbol: ScopedSymbolId,
) -> BindingWithConstraintsIterator<'_, 'db> {
self.bindings_iterator(self.public_symbols[symbol].bindings())
}
pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
self.public_symbols[symbol].may_be_unbound()
}
pub(crate) fn bindings_at_declaration(
&self,
declaration: Definition<'db>,
) -> BindingWithConstraintsIterator<'_, 'db> {
if let SymbolDefinitions::Bindings(bindings) = &self.definitions_by_definition[&declaration]
{
self.bindings_iterator(bindings)
} else {
unreachable!("Declaration has non-Bindings in definitions_by_definition");
}
}
pub(crate) fn declarations_at_binding(
&self,
binding: Definition<'db>,
) -> DeclarationsIterator<'_, 'db> {
if let SymbolDefinitions::Declarations(declarations) =
&self.definitions_by_definition[&binding]
{
self.declarations_iterator(declarations)
} else {
unreachable!("Binding has non-Declarations in definitions_by_definition");
}
}
pub(crate) fn public_declarations(
&self,
symbol: ScopedSymbolId,
) -> DeclarationsIterator<'_, 'db> {
let declarations = self.public_symbols[symbol].declarations();
self.declarations_iterator(declarations)
}
pub(crate) fn has_public_declarations(&self, symbol: ScopedSymbolId) -> bool {
!self.public_symbols[symbol].declarations().is_empty()
}
fn bindings_iterator<'a>(
&'a self,
bindings: &'a SymbolBindings,
) -> BindingWithConstraintsIterator<'a, 'db> {
BindingWithConstraintsIterator {
all_definitions: &self.all_definitions,
all_constraints: &self.all_constraints,
inner: bindings.iter(),
}
}
fn declarations_iterator<'a>(
&'a self,
declarations: &'a SymbolDeclarations,
) -> DeclarationsIterator<'a, 'db> {
DeclarationsIterator {
all_definitions: &self.all_definitions,
inner: declarations.iter(),
may_be_undeclared: declarations.may_be_undeclared(),
}
/// Return the operands for a Phi in this scope; a `None` operand means the symbol is not defined along that predecessor's path.
pub(crate) fn phi_operands<'s>(&'s self, phi_id: ScopedPhiId) -> &'s [Option<Definition<'db>>] {
self.phi_operands[phi_id].as_slice()
}
}
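As a rough worked example of what these queries return (illustrative values only; plain strings and vectors stand in for the crate's `Definition` and index types), consider the sample described in the module doc: `x` is assigned `1`, then `2`, then either `3` or `4` in an if/else, with uses `y = x` and `z = x`.

```rust
fn main() {
    // uses are numbered in source order: use 0 is `y = x`, use 1 is `z = x`
    let definitions_by_use: Vec<Option<&str>> = vec![Some("x = 2"), Some("phi0")];
    // phi0 merges the definitions reaching the join point after the if/else
    let phi_operands: Vec<Vec<Option<&str>>> = vec![vec![Some("x = 3"), Some("x = 4")]];
    // the publicly visible definition of `x` at end of scope is the same phi
    let public_definition_of_x: Option<&str> = Some("phi0");

    assert_eq!(definitions_by_use[0], Some("x = 2"));
    assert_eq!(phi_operands[0], vec![Some("x = 3"), Some("x = 4")]);
    assert_eq!(public_definition_of_x, definitions_by_use[1]);
}
```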
/// Either live bindings or live declarations for a symbol.
#[derive(Debug, PartialEq, Eq)]
enum SymbolDefinitions {
Bindings(SymbolBindings),
Declarations(SymbolDeclarations),
}
type PredecessorBlocks = SmallVec<[BasicBlockId; PREDECESSORS]>;
#[derive(Debug)]
pub(crate) struct BindingWithConstraintsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
inner: BindingIdWithConstraintsIterator<'map>,
}
/// A basic block is a linear region of code (no branches).
#[newtype_index]
pub(super) struct BasicBlockId;
impl<'map, 'db> Iterator for BindingWithConstraintsIterator<'map, 'db> {
type Item = BindingWithConstraints<'map, 'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner
.next()
.map(|def_id_with_constraints| BindingWithConstraints {
binding: self.all_definitions[def_id_with_constraints.definition],
constraints: ConstraintsIterator {
all_constraints: self.all_constraints,
constraint_ids: def_id_with_constraints.constraint_ids,
},
})
}
}
impl std::iter::FusedIterator for BindingWithConstraintsIterator<'_, '_> {}
pub(crate) struct BindingWithConstraints<'map, 'db> {
pub(crate) binding: Definition<'db>,
pub(crate) constraints: ConstraintsIterator<'map, 'db>,
}
pub(crate) struct ConstraintsIterator<'map, 'db> {
all_constraints: &'map IndexVec<ScopedConstraintId, Constraint<'db>>,
constraint_ids: ConstraintIdIterator<'map>,
}
impl<'map, 'db> Iterator for ConstraintsIterator<'map, 'db> {
type Item = Constraint<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.constraint_ids
.next()
.map(|constraint_id| self.all_constraints[constraint_id])
}
}
impl std::iter::FusedIterator for ConstraintsIterator<'_, '_> {}
pub(crate) struct DeclarationsIterator<'map, 'db> {
all_definitions: &'map IndexVec<ScopedDefinitionId, Definition<'db>>,
inner: DeclarationIdIterator<'map>,
may_be_undeclared: bool,
}
impl DeclarationsIterator<'_, '_> {
pub(crate) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
impl<'map, 'db> Iterator for DeclarationsIterator<'map, 'db> {
type Item = Definition<'db>;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(|def_id| self.all_definitions[def_id])
}
}
impl std::iter::FusedIterator for DeclarationsIterator<'_, '_> {}
/// A snapshot of the definitions and constraints state at a particular point in control flow.
#[derive(Clone, Debug)]
pub(super) struct FlowSnapshot {
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
}
#[derive(Debug, Default)]
pub(super) struct UseDefMapBuilder<'db> {
/// Append-only array of [`Definition`].
all_definitions: IndexVec<ScopedDefinitionId, Definition<'db>>,
db: &'db dyn Db,
file: File,
file_scope: FileScopeId,
/// Append-only array of [`Constraint`].
all_constraints: IndexVec<ScopedConstraintId, Constraint<'db>>,
/// Predecessor blocks for each basic block.
///
/// The entry block has none, all other blocks have at least one, and blocks that join control
/// flow have two or more.
predecessors: IndexVec<BasicBlockId, PredecessorBlocks>,
/// Live bindings at each so-far-recorded use.
bindings_by_use: IndexVec<ScopedUseId, SymbolBindings>,
/// The definition of each symbol which dominates each basic block.
///
/// No entry means "lazily unfilled"; we haven't had to query for it yet, and we may never have
/// to, if the symbol isn't used in this block or any successor block.
///
/// Each block has an [`FxHashMap`] of symbols instead of an [`IndexVec`] because it is lazy
/// and potentially sparse; it will only include a definition for a symbol that is actually
/// used in that block or a successor. An [`IndexVec`] would have to be eagerly filled with
/// placeholders.
definitions_per_block:
IndexVec<BasicBlockId, FxHashMap<ScopedSymbolId, Option<Definition<'db>>>>,
/// Live bindings or declarations for each so-far-recorded definition.
definitions_by_definition: FxHashMap<Definition<'db>, SymbolDefinitions>,
/// Incomplete Phi definitions in each block.
///
/// An incomplete Phi is used when we don't yet know, while processing a block's body, what new
/// predecessors the block may later gain (that is, backward jumps).
///
/// Sparse, because relatively few blocks (just loop headers) will have any incomplete Phis.
incomplete_phis: FxHashMap<BasicBlockId, Vec<Definition<'db>>>,
/// Currently live bindings and declarations for each symbol.
symbol_states: IndexVec<ScopedSymbolId, SymbolState>,
/// Operands for each Phi definition in this scope.
phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,
/// For each block: is its predecessor list fully populated?
///
/// If not, it isn't safe to recurse to predecessors yet; we might miss a predecessor block.
sealed_blocks: IndexVec<BasicBlockId, bool>,
/// Definition for each so-far-recorded use.
definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,
/// All symbols defined in this scope.
defined_symbols: FxHashSet<ScopedSymbolId>,
}
impl<'db> UseDefMapBuilder<'db> {
pub(super) fn new() -> Self {
Self::default()
pub(super) fn new(db: &'db dyn Db, file: File, file_scope: FileScopeId) -> Self {
let mut new = Self {
db,
file,
file_scope,
predecessors: IndexVec::new(),
definitions_per_block: IndexVec::new(),
incomplete_phis: FxHashMap::default(),
sealed_blocks: IndexVec::new(),
definitions_by_use: IndexVec::new(),
phi_operands: IndexVec::new(),
defined_symbols: FxHashSet::default(),
};
// create the entry basic block
new.predecessors.push(PredecessorBlocks::default());
new.definitions_per_block.push(FxHashMap::default());
new.sealed_blocks.push(true);
new
}
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
let new_symbol = self.symbol_states.push(SymbolState::undefined());
debug_assert_eq!(symbol, new_symbol);
}
pub(super) fn record_binding(&mut self, symbol: ScopedSymbolId, binding: Definition<'db>) {
let def_id = self.all_definitions.push(binding);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
binding,
SymbolDefinitions::Declarations(symbol_state.declarations().clone()),
);
symbol_state.record_binding(def_id);
}
pub(super) fn record_constraint(&mut self, constraint: Constraint<'db>) {
let constraint_id = self.all_constraints.push(constraint);
for state in &mut self.symbol_states {
state.record_constraint(constraint_id);
}
}
pub(super) fn record_declaration(
/// Record a definition for a symbol.
pub(super) fn record_definition(
&mut self,
symbol: ScopedSymbolId,
declaration: Definition<'db>,
) {
let def_id = self.all_definitions.push(declaration);
let symbol_state = &mut self.symbol_states[symbol];
self.definitions_by_definition.insert(
declaration,
SymbolDefinitions::Bindings(symbol_state.bindings().clone()),
);
symbol_state.record_declaration(def_id);
}
pub(super) fn record_declaration_and_binding(
&mut self,
symbol: ScopedSymbolId,
symbol_id: ScopedSymbolId,
definition: Definition<'db>,
) {
// We don't need to store anything in self.definitions_by_definition.
let def_id = self.all_definitions.push(definition);
let symbol_state = &mut self.symbol_states[symbol];
symbol_state.record_declaration(def_id);
symbol_state.record_binding(def_id);
self.memoize(self.current_block_id(), symbol_id, Some(definition));
self.defined_symbols.insert(symbol_id);
}
pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
// We have a use of a symbol; clone the current bindings for that symbol, and record them
// as the live bindings for this use.
let new_use = self
.bindings_by_use
.push(self.symbol_states[symbol].bindings().clone());
/// Record a use of a symbol.
pub(super) fn record_use(&mut self, symbol_id: ScopedSymbolId, use_id: ScopedUseId) {
let definition_id = self.lookup(symbol_id);
let new_use = self.definitions_by_use.push(definition_id);
debug_assert_eq!(use_id, new_use);
}
/// Take a snapshot of the current visible-symbols state.
pub(super) fn snapshot(&self) -> FlowSnapshot {
FlowSnapshot {
symbol_states: self.symbol_states.clone(),
/// Get the id of the current basic block.
pub(super) fn current_block_id(&self) -> BasicBlockId {
BasicBlockId::from(self.definitions_per_block.len() - 1)
}
/// Push a new basic block, with given block as predecessor.
pub(super) fn new_block_from(&mut self, block_id: BasicBlockId, sealed: bool) {
self.new_block_with_predecessors(smallvec![block_id], sealed);
}
/// Push a new basic block with the current block as predecessor; return the ID of the block that was current before the push.
pub(super) fn next_block(&mut self, sealed: bool) -> BasicBlockId {
let current_block_id = self.current_block_id();
self.new_block_from(current_block_id, sealed);
current_block_id
}
/// Add a predecessor to the current block.
pub(super) fn merge_block(&mut self, new_predecessor: BasicBlockId) {
let block_id = self.current_block_id();
debug_assert!(!self.sealed_blocks[block_id]);
self.predecessors[block_id].push(new_predecessor);
}
/// Add predecessors to the current block.
pub(super) fn merge_blocks(&mut self, new_predecessors: Vec<BasicBlockId>) {
let block_id = self.current_block_id();
debug_assert!(!self.sealed_blocks[block_id]);
self.predecessors[block_id].extend(new_predecessors);
}
/// Mark the current block as sealed; it cannot have any more predecessors added.
pub(super) fn seal_current_block(&mut self) {
self.seal_block(self.current_block_id());
}
/// Mark a block as sealed; it cannot have any more predecessors added.
pub(super) fn seal_block(&mut self, block_id: BasicBlockId) {
debug_assert!(!self.sealed_blocks[block_id]);
if let Some(phis) = self.incomplete_phis.get(&block_id) {
for phi in phis.clone() {
self.add_phi_operands(block_id, phi);
}
self.incomplete_phis.remove(&block_id);
}
self.sealed_blocks[block_id] = true;
}
pub(super) fn finish(mut self) -> UseDefMap<'db> {
debug_assert!(self.incomplete_phis.is_empty());
debug_assert!(self.sealed_blocks.iter().all(|&b| b));
self.definitions_by_use.shrink_to_fit();
self.phi_operands.shrink_to_fit();
let mut public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>> =
FxHashMap::default();
for symbol_id in self.defined_symbols.clone() {
// SAFETY: We are only looking up defined symbols here, can't get None.
public_definitions.insert(symbol_id, self.lookup(symbol_id).unwrap());
}
UseDefMap {
definitions_by_use: self.definitions_by_use,
public_definitions,
phi_operands: self.phi_operands,
}
}
/// Restore the current builder symbols state to the given snapshot.
pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
let num_symbols = self.symbol_states.len();
debug_assert!(num_symbols >= snapshot.symbol_states.len());
/// Push a new basic block (with given predecessors) and return its ID.
fn new_block_with_predecessors(
&mut self,
predecessors: PredecessorBlocks,
sealed: bool,
) -> BasicBlockId {
let new_block_id = self.predecessors.push(predecessors);
self.definitions_per_block.push(FxHashMap::default());
self.sealed_blocks.push(sealed);
// Restore the current visible-definitions state to the given snapshot.
self.symbol_states = snapshot.symbol_states;
// If the snapshot we are restoring is missing some symbols we've recorded since, we need
// to fill them in so the symbol IDs continue to line up. Since they don't exist in the
// snapshot, the correct state to fill them in with is "undefined".
self.symbol_states
.resize(num_symbols, SymbolState::undefined());
new_block_id
}
/// Merge the given snapshot into the current state, reflecting that we might have taken either
/// path to get here. The new state for each symbol should include definitions from both the
/// prior state and the snapshot.
pub(super) fn merge(&mut self, snapshot: FlowSnapshot) {
// We never remove symbols from `symbol_states` (it's an IndexVec, and the symbol
// IDs must line up), so the current number of known symbols must always be equal to or
// greater than the number of known symbols in a previously-taken snapshot.
debug_assert!(self.symbol_states.len() >= snapshot.symbol_states.len());
/// Look up the dominating definition for a symbol in the current block.
///
/// If there isn't a local definition, recursively look up the symbol in predecessor blocks,
/// memoizing the found definition in each block.
fn lookup(&mut self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
self.lookup_impl(self.current_block_id(), symbol_id)
}
let mut snapshot_definitions_iter = snapshot.symbol_states.into_iter();
for current in &mut self.symbol_states {
if let Some(snapshot) = snapshot_definitions_iter.next() {
current.merge(snapshot);
} else {
// Symbol not present in snapshot, so it's unbound/undeclared from that path.
current.set_may_be_unbound();
current.set_may_be_undeclared();
fn lookup_impl(
&mut self,
block_id: BasicBlockId,
symbol_id: ScopedSymbolId,
) -> Option<Definition<'db>> {
if let Some(local) = self.definitions_per_block[block_id].get(&symbol_id) {
return *local;
}
if !self.sealed_blocks[block_id] {
// we may still be missing predecessors; insert an incomplete Phi.
let definition = self.create_incomplete_phi(block_id, symbol_id);
self.incomplete_phis
.entry(block_id)
.or_default()
.push(definition);
return Some(definition);
}
match self.predecessors[block_id].as_slice() {
// entry block, no definition found: return None
[] => None,
// single predecessor, recurse
&[single_predecessor_id] => {
let definition = self.lookup_impl(single_predecessor_id, symbol_id);
self.memoize(block_id, symbol_id, definition);
definition
}
// multiple predecessors: create and memoize an incomplete Phi to break cycles, then
// recurse into predecessors and fill the Phi operands.
_ => {
let phi = self.create_incomplete_phi(block_id, symbol_id);
self.add_phi_operands(block_id, phi);
Some(phi)
}
}
}
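For intuition, here is a minimal self-contained sketch of the same lookup idea, using plain indices and a per-block `HashMap` instead of the crate's types. Block sealing (for blocks whose predecessor lists are not yet complete, such as loop headers) is omitted, so this version only covers the forward-branching case.

```rust
use std::collections::HashMap;

type Block = usize;
type Symbol = usize;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Def {
    Assign(u32), // a concrete definition, identified by some id
    Phi(usize),  // index into `phi_operands`
}

struct Builder {
    predecessors: Vec<Vec<Block>>,            // per block
    local: Vec<HashMap<Symbol, Option<Def>>>, // memoized lookups per block
    phi_operands: Vec<Vec<Option<Def>>>,      // operands per phi
}

impl Builder {
    fn lookup(&mut self, block: Block, symbol: Symbol) -> Option<Def> {
        if let Some(found) = self.local[block].get(&symbol) {
            return *found;
        }
        let result = match self.predecessors[block].clone().as_slice() {
            // entry block and no local definition: the symbol may be undefined
            [] => None,
            // single predecessor: recurse
            &[pred] => self.lookup(pred, symbol),
            // join point: memoize a phi first (to break cycles), then fill its
            // operands with the definition reaching it from each predecessor
            preds => {
                let phi_id = self.phi_operands.len();
                self.phi_operands.push(Vec::new());
                self.local[block].insert(symbol, Some(Def::Phi(phi_id)));
                let operands = preds.iter().map(|&p| self.lookup(p, symbol)).collect();
                self.phi_operands[phi_id] = operands;
                Some(Def::Phi(phi_id))
            }
        };
        self.local[block].insert(symbol, result);
        result
    }
}

fn main() {
    // blocks: 0 = entry, 1 = if-body, 2 = else-body, 3 = join
    let mut b = Builder {
        predecessors: vec![vec![], vec![0], vec![0], vec![1, 2]],
        local: vec![HashMap::new(); 4],
        phi_operands: Vec::new(),
    };
    let x: Symbol = 0;
    b.local[1].insert(x, Some(Def::Assign(3))); // `x = 3` in the if-body
    b.local[2].insert(x, Some(Def::Assign(4))); // `x = 4` in the else-body

    let at_join = b.lookup(3, x);
    assert_eq!(at_join, Some(Def::Phi(0)));
    assert_eq!(b.phi_operands[0], [Some(Def::Assign(3)), Some(Def::Assign(4))]);
    // looking `x` up again hits the memoized phi instead of recursing
    assert_eq!(b.lookup(3, x), at_join);
}
```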
pub(super) fn finish(mut self) -> UseDefMap<'db> {
self.all_definitions.shrink_to_fit();
self.all_constraints.shrink_to_fit();
self.symbol_states.shrink_to_fit();
self.bindings_by_use.shrink_to_fit();
self.definitions_by_definition.shrink_to_fit();
/// Recurse into predecessors to add operands for an incomplete Phi.
fn add_phi_operands(&mut self, block_id: BasicBlockId, phi: Definition<'db>) {
let predecessors: PredecessorBlocks = self.predecessors[block_id].clone();
let operands: PhiOperands = predecessors
.iter()
.map(|pred_id| self.lookup_impl(*pred_id, phi.symbol(self.db)))
.collect();
let DefinitionKind::Phi(phi_id) = phi.kind(self.db) else {
unreachable!("add_phi_operands called with non-Phi");
};
self.phi_operands[*phi_id] = operands;
}
UseDefMap {
all_definitions: self.all_definitions,
all_constraints: self.all_constraints,
bindings_by_use: self.bindings_by_use,
public_symbols: self.symbol_states,
definitions_by_definition: self.definitions_by_definition,
}
/// Remember a given definition for a given symbol in the given block.
fn memoize(
&mut self,
block_id: BasicBlockId,
symbol_id: ScopedSymbolId,
definition_id: Option<Definition<'db>>,
) {
self.definitions_per_block[block_id].insert(symbol_id, definition_id);
}
/// Create an incomplete Phi definition for the given block and symbol, memoize it, and return it.
fn create_incomplete_phi(
&mut self,
block_id: BasicBlockId,
symbol_id: ScopedSymbolId,
) -> Definition<'db> {
let phi_id = self.phi_operands.push(vec![]);
let definition = Definition::new(
self.db,
self.file,
self.file_scope,
symbol_id,
DefinitionKind::Phi(phi_id),
countme::Count::default(),
);
self.memoize(block_id, symbol_id, Some(definition));
definition
}
}

View File

@@ -1,309 +0,0 @@
/// Ordered set of `u32`.
///
/// Uses an inline bit-set for small values (up to 64 * B), falls back to heap allocated vector of
/// blocks for larger values.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(super) enum BitSet<const B: usize> {
/// Bit-set (in 64-bit blocks) for the first 64 * B entries.
Inline([u64; B]),
/// Overflow beyond 64 * B.
Heap(Vec<u64>),
}
impl<const B: usize> Default for BitSet<B> {
fn default() -> Self {
// B * 64 must fit in a u32, or else we have unusable bits; this assertion makes the
// truncating casts to u32 below safe. This would be better as a const assertion, but
// that's not possible on stable with const generic params. (B should never really be
// anywhere close to this large.)
assert!(B * 64 < (u32::MAX as usize));
// This implementation requires usize >= 32 bits.
static_assertions::const_assert!(usize::BITS >= 32);
Self::Inline([0; B])
}
}
impl<const B: usize> BitSet<B> {
/// Create and return a new [`BitSet`] with a single `value` inserted.
pub(super) fn with(value: u32) -> Self {
let mut bitset = Self::default();
bitset.insert(value);
bitset
}
pub(super) fn is_empty(&self) -> bool {
self.blocks().iter().all(|&b| b == 0)
}
/// Convert from Inline to Heap, if needed, and resize the Heap vector, if needed.
fn resize(&mut self, value: u32) {
let num_blocks_needed = (value / 64) + 1;
self.resize_blocks(num_blocks_needed as usize);
}
fn resize_blocks(&mut self, num_blocks_needed: usize) {
match self {
Self::Inline(blocks) => {
let mut vec = blocks.to_vec();
vec.resize(num_blocks_needed, 0);
*self = Self::Heap(vec);
}
Self::Heap(vec) => {
vec.resize(num_blocks_needed, 0);
}
}
}
fn blocks_mut(&mut self) -> &mut [u64] {
match self {
Self::Inline(blocks) => blocks.as_mut_slice(),
Self::Heap(blocks) => blocks.as_mut_slice(),
}
}
fn blocks(&self) -> &[u64] {
match self {
Self::Inline(blocks) => blocks.as_slice(),
Self::Heap(blocks) => blocks.as_slice(),
}
}
/// Insert a value into the [`BitSet`].
///
/// Return true if the value was newly inserted, false if already present.
pub(super) fn insert(&mut self, value: u32) -> bool {
let value_usize = value as usize;
let (block, index) = (value_usize / 64, value_usize % 64);
if block >= self.blocks().len() {
self.resize(value);
}
let blocks = self.blocks_mut();
let missing = blocks[block] & (1 << index) == 0;
blocks[block] |= 1 << index;
missing
}
/// Intersect in-place with another [`BitSet`].
pub(super) fn intersect(&mut self, other: &BitSet<B>) {
let my_blocks = self.blocks_mut();
let other_blocks = other.blocks();
let min_len = my_blocks.len().min(other_blocks.len());
for i in 0..min_len {
my_blocks[i] &= other_blocks[i];
}
for block in my_blocks.iter_mut().skip(min_len) {
*block = 0;
}
}
/// Union in-place with another [`BitSet`].
pub(super) fn union(&mut self, other: &BitSet<B>) {
let mut max_len = self.blocks().len();
let other_len = other.blocks().len();
if other_len > max_len {
max_len = other_len;
self.resize_blocks(max_len);
}
for (my_block, other_block) in self.blocks_mut().iter_mut().zip(other.blocks()) {
*my_block |= other_block;
}
}
/// Return an iterator over the values (in ascending order) in this [`BitSet`].
pub(super) fn iter(&self) -> BitSetIterator<'_, B> {
let blocks = self.blocks();
BitSetIterator {
blocks,
current_block_index: 0,
current_block: blocks[0],
}
}
}
/// Iterator over values in a [`BitSet`].
#[derive(Debug)]
pub(super) struct BitSetIterator<'a, const B: usize> {
/// The blocks we are iterating over.
blocks: &'a [u64],
/// The index of the block we are currently iterating through.
current_block_index: usize,
/// The block we are currently iterating through (and zeroing as we go.)
current_block: u64,
}
impl<const B: usize> Iterator for BitSetIterator<'_, B> {
type Item = u32;
fn next(&mut self) -> Option<Self::Item> {
while self.current_block == 0 {
if self.current_block_index + 1 >= self.blocks.len() {
return None;
}
self.current_block_index += 1;
self.current_block = self.blocks[self.current_block_index];
}
let lowest_bit_set = self.current_block.trailing_zeros();
// reset the lowest set bit, without a data dependency on `lowest_bit_set`
self.current_block &= self.current_block.wrapping_sub(1);
// SAFETY: `lowest_bit_set` cannot be more than 64, `current_block_index` cannot be more
// than `B - 1`, and we check above that `B * 64 < u32::MAX`. So both `64 *
// current_block_index` and the final value here must fit in u32.
#[allow(clippy::cast_possible_truncation)]
Some(lowest_bit_set + (64 * self.current_block_index) as u32)
}
}
impl<const B: usize> std::iter::FusedIterator for BitSetIterator<'_, B> {}
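The iterator above relies on a standard bit trick; here is a standalone sketch of just that trick, independent of `BitSet`: extract set-bit positions from a single 64-bit block with `trailing_zeros` and clear the lowest set bit via `x & (x - 1)`.

```rust
// Standalone illustration of the bit-iteration trick used by `BitSetIterator`.
fn set_bits(mut block: u64) -> Vec<u32> {
    let mut out = Vec::new();
    while block != 0 {
        out.push(block.trailing_zeros()); // position of the lowest set bit
        block &= block.wrapping_sub(1);   // clear that bit
    }
    out
}

fn main() {
    // bits 3, 6 and 27 set, mirroring the `iter` test below
    let block = (1u64 << 3) | (1 << 6) | (1 << 27);
    assert_eq!(set_bits(block), vec![3, 6, 27]);
}
```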
#[cfg(test)]
mod tests {
use super::BitSet;
fn assert_bitset<const B: usize>(bitset: &BitSet<B>, contents: &[u32]) {
assert_eq!(bitset.iter().collect::<Vec<_>>(), contents);
}
#[test]
fn iter() {
let mut b = BitSet::<1>::with(3);
b.insert(27);
b.insert(6);
assert!(matches!(b, BitSet::Inline(_)));
assert_bitset(&b, &[3, 6, 27]);
}
#[test]
fn iter_overflow() {
let mut b = BitSet::<1>::with(140);
b.insert(100);
b.insert(129);
assert!(matches!(b, BitSet::Heap(_)));
assert_bitset(&b, &[100, 129, 140]);
}
#[test]
fn intersect() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(5);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_mixed_1() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(5);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_mixed_2() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(89);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_heap() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(90);
b1.intersect(&b2);
assert_bitset(&b1, &[4]);
}
#[test]
fn intersect_heap_2() {
let mut b1 = BitSet::<1>::with(89);
let mut b2 = BitSet::<1>::with(89);
b1.insert(91);
b2.insert(90);
b1.intersect(&b2);
assert_bitset(&b1, &[89]);
}
#[test]
fn union() {
let mut b1 = BitSet::<1>::with(2);
let b2 = BitSet::<1>::with(4);
b1.union(&b2);
assert_bitset(&b1, &[2, 4]);
}
#[test]
fn union_mixed_1() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(5);
b1.union(&b2);
assert_bitset(&b1, &[4, 5, 89]);
}
#[test]
fn union_mixed_2() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(23);
b2.insert(89);
b1.union(&b2);
assert_bitset(&b1, &[4, 23, 89]);
}
#[test]
fn union_heap() {
let mut b1 = BitSet::<1>::with(4);
let mut b2 = BitSet::<1>::with(4);
b1.insert(89);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[4, 89, 90]);
}
#[test]
fn union_heap_2() {
let mut b1 = BitSet::<1>::with(89);
let mut b2 = BitSet::<1>::with(89);
b1.insert(91);
b2.insert(90);
b1.union(&b2);
assert_bitset(&b1, &[89, 90, 91]);
}
#[test]
fn multiple_blocks() {
let mut b = BitSet::<2>::with(120);
b.insert(45);
assert!(matches!(b, BitSet::Inline(_)));
assert_bitset(&b, &[45, 120]);
}
#[test]
fn empty() {
let b = BitSet::<1>::default();
assert!(b.is_empty());
}
}

View File

@@ -1,588 +0,0 @@
//! Track live bindings per symbol, applicable constraints per binding, and live declarations.
//!
//! These data structures operate entirely on scope-local newtype-indices for definitions and
//! constraints, referring to their location in the `all_definitions` and `all_constraints`
//! indexvecs in [`super::UseDefMapBuilder`].
//!
//! We need to track arbitrary associations between bindings and constraints, not just a single set
//! of currently dominating constraints (where "dominating" means "control flow must have passed
//! through it to reach this point"), because we can have dominating constraints that apply to some
//! bindings but not others, as in this code:
//!
//! ```python
//! x = 1 if flag else None
//! if x is not None:
//! if flag2:
//! x = 2 if flag else None
//! x
//! ```
//!
//! The `x is not None` constraint dominates the final use of `x`, but it applies only to the first
//! binding of `x`, not the second, so `None` is a possible value for `x`.
//!
//! And we can't just track, for each binding, an index into a list of dominating constraints,
//! either, because we can have bindings which are still visible, but subject to constraints that
//! are no longer dominating, as in this code:
//!
//! ```python
//! x = 0
//! if flag1:
//! x = 1 if flag2 else None
//! assert x is not None
//! x
//! ```
//!
//! From the point of view of the final use of `x`, the `x is not None` constraint no longer
//! dominates, but it does dominate the `x = 1 if flag2 else None` binding, so we have to keep
//! track of that.
//!
//! The data structures used here ([`BitSet`] and [`smallvec::SmallVec`]) optimize for keeping all
//! data inline (avoiding lots of scattered allocations) in small-to-medium cases, and falling back
//! to heap allocation to be able to scale to arbitrary numbers of live bindings and constraints
//! when needed.
//!
//! Tracking live declarations is simpler, since constraints are not involved, but otherwise very
//! similar to tracking live bindings.
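A minimal standalone sketch of the merge rule described above, using plain `BTreeMap`/`BTreeSet` with integer ids rather than the crate's bit-set representation: each live binding carries its own set of dominating constraints, and when two control-flow paths merge, a binding live on both paths keeps only the constraints common to both.

```rust
use std::collections::{BTreeMap, BTreeSet};

type Binding = u32;
type Constraint = u32;
type Live = BTreeMap<Binding, BTreeSet<Constraint>>;

fn merge(a: &Live, b: &Live) -> Live {
    let mut out = Live::new();
    for (&binding, ca) in a {
        match b.get(&binding) {
            // live on both paths: intersect the constraint sets
            Some(cb) => {
                out.insert(binding, ca & cb);
            }
            // live on one path only: keep its constraints as-is
            None => {
                out.insert(binding, ca.clone());
            }
        }
    }
    for (&binding, cb) in b {
        out.entry(binding).or_insert_with(|| cb.clone());
    }
    out
}

fn main() {
    // path 1: binding 0 constrained by constraint 7; path 2: binding 0 unconstrained
    let path1: Live = BTreeMap::from([(0, BTreeSet::from([7]))]);
    let path2: Live = BTreeMap::from([(0, BTreeSet::new()), (1, BTreeSet::from([7]))]);
    let merged = merge(&path1, &path2);
    assert!(merged[&0].is_empty()); // constraint 7 no longer dominates binding 0
    assert_eq!(merged[&1], BTreeSet::from([7])); // but it still applies to binding 1
    println!("{merged:?}");
}
```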
use super::bitset::{BitSet, BitSetIterator};
use ruff_index::newtype_index;
use smallvec::SmallVec;
/// A newtype-index for a definition in a particular scope.
#[newtype_index]
pub(super) struct ScopedDefinitionId;
/// A newtype-index for a constraint expression in a particular scope.
#[newtype_index]
pub(super) struct ScopedConstraintId;
/// Can reference this * 64 total definitions inline; more will fall back to the heap.
const INLINE_BINDING_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live bindings of a symbol in a scope.
type Bindings = BitSet<INLINE_BINDING_BLOCKS>;
type BindingsIterator<'a> = BitSetIterator<'a, INLINE_BINDING_BLOCKS>;
/// Can reference this * 64 total declarations inline; more will fall back to the heap.
const INLINE_DECLARATION_BLOCKS: usize = 3;
/// A [`BitSet`] of [`ScopedDefinitionId`], representing live declarations of a symbol in a scope.
type Declarations = BitSet<INLINE_DECLARATION_BLOCKS>;
type DeclarationsIterator<'a> = BitSetIterator<'a, INLINE_DECLARATION_BLOCKS>;
/// Can reference this * 64 total constraints inline; more will fall back to the heap.
const INLINE_CONSTRAINT_BLOCKS: usize = 2;
/// Can keep inline this many live bindings per symbol at a given time; more will go to heap.
const INLINE_BINDINGS_PER_SYMBOL: usize = 4;
/// One [`BitSet`] of applicable [`ScopedConstraintId`] per live binding.
type InlineConstraintArray = [BitSet<INLINE_CONSTRAINT_BLOCKS>; INLINE_BINDINGS_PER_SYMBOL];
type Constraints = SmallVec<InlineConstraintArray>;
type ConstraintsIterator<'a> = std::slice::Iter<'a, BitSet<INLINE_CONSTRAINT_BLOCKS>>;
type ConstraintsIntoIterator = smallvec::IntoIter<InlineConstraintArray>;
/// Live declarations for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolDeclarations {
/// [`BitSet`]: which declarations (as [`ScopedDefinitionId`]) can reach the current location?
live_declarations: Declarations,
/// Could the symbol be un-declared at this point?
may_be_undeclared: bool,
}
impl SymbolDeclarations {
fn undeclared() -> Self {
Self {
live_declarations: Declarations::default(),
may_be_undeclared: true,
}
}
/// Record a newly-encountered declaration for this symbol.
fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.live_declarations = Declarations::with(declaration_id.into());
self.may_be_undeclared = false;
}
/// Add undeclared as a possibility for this symbol.
fn set_may_be_undeclared(&mut self) {
self.may_be_undeclared = true;
}
/// Return an iterator over live declarations for this symbol.
pub(super) fn iter(&self) -> DeclarationIdIterator {
DeclarationIdIterator {
inner: self.live_declarations.iter(),
}
}
pub(super) fn is_empty(&self) -> bool {
self.live_declarations.is_empty()
}
pub(super) fn may_be_undeclared(&self) -> bool {
self.may_be_undeclared
}
}
/// Live bindings and narrowing constraints for a single symbol at some point in control flow.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolBindings {
/// [`BitSet`]: which bindings (as [`ScopedDefinitionId`]) can reach the current location?
live_bindings: Bindings,
/// For each live binding, which [`ScopedConstraintId`] apply?
///
/// This is a [`smallvec::SmallVec`] which should always have one [`BitSet`] of constraints per
/// binding in `live_bindings`.
constraints: Constraints,
/// Could the symbol be unbound at this point?
may_be_unbound: bool,
}
impl SymbolBindings {
fn unbound() -> Self {
Self {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: true,
}
}
/// Add Unbound as a possibility for this symbol.
fn set_may_be_unbound(&mut self) {
self.may_be_unbound = true;
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
// The new binding replaces all previous live bindings in this path, and has no
// constraints.
self.live_bindings = Bindings::with(binding_id.into());
self.constraints = Constraints::with_capacity(1);
self.constraints.push(BitSet::default());
self.may_be_unbound = false;
}
/// Add given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
for bitset in &mut self.constraints {
bitset.insert(constraint_id.into());
}
}
/// Iterate over currently live bindings for this symbol.
pub(super) fn iter(&self) -> BindingIdWithConstraintsIterator {
BindingIdWithConstraintsIterator {
definitions: self.live_bindings.iter(),
constraints: self.constraints.iter(),
}
}
pub(super) fn may_be_unbound(&self) -> bool {
self.may_be_unbound
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct SymbolState {
declarations: SymbolDeclarations,
bindings: SymbolBindings,
}
impl SymbolState {
/// Return a new [`SymbolState`] representing an unbound, undeclared symbol.
pub(super) fn undefined() -> Self {
Self {
declarations: SymbolDeclarations::undeclared(),
bindings: SymbolBindings::unbound(),
}
}
/// Add Unbound as a possibility for this symbol.
pub(super) fn set_may_be_unbound(&mut self) {
self.bindings.set_may_be_unbound();
}
/// Record a newly-encountered binding for this symbol.
pub(super) fn record_binding(&mut self, binding_id: ScopedDefinitionId) {
self.bindings.record_binding(binding_id);
}
/// Add the given constraint to all live bindings.
pub(super) fn record_constraint(&mut self, constraint_id: ScopedConstraintId) {
self.bindings.record_constraint(constraint_id);
}
/// Add undeclared as a possibility for this symbol.
pub(super) fn set_may_be_undeclared(&mut self) {
self.declarations.set_may_be_undeclared();
}
/// Record a newly-encountered declaration of this symbol.
pub(super) fn record_declaration(&mut self, declaration_id: ScopedDefinitionId) {
self.declarations.record_declaration(declaration_id);
}
/// Merge another [`SymbolState`] into this one.
pub(super) fn merge(&mut self, b: SymbolState) {
let mut a = Self {
bindings: SymbolBindings {
live_bindings: Bindings::default(),
constraints: Constraints::default(),
may_be_unbound: self.bindings.may_be_unbound || b.bindings.may_be_unbound,
},
declarations: SymbolDeclarations {
live_declarations: self.declarations.live_declarations.clone(),
may_be_undeclared: self.declarations.may_be_undeclared
|| b.declarations.may_be_undeclared,
},
};
std::mem::swap(&mut a, self);
self.declarations
.live_declarations
.union(&b.declarations.live_declarations);
let mut a_defs_iter = a.bindings.live_bindings.iter();
let mut b_defs_iter = b.bindings.live_bindings.iter();
let mut a_constraints_iter = a.bindings.constraints.into_iter();
let mut b_constraints_iter = b.bindings.constraints.into_iter();
let mut opt_a_def: Option<u32> = a_defs_iter.next();
let mut opt_b_def: Option<u32> = b_defs_iter.next();
// Iterate through the definitions from `a` and `b`, always processing the lower definition
// ID first, and pushing each definition onto the merged `SymbolState` with its
// constraints. If a definition is found in both `a` and `b`, push it with the intersection
// of the constraints from the two paths; a constraint that applies from only one possible
// path is irrelevant.
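// Illustrative example (hypothetical IDs): merging live bindings {0: {c0}, 2: {}} from one
// path with {0: {c1}, 1: {c2}} from the other yields {0: {c0} ∩ {c1}, 1: {c2}, 2: {}}.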
// Helper to push `def`, with constraints in `constraints_iter`, onto `self`.
let push = |def, constraints_iter: &mut ConstraintsIntoIterator, merged: &mut Self| {
merged.bindings.live_bindings.insert(def);
// SAFETY: we only ever create SymbolState with either no definitions and no constraint
// bitsets (`unbound()`) or one definition and one constraint bitset (`record_binding()`), and
// `merge()` always pushes one definition and one constraint bitset together (just
// below), so the number of definitions and the number of constraint bitsets can never
// get out of sync.
let constraints = constraints_iter
.next()
.expect("definitions and constraints length mismatch");
merged.bindings.constraints.push(constraints);
};
loop {
match (opt_a_def, opt_b_def) {
(Some(a_def), Some(b_def)) => match a_def.cmp(&b_def) {
std::cmp::Ordering::Less => {
// Next definition ID is only in `a`, push it to `self` and advance `a`.
push(a_def, &mut a_constraints_iter, self);
opt_a_def = a_defs_iter.next();
}
std::cmp::Ordering::Greater => {
// Next definition ID is only in `b`, push it to `self` and advance `b`.
push(b_def, &mut b_constraints_iter, self);
opt_b_def = b_defs_iter.next();
}
std::cmp::Ordering::Equal => {
// Next definition is in both; push to `self` and intersect constraints.
push(a_def, &mut b_constraints_iter, self);
// SAFETY: we only ever create SymbolState with either no definitions and
// no constraint bitsets (`unbound()`) or one definition and one constraint
// bitset (`record_binding()`), and `merge()` always pushes one definition and one
// constraint bitset together (just below), so the number of definitions
// and the number of constraint bitsets can never get out of sync.
let a_constraints = a_constraints_iter
.next()
.expect("definitions and constraints length mismatch");
// If the same definition is visible through both paths, any constraint
// that applies on only one path is irrelevant to the resulting type from
// unioning the two paths, so we intersect the constraints.
self.bindings
.constraints
.last_mut()
.unwrap()
.intersect(&a_constraints);
opt_a_def = a_defs_iter.next();
opt_b_def = b_defs_iter.next();
}
},
(Some(a_def), None) => {
// We've exhausted `b`, just push the def from `a` and move on to the next.
push(a_def, &mut a_constraints_iter, self);
opt_a_def = a_defs_iter.next();
}
(None, Some(b_def)) => {
// We've exhausted `a`, just push the def from `b` and move on to the next.
push(b_def, &mut b_constraints_iter, self);
opt_b_def = b_defs_iter.next();
}
(None, None) => break,
}
}
}
pub(super) fn bindings(&self) -> &SymbolBindings {
&self.bindings
}
pub(super) fn declarations(&self) -> &SymbolDeclarations {
&self.declarations
}
/// Could the symbol be unbound?
pub(super) fn may_be_unbound(&self) -> bool {
self.bindings.may_be_unbound()
}
}
/// The default state of a symbol, if we've seen no definitions of it, is undefined (that is,
/// both unbound and undeclared).
impl Default for SymbolState {
fn default() -> Self {
SymbolState::undefined()
}
}
/// A single binding (as [`ScopedDefinitionId`]) with an iterator of its applicable
/// [`ScopedConstraintId`].
#[derive(Debug)]
pub(super) struct BindingIdWithConstraints<'a> {
pub(super) definition: ScopedDefinitionId,
pub(super) constraint_ids: ConstraintIdIterator<'a>,
}
#[derive(Debug)]
pub(super) struct BindingIdWithConstraintsIterator<'a> {
definitions: BindingsIterator<'a>,
constraints: ConstraintsIterator<'a>,
}
impl<'a> Iterator for BindingIdWithConstraintsIterator<'a> {
type Item = BindingIdWithConstraints<'a>;
fn next(&mut self) -> Option<Self::Item> {
match (self.definitions.next(), self.constraints.next()) {
(None, None) => None,
(Some(def), Some(constraints)) => Some(BindingIdWithConstraints {
definition: ScopedDefinitionId::from_u32(def),
constraint_ids: ConstraintIdIterator {
wrapped: constraints.iter(),
},
}),
// SAFETY: see above.
_ => unreachable!("definitions and constraints length mismatch"),
}
}
}
impl std::iter::FusedIterator for BindingIdWithConstraintsIterator<'_> {}
#[derive(Debug)]
pub(super) struct ConstraintIdIterator<'a> {
wrapped: BitSetIterator<'a, INLINE_CONSTRAINT_BLOCKS>,
}
impl Iterator for ConstraintIdIterator<'_> {
type Item = ScopedConstraintId;
fn next(&mut self) -> Option<Self::Item> {
self.wrapped.next().map(ScopedConstraintId::from_u32)
}
}
impl std::iter::FusedIterator for ConstraintIdIterator<'_> {}
#[derive(Debug)]
pub(super) struct DeclarationIdIterator<'a> {
inner: DeclarationsIterator<'a>,
}
impl<'a> Iterator for DeclarationIdIterator<'a> {
type Item = ScopedDefinitionId;
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().map(ScopedDefinitionId::from_u32)
}
}
impl std::iter::FusedIterator for DeclarationIdIterator<'_> {}
#[cfg(test)]
mod tests {
use super::{ScopedConstraintId, ScopedDefinitionId, SymbolState};
fn assert_bindings(symbol: &SymbolState, may_be_unbound: bool, expected: &[&str]) {
assert_eq!(symbol.may_be_unbound(), may_be_unbound);
let actual = symbol
.bindings()
.iter()
.map(|def_id_with_constraints| {
format!(
"{}<{}>",
def_id_with_constraints.definition.as_u32(),
def_id_with_constraints
.constraint_ids
.map(ScopedConstraintId::as_u32)
.map(|idx| idx.to_string())
.collect::<Vec<_>>()
.join(", ")
)
})
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
pub(crate) fn assert_declarations(
symbol: &SymbolState,
may_be_undeclared: bool,
expected: &[u32],
) {
assert_eq!(symbol.declarations.may_be_undeclared(), may_be_undeclared);
let actual = symbol
.declarations()
.iter()
.map(ScopedDefinitionId::as_u32)
.collect::<Vec<_>>();
assert_eq!(actual, expected);
}
#[test]
fn unbound() {
let sym = SymbolState::undefined();
assert_bindings(&sym, true, &[]);
}
#[test]
fn with() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
assert_bindings(&sym, false, &["0<>"]);
}
#[test]
fn set_may_be_unbound() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.set_may_be_unbound();
assert_bindings(&sym, true, &["0<>"]);
}
#[test]
fn record_constraint() {
let mut sym = SymbolState::undefined();
sym.record_binding(ScopedDefinitionId::from_u32(0));
sym.record_constraint(ScopedConstraintId::from_u32(0));
assert_bindings(&sym, false, &["0<0>"]);
}
#[test]
fn merge() {
// merging the same definition with the same constraint keeps the constraint
let mut sym0a = SymbolState::undefined();
sym0a.record_binding(ScopedDefinitionId::from_u32(0));
sym0a.record_constraint(ScopedConstraintId::from_u32(0));
let mut sym0b = SymbolState::undefined();
sym0b.record_binding(ScopedDefinitionId::from_u32(0));
sym0b.record_constraint(ScopedConstraintId::from_u32(0));
sym0a.merge(sym0b);
let mut sym0 = sym0a;
assert_bindings(&sym0, false, &["0<0>"]);
// merging the same definition with differing constraints drops all constraints
let mut sym1a = SymbolState::undefined();
sym1a.record_binding(ScopedDefinitionId::from_u32(1));
sym1a.record_constraint(ScopedConstraintId::from_u32(1));
let mut sym1b = SymbolState::undefined();
sym1b.record_binding(ScopedDefinitionId::from_u32(1));
sym1b.record_constraint(ScopedConstraintId::from_u32(2));
sym1a.merge(sym1b);
let sym1 = sym1a;
assert_bindings(&sym1, false, &["1<>"]);
// merging a constrained definition with unbound keeps both
let mut sym2a = SymbolState::undefined();
sym2a.record_binding(ScopedDefinitionId::from_u32(2));
sym2a.record_constraint(ScopedConstraintId::from_u32(3));
let sym2b = SymbolState::undefined();
sym2a.merge(sym2b);
let sym2 = sym2a;
assert_bindings(&sym2, true, &["2<3>"]);
// merging different definitions keeps them each with their existing constraints
sym0.merge(sym2);
let sym = sym0;
assert_bindings(&sym, true, &["0<0>", "2<3>"]);
}
#[test]
fn no_declaration() {
let sym = SymbolState::undefined();
assert_declarations(&sym, true, &[]);
}
#[test]
fn record_declaration() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
assert_declarations(&sym, false, &[1]);
}
#[test]
fn record_declaration_override() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
sym.record_declaration(ScopedDefinitionId::from_u32(2));
assert_declarations(&sym, false, &[2]);
}
#[test]
fn record_declaration_merge() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let mut sym2 = SymbolState::undefined();
sym2.record_declaration(ScopedDefinitionId::from_u32(2));
sym.merge(sym2);
assert_declarations(&sym, false, &[1, 2]);
}
#[test]
fn record_declaration_merge_partial_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(1));
let sym2 = SymbolState::undefined();
sym.merge(sym2);
assert_declarations(&sym, true, &[1]);
}
#[test]
fn set_may_be_undeclared() {
let mut sym = SymbolState::undefined();
sym.record_declaration(ScopedDefinitionId::from_u32(0));
sym.set_may_be_undeclared();
assert_declarations(&sym, true, &[0]);
}
}

View File

@@ -1,14 +1,14 @@
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, ExpressionRef};
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
use ruff_source_file::LineIndex;
use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::semantic_index;
use crate::types::{binding_ty, global_symbol_ty, infer_scope_types, Type};
use crate::types::{definition_ty, global_symbol_ty_by_name, infer_scope_types, Type};
use crate::Db;
pub struct SemanticModel<'db> {
@@ -40,7 +40,7 @@ impl<'db> SemanticModel<'db> {
}
pub fn global_symbol_ty(&self, module: &Module, symbol_name: &str) -> Type<'db> {
global_symbol_ty(self.db, module.file(), symbol_name)
global_symbol_ty_by_name(self.db, module.file(), symbol_name)
}
}
@@ -147,24 +147,29 @@ impl HasTy for ast::Expr {
}
}
macro_rules! impl_binding_has_ty {
($ty: ty) => {
impl HasTy for $ty {
#[inline]
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let binding = index.definition(self);
binding_ty(model.db, binding)
}
}
};
impl HasTy for ast::StmtFunctionDef {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);
definition_ty(model.db, Some(definition))
}
}
impl_binding_has_ty!(ast::StmtFunctionDef);
impl_binding_has_ty!(ast::StmtClassDef);
impl_binding_has_ty!(ast::Alias);
impl_binding_has_ty!(ast::Parameter);
impl_binding_has_ty!(ast::ParameterWithDefault);
impl HasTy for StmtClassDef {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);
definition_ty(model.db, Some(definition))
}
}
impl HasTy for ast::Alias {
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let definition = index.definition(self);
definition_ty(model.db, Some(definition))
}
}
#[cfg(test)]
mod tests {
@@ -184,9 +189,14 @@ mod tests {
Program::from_settings(
&db,
&ProgramSettings {
ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(SystemPathBuf::from("/src")),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: SystemPathBuf::from("/src"),
site_packages: vec![],
custom_typeshed: None,
},
},
)?;

View File

@@ -1,77 +0,0 @@
use crate::module_name::ModuleName;
use crate::module_resolver::resolve_module;
use crate::semantic_index::global_scope;
use crate::semantic_index::symbol::ScopeId;
use crate::types::{global_symbol_ty, Type};
use crate::Db;
/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CoreStdlibModule {
Builtins,
Types,
Typeshed,
}
impl CoreStdlibModule {
fn name(self) -> ModuleName {
let module_name = match self {
Self::Builtins => "builtins",
Self::Types => "types",
Self::Typeshed => "_typeshed",
};
ModuleName::new_static(module_name)
.unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
}
}
/// Lookup the type of `symbol` in a given core module
///
/// Returns `Unbound` if the given core module cannot be resolved for some reason
fn core_module_symbol_ty<'db>(
db: &'db dyn Db,
core_module: CoreStdlibModule,
symbol: &str,
) -> Type<'db> {
resolve_module(db, core_module.name())
.map(|module| global_symbol_ty(db, module.file(), symbol))
.unwrap_or(Type::Unbound)
}
/// Lookup the type of `symbol` in the builtins namespace.
///
/// Returns `Unbound` if the `builtins` module isn't available for some reason.
#[inline]
pub(crate) fn builtins_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Builtins, symbol)
}
/// Lookup the type of `symbol` in the `types` module namespace.
///
/// Returns `Unbound` if the `types` module isn't available for some reason.
#[inline]
pub(crate) fn types_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Types, symbol)
}
/// Lookup the type of `symbol` in the `_typeshed` module namespace.
///
/// Returns `Unbound` if the `_typeshed` module isn't available for some reason.
#[inline]
pub(crate) fn typeshed_symbol_ty<'db>(db: &'db dyn Db, symbol: &str) -> Type<'db> {
core_module_symbol_ty(db, CoreStdlibModule::Typeshed, symbol)
}
/// Get the scope of a core stdlib module.
///
/// Can return `None` if a custom typeshed is used that is missing the core module in question.
fn core_module_scope(db: &dyn Db, core_module: CoreStdlibModule) -> Option<ScopeId<'_>> {
resolve_module(db, core_module.name()).map(|module| global_scope(db, module.file()))
}
/// Get the `builtins` module scope.
///
/// Can return `None` if a custom typeshed is used that is missing `builtins.pyi`.
pub(crate) fn builtins_module_scope(db: &dyn Db) -> Option<ScopeId<'_>> {
core_module_scope(db, CoreStdlibModule::Builtins)
}

View File

@@ -1,234 +1,80 @@
use infer::TypeInferenceContext;
use ruff_db::files::File;
use ruff_python_ast as ast;
use ruff_python_ast::name::Name;
use crate::module_resolver::file_to_module;
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::definition::{Definition, DefinitionKind};
use crate::builtins::builtins_scope;
use crate::semantic_index::definition::Definition;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{
global_scope, semantic_index, symbol_table, use_def_map, BindingWithConstraints,
BindingWithConstraintsIterator, DeclarationsIterator,
};
use crate::stdlib::{builtins_symbol_ty, types_symbol_ty, typeshed_symbol_ty};
use crate::types::narrow::narrowing_constraint;
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
use crate::{Db, FxOrderSet};
pub(crate) use self::builder::{IntersectionBuilder, UnionBuilder};
pub(crate) use self::diagnostic::TypeCheckDiagnostics;
pub(crate) use self::display::TypeArrayDisplay;
pub(crate) use self::infer::{
infer_deferred_types, infer_definition_types, infer_expression_types, infer_scope_types,
};
mod builder;
mod diagnostic;
mod display;
mod infer;
mod narrow;
pub fn check_types(db: &dyn Db, file: File) -> TypeCheckDiagnostics {
let _span = tracing::trace_span!("check_types", file=?file.path(db)).entered();
let index = semantic_index(db, file);
let mut diagnostics = TypeCheckDiagnostics::new();
for scope_id in index.scope_ids() {
let result = infer_scope_types(db, scope_id);
diagnostics.extend(result.diagnostics());
}
diagnostics
}
pub(crate) use self::builder::UnionBuilder;
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};
/// Infer the public type of a symbol (its type as seen from outside its scope).
fn symbol_ty_by_id<'db>(db: &'db dyn Db, scope: ScopeId<'db>, symbol: ScopedSymbolId) -> Type<'db> {
let _span = tracing::trace_span!("symbol_ty_by_id", ?symbol).entered();
pub(crate) fn symbol_ty<'db>(
db: &'db dyn Db,
scope: ScopeId<'db>,
symbol: ScopedSymbolId,
) -> Type<'db> {
let _span = tracing::trace_span!("symbol_ty", ?symbol).entered();
let use_def = use_def_map(db, scope);
// If the symbol is declared, the public type is based on declarations; otherwise, it's based
// on inference from bindings.
if use_def.has_public_declarations(symbol) {
let declarations = use_def.public_declarations(symbol);
// Intentionally ignore conflicting declared types; that's not our problem, it's the
// problem of the module we are importing from.
declarations_ty(db, declarations).unwrap_or_else(|(ty, _)| ty)
} else {
bindings_ty(
db,
use_def.public_bindings(symbol),
use_def
.public_may_be_unbound(symbol)
.then_some(Type::Unbound),
)
}
definition_ty(db, use_def.public_definition(symbol))
}
/// Shorthand for `symbol_ty` that takes a symbol name instead of an ID.
fn symbol_ty<'db>(db: &'db dyn Db, scope: ScopeId<'db>, name: &str) -> Type<'db> {
pub(crate) fn symbol_ty_by_name<'db>(
db: &'db dyn Db,
scope: ScopeId<'db>,
name: &str,
) -> Type<'db> {
let table = symbol_table(db, scope);
table
.symbol_id_by_name(name)
.map(|symbol| symbol_ty_by_id(db, scope, symbol))
.map(|symbol| symbol_ty(db, scope, symbol))
.unwrap_or(Type::Unbound)
}
/// Shorthand for `symbol_ty` that looks up a module-global symbol by name in a file.
pub(crate) fn global_symbol_ty<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
symbol_ty(db, global_scope(db, file), name)
pub(crate) fn global_symbol_ty_by_name<'db>(db: &'db dyn Db, file: File, name: &str) -> Type<'db> {
symbol_ty_by_name(db, global_scope(db, file), name)
}
/// Infer the type of a binding.
pub(crate) fn binding_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
let inference = infer_definition_types(db, definition);
inference.binding_ty(definition)
}
/// Infer the type of a declaration.
fn declaration_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
let inference = infer_definition_types(db, definition);
inference.declaration_ty(definition)
}
/// Infer the type of a (possibly deferred) sub-expression of a [`Definition`].
/// Shorthand for `symbol_ty` that looks up a symbol in the builtins.
///
/// ## Panics
/// If the given expression is not a sub-expression of the given [`Definition`].
fn definition_expression_ty<'db>(
/// Returns `Unbound` if the builtins module isn't available for some reason.
pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Type<'db> {
builtins_scope(db)
.map(|builtins| symbol_ty_by_name(db, builtins, name))
.unwrap_or(Type::Unbound)
}
/// Infer the type of a [`Definition`].
pub(crate) fn definition_ty<'db>(
db: &'db dyn Db,
definition: Definition<'db>,
expression: &ast::Expr,
definition: Option<Definition<'db>>,
) -> Type<'db> {
let expr_id = expression.scoped_ast_id(db, definition.scope(db));
let inference = infer_definition_types(db, definition);
if let Some(ty) = inference.try_expression_ty(expr_id) {
ty
} else {
infer_deferred_types(db, definition).expression_ty(expr_id)
}
}
/// Infer the combined type of an iterator of bindings, plus one optional "unbound type".
///
/// Will return a union if there is more than one binding, or at least one plus an unbound
/// type.
///
/// The "unbound type" represents the type in case control flow may not have passed through any
/// bindings in this scope. If this isn't possible, then it will be `None`. If it is possible, and
/// the result in that case should be Unbound (e.g. an unbound function local), then it will be
/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
/// implicit global lookup), then `unbound_type` will be `Some(the_global_symbol_type)`.
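///
/// For example (hypothetical types): two live bindings inferred as `int` and `str`, plus
/// `unbound_ty = Some(Type::Unbound)`, would produce the union `Unbound | int | str`.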
///
/// # Panics
/// Will panic if called with zero bindings and no `unbound_ty`. This is a logic error, as any
/// symbol with zero visible bindings clearly may be unbound, and the caller should provide an
/// `unbound_ty`.
fn bindings_ty<'db>(
db: &'db dyn Db,
bindings_with_constraints: BindingWithConstraintsIterator<'_, 'db>,
unbound_ty: Option<Type<'db>>,
) -> Type<'db> {
let def_types = bindings_with_constraints.map(
|BindingWithConstraints {
binding,
constraints,
}| {
let mut constraint_tys =
constraints.filter_map(|constraint| narrowing_constraint(db, constraint, binding));
let binding_ty = binding_ty(db, binding);
if let Some(first_constraint_ty) = constraint_tys.next() {
let mut builder = IntersectionBuilder::new(db);
builder = builder
.add_positive(binding_ty)
.add_positive(first_constraint_ty);
for constraint_ty in constraint_tys {
builder = builder.add_positive(constraint_ty);
}
builder.build()
} else {
binding_ty
}
},
);
let mut all_types = unbound_ty.into_iter().chain(def_types);
let first = all_types
.next()
.expect("bindings_ty should never be called with zero definitions and no unbound_ty.");
if let Some(second) = all_types.next() {
UnionType::from_elements(db, [first, second].into_iter().chain(all_types))
} else {
first
}
}
/// The result of looking up a declared type from declarations; see [`declarations_ty`].
type DeclaredTypeResult<'db> = Result<Type<'db>, (Type<'db>, Box<[Type<'db>]>)>;
/// Build a declared type from a [`DeclarationsIterator`].
///
/// If there is only one declaration, or all declarations declare the same type, returns
/// `Ok(declared_type)`. If there are conflicting declarations, returns
/// `Err((union_of_declared_types, conflicting_declared_types))`.
///
/// If undeclared is a possibility, `Unknown` type will be part of the return type (and may
/// conflict with other declarations).
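///
/// For example (hypothetical types): declarations `int` and `int` yield `Ok(int)`, while
/// `int` and `str` yield `Err((int | str, [int, str]))`.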
///
/// # Panics
/// Will panic if there are no declarations and no possibility of undeclared. This is a logic
/// error, as any symbol with zero live declarations clearly must be undeclared.
fn declarations_ty<'db>(
db: &'db dyn Db,
declarations: DeclarationsIterator<'_, 'db>,
) -> DeclaredTypeResult<'db> {
let may_be_undeclared = declarations.may_be_undeclared();
let decl_types = declarations.map(|declaration| declaration_ty(db, declaration));
let mut all_types = (if may_be_undeclared {
Some(Type::Unknown)
} else {
None
})
.into_iter()
.chain(decl_types);
let first = all_types.next().expect(
"declarations_ty must not be called with zero declarations and no may-be-undeclared.",
);
let mut conflicting: Vec<Type<'db>> = vec![];
let declared_ty = if let Some(second) = all_types.next() {
let mut builder = UnionBuilder::new(db).add(first);
for other in [second].into_iter().chain(all_types) {
if !first.is_equivalent_to(db, other) {
conflicting.push(other);
}
builder = builder.add(other);
match definition {
Some(definition) => {
let inference = infer_definition_types(db, definition);
inference.definition_ty(definition)
}
builder.build()
} else {
first
};
if conflicting.is_empty() {
DeclaredTypeResult::Ok(declared_ty)
} else {
DeclaredTypeResult::Err((
declared_ty,
[first].into_iter().chain(conflicting).collect(),
))
None => Type::Unbound,
}
}
/// Unique ID for a type.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub enum Type<'db> {
/// the dynamic type: a statically-unknown set of values
Any,
/// the empty set of values
Never,
/// unknown type (either no annotation, or some kind of type error)
/// unknown type (no annotation)
/// equivalent to Any, or possibly to object in strict mode
Unknown,
/// name does not exist or is not bound to any value (this represents an error, but with some
@@ -252,16 +98,6 @@ pub enum Type<'db> {
IntLiteral(i64),
/// A boolean literal, either `True` or `False`.
BooleanLiteral(bool),
/// A string literal
StringLiteral(StringLiteralType<'db>),
/// A string known to originate only from literal values, but whose value is not known (unlike
/// `StringLiteral` above).
LiteralString,
/// A bytes literal
BytesLiteral(BytesLiteralType<'db>),
/// A heterogeneous tuple type, with elements of the given types in source order.
// TODO: Support variable length homogeneous tuple type like `tuple[int, ...]`.
Tuple(TupleType<'db>),
// TODO protocols, callable types, overloads, generics, type vars
}
@@ -270,88 +106,10 @@ impl<'db> Type<'db> {
matches!(self, Type::Unbound)
}
pub const fn is_never(&self) -> bool {
matches!(self, Type::Never)
}
pub const fn into_class_type(self) -> Option<ClassType<'db>> {
match self {
Type::Class(class_type) => Some(class_type),
_ => None,
}
}
pub fn expect_class(self) -> ClassType<'db> {
self.into_class_type()
.expect("Expected a Type::Class variant")
}
pub const fn into_module_type(self) -> Option<File> {
match self {
Type::Module(file) => Some(file),
_ => None,
}
}
pub fn expect_module(self) -> File {
self.into_module_type()
.expect("Expected a Type::Module variant")
}
pub const fn into_union_type(self) -> Option<UnionType<'db>> {
match self {
Type::Union(union_type) => Some(union_type),
_ => None,
}
}
pub fn expect_union(self) -> UnionType<'db> {
self.into_union_type()
.expect("Expected a Type::Union variant")
}
pub const fn into_intersection_type(self) -> Option<IntersectionType<'db>> {
match self {
Type::Intersection(intersection_type) => Some(intersection_type),
_ => None,
}
}
pub fn expect_intersection(self) -> IntersectionType<'db> {
self.into_intersection_type()
.expect("Expected a Type::Intersection variant")
}
pub const fn into_function_type(self) -> Option<FunctionType<'db>> {
match self {
Type::Function(function_type) => Some(function_type),
_ => None,
}
}
pub fn expect_function(self) -> FunctionType<'db> {
self.into_function_type()
.expect("Expected a Type::Function variant")
}
pub const fn into_int_literal_type(self) -> Option<i64> {
match self {
Type::IntLiteral(value) => Some(value),
_ => None,
}
}
pub fn expect_int_literal(self) -> i64 {
self.into_int_literal_type()
.expect("Expected a Type::IntLiteral variant")
}
pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
match self {
Type::Unbound => true,
Type::Union(union) => union.contains(db, Type::Unbound),
// Unbound can't appear in an intersection, because an intersection with Unbound
// simplifies to just Unbound.
_ => false,
}
}
@@ -360,350 +118,94 @@ impl<'db> Type<'db> {
pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
match self {
Type::Unbound => replacement,
Type::Union(union) => {
union.map(db, |element| element.replace_unbound_with(db, replacement))
}
Type::Union(union) => union
.elements(db)
.into_iter()
.fold(UnionBuilder::new(db), |builder, ty| {
builder.add(ty.replace_unbound_with(db, replacement))
})
.build(),
ty => *ty,
}
}
/// Return true if this type is [assignable to] type `target`.
///
/// [assignable to]: https://typing.readthedocs.io/en/latest/spec/concepts.html#the-assignable-to-or-consistent-subtyping-relation
pub(crate) fn is_assignable_to(self, db: &'db dyn Db, target: Type<'db>) -> bool {
if self.is_equivalent_to(db, target) {
return true;
}
match (self, target) {
(Type::Unknown | Type::Any | Type::Never, _) => true,
(_, Type::Unknown | Type::Any) => true,
(Type::IntLiteral(_), Type::Instance(class))
if class.is_stdlib_symbol(db, "builtins", "int") =>
{
true
}
(Type::StringLiteral(_), Type::LiteralString) => true,
(Type::StringLiteral(_) | Type::LiteralString, Type::Instance(class))
if class.is_stdlib_symbol(db, "builtins", "str") =>
{
true
}
(Type::BytesLiteral(_), Type::Instance(class))
if class.is_stdlib_symbol(db, "builtins", "bytes") =>
{
true
}
(ty, Type::Union(union)) => union
.elements(db)
.iter()
.any(|&elem_ty| ty.is_assignable_to(db, elem_ty)),
// TODO
_ => false,
}
}
/// Return true if this type is equivalent to type `other`.
pub(crate) fn is_equivalent_to(self, _db: &'db dyn Db, other: Type<'db>) -> bool {
// TODO equivalent but not identical structural types, differently-ordered unions and
// intersections, other cases?
self == other
}
/// Resolve a member access of a type.
///
/// For example, if `foo` is `Type::Instance(<Bar>)`,
/// `foo.member(&db, "baz")` returns the type of `baz` attributes
/// as accessed from instances of the `Bar` class.
///
/// TODO: use of this method currently requires manually checking
/// whether the returned type is `Unknown`/`Unbound`
/// (or a union with `Unknown`/`Unbound`) in many places.
/// Ideally we'd use a more type-safe pattern, such as returning
/// an `Option` or a `Result` from this method, which would force
/// us to explicitly consider whether to handle an error or propagate
/// it up the call stack.
#[must_use]
pub fn member(&self, db: &'db dyn Db, name: &str) -> Type<'db> {
pub fn member(&self, db: &'db dyn Db, name: &Name) -> Type<'db> {
match self {
Type::Any => Type::Any,
Type::Never => {
// TODO: attribute lookup on Never type
Type::Unknown
}
Type::Never => todo!("attribute lookup on Never type"),
Type::Unknown => Type::Unknown,
Type::Unbound => Type::Unbound,
Type::None => {
// TODO: attribute lookup on None type
Type::Unknown
}
Type::Function(_) => {
// TODO: attribute lookup on function type
Type::Unknown
}
Type::Module(file) => global_symbol_ty(db, *file, name),
Type::None => todo!("attribute lookup on None type"),
Type::Function(_) => todo!("attribute lookup on Function type"),
Type::Module(file) => global_symbol_ty_by_name(db, *file, name),
Type::Class(class) => class.class_member(db, name),
Type::Instance(_) => {
// TODO MRO? get_own_instance_member, get_instance_member
Type::Unknown
todo!("attribute lookup on Instance type")
}
Type::Union(union) => union.map(db, |element| element.member(db, name)),
Type::Union(union) => union
.elements(db)
.iter()
.fold(UnionBuilder::new(db), |builder, element_ty| {
builder.add(element_ty.member(db, name))
})
.build(),
Type::Intersection(_) => {
// TODO perform the get_member on each type in the intersection
// TODO return the intersection of those results
Type::Unknown
todo!("attribute lookup on Intersection type")
}
Type::IntLiteral(_) => {
// TODO raise error
Type::Unknown
}
Type::BooleanLiteral(_) => Type::Unknown,
Type::StringLiteral(_) => {
// TODO defer to `typing.LiteralString`/`builtins.str` methods
// from typeshed's stubs
Type::Unknown
}
Type::LiteralString => {
// TODO defer to `typing.LiteralString`/`builtins.str` methods
// from typeshed's stubs
Type::Unknown
}
Type::BytesLiteral(_) => {
// TODO defer to Type::Instance(<bytes from typeshed>).member
Type::Unknown
}
Type::Tuple(_) => {
// TODO: implement tuple methods
Type::Unknown
}
}
}
/// Return the type resulting from calling an object of this type.
///
/// Returns `None` if `self` is not a callable type.
#[must_use]
fn call(&self, db: &'db dyn Db, _context: &mut TypeInferenceContext<'db>) -> Option<Type<'db>> {
match self {
Type::Function(function_type) => Some(function_type.return_type(db)),
// TODO annotated return type on `__new__` or metaclass `__call__`
Type::Class(class) => Some(Type::Instance(*class)),
// TODO: handle classes which implement `__call__`
Type::Instance(_instance_ty) => Some(Type::Unknown),
// `Any` is callable, and its return type is also `Any`.
Type::Any => Some(Type::Any),
Type::Unknown => Some(Type::Unknown),
// TODO: union and intersection types
Type::Union(_) => Some(Type::Unknown),
Type::Intersection(_) => Some(Type::Unknown),
_ => None,
}
}
/// Given the type of an object that is iterated over in some way,
/// return the type of objects that are yielded by that iteration.
///
/// E.g., for the following loop, given the type of `x`, infer the type of `y`:
/// ```python
/// for y in x:
/// pass
/// ```
/// Return None and emit a diagnostic if this type is not iterable.
fn iterate(
&self,
db: &'db dyn Db,
context: &mut TypeInferenceContext<'db>,
) -> Option<Type<'db>> {
if let Type::Tuple(tuple_type) = self {
return Some(UnionType::from_elements(db, &**tuple_type.elements(db)));
}
// `self` represents the type of the iterable;
// `__iter__` and `__next__` are both looked up on the class of the iterable:
let iterable_meta_type = self.to_meta_type(db);
let dunder_iter_method = iterable_meta_type.member(db, "__iter__");
if !dunder_iter_method.is_unbound() {
let Some(iterator_ty) = dunder_iter_method.call(db, context) else {
context.not_iterable_diagnostic(*self);
return None;
};
let dunder_next_method = iterator_ty.to_meta_type(db).member(db, "__next__");
return dunder_next_method.call(db, context).or_else(|| {
context.not_iterable_diagnostic(*self);
None
});
}
// Although it's not considered great practice,
// classes that define `__getitem__` are also iterable,
// even if they do not define `__iter__`.
//
// TODO(Alex) this is only valid if the `__getitem__` method is annotated as
// accepting `int` or `SupportsIndex`
let dunder_get_item_method = iterable_meta_type.member(db, "__getitem__");
dunder_get_item_method.call(db, context).or_else(|| {
context.not_iterable_diagnostic(*self);
None
})
}
#[must_use]
pub fn to_instance(&self, db: &'db dyn Db) -> Type<'db> {
pub fn instance(&self) -> Type<'db> {
match self {
Type::Any => Type::Any,
Type::Unknown => Type::Unknown,
Type::Unbound => Type::Unknown,
Type::Never => Type::Never,
Type::Class(class) => Type::Instance(*class),
Type::Union(union) => union.map(db, |element| element.to_instance(db)),
// TODO: we can probably do better here: --Alex
Type::Intersection(_) => Type::Unknown,
// TODO: calling `.to_instance()` on any of these should result in a diagnostic,
// since they already indicate that the object is an instance of some kind:
Type::BooleanLiteral(_)
| Type::BytesLiteral(_)
| Type::Function(_)
| Type::Instance(_)
| Type::Module(_)
| Type::IntLiteral(_)
| Type::StringLiteral(_)
| Type::Tuple(_)
| Type::LiteralString
| Type::None => Type::Unknown,
_ => Type::Unknown, // TODO type errors
}
}
/// Given a type that is assumed to represent an instance of a class,
/// return a type that represents that class itself.
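///
/// For example (hypothetical): `Type::Instance(C)` maps to `Type::Class(C)`, and
/// `Type::IntLiteral(1)` maps to the class type of `builtins.int`.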
#[must_use]
pub fn to_meta_type(&self, db: &'db dyn Db) -> Type<'db> {
match self {
Type::Unbound => Type::Unbound,
Type::Never => Type::Never,
Type::Instance(class) => Type::Class(*class),
Type::Union(union) => union.map(db, |ty| ty.to_meta_type(db)),
Type::BooleanLiteral(_) => builtins_symbol_ty(db, "bool"),
Type::BytesLiteral(_) => builtins_symbol_ty(db, "bytes"),
Type::IntLiteral(_) => builtins_symbol_ty(db, "int"),
Type::Function(_) => types_symbol_ty(db, "FunctionType"),
Type::Module(_) => types_symbol_ty(db, "ModuleType"),
Type::None => typeshed_symbol_ty(db, "NoneType"),
// TODO not accurate if there's a custom metaclass...
Type::Class(_) => builtins_symbol_ty(db, "type"),
// TODO can we do better here? `type[LiteralString]`?
Type::StringLiteral(_) | Type::LiteralString => builtins_symbol_ty(db, "str"),
// TODO: `type[Any]`?
Type::Any => Type::Any,
// TODO: `type[Unknown]`?
Type::Unknown => Type::Unknown,
// TODO intersections
Type::Intersection(_) => Type::Unknown,
Type::Tuple(_) => builtins_symbol_ty(db, "tuple"),
}
}
}
impl<'db> From<&Type<'db>> for Type<'db> {
fn from(value: &Type<'db>) -> Self {
*value
}
}
#[salsa::interned]
pub struct FunctionType<'db> {
/// name of the function at definition
#[return_ref]
pub name: ast::name::Name,
definition: Definition<'db>,
pub name: Name,
/// types of all decorators on this function
decorators: Box<[Type<'db>]>,
decorators: Vec<Type<'db>>,
}
impl<'db> FunctionType<'db> {
pub fn has_decorator(self, db: &dyn Db, decorator: Type<'_>) -> bool {
self.decorators(db).contains(&decorator)
}
/// inferred return type for this function
pub fn return_type(&self, db: &'db dyn Db) -> Type<'db> {
let definition = self.definition(db);
let DefinitionKind::Function(function_stmt_node) = definition.kind(db) else {
panic!("Function type definition must have `DefinitionKind::Function`")
};
// TODO if a function `bar` is decorated by `foo`,
// where `foo` is annotated as returning a type `X` that is a subtype of `Callable`,
// we need to infer the return type from `X`'s return annotation
// rather than from `bar`'s return annotation
// in order to determine the type that `bar` returns
if !function_stmt_node.decorator_list.is_empty() {
return Type::Unknown;
}
function_stmt_node
.returns
.as_ref()
.map(|returns| {
if function_stmt_node.is_async {
// TODO: generic `types.CoroutineType`!
Type::Unknown
} else {
definition_expression_ty(db, definition, returns.as_ref())
}
})
.unwrap_or(Type::Unknown)
}
}
#[salsa::interned]
pub struct ClassType<'db> {
/// Name of the class at definition
#[return_ref]
pub name: ast::name::Name,
pub name: Name,
definition: Definition<'db>,
/// Types of all class bases
bases: Vec<Type<'db>>,
body_scope: ScopeId<'db>,
}
impl<'db> ClassType<'db> {
/// Return true if this class is a standard library type with given module name and name.
pub(crate) fn is_stdlib_symbol(self, db: &'db dyn Db, module_name: &str, name: &str) -> bool {
name == self.name(db)
&& file_to_module(db, self.body_scope(db).file(db)).is_some_and(|module| {
module.search_path().is_standard_library() && module.name() == module_name
})
}
/// Return an iterator over the types of this class's bases.
///
/// # Panics:
/// If `definition` is not a `DefinitionKind::Class`.
pub fn bases(&self, db: &'db dyn Db) -> impl Iterator<Item = Type<'db>> {
let definition = self.definition(db);
let DefinitionKind::Class(class_stmt_node) = definition.kind(db) else {
panic!("Class type definition must have DefinitionKind::Class");
};
class_stmt_node
.bases()
.iter()
.map(move |base_expr| definition_expression_ty(db, definition, base_expr))
}
/// Returns the class member of this class named `name`.
///
/// The member resolves to a member of the class itself or any of its bases.
pub fn class_member(self, db: &'db dyn Db, name: &str) -> Type<'db> {
pub fn class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
let member = self.own_class_member(db, name);
if !member.is_unbound() {
return member;
@@ -713,12 +215,12 @@ impl<'db> ClassType<'db> {
}
/// Returns the inferred type of the class member named `name`.
pub fn own_class_member(self, db: &'db dyn Db, name: &str) -> Type<'db> {
pub fn own_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
let scope = self.body_scope(db);
symbol_ty(db, scope, name)
symbol_ty_by_name(db, scope, name)
}
pub fn inherited_class_member(self, db: &'db dyn Db, name: &str) -> Type<'db> {
pub fn inherited_class_member(self, db: &'db dyn Db, name: &Name) -> Type<'db> {
for base in self.bases(db) {
let member = base.member(db, name);
if !member.is_unbound() {
@@ -733,7 +235,6 @@ impl<'db> ClassType<'db> {
#[salsa::interned]
pub struct UnionType<'db> {
/// The union type includes values in any of these types.
#[return_ref]
elements: FxOrderSet<Type<'db>>,
}
@@ -741,36 +242,11 @@ impl<'db> UnionType<'db> {
pub fn contains(&self, db: &'db dyn Db, ty: Type<'db>) -> bool {
self.elements(db).contains(&ty)
}
/// Create a union from a list of elements
/// (which may be eagerly simplified into a different variant of [`Type`] altogether)
pub fn from_elements<T: Into<Type<'db>>>(
db: &'db dyn Db,
elements: impl IntoIterator<Item = T>,
) -> Type<'db> {
elements
.into_iter()
.fold(UnionBuilder::new(db), |builder, element| {
builder.add(element.into())
})
.build()
}
/// Apply a transformation function to all elements of the union,
/// and create a new union from the resulting set of types
pub fn map(
&self,
db: &'db dyn Db,
transform_fn: impl Fn(&Type<'db>) -> Type<'db>,
) -> Type<'db> {
Self::from_elements(db, self.elements(db).into_iter().map(transform_fn))
}
}
#[salsa::interned]
pub struct IntersectionType<'db> {
/// The intersection type includes only values in all of these types.
#[return_ref]
positive: FxOrderSet<Type<'db>>,
/// The intersection type does not include any value in any of these types.
@@ -778,127 +254,5 @@ pub struct IntersectionType<'db> {
/// Negation types aren't expressible in annotations, and are most likely to arise from type
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
/// directly in intersections rather than as a separate type.
#[return_ref]
negative: FxOrderSet<Type<'db>>,
}
#[salsa::interned]
pub struct StringLiteralType<'db> {
#[return_ref]
value: Box<str>,
}
#[salsa::interned]
pub struct BytesLiteralType<'db> {
#[return_ref]
value: Box<[u8]>,
}
#[salsa::interned]
pub struct TupleType<'db> {
#[return_ref]
elements: Box<[Type<'db>]>,
}
#[cfg(test)]
mod tests {
use super::{builtins_symbol_ty, BytesLiteralType, StringLiteralType, Type, UnionType};
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::ProgramSettings;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use test_case::test_case;
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
/// A test representation of a type that can be transformed unambiguously into a real Type,
/// given a db.
#[derive(Debug)]
enum Ty {
Never,
Unknown,
Any,
IntLiteral(i64),
StringLiteral(&'static str),
LiteralString,
BytesLiteral(&'static str),
BuiltinInstance(&'static str),
Union(Vec<Ty>),
}
impl Ty {
fn into_type(self, db: &TestDb) -> Type<'_> {
match self {
Ty::Never => Type::Never,
Ty::Unknown => Type::Unknown,
Ty::Any => Type::Any,
Ty::IntLiteral(n) => Type::IntLiteral(n),
Ty::StringLiteral(s) => {
Type::StringLiteral(StringLiteralType::new(db, (*s).into()))
}
Ty::LiteralString => Type::LiteralString,
Ty::BytesLiteral(s) => {
Type::BytesLiteral(BytesLiteralType::new(db, s.as_bytes().into()))
}
Ty::BuiltinInstance(s) => builtins_symbol_ty(db, s).to_instance(db),
Ty::Union(tys) => {
UnionType::from_elements(db, tys.into_iter().map(|ty| ty.into_type(db)))
}
}
}
}
#[test_case(Ty::Unknown, Ty::IntLiteral(1))]
#[test_case(Ty::Any, Ty::IntLiteral(1))]
#[test_case(Ty::Never, Ty::IntLiteral(1))]
#[test_case(Ty::IntLiteral(1), Ty::Unknown)]
#[test_case(Ty::IntLiteral(1), Ty::Any)]
#[test_case(Ty::IntLiteral(1), Ty::BuiltinInstance("int"))]
#[test_case(Ty::StringLiteral("foo"), Ty::BuiltinInstance("str"))]
#[test_case(Ty::StringLiteral("foo"), Ty::LiteralString)]
#[test_case(Ty::LiteralString, Ty::BuiltinInstance("str"))]
#[test_case(Ty::BytesLiteral("foo"), Ty::BuiltinInstance("bytes"))]
#[test_case(Ty::IntLiteral(1), Ty::Union(vec![Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str")]))]
#[test_case(Ty::IntLiteral(1), Ty::Union(vec![Ty::Unknown, Ty::BuiltinInstance("str")]))]
fn is_assignable_to(from: Ty, to: Ty) {
let db = setup_db();
assert!(from.into_type(&db).is_assignable_to(&db, to.into_type(&db)));
}
#[test_case(Ty::IntLiteral(1), Ty::BuiltinInstance("str"))]
#[test_case(Ty::BuiltinInstance("int"), Ty::BuiltinInstance("str"))]
#[test_case(Ty::BuiltinInstance("int"), Ty::IntLiteral(1))]
fn is_not_assignable_to(from: Ty, to: Ty) {
let db = setup_db();
assert!(!from.into_type(&db).is_assignable_to(&db, to.into_type(&db)));
}
#[test_case(
Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(2)]),
Ty::Union(vec![Ty::IntLiteral(1), Ty::IntLiteral(2)])
)]
fn is_equivalent_to(from: Ty, to: Ty) {
let db = setup_db();
assert!(from.into_type(&db).is_equivalent_to(&db, to.into_type(&db)));
}
}

View File

@@ -25,9 +25,8 @@
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//! eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
use crate::types::{builtins_symbol_ty, IntersectionType, Type, UnionType};
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};
use ordermap::set::MutableValues;
pub(crate) struct UnionBuilder<'db> {
elements: FxOrderSet<Type<'db>>,
@@ -46,7 +45,7 @@ impl<'db> UnionBuilder<'db> {
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
match ty {
Type::Union(union) => {
self.elements.extend(union.elements(self.db));
self.elements.extend(&union.elements(self.db));
}
Type::Never => {}
_ => {
@@ -57,40 +56,16 @@ impl<'db> UnionBuilder<'db> {
self
}
/// Performs the following normalizations:
/// - Replaces `Literal[True,False]` with `bool`.
/// - TODO For enums `E` with members `X1`,...,`Xn`, replaces
/// `Literal[E.X1,...,E.Xn]` with `E`.
fn simplify(&mut self) {
if let Some(true_index) = self.elements.get_index_of(&Type::BooleanLiteral(true)) {
if self.elements.contains(&Type::BooleanLiteral(false)) {
*self.elements.get_index_mut2(true_index).unwrap() =
builtins_symbol_ty(self.db, "bool");
self.elements.remove(&Type::BooleanLiteral(false));
}
}
}
pub(crate) fn build(mut self) -> Type<'db> {
pub(crate) fn build(self) -> Type<'db> {
match self.elements.len() {
0 => Type::Never,
1 => self.elements[0],
_ => {
self.simplify();
match self.elements.len() {
0 => Type::Never,
1 => self.elements[0],
_ => {
self.elements.shrink_to_fit();
Type::Union(UnionType::new(self.db, self.elements))
}
}
}
_ => Type::Union(UnionType::new(self.db, self.elements)),
}
}
}
#[allow(unused)]
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
// Really this builds a union-of-intersections, because we always keep our set-theoretic types
@@ -103,7 +78,8 @@ pub(crate) struct IntersectionBuilder<'db> {
}
impl<'db> IntersectionBuilder<'db> {
pub(crate) fn new(db: &'db dyn Db) -> Self {
#[allow(dead_code)]
fn new(db: &'db dyn Db) -> Self {
Self {
db,
intersections: vec![InnerIntersectionBuilder::new()],
@@ -117,7 +93,8 @@ impl<'db> IntersectionBuilder<'db> {
}
}
pub(crate) fn add_positive(mut self, ty: Type<'db>) -> Self {
#[allow(dead_code)]
fn add_positive(mut self, ty: Type<'db>) -> Self {
if let Type::Union(union) = ty {
// Distribute ourself over this union: for each union element, clone ourself and
// intersect with that union element, then create a new union-of-intersections with all
@@ -145,7 +122,8 @@ impl<'db> IntersectionBuilder<'db> {
}
}
pub(crate) fn add_negative(mut self, ty: Type<'db>) -> Self {
#[allow(dead_code)]
fn add_negative(mut self, ty: Type<'db>) -> Self {
// See comments above in `add_positive`; this is just the negated version.
if let Type::Union(union) = ty {
union
@@ -164,21 +142,22 @@ impl<'db> IntersectionBuilder<'db> {
}
}
pub(crate) fn build(mut self) -> Type<'db> {
#[allow(dead_code)]
fn build(mut self) -> Type<'db> {
// Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
if self.intersections.len() == 1 {
self.intersections.pop().unwrap().build(self.db)
} else {
UnionType::from_elements(
self.db,
self.intersections
.into_iter()
.map(|inner| inner.build(self.db)),
)
let mut builder = UnionBuilder::new(self.db);
for inner in self.intersections {
builder = builder.add(inner.build(self.db));
}
builder.build()
}
}
}
#[allow(unused)]
#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
positive: FxOrderSet<Type<'db>>,
@@ -222,7 +201,6 @@ impl<'db> InnerIntersectionBuilder<'db> {
self.negative.retain(|elem| !pos.contains(elem));
}
Type::Never => {}
Type::Unbound => {}
_ => {
if !self.positive.remove(&ty) {
self.negative.insert(ty);
@@ -236,23 +214,9 @@ impl<'db> InnerIntersectionBuilder<'db> {
// Never is a subtype of all types
if self.positive.contains(&Type::Never) {
self.positive.retain(Type::is_never);
self.positive.clear();
self.negative.clear();
}
if self.positive.contains(&Type::Unbound) {
self.positive.retain(Type::is_unbound);
self.negative.clear();
}
// None intersects only with object
for pos in &self.positive {
if let Type::Instance(_) = pos {
// could be `object` type
} else {
self.negative.remove(&Type::None);
break;
}
self.positive.insert(Type::Never);
}
}
@@ -272,46 +236,27 @@ impl<'db> InnerIntersectionBuilder<'db> {
#[cfg(test)]
mod tests {
use super::{IntersectionBuilder, IntersectionType, Type, UnionType};
use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
use crate::db::tests::TestDb;
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::types::{builtins_symbol_ty, UnionBuilder};
use crate::ProgramSettings;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
fn setup_db() -> TestDb {
TestDb::new()
}
impl<'db> UnionType<'db> {
fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.elements(db).into_iter().copied().collect()
self.elements(db).into_iter().collect()
}
}
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
#[test]
fn build_union() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let union = UnionType::from_elements(&db, [t0, t1]).expect_union();
let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
panic!("expected a union");
};
assert_eq!(union.elements_vec(&db), &[t0, t1]);
}
@@ -320,7 +265,8 @@ mod tests {
fn build_union_single() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0]);
let ty = UnionBuilder::new(&db).add(t0).build();
assert_eq!(ty, t0);
}
@@ -328,6 +274,7 @@ mod tests {
fn build_union_empty() {
let db = setup_db();
let ty = UnionBuilder::new(&db).build();
assert_eq!(ty, Type::Never);
}
@@ -335,46 +282,32 @@ mod tests {
fn build_union_never() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionType::from_elements(&db, [t0, Type::Never]);
let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();
assert_eq!(ty, t0);
}
#[test]
fn build_union_bool() {
let db = setup_db();
let bool_ty = builtins_symbol_ty(&db, "bool");
let t0 = Type::BooleanLiteral(true);
let t1 = Type::BooleanLiteral(true);
let t2 = Type::BooleanLiteral(false);
let t3 = Type::IntLiteral(17);
let union = UnionType::from_elements(&db, [t0, t1, t3]).expect_union();
assert_eq!(union.elements_vec(&db), &[t0, t3]);
let union = UnionType::from_elements(&db, [t0, t1, t2, t3]).expect_union();
assert_eq!(union.elements_vec(&db), &[bool_ty, t3]);
}
#[test]
fn build_union_flatten() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let u1 = UnionType::from_elements(&db, [t0, t1]);
let union = UnionType::from_elements(&db, [u1, t2]).expect_union();
let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
panic!("expected a union");
};
assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
}
impl<'db> IntersectionType<'db> {
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.positive(db).into_iter().copied().collect()
self.positive(db).into_iter().collect()
}
fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.negative(db).into_iter().copied().collect()
self.negative(db).into_iter().collect()
}
}
@@ -383,14 +316,16 @@ mod tests {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ta = Type::Any;
let intersection = IntersectionBuilder::new(&db)
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t0)
.build()
.expect_intersection();
else {
panic!("expected to be an intersection");
};
assert_eq!(intersection.pos_vec(&db), &[ta]);
assert_eq!(intersection.neg_vec(&db), &[t0]);
assert_eq!(inter.pos_vec(&db), &[ta]);
assert_eq!(inter.neg_vec(&db), &[t0]);
}
#[test]
@@ -403,14 +338,16 @@ mod tests {
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_positive(i0)
.build()
.expect_intersection();
else {
panic!("expected to be an intersection");
};
assert_eq!(intersection.pos_vec(&db), &[t2, ta]);
assert_eq!(intersection.neg_vec(&db), &[t1]);
assert_eq!(inter.pos_vec(&db), &[t2, ta]);
assert_eq!(inter.neg_vec(&db), &[t1]);
}
#[test]
@@ -423,14 +360,16 @@ mod tests {
.add_positive(ta)
.add_negative(t1)
.build();
let intersection = IntersectionBuilder::new(&db)
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_negative(i0)
.build()
.expect_intersection();
else {
panic!("expected to be an intersection");
};
assert_eq!(intersection.pos_vec(&db), &[t2, t1]);
assert_eq!(intersection.neg_vec(&db), &[ta]);
assert_eq!(inter.pos_vec(&db), &[t2, t1]);
assert_eq!(inter.neg_vec(&db), &[ta]);
}
#[test]
@@ -439,13 +378,15 @@ mod tests {
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let ta = Type::Any;
let u0 = UnionType::from_elements(&db, [t0, t1]);
let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();
let union = IntersectionBuilder::new(&db)
let Type::Union(union) = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_positive(u0)
.build()
.expect_union();
else {
panic!("expected a union");
};
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
panic!("expected a union of two intersections");
};
@@ -485,37 +426,4 @@ mod tests {
assert_eq!(ty, Type::Never);
}
#[test]
fn build_intersection_simplify_positive_unbound() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::Unbound)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::Unbound);
}
#[test]
fn build_intersection_simplify_negative_unbound() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_negative(Type::Unbound)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::IntLiteral(1));
}
#[test]
fn build_intersection_simplify_negative_none() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_negative(Type::None)
.add_positive(Type::IntLiteral(1))
.build();
assert_eq!(ty, Type::IntLiteral(1));
}
}

View File

@@ -1,111 +0,0 @@
use ruff_db::files::File;
use ruff_text_size::{Ranged, TextRange};
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
#[derive(Debug, Eq, PartialEq)]
pub struct TypeCheckDiagnostic {
// TODO: Don't use string keys for rules
pub(super) rule: String,
pub(super) message: String,
pub(super) range: TextRange,
pub(super) file: File,
}
impl TypeCheckDiagnostic {
pub fn rule(&self) -> &str {
&self.rule
}
pub fn message(&self) -> &str {
&self.message
}
pub fn file(&self) -> File {
self.file
}
}
impl Ranged for TypeCheckDiagnostic {
fn range(&self) -> TextRange {
self.range
}
}
/// A collection of type check diagnostics.
///
/// The diagnostics are wrapped in an `Arc` because they need to be cloned multiple times
/// when going from `infer_expression` to `check_file`. We could consider
/// making [`TypeCheckDiagnostic`] a Salsa struct to have them Arena-allocated (once the Tables refactor is done).
/// Using a Salsa struct does have the downside that it leaks the Salsa dependency into diagnostics, and
/// each Salsa struct comes with some overhead.
#[derive(Default, Eq, PartialEq)]
pub struct TypeCheckDiagnostics {
inner: Vec<std::sync::Arc<TypeCheckDiagnostic>>,
}
impl TypeCheckDiagnostics {
pub fn new() -> Self {
Self { inner: Vec::new() }
}
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
self.inner.push(Arc::new(diagnostic));
}
pub(crate) fn shrink_to_fit(&mut self) {
self.inner.shrink_to_fit();
}
}
impl Extend<TypeCheckDiagnostic> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = TypeCheckDiagnostic>>(&mut self, iter: T) {
self.inner.extend(iter.into_iter().map(std::sync::Arc::new));
}
}
impl Extend<std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner.extend(iter);
}
}
impl<'a> Extend<&'a std::sync::Arc<TypeCheckDiagnostic>> for TypeCheckDiagnostics {
fn extend<T: IntoIterator<Item = &'a Arc<TypeCheckDiagnostic>>>(&mut self, iter: T) {
self.inner
.extend(iter.into_iter().map(std::sync::Arc::clone));
}
}
impl std::fmt::Debug for TypeCheckDiagnostics {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
self.inner.fmt(f)
}
}
impl Deref for TypeCheckDiagnostics {
type Target = [std::sync::Arc<TypeCheckDiagnostic>];
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl IntoIterator for TypeCheckDiagnostics {
type Item = Arc<TypeCheckDiagnostic>;
type IntoIter = std::vec::IntoIter<std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.into_iter()
}
}
impl<'a> IntoIterator for &'a TypeCheckDiagnostics {
type Item = &'a Arc<TypeCheckDiagnostic>;
type IntoIter = std::slice::Iter<'a, std::sync::Arc<TypeCheckDiagnostic>>;
fn into_iter(self) -> Self::IntoIter {
self.inner.iter()
}
}
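A hedged illustration of why the diagnostics above live behind `Arc` and why three `Extend` impls exist; the types and impls are the ones listed above, while the call site and variable names are hypothetical.

// Hypothetical call site: fold diagnostics from a nested inference result into a
// per-file collection. Extending from `&Arc<TypeCheckDiagnostic>` only bumps
// reference counts, so the diagnostic payloads are never cloned.
let mut file_diagnostics = TypeCheckDiagnostics::new();
file_diagnostics.extend(expression_diagnostics.iter()); // yields &Arc<TypeCheckDiagnostic>
file_diagnostics.extend(owned_diagnostics);             // an iterator of owned TypeCheckDiagnostic
file_diagnostics.shrink_to_fit();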

View File

@@ -1,22 +1,14 @@
//! Display implementations for types.
use std::fmt::{self, Display, Formatter};
use ruff_db::display::FormatterJoinExtension;
use ruff_python_ast::str::Quote;
use ruff_python_literal::escape::AsciiEscape;
use std::fmt::{Display, Formatter};
use crate::types::{IntersectionType, Type, UnionType};
use crate::Db;
use rustc_hash::FxHashMap;
impl<'db> Type<'db> {
pub fn display(&self, db: &'db dyn Db) -> DisplayType {
pub fn display(&'db self, db: &'db dyn Db) -> DisplayType<'db> {
DisplayType { ty: self, db }
}
fn representation(self, db: &'db dyn Db) -> DisplayRepresentation<'db> {
DisplayRepresentation { db, ty: self }
}
}
#[derive(Copy, Clone)]
@@ -26,40 +18,7 @@ pub struct DisplayType<'db> {
}
impl Display for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let representation = self.ty.representation(self.db);
if matches!(
self.ty,
Type::IntLiteral(_)
| Type::BooleanLiteral(_)
| Type::StringLiteral(_)
| Type::BytesLiteral(_)
| Type::Class(_)
| Type::Function(_)
) {
write!(f, "Literal[{representation}]",)
} else {
representation.fmt(f)
}
}
}
impl fmt::Debug for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
/// Writes the string representation of a type: the value displayed either as
/// `Literal[<repr>]` or `Literal[<repr1>, <repr2>]` for literal types, or as `<repr>` for
/// non-literal types
struct DisplayRepresentation<'db> {
ty: Type<'db>,
db: &'db dyn Db,
}
impl Display for DisplayRepresentation<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self.ty {
Type::Any => f.write_str("Any"),
Type::Never => f.write_str("Never"),
@@ -70,37 +29,25 @@ impl Display for DisplayRepresentation<'_> {
write!(f, "<module '{:?}'>", file.path(self.db))
}
// TODO functions and classes should display using a fully qualified name
Type::Class(class) => f.write_str(class.name(self.db)),
Type::Instance(class) => f.write_str(class.name(self.db)),
Type::Function(function) => f.write_str(function.name(self.db)),
Type::Class(class) => write!(f, "Literal[{}]", class.name(self.db)),
Type::Instance(class) => f.write_str(&class.name(self.db)),
Type::Function(function) => write!(f, "Literal[{}]", function.name(self.db)),
Type::Union(union) => union.display(self.db).fmt(f),
Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
Type::IntLiteral(n) => n.fmt(f),
Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
Type::StringLiteral(string) => {
write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
}
Type::LiteralString => f.write_str("LiteralString"),
Type::BytesLiteral(bytes) => {
let escape =
AsciiEscape::with_preferred_quote(bytes.value(self.db).as_ref(), Quote::Double);
escape.bytes_repr().write(f)
}
Type::Tuple(tuple) => {
f.write_str("tuple[")?;
let elements = tuple.elements(self.db);
if elements.is_empty() {
f.write_str("()")?;
} else {
elements.display(self.db).fmt(f)?;
}
f.write_str("]")
Type::IntLiteral(n) => write!(f, "Literal[{n}]"),
Type::BooleanLiteral(boolean) => {
write!(f, "Literal[{}]", if *boolean { "True" } else { "False" })
}
}
}
}
impl std::fmt::Debug for DisplayType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
impl<'db> UnionType<'db> {
fn display(&'db self, db: &'db dyn Db) -> DisplayUnionType<'db> {
DisplayUnionType { db, ty: self }
@@ -113,90 +60,54 @@ struct DisplayUnionType<'db> {
}
impl Display for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let elements = self.ty.elements(self.db);
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let union = self.ty;
// Group literal types by kind.
let mut grouped_literals = FxHashMap::default();
let (int_literals, other_types): (Vec<Type>, Vec<Type>) = union
.elements(self.db)
.iter()
.copied()
.partition(|ty| matches!(ty, Type::IntLiteral(_)));
for element in elements {
if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
grouped_literals
.entry(literal_kind)
.or_insert_with(Vec::new)
.push(*element);
}
}
let mut join = f.join(" | ");
for element in elements {
if let Ok(literal_kind) = LiteralTypeKind::try_from(*element) {
let Some(mut literals) = grouped_literals.remove(&literal_kind) else {
continue;
};
if literal_kind == LiteralTypeKind::IntLiteral {
literals.sort_unstable_by_key(|ty| ty.expect_int_literal());
let mut first = true;
if !int_literals.is_empty() {
f.write_str("Literal[")?;
let mut nums: Vec<_> = int_literals
.into_iter()
.filter_map(|ty| {
if let Type::IntLiteral(n) = ty {
Some(n)
} else {
None
}
})
.collect();
nums.sort_unstable();
for num in nums {
if !first {
f.write_str(", ")?;
}
join.entry(&DisplayLiteralGroup {
literals,
db: self.db,
});
} else {
join.entry(&element.display(self.db));
write!(f, "{num}")?;
first = false;
}
f.write_str("]")?;
}
join.finish()?;
debug_assert!(grouped_literals.is_empty());
for ty in other_types {
if !first {
f.write_str(" | ")?;
};
first = false;
write!(f, "{}", ty.display(self.db))?;
}
Ok(())
}
}
impl fmt::Debug for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
struct DisplayLiteralGroup<'db> {
literals: Vec<Type<'db>>,
db: &'db dyn Db,
}
impl Display for DisplayLiteralGroup<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_str("Literal[")?;
f.join(", ")
.entries(self.literals.iter().map(|ty| ty.representation(self.db)))
.finish()?;
f.write_str("]")
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
enum LiteralTypeKind {
Class,
Function,
IntLiteral,
StringLiteral,
BytesLiteral,
}
impl TryFrom<Type<'_>> for LiteralTypeKind {
type Error = ();
fn try_from(value: Type<'_>) -> Result<Self, Self::Error> {
match value {
Type::Class(_) => Ok(Self::Class),
Type::Function(_) => Ok(Self::Function),
Type::IntLiteral(_) => Ok(Self::IntLiteral),
Type::StringLiteral(_) => Ok(Self::StringLiteral),
Type::BytesLiteral(_) => Ok(Self::BytesLiteral),
_ => Err(()),
}
impl std::fmt::Debug for DisplayUnionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
@@ -212,159 +123,30 @@ struct DisplayIntersectionType<'db> {
}
impl Display for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let tys = self
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let mut first = true;
for (neg, ty) in self
.ty
.positive(self.db)
.iter()
.map(|&ty| DisplayMaybeNegatedType {
ty,
db: self.db,
negated: false,
})
.chain(
self.ty
.negative(self.db)
.iter()
.map(|&ty| DisplayMaybeNegatedType {
ty,
db: self.db,
negated: true,
}),
);
f.join(" & ").entries(tys).finish()
}
}
impl fmt::Debug for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
struct DisplayMaybeNegatedType<'db> {
ty: Type<'db>,
db: &'db dyn Db,
negated: bool,
}
impl<'db> Display for DisplayMaybeNegatedType<'db> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
if self.negated {
f.write_str("~")?;
.map(|ty| (false, ty))
.chain(self.ty.negative(self.db).iter().map(|ty| (true, ty)))
{
if !first {
f.write_str(" & ")?;
};
first = false;
if neg {
f.write_str("~")?;
};
write!(f, "{}", ty.display(self.db))?;
}
self.ty.display(self.db).fmt(f)
}
}
pub(crate) trait TypeArrayDisplay<'db> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray;
}
impl<'db> TypeArrayDisplay<'db> for Box<[Type<'db>]> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
DisplayTypeArray { types: self, db }
}
}
impl<'db> TypeArrayDisplay<'db> for Vec<Type<'db>> {
fn display(&self, db: &'db dyn Db) -> DisplayTypeArray {
DisplayTypeArray { types: self, db }
}
}
pub(crate) struct DisplayTypeArray<'b, 'db> {
types: &'b [Type<'db>],
db: &'db dyn Db,
}
impl<'db> Display for DisplayTypeArray<'_, 'db> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.join(", ")
.entries(self.types.iter().map(|ty| ty.display(self.db)))
.finish()
}
}
#[cfg(test)]
mod tests {
use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use crate::db::tests::TestDb;
use crate::types::{global_symbol_ty, BytesLiteralType, StringLiteralType, Type, UnionType};
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
fn setup_db() -> TestDb {
let db = TestDb::new();
let src_root = SystemPathBuf::from("/src");
db.memory_file_system()
.create_directory_all(&src_root)
.unwrap();
Program::from_settings(
&db,
&ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings::new(src_root),
},
)
.expect("Valid search path settings");
db
}
#[test]
fn test_condense_literal_display_by_type() -> anyhow::Result<()> {
let mut db = setup_db();
db.write_dedented(
"src/main.py",
"
def foo(x: int) -> int:
return x + 1
def bar(s: str) -> str:
return s
class A: ...
class B: ...
",
)?;
let mod_file = system_path_to_file(&db, "src/main.py").expect("Expected file to exist.");
let union_elements = &[
Type::Unknown,
Type::IntLiteral(-1),
global_symbol_ty(&db, mod_file, "A"),
Type::StringLiteral(StringLiteralType::new(&db, Box::from("A"))),
Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([0]))),
Type::BytesLiteral(BytesLiteralType::new(&db, Box::from([7]))),
Type::IntLiteral(0),
Type::IntLiteral(1),
Type::StringLiteral(StringLiteralType::new(&db, Box::from("B"))),
global_symbol_ty(&db, mod_file, "foo"),
global_symbol_ty(&db, mod_file, "bar"),
global_symbol_ty(&db, mod_file, "B"),
Type::BooleanLiteral(true),
Type::None,
];
let union = UnionType::from_elements(&db, union_elements).expect_union();
let display = format!("{}", union.display(&db));
assert_eq!(
display,
concat!(
"Unknown | ",
"Literal[-1, 0, 1] | ",
"Literal[A, B] | ",
"Literal[\"A\", \"B\"] | ",
"Literal[b\"\\x00\", b\"\\x07\"] | ",
"Literal[foo, bar] | ",
"Literal[True] | ",
"None"
)
);
Ok(())
}
}
impl std::fmt::Debug for DisplayIntersectionType<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
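Because old and new `Display` bodies are interleaved in the hunks above, here is a minimal usage sketch of the display entry point, with behaviour as pinned down by the grouping-side code and the test module above; `db` is assumed to come from a `setup_db()` like the one in those tests.

// Sketch: literal kinds are wrapped in Literal[..]; non-literal kinds are not.
let literal = Type::IntLiteral(42);
assert_eq!(literal.display(&db).to_string(), "Literal[42]");
let none = Type::None;
assert_eq!(none.display(&db).to_string(), "None");
// The grouped rendering of whole unions is asserted verbatim by
// `test_condense_literal_display_by_type` above, e.g. "Unknown | Literal[-1, 0, 1] | ...".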

File diff suppressed because it is too large

View File

@@ -1,186 +0,0 @@
use crate::semantic_index::ast_ids::HasScopedAstId;
use crate::semantic_index::constraint::{Constraint, PatternConstraint};
use crate::semantic_index::definition::Definition;
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
use crate::semantic_index::symbol_table;
use crate::types::{infer_expression_types, IntersectionBuilder, Type};
use crate::Db;
use ruff_python_ast as ast;
use rustc_hash::FxHashMap;
use std::sync::Arc;
/// Return the type constraint that `test` (if true) would place on `definition`, if any.
///
/// For example, if we have this code:
///
/// ```python
/// y = 1 if flag else None
/// x = 1 if flag else None
/// if x is not None:
/// ...
/// ```
///
/// The `test` expression `x is not None` places the constraint "not None" on the definition of
/// `x`, so in that case we'd return `Some(Type::Intersection(negative=[Type::None]))`.
///
/// But if we called this with the same `test` expression, but the `definition` of `y`, no
/// constraint is applied to that definition, so we'd just return `None`.
pub(crate) fn narrowing_constraint<'db>(
db: &'db dyn Db,
constraint: Constraint<'db>,
definition: Definition<'db>,
) -> Option<Type<'db>> {
match constraint {
Constraint::Expression(expression) => {
all_narrowing_constraints_for_expression(db, expression)
.get(&definition.symbol(db))
.copied()
}
Constraint::Pattern(pattern) => all_narrowing_constraints_for_pattern(db, pattern)
.get(&definition.symbol(db))
.copied(),
}
}
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_pattern<'db>(
db: &'db dyn Db,
pattern: PatternConstraint<'db>,
) -> NarrowingConstraints<'db> {
NarrowingConstraintsBuilder::new(db, Constraint::Pattern(pattern)).finish()
}
#[salsa::tracked(return_ref)]
fn all_narrowing_constraints_for_expression<'db>(
db: &'db dyn Db,
expression: Expression<'db>,
) -> NarrowingConstraints<'db> {
NarrowingConstraintsBuilder::new(db, Constraint::Expression(expression)).finish()
}
type NarrowingConstraints<'db> = FxHashMap<ScopedSymbolId, Type<'db>>;
struct NarrowingConstraintsBuilder<'db> {
db: &'db dyn Db,
constraint: Constraint<'db>,
constraints: NarrowingConstraints<'db>,
}
impl<'db> NarrowingConstraintsBuilder<'db> {
fn new(db: &'db dyn Db, constraint: Constraint<'db>) -> Self {
Self {
db,
constraint,
constraints: NarrowingConstraints::default(),
}
}
fn finish(mut self) -> NarrowingConstraints<'db> {
match self.constraint {
Constraint::Expression(expression) => self.evaluate_expression_constraint(expression),
Constraint::Pattern(pattern) => self.evaluate_pattern_constraint(pattern),
}
self.constraints.shrink_to_fit();
self.constraints
}
fn evaluate_expression_constraint(&mut self, expression: Expression<'db>) {
if let ast::Expr::Compare(expr_compare) = expression.node_ref(self.db).node() {
self.add_expr_compare(expr_compare, expression);
}
// TODO other test expression kinds
}
fn evaluate_pattern_constraint(&mut self, pattern: PatternConstraint<'db>) {
let subject = pattern.subject(self.db);
match pattern.pattern(self.db).node() {
ast::Pattern::MatchValue(_) => {
// TODO
}
ast::Pattern::MatchSingleton(singleton_pattern) => {
self.add_match_pattern_singleton(subject, singleton_pattern);
}
ast::Pattern::MatchSequence(_) => {
// TODO
}
ast::Pattern::MatchMapping(_) => {
// TODO
}
ast::Pattern::MatchClass(_) => {
// TODO
}
ast::Pattern::MatchStar(_) => {
// TODO
}
ast::Pattern::MatchAs(_) => {
// TODO
}
ast::Pattern::MatchOr(_) => {
// TODO
}
}
}
fn symbols(&self) -> Arc<SymbolTable> {
symbol_table(self.db, self.scope())
}
fn scope(&self) -> ScopeId<'db> {
match self.constraint {
Constraint::Expression(expression) => expression.scope(self.db),
Constraint::Pattern(pattern) => pattern.scope(self.db),
}
}
fn add_expr_compare(&mut self, expr_compare: &ast::ExprCompare, expression: Expression<'db>) {
let ast::ExprCompare {
range: _,
left,
ops,
comparators,
} = expr_compare;
if let ast::Expr::Name(ast::ExprName {
range: _,
id,
ctx: _,
}) = left.as_ref()
{
// SAFETY: we should always have a symbol for every Name node.
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
let scope = self.scope();
let inference = infer_expression_types(self.db, expression);
for (op, comparator) in std::iter::zip(&**ops, &**comparators) {
let comp_ty = inference.expression_ty(comparator.scoped_ast_id(self.db, scope));
if matches!(op, ast::CmpOp::IsNot) {
let ty = IntersectionBuilder::new(self.db)
.add_negative(comp_ty)
.build();
self.constraints.insert(symbol, ty);
};
// TODO other comparison types
}
}
}
fn add_match_pattern_singleton(
&mut self,
subject: &ast::Expr,
pattern: &ast::PatternMatchSingleton,
) {
if let Some(ast::ExprName { id, .. }) = subject.as_name_expr() {
// SAFETY: we should always have a symbol for every Name node.
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
let ty = match pattern.value {
ast::Singleton::None => Type::None,
ast::Singleton::True => Type::BooleanLiteral(true),
ast::Singleton::False => Type::BooleanLiteral(false),
};
self.constraints.insert(symbol, ty);
}
}
}
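A hedged sketch of the public entry point documented at the top of this file; the `Definition` and `Constraint` values are placeholders, and only the signature and the doc-comment semantics are taken from the code above.

// For the example in the doc comment (`if x is not None:`):
let narrowed_x = narrowing_constraint(db, test_constraint, x_definition);
// -> Some(intersection type with `None` as a negative member)
let narrowed_y = narrowing_constraint(db, test_constraint, y_definition);
// -> None: that test places no constraint on y's definition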

View File

@@ -1 +1 @@
9e506eb5e8fc2823db8c60ad561b1145ff114947
4ef2d66663fc080fefa379e6ae5fc45d4f8b54eb

View File

@@ -41,7 +41,7 @@ _json: 3.0-
_locale: 3.0-
_lsprof: 3.0-
_markupbase: 3.0-
_msi: 3.0-3.12
_msi: 3.0-
_operator: 3.4-
_osx_support: 3.0-
_posixsubprocess: 3.2-

View File

@@ -1,7 +1,7 @@
import sys
import typing_extensions
from typing import Any, ClassVar, Generic, Literal, TypedDict, overload
from typing_extensions import Self, Unpack
from typing_extensions import Unpack
PyCF_ONLY_AST: Literal[1024]
PyCF_TYPE_COMMENTS: Literal[4096]
@@ -34,9 +34,6 @@ class AST:
if sys.version_info >= (3, 13):
_field_types: ClassVar[dict[str, Any]]
if sys.version_info >= (3, 14):
def __replace__(self) -> Self: ...
class mod(AST): ...
class type_ignore(AST): ...
@@ -47,9 +44,6 @@ class TypeIgnore(type_ignore):
tag: str
def __init__(self, lineno: int, tag: str) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, lineno: int = ..., tag: str = ...) -> Self: ...
class FunctionType(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("argtypes", "returns")
@@ -63,9 +57,6 @@ class FunctionType(mod):
else:
def __init__(self, argtypes: list[expr], returns: expr) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, argtypes: list[expr] = ..., returns: expr = ...) -> Self: ...
class Module(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body", "type_ignores")
@@ -76,9 +67,6 @@ class Module(mod):
else:
def __init__(self, body: list[stmt], type_ignores: list[TypeIgnore]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, body: list[stmt] = ..., type_ignores: list[TypeIgnore] = ...) -> Self: ...
class Interactive(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body",)
@@ -88,18 +76,12 @@ class Interactive(mod):
else:
def __init__(self, body: list[stmt]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, body: list[stmt] = ...) -> Self: ...
class Expression(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body",)
body: expr
def __init__(self, body: expr) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, body: expr = ...) -> Self: ...
class stmt(AST):
lineno: int
col_offset: int
@@ -107,9 +89,6 @@ class stmt(AST):
end_col_offset: int | None
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...
class FunctionDef(stmt):
if sys.version_info >= (3, 12):
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
@@ -173,19 +152,6 @@ class FunctionDef(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
name: _Identifier = ...,
args: arguments = ...,
body: list[stmt] = ...,
decorator_list: list[expr] = ...,
returns: expr | None = ...,
type_comment: str | None = ...,
type_params: list[type_param] = ...,
) -> Self: ...
class AsyncFunctionDef(stmt):
if sys.version_info >= (3, 12):
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment", "type_params")
@@ -249,19 +215,6 @@ class AsyncFunctionDef(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
name: _Identifier = ...,
args: arguments = ...,
body: list[stmt],
decorator_list: list[expr],
returns: expr | None,
type_comment: str | None,
type_params: list[type_param],
) -> Self: ...
class ClassDef(stmt):
if sys.version_info >= (3, 12):
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list", "type_params")
@@ -307,28 +260,12 @@ class ClassDef(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
name: _Identifier,
bases: list[expr],
keywords: list[keyword],
body: list[stmt],
decorator_list: list[expr],
type_params: list[type_param],
**kwargs: Unpack[_Attributes],
) -> Self: ...
class Return(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr | None
def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Delete(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("targets",)
@@ -338,9 +275,6 @@ class Delete(stmt):
else:
def __init__(self, targets: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, targets: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Assign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("targets", "value", "type_comment")
@@ -361,11 +295,6 @@ class Assign(stmt):
self, targets: list[expr], value: expr, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, targets: list[expr] = ..., value: expr = ..., type_comment: str | None = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class AugAssign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "op", "value")
@@ -376,16 +305,6 @@ class AugAssign(stmt):
self, target: Name | Attribute | Subscript, op: operator, value: expr, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
target: Name | Attribute | Subscript = ...,
op: operator = ...,
value: expr = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class AnnAssign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "annotation", "value", "simple")
@@ -413,17 +332,6 @@ class AnnAssign(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
target: Name | Attribute | Subscript = ...,
annotation: expr = ...,
value: expr | None = ...,
simple: int = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class For(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -453,18 +361,6 @@ class For(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
target: expr = ...,
iter: expr = ...,
body: list[stmt] = ...,
orelse: list[stmt] = ...,
type_comment: str | None = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class AsyncFor(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
@@ -494,18 +390,6 @@ class AsyncFor(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
target: expr = ...,
iter: expr = ...,
body: list[stmt] = ...,
orelse: list[stmt] = ...,
type_comment: str | None = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class While(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
@@ -519,9 +403,6 @@ class While(stmt):
else:
def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> Self: ...
class If(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
@@ -535,11 +416,6 @@ class If(stmt):
else:
def __init__(self, test: expr, body: list[stmt], orelse: list[stmt], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, test: expr = ..., body: list[stmt] = ..., orelse: list[stmt] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class With(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("items", "body", "type_comment")
@@ -559,16 +435,6 @@ class With(stmt):
self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
items: list[withitem] = ...,
body: list[stmt] = ...,
type_comment: str | None = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class AsyncWith(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("items", "body", "type_comment")
@@ -588,16 +454,6 @@ class AsyncWith(stmt):
self, items: list[withitem], body: list[stmt], type_comment: str | None = None, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
items: list[withitem] = ...,
body: list[stmt] = ...,
type_comment: str | None = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class Raise(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("exc", "cause")
@@ -605,9 +461,6 @@ class Raise(stmt):
cause: expr | None
def __init__(self, exc: expr | None = None, cause: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, exc: expr | None = ..., cause: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Try(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -634,17 +487,6 @@ class Try(stmt):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
body: list[stmt] = ...,
handlers: list[ExceptHandler] = ...,
orelse: list[stmt] = ...,
finalbody: list[stmt] = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
if sys.version_info >= (3, 11):
class TryStar(stmt):
__match_args__ = ("body", "handlers", "orelse", "finalbody")
@@ -671,17 +513,6 @@ if sys.version_info >= (3, 11):
**kwargs: Unpack[_Attributes],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
body: list[stmt] = ...,
handlers: list[ExceptHandler] = ...,
orelse: list[stmt] = ...,
finalbody: list[stmt] = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class Assert(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "msg")
@@ -689,9 +520,6 @@ class Assert(stmt):
msg: expr | None
def __init__(self, test: expr, msg: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, test: expr, msg: expr | None, **kwargs: Unpack[_Attributes]) -> Self: ...
class Import(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
@@ -701,9 +529,6 @@ class Import(stmt):
else:
def __init__(self, names: list[alias], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, names: list[alias] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class ImportFrom(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("module", "names", "level")
@@ -725,11 +550,6 @@ class ImportFrom(stmt):
self, module: str | None = None, *, names: list[alias], level: int, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, module: str | None = ..., names: list[alias] = ..., level: int = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Global(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
@@ -739,9 +559,6 @@ class Global(stmt):
else:
def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> Self: ...
class Nonlocal(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
@@ -751,18 +568,12 @@ class Nonlocal(stmt):
else:
def __init__(self, names: list[_Identifier], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, names: list[_Identifier] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Expr(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
@@ -774,9 +585,6 @@ class expr(AST):
end_col_offset: int | None
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, **kwargs: Unpack[_Attributes]) -> Self: ...
class BoolOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("op", "values")
@@ -787,9 +595,6 @@ class BoolOp(expr):
else:
def __init__(self, op: boolop, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, op: boolop = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class BinOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("left", "op", "right")
@@ -798,11 +603,6 @@ class BinOp(expr):
right: expr
def __init__(self, left: expr, op: operator, right: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, left: expr = ..., op: operator = ..., right: expr = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class UnaryOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("op", "operand")
@@ -810,9 +610,6 @@ class UnaryOp(expr):
operand: expr
def __init__(self, op: unaryop, operand: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, op: unaryop = ..., operand: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Lambda(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("args", "body")
@@ -820,9 +617,6 @@ class Lambda(expr):
body: expr
def __init__(self, args: arguments, body: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, args: arguments = ..., body: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class IfExp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
@@ -831,11 +625,6 @@ class IfExp(expr):
orelse: expr
def __init__(self, test: expr, body: expr, orelse: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, test: expr = ..., body: expr = ..., orelse: expr = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Dict(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("keys", "values")
@@ -846,11 +635,6 @@ class Dict(expr):
else:
def __init__(self, keys: list[expr | None], values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, keys: list[expr | None] = ..., values: list[expr] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Set(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts",)
@@ -860,9 +644,6 @@ class Set(expr):
else:
def __init__(self, elts: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, elts: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class ListComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
@@ -873,11 +654,6 @@ class ListComp(expr):
else:
def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class SetComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
@@ -888,11 +664,6 @@ class SetComp(expr):
else:
def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class DictComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("key", "value", "generators")
@@ -906,11 +677,6 @@ class DictComp(expr):
else:
def __init__(self, key: expr, value: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, key: expr = ..., value: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class GeneratorExp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
@@ -921,38 +687,24 @@ class GeneratorExp(expr):
else:
def __init__(self, elt: expr, generators: list[comprehension], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, elt: expr = ..., generators: list[comprehension] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Await(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Yield(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr | None
def __init__(self, value: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class YieldFrom(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
def __init__(self, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Compare(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("left", "ops", "comparators")
@@ -966,11 +718,6 @@ class Compare(expr):
else:
def __init__(self, left: expr, ops: list[cmpop], comparators: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, left: expr = ..., ops: list[cmpop] = ..., comparators: list[expr] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Call(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("func", "args", "keywords")
@@ -984,11 +731,6 @@ class Call(expr):
else:
def __init__(self, func: expr, args: list[expr], keywords: list[keyword], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, func: expr = ..., args: list[expr] = ..., keywords: list[keyword] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class FormattedValue(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "conversion", "format_spec")
@@ -997,11 +739,6 @@ class FormattedValue(expr):
format_spec: expr | None
def __init__(self, value: expr, conversion: int, format_spec: expr | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, value: expr = ..., conversion: int = ..., format_spec: expr | None = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class JoinedStr(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("values",)
@@ -1011,24 +748,16 @@ class JoinedStr(expr):
else:
def __init__(self, values: list[expr], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, values: list[expr] = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Constant(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "kind")
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
kind: str | None
if sys.version_info < (3, 14):
# Aliases for value, for backwards compatibility
s: Any
n: int | float | complex
# Aliases for value, for backwards compatibility
s: Any
n: int | float | complex
def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: Any = ..., kind: str | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class NamedExpr(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "value")
@@ -1036,9 +765,6 @@ class NamedExpr(expr):
value: expr
def __init__(self, target: Name, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, target: Name = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Attribute(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "attr", "ctx")
@@ -1047,11 +773,6 @@ class Attribute(expr):
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
def __init__(self, value: expr, attr: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, value: expr = ..., attr: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
if sys.version_info >= (3, 9):
_Slice: typing_extensions.TypeAlias = expr
_SliceAttributes: typing_extensions.TypeAlias = _Attributes
@@ -1071,16 +792,6 @@ class Slice(_Slice):
self, lower: expr | None = None, upper: expr | None = None, step: expr | None = None, **kwargs: Unpack[_SliceAttributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
lower: expr | None = ...,
upper: expr | None = ...,
step: expr | None = ...,
**kwargs: Unpack[_SliceAttributes],
) -> Self: ...
if sys.version_info < (3, 9):
class ExtSlice(slice):
dims: list[slice]
@@ -1098,11 +809,6 @@ class Subscript(expr):
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
def __init__(self, value: expr, slice: _Slice, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, value: expr = ..., slice: _Slice = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class Starred(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "ctx")
@@ -1110,9 +816,6 @@ class Starred(expr):
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
def __init__(self, value: expr, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Name(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("id", "ctx")
@@ -1120,9 +823,6 @@ class Name(expr):
ctx: expr_context # Not present in Python < 3.13 if not passed to `__init__`
def __init__(self, id: _Identifier, ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, id: _Identifier = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class List(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts", "ctx")
@@ -1133,9 +833,6 @@ class List(expr):
else:
def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class Tuple(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts", "ctx")
@@ -1148,9 +845,6 @@ class Tuple(expr):
else:
def __init__(self, elts: list[expr], ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, elts: list[expr] = ..., ctx: expr_context = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class expr_context(AST): ...
if sys.version_info < (3, 9):
@@ -1214,9 +908,6 @@ class comprehension(AST):
else:
def __init__(self, target: expr, iter: expr, ifs: list[expr], is_async: int) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, target: expr = ..., iter: expr = ..., ifs: list[expr] = ..., is_async: int = ...) -> Self: ...
class excepthandler(AST):
lineno: int
col_offset: int
@@ -1224,11 +915,6 @@ class excepthandler(AST):
end_col_offset: int | None
def __init__(self, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int | None = ..., end_col_offset: int | None = ...
) -> Self: ...
class ExceptHandler(excepthandler):
if sys.version_info >= (3, 10):
__match_args__ = ("type", "name", "body")
@@ -1249,16 +935,6 @@ class ExceptHandler(excepthandler):
self, type: expr | None = None, name: _Identifier | None = None, *, body: list[stmt], **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
type: expr | None = ...,
name: _Identifier | None = ...,
body: list[stmt] = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class arguments(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
@@ -1317,19 +993,6 @@ class arguments(AST):
defaults: list[expr],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
posonlyargs: list[arg] = ...,
args: list[arg] = ...,
vararg: arg | None = ...,
kwonlyargs: list[arg] = ...,
kw_defaults: list[expr | None] = ...,
kwarg: arg | None = ...,
defaults: list[expr] = ...,
) -> Self: ...
class arg(AST):
lineno: int
col_offset: int
@@ -1344,16 +1007,6 @@ class arg(AST):
self, arg: _Identifier, annotation: expr | None = None, type_comment: str | None = None, **kwargs: Unpack[_Attributes]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
arg: _Identifier = ...,
annotation: expr | None = ...,
type_comment: str | None = ...,
**kwargs: Unpack[_Attributes],
) -> Self: ...
class keyword(AST):
lineno: int
col_offset: int
@@ -1368,9 +1021,6 @@ class keyword(AST):
@overload
def __init__(self, arg: _Identifier | None = None, *, value: expr, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, arg: _Identifier | None = ..., value: expr = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class alias(AST):
lineno: int
col_offset: int
@@ -1382,9 +1032,6 @@ class alias(AST):
asname: _Identifier | None
def __init__(self, name: str, asname: _Identifier | None = None, **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, name: str = ..., asname: _Identifier | None = ..., **kwargs: Unpack[_Attributes]) -> Self: ...
class withitem(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("context_expr", "optional_vars")
@@ -1392,9 +1039,6 @@ class withitem(AST):
optional_vars: expr | None
def __init__(self, context_expr: expr, optional_vars: expr | None = None) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, context_expr: expr = ..., optional_vars: expr | None = ...) -> Self: ...
if sys.version_info >= (3, 10):
class Match(stmt):
__match_args__ = ("subject", "cases")
@@ -1405,11 +1049,6 @@ if sys.version_info >= (3, 10):
else:
def __init__(self, subject: expr, cases: list[match_case], **kwargs: Unpack[_Attributes]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, subject: expr = ..., cases: list[match_case] = ..., **kwargs: Unpack[_Attributes]
) -> Self: ...
class pattern(AST):
lineno: int
col_offset: int
@@ -1417,11 +1056,6 @@ if sys.version_info >= (3, 10):
end_col_offset: int
def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, lineno: int = ..., col_offset: int = ..., end_lineno: int = ..., end_col_offset: int = ...
) -> Self: ...
# Without the alias, Pyright complains variables named pattern are recursively defined
_Pattern: typing_extensions.TypeAlias = pattern
@@ -1438,25 +1072,16 @@ if sys.version_info >= (3, 10):
@overload
def __init__(self, pattern: _Pattern, guard: expr | None = None, *, body: list[stmt]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, pattern: _Pattern = ..., guard: expr | None = ..., body: list[stmt] = ...) -> Self: ...
class MatchValue(pattern):
__match_args__ = ("value",)
value: expr
def __init__(self, value: expr, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: expr = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
class MatchSingleton(pattern):
__match_args__ = ("value",)
value: Literal[True, False] | None
def __init__(self, value: Literal[True, False] | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, value: Literal[True, False] | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
class MatchSequence(pattern):
__match_args__ = ("patterns",)
patterns: list[pattern]
@@ -1465,17 +1090,11 @@ if sys.version_info >= (3, 10):
else:
def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
class MatchStar(pattern):
__match_args__ = ("name",)
name: _Identifier | None
def __init__(self, name: _Identifier | None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
class MatchMapping(pattern):
__match_args__ = ("keys", "patterns", "rest")
keys: list[expr]
@@ -1498,16 +1117,6 @@ if sys.version_info >= (3, 10):
**kwargs: Unpack[_Attributes[int]],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
keys: list[expr] = ...,
patterns: list[pattern] = ...,
rest: _Identifier | None = ...,
**kwargs: Unpack[_Attributes[int]],
) -> Self: ...
class MatchClass(pattern):
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
cls: expr
@@ -1533,17 +1142,6 @@ if sys.version_info >= (3, 10):
**kwargs: Unpack[_Attributes[int]],
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
cls: expr = ...,
patterns: list[pattern] = ...,
kwd_attrs: list[_Identifier] = ...,
kwd_patterns: list[pattern] = ...,
**kwargs: Unpack[_Attributes[int]],
) -> Self: ...
class MatchAs(pattern):
__match_args__ = ("pattern", "name")
pattern: _Pattern | None
@@ -1552,11 +1150,6 @@ if sys.version_info >= (3, 10):
self, pattern: _Pattern | None = None, name: _Identifier | None = None, **kwargs: Unpack[_Attributes[int]]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, pattern: _Pattern | None = ..., name: _Identifier | None = ..., **kwargs: Unpack[_Attributes[int]]
) -> Self: ...
class MatchOr(pattern):
__match_args__ = ("patterns",)
patterns: list[pattern]
@@ -1565,9 +1158,6 @@ if sys.version_info >= (3, 10):
else:
def __init__(self, patterns: list[pattern], **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, *, patterns: list[pattern] = ..., **kwargs: Unpack[_Attributes[int]]) -> Self: ...
if sys.version_info >= (3, 12):
class type_param(AST):
lineno: int
@@ -1576,9 +1166,6 @@ if sys.version_info >= (3, 12):
end_col_offset: int
def __init__(self, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(self, **kwargs: Unpack[_Attributes[int]]) -> Self: ...
class TypeVar(type_param):
if sys.version_info >= (3, 13):
__match_args__ = ("name", "bound", "default_value")
@@ -1598,16 +1185,6 @@ if sys.version_info >= (3, 12):
else:
def __init__(self, name: _Identifier, bound: expr | None = None, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
name: _Identifier = ...,
bound: expr | None = ...,
default_value: expr | None = ...,
**kwargs: Unpack[_Attributes[int]],
) -> Self: ...
class ParamSpec(type_param):
if sys.version_info >= (3, 13):
__match_args__ = ("name", "default_value")
@@ -1622,11 +1199,6 @@ if sys.version_info >= (3, 12):
else:
def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
) -> Self: ...
class TypeVarTuple(type_param):
if sys.version_info >= (3, 13):
__match_args__ = ("name", "default_value")
@@ -1641,11 +1213,6 @@ if sys.version_info >= (3, 12):
else:
def __init__(self, name: _Identifier, **kwargs: Unpack[_Attributes[int]]) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self, *, name: _Identifier = ..., default_value: expr | None = ..., **kwargs: Unpack[_Attributes[int]]
) -> Self: ...
class TypeAlias(stmt):
__match_args__ = ("name", "type_params", "value")
name: Name
@@ -1664,13 +1231,3 @@ if sys.version_info >= (3, 12):
def __init__(
self, name: Name, type_params: list[type_param], value: expr, **kwargs: Unpack[_Attributes[int]]
) -> None: ...
if sys.version_info >= (3, 14):
def __replace__(
self,
*,
name: Name = ...,
type_params: list[type_param] = ...,
value: expr = ...,
**kwargs: Unpack[_Attributes[int]],
) -> Self: ...

View File

@@ -1,12 +1,13 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import ( # noqa: Y022,Y038
from typing import ( # noqa: Y022,Y038,Y057
AbstractSet as Set,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
Collection as Collection,
Container as Container,
@@ -58,12 +59,8 @@ __all__ = [
"ValuesView",
"Sequence",
"MutableSequence",
"ByteString",
]
if sys.version_info < (3, 14):
from typing import ByteString as ByteString # noqa: Y057
__all__ += ["ByteString"]
if sys.version_info >= (3, 12):
__all__ += ["Buffer"]

View File

@@ -51,8 +51,8 @@ class _CDataMeta(type):
# By default mypy complains about the following two methods, because strictly speaking cls
# might not be a Type[_CT]. However this can never actually happen, because the only class that
# uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc]
class _CData(metaclass=_CDataMeta):
_b_base_: int

View File

@@ -493,7 +493,7 @@ class _CursesWindow:
def instr(self, y: int, x: int, n: int = ...) -> bytes: ...
def is_linetouched(self, line: int, /) -> bool: ...
def is_wintouched(self) -> bool: ...
def keypad(self, yes: bool, /) -> None: ...
def keypad(self, yes: bool) -> None: ...
def leaveok(self, yes: bool) -> None: ...
def move(self, new_y: int, new_x: int) -> None: ...
def mvderwin(self, y: int, x: int) -> None: ...

View File

@@ -1,38 +1,17 @@
import sys
from _typeshed import StrPath
from typing import Final, Literal, TypedDict, type_check_only
from collections.abc import Mapping
@type_check_only
class _LocaleConv(TypedDict):
decimal_point: str
grouping: list[int]
thousands_sep: str
int_curr_symbol: str
currency_symbol: str
p_cs_precedes: Literal[0, 1, 127]
n_cs_precedes: Literal[0, 1, 127]
p_sep_by_space: Literal[0, 1, 127]
n_sep_by_space: Literal[0, 1, 127]
mon_decimal_point: str
frac_digits: int
int_frac_digits: int
mon_thousands_sep: str
mon_grouping: list[int]
positive_sign: str
negative_sign: str
p_sign_posn: Literal[0, 1, 2, 3, 4, 127]
n_sign_posn: Literal[0, 1, 2, 3, 4, 127]
LC_CTYPE: Final[int]
LC_COLLATE: Final[int]
LC_TIME: Final[int]
LC_MONETARY: Final[int]
LC_NUMERIC: Final[int]
LC_ALL: Final[int]
CHAR_MAX: Final = 127
LC_CTYPE: int
LC_COLLATE: int
LC_TIME: int
LC_MONETARY: int
LC_NUMERIC: int
LC_ALL: int
CHAR_MAX: int
def setlocale(category: int, locale: str | None = None, /) -> str: ...
def localeconv() -> _LocaleConv: ...
def localeconv() -> Mapping[str, int | str | list[int]]: ...
if sys.version_info >= (3, 11):
def getencoding() -> str: ...
@@ -46,67 +25,67 @@ def strxfrm(string: str, /) -> str: ...
if sys.platform != "win32":
LC_MESSAGES: int
ABDAY_1: Final[int]
ABDAY_2: Final[int]
ABDAY_3: Final[int]
ABDAY_4: Final[int]
ABDAY_5: Final[int]
ABDAY_6: Final[int]
ABDAY_7: Final[int]
ABDAY_1: int
ABDAY_2: int
ABDAY_3: int
ABDAY_4: int
ABDAY_5: int
ABDAY_6: int
ABDAY_7: int
ABMON_1: Final[int]
ABMON_2: Final[int]
ABMON_3: Final[int]
ABMON_4: Final[int]
ABMON_5: Final[int]
ABMON_6: Final[int]
ABMON_7: Final[int]
ABMON_8: Final[int]
ABMON_9: Final[int]
ABMON_10: Final[int]
ABMON_11: Final[int]
ABMON_12: Final[int]
ABMON_1: int
ABMON_2: int
ABMON_3: int
ABMON_4: int
ABMON_5: int
ABMON_6: int
ABMON_7: int
ABMON_8: int
ABMON_9: int
ABMON_10: int
ABMON_11: int
ABMON_12: int
DAY_1: Final[int]
DAY_2: Final[int]
DAY_3: Final[int]
DAY_4: Final[int]
DAY_5: Final[int]
DAY_6: Final[int]
DAY_7: Final[int]
DAY_1: int
DAY_2: int
DAY_3: int
DAY_4: int
DAY_5: int
DAY_6: int
DAY_7: int
ERA: Final[int]
ERA_D_T_FMT: Final[int]
ERA_D_FMT: Final[int]
ERA_T_FMT: Final[int]
ERA: int
ERA_D_T_FMT: int
ERA_D_FMT: int
ERA_T_FMT: int
MON_1: Final[int]
MON_2: Final[int]
MON_3: Final[int]
MON_4: Final[int]
MON_5: Final[int]
MON_6: Final[int]
MON_7: Final[int]
MON_8: Final[int]
MON_9: Final[int]
MON_10: Final[int]
MON_11: Final[int]
MON_12: Final[int]
MON_1: int
MON_2: int
MON_3: int
MON_4: int
MON_5: int
MON_6: int
MON_7: int
MON_8: int
MON_9: int
MON_10: int
MON_11: int
MON_12: int
CODESET: Final[int]
D_T_FMT: Final[int]
D_FMT: Final[int]
T_FMT: Final[int]
T_FMT_AMPM: Final[int]
AM_STR: Final[int]
PM_STR: Final[int]
CODESET: int
D_T_FMT: int
D_FMT: int
T_FMT: int
T_FMT_AMPM: int
AM_STR: int
PM_STR: int
RADIXCHAR: Final[int]
THOUSEP: Final[int]
YESEXPR: Final[int]
NOEXPR: Final[int]
CRNCYSTR: Final[int]
ALT_DIGITS: Final[int]
RADIXCHAR: int
THOUSEP: int
YESEXPR: int
NOEXPR: int
CRNCYSTR: int
ALT_DIGITS: int
def nl_langinfo(key: int, /) -> str: ...

View File

@@ -2,7 +2,7 @@ import sys
from _typeshed import SupportsGetItem
from collections.abc import Callable, Container, Iterable, MutableMapping, MutableSequence, Sequence
from typing import Any, AnyStr, Generic, Protocol, SupportsAbs, SupportsIndex, TypeVar, final, overload
from typing_extensions import ParamSpec, TypeAlias, TypeIs, TypeVarTuple, Unpack
from typing_extensions import ParamSpec, TypeAlias, TypeVarTuple, Unpack
_R = TypeVar("_R")
_T = TypeVar("_T")
@@ -145,7 +145,3 @@ if sys.version_info >= (3, 11):
def call(obj: Callable[_P, _R], /, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
def _compare_digest(a: AnyStr, b: AnyStr, /) -> bool: ...
if sys.version_info >= (3, 14):
def is_none(a: object, /) -> TypeIs[None]: ...
def is_not_none(a: _T | None, /) -> TypeIs[_T]: ...

View File

@@ -28,9 +28,9 @@ S_IFDIR: Final = 0o040000
# These are 0 on systems that don't support the specific kind of file.
# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux.
S_IFDOOR: Final[int]
S_IFPORT: Final[int]
S_IFWHT: Final[int]
S_IFDOOR: int
S_IFPORT: int
S_IFWHT: int
S_ISUID: Final = 0o4000
S_ISGID: Final = 0o2000
@@ -79,9 +79,9 @@ def S_ISWHT(mode: int, /) -> bool: ...
def filemode(mode: int, /) -> str: ...
if sys.platform == "win32":
IO_REPARSE_TAG_SYMLINK: Final = 0xA000000C
IO_REPARSE_TAG_MOUNT_POINT: Final = 0xA0000003
IO_REPARSE_TAG_APPEXECLINK: Final = 0x8000001B
IO_REPARSE_TAG_SYMLINK: int
IO_REPARSE_TAG_MOUNT_POINT: int
IO_REPARSE_TAG_APPEXECLINK: int
if sys.platform == "win32":
FILE_ATTRIBUTE_ARCHIVE: Final = 32

View File

@@ -1,4 +1,3 @@
import signal
import sys
from _typeshed import structseq
from collections.abc import Callable
@@ -17,39 +16,16 @@ class LockType:
def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release(self) -> None: ...
def locked(self) -> bool: ...
def acquire_lock(self, blocking: bool = True, timeout: float = -1) -> bool: ...
def release_lock(self) -> None: ...
def locked_lock(self) -> bool: ...
def __enter__(self) -> bool: ...
def __exit__(
self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
) -> None: ...
if sys.version_info >= (3, 13):
@final
class _ThreadHandle:
ident: int
def join(self, timeout: float | None = None, /) -> None: ...
def is_done(self) -> bool: ...
def _set_done(self) -> None: ...
def start_joinable_thread(
function: Callable[[], object], handle: _ThreadHandle | None = None, daemon: bool = True
) -> _ThreadHandle: ...
lock = LockType
@overload
def start_new_thread(function: Callable[[Unpack[_Ts]], object], args: tuple[Unpack[_Ts]], /) -> int: ...
@overload
def start_new_thread(function: Callable[..., object], args: tuple[Any, ...], kwargs: dict[str, Any], /) -> int: ...
if sys.version_info >= (3, 10):
def interrupt_main(signum: signal.Signals = ..., /) -> None: ...
else:
def interrupt_main() -> None: ...
def interrupt_main() -> None: ...
def exit() -> NoReturn: ...
def allocate_lock() -> LockType: ...
def get_ident() -> int: ...
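The `_thread` hunk above adds or removes the 3.13-only joinable-thread API and the `Unpack[_Ts]` overloads of `start_new_thread`. A small sketch of the tuple-args calling convention those overloads describe (`worker` is an illustrative name; ordinary code would normally use the higher-level `threading` module instead):

import _thread
import time

def worker(n: int) -> None:
    print("worker got", n)

_thread.start_new_thread(worker, (42,))   # positional args passed as a tuple
time.sleep(0.1)                           # crude wait; the low-level module has no join()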

View File

@@ -106,8 +106,8 @@ EXCEPTION: Final = 8
READABLE: Final = 2
WRITABLE: Final = 4
TCL_VERSION: Final[str]
TK_VERSION: Final[str]
TCL_VERSION: str
TK_VERSION: str
@final
class TkttType:

View File

@@ -99,20 +99,6 @@ if sys.platform == "win32":
SEC_RESERVE: Final = 0x4000000
SEC_WRITECOMBINE: Final = 0x40000000
if sys.version_info >= (3, 13):
STARTF_FORCEOFFFEEDBACK: Final = 0x80
STARTF_FORCEONFEEDBACK: Final = 0x40
STARTF_PREVENTPINNING: Final = 0x2000
STARTF_RUNFULLSCREEN: Final = 0x20
STARTF_TITLEISAPPID: Final = 0x1000
STARTF_TITLEISLINKNAME: Final = 0x800
STARTF_UNTRUSTEDSOURCE: Final = 0x8000
STARTF_USECOUNTCHARS: Final = 0x8
STARTF_USEFILLATTRIBUTE: Final = 0x10
STARTF_USEHOTKEY: Final = 0x200
STARTF_USEPOSITION: Final = 0x4
STARTF_USESIZE: Final = 0x2
STARTF_USESHOWWINDOW: Final = 0x1
STARTF_USESTDHANDLES: Final = 0x100
@@ -264,20 +250,6 @@ if sys.platform == "win32":
def cancel(self) -> None: ...
def getbuffer(self) -> bytes | None: ...
if sys.version_info >= (3, 13):
def BatchedWaitForMultipleObjects(
handle_seq: Sequence[int], wait_all: bool, milliseconds: int = 0xFFFFFFFF
) -> list[int]: ...
def CreateEventW(security_attributes: int, manual_reset: bool, initial_state: bool, name: str | None) -> int: ...
def CreateMutexW(security_attributes: int, initial_owner: bool, name: str) -> int: ...
def GetLongPathName(path: str) -> str: ...
def GetShortPathName(path: str) -> str: ...
def OpenEventW(desired_access: int, inherit_handle: bool, name: str) -> int: ...
def OpenMutexW(desired_access: int, inherit_handle: bool, name: str) -> int: ...
def ReleaseMutex(mutex: int) -> None: ...
def ResetEvent(event: int) -> None: ...
def SetEvent(event: int) -> None: ...
if sys.version_info >= (3, 12):
def CopyFile2(existing_file_name: str, new_file_name: str, flags: int, progress_routine: int | None = None) -> int: ...
def NeedCurrentDirectoryForExePath(exe_name: str, /) -> bool: ...

View File

@@ -357,17 +357,7 @@ class Action(_AttributeHolder):
if sys.version_info >= (3, 12):
class BooleanOptionalAction(Action):
if sys.version_info >= (3, 14):
def __init__(
self,
option_strings: Sequence[str],
dest: str,
default: bool | None = None,
required: bool = False,
help: str | None = None,
deprecated: bool = False,
) -> None: ...
elif sys.version_info >= (3, 13):
if sys.version_info >= (3, 13):
@overload
def __init__(
self,

View File

@@ -10,28 +10,27 @@ class _ABC(type):
if sys.version_info >= (3, 9):
def __init__(cls, *args: Unused) -> None: ...
if sys.version_info < (3, 14):
@deprecated("Replaced by ast.Constant; removed in Python 3.14")
class Num(Constant, metaclass=_ABC):
value: int | float | complex
@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Num(Constant, metaclass=_ABC):
value: int | float | complex
@deprecated("Replaced by ast.Constant; removed in Python 3.14")
class Str(Constant, metaclass=_ABC):
value: str
# Aliases for value, for backwards compatibility
s: str
@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Str(Constant, metaclass=_ABC):
value: str
# Aliases for value, for backwards compatibility
s: str
@deprecated("Replaced by ast.Constant; removed in Python 3.14")
class Bytes(Constant, metaclass=_ABC):
value: bytes
# Aliases for value, for backwards compatibility
s: bytes
@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Bytes(Constant, metaclass=_ABC):
value: bytes
# Aliases for value, for backwards compatibility
s: bytes
@deprecated("Replaced by ast.Constant; removed in Python 3.14")
class NameConstant(Constant, metaclass=_ABC): ...
@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class NameConstant(Constant, metaclass=_ABC): ...
@deprecated("Replaced by ast.Constant; removed in Python 3.14")
class Ellipsis(Constant, metaclass=_ABC): ...
@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Ellipsis(Constant, metaclass=_ABC): ...
if sys.version_info >= (3, 9):
class slice(AST): ...
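The ast hunk above only rewords the `@deprecated` messages on the `Num`/`Str`/`Bytes`/`NameConstant`/`Ellipsis` aliases; under either wording the replacement is `ast.Constant`, roughly as in this sketch:

import ast

node = ast.parse("x = 42").body[0].value
assert isinstance(node, ast.Constant) and node.value == 42
# isinstance(node, ast.Num) still matches on 3.8-3.13 via the _ABC metaclass,
# but the alias is deprecated and removed in 3.14.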

View File

@@ -151,13 +151,13 @@ if sys.version_info >= (3, 10):
@overload
def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ... # type: ignore[overload-overlap]
@overload
def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...
def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[overload-overlap]
@overload
def gather(
def gather( # type: ignore[overload-overlap]
coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: bool
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
@overload
def gather(
def gather( # type: ignore[overload-overlap]
coro_or_future1: _FutureLike[_T1],
coro_or_future2: _FutureLike[_T2],
coro_or_future3: _FutureLike[_T3],
@@ -166,7 +166,7 @@ if sys.version_info >= (3, 10):
return_exceptions: bool,
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
@overload
def gather(
def gather( # type: ignore[overload-overlap]
coro_or_future1: _FutureLike[_T1],
coro_or_future2: _FutureLike[_T2],
coro_or_future3: _FutureLike[_T3],
@@ -176,7 +176,7 @@ if sys.version_info >= (3, 10):
return_exceptions: bool,
) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
@overload
def gather(
def gather( # type: ignore[overload-overlap]
coro_or_future1: _FutureLike[_T1],
coro_or_future2: _FutureLike[_T2],
coro_or_future3: _FutureLike[_T3],
@@ -189,7 +189,7 @@ if sys.version_info >= (3, 10):
tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
]: ...
@overload
def gather(
def gather( # type: ignore[overload-overlap]
coro_or_future1: _FutureLike[_T1],
coro_or_future2: _FutureLike[_T2],
coro_or_future3: _FutureLike[_T3],

View File
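The `asyncio.gather` hunk above only adds or drops `# type: ignore[overload-overlap]` on the `return_exceptions` overloads; the runtime behaviour they describe is unchanged, e.g.:

import asyncio

async def ok() -> int:
    return 1

async def boom() -> int:
    raise ValueError("nope")

async def main() -> None:
    results = await asyncio.gather(ok(), boom(), return_exceptions=True)
    print(results)   # [1, ValueError('nope')] -- failures come back as values, not raises

asyncio.run(main())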

@@ -159,7 +159,7 @@ if sys.platform != "win32":
class _UnixSelectorEventLoop(BaseSelectorEventLoop):
if sys.version_info >= (3, 13):
async def create_unix_server(
async def create_unix_server( # type: ignore[override]
self,
protocol_factory: _ProtocolFactory,
path: StrPath | None = None,

View File

@@ -1,4 +1,3 @@
# ruff: noqa: PYI036 # This is the module declaring BaseException
import _ast
import _typeshed
import sys
@@ -34,8 +33,7 @@ from collections.abc import Awaitable, Callable, Iterable, Iterator, MutableSet,
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import CellType, CodeType, TracebackType
# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping}
# are imported from collections.abc in builtins.pyi
# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi
from typing import ( # noqa: Y022
IO,
Any,
@@ -969,9 +967,7 @@ class tuple(Sequence[_T_co]):
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
# Doesn't exist at runtime, but deleting this breaks mypy and pyright. See:
# https://github.com/python/typeshed/issues/7580
# https://github.com/python/mypy/issues/8240
# Doesn't exist at runtime, but deleting this breaks mypy. See #2999
@final
@type_check_only
class function:
@@ -1088,8 +1084,7 @@ class dict(MutableMapping[_KT, _VT]):
def keys(self) -> dict_keys[_KT, _VT]: ...
def values(self) -> dict_values[_KT, _VT]: ...
def items(self) -> dict_items[_KT, _VT]: ...
# Signature of `dict.fromkeys` should be kept identical to
# `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`
# Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections`
# TODO: the true signature of `dict.fromkeys` is not expressible in the current type system.
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
@classmethod
@@ -1749,7 +1744,7 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
# without creating many false-positive errors (see #7578).
# Instead, we special-case the most common examples of this: bool and literal integers.
@overload
def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ...
def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ... # type: ignore[overload-overlap]
@overload
def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ...
@overload
@@ -1757,8 +1752,9 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A
# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object`
# (A "SupportsDunderDict" protocol doesn't work)
# Use a type: ignore to make complaints about overlapping overloads go away
@overload
def vars(object: type, /) -> types.MappingProxyType[str, Any]: ...
def vars(object: type, /) -> types.MappingProxyType[str, Any]: ... # type: ignore[overload-overlap]
@overload
def vars(object: Any = ..., /) -> dict[str, Any]: ...
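The comments in the builtins hunk above explain two special cases; a short sketch of what they allow at the call site (`Box` is an illustrative name):

flags = [True, False, True]
print(sum(flags))    # 2 -- the bool/literal-int special case the comment above refers to

class Box:
    size = 3

print(vars(Box))     # a mappingproxy, per the `type` overload of vars()
print(vars())        # no argument: the current namespace, per the final overload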

View File

@@ -1,11 +1,10 @@
from collections.abc import Callable
from typing import IO, Any, Final
from typing_extensions import LiteralString
__all__ = ["Cmd"]
PROMPT: Final = "(Cmd) "
IDENTCHARS: Final[LiteralString] # Too big to be `Literal`
IDENTCHARS: str # Too big to be `Literal`
class Cmd:
prompt: str

View File

@@ -80,7 +80,7 @@ class _Encoder(Protocol):
def __call__(self, input: str, errors: str = ..., /) -> tuple[bytes, int]: ... # signature of Codec().encode
class _Decoder(Protocol):
def __call__(self, input: ReadableBuffer, errors: str = ..., /) -> tuple[str, int]: ... # signature of Codec().decode
def __call__(self, input: bytes, errors: str = ..., /) -> tuple[str, int]: ... # signature of Codec().decode
class _StreamReader(Protocol):
def __call__(self, stream: _ReadableStream, errors: str = ..., /) -> StreamReader: ...

View File

@@ -345,15 +345,15 @@ class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]):
# but they are not exposed anywhere)
# pyright doesn't have a specific error code for subclassing error!
@final
class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[_KT_co]: ...
@final
class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@final
class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore
def __reversed__(self) -> Iterator[_VT_co]: ...
class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]):
@@ -475,8 +475,7 @@ class ChainMap(MutableMapping[_KT, _VT]):
def pop(self, key: _KT, default: _T) -> _VT | _T: ...
def copy(self) -> Self: ...
__copy__ = copy
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime,
# so the signature should be kept in line with `dict.fromkeys`.
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`.
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ...

View File

@@ -55,7 +55,6 @@ class AbstractAsyncContextManager(Protocol[_T_co, _ExitT_co]):
) -> _ExitT_co: ...
class ContextDecorator:
def _recreate_cm(self) -> Self: ...
def __call__(self, func: _F) -> _F: ...
class _GeneratorContextManager(AbstractContextManager[_T_co, bool | None], ContextDecorator):
@@ -81,7 +80,6 @@ if sys.version_info >= (3, 10):
_AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]])
class AsyncContextDecorator:
def _recreate_cm(self) -> Self: ...
def __call__(self, func: _AF) -> _AF: ...
class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator):

View File

@@ -1,15 +1,16 @@
import sys
from typing import Any, Protocol, TypeVar
from typing_extensions import Self
from typing_extensions import ParamSpec, Self
__all__ = ["Error", "copy", "deepcopy"]
_T = TypeVar("_T")
_SR = TypeVar("_SR", bound=_SupportsReplace)
_SR = TypeVar("_SR", bound=_SupportsReplace[Any])
_P = ParamSpec("_P")
class _SupportsReplace(Protocol):
class _SupportsReplace(Protocol[_P]):
# In reality doesn't support args, but there's no other great way to express this.
def __replace__(self, *args: Any, **kwargs: Any) -> Self: ...
def __replace__(self, *args: _P.args, **kwargs: _P.kwargs) -> Self: ...
# None in CPython but non-None in Jython
PyStringMap: Any
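The copy hunk above reshapes the `_SupportsReplace` protocol backing `copy.replace()`. A sketch of the 3.13+ runtime feature it models (dataclasses generate `__replace__` automatically there; `Point` is illustrative):

import copy
from dataclasses import dataclass

@dataclass(frozen=True)
class Point:
    x: int
    y: int

p = Point(1, 2)
print(copy.replace(p, y=5))   # Point(x=1, y=5); requires Python 3.13+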

View File

@@ -1,13 +1,12 @@
import sys
from typing import Final
if sys.platform != "win32":
class _Method: ...
METHOD_CRYPT: Final[_Method]
METHOD_MD5: Final[_Method]
METHOD_SHA256: Final[_Method]
METHOD_SHA512: Final[_Method]
METHOD_BLOWFISH: Final[_Method]
METHOD_CRYPT: _Method
METHOD_MD5: _Method
METHOD_SHA256: _Method
METHOD_SHA512: _Method
METHOD_BLOWFISH: _Method
methods: list[_Method]
def mksalt(method: _Method | None = None, *, rounds: int | None = None) -> str: ...
def crypt(word: str, salt: str | _Method | None = None) -> str: ...

View File

@@ -185,8 +185,3 @@ if sys.version_info >= (3, 12):
c_time_t: type[c_int32 | c_int64] # alias for one or the other at runtime
class py_object(_CanCastTo, _SimpleCData[_T]): ...
if sys.version_info >= (3, 14):
class c_float_complex(_SimpleCData[complex]): ...
class c_double_complex(_SimpleCData[complex]): ...
class c_longdouble_complex(_SimpleCData[complex]): ...

View File

@@ -1,5 +1,12 @@
import sys
from ctypes import Structure, Union
from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL, Structure, Union
from ctypes import DEFAULT_MODE as DEFAULT_MODE, cdll as cdll, pydll as pydll, pythonapi as pythonapi
if sys.version_info >= (3, 12):
from _ctypes import SIZEOF_TIME_T as SIZEOF_TIME_T
if sys.platform == "win32":
from ctypes import oledll as oledll, windll as windll
# At runtime, the native endianness is an alias for Structure,
# while the other is a subclass with a metaclass added in.

View File

@@ -5,7 +5,7 @@ from _typeshed import DataclassInstance
from builtins import type as Type # alias to avoid name clashes with fields named "type"
from collections.abc import Callable, Iterable, Mapping
from typing import Any, Generic, Literal, Protocol, TypeVar, overload
from typing_extensions import Never, TypeAlias, TypeIs
from typing_extensions import TypeAlias, TypeIs
if sys.version_info >= (3, 9):
from types import GenericAlias
@@ -213,10 +213,6 @@ else:
) -> Any: ...
def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ...
# HACK: `obj: Never` typing matches if object argument is using `Any` type.
@overload
def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ... # type: ignore[narrowed-type-not-subtype] # pyright: ignore[reportGeneralTypeIssues]
@overload
def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ...
@overload
@@ -229,17 +225,18 @@ if sys.version_info >= (3, 9):
else:
class _InitVarMeta(type):
# Not used, instead `InitVar.__class_getitem__` is called.
# pyright (not unreasonably) thinks this is an invalid use of InitVar.
def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm]
# pyright ignore is needed because pyright (not unreasonably) thinks this
# is an invalid use of InitVar.
def __getitem__(self, params: Any) -> InitVar[Any]: ... # pyright: ignore
class InitVar(Generic[_T], metaclass=_InitVarMeta):
type: Type[_T]
def __init__(self, type: Type[_T]) -> None: ...
if sys.version_info >= (3, 9):
@overload
def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore[reportInvalidTypeForm]
def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... # pyright: ignore
@overload
def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore[reportInvalidTypeForm]
def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... # pyright: ignore
if sys.version_info >= (3, 12):
def make_dataclass(

View File
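The dataclasses hunk above adds or removes the `Never`-based `TypeIs` overload of `is_dataclass`; in user code the narrowing it provides looks roughly like this sketch (`User` and `describe` are illustrative names):

from dataclasses import dataclass, fields, is_dataclass

@dataclass
class User:
    name: str

def describe(obj: object) -> None:
    if is_dataclass(obj):
        # narrowed to a dataclass instance or class, so fields() accepts it
        print([f.name for f in fields(obj)])

describe(User("bo"))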

@@ -265,12 +265,12 @@ class datetime(date):
def fromtimestamp(cls, timestamp: float, /, tz: _TzInfo | None = ...) -> Self: ...
@classmethod
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.timezone.utc)")
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .fromtimestamp(datetime.UTC)")
def utcfromtimestamp(cls, t: float, /) -> Self: ...
@classmethod
def now(cls, tz: _TzInfo | None = None) -> Self: ...
@classmethod
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.timezone.utc)")
@deprecated("Use timezone-aware objects to represent datetimes in UTC; e.g. by calling .now(datetime.UTC)")
def utcnow(cls) -> Self: ...
@classmethod
def combine(cls, date: _Date, time: _Time, tzinfo: _TzInfo | None = ...) -> Self: ...
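The datetime hunk above only rewords the `@deprecated` messages on `utcnow()`/`utcfromtimestamp()`; either wording points to the same timezone-aware replacement:

from datetime import datetime, timezone

aware_now = datetime.now(timezone.utc)                    # instead of datetime.utcnow()
aware_epoch = datetime.fromtimestamp(0, tz=timezone.utc)  # instead of utcfromtimestamp(0)
print(aware_now.tzinfo, aware_epoch.isoformat())          # on 3.11+, datetime.UTC is an alias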

View File

@@ -1,26 +1,6 @@
from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.command.bdist import bdist
from distutils.command.bdist_dumb import bdist_dumb
from distutils.command.bdist_rpm import bdist_rpm
from distutils.command.build import build
from distutils.command.build_clib import build_clib
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.check import check
from distutils.command.clean import clean
from distutils.command.config import config
from distutils.command.install import install
from distutils.command.install_data import install_data
from distutils.command.install_egg_info import install_egg_info
from distutils.command.install_headers import install_headers
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.command.register import register
from distutils.command.sdist import sdist
from distutils.command.upload import upload
from distutils.dist import Distribution
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Any, ClassVar, Literal, TypeVar, overload
@@ -30,7 +10,6 @@ _CommandT = TypeVar("_CommandT", bound=Command)
_Ts = TypeVarTuple("_Ts")
class Command:
dry_run: Literal[0, 1] # Exposed from __getattr_. Same as Distribution.dry_run
distribution: Distribution
# Any to work around variance issues
sub_commands: ClassVar[list[tuple[str, Callable[[Any], bool] | None]]]
@@ -49,108 +28,8 @@ class Command:
def ensure_dirname(self, option: str) -> None: ...
def get_command_name(self) -> str: ...
def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
# NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
@overload
def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ...
@overload
def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ...
@overload
def get_finalized_command(self, command: Literal["bdist_rpm"], create: bool | Literal[0, 1] = 1) -> bdist_rpm: ...
@overload
def get_finalized_command(self, command: Literal["build"], create: bool | Literal[0, 1] = 1) -> build: ...
@overload
def get_finalized_command(self, command: Literal["build_clib"], create: bool | Literal[0, 1] = 1) -> build_clib: ...
@overload
def get_finalized_command(self, command: Literal["build_ext"], create: bool | Literal[0, 1] = 1) -> build_ext: ...
@overload
def get_finalized_command(self, command: Literal["build_py"], create: bool | Literal[0, 1] = 1) -> build_py: ...
@overload
def get_finalized_command(self, command: Literal["build_scripts"], create: bool | Literal[0, 1] = 1) -> build_scripts: ...
@overload
def get_finalized_command(self, command: Literal["check"], create: bool | Literal[0, 1] = 1) -> check: ...
@overload
def get_finalized_command(self, command: Literal["clean"], create: bool | Literal[0, 1] = 1) -> clean: ...
@overload
def get_finalized_command(self, command: Literal["config"], create: bool | Literal[0, 1] = 1) -> config: ...
@overload
def get_finalized_command(self, command: Literal["install"], create: bool | Literal[0, 1] = 1) -> install: ...
@overload
def get_finalized_command(self, command: Literal["install_data"], create: bool | Literal[0, 1] = 1) -> install_data: ...
@overload
def get_finalized_command(
self, command: Literal["install_egg_info"], create: bool | Literal[0, 1] = 1
) -> install_egg_info: ...
@overload
def get_finalized_command(self, command: Literal["install_headers"], create: bool | Literal[0, 1] = 1) -> install_headers: ...
@overload
def get_finalized_command(self, command: Literal["install_lib"], create: bool | Literal[0, 1] = 1) -> install_lib: ...
@overload
def get_finalized_command(self, command: Literal["install_scripts"], create: bool | Literal[0, 1] = 1) -> install_scripts: ...
@overload
def get_finalized_command(self, command: Literal["register"], create: bool | Literal[0, 1] = 1) -> register: ...
@overload
def get_finalized_command(self, command: Literal["sdist"], create: bool | Literal[0, 1] = 1) -> sdist: ...
@overload
def get_finalized_command(self, command: Literal["upload"], create: bool | Literal[0, 1] = 1) -> upload: ...
@overload
def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
@overload
def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist: ...
@overload
def reinitialize_command(
self, command: Literal["bdist_dumb"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> bdist_dumb: ...
@overload
def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist_rpm: ...
@overload
def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build: ...
@overload
def reinitialize_command(
self, command: Literal["build_clib"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> build_clib: ...
@overload
def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_ext: ...
@overload
def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_py: ...
@overload
def reinitialize_command(
self, command: Literal["build_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> build_scripts: ...
@overload
def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool | Literal[0, 1] = 0) -> check: ...
@overload
def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool | Literal[0, 1] = 0) -> clean: ...
@overload
def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool | Literal[0, 1] = 0) -> config: ...
@overload
def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool | Literal[0, 1] = 0) -> install: ...
@overload
def reinitialize_command(
self, command: Literal["install_data"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> install_data: ...
@overload
def reinitialize_command(
self, command: Literal["install_egg_info"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> install_egg_info: ...
@overload
def reinitialize_command(
self, command: Literal["install_headers"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> install_headers: ...
@overload
def reinitialize_command(
self, command: Literal["install_lib"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> install_lib: ...
@overload
def reinitialize_command(
self, command: Literal["install_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
) -> install_scripts: ...
@overload
def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool | Literal[0, 1] = 0) -> register: ...
@overload
def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> sdist: ...
@overload
def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool | Literal[0, 1] = 0) -> upload: ...
@overload
def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
@overload
def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ...

View File

@@ -1,48 +0,0 @@
import sys
from . import (
bdist,
bdist_dumb,
bdist_rpm,
build,
build_clib,
build_ext,
build_py,
build_scripts,
check,
clean,
install,
install_data,
install_headers,
install_lib,
install_scripts,
register,
sdist,
upload,
)
__all__ = [
"build",
"build_py",
"build_ext",
"build_clib",
"build_scripts",
"clean",
"install",
"install_lib",
"install_headers",
"install_scripts",
"install_data",
"sdist",
"register",
"bdist",
"bdist_dumb",
"bdist_rpm",
"check",
"upload",
]
if sys.version_info < (3, 10):
from . import bdist_wininst
__all__ += ["bdist_wininst"]

View File

@@ -1,4 +1,4 @@
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar, Literal
from typing_extensions import TypeAlias
from ..cmd import Command
@@ -22,7 +22,7 @@ class SilentReporter(_Reporter):
) -> None: ...
def system_message(self, level, message, *children, **kwargs): ...
HAS_DOCUTILS: Final[bool]
HAS_DOCUTILS: bool
class check(Command):
description: str

View File

@@ -1,12 +1,12 @@
from _typeshed import StrOrBytesPath
from collections.abc import Sequence
from re import Pattern
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar, Literal
from ..ccompiler import CCompiler
from ..cmd import Command
LANG_EXT: Final[dict[str, str]]
LANG_EXT: dict[str, str]
class config(Command):
description: str

View File

@@ -1,16 +1,11 @@
import sys
from collections.abc import Callable
from typing import Any, ClassVar, Final, Literal
from typing import Any, ClassVar
from ..cmd import Command
HAS_USER_SITE: Final[bool]
SCHEME_KEYS: Final[tuple[Literal["purelib"], Literal["platlib"], Literal["headers"], Literal["scripts"], Literal["data"]]]
INSTALL_SCHEMES: Final[dict[str, dict[str, str]]]
if sys.version_info < (3, 10):
WINDOWS_SCHEME: Final[dict[str, str]]
HAS_USER_SITE: bool
SCHEME_KEYS: tuple[str, ...]
INSTALL_SCHEMES: dict[str, dict[Any, Any]]
class install(Command):
description: str

View File

@@ -1,8 +1,8 @@
from typing import Any, ClassVar, Final
from typing import Any, ClassVar
from ..cmd import Command
PYTHON_SOURCE_EXTENSION: Final = ".py"
PYTHON_SOURCE_EXTENSION: str
class install_lib(Command):
description: str

View File

@@ -3,9 +3,9 @@ from collections.abc import Mapping
from distutils.cmd import Command as Command
from distutils.dist import Distribution as Distribution
from distutils.extension import Extension as Extension
from typing import Any, Final, Literal
from typing import Any, Literal
USAGE: Final[str]
USAGE: str
def gen_usage(script_name: StrOrBytesPath) -> str: ...

View File

@@ -1,20 +1,20 @@
from distutils.unixccompiler import UnixCCompiler
from distutils.version import LooseVersion
from re import Pattern
from typing import Final, Literal
from typing import Literal
def get_msvcr() -> list[str] | None: ...
class CygwinCCompiler(UnixCCompiler): ...
class Mingw32CCompiler(CygwinCCompiler): ...
CONFIG_H_OK: Final = "ok"
CONFIG_H_NOTOK: Final = "not ok"
CONFIG_H_UNCERTAIN: Final = "uncertain"
CONFIG_H_OK: str
CONFIG_H_NOTOK: str
CONFIG_H_UNCERTAIN: str
def check_config_h() -> tuple[Literal["ok", "not ok", "uncertain"], str]: ...
RE_VERSION: Final[Pattern[bytes]]
RE_VERSION: Pattern[bytes]
def get_versions() -> tuple[LooseVersion | None, ...]: ...
def is_cygwingcc() -> bool: ...

View File

@@ -1,3 +1 @@
from typing import Final
DEBUG: Final[str | None]
DEBUG: bool | None

View File

@@ -1,26 +1,6 @@
from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
from collections.abc import Iterable, MutableMapping
from distutils.cmd import Command
from distutils.command.bdist import bdist
from distutils.command.bdist_dumb import bdist_dumb
from distutils.command.bdist_rpm import bdist_rpm
from distutils.command.build import build
from distutils.command.build_clib import build_clib
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.check import check
from distutils.command.clean import clean
from distutils.command.config import config
from distutils.command.install import install
from distutils.command.install_data import install_data
from distutils.command.install_egg_info import install_egg_info
from distutils.command.install_headers import install_headers
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.command.register import register
from distutils.command.sdist import sdist
from distutils.command.upload import upload
from re import Pattern
from typing import IO, ClassVar, Literal, TypeVar, overload
from typing_extensions import TypeAlias
@@ -83,14 +63,18 @@ class Distribution:
def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ...
def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
@overload
def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
@overload
def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
global_options: ClassVar[_OptionsList]
common_usage: ClassVar[str]
display_options: ClassVar[_OptionsList]
display_option_names: ClassVar[list[str]]
negative_opt: ClassVar[dict[str, str]]
verbose: Literal[0, 1]
dry_run: Literal[0, 1]
help: Literal[0, 1]
verbose: int
dry_run: int
help: int
command_packages: list[str] | None
script_name: str | None
script_args: list[str] | None
@@ -124,137 +108,8 @@ class Distribution:
def print_commands(self) -> None: ...
def get_command_list(self): ...
def get_command_packages(self): ...
# NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
@overload
def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ...
@overload
def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ...
@overload
def get_command_obj(self, command: Literal["bdist_rpm"], create: Literal[1, True] = 1) -> bdist_rpm: ...
@overload
def get_command_obj(self, command: Literal["build"], create: Literal[1, True] = 1) -> build: ...
@overload
def get_command_obj(self, command: Literal["build_clib"], create: Literal[1, True] = 1) -> build_clib: ...
@overload
def get_command_obj(self, command: Literal["build_ext"], create: Literal[1, True] = 1) -> build_ext: ...
@overload
def get_command_obj(self, command: Literal["build_py"], create: Literal[1, True] = 1) -> build_py: ...
@overload
def get_command_obj(self, command: Literal["build_scripts"], create: Literal[1, True] = 1) -> build_scripts: ...
@overload
def get_command_obj(self, command: Literal["check"], create: Literal[1, True] = 1) -> check: ...
@overload
def get_command_obj(self, command: Literal["clean"], create: Literal[1, True] = 1) -> clean: ...
@overload
def get_command_obj(self, command: Literal["config"], create: Literal[1, True] = 1) -> config: ...
@overload
def get_command_obj(self, command: Literal["install"], create: Literal[1, True] = 1) -> install: ...
@overload
def get_command_obj(self, command: Literal["install_data"], create: Literal[1, True] = 1) -> install_data: ...
@overload
def get_command_obj(self, command: Literal["install_egg_info"], create: Literal[1, True] = 1) -> install_egg_info: ...
@overload
def get_command_obj(self, command: Literal["install_headers"], create: Literal[1, True] = 1) -> install_headers: ...
@overload
def get_command_obj(self, command: Literal["install_lib"], create: Literal[1, True] = 1) -> install_lib: ...
@overload
def get_command_obj(self, command: Literal["install_scripts"], create: Literal[1, True] = 1) -> install_scripts: ...
@overload
def get_command_obj(self, command: Literal["register"], create: Literal[1, True] = 1) -> register: ...
@overload
def get_command_obj(self, command: Literal["sdist"], create: Literal[1, True] = 1) -> sdist: ...
@overload
def get_command_obj(self, command: Literal["upload"], create: Literal[1, True] = 1) -> upload: ...
@overload
def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
# Not replicating the overloads for "Command | None", user may use "isinstance"
@overload
def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
@overload
def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ...
@overload
def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ...
@overload
def get_command_class(self, command: Literal["bdist_rpm"]) -> type[bdist_rpm]: ...
@overload
def get_command_class(self, command: Literal["build"]) -> type[build]: ...
@overload
def get_command_class(self, command: Literal["build_clib"]) -> type[build_clib]: ...
@overload
def get_command_class(self, command: Literal["build_ext"]) -> type[build_ext]: ...
@overload
def get_command_class(self, command: Literal["build_py"]) -> type[build_py]: ...
@overload
def get_command_class(self, command: Literal["build_scripts"]) -> type[build_scripts]: ...
@overload
def get_command_class(self, command: Literal["check"]) -> type[check]: ...
@overload
def get_command_class(self, command: Literal["clean"]) -> type[clean]: ...
@overload
def get_command_class(self, command: Literal["config"]) -> type[config]: ...
@overload
def get_command_class(self, command: Literal["install"]) -> type[install]: ...
@overload
def get_command_class(self, command: Literal["install_data"]) -> type[install_data]: ...
@overload
def get_command_class(self, command: Literal["install_egg_info"]) -> type[install_egg_info]: ...
@overload
def get_command_class(self, command: Literal["install_headers"]) -> type[install_headers]: ...
@overload
def get_command_class(self, command: Literal["install_lib"]) -> type[install_lib]: ...
@overload
def get_command_class(self, command: Literal["install_scripts"]) -> type[install_scripts]: ...
@overload
def get_command_class(self, command: Literal["register"]) -> type[register]: ...
@overload
def get_command_class(self, command: Literal["sdist"]) -> type[sdist]: ...
@overload
def get_command_class(self, command: Literal["upload"]) -> type[upload]: ...
@overload
def get_command_class(self, command: str) -> type[Command]: ...
@overload
def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ...
@overload
def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ...
@overload
def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool = False) -> bdist_rpm: ...
@overload
def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool = False) -> build: ...
@overload
def reinitialize_command(self, command: Literal["build_clib"], reinit_subcommands: bool = False) -> build_clib: ...
@overload
def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool = False) -> build_ext: ...
@overload
def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool = False) -> build_py: ...
@overload
def reinitialize_command(self, command: Literal["build_scripts"], reinit_subcommands: bool = False) -> build_scripts: ...
@overload
def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool = False) -> check: ...
@overload
def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool = False) -> clean: ...
@overload
def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool = False) -> config: ...
@overload
def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool = False) -> install: ...
@overload
def reinitialize_command(self, command: Literal["install_data"], reinit_subcommands: bool = False) -> install_data: ...
@overload
def reinitialize_command(
self, command: Literal["install_egg_info"], reinit_subcommands: bool = False
) -> install_egg_info: ...
@overload
def reinitialize_command(self, command: Literal["install_headers"], reinit_subcommands: bool = False) -> install_headers: ...
@overload
def reinitialize_command(self, command: Literal["install_lib"], reinit_subcommands: bool = False) -> install_lib: ...
@overload
def reinitialize_command(self, command: Literal["install_scripts"], reinit_subcommands: bool = False) -> install_scripts: ...
@overload
def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool = False) -> register: ...
@overload
def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool = False) -> sdist: ...
@overload
def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool = False) -> upload: ...
@overload
def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
@overload
def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...
@@ -270,7 +125,7 @@ class Distribution:
def has_data_files(self) -> bool: ...
def is_pure(self) -> bool: ...
# Default getter methods generated in __init__ from self.metadata._METHOD_BASENAMES
# Getter methods generated in __init__
def get_name(self) -> str: ...
def get_version(self) -> str: ...
def get_fullname(self) -> str: ...
@@ -292,26 +147,3 @@ class Distribution:
def get_requires(self) -> list[str]: ...
def get_provides(self) -> list[str]: ...
def get_obsoletes(self) -> list[str]: ...
# Default attributes generated in __init__ from self.display_option_names
help_commands: bool | Literal[0]
name: str | Literal[0]
version: str | Literal[0]
fullname: str | Literal[0]
author: str | Literal[0]
author_email: str | Literal[0]
maintainer: str | Literal[0]
maintainer_email: str | Literal[0]
contact: str | Literal[0]
contact_email: str | Literal[0]
url: str | Literal[0]
license: str | Literal[0]
licence: str | Literal[0]
description: str | Literal[0]
long_description: str | Literal[0]
platforms: str | list[str] | Literal[0]
classifiers: str | list[str] | Literal[0]
keywords: str | list[str] | Literal[0]
provides: list[str] | Literal[0]
requires: list[str] | Literal[0]
obsoletes: list[str] | Literal[0]

View File

@@ -1,15 +1,15 @@
from collections.abc import Iterable, Mapping
from re import Pattern
from typing import Any, Final, overload
from typing import Any, overload
from typing_extensions import TypeAlias
_Option: TypeAlias = tuple[str, str | None, str]
_GR: TypeAlias = tuple[list[str], OptionDummy]
longopt_pat: Final = r"[a-zA-Z](?:[a-zA-Z0-9-]*)"
longopt_re: Final[Pattern[str]]
neg_alias_re: Final[Pattern[str]]
longopt_xlate: Final[dict[int, int]]
longopt_pat: str
longopt_re: Pattern[str]
neg_alias_re: Pattern[str]
longopt_xlate: dict[int, int]
class FancyGetopt:
def __init__(self, option_table: list[_Option] | None = None) -> None: ...
@@ -25,7 +25,7 @@ def fancy_getopt(
options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None
) -> list[str] | _GR: ...
WS_TRANS: Final[dict[int, str]]
WS_TRANS: dict[int, str]
def wrap_text(text: str, width: int) -> list[str]: ...
def translate_longopt(opt: str) -> str: ...

View File

@@ -1,10 +1,10 @@
from typing import Any, Final
from typing import Any
DEBUG: Final = 1
INFO: Final = 2
WARN: Final = 3
ERROR: Final = 4
FATAL: Final = 5
DEBUG: int
INFO: int
WARN: int
ERROR: int
FATAL: int
class Log:
def __init__(self, threshold: int = 3) -> None: ...

View File

@@ -1,15 +1,15 @@
import sys
from collections.abc import Mapping
from distutils.ccompiler import CCompiler
from typing import Final, Literal, overload
from typing import Literal, overload
from typing_extensions import deprecated
PREFIX: Final[str]
EXEC_PREFIX: Final[str]
BASE_PREFIX: Final[str]
BASE_EXEC_PREFIX: Final[str]
project_base: Final[str]
python_build: Final[bool]
PREFIX: str
EXEC_PREFIX: str
BASE_PREFIX: str
BASE_EXEC_PREFIX: str
project_base: str
python_build: bool
def expand_makefile_vars(s: str, vars: Mapping[str, str]) -> str: ...
@overload

View File

@@ -1,10 +1,9 @@
import sys
import types
import unittest
from _typeshed import ExcInfo
from collections.abc import Callable
from typing import Any, ClassVar, NamedTuple
from typing_extensions import Self, TypeAlias
from typing import Any, NamedTuple
from typing_extensions import TypeAlias
__all__ = [
"register_optionflag",
@@ -42,22 +41,9 @@ __all__ = [
"debug",
]
# MyPy errors on conditionals within named tuples.
if sys.version_info >= (3, 13):
class TestResults(NamedTuple):
def __new__(cls, failed: int, attempted: int, *, skipped: int = 0) -> Self: ... # type: ignore[misc]
skipped: int
failed: int
attempted: int
_fields: ClassVar = ("failed", "attempted") # type: ignore[misc]
__match_args__ = ("failed", "attempted") # type: ignore[misc]
__doc__: None # type: ignore[misc]
else:
class TestResults(NamedTuple):
failed: int
attempted: int
class TestResults(NamedTuple):
failed: int
attempted: int
OPTIONFLAGS_BY_NAME: dict[str, int]
@@ -148,8 +134,6 @@ class DocTestRunner:
original_optionflags: int
tries: int
failures: int
if sys.version_info >= (3, 13):
skips: int
test: DocTest
def __init__(self, checker: OutputChecker | None = None, verbose: bool | None = None, optionflags: int = 0) -> None: ...
def report_start(self, out: _Out, test: DocTest, example: Example) -> None: ...

View File

@@ -16,10 +16,6 @@ TOKEN_ENDS: Final[set[str]]
ASPECIALS: Final[set[str]]
ATTRIBUTE_ENDS: Final[set[str]]
EXTENDED_ATTRIBUTE_ENDS: Final[set[str]]
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
NLSET: Final[set[str]]
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
SPECIALSNL: Final[set[str]]
def quote_string(value: Any) -> str: ...

View File

@@ -3,34 +3,12 @@ from collections.abc import Callable
from email.errors import MessageDefect
from email.header import Header
from email.message import Message
from typing import Any
from typing_extensions import Self
class _PolicyBase:
def __init__(
self,
*,
max_line_length: int | None = 78,
linesep: str = "\n",
cte_type: str = "8bit",
raise_on_defect: bool = False,
mangle_from_: bool = ..., # default depends on sub-class
message_factory: Callable[[Policy], Message] | None = None,
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
verify_generated_headers: bool = True,
) -> None: ...
def clone(
self,
*,
max_line_length: int | None = ...,
linesep: str = ...,
cte_type: str = ...,
raise_on_defect: bool = ...,
mangle_from_: bool = ...,
message_factory: Callable[[Policy], Message] | None = ...,
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
verify_generated_headers: bool = ...,
) -> Self: ...
def __add__(self, other: Policy) -> Self: ...
def __add__(self, other: Any) -> Self: ...
def clone(self, **kw: Any) -> Self: ...
class Policy(_PolicyBase, metaclass=ABCMeta):
max_line_length: int | None
@@ -39,9 +17,16 @@ class Policy(_PolicyBase, metaclass=ABCMeta):
raise_on_defect: bool
mangle_from_: bool
message_factory: Callable[[Policy], Message] | None
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
verify_generated_headers: bool
def __init__(
self,
*,
max_line_length: int | None = 78,
linesep: str = "\n",
cte_type: str = "8bit",
raise_on_defect: bool = False,
mangle_from_: bool = False,
message_factory: Callable[[Policy], Message] | None = None,
) -> None: ...
def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ...
def register_defect(self, obj: Message, defect: MessageDefect) -> None: ...
def header_max_count(self, name: str) -> int | None: ...
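The email.policy hunk above moves the keyword-only constructor arguments between `_PolicyBase` and `Policy` (and adds or removes `verify_generated_headers`). The keywords are used the same way in either layout, e.g.:

from email.message import EmailMessage
from email.policy import SMTP

msg = EmailMessage(policy=SMTP.clone(max_line_length=998))   # clone() accepts the same keywords
msg["Subject"] = "hello"
msg.set_content("body")
print(msg.as_bytes().splitlines()[0])                        # b'Subject: hello'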

View File

@@ -7,9 +7,6 @@ class BoundaryError(MessageParseError): ...
class MultipartConversionError(MessageError, TypeError): ...
class CharsetError(MessageError): ...
# Added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
class HeaderWriteError(MessageError): ...
class MessageDefect(ValueError):
def __init__(self, line: str | None = None) -> None: ...

View File

@@ -50,8 +50,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]):
def get_payload(self, i: None = None, *, decode: Literal[True]) -> _EncodedPayloadType | Any: ...
@overload # not multipart, IDEM but w/o kwarg
def get_payload(self, i: None, decode: Literal[True]) -> _EncodedPayloadType | Any: ...
# If `charset=None` and payload supports both `encode` AND `decode`,
# then an invalid payload could be passed, but this is unlikely
# If `charset=None` and payload supports both `encode` AND `decode`, then an invalid payload could be passed, but this is unlikely
# Not[_SupportsEncodeToPayload]
@overload
def set_payload(
@@ -147,11 +146,7 @@ class Message(Generic[_HeaderT, _HeaderParamT]):
class MIMEPart(Message[_HeaderRegistryT, _HeaderRegistryParamT]):
def __init__(self, policy: Policy | None = None) -> None: ...
def get_body(self, preferencelist: Sequence[str] = ("related", "html", "plain")) -> MIMEPart[_HeaderRegistryT] | None: ...
def attach(self, payload: Self) -> None: ... # type: ignore[override]
# The attachments are created via type(self) in the attach method. It's theoretically
# possible to sneak other attachment types into a MIMEPart instance, but could cause
# unforeseen consequences.
def iter_attachments(self) -> Iterator[Self]: ...
def iter_attachments(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: ...
def iter_parts(self) -> Iterator[MIMEPart[_HeaderRegistryT]]: ...
def get_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> Any: ...
def set_content(self, *args: Any, content_manager: ContentManager | None = None, **kw: Any) -> None: ...

View File

@@ -30,12 +30,20 @@ _PDTZ: TypeAlias = tuple[int, int, int, int, int, int, int, int, int, int | None
def quote(str: str) -> str: ...
def unquote(str: str) -> str: ...
# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ...
if sys.version_info >= (3, 13):
def parseaddr(addr: str | list[str], *, strict: bool = True) -> tuple[str, str]: ...
else:
def parseaddr(addr: str) -> tuple[str, str]: ...
def formataddr(pair: tuple[str | None, str], charset: str | Charset = "utf-8") -> str: ...
# `strict` parameter added in Python 3.8.20, 3.9.20, 3.10.15, 3.11.10, 3.12.5
def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ...
if sys.version_info >= (3, 13):
def getaddresses(fieldvalues: Iterable[str], *, strict: bool = True) -> list[tuple[str, str]]: ...
else:
def getaddresses(fieldvalues: Iterable[str]) -> list[tuple[str, str]]: ...
@overload
def parsedate(data: None) -> None: ...
@overload
@@ -58,10 +66,7 @@ def mktime_tz(data: _PDTZ) -> int: ...
def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ...
def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ...
if sys.version_info >= (3, 14):
def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...
elif sys.version_info >= (3, 12):
if sys.version_info >= (3, 12):
@overload
def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...
@overload

View File
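The email.utils hunk above either documents the `strict` keyword of `parseaddr()`/`getaddresses()` unconditionally or gates it behind a version check; plain calls are unaffected either way:

from email.utils import getaddresses, parseaddr

print(parseaddr("Jane Doe <jane@example.org>"))
# ('Jane Doe', 'jane@example.org')
print(getaddresses(["Jane <jane@example.org>, bob@example.org"]))
# [('Jane', 'jane@example.org'), ('', 'bob@example.org')]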

@@ -17,24 +17,13 @@ def cmpfiles(
) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ...
class dircmp(Generic[AnyStr]):
if sys.version_info >= (3, 13):
def __init__(
self,
a: GenericPath[AnyStr],
b: GenericPath[AnyStr],
ignore: Sequence[AnyStr] | None = None,
hide: Sequence[AnyStr] | None = None,
*,
shallow: bool = True,
) -> None: ...
else:
def __init__(
self,
a: GenericPath[AnyStr],
b: GenericPath[AnyStr],
ignore: Sequence[AnyStr] | None = None,
hide: Sequence[AnyStr] | None = None,
) -> None: ...
def __init__(
self,
a: GenericPath[AnyStr],
b: GenericPath[AnyStr],
ignore: Sequence[AnyStr] | None = None,
hide: Sequence[AnyStr] | None = None,
) -> None: ...
left: AnyStr
right: AnyStr
hide: Sequence[AnyStr]

View File

@@ -86,7 +86,7 @@ class FTP:
def makeport(self) -> socket: ...
def makepasv(self) -> tuple[str, int]: ...
def login(self, user: str = "", passwd: str = "", acct: str = "") -> str: ...
# In practice, `rest` can actually be anything whose str() is an integer sequence, so to make it simple we allow integers
# In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers.
def ntransfercmd(self, cmd: str, rest: int | str | None = None) -> tuple[socket, int | None]: ...
def transfercmd(self, cmd: str, rest: int | str | None = None) -> socket: ...
def retrbinary(

View File

@@ -155,7 +155,7 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12):
@property
def names(self) -> set[str]: ...
@overload
def select(self) -> Self: ...
def select(self) -> Self: ... # type: ignore[misc]
@overload
def select(
self,
@@ -277,7 +277,7 @@ if sys.version_info >= (3, 12):
elif sys.version_info >= (3, 10):
@overload
def entry_points() -> SelectableGroups: ...
def entry_points() -> SelectableGroups: ... # type: ignore[overload-overlap]
@overload
def entry_points(
*, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ...

View File
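The importlib.metadata hunk above adjusts overlap ignores on the 3.10/3.11 `entry_points()` overloads; on 3.10+ the keyword-filtering form they describe is used like this:

from importlib.metadata import entry_points

for ep in entry_points(group="console_scripts"):   # keyword filtering, per the overload above
    print(ep.name, "->", ep.value)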

@@ -84,6 +84,7 @@ class RawIOBase(IOBase):
def read(self, size: int = -1, /) -> bytes | None: ...
class BufferedIOBase(IOBase):
raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations.
def detach(self) -> RawIOBase: ...
def readinto(self, buffer: WriteableBuffer, /) -> int: ...
def write(self, buffer: ReadableBuffer, /) -> int: ...
@@ -118,13 +119,11 @@ class BytesIO(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible d
def read1(self, size: int | None = -1, /) -> bytes: ...
class BufferedReader(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes
raw: RawIOBase
def __enter__(self) -> Self: ...
def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ...
def peek(self, size: int = 0, /) -> bytes: ...
class BufferedWriter(BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes
raw: RawIOBase
def __enter__(self) -> Self: ...
def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ...
def write(self, buffer: ReadableBuffer, /) -> int: ...

View File

@@ -1,12 +1,12 @@
from collections.abc import Callable, Iterator
from re import Pattern
from typing import Any, Final
from typing import Any
ESCAPE: Final[Pattern[str]]
ESCAPE_ASCII: Final[Pattern[str]]
HAS_UTF8: Final[Pattern[bytes]]
ESCAPE_DCT: Final[dict[str, str]]
INFINITY: Final[float]
ESCAPE: Pattern[str]
ESCAPE_ASCII: Pattern[str]
HAS_UTF8: Pattern[bytes]
ESCAPE_DCT: dict[str, str]
INFINITY: float
def py_encode_basestring(s: str) -> str: ... # undocumented
def py_encode_basestring_ascii(s: str) -> str: ... # undocumented

View File

@@ -6,7 +6,7 @@ from ..pytree import Node
class FixUnicode(fixer_base.BaseFix):
BM_compatible: ClassVar[Literal[True]]
PATTERN: ClassVar[str]
PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]] # type: ignore[name-defined] # Name "STRING" is not defined
unicode_literals: bool
def start_tree(self, tree: Node, filename: StrPath) -> None: ...
def transform(self, node, results): ...

View File

@@ -55,9 +55,10 @@ __all__ = [
"setLogRecordFactory",
"lastResort",
"raiseExceptions",
"warn",
]
if sys.version_info < (3, 13):
__all__ += ["warn"]
if sys.version_info >= (3, 11):
__all__ += ["getLevelNamesMapping"]
if sys.version_info >= (3, 12):
@@ -156,16 +157,17 @@ class Logger(Filterer):
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
@deprecated("Deprecated; use warning() instead.")
def warn(
self,
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
if sys.version_info < (3, 13):
def warn(
self,
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
def error(
self,
msg: object,
@@ -410,17 +412,18 @@ class LoggerAdapter(Generic[_L]):
extra: Mapping[str, object] | None = None,
**kwargs: object,
) -> None: ...
@deprecated("Deprecated; use warning() instead.")
def warn(
self,
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
**kwargs: object,
) -> None: ...
if sys.version_info < (3, 13):
def warn(
self,
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
**kwargs: object,
) -> None: ...
def error(
self,
msg: object,
@@ -520,15 +523,17 @@ def warning(
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
@deprecated("Deprecated; use warning() instead.")
def warn(
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
if sys.version_info < (3, 13):
def warn(
msg: object,
*args: object,
exc_info: _ExcInfoType = None,
stack_info: bool = False,
stacklevel: int = 1,
extra: Mapping[str, object] | None = None,
) -> None: ...
def error(
msg: object,
*args: object,

View File

@@ -73,7 +73,7 @@ def copy(obj: _CT) -> _CT: ...
@overload
def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ...
@overload
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ...
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ... # type: ignore
@overload
def synchronized(
obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None
@@ -115,12 +115,12 @@ class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generi
class SynchronizedString(SynchronizedArray[bytes]):
@overload # type: ignore[override]
def __getitem__(self, i: slice) -> bytes: ...
@overload
@overload # type: ignore[override]
def __getitem__(self, i: int) -> bytes: ...
@overload # type: ignore[override]
def __setitem__(self, i: slice, value: bytes) -> None: ...
@overload
def __setitem__(self, i: int, value: bytes) -> None: ...
@overload # type: ignore[override]
def __setitem__(self, i: int, value: bytes) -> None: ... # type: ignore[override]
def __getslice__(self, start: int, stop: int) -> bytes: ... # type: ignore[override]
def __setslice__(self, start: int, stop: int, values: bytes) -> None: ... # type: ignore[override]

Some files were not shown because too many files have changed in this diff.