Compare commits
charlie/wa...split-comp (61 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | ee7294e556 |  |
|  | 52d27befe8 |  |
|  | 6ed06afd28 |  |
|  | b9da31610a |  |
|  | ac7b1770e2 |  |
|  | e4c2859c0f |  |
|  | 6dcd743111 |  |
|  | 73160dc8b6 |  |
|  | 15aa5a6d57 |  |
|  | 33512a4249 |  |
|  | d8ebb03591 |  |
|  | 2e211c5c22 |  |
|  | 9fd8aaaf29 |  |
|  | d110bd4e60 |  |
|  | eb9c7ae869 |  |
|  | 7defc0d136 |  |
|  | 45f459bafd |  |
|  | 99e946a005 |  |
|  | 78a7ac0722 |  |
|  | fa2f3f9f2f |  |
|  | 3898d737d8 |  |
|  | c487149b7d |  |
|  | bebed67bf1 |  |
|  | 3ddcad64f5 |  |
|  | 05c35b6975 |  |
|  | 7fc39ad624 |  |
|  | 2520ebb145 |  |
|  | 89c8b49027 |  |
|  | e05953a991 |  |
|  | d0ac38f9d3 |  |
|  | ff53db3d99 |  |
|  | 899a52390b |  |
|  | 82a3e69b8a |  |
|  | 7027344dfc |  |
|  | fb9f0c448f |  |
|  | 75131c6f4a |  |
|  | 4b9ddc4a06 |  |
|  | 99dc208b00 |  |
|  | 540023262e |  |
|  | 2ea79572ae |  |
|  | aa0db338d9 |  |
|  | a99a45868c |  |
|  | fabf19fdc9 |  |
|  | 59f712a566 |  |
|  | 1d080465de |  |
|  | 3481e16cdf |  |
|  | d7e9280e1e |  |
|  | f237d36d2f |  |
|  | 12f22b1fdd |  |
|  | 47d05ee9ea |  |
|  | 9caec36b59 |  |
|  | cb364780b3 |  |
|  | 71b8bf211f |  |
|  | 109b9cc4f9 |  |
|  | 5d02627794 |  |
|  | 65444bb00e |  |
|  | 8822a79b4d |  |
|  | 2df4d23113 |  |
|  | 603b62607a |  |
|  | 2b71fc4510 |  |
|  | 1b78d872ec |  |
.github/renovate.json5 (16 changes, vendored)
```diff
@@ -14,12 +14,26 @@
     rangeStrategy: "update-lockfile",
   },
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
     fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
+  pip_requirements: {
+    fileMatch: ["^docs/requirements.*\\.txt$"],
+    // The default for this package manager is to run on all requirements.txt files:
+    // https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
+    // `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
+    // outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
+    // which takes a regex string, `ignorePaths` takes a glob string, so we have to use
+    // a "negative glob pattern".
+    // See:
+    // - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
+    // - https://docs.renovatebot.com/configuration-options/#ignorepaths
+    // - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
+    ignorePaths: ["!docs/requirements*.txt"]
+  },
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
     fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {
```
.github/workflows/ci.yaml (7 changes, vendored)
```diff
@@ -142,6 +142,13 @@ jobs:

      # Check for broken links in the documentation.
      - run: cargo doc --all --no-deps
        env:
          RUSTDOCFLAGS: "-D warnings"
      # Use --document-private-items so that all our doc comments are kept in
      # sync, not just public items. Eventually we should do this for all
      # crates; for now add crates here as they are warning-clean to prevent
      # regression.
      - run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
        env:
          # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
          RUSTDOCFLAGS: "-D warnings"
```
```diff
@@ -14,6 +14,9 @@ MD041: false

 # MD013/line-length
 MD013: false

+# MD014/commands-show-output
+MD014: false
+
 # MD024/no-duplicate-heading
 MD024:
   # Allow when nested under different parents e.g. CHANGELOG.md
```
```diff
@@ -57,7 +57,7 @@ repos:
         pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.6
+    rev: v0.5.7
     hooks:
       - id: ruff-format
       - id: ruff
```
@@ -1,5 +1,43 @@

# Breaking Changes

## 0.6.0

- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))

- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).

  You can disable specific rules for notebooks using [`per-file-ignores`](https://docs.astral.sh/ruff/settings/#lint_per-file-ignores):

  ```toml
  [tool.ruff.lint.per-file-ignores]
  "*.ipynb" = ["E501"]  # disable line-too-long in notebooks
  ```

  If you'd prefer to either only lint or only format Jupyter Notebook files, you can use the
  section-specific `exclude` option to do so. For example, the following would only lint Jupyter
  Notebook files and not format them:

  ```toml
  [tool.ruff.format]
  exclude = ["*.ipynb"]
  ```

  And, conversely, the following would only format Jupyter Notebook files and not lint them:

  ```toml
  [tool.ruff.lint]
  exclude = ["*.ipynb"]
  ```

  You can completely disable Jupyter Notebook support by updating the [`extend-exclude`](https://docs.astral.sh/ruff/settings/#extend-exclude) setting:

  ```toml
  [tool.ruff]
  extend-exclude = ["*.ipynb"]
  ```

## 0.5.0

- Follow the XDG specification to discover user-level configurations on macOS (same as on other Unix platforms)
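As context for the `PT001`/`PT023` change above, a minimal sketch of the new default style (the fixture names are illustrative, not from this repository):

```python
import pytest

# Before 0.6.0, the default style kept the empty parentheses:
@pytest.fixture()
def config_old(): ...

# From 0.6.0, the default omits them when there are no arguments,
# so the parenthesized form above is what PT001 flags (and auto-fixes):
@pytest.fixture
def config_new(): ...
```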
CHANGELOG.md (91 changes)
@@ -1,5 +1,96 @@

# Changelog

## 0.6.0

Check out the [blog post](https://astral.sh/blog/ruff-v0.6.0) for a migration guide and overview of the changes!

### Breaking changes

See also the "Remapped rules" section, which may result in disabled rules.

- Lint and format Jupyter Notebook by default ([#12878](https://github.com/astral-sh/ruff/pull/12878)).
- Detect imports in `src` layouts by default for `isort` rules ([#12848](https://github.com/astral-sh/ruff/pull/12848))
- The pytest rules `PT001` and `PT023` now default to omitting the decorator parentheses when there are no arguments ([#12838](https://github.com/astral-sh/ruff/pull/12838)).

### Deprecations

The following rules are now deprecated:

- [`pytest-missing-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-missing-fixture-name-underscore/) (`PT004`)
- [`pytest-incorrect-fixture-name-underscore`](https://docs.astral.sh/ruff/rules/pytest-incorrect-fixture-name-underscore/) (`PT005`)
- [`unpacked-list-comprehension`](https://docs.astral.sh/ruff/rules/unpacked-list-comprehension/) (`UP027`)

### Remapped rules

The following rules have been remapped to new rule codes:

- [`unnecessary-dict-comprehension-for-iterable`](https://docs.astral.sh/ruff/rules/unnecessary-dict-comprehension-for-iterable/): `RUF025` to `C420`

### Stabilization

The following rules have been stabilized and are no longer in preview:

- [`singledispatch-method`](https://docs.astral.sh/ruff/rules/singledispatch-method/) (`PLE1519`)
- [`singledispatchmethod-function`](https://docs.astral.sh/ruff/rules/singledispatchmethod-function/) (`PLE1520`)
- [`bad-staticmethod-argument`](https://docs.astral.sh/ruff/rules/bad-staticmethod-argument/) (`PLW0211`)
- [`if-stmt-min-max`](https://docs.astral.sh/ruff/rules/if-stmt-min-max/) (`PLR1730`)
- [`invalid-bytes-return-type`](https://docs.astral.sh/ruff/rules/invalid-bytes-return-type/) (`PLE0308`)
- [`invalid-hash-return-type`](https://docs.astral.sh/ruff/rules/invalid-hash-return-type/) (`PLE0309`)
- [`invalid-index-return-type`](https://docs.astral.sh/ruff/rules/invalid-index-return-type/) (`PLE0305`)
- [`invalid-length-return-type`](https://docs.astral.sh/ruff/rules/invalid-length-return-type/) (`PLE0303`)
- [`self-or-cls-assignment`](https://docs.astral.sh/ruff/rules/self-or-cls-assignment/) (`PLW0642`)
- [`byte-string-usage`](https://docs.astral.sh/ruff/rules/byte-string-usage/) (`PYI057`)
- [`duplicate-literal-member`](https://docs.astral.sh/ruff/rules/duplicate-literal-member/) (`PYI062`)
- [`redirected-noqa`](https://docs.astral.sh/ruff/rules/redirected-noqa/) (`RUF101`)

The following behaviors have been stabilized:

- [`cancel-scope-no-checkpoint`](https://docs.astral.sh/ruff/rules/cancel-scope-no-checkpoint/) (`ASYNC100`): Support `asyncio` and `anyio` context managers.
- [`async-function-with-timeout`](https://docs.astral.sh/ruff/rules/async-function-with-timeout/) (`ASYNC109`): Support `asyncio` and `anyio` context managers.
- [`async-busy-wait`](https://docs.astral.sh/ruff/rules/async-busy-wait/) (`ASYNC110`): Support `asyncio` and `anyio` context managers.
- [`async-zero-sleep`](https://docs.astral.sh/ruff/rules/async-zero-sleep/) (`ASYNC115`): Support `anyio` context managers.
- [`long-sleep-not-forever`](https://docs.astral.sh/ruff/rules/long-sleep-not-forever/) (`ASYNC116`): Support `anyio` context managers.

The following fixes have been stabilized:

- [`superfluous-else-return`](https://docs.astral.sh/ruff/rules/superfluous-else-return/) (`RET505`)
- [`superfluous-else-raise`](https://docs.astral.sh/ruff/rules/superfluous-else-raise/) (`RET506`)
- [`superfluous-else-continue`](https://docs.astral.sh/ruff/rules/superfluous-else-continue/) (`RET507`)
- [`superfluous-else-break`](https://docs.astral.sh/ruff/rules/superfluous-else-break/) (`RET508`)

### Preview features

- \[`flake8-simplify`\] Further simplify to binary in preview for (`SIM108`) ([#12796](https://github.com/astral-sh/ruff/pull/12796))
- \[`pyupgrade`\] Show violations without auto-fix (`UP031`) ([#11229](https://github.com/astral-sh/ruff/pull/11229))

### Rule changes

- \[`flake8-import-conventions`\] Add `xml.etree.ElementTree` to default conventions ([#12455](https://github.com/astral-sh/ruff/pull/12455))
- \[`flake8-pytest-style`\] Add a space after comma in CSV output (`PT006`) ([#12853](https://github.com/astral-sh/ruff/pull/12853))

### Server

- Show a message for incorrect settings ([#12781](https://github.com/astral-sh/ruff/pull/12781))

### Bug fixes

- \[`flake8-async`\] Do not lint yield in context manager (`ASYNC100`) ([#12896](https://github.com/astral-sh/ruff/pull/12896))
- \[`flake8-comprehensions`\] Do not lint `async for` comprehensions (`C419`) ([#12895](https://github.com/astral-sh/ruff/pull/12895))
- \[`flake8-return`\] Only add return `None` at end of a function (`RET503`) ([#11074](https://github.com/astral-sh/ruff/pull/11074))
- \[`flake8-type-checking`\] Avoid treating `dataclasses.KW_ONLY` as typing-only (`TCH003`) ([#12863](https://github.com/astral-sh/ruff/pull/12863))
- \[`pep8-naming`\] Treat `type(Protocol)` et al as metaclass base (`N805`) ([#12770](https://github.com/astral-sh/ruff/pull/12770))
- \[`pydoclint`\] Don't enforce returns and yields in abstract methods (`DOC201`, `DOC202`) ([#12771](https://github.com/astral-sh/ruff/pull/12771))
- \[`ruff`\] Skip tuples with slice expressions in (`RUF031`) ([#12768](https://github.com/astral-sh/ruff/pull/12768))
- \[`ruff`\] Ignore unparenthesized tuples in subscripts when the subscript is a type annotation or type alias (`RUF031`) ([#12762](https://github.com/astral-sh/ruff/pull/12762))
- \[`ruff`\] Ignore template strings passed to logging and `builtins._()` calls (`RUF027`) ([#12889](https://github.com/astral-sh/ruff/pull/12889))
- \[`ruff`\] Do not remove parens for tuples with starred expressions in Python \<=3.10 (`RUF031`) ([#12784](https://github.com/astral-sh/ruff/pull/12784))
- Evaluate default parameter values for a function in that function's enclosing scope ([#12852](https://github.com/astral-sh/ruff/pull/12852))

### Other changes

- Respect VS Code cell metadata when detecting the language of Jupyter Notebook cells ([#12864](https://github.com/astral-sh/ruff/pull/12864))
- Respect `kernelspec` notebook metadata when detecting the preferred language for a Jupyter Notebook ([#12875](https://github.com/astral-sh/ruff/pull/12875))

## 0.5.7

### Preview features
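For configurations that selected the remapped rule by its old code, the selection needs the new code; a minimal sketch (assuming an explicit opt-in to the former `RUF025`):

```toml
[tool.ruff.lint]
# unnecessary-dict-comprehension-for-iterable was remapped in 0.6.0:
extend-select = ["C420"]  # previously: ["RUF025"]
```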
```diff
@@ -911,9 +911,5 @@ There are three ways in which an import can be categorized as "first-party":
    the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
    file `foo.py`.

-   By default, `src` is set to the project root. In the above example, we'd want to set
-   `src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
-   as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
-   unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
-   the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
-   explicitly.
+   By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
+   This ensures that Ruff supports both flat and "src" layouts out of the box.
```
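For projects that had declared a "src" layout explicitly, the setting the new default makes redundant looked roughly like this (the value shown is assumed to match the new default of project root plus `src`):

```toml
[tool.ruff]
# As of this change, this is the default; previously only the project root was assumed.
src = [".", "src"]
```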
Cargo.lock (140 changes, generated)
```diff
@@ -95,9 +95,9 @@ dependencies = [

 [[package]]
 name = "anstyle"
-version = "1.0.6"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"

 [[package]]
 name = "anstyle-parse"
@@ -288,7 +288,7 @@ dependencies = [
 "android-tzdata",
 "iana-time-zone",
 "num-traits",
-"windows-targets 0.52.5",
+"windows-targets 0.52.6",
 ]

 [[package]]
@@ -320,9 +320,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.13"
+version = "4.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc"
+checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc"
 dependencies = [
 "clap_builder",
 "clap_derive",
@@ -330,9 +330,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.13"
+version = "4.5.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99"
+checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
 dependencies = [
 "anstream",
 "anstyle",
@@ -820,14 +820,14 @@ dependencies = [

 [[package]]
 name = "filetime"
-version = "0.2.23"
+version = "0.2.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
+checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
 dependencies = [
 "cfg-if",
 "libc",
-"redox_syscall",
-"windows-sys 0.52.0",
+"libredox",
+"windows-sys 0.59.0",
 ]

 [[package]]
@@ -1143,9 +1143,9 @@ dependencies = [

 [[package]]
 name = "is-macro"
-version = "0.3.5"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a85abdc13717906baccb5a1e435556ce0df215f242892f721dff62bf25288f"
+checksum = "2069faacbe981460232f880d26bf3c7634e322d49053aa48c27e3ae642f728f1"
 dependencies = [
 "Inflector",
 "proc-macro2",
@@ -1297,6 +1297,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
 dependencies = [
 "bitflags 2.6.0",
 "libc",
+"redox_syscall 0.5.3",
 ]

 [[package]]
@@ -1564,7 +1565,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
 dependencies = [
 "cfg-if",
 "libc",
-"redox_syscall",
+"redox_syscall 0.4.1",
 "smallvec",
 "windows-targets 0.48.5",
 ]
@@ -1899,6 +1900,7 @@ dependencies = [
 "ruff_python_ast",
 "ruff_python_parser",
 "ruff_python_stdlib",
+"ruff_source_file",
 "ruff_text_size",
 "rustc-hash 2.0.0",
 "salsa",
@@ -1961,6 +1963,7 @@ dependencies = [
 "ruff_cache",
 "ruff_db",
 "ruff_python_ast",
 "ruff_text_size",
 "rustc-hash 2.0.0",
 "salsa",
 "thiserror",
@@ -1976,6 +1979,15 @@ dependencies = [
 "bitflags 1.3.2",
 ]

+[[package]]
+name = "redox_syscall"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+dependencies = [
+ "bitflags 2.6.0",
+]
+
 [[package]]
 name = "redox_users"
 version = "0.4.5"
@@ -2048,7 +2060,7 @@ dependencies = [

 [[package]]
 name = "ruff"
-version = "0.5.7"
+version = "0.6.0"
 dependencies = [
 "anyhow",
 "argfile",
@@ -2156,6 +2168,7 @@ dependencies = [
 "rustc-hash 2.0.0",
 "salsa",
 "tempfile",
 "thiserror",
 "tracing",
 "tracing-subscriber",
 "tracing-tree",
@@ -2239,7 +2252,7 @@ dependencies = [

 [[package]]
 name = "ruff_linter"
-version = "0.5.7"
+version = "0.6.0"
 dependencies = [
 "aho-corasick",
 "annotate-snippets 0.9.2",
@@ -2559,7 +2572,7 @@ dependencies = [

 [[package]]
 name = "ruff_wasm"
-version = "0.5.7"
+version = "0.6.0"
 dependencies = [
 "console_error_panic_hook",
 "console_log",
@@ -2786,9 +2799,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"

 [[package]]
 name = "serde"
-version = "1.0.204"
+version = "1.0.206"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
+checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
 dependencies = [
 "serde_derive",
 ]
@@ -2806,9 +2819,9 @@ dependencies = [

 [[package]]
 name = "serde_derive"
-version = "1.0.204"
+version = "1.0.206"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
+checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -2828,9 +2841,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.122"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
+checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
 dependencies = [
 "itoa",
 "memchr",
@@ -2860,9 +2873,9 @@ dependencies = [

 [[package]]
 name = "serde_test"
-version = "1.0.176"
+version = "1.0.177"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab"
+checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed"
 dependencies = [
 "serde",
 ]
@@ -2989,9 +3002,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"

 [[package]]
 name = "syn"
-version = "2.0.72"
+version = "2.0.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
+checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -3011,15 +3024,15 @@ dependencies = [

 [[package]]
 name = "tempfile"
-version = "3.11.0"
+version = "3.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53"
+checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
 dependencies = [
 "cfg-if",
 "fastrand",
 "once_cell",
 "rustix",
-"windows-sys 0.52.0",
+"windows-sys 0.59.0",
 ]

 [[package]]
@@ -3395,9 +3408,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

 [[package]]
 name = "ureq"
-version = "2.10.0"
+version = "2.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72139d247e5f97a3eff96229a7ae85ead5328a39efe76f8bf5a06313d505b6ea"
+checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
 dependencies = [
 "base64",
 "flate2",
@@ -3689,7 +3702,7 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
-"windows-targets 0.52.5",
+"windows-targets 0.52.6",
 ]

 [[package]]
@@ -3707,7 +3720,16 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
-"windows-targets 0.52.5",
+"windows-targets 0.52.6",
 ]

+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
 [[package]]
@@ -3727,18 +3749,18 @@ dependencies = [

 [[package]]
 name = "windows-targets"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
-"windows_aarch64_gnullvm 0.52.5",
-"windows_aarch64_msvc 0.52.5",
-"windows_i686_gnu 0.52.5",
+"windows_aarch64_gnullvm 0.52.6",
+"windows_aarch64_msvc 0.52.6",
+"windows_i686_gnu 0.52.6",
 "windows_i686_gnullvm",
-"windows_i686_msvc 0.52.5",
-"windows_x86_64_gnu 0.52.5",
-"windows_x86_64_gnullvm 0.52.5",
-"windows_x86_64_msvc 0.52.5",
+"windows_i686_msvc 0.52.6",
+"windows_x86_64_gnu 0.52.6",
+"windows_x86_64_gnullvm 0.52.6",
+"windows_x86_64_msvc 0.52.6",
 ]

 [[package]]
@@ -3749,9 +3771,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"

 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"

 [[package]]
 name = "windows_aarch64_msvc"
@@ -3761,9 +3783,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"

 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"

 [[package]]
 name = "windows_i686_gnu"
@@ -3773,15 +3795,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"

 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"

 [[package]]
 name = "windows_i686_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"

 [[package]]
 name = "windows_i686_msvc"
@@ -3791,9 +3813,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"

 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"

 [[package]]
 name = "windows_x86_64_gnu"
@@ -3803,9 +3825,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"

 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"

 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -3815,9 +3837,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"

 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"

 [[package]]
 name = "windows_x86_64_msvc"
@@ -3827,9 +3849,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

 [[package]]
 name = "winnow"
```
````diff
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.5.7/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.5.7/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.6.0/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.6.0/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
````
````diff
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.5.7
+  rev: v0.6.0
   hooks:
     # Run the linter.
     - id: ruff
````
```diff
@@ -184,7 +184,7 @@ fn run() -> anyhow::Result<ExitStatus> {

     // TODO: Use the `program_settings` to compute the key for the database's persistent
     // cache and load the cache if it exists.
-    let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
+    let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;

     let (main_loop, main_loop_cancellation_token) = MainLoop::new();
```
```diff
@@ -4,7 +4,6 @@ use std::io::Write;
 use std::time::Duration;

 use anyhow::{anyhow, Context};
-use salsa::Setter;

 use red_knot_python_semantic::{
     resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
@@ -26,6 +25,7 @@ struct TestCase {
     /// We need to hold on to it in the test case or the temp files get deleted.
     _temp_dir: tempfile::TempDir,
     root_dir: SystemPathBuf,
+    search_path_settings: SearchPathSettings,
 }

 impl TestCase {
@@ -108,18 +108,20 @@ impl TestCase {
     fn update_search_path_settings(
         &mut self,
         f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
-    ) {
+    ) -> anyhow::Result<()> {
         let program = Program::get(self.db());
-        let search_path_settings = program.search_paths(self.db());
-
-        let new_settings = f(search_path_settings);
+        let new_settings = f(&self.search_path_settings);

-        program.set_search_paths(&mut self.db).to(new_settings);
+        program.update_search_paths(&mut self.db, new_settings.clone())?;
+        self.search_path_settings = new_settings;

         if let Some(watcher) = &mut self.watcher {
             watcher.update(&self.db);
             assert!(!watcher.has_errored_paths());
         }
+
+        Ok(())
     }

     fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
@@ -221,13 +223,13 @@ where
     let system = OsSystem::new(&workspace_path);

     let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
-    let search_paths = create_search_paths(&root_path, workspace.root());
+    let search_path_settings = create_search_paths(&root_path, workspace.root());

-    for path in search_paths
+    for path in search_path_settings
         .extra_paths
         .iter()
-        .chain(search_paths.site_packages.iter())
-        .chain(search_paths.custom_typeshed.iter())
+        .chain(search_path_settings.site_packages.iter())
+        .chain(search_path_settings.custom_typeshed.iter())
     {
         std::fs::create_dir_all(path.as_std_path())
             .with_context(|| format!("Failed to create search path '{path}'"))?;
@@ -235,10 +237,10 @@ where

     let settings = ProgramSettings {
         target_version: PythonVersion::default(),
-        search_paths,
+        search_paths: search_path_settings.clone(),
     };

-    let db = RootDatabase::new(workspace, settings, system);
+    let db = RootDatabase::new(workspace, settings, system)?;

     let (sender, receiver) = crossbeam::channel::unbounded();
     let watcher = directory_watcher(move |events| sender.send(events).unwrap())
@@ -253,6 +255,7 @@ where
         watcher: Some(watcher),
         _temp_dir: temp_dir,
         root_dir: root_path,
+        search_path_settings,
     };

     // Sometimes the file watcher reports changes for events that happened before the watcher was started.
@@ -737,7 +740,8 @@ fn add_search_path() -> anyhow::Result<()> {
     case.update_search_path_settings(|settings| SearchPathSettings {
         site_packages: vec![site_packages.clone()],
         ..settings.clone()
-    });
+    })
+    .expect("Search path settings to be valid");

     std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
@@ -767,7 +771,8 @@ fn remove_search_path() -> anyhow::Result<()> {
     case.update_search_path_settings(|settings| SearchPathSettings {
         site_packages: vec![],
         ..settings.clone()
-    });
+    })
+    .expect("Search path settings to be valid");

     std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
```
```diff
@@ -15,8 +15,10 @@ ruff_db = { workspace = true }
 ruff_index = { workspace = true }
 ruff_python_ast = { workspace = true }
 ruff_python_stdlib = { workspace = true }
+ruff_source_file = { workspace = true }
 ruff_text_size = { workspace = true }

+anyhow = { workspace = true }
 bitflags = { workspace = true }
 camino = { workspace = true }
 compact_str = { workspace = true }
@@ -34,7 +36,7 @@ walkdir = { workspace = true }
 zip = { workspace = true, features = ["zstd", "deflate"] }

 [dev-dependencies]
-ruff_db = { workspace = true, features = ["os", "testing"]}
+ruff_db = { workspace = true, features = ["os", "testing"] }
 ruff_python_parser = { workspace = true }

 anyhow = { workspace = true }
```
```diff
@@ -2,11 +2,13 @@ use std::iter::FusedIterator;

 pub(crate) use module::Module;
 pub use resolver::resolve_module;
+pub(crate) use resolver::SearchPaths;
 use ruff_db::system::SystemPath;
 pub use typeshed::vendored_typeshed_stubs;

+use crate::module_resolver::resolver::search_paths;
 use crate::Db;
-use resolver::{module_resolution_settings, SearchPathIterator};
+use resolver::SearchPathIterator;

 mod module;
 mod path;
@@ -20,7 +22,7 @@ mod testing;
 /// Returns an iterator over all search paths pointing to a system path
 pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
     SystemModuleSearchPathsIter {
-        inner: module_resolution_settings(db).search_paths(db),
+        inner: search_paths(db),
     }
 }
```
```diff
@@ -7,12 +7,13 @@ use ruff_db::files::{File, FilePath, FileRootKind};
 use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
 use ruff_db::vendored::VendoredPath;

-use crate::db::Db;
-use crate::module_name::ModuleName;
-use crate::{Program, SearchPathSettings};
-
 use super::module::{Module, ModuleKind};
 use super::path::{ModulePath, SearchPath, SearchPathValidationError};
 use super::state::ResolverState;
+use crate::db::Db;
+use crate::module_name::ModuleName;
+use crate::{Program, PythonVersion, SearchPathSettings};

 /// Resolves a module name to a module.
 pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
@@ -84,9 +85,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
         FilePath::SystemVirtual(_) => return None,
     };

-    let settings = module_resolution_settings(db);
-
-    let mut search_paths = settings.search_paths(db);
+    let mut search_paths = search_paths(db);

     let module_name = loop {
         let candidate = search_paths.next()?;
```
```diff
@@ -119,106 +118,122 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
     }
 }

-/// Validate and normalize the raw settings given by the user
-/// into settings we can use for module resolution
-///
-/// This method also implements the typing spec's [module resolution order].
-///
-/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
-fn try_resolve_module_resolution_settings(
-    db: &dyn Db,
-) -> Result<ModuleResolutionSettings, SearchPathValidationError> {
-    let program = Program::get(db.upcast());
-
-    let SearchPathSettings {
-        extra_paths,
-        src_root,
-        custom_typeshed,
-        site_packages,
-    } = program.search_paths(db.upcast());
-
-    if !extra_paths.is_empty() {
-        tracing::info!("Extra search paths: {extra_paths:?}");
-    }
-
-    if let Some(custom_typeshed) = custom_typeshed {
-        tracing::info!("Custom typeshed directory: {custom_typeshed}");
-    }
-
-    let system = db.system();
-    let files = db.files();
-
-    let mut static_search_paths = vec![];
-
-    for path in extra_paths {
-        let search_path = SearchPath::extra(system, path.clone())?;
-        files.try_add_root(
-            db.upcast(),
-            search_path.as_system_path().unwrap(),
-            FileRootKind::LibrarySearchPath,
-        );
-        static_search_paths.push(search_path);
-    }
-
-    static_search_paths.push(SearchPath::first_party(system, src_root.clone())?);
-
-    static_search_paths.push(if let Some(custom_typeshed) = custom_typeshed.as_ref() {
-        let search_path = SearchPath::custom_stdlib(db, custom_typeshed.clone())?;
-        files.try_add_root(
-            db.upcast(),
-            search_path.as_system_path().unwrap(),
-            FileRootKind::LibrarySearchPath,
-        );
-        search_path
-    } else {
-        SearchPath::vendored_stdlib()
-    });
-
-    let mut site_packages_paths: Vec<_> = Vec::with_capacity(site_packages.len());
-
-    for path in site_packages {
-        let search_path = SearchPath::site_packages(system, path.to_path_buf())?;
-        files.try_add_root(
-            db.upcast(),
-            search_path.as_system_path().unwrap(),
-            FileRootKind::LibrarySearchPath,
-        );
-        site_packages_paths.push(search_path);
-    }
-
-    // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
-
-    let target_version = program.target_version(db.upcast());
-    tracing::info!("Target version: {target_version}");
-
-    // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
-    // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
-    // as module resolution paths simultaneously.)
-    //
-    // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
-    // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
-    let mut seen_paths =
-        FxHashSet::with_capacity_and_hasher(static_search_paths.len(), FxBuildHasher);
-
-    static_search_paths.retain(|path| {
-        if let Some(path) = path.as_system_path() {
-            seen_paths.insert(path.to_path_buf())
-        } else {
-            true
-        }
-    });
-
-    Ok(ModuleResolutionSettings {
-        target_version,
-        static_search_paths,
-        site_packages_paths,
-    })
+pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
+    Program::get(db).search_paths(db).iter(db)
 }

-#[salsa::tracked(return_ref)]
-pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
-    // TODO proper error handling if this returns an error:
-    try_resolve_module_resolution_settings(db).unwrap()
+#[derive(Debug, PartialEq, Eq, Default)]
+pub(crate) struct SearchPaths {
+    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
+    /// These shouldn't ever change unless the config settings themselves change.
+    static_paths: Vec<SearchPath>,
+
+    /// site-packages paths are not included in the above field:
+    /// if there are multiple site-packages paths, editable installations can appear
+    /// *between* the site-packages paths on `sys.path` at runtime.
+    /// That means we can't know where a second or third `site-packages` path should sit
+    /// in terms of module-resolution priority until we've discovered the editable installs
+    /// for the first `site-packages` path
+    site_packages: Vec<SearchPath>,
+}
+
+impl SearchPaths {
+    /// Validate and normalize the raw settings given by the user
+    /// into settings we can use for module resolution
+    ///
+    /// This method also implements the typing spec's [module resolution order].
+    ///
+    /// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
+    pub(crate) fn from_settings(
+        db: &dyn Db,
+        settings: SearchPathSettings,
+    ) -> Result<Self, SearchPathValidationError> {
+        let SearchPathSettings {
+            extra_paths,
+            src_root,
+            custom_typeshed,
+            site_packages: site_packages_paths,
+        } = settings;
+
+        let system = db.system();
+        let files = db.files();
+
+        let mut static_paths = vec![];
+
+        for path in extra_paths {
+            tracing::debug!("Adding static extra search-path '{path}'");
+
+            let search_path = SearchPath::extra(system, path)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            static_paths.push(search_path);
+        }
+
+        tracing::debug!("Adding static search path '{src_root}'");
+        static_paths.push(SearchPath::first_party(system, src_root)?);
+
+        static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
+            tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
+
+            let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            search_path
+        } else {
+            SearchPath::vendored_stdlib()
+        });
+
+        let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
+
+        for path in site_packages_paths {
+            tracing::debug!("Adding site-package path '{path}'");
+            let search_path = SearchPath::site_packages(system, path)?;
+            files.try_add_root(
+                db.upcast(),
+                search_path.as_system_path().unwrap(),
+                FileRootKind::LibrarySearchPath,
+            );
+            site_packages.push(search_path);
+        }
+
+        // TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
+
+        // Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
+        // (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
+        // as module resolution paths simultaneously.)
+        //
+        // This code doesn't use an `IndexSet` because the key is the system path and not the search root.
+        //
+        // [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
+        let mut seen_paths = FxHashSet::with_capacity_and_hasher(static_paths.len(), FxBuildHasher);
+
+        static_paths.retain(|path| {
+            if let Some(path) = path.as_system_path() {
+                seen_paths.insert(path.to_path_buf())
+            } else {
+                true
+            }
+        });
+
+        Ok(SearchPaths {
+            static_paths,
+            site_packages,
+        })
+    }
+
+    pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
+        SearchPathIterator {
+            db,
+            static_paths: self.static_paths.iter(),
+            dynamic_paths: None,
+        }
+    }
 }

 /// Collect all dynamic search paths. For each `site-packages` path:
```
```diff
@@ -231,19 +246,20 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
 /// module-resolution priority.
 #[salsa::tracked(return_ref)]
 pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
-    let ModuleResolutionSettings {
-        target_version: _,
-        static_search_paths,
-        site_packages_paths,
-    } = module_resolution_settings(db);
+    tracing::debug!("Resolving dynamic module resolution paths");
+
+    let SearchPaths {
+        static_paths,
+        site_packages,
+    } = Program::get(db).search_paths(db);

     let mut dynamic_paths = Vec::new();

-    if site_packages_paths.is_empty() {
+    if site_packages.is_empty() {
         return dynamic_paths;
     }

-    let mut existing_paths: FxHashSet<_> = static_search_paths
+    let mut existing_paths: FxHashSet<_> = static_paths
         .iter()
         .filter_map(|path| path.as_system_path())
         .map(Cow::Borrowed)
@@ -252,7 +268,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
     let files = db.files();
     let system = db.system();

-    for site_packages_search_path in site_packages_paths {
+    for site_packages_search_path in site_packages {
         let site_packages_dir = site_packages_search_path
             .as_system_path()
             .expect("Expected site package path to be a system path");
@@ -302,6 +318,10 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
             if existing_paths.insert(Cow::Owned(installation.clone())) {
                 match SearchPath::editable(system, installation) {
                     Ok(search_path) => {
+                        tracing::debug!(
+                            "Adding editable installation to module resolution path {path}",
+                            path = search_path.as_system_path().unwrap()
+                        );
                         dynamic_paths.push(search_path);
                     }
```
```diff
@@ -448,38 +468,6 @@ impl<'db> Iterator for PthFileIterator<'db> {
     }
 }

-/// Validated and normalized module-resolution settings.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct ModuleResolutionSettings {
-    target_version: PythonVersion,
-
-    /// Search paths that have been statically determined purely from reading Ruff's configuration settings.
-    /// These shouldn't ever change unless the config settings themselves change.
-    static_search_paths: Vec<SearchPath>,
-
-    /// site-packages paths are not included in the above field:
-    /// if there are multiple site-packages paths, editable installations can appear
-    /// *between* the site-packages paths on `sys.path` at runtime.
-    /// That means we can't know where a second or third `site-packages` path should sit
-    /// in terms of module-resolution priority until we've discovered the editable installs
-    /// for the first `site-packages` path
-    site_packages_paths: Vec<SearchPath>,
-}
-
-impl ModuleResolutionSettings {
-    fn target_version(&self) -> PythonVersion {
-        self.target_version
-    }
-
-    pub(crate) fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
-        SearchPathIterator {
-            db,
-            static_paths: self.static_search_paths.iter(),
-            dynamic_paths: None,
-        }
-    }
-}
-
 /// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
 ///
 /// This is needed because Salsa requires that all query arguments are salsa ingredients.
@@ -492,13 +480,13 @@ struct ModuleNameIngredient<'db> {
 /// Given a module name and a list of search paths in which to lookup modules,
 /// attempt to resolve the module name
 fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
-    let resolver_settings = module_resolution_settings(db);
-    let target_version = resolver_settings.target_version();
+    let program = Program::get(db);
+    let target_version = program.target_version(db);
     let resolver_state = ResolverState::new(db, target_version);
     let is_builtin_module =
         ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());

-    for search_path in resolver_settings.search_paths(db) {
+    for search_path in search_paths(db) {
         // When a builtin module is imported, standard module resolution is bypassed:
         // the module name always resolves to the stdlib module,
         // even if there's a module of the same name in the first-party root
@@ -652,6 +640,8 @@ mod tests {
     use crate::module_name::ModuleName;
     use crate::module_resolver::module::ModuleKind;
     use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
+    use crate::ProgramSettings;
     use crate::PythonVersion;

     use super::*;
```
```diff
@@ -1202,14 +1192,19 @@ mod tests {
         std::fs::write(foo.as_std_path(), "")?;
         std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;

-        let search_paths = SearchPathSettings {
-            extra_paths: vec![],
-            src_root: src.clone(),
-            custom_typeshed: Some(custom_typeshed.clone()),
-            site_packages: vec![site_packages],
-        };
-
-        Program::new(&db, PythonVersion::PY38, search_paths);
+        Program::from_settings(
+            &db,
+            ProgramSettings {
+                target_version: PythonVersion::PY38,
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: src.clone(),
+                    custom_typeshed: Some(custom_typeshed.clone()),
+                    site_packages: vec![site_packages],
+                },
+            },
+        )
+        .context("Invalid program settings")?;

         let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
         let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();
@@ -1673,8 +1668,7 @@ not_a_directory
             .with_site_packages_files(&[("_foo.pth", "/src")])
             .build();

-        let search_paths: Vec<&SearchPath> =
-            module_resolution_settings(&db).search_paths(&db).collect();
+        let search_paths: Vec<&SearchPath> = search_paths(&db).collect();

         assert!(search_paths.contains(
             &&SearchPath::first_party(db.system(), SystemPathBuf::from("/src")).unwrap()
@@ -1703,16 +1697,19 @@ not_a_directory
         ])
         .unwrap();

-        Program::new(
+        Program::from_settings(
             &db,
-            PythonVersion::default(),
-            SearchPathSettings {
-                extra_paths: vec![],
-                src_root: SystemPathBuf::from("/src"),
-                custom_typeshed: None,
-                site_packages: vec![venv_site_packages, system_site_packages],
+            ProgramSettings {
+                target_version: PythonVersion::default(),
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: SystemPathBuf::from("/src"),
+                    custom_typeshed: None,
+                    site_packages: vec![venv_site_packages, system_site_packages],
+                },
             },
-        );
+        )
+        .expect("Valid program settings");

         // The editable installs discovered from the `.pth` file in the first `site-packages` directory
         // take precedence over the second `site-packages` directory...
```
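The order-preserving dedup in `SearchPaths::from_settings` above hinges on `HashSet::insert` returning `false` for duplicates; a standalone sketch with std types (the `FxHashSet`/`FxBuildHasher` pair in the diff is a drop-in, faster-hashing equivalent):

```rust
use std::collections::HashSet;

fn main() {
    let mut paths = vec!["/src", "/lib", "/src", "/vendor", "/lib"];
    let mut seen = HashSet::with_capacity(paths.len());
    // `insert` returns false when the path was already present, so `retain`
    // keeps only the first occurrence of each path while preserving order.
    paths.retain(|path| seen.insert(*path));
    assert_eq!(paths, ["/src", "/lib", "/vendor"]);
}
```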
```diff
@@ -4,6 +4,7 @@ use ruff_db::vendored::VendoredPathBuf;
 use crate::db::tests::TestDb;
 use crate::program::{Program, SearchPathSettings};
 use crate::python_version::PythonVersion;
+use crate::ProgramSettings;

 /// A test case for the module resolver.
 ///
@@ -220,16 +221,19 @@ impl TestCaseBuilder<MockedTypeshed> {
         let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
         let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);

-        Program::new(
+        Program::from_settings(
             &db,
-            target_version,
-            SearchPathSettings {
-                extra_paths: vec![],
-                src_root: src.clone(),
-                custom_typeshed: Some(typeshed.clone()),
-                site_packages: vec![site_packages.clone()],
+            ProgramSettings {
+                target_version,
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: src.clone(),
+                    custom_typeshed: Some(typeshed.clone()),
+                    site_packages: vec![site_packages.clone()],
+                },
             },
-        );
+        )
+        .expect("Valid program settings");

         TestCase {
             db,
@@ -273,16 +277,19 @@ impl TestCaseBuilder<VendoredTypeshed> {
             Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
         let src = Self::write_mock_directory(&mut db, "/src", first_party_files);

-        Program::new(
+        Program::from_settings(
             &db,
-            target_version,
-            SearchPathSettings {
-                extra_paths: vec![],
-                src_root: src.clone(),
-                custom_typeshed: None,
-                site_packages: vec![site_packages.clone()],
+            ProgramSettings {
+                target_version,
+                search_paths: SearchPathSettings {
+                    extra_paths: vec![],
+                    src_root: src.clone(),
+                    custom_typeshed: None,
+                    site_packages: vec![site_packages.clone()],
+                },
             },
-        );
+        )
+        .expect("Valid search path settings");

         TestCase {
             db,
```
```diff
@@ -1,21 +1,53 @@
 use crate::python_version::PythonVersion;
-use crate::Db;
-use ruff_db::system::SystemPathBuf;
+use anyhow::Context;
 use salsa::Durability;
+use salsa::Setter;
+
+use ruff_db::system::SystemPathBuf;
+
+use crate::module_resolver::SearchPaths;
+use crate::Db;

 #[salsa::input(singleton)]
 pub struct Program {
     pub target_version: PythonVersion,

     #[default]
     #[return_ref]
-    pub search_paths: SearchPathSettings,
+    pub(crate) search_paths: SearchPaths,
 }

 impl Program {
-    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> Self {
-        Program::builder(settings.target_version, settings.search_paths)
+    pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
+        let ProgramSettings {
+            target_version,
+            search_paths,
+        } = settings;
+
+        tracing::info!("Target version: {target_version}");
+
+        let search_paths = SearchPaths::from_settings(db, search_paths)
+            .with_context(|| "Invalid search path settings")?;
+
+        Ok(Program::builder(settings.target_version)
             .durability(Durability::HIGH)
-            .new(db)
+            .search_paths(search_paths)
+            .new(db))
     }
+
+    pub fn update_search_paths(
+        &self,
+        db: &mut dyn Db,
+        search_path_settings: SearchPathSettings,
+    ) -> anyhow::Result<()> {
+        let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
+
+        if self.search_paths(db) != &search_paths {
+            tracing::debug!("Update search paths");
+            self.set_search_paths(db).to(search_paths);
+        }
+
+        Ok(())
+    }
 }
```
@@ -89,8 +89,6 @@ pub(crate) struct SemanticIndex<'db> {
|
||||
scopes: IndexVec<FileScopeId, Scope>,
|
||||
|
||||
/// Map expressions to their corresponding scope.
|
||||
/// We can't use [`ExpressionId`] here, because the challenge is how to get from
|
||||
/// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
|
||||
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
|
||||
|
||||
/// Map from a node creating a definition to its definition.
|
||||
@@ -118,7 +116,7 @@ pub(crate) struct SemanticIndex<'db> {
|
||||
impl<'db> SemanticIndex<'db> {
|
||||
/// Returns the symbol table for a specific scope.
|
||||
///
|
||||
/// Use the Salsa cached [`symbol_table`] query if you only need the
|
||||
/// Use the Salsa cached [`symbol_table()`] query if you only need the
|
||||
/// symbol table for a single scope.
|
||||
pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
|
||||
self.symbol_tables[scope_id].clone()
|
||||
@@ -126,7 +124,7 @@ impl<'db> SemanticIndex<'db> {
|
||||
|
||||
/// Returns the use-def map for a specific scope.
|
||||
///
|
||||
/// Use the Salsa cached [`use_def_map`] query if you only need the
|
||||
/// Use the Salsa cached [`use_def_map()`] query if you only need the
|
||||
/// use-def map for a single scope.
|
||||
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
|
||||
self.use_def_maps[scope_id].clone()
|
||||
@@ -309,6 +307,7 @@ mod tests {
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::semantic_index::ast_ids::HasScopedUseId;
|
||||
@@ -529,6 +528,138 @@ y = 2
|
||||
));
|
||||
}
|
||||
|
||||
/// Test case to validate that the comprehension scope is correctly identified and that the target
|
||||
/// variable is defined only in the comprehension scope and not in the global scope.
|
||||
#[test]
|
||||
fn comprehension_scope() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[x for x in iter1]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&global_table), vec!["iter1"]);
|
||||
|
||||
let [(comprehension_scope_id, comprehension_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
comprehension_scope_id.to_scope_id(&db, file).name(&db),
|
||||
"<listcomp>"
|
||||
);
|
||||
|
||||
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
|
||||
}
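
For reference, the Python behavior this test pins down, as a standalone sketch: a comprehension's target binds only inside the comprehension's own scope and never leaks out.

    iter1 = [1, 2, 3]
    [x for x in iter1]   # `x` is bound only inside the <listcomp> scope
    print("x" in dir())  # False: the target did not leak into the module scope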

/// Test case to validate that the `x` variable used in the comprehension is referencing the
/// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
#[test]
fn multiple_generators() {
let TestCase { db, file } = test_case(
"
[x for x in iter1 for x in iter2]
",
);

let index = semantic_index(&db, file);
let [(comprehension_scope_id, _)] = index
.child_scopes(FileScopeId::global())
.collect::<Vec<_>>()[..]
else {
panic!("expected one child scope")
};

let use_def = index.use_def_map(comprehension_scope_id);

let module = parsed_module(&db, file).syntax();
let element = module.body[0]
.as_expr_stmt()
.unwrap()
.value
.as_list_comp_expr()
.unwrap()
.elt
.as_name_expr()
.unwrap();
let element_use_id =
element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));

let [definition] = use_def.use_definitions(element_use_id) else {
panic!("expected one definition")
};
let DefinitionKind::Comprehension(comprehension) = definition.node(&db) else {
panic!("expected generator definition")
};
let ast::Comprehension { target, .. } = comprehension.node();
let name = target.as_name_expr().unwrap().id().as_str();

assert_eq!(name, "x");
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
}
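
The semantics asserted here, shown in plain Python: when two generators bind the same name, the element expression reads the innermost (last) binding.

    print([x for x in "ab" for x in "12"])  # ['1', '2', '1', '2'] -- `x` is the inner binding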

/// Test case to validate that the nested comprehension creates a new scope which is a child of
/// the outer comprehension scope and the variables are correctly defined in the respective
/// scopes.
#[test]
fn nested_generators() {
let TestCase { db, file } = test_case(
"
[{x for x in iter2} for y in iter1]
",
);

let index = semantic_index(&db, file);
let global_table = index.symbol_table(FileScopeId::global());

assert_eq!(names(&global_table), vec!["iter1"]);

let [(comprehension_scope_id, comprehension_scope)] = index
.child_scopes(FileScopeId::global())
.collect::<Vec<_>>()[..]
else {
panic!("expected one child scope")
};

assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
assert_eq!(
comprehension_scope_id.to_scope_id(&db, file).name(&db),
"<listcomp>"
);

let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);

assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);

let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
.child_scopes(comprehension_scope_id)
.collect::<Vec<_>>()[..]
else {
panic!("expected one inner generator scope")
};

assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
assert_eq!(
inner_comprehension_scope_id
.to_scope_id(&db, file)
.name(&db),
"<setcomp>"
);

let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);

assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
}
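
In plain Python terms (a sketch independent of this crate): the inner set comprehension gets its own scope nested inside the list comprehension's scope, so its target is invisible one level up.

    iter1 = [[1, 2], [3]]
    print([{x for x in y} for y in iter1])  # [{1, 2}, {3}]; `y` lives in <listcomp>, `x` only in <setcomp>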

#[test]
fn dupes() {
let TestCase { db, file } = test_case(

@@ -26,9 +26,9 @@ use crate::Db;
/// ```
#[derive(Debug)]
pub(crate) struct AstIds {
/// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
/// Maps expressions to their expression id.
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
/// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
/// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
}


@@ -13,8 +13,8 @@ use crate::ast_node_ref::AstNodeRef;
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
use crate::semantic_index::ast_ids::AstIdsBuilder;
use crate::semantic_index::definition::{
AssignmentDefinitionNodeRef, Definition, DefinitionNodeKey, DefinitionNodeRef,
ImportFromDefinitionNodeRef,
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionNodeKey,
DefinitionNodeRef, ImportFromDefinitionNodeRef,
};
use crate::semantic_index::expression::Expression;
use crate::semantic_index::symbol::{
@@ -174,7 +174,7 @@ impl<'db> SemanticIndexBuilder<'db> {
symbol: ScopedSymbolId,
definition_node: impl Into<DefinitionNodeRef<'a>>,
) -> Definition<'db> {
let definition_node = definition_node.into();
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
let definition = Definition::new(
self.db,
self.file,
@@ -258,6 +258,49 @@ impl<'db> SemanticIndexBuilder<'db> {
nested_scope
}

/// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
/// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
///
/// [`Comprehension`]: ast::Comprehension
fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
let mut generators_iter = generators.iter();

let Some(generator) = generators_iter.next() else {
unreachable!("Expression must contain at least one generator");
};

// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
// nodes are evaluated in the inner scope.
self.visit_expr(&generator.iter);
self.push_scope(scope);

self.current_assignment = Some(CurrentAssignment::Comprehension {
node: generator,
first: true,
});
self.visit_expr(&generator.target);
self.current_assignment = None;

for expr in &generator.ifs {
self.visit_expr(expr);
}

for generator in generators_iter {
self.visit_expr(&generator.iter);

self.current_assignment = Some(CurrentAssignment::Comprehension {
node: generator,
first: false,
});
self.visit_expr(&generator.target);
self.current_assignment = None;

for expr in &generator.ifs {
self.visit_expr(expr);
}
}
}
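
The "first `iter` in the outer scope" rule above is observable in plain Python; class bodies make it visible because the comprehension scope skips the class scope:

    class C:
        values = [1, 2]
        doubled = [v * 2 for v in values]  # OK: the first `iter` is evaluated in the class scope
        # Any later generator's `iter` is evaluated inside the comprehension scope,
        # which cannot see class-level names:
        # pairs = [(v, w) for v in values for w in values]  # NameError: name 'values' is not defined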

pub(super) fn build(mut self) -> SemanticIndex<'db> {
let module = self.module;
self.visit_body(module.suite());
@@ -476,8 +519,7 @@ where
self.current_ast_ids().record_expression(expr);

match expr {
ast::Expr::Name(name_node) => {
let ast::ExprName { id, ctx, .. } = name_node;
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
let flags = match ctx {
ast::ExprContext::Load => SymbolFlags::IS_USED,
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
@@ -500,8 +542,17 @@ where
self.add_definition(symbol, ann_assign);
}
Some(CurrentAssignment::Named(named)) => {
// TODO(dhruvmanila): If the current scope is a comprehension, then the
// named expression is implicitly nonlocal. This is yet to be
// implemented.
self.add_definition(symbol, named);
}
Some(CurrentAssignment::Comprehension { node, first }) => {
self.add_definition(
symbol,
ComprehensionDefinitionNodeRef { node, first },
);
}
None => {}
}
}
@@ -527,7 +578,6 @@ where
}
self.push_scope(NodeWithScopeRef::Lambda(lambda));
self.visit_expr(lambda.body.as_ref());
self.pop_scope();
}
ast::Expr::If(ast::ExprIf {
body, test, orelse, ..
@@ -543,10 +593,66 @@ where
self.visit_expr(orelse);
self.flow_merge(&post_body);
}
ast::Expr::ListComp(
list_comprehension @ ast::ExprListComp {
elt, generators, ..
},
) => {
self.visit_generators(
NodeWithScopeRef::ListComprehension(list_comprehension),
generators,
);
self.visit_expr(elt);
}
ast::Expr::SetComp(
set_comprehension @ ast::ExprSetComp {
elt, generators, ..
},
) => {
self.visit_generators(
NodeWithScopeRef::SetComprehension(set_comprehension),
generators,
);
self.visit_expr(elt);
}
ast::Expr::Generator(
generator @ ast::ExprGenerator {
elt, generators, ..
},
) => {
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
self.visit_expr(elt);
}
ast::Expr::DictComp(
dict_comprehension @ ast::ExprDictComp {
key,
value,
generators,
..
},
) => {
self.visit_generators(
NodeWithScopeRef::DictComprehension(dict_comprehension),
generators,
);
self.visit_expr(key);
self.visit_expr(value);
}
_ => {
walk_expr(self, expr);
}
}

if matches!(
expr,
ast::Expr::Lambda(_)
| ast::Expr::ListComp(_)
| ast::Expr::SetComp(_)
| ast::Expr::Generator(_)
| ast::Expr::DictComp(_)
) {
self.pop_scope();
}
}
}

@@ -555,6 +661,10 @@ enum CurrentAssignment<'a> {
Assign(&'a ast::StmtAssign),
AnnAssign(&'a ast::StmtAnnAssign),
Named(&'a ast::ExprNamed),
Comprehension {
node: &'a ast::Comprehension,
first: bool,
},
}

impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {

@@ -44,6 +44,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
NamedExpression(&'a ast::ExprNamed),
Assignment(AssignmentDefinitionNodeRef<'a>),
AnnotatedAssignment(&'a ast::StmtAnnAssign),
Comprehension(ComprehensionDefinitionNodeRef<'a>),
}

impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
@@ -88,6 +89,12 @@ impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
}
}

impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
Self::Comprehension(node)
}
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::StmtImportFrom,
@@ -100,6 +107,12 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
pub(crate) target: &'a ast::ExprName,
}

#[derive(Copy, Clone, Debug)]
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
pub(crate) node: &'a ast::Comprehension,
pub(crate) first: bool,
}

impl DefinitionNodeRef<'_> {
#[allow(unsafe_code)]
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
@@ -131,6 +144,12 @@ impl DefinitionNodeRef<'_> {
DefinitionNodeRef::AnnotatedAssignment(assign) => {
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
}
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
DefinitionKind::Comprehension(ComprehensionDefinitionKind {
node: AstNodeRef::new(parsed, node),
first,
})
}
}
}

@@ -148,6 +167,7 @@ impl DefinitionNodeRef<'_> {
target,
}) => target.into(),
Self::AnnotatedAssignment(node) => node.into(),
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
}
}
}
@@ -161,6 +181,23 @@ pub enum DefinitionKind {
NamedExpression(AstNodeRef<ast::ExprNamed>),
Assignment(AssignmentDefinitionKind),
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
Comprehension(ComprehensionDefinitionKind),
}

#[derive(Clone, Debug)]
pub struct ComprehensionDefinitionKind {
node: AstNodeRef<ast::Comprehension>,
first: bool,
}

impl ComprehensionDefinitionKind {
pub(crate) fn node(&self) -> &ast::Comprehension {
self.node.node()
}

pub(crate) fn is_first(&self) -> bool {
self.first
}
}

#[derive(Clone, Debug)]
@@ -230,3 +267,9 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
Self(NodeKey::from_node(node))
}
}

impl From<&ast::Comprehension> for DefinitionNodeKey {
fn from(node: &ast::Comprehension) -> Self {
Self(NodeKey::from_node(node))
}
}

@@ -114,6 +114,10 @@ impl<'db> ScopeId<'db> {
NodeWithScopeKind::ClassTypeParameters(_)
| NodeWithScopeKind::FunctionTypeParameters(_)
| NodeWithScopeKind::Function(_)
| NodeWithScopeKind::ListComprehension(_)
| NodeWithScopeKind::SetComprehension(_)
| NodeWithScopeKind::DictComprehension(_)
| NodeWithScopeKind::GeneratorExpression(_)
)
}

@@ -127,6 +131,10 @@ impl<'db> ScopeId<'db> {
NodeWithScopeKind::Function(function)
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
NodeWithScopeKind::Lambda(_) => "<lambda>",
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
}
}
}
@@ -170,6 +178,13 @@ pub enum ScopeKind {
Annotation,
Class,
Function,
Comprehension,
}

impl ScopeKind {
pub const fn is_comprehension(self) -> bool {
matches!(self, ScopeKind::Comprehension)
}
}

/// Symbol table for a specific [`Scope`].
@@ -300,6 +315,10 @@ pub(crate) enum NodeWithScopeRef<'a> {
Lambda(&'a ast::ExprLambda),
FunctionTypeParameters(&'a ast::StmtFunctionDef),
ClassTypeParameters(&'a ast::StmtClassDef),
ListComprehension(&'a ast::ExprListComp),
SetComprehension(&'a ast::ExprSetComp),
DictComprehension(&'a ast::ExprDictComp),
GeneratorExpression(&'a ast::ExprGenerator),
}

impl NodeWithScopeRef<'_> {
@@ -326,6 +345,18 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
}
}
}

@@ -337,6 +368,10 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
NodeWithScopeRef::FunctionTypeParameters(_)
| NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
NodeWithScopeRef::ListComprehension(_)
| NodeWithScopeRef::SetComprehension(_)
| NodeWithScopeRef::DictComprehension(_)
| NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
}
}

@@ -356,6 +391,18 @@ impl NodeWithScopeRef<'_> {
NodeWithScopeRef::ClassTypeParameters(class) => {
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
}
NodeWithScopeRef::ListComprehension(comprehension) => {
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::SetComprehension(comprehension) => {
NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::DictComprehension(comprehension) => {
NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
}
NodeWithScopeRef::GeneratorExpression(generator) => {
NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
}
}
}
}
@@ -369,6 +416,10 @@ pub enum NodeWithScopeKind {
Function(AstNodeRef<ast::StmtFunctionDef>),
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
Lambda(AstNodeRef<ast::ExprLambda>),
ListComprehension(AstNodeRef<ast::ExprListComp>),
SetComprehension(AstNodeRef<ast::ExprSetComp>),
DictComprehension(AstNodeRef<ast::ExprDictComp>),
GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
@@ -379,4 +430,8 @@ pub(crate) enum NodeWithScopeKey {
Function(NodeKey),
FunctionTypeParameters(NodeKey),
Lambda(NodeKey),
ListComprehension(NodeKey),
SetComprehension(NodeKey),
DictComprehension(NodeKey),
GeneratorExpression(NodeKey),
}

@@ -1,6 +1,8 @@
use ruff_db::files::File;
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
use ruff_source_file::LineIndex;

use crate::module_name::ModuleName;
use crate::module_resolver::{resolve_module, Module};
@@ -25,6 +27,14 @@ impl<'db> SemanticModel<'db> {
self.db
}

pub fn file_path(&self) -> &FilePath {
self.file.path(self.db)
}

pub fn line_index(&self) -> LineIndex {
line_index(self.db.upcast(), self.file)
}

pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
resolve_module(self.db, module_name)
}
@@ -171,29 +181,32 @@ mod tests {
use crate::program::{Program, SearchPathSettings};
use crate::python_version::PythonVersion;
use crate::types::Type;
use crate::{HasTy, SemanticModel};
use crate::{HasTy, ProgramSettings, SemanticModel};

fn setup_db() -> TestDb {
let db = TestDb::new();
Program::new(
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
let mut db = TestDb::new();
db.write_files(files)?;

Program::from_settings(
&db,
PythonVersion::default(),
SearchPathSettings {
extra_paths: vec![],
src_root: SystemPathBuf::from("/src"),
site_packages: vec![],
custom_typeshed: None,
ProgramSettings {
target_version: PythonVersion::default(),
search_paths: SearchPathSettings {
extra_paths: vec![],
src_root: SystemPathBuf::from("/src"),
site_packages: vec![],
custom_typeshed: None,
},
},
);
)?;

db
Ok(db)
}

#[test]
fn function_ty() -> anyhow::Result<()> {
let mut db = setup_db();
let db = setup_db([("/src/foo.py", "def test(): pass")])?;

db.write_file("/src/foo.py", "def test(): pass")?;
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

let ast = parsed_module(&db, foo);
@@ -209,9 +222,8 @@ mod tests {

#[test]
fn class_ty() -> anyhow::Result<()> {
let mut db = setup_db();
let db = setup_db([("/src/foo.py", "class Test: pass")])?;

db.write_file("/src/foo.py", "class Test: pass")?;
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();

let ast = parsed_module(&db, foo);
@@ -227,12 +239,11 @@ mod tests {

#[test]
fn alias_ty() -> anyhow::Result<()> {
let mut db = setup_db();

db.write_files([
let db = setup_db([
("/src/foo.py", "class Test: pass"),
("/src/bar.py", "from foo import Test"),
])?;

let bar = system_path_to_file(&db, "/src/bar.py").unwrap();

let ast = parsed_module(&db, bar);

@@ -7,9 +7,11 @@ use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
use crate::{Db, FxOrderSet};

mod builder;
mod display;
mod infer;

pub(crate) use self::builder::UnionBuilder;
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};

/// Infer the public type of a symbol (its type as seen from outside its scope).
@@ -91,14 +93,14 @@ pub(crate) fn definitions_ty<'db>(
};

if let Some(second) = all_types.next() {
let mut builder = UnionTypeBuilder::new(db);
let mut builder = UnionBuilder::new(db);
builder = builder.add(first).add(second);

for variant in all_types {
builder = builder.add(variant);
}

Type::Union(builder.build())
builder.build()
} else {
first
}
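
The effect of this fold, sketched in Python (`reveal_type` output is illustrative, not this tool's exact rendering): a symbol with several reaching definitions gets the union of their types, while a single definition stays a plain type.

    from typing import reveal_type

    flag: bool = True
    if flag:
        x = 1      # one reaching definition: Literal[1]
    else:
        x = "one"  # another reaching definition: Literal["one"]
    reveal_type(x)  # the union of both, e.g. Literal[1] | Literal["one"]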
@@ -117,7 +119,7 @@ pub enum Type<'db> {
/// name does not exist or is not bound to any value (this represents an error, but with some
/// leniency options it could be silently resolved to Unknown in some cases)
Unbound,
/// the None object (TODO remove this in favor of Instance(types.NoneType)
/// the None object -- TODO remove this in favor of Instance(types.NoneType)
None,
/// a specific function object
Function(FunctionType<'db>),
@@ -127,8 +129,11 @@ pub enum Type<'db> {
Class(ClassType<'db>),
/// the set of Python objects with the given class in their __class__'s method resolution order
Instance(ClassType<'db>),
/// the set of objects in any of the types in the union
Union(UnionType<'db>),
/// the set of objects in all of the types in the intersection
Intersection(IntersectionType<'db>),
/// An integer literal
IntLiteral(i64),
/// A boolean literal, either `True` or `False`.
BooleanLiteral(bool),
@@ -159,15 +164,13 @@ impl<'db> Type<'db> {
// TODO MRO? get_own_instance_member, get_instance_member
todo!("attribute lookup on Instance type")
}
Type::Union(union) => Type::Union(
union
.elements(db)
.iter()
.fold(UnionTypeBuilder::new(db), |builder, element_ty| {
builder.add(element_ty.member(db, name))
})
.build(),
),
Type::Union(union) => union
.elements(db)
.iter()
.fold(UnionBuilder::new(db), |builder, element_ty| {
builder.add(element_ty.member(db, name))
})
.build(),
Type::Intersection(_) => {
// TODO perform the get_member on each type in the intersection
// TODO return the intersection of those results
@@ -251,7 +254,7 @@ impl<'db> ClassType<'db> {

#[salsa::interned]
pub struct UnionType<'db> {
/// the union type includes values in any of these types
/// The union type includes values in any of these types.
elements: FxOrderSet<Type<'db>>,
}

@@ -261,48 +264,15 @@ impl<'db> UnionType<'db> {
}
}

struct UnionTypeBuilder<'db> {
elements: FxOrderSet<Type<'db>>,
db: &'db dyn Db,
}

impl<'db> UnionTypeBuilder<'db> {
fn new(db: &'db dyn Db) -> Self {
Self {
db,
elements: FxOrderSet::default(),
}
}

/// Adds a type to this union.
fn add(mut self, ty: Type<'db>) -> Self {
match ty {
Type::Union(union) => {
self.elements.extend(&union.elements(self.db));
}
_ => {
self.elements.insert(ty);
}
}

self
}

fn build(self) -> UnionType<'db> {
UnionType::new(self.db, self.elements)
}
}

// Negation types aren't expressible in annotations, and are most likely to arise from type
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
// have to represent it as a single-element intersection if it did) in exchange for better
// efficiency in the within-intersection case.
#[salsa::interned]
pub struct IntersectionType<'db> {
// the intersection type includes only values in all of these types
/// The intersection type includes only values in all of these types.
positive: FxOrderSet<Type<'db>>,
// the intersection type does not include any value in any of these types

/// The intersection type does not include any value in any of these types.
///
/// Negation types aren't expressible in annotations, and are most likely to arise from type
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
/// directly in intersections rather than as a separate type.
negative: FxOrderSet<Type<'db>>,
}

crates/red_knot_python_semantic/src/types/builder.rs (new file, 429 lines)
@@ -0,0 +1,429 @@
//! Smart builders for union and intersection types.
//!
//! Invariants we maintain here:
//! * No single-element union types (should just be the contained type instead.)
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
//! have a standalone negation type so there's no other representation for this.
//! * The same type should never appear more than once in a union or intersection. (This should
//! be expanded to cover subtyping -- see below -- but for now we only implement it for type
//! identity.)
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
//! than a union-of-intersections. Unions cannot contain other unions (the inner union just
//! flattens into the outer one), intersections cannot contain other intersections (also
//! flattens), and intersections cannot contain unions (the intersection distributes over the
//! union, inverting it into a union-of-intersections).
//!
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
//! may end up returning a [`Type::Union`] of intersections.
//!
//! In the future we should have these additional invariants, but they aren't implemented yet:
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
//! subtype from the union).
//! * No type in an intersection can be a supertype of any other type in the intersection (just
//! eliminate the supertype from the intersection).
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
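
A tiny Python model of the DNF distribution described above (a hypothetical helper, not this crate's API), representing an intersection as a tuple of positive members and a union as a list of intersections:

    def distribute(union_of_intersections, union_elements):
        """(I1 | I2) & (U1 | U2) -> (I1 & U1) | (I1 & U2) | (I2 & U1) | (I2 & U2)."""
        return [
            inter + (elem,)
            for elem in union_elements
            for inter in union_of_intersections
        ]

    # Adding the union T3 | T4 to the single intersection T1 & T2:
    assert distribute([("T1", "T2")], ["T3", "T4"]) == [
        ("T1", "T2", "T3"),
        ("T1", "T2", "T4"),
    ]
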
use crate::types::{IntersectionType, Type, UnionType};
use crate::{Db, FxOrderSet};

pub(crate) struct UnionBuilder<'db> {
elements: FxOrderSet<Type<'db>>,
db: &'db dyn Db,
}

impl<'db> UnionBuilder<'db> {
pub(crate) fn new(db: &'db dyn Db) -> Self {
Self {
db,
elements: FxOrderSet::default(),
}
}

/// Adds a type to this union.
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
match ty {
Type::Union(union) => {
self.elements.extend(&union.elements(self.db));
}
Type::Never => {}
_ => {
self.elements.insert(ty);
}
}

self
}

pub(crate) fn build(self) -> Type<'db> {
match self.elements.len() {
0 => Type::Never,
1 => self.elements[0],
_ => Type::Union(UnionType::new(self.db, self.elements)),
}
}
}
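
The collapse rules in `build` mirror basic set identities; a rough Python analogue (hypothetical, with strings standing in for interned types and frozensets for unions):

    NEVER = "Never"

    def build_union(elements):
        flat = []
        for e in elements:
            if isinstance(e, frozenset):  # a nested union flattens into this one
                flat.extend(sorted(e))
            elif e == NEVER:              # Never | T == T, so Never adds nothing
                continue
            elif e not in flat:           # no duplicate elements
                flat.append(e)
        if not flat:
            return NEVER                  # the empty union is Never
        if len(flat) == 1:
            return flat[0]                # a one-element union is just that type
        return frozenset(flat)

    assert build_union([]) == NEVER
    assert build_union(["int", NEVER]) == "int"
    assert build_union(["int", "str"]) == frozenset({"int", "str"})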

#[allow(unused)]
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
// Really this builds a union-of-intersections, because we always keep our set-theoretic types
// in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
// just a single intersection in this vector, and we are building a single intersection type,
// but if a union is added to the intersection, we'll distribute ourselves over that union and
// create a union of intersections.
intersections: Vec<InnerIntersectionBuilder<'db>>,
db: &'db dyn Db,
}

impl<'db> IntersectionBuilder<'db> {
#[allow(dead_code)]
fn new(db: &'db dyn Db) -> Self {
Self {
db,
intersections: vec![InnerIntersectionBuilder::new()],
}
}

fn empty(db: &'db dyn Db) -> Self {
Self {
db,
intersections: vec![],
}
}

#[allow(dead_code)]
fn add_positive(mut self, ty: Type<'db>) -> Self {
if let Type::Union(union) = ty {
// Distribute ourself over this union: for each union element, clone ourself and
// intersect with that union element, then create a new union-of-intersections with all
// of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
// and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
// not in DNF), we distribute the union and get `(T1 & T2 & T3) | (T1 & T2 & T4)`.
// If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
// and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
// T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
union
.elements(self.db)
.iter()
.map(|elem| self.clone().add_positive(*elem))
.fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
builder.intersections.extend(sub.intersections);
builder
})
} else {
// If we are already a union-of-intersections, distribute the new intersected element
// across all of those intersections.
for inner in &mut self.intersections {
inner.add_positive(self.db, ty);
}
self
}
}

#[allow(dead_code)]
fn add_negative(mut self, ty: Type<'db>) -> Self {
// See comments above in `add_positive`; this is just the negated version.
if let Type::Union(union) = ty {
union
.elements(self.db)
.iter()
.map(|elem| self.clone().add_negative(*elem))
.fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
builder.intersections.extend(sub.intersections);
builder
})
} else {
for inner in &mut self.intersections {
inner.add_negative(self.db, ty);
}
self
}
}

#[allow(dead_code)]
fn build(mut self) -> Type<'db> {
// Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
if self.intersections.len() == 1 {
self.intersections.pop().unwrap().build(self.db)
} else {
let mut builder = UnionBuilder::new(self.db);
for inner in self.intersections {
builder = builder.add(inner.build(self.db));
}
builder.build()
}
}
}

#[allow(unused)]
#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
positive: FxOrderSet<Type<'db>>,
negative: FxOrderSet<Type<'db>>,
}

impl<'db> InnerIntersectionBuilder<'db> {
fn new() -> Self {
Self::default()
}

/// Adds a positive type to this intersection.
fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
match ty {
Type::Intersection(inter) => {
let pos = inter.positive(db);
let neg = inter.negative(db);
self.positive.extend(pos.difference(&self.negative));
self.negative.extend(neg.difference(&self.positive));
self.positive.retain(|elem| !neg.contains(elem));
self.negative.retain(|elem| !pos.contains(elem));
}
_ => {
if !self.negative.remove(&ty) {
self.positive.insert(ty);
};
}
}
}

/// Adds a negative type to this intersection.
fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
// TODO Any/Unknown actually should not self-cancel
match ty {
Type::Intersection(intersection) => {
let pos = intersection.negative(db);
let neg = intersection.positive(db);
self.positive.extend(pos.difference(&self.negative));
self.negative.extend(neg.difference(&self.positive));
self.positive.retain(|elem| !neg.contains(elem));
self.negative.retain(|elem| !pos.contains(elem));
}
Type::Never => {}
_ => {
if !self.positive.remove(&ty) {
self.negative.insert(ty);
};
}
}
}

fn simplify(&mut self) {
// TODO this should be generalized based on subtyping, for now we just handle a few cases

// Never is a subtype of all types
if self.positive.contains(&Type::Never) {
self.positive.clear();
self.negative.clear();
self.positive.insert(Type::Never);
}
}

fn build(mut self, db: &'db dyn Db) -> Type<'db> {
self.simplify();
match (self.positive.len(), self.negative.len()) {
(0, 0) => Type::Never,
(1, 0) => self.positive[0],
_ => {
self.positive.shrink_to_fit();
self.negative.shrink_to_fit();
Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
}
}
}
}

#[cfg(test)]
mod tests {
use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
use crate::db::tests::TestDb;

fn setup_db() -> TestDb {
TestDb::new()
}

impl<'db> UnionType<'db> {
fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.elements(db).into_iter().collect()
}
}

#[test]
fn build_union() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
panic!("expected a union");
};

assert_eq!(union.elements_vec(&db), &[t0, t1]);
}

#[test]
fn build_union_single() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionBuilder::new(&db).add(t0).build();

assert_eq!(ty, t0);
}

#[test]
fn build_union_empty() {
let db = setup_db();
let ty = UnionBuilder::new(&db).build();

assert_eq!(ty, Type::Never);
}

#[test]
fn build_union_never() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

assert_eq!(ty, t0);
}

#[test]
fn build_union_flatten() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
panic!("expected a union");
};

assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
}

impl<'db> IntersectionType<'db> {
fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.positive(db).into_iter().collect()
}

fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
self.negative(db).into_iter().collect()
}
}

#[test]
fn build_intersection() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let ta = Type::Any;
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t0)
.build()
else {
panic!("expected to be an intersection");
};

assert_eq!(inter.pos_vec(&db), &[ta]);
assert_eq!(inter.neg_vec(&db), &[t0]);
}

#[test]
fn build_intersection_flatten_positive() {
let db = setup_db();
let ta = Type::Any;
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let i0 = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t1)
.build();
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_positive(i0)
.build()
else {
panic!("expected to be an intersection");
};

assert_eq!(inter.pos_vec(&db), &[t2, ta]);
assert_eq!(inter.neg_vec(&db), &[t1]);
}

#[test]
fn build_intersection_flatten_negative() {
let db = setup_db();
let ta = Type::Any;
let t1 = Type::IntLiteral(1);
let t2 = Type::IntLiteral(2);
let i0 = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_negative(t1)
.build();
let Type::Intersection(inter) = IntersectionBuilder::new(&db)
.add_positive(t2)
.add_negative(i0)
.build()
else {
panic!("expected to be an intersection");
};

assert_eq!(inter.pos_vec(&db), &[t2, t1]);
assert_eq!(inter.neg_vec(&db), &[ta]);
}

#[test]
fn intersection_distributes_over_union() {
let db = setup_db();
let t0 = Type::IntLiteral(0);
let t1 = Type::IntLiteral(1);
let ta = Type::Any;
let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

let Type::Union(union) = IntersectionBuilder::new(&db)
.add_positive(ta)
.add_positive(u0)
.build()
else {
panic!("expected a union");
};
let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
panic!("expected a union of two intersections");
};
assert_eq!(i0.pos_vec(&db), &[ta, t0]);
assert_eq!(i1.pos_vec(&db), &[ta, t1]);
}

#[test]
fn build_intersection_self_negation() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_negative(Type::None)
.build();

assert_eq!(ty, Type::Never);
}

#[test]
fn build_intersection_simplify_negative_never() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_negative(Type::Never)
.build();

assert_eq!(ty, Type::None);
}

#[test]
fn build_intersection_simplify_positive_never() {
let db = setup_db();
let ty = IntersectionBuilder::new(&db)
.add_positive(Type::None)
.add_positive(Type::Never)
.build();

assert_eq!(ty, Type::Never);
}
}
@@ -40,7 +40,7 @@ use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScop
use crate::semantic_index::SemanticIndex;
use crate::types::{
builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
Name, Type, UnionTypeBuilder,
Name, Type, UnionBuilder,
};
use crate::Db;

@@ -61,7 +61,7 @@ pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Ty
TypeInferenceBuilder::new(db, InferenceRegion::Scope(scope), index).finish()
}

/// Cycle recovery for [`infer_definition_types`]: for now, just [`Type::Unknown`]
/// Cycle recovery for [`infer_definition_types()`]: for now, just [`Type::Unknown`]
/// TODO fixpoint iteration
fn infer_definition_types_cycle_recovery<'db>(
_db: &'db dyn Db,
@@ -260,6 +260,18 @@ impl<'db> TypeInferenceBuilder<'db> {
NodeWithScopeKind::FunctionTypeParameters(function) => {
self.infer_function_type_params(function.node());
}
NodeWithScopeKind::ListComprehension(comprehension) => {
self.infer_list_comprehension_expression_scope(comprehension.node());
}
NodeWithScopeKind::SetComprehension(comprehension) => {
self.infer_set_comprehension_expression_scope(comprehension.node());
}
NodeWithScopeKind::DictComprehension(comprehension) => {
self.infer_dict_comprehension_expression_scope(comprehension.node());
}
NodeWithScopeKind::GeneratorExpression(generator) => {
self.infer_generator_expression_scope(generator.node());
}
}
}

@@ -288,6 +300,13 @@ impl<'db> TypeInferenceBuilder<'db> {
DefinitionKind::NamedExpression(named_expression) => {
self.infer_named_expression_definition(named_expression.node(), definition);
}
DefinitionKind::Comprehension(comprehension) => {
self.infer_comprehension_definition(
comprehension.node(),
comprehension.is_first(),
definition,
);
}
}
}

@@ -923,7 +942,6 @@ impl<'db> TypeInferenceBuilder<'db> {
ty
}

#[allow(clippy::unused_self)]
fn infer_number_literal_expression(&mut self, literal: &ast::ExprNumberLiteral) -> Type<'db> {
let ast::ExprNumberLiteral { range: _, value } = literal;

@@ -1054,18 +1072,24 @@ impl<'db> TypeInferenceBuilder<'db> {
builtins_symbol_ty_by_name(self.db, "dict").instance()
}

/// Infer the type of the `iter` expression of the first comprehension.
fn infer_first_comprehension_iter(&mut self, comprehensions: &[ast::Comprehension]) {
let mut generators_iter = comprehensions.iter();
let Some(first_generator) = generators_iter.next() else {
unreachable!("Comprehension must contain at least one generator");
};
self.infer_expression(&first_generator.iter);
}
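
Only the first `iter` is inferred here because it is the one sub-expression Python evaluates in the enclosing scope; everything else is inferred inside the comprehension's own scope. In Python terms:

    # For a comprehension such as
    #     [x + y for x in iter1 if x for y in iter2]
    # the enclosing scope evaluates only `iter1`; the comprehension scope
    # evaluates `x`, the `if x` condition, `iter2`, `y`, and the element `x + y`.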

fn infer_generator_expression(&mut self, generator: &ast::ExprGenerator) -> Type<'db> {
let ast::ExprGenerator {
range: _,
elt,
elt: _,
generators,
parenthesized: _,
} = generator;

self.infer_expression(elt);
for generator in generators {
self.infer_comprehension(generator);
}
self.infer_first_comprehension_iter(generators);

// TODO generator type
Type::Unknown
@@ -1074,20 +1098,71 @@ impl<'db> TypeInferenceBuilder<'db> {
fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> {
let ast::ExprListComp {
range: _,
elt,
elt: _,
generators,
} = listcomp;

self.infer_expression(elt);
for generator in generators {
self.infer_comprehension(generator);
}
self.infer_first_comprehension_iter(generators);

// TODO list type
Type::Unknown
}

fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> {
let ast::ExprDictComp {
range: _,
key: _,
value: _,
generators,
} = dictcomp;

self.infer_first_comprehension_iter(generators);

// TODO dict type
Type::Unknown
}

fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
let ast::ExprSetComp {
range: _,
elt: _,
generators,
} = setcomp;

self.infer_first_comprehension_iter(generators);

// TODO set type
Type::Unknown
}

fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) {
let ast::ExprGenerator {
range: _,
elt,
generators,
parenthesized: _,
} = generator;

self.infer_expression(elt);
for comprehension in generators {
self.infer_comprehension(comprehension);
}
}

fn infer_list_comprehension_expression_scope(&mut self, listcomp: &ast::ExprListComp) {
let ast::ExprListComp {
range: _,
elt,
generators,
} = listcomp;

self.infer_expression(elt);
for comprehension in generators {
self.infer_comprehension(comprehension);
}
}

fn infer_dict_comprehension_expression_scope(&mut self, dictcomp: &ast::ExprDictComp) {
let ast::ExprDictComp {
range: _,
key,
@@ -1097,46 +1172,51 @@ impl<'db> TypeInferenceBuilder<'db> {

self.infer_expression(key);
self.infer_expression(value);
for generator in generators {
self.infer_comprehension(generator);
for comprehension in generators {
self.infer_comprehension(comprehension);
}

// TODO dict type
Type::Unknown
}

fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
fn infer_set_comprehension_expression_scope(&mut self, setcomp: &ast::ExprSetComp) {
let ast::ExprSetComp {
range: _,
elt,
generators,
} = setcomp;
self.infer_expression(elt);
for generator in generators {
self.infer_comprehension(generator);
}

// TODO set type
Type::Unknown
self.infer_expression(elt);
for comprehension in generators {
self.infer_comprehension(comprehension);
}
}

fn infer_comprehension(&mut self, comprehension: &ast::Comprehension) -> Type<'db> {
fn infer_comprehension(&mut self, comprehension: &ast::Comprehension) {
self.infer_definition(comprehension);
for expr in &comprehension.ifs {
self.infer_expression(expr);
}
}

fn infer_comprehension_definition(
&mut self,
comprehension: &ast::Comprehension,
is_first: bool,
definition: Definition<'db>,
) {
let ast::Comprehension {
range: _,
target,
iter,
ifs,
ifs: _,
is_async: _,
} = comprehension;

self.infer_expression(target);
self.infer_expression(iter);
for if_clause in ifs {
self.infer_expression(if_clause);
if !is_first {
self.infer_expression(iter);
}

// TODO comprehension type
Type::Unknown
// TODO(dhruvmanila): The target type should be inferred based on the iter type instead.
let target_ty = self.infer_expression(target);
self.types.definitions.insert(definition, target_ty);
}
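
A hypothetical example of what the TODO above is aiming for, once the target's type comes from the iterable rather than from the target expression itself:

    elements = [1, 2, 3]
    doubled = [n * 2 for n in elements]  # `n` should eventually be inferred as `int`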
|
||||
|
||||
fn infer_named_expression(&mut self, named: &ast::ExprNamed) -> Type<'db> {
|
||||
@@ -1179,12 +1259,10 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let body_ty = self.infer_expression(body);
|
||||
let orelse_ty = self.infer_expression(orelse);
|
||||
|
||||
let union = UnionTypeBuilder::new(self.db)
|
||||
UnionBuilder::new(self.db)
|
||||
.add(body_ty)
|
||||
.add(orelse_ty)
|
||||
.build();
|
||||
|
||||
Type::Union(union)
|
||||
.build()
|
||||
}
|
||||
|
||||
fn infer_lambda_body(&mut self, lambda_expression: &ast::ExprLambda) {
|
||||
@@ -1494,6 +1572,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Context;
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
@@ -1508,40 +1587,58 @@ mod tests {
|
||||
use crate::semantic_index::symbol::FileScopeId;
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::types::{global_symbol_ty_by_name, infer_definition_types, symbol_ty_by_name, Type};
|
||||
use crate::{HasTy, SemanticModel};
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
Program::new(
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root,
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
fn setup_db_with_custom_typeshed(typeshed: &str) -> TestDb {
|
||||
let db = TestDb::new();
|
||||
fn setup_db_with_custom_typeshed<'a>(
|
||||
typeshed: &str,
|
||||
files: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||
) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
|
        Program::new(
        db.write_files(files)
            .context("Failed to write test files")?;

        Program::from_settings(
            &db,
            PythonVersion::default(),
            SearchPathSettings {
                extra_paths: Vec::new(),
                src_root: SystemPathBuf::from("/src"),
                site_packages: vec![],
                custom_typeshed: Some(SystemPathBuf::from(typeshed)),
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: Vec::new(),
                    src_root,
                    site_packages: vec![],
                    custom_typeshed: Some(SystemPathBuf::from(typeshed)),
                },
            },
        );
        )
        .context("Failed to create Program")?;

        db
        Ok(db)
    }

    fn assert_public_ty(db: &TestDb, file_name: &str, symbol_name: &str, expected: &str) {
@@ -2131,16 +2228,17 @@ mod tests {

    #[test]
    fn builtin_symbol_custom_stdlib() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "c = copyright"),
            (
                "/typeshed/stdlib/builtins.pyi",
                "def copyright() -> None: ...",
            ),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;
        let db = setup_db_with_custom_typeshed(
            "/typeshed",
            [
                ("/src/a.py", "c = copyright"),
                (
                    "/typeshed/stdlib/builtins.pyi",
                    "def copyright() -> None: ...",
                ),
                ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
            ],
        )?;

        assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");

@@ -2160,13 +2258,14 @@ mod tests {

    #[test]
    fn unknown_builtin_later_defined() -> anyhow::Result<()> {
        let mut db = setup_db_with_custom_typeshed("/typeshed");

        db.write_files([
            ("/src/a.py", "x = foo"),
            ("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
            ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
        ])?;
        let db = setup_db_with_custom_typeshed(
            "/typeshed",
            [
                ("/src/a.py", "x = foo"),
                ("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
                ("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
            ],
        )?;

        assert_public_ty(&db, "/src/a.py", "x", "Unbound");


@@ -1 +1 @@
4ef2d66663fc080fefa379e6ae5fc45d4f8b54eb
1ace5718deaf3041f8e3d1dc9c9e8a8e830e517f

@@ -753,9 +753,11 @@ class Constant(expr):
    __match_args__ = ("value", "kind")
    value: Any  # None, str, bytes, bool, int, float, complex, Ellipsis
    kind: str | None
    # Aliases for value, for backwards compatibility
    s: Any
    n: int | float | complex
    if sys.version_info < (3, 14):
        # Aliases for value, for backwards compatibility
        s: Any
        n: int | float | complex

    def __init__(self, value: Any, kind: str | None = None, **kwargs: Unpack[_Attributes]) -> None: ...

class NamedExpr(expr):

@@ -1,13 +1,12 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import (  # noqa: Y022,Y038,Y057
from typing import (  # noqa: Y022,Y038
    AbstractSet as Set,
    AsyncGenerator as AsyncGenerator,
    AsyncIterable as AsyncIterable,
    AsyncIterator as AsyncIterator,
    Awaitable as Awaitable,
    ByteString as ByteString,
    Callable as Callable,
    Collection as Collection,
    Container as Container,
@@ -59,8 +58,12 @@ __all__ = [
    "ValuesView",
    "Sequence",
    "MutableSequence",
    "ByteString",
]
if sys.version_info < (3, 14):
    from typing import ByteString as ByteString  # noqa: Y057

    __all__ += ["ByteString"]

if sys.version_info >= (3, 12):
    __all__ += ["Buffer"]


@@ -51,8 +51,8 @@ class _CDataMeta(type):
    # By default mypy complains about the following two methods, because strictly speaking cls
    # might not be a Type[_CT]. However this can never actually happen, because the only class that
    # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here.
    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]
    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]
    def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]
    def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ...  # type: ignore[misc]  # pyright: ignore[reportGeneralTypeIssues]

class _CData(metaclass=_CDataMeta):
    _b_base_: int
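For context on the hunk above (an illustrative aside, not part of the diff): `_CDataMeta.__mul__`/`__rmul__` model the runtime behavior where multiplying a ctypes type by an integer produces an array type. A minimal sketch:

```python
import ctypes

# ctypes types support `type * int` to construct fixed-size array types;
# this is the runtime behavior the _CDataMeta.__mul__/__rmul__ stubs describe.
IntArray10 = ctypes.c_int * 10
buf = IntArray10(*range(10))
print(list(buf))  # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
```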
@@ -357,7 +357,17 @@ class Action(_AttributeHolder):

if sys.version_info >= (3, 12):
    class BooleanOptionalAction(Action):
        if sys.version_info >= (3, 13):
        if sys.version_info >= (3, 14):
            def __init__(
                self,
                option_strings: Sequence[str],
                dest: str,
                default: bool | None = None,
                required: bool = False,
                help: str | None = None,
                deprecated: bool = False,
            ) -> None: ...
        elif sys.version_info >= (3, 13):
            @overload
            def __init__(
                self,

@@ -10,27 +10,28 @@ class _ABC(type):
    if sys.version_info >= (3, 9):
        def __init__(cls, *args: Unused) -> None: ...

@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Num(Constant, metaclass=_ABC):
    value: int | float | complex
if sys.version_info < (3, 14):
    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
    class Num(Constant, metaclass=_ABC):
        value: int | float | complex

@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Str(Constant, metaclass=_ABC):
    value: str
    # Aliases for value, for backwards compatibility
    s: str
    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
    class Str(Constant, metaclass=_ABC):
        value: str
        # Aliases for value, for backwards compatibility
        s: str

@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Bytes(Constant, metaclass=_ABC):
    value: bytes
    # Aliases for value, for backwards compatibility
    s: bytes
    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
    class Bytes(Constant, metaclass=_ABC):
        value: bytes
        # Aliases for value, for backwards compatibility
        s: bytes

@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class NameConstant(Constant, metaclass=_ABC): ...
    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
    class NameConstant(Constant, metaclass=_ABC): ...

@deprecated("Replaced by ast.Constant; removal scheduled for Python 3.14")
class Ellipsis(Constant, metaclass=_ABC): ...
    @deprecated("Replaced by ast.Constant; removed in Python 3.14")
    class Ellipsis(Constant, metaclass=_ABC): ...

if sys.version_info >= (3, 9):
    class slice(AST): ...
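As an aside (not part of the diff): code that previously used the deprecated `ast.Num`/`ast.Str` aliases reads the same data off `ast.Constant`, which is their replacement:

```python
import ast

# ast.Constant is the replacement for the deprecated Num/Str/Bytes/
# NameConstant/Ellipsis node classes version-gated above.
tree = ast.parse("x = 42")
node = tree.body[0].value
print(type(node).__name__, node.value)  # Constant 42
```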
@@ -151,13 +151,13 @@ if sys.version_info >= (3, 10):
    @overload
    def gather(*coros_or_futures: _FutureLike[_T], return_exceptions: Literal[False] = False) -> Future[list[_T]]: ...  # type: ignore[overload-overlap]
    @overload
    def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...  # type: ignore[overload-overlap]
    def gather(coro_or_future1: _FutureLike[_T1], /, *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ...
    @overload
    def gather(  # type: ignore[overload-overlap]
    def gather(
        coro_or_future1: _FutureLike[_T1], coro_or_future2: _FutureLike[_T2], /, *, return_exceptions: bool
    ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ...
    @overload
    def gather(  # type: ignore[overload-overlap]
    def gather(
        coro_or_future1: _FutureLike[_T1],
        coro_or_future2: _FutureLike[_T2],
        coro_or_future3: _FutureLike[_T3],
@@ -166,7 +166,7 @@ if sys.version_info >= (3, 10):
        return_exceptions: bool,
    ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ...
    @overload
    def gather(  # type: ignore[overload-overlap]
    def gather(
        coro_or_future1: _FutureLike[_T1],
        coro_or_future2: _FutureLike[_T2],
        coro_or_future3: _FutureLike[_T3],
@@ -176,7 +176,7 @@ if sys.version_info >= (3, 10):
        return_exceptions: bool,
    ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ...
    @overload
    def gather(  # type: ignore[overload-overlap]
    def gather(
        coro_or_future1: _FutureLike[_T1],
        coro_or_future2: _FutureLike[_T2],
        coro_or_future3: _FutureLike[_T3],
@@ -189,7 +189,7 @@ if sys.version_info >= (3, 10):
        tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException]
    ]: ...
    @overload
    def gather(  # type: ignore[overload-overlap]
    def gather(
        coro_or_future1: _FutureLike[_T1],
        coro_or_future2: _FutureLike[_T2],
        coro_or_future3: _FutureLike[_T3],
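As an aside (not part of the diff), a minimal runnable sketch of what these `gather` overloads type at the call site:

```python
import asyncio

async def one() -> int:
    return 1

async def two() -> str:
    return "two"

async def main() -> None:
    # Positional futures map to a per-argument result tuple in the overloads.
    a, b = await asyncio.gather(one(), two())
    print(a, b)  # 1 two
    # With return_exceptions=True each slot may instead hold a BaseException.
    results = await asyncio.gather(one(), two(), return_exceptions=True)
    print(results)  # [1, 'two']

asyncio.run(main())
```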
@@ -159,7 +159,7 @@ if sys.platform != "win32":

    class _UnixSelectorEventLoop(BaseSelectorEventLoop):
        if sys.version_info >= (3, 13):
            async def create_unix_server(  # type: ignore[override]
            async def create_unix_server(
                self,
                protocol_factory: _ProtocolFactory,
                path: StrPath | None = None,

@@ -1744,7 +1744,7 @@ _SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWit
# without creating many false-positive errors (see #7578).
# Instead, we special-case the most common examples of this: bool and literal integers.
@overload
def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ...  # type: ignore[overload-overlap]
def sum(iterable: Iterable[bool | _LiteralInteger], /, start: int = 0) -> int: ...
@overload
def sum(iterable: Iterable[_SupportsSumNoDefaultT], /) -> _SupportsSumNoDefaultT | Literal[0]: ...
@overload
@@ -1752,9 +1752,8 @@ def sum(iterable: Iterable[_AddableT1], /, start: _AddableT2) -> _AddableT1 | _A

# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object`
# (A "SupportsDunderDict" protocol doesn't work)
# Use a type: ignore to make complaints about overlapping overloads go away
@overload
def vars(object: type, /) -> types.MappingProxyType[str, Any]: ...  # type: ignore[overload-overlap]
def vars(object: type, /) -> types.MappingProxyType[str, Any]: ...
@overload
def vars(object: Any = ..., /) -> dict[str, Any]: ...

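For context (not part of the diff): the two `vars()` overloads distinguish classes from other objects. A small runnable sketch:

```python
# vars(cls) returns the class namespace as a read-only mappingproxy,
# while vars(obj) returns the instance __dict__ directly.
class C:
    x = 1

print(type(vars(C)).__name__)  # mappingproxy
c = C()
c.y = 2
print(vars(c))  # {'y': 2}
```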
@@ -55,6 +55,7 @@ class AbstractAsyncContextManager(Protocol[_T_co, _ExitT_co]):
    ) -> _ExitT_co: ...

class ContextDecorator:
    def _recreate_cm(self) -> Self: ...
    def __call__(self, func: _F) -> _F: ...

class _GeneratorContextManager(AbstractContextManager[_T_co, bool | None], ContextDecorator):
@@ -80,6 +81,7 @@ if sys.version_info >= (3, 10):
    _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]])

    class AsyncContextDecorator:
        def _recreate_cm(self) -> Self: ...
        def __call__(self, func: _AF) -> _AF: ...

    class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co, bool | None], AsyncContextDecorator):

@@ -1,12 +1,5 @@
import sys
from _ctypes import RTLD_GLOBAL as RTLD_GLOBAL, RTLD_LOCAL as RTLD_LOCAL, Structure, Union
from ctypes import DEFAULT_MODE as DEFAULT_MODE, cdll as cdll, pydll as pydll, pythonapi as pythonapi

if sys.version_info >= (3, 12):
    from _ctypes import SIZEOF_TIME_T as SIZEOF_TIME_T

if sys.platform == "win32":
    from ctypes import oledll as oledll, windll as windll
from ctypes import Structure, Union

# At runtime, the native endianness is an alias for Structure,
# while the other is a subclass with a metaclass added in.

@@ -5,7 +5,7 @@ from _typeshed import DataclassInstance
from builtins import type as Type  # alias to avoid name clashes with fields named "type"
from collections.abc import Callable, Iterable, Mapping
from typing import Any, Generic, Literal, Protocol, TypeVar, overload
from typing_extensions import TypeAlias, TypeIs
from typing_extensions import Never, TypeAlias, TypeIs

if sys.version_info >= (3, 9):
    from types import GenericAlias
@@ -213,6 +213,10 @@ else:
    ) -> Any: ...

def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ...

# HACK: `obj: Never` typing matches if object argument is using `Any` type.
@overload
def is_dataclass(obj: Never) -> TypeIs[DataclassInstance | type[DataclassInstance]]: ...  # type: ignore[narrowed-type-not-subtype]  # pyright: ignore[reportGeneralTypeIssues]
@overload
def is_dataclass(obj: type) -> TypeIs[type[DataclassInstance]]: ...
@overload
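As context for the `Never` overload above (an aside, not part of the diff): at runtime `is_dataclass()` is unchanged; the extra overload only exists so that checkers narrow an `Any` argument. A minimal sketch:

```python
from dataclasses import dataclass, is_dataclass
from typing import Any

@dataclass
class Point:
    x: int

p: Any = Point(1)
# A checker matches the Never overload for the `Any` argument and narrows
# `p` to DataclassInstance | type[DataclassInstance] inside the branch.
if is_dataclass(p):
    print(is_dataclass(p), is_dataclass(Point))  # True True
```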
@@ -1,6 +1,26 @@
from _typeshed import BytesPath, Incomplete, StrOrBytesPath, StrPath, Unused
from abc import abstractmethod
from collections.abc import Callable, Iterable
from distutils.command.bdist import bdist
from distutils.command.bdist_dumb import bdist_dumb
from distutils.command.bdist_rpm import bdist_rpm
from distutils.command.build import build
from distutils.command.build_clib import build_clib
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.check import check
from distutils.command.clean import clean
from distutils.command.config import config
from distutils.command.install import install
from distutils.command.install_data import install_data
from distutils.command.install_egg_info import install_egg_info
from distutils.command.install_headers import install_headers
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.command.register import register
from distutils.command.sdist import sdist
from distutils.command.upload import upload
from distutils.dist import Distribution
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Any, ClassVar, Literal, TypeVar, overload
@@ -28,8 +48,108 @@ class Command:
    def ensure_dirname(self, option: str) -> None: ...
    def get_command_name(self) -> str: ...
    def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ...
    # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
    @overload
    def get_finalized_command(self, command: Literal["bdist"], create: bool | Literal[0, 1] = 1) -> bdist: ...
    @overload
    def get_finalized_command(self, command: Literal["bdist_dumb"], create: bool | Literal[0, 1] = 1) -> bdist_dumb: ...
    @overload
    def get_finalized_command(self, command: Literal["bdist_rpm"], create: bool | Literal[0, 1] = 1) -> bdist_rpm: ...
    @overload
    def get_finalized_command(self, command: Literal["build"], create: bool | Literal[0, 1] = 1) -> build: ...
    @overload
    def get_finalized_command(self, command: Literal["build_clib"], create: bool | Literal[0, 1] = 1) -> build_clib: ...
    @overload
    def get_finalized_command(self, command: Literal["build_ext"], create: bool | Literal[0, 1] = 1) -> build_ext: ...
    @overload
    def get_finalized_command(self, command: Literal["build_py"], create: bool | Literal[0, 1] = 1) -> build_py: ...
    @overload
    def get_finalized_command(self, command: Literal["build_scripts"], create: bool | Literal[0, 1] = 1) -> build_scripts: ...
    @overload
    def get_finalized_command(self, command: Literal["check"], create: bool | Literal[0, 1] = 1) -> check: ...
    @overload
    def get_finalized_command(self, command: Literal["clean"], create: bool | Literal[0, 1] = 1) -> clean: ...
    @overload
    def get_finalized_command(self, command: Literal["config"], create: bool | Literal[0, 1] = 1) -> config: ...
    @overload
    def get_finalized_command(self, command: Literal["install"], create: bool | Literal[0, 1] = 1) -> install: ...
    @overload
    def get_finalized_command(self, command: Literal["install_data"], create: bool | Literal[0, 1] = 1) -> install_data: ...
    @overload
    def get_finalized_command(
        self, command: Literal["install_egg_info"], create: bool | Literal[0, 1] = 1
    ) -> install_egg_info: ...
    @overload
    def get_finalized_command(self, command: Literal["install_headers"], create: bool | Literal[0, 1] = 1) -> install_headers: ...
    @overload
    def get_finalized_command(self, command: Literal["install_lib"], create: bool | Literal[0, 1] = 1) -> install_lib: ...
    @overload
    def get_finalized_command(self, command: Literal["install_scripts"], create: bool | Literal[0, 1] = 1) -> install_scripts: ...
    @overload
    def get_finalized_command(self, command: Literal["register"], create: bool | Literal[0, 1] = 1) -> register: ...
    @overload
    def get_finalized_command(self, command: Literal["sdist"], create: bool | Literal[0, 1] = 1) -> sdist: ...
    @overload
    def get_finalized_command(self, command: Literal["upload"], create: bool | Literal[0, 1] = 1) -> upload: ...
    @overload
    def get_finalized_command(self, command: str, create: bool | Literal[0, 1] = 1) -> Command: ...
    @overload
    def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist: ...
    @overload
    def reinitialize_command(
        self, command: Literal["bdist_dumb"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> bdist_dumb: ...
    @overload
    def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool | Literal[0, 1] = 0) -> bdist_rpm: ...
    @overload
    def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build: ...
    @overload
    def reinitialize_command(
        self, command: Literal["build_clib"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> build_clib: ...
    @overload
    def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_ext: ...
    @overload
    def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool | Literal[0, 1] = 0) -> build_py: ...
    @overload
    def reinitialize_command(
        self, command: Literal["build_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> build_scripts: ...
    @overload
    def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool | Literal[0, 1] = 0) -> check: ...
    @overload
    def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool | Literal[0, 1] = 0) -> clean: ...
    @overload
    def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool | Literal[0, 1] = 0) -> config: ...
    @overload
    def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool | Literal[0, 1] = 0) -> install: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_data"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> install_data: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_egg_info"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> install_egg_info: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_headers"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> install_headers: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_lib"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> install_lib: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_scripts"], reinit_subcommands: bool | Literal[0, 1] = 0
    ) -> install_scripts: ...
    @overload
    def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool | Literal[0, 1] = 0) -> register: ...
    @overload
    def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool | Literal[0, 1] = 0) -> sdist: ...
    @overload
    def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool | Literal[0, 1] = 0) -> upload: ...
    @overload
    def reinitialize_command(self, command: str, reinit_subcommands: bool | Literal[0, 1] = 0) -> Command: ...
    @overload
    def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool | Literal[0, 1] = 0) -> _CommandT: ...

@@ -0,0 +1,48 @@
import sys

from . import (
    bdist,
    bdist_dumb,
    bdist_rpm,
    build,
    build_clib,
    build_ext,
    build_py,
    build_scripts,
    check,
    clean,
    install,
    install_data,
    install_headers,
    install_lib,
    install_scripts,
    register,
    sdist,
    upload,
)

__all__ = [
    "build",
    "build_py",
    "build_ext",
    "build_clib",
    "build_scripts",
    "clean",
    "install",
    "install_lib",
    "install_headers",
    "install_scripts",
    "install_data",
    "sdist",
    "register",
    "bdist",
    "bdist_dumb",
    "bdist_rpm",
    "check",
    "upload",
]

if sys.version_info < (3, 10):
    from . import bdist_wininst

    __all__ += ["bdist_wininst"]

@@ -1,6 +1,26 @@
from _typeshed import Incomplete, StrOrBytesPath, StrPath, SupportsWrite
from collections.abc import Iterable, MutableMapping
from distutils.cmd import Command
from distutils.command.bdist import bdist
from distutils.command.bdist_dumb import bdist_dumb
from distutils.command.bdist_rpm import bdist_rpm
from distutils.command.build import build
from distutils.command.build_clib import build_clib
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.check import check
from distutils.command.clean import clean
from distutils.command.config import config
from distutils.command.install import install
from distutils.command.install_data import install_data
from distutils.command.install_egg_info import install_egg_info
from distutils.command.install_headers import install_headers
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from distutils.command.register import register
from distutils.command.sdist import sdist
from distutils.command.upload import upload
from re import Pattern
from typing import IO, ClassVar, Literal, TypeVar, overload
from typing_extensions import TypeAlias
@@ -63,10 +83,6 @@ class Distribution:
    def __init__(self, attrs: MutableMapping[str, Incomplete] | None = None) -> None: ...
    def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ...
    def parse_config_files(self, filenames: Iterable[str] | None = None) -> None: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
    global_options: ClassVar[_OptionsList]
    common_usage: ClassVar[str]
    display_options: ClassVar[_OptionsList]
@@ -108,8 +124,137 @@ class Distribution:
    def print_commands(self) -> None: ...
    def get_command_list(self): ...
    def get_command_packages(self): ...
    # NOTE: This list comes directly from the distutils/command folder. Minus bdist_msi and bdist_wininst.
    @overload
    def get_command_obj(self, command: Literal["bdist"], create: Literal[1, True] = 1) -> bdist: ...
    @overload
    def get_command_obj(self, command: Literal["bdist_dumb"], create: Literal[1, True] = 1) -> bdist_dumb: ...
    @overload
    def get_command_obj(self, command: Literal["bdist_rpm"], create: Literal[1, True] = 1) -> bdist_rpm: ...
    @overload
    def get_command_obj(self, command: Literal["build"], create: Literal[1, True] = 1) -> build: ...
    @overload
    def get_command_obj(self, command: Literal["build_clib"], create: Literal[1, True] = 1) -> build_clib: ...
    @overload
    def get_command_obj(self, command: Literal["build_ext"], create: Literal[1, True] = 1) -> build_ext: ...
    @overload
    def get_command_obj(self, command: Literal["build_py"], create: Literal[1, True] = 1) -> build_py: ...
    @overload
    def get_command_obj(self, command: Literal["build_scripts"], create: Literal[1, True] = 1) -> build_scripts: ...
    @overload
    def get_command_obj(self, command: Literal["check"], create: Literal[1, True] = 1) -> check: ...
    @overload
    def get_command_obj(self, command: Literal["clean"], create: Literal[1, True] = 1) -> clean: ...
    @overload
    def get_command_obj(self, command: Literal["config"], create: Literal[1, True] = 1) -> config: ...
    @overload
    def get_command_obj(self, command: Literal["install"], create: Literal[1, True] = 1) -> install: ...
    @overload
    def get_command_obj(self, command: Literal["install_data"], create: Literal[1, True] = 1) -> install_data: ...
    @overload
    def get_command_obj(self, command: Literal["install_egg_info"], create: Literal[1, True] = 1) -> install_egg_info: ...
    @overload
    def get_command_obj(self, command: Literal["install_headers"], create: Literal[1, True] = 1) -> install_headers: ...
    @overload
    def get_command_obj(self, command: Literal["install_lib"], create: Literal[1, True] = 1) -> install_lib: ...
    @overload
    def get_command_obj(self, command: Literal["install_scripts"], create: Literal[1, True] = 1) -> install_scripts: ...
    @overload
    def get_command_obj(self, command: Literal["register"], create: Literal[1, True] = 1) -> register: ...
    @overload
    def get_command_obj(self, command: Literal["sdist"], create: Literal[1, True] = 1) -> sdist: ...
    @overload
    def get_command_obj(self, command: Literal["upload"], create: Literal[1, True] = 1) -> upload: ...
    @overload
    def get_command_obj(self, command: str, create: Literal[1, True] = 1) -> Command: ...
    # Not replicating the overloads for "Command | None", user may use "isinstance"
    @overload
    def get_command_obj(self, command: str, create: Literal[0, False]) -> Command | None: ...
    @overload
    def get_command_class(self, command: Literal["bdist"]) -> type[bdist]: ...
    @overload
    def get_command_class(self, command: Literal["bdist_dumb"]) -> type[bdist_dumb]: ...
    @overload
    def get_command_class(self, command: Literal["bdist_rpm"]) -> type[bdist_rpm]: ...
    @overload
    def get_command_class(self, command: Literal["build"]) -> type[build]: ...
    @overload
    def get_command_class(self, command: Literal["build_clib"]) -> type[build_clib]: ...
    @overload
    def get_command_class(self, command: Literal["build_ext"]) -> type[build_ext]: ...
    @overload
    def get_command_class(self, command: Literal["build_py"]) -> type[build_py]: ...
    @overload
    def get_command_class(self, command: Literal["build_scripts"]) -> type[build_scripts]: ...
    @overload
    def get_command_class(self, command: Literal["check"]) -> type[check]: ...
    @overload
    def get_command_class(self, command: Literal["clean"]) -> type[clean]: ...
    @overload
    def get_command_class(self, command: Literal["config"]) -> type[config]: ...
    @overload
    def get_command_class(self, command: Literal["install"]) -> type[install]: ...
    @overload
    def get_command_class(self, command: Literal["install_data"]) -> type[install_data]: ...
    @overload
    def get_command_class(self, command: Literal["install_egg_info"]) -> type[install_egg_info]: ...
    @overload
    def get_command_class(self, command: Literal["install_headers"]) -> type[install_headers]: ...
    @overload
    def get_command_class(self, command: Literal["install_lib"]) -> type[install_lib]: ...
    @overload
    def get_command_class(self, command: Literal["install_scripts"]) -> type[install_scripts]: ...
    @overload
    def get_command_class(self, command: Literal["register"]) -> type[register]: ...
    @overload
    def get_command_class(self, command: Literal["sdist"]) -> type[sdist]: ...
    @overload
    def get_command_class(self, command: Literal["upload"]) -> type[upload]: ...
    @overload
    def get_command_class(self, command: str) -> type[Command]: ...
    @overload
    def reinitialize_command(self, command: Literal["bdist"], reinit_subcommands: bool = False) -> bdist: ...
    @overload
    def reinitialize_command(self, command: Literal["bdist_dumb"], reinit_subcommands: bool = False) -> bdist_dumb: ...
    @overload
    def reinitialize_command(self, command: Literal["bdist_rpm"], reinit_subcommands: bool = False) -> bdist_rpm: ...
    @overload
    def reinitialize_command(self, command: Literal["build"], reinit_subcommands: bool = False) -> build: ...
    @overload
    def reinitialize_command(self, command: Literal["build_clib"], reinit_subcommands: bool = False) -> build_clib: ...
    @overload
    def reinitialize_command(self, command: Literal["build_ext"], reinit_subcommands: bool = False) -> build_ext: ...
    @overload
    def reinitialize_command(self, command: Literal["build_py"], reinit_subcommands: bool = False) -> build_py: ...
    @overload
    def reinitialize_command(self, command: Literal["build_scripts"], reinit_subcommands: bool = False) -> build_scripts: ...
    @overload
    def reinitialize_command(self, command: Literal["check"], reinit_subcommands: bool = False) -> check: ...
    @overload
    def reinitialize_command(self, command: Literal["clean"], reinit_subcommands: bool = False) -> clean: ...
    @overload
    def reinitialize_command(self, command: Literal["config"], reinit_subcommands: bool = False) -> config: ...
    @overload
    def reinitialize_command(self, command: Literal["install"], reinit_subcommands: bool = False) -> install: ...
    @overload
    def reinitialize_command(self, command: Literal["install_data"], reinit_subcommands: bool = False) -> install_data: ...
    @overload
    def reinitialize_command(
        self, command: Literal["install_egg_info"], reinit_subcommands: bool = False
    ) -> install_egg_info: ...
    @overload
    def reinitialize_command(self, command: Literal["install_headers"], reinit_subcommands: bool = False) -> install_headers: ...
    @overload
    def reinitialize_command(self, command: Literal["install_lib"], reinit_subcommands: bool = False) -> install_lib: ...
    @overload
    def reinitialize_command(self, command: Literal["install_scripts"], reinit_subcommands: bool = False) -> install_scripts: ...
    @overload
    def reinitialize_command(self, command: Literal["register"], reinit_subcommands: bool = False) -> register: ...
    @overload
    def reinitialize_command(self, command: Literal["sdist"], reinit_subcommands: bool = False) -> sdist: ...
    @overload
    def reinitialize_command(self, command: Literal["upload"], reinit_subcommands: bool = False) -> upload: ...
    @overload
    def reinitialize_command(self, command: str, reinit_subcommands: bool = False) -> Command: ...
    @overload
    def reinitialize_command(self, command: _CommandT, reinit_subcommands: bool = False) -> _CommandT: ...

@@ -66,7 +66,10 @@ def mktime_tz(data: _PDTZ) -> int: ...
def formatdate(timeval: float | None = None, localtime: bool = False, usegmt: bool = False) -> str: ...
def format_datetime(dt: datetime.datetime, usegmt: bool = False) -> str: ...

if sys.version_info >= (3, 12):
if sys.version_info >= (3, 14):
    def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...

elif sys.version_info >= (3, 12):
    @overload
    def localtime(dt: datetime.datetime | None = None) -> datetime.datetime: ...
    @overload

@@ -17,13 +17,24 @@ def cmpfiles(
) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ...

class dircmp(Generic[AnyStr]):
    def __init__(
        self,
        a: GenericPath[AnyStr],
        b: GenericPath[AnyStr],
        ignore: Sequence[AnyStr] | None = None,
        hide: Sequence[AnyStr] | None = None,
    ) -> None: ...
    if sys.version_info >= (3, 13):
        def __init__(
            self,
            a: GenericPath[AnyStr],
            b: GenericPath[AnyStr],
            ignore: Sequence[AnyStr] | None = None,
            hide: Sequence[AnyStr] | None = None,
            *,
            shallow: bool = True,
        ) -> None: ...
    else:
        def __init__(
            self,
            a: GenericPath[AnyStr],
            b: GenericPath[AnyStr],
            ignore: Sequence[AnyStr] | None = None,
            hide: Sequence[AnyStr] | None = None,
        ) -> None: ...
    left: AnyStr
    right: AnyStr
    hide: Sequence[AnyStr]

@@ -155,7 +155,7 @@ if sys.version_info >= (3, 10) and sys.version_info < (3, 12):
        @property
        def names(self) -> set[str]: ...
        @overload
        def select(self) -> Self: ...  # type: ignore[misc]
        def select(self) -> Self: ...
        @overload
        def select(
            self,
@@ -277,7 +277,7 @@ if sys.version_info >= (3, 12):

elif sys.version_info >= (3, 10):
    @overload
    def entry_points() -> SelectableGroups: ...  # type: ignore[overload-overlap]
    def entry_points() -> SelectableGroups: ...
    @overload
    def entry_points(
        *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ...

@@ -6,7 +6,7 @@ from ..pytree import Node

class FixUnicode(fixer_base.BaseFix):
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]]  # type: ignore[name-defined]  # Name "STRING" is not defined
    PATTERN: ClassVar[str]
    unicode_literals: bool
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results): ...

@@ -55,10 +55,9 @@ __all__ = [
    "setLogRecordFactory",
    "lastResort",
    "raiseExceptions",
    "warn",
]

if sys.version_info < (3, 13):
    __all__ += ["warn"]
if sys.version_info >= (3, 11):
    __all__ += ["getLevelNamesMapping"]
if sys.version_info >= (3, 12):
@@ -157,17 +156,16 @@ class Logger(Filterer):
        stacklevel: int = 1,
        extra: Mapping[str, object] | None = None,
    ) -> None: ...
    if sys.version_info < (3, 13):
        def warn(
            self,
            msg: object,
            *args: object,
            exc_info: _ExcInfoType = None,
            stack_info: bool = False,
            stacklevel: int = 1,
            extra: Mapping[str, object] | None = None,
        ) -> None: ...

    @deprecated("Deprecated; use warning() instead.")
    def warn(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: Mapping[str, object] | None = None,
    ) -> None: ...
    def error(
        self,
        msg: object,
@@ -412,18 +410,17 @@ class LoggerAdapter(Generic[_L]):
        extra: Mapping[str, object] | None = None,
        **kwargs: object,
    ) -> None: ...
    if sys.version_info < (3, 13):
        def warn(
            self,
            msg: object,
            *args: object,
            exc_info: _ExcInfoType = None,
            stack_info: bool = False,
            stacklevel: int = 1,
            extra: Mapping[str, object] | None = None,
            **kwargs: object,
        ) -> None: ...

    @deprecated("Deprecated; use warning() instead.")
    def warn(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: Mapping[str, object] | None = None,
        **kwargs: object,
    ) -> None: ...
    def error(
        self,
        msg: object,
@@ -523,17 +520,15 @@ def warning(
    stacklevel: int = 1,
    extra: Mapping[str, object] | None = None,
) -> None: ...

if sys.version_info < (3, 13):
    def warn(
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: Mapping[str, object] | None = None,
    ) -> None: ...

@deprecated("Deprecated; use warning() instead.")
def warn(
    msg: object,
    *args: object,
    exc_info: _ExcInfoType = None,
    stack_info: bool = False,
    stacklevel: int = 1,
    extra: Mapping[str, object] | None = None,
) -> None: ...
def error(
    msg: object,
    *args: object,

@@ -73,7 +73,7 @@ def copy(obj: _CT) -> _CT: ...
@overload
def synchronized(obj: _SimpleCData[_T], lock: _LockLike | None = None, ctx: Any | None = None) -> Synchronized[_T]: ...
@overload
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ...  # type: ignore
def synchronized(obj: ctypes.Array[c_char], lock: _LockLike | None = None, ctx: Any | None = None) -> SynchronizedString: ...
@overload
def synchronized(
    obj: ctypes.Array[_SimpleCData[_T]], lock: _LockLike | None = None, ctx: Any | None = None
@@ -115,12 +115,12 @@ class SynchronizedArray(SynchronizedBase[ctypes.Array[_SimpleCData[_T]]], Generi
class SynchronizedString(SynchronizedArray[bytes]):
    @overload  # type: ignore[override]
    def __getitem__(self, i: slice) -> bytes: ...
    @overload  # type: ignore[override]
    @overload
    def __getitem__(self, i: int) -> bytes: ...
    @overload  # type: ignore[override]
    def __setitem__(self, i: slice, value: bytes) -> None: ...
    @overload  # type: ignore[override]
    def __setitem__(self, i: int, value: bytes) -> None: ...  # type: ignore[override]
    @overload
    def __setitem__(self, i: int, value: bytes) -> None: ...
    def __getslice__(self, start: int, stop: int) -> bytes: ...  # type: ignore[override]
    def __setslice__(self, start: int, stop: int, values: bytes) -> None: ...  # type: ignore[override]


@@ -159,6 +159,20 @@ class Path(PurePath):
    def lchmod(self, mode: int) -> None: ...
    def lstat(self) -> stat_result: ...
    def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ...

    if sys.version_info >= (3, 14):
        def copy(self, target: StrPath, *, follow_symlinks: bool = True, preserve_metadata: bool = False) -> None: ...
        def copytree(
            self,
            target: StrPath,
            *,
            follow_symlinks: bool = True,
            preserve_metadata: bool = False,
            dirs_exist_ok: bool = False,
            ignore: Callable[[Self], bool] | None = None,
            on_error: Callable[[OSError], object] | None = None,
        ) -> None: ...

    # Adapted from builtins.open
    # Text mode: always returns a TextIOWrapper
    # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this.
@@ -232,10 +246,18 @@ class Path(PurePath):
    if sys.version_info >= (3, 9):
        def readlink(self) -> Self: ...

    def rename(self, target: str | PurePath) -> Self: ...
    def replace(self, target: str | PurePath) -> Self: ...
    if sys.version_info >= (3, 10):
        def rename(self, target: StrPath) -> Self: ...
        def replace(self, target: StrPath) -> Self: ...
    else:
        def rename(self, target: str | PurePath) -> Self: ...
        def replace(self, target: str | PurePath) -> Self: ...

    def resolve(self, strict: bool = False) -> Self: ...
    def rmdir(self) -> None: ...
    if sys.version_info >= (3, 14):
        def delete(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ...

    def symlink_to(self, target: StrOrBytesPath, target_is_directory: bool = False) -> None: ...
    if sys.version_info >= (3, 10):
        def hardlink_to(self, target: StrOrBytesPath) -> None: ...
@@ -266,6 +288,9 @@ class Path(PurePath):
        self, top_down: bool = ..., on_error: Callable[[OSError], object] | None = ..., follow_symlinks: bool = ...
    ) -> Iterator[tuple[Self, list[str], list[str]]]: ...

    if sys.version_info >= (3, 14):
        def rmtree(self, ignore_errors: bool = False, on_error: Callable[[OSError], object] | None = None) -> None: ...

class PosixPath(Path, PurePosixPath): ...
class WindowsPath(Path, PureWindowsPath): ...


@@ -84,7 +84,7 @@ class Pdb(Bdb, Cmd):
    def _runscript(self, filename: str) -> None: ...

    if sys.version_info >= (3, 13):
        def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...  # type: ignore[override]
        def completedefault(self, text: str, line: str, begidx: int, endidx: int) -> list[str]: ...

    def do_commands(self, arg: str) -> bool | None: ...
    def do_break(self, arg: str, temporary: bool = ...) -> bool | None: ...

@@ -1,7 +1,7 @@
import sys
from collections.abc import Callable, Iterable
from typing import Final
from typing_extensions import TypeAlias
from typing_extensions import TypeAlias, deprecated

if sys.platform != "win32":
    __all__ = ["openpty", "fork", "spawn"]
@@ -13,7 +13,12 @@ if sys.platform != "win32":

    CHILD: Final = 0
    def openpty() -> tuple[int, int]: ...
    def master_open() -> tuple[int, str]: ...  # deprecated, use openpty()
    def slave_open(tty_name: str) -> int: ...  # deprecated, use openpty()

    if sys.version_info < (3, 14):
        @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead")
        def master_open() -> tuple[int, str]: ...
        @deprecated("Deprecated in 3.12, to be removed in 3.14; use openpty() instead")
        def slave_open(tty_name: str) -> int: ...

    def fork() -> tuple[int, int]: ...
    def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ...

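As an aside (not part of the diff), a minimal Unix-only sketch of `openpty()`, the documented replacement for the deprecated `master_open()`/`slave_open()` pair:

```python
import os
import pty

# openpty() returns (master_fd, slave_fd); bytes written to the slave
# end of the pseudo-terminal can be read back from the master end.
master_fd, slave_fd = pty.openpty()
os.write(slave_fd, b"hello")
print(os.read(master_fd, 5))  # b'hello'
os.close(master_fd)
os.close(slave_fd)
```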
@@ -74,7 +74,7 @@ class Match(Generic[AnyStr]):
    @overload
    def expand(self: Match[str], template: str) -> str: ...
    @overload
    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...  # type: ignore[overload-overlap]
    def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ...
    @overload
    def expand(self, template: AnyStr) -> AnyStr: ...
    # group() returns "AnyStr" or "AnyStr | None", depending on the pattern.
@@ -124,19 +124,21 @@ class Pattern(Generic[AnyStr]):
    @overload
    def search(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
    @overload
    def search(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload
    def match(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...
    @overload
    def match(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload
    def fullmatch(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Match[str] | None: ...
    @overload
    def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Match[bytes] | None: ...  # type: ignore[overload-overlap]
    def fullmatch(
        self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize
    ) -> Match[bytes] | None: ...
    @overload
    def fullmatch(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Match[AnyStr] | None: ...
    @overload
@@ -155,13 +157,15 @@ class Pattern(Generic[AnyStr]):
    @overload
    def finditer(self: Pattern[str], string: str, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[str]]: ...
    @overload
    def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[bytes]]: ...  # type: ignore[overload-overlap]
    def finditer(
        self: Pattern[bytes], string: ReadableBuffer, pos: int = 0, endpos: int = sys.maxsize
    ) -> Iterator[Match[bytes]]: ...
    @overload
    def finditer(self, string: AnyStr, pos: int = 0, endpos: int = sys.maxsize) -> Iterator[Match[AnyStr]]: ...
    @overload
    def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> str: ...
    @overload
    def sub(  # type: ignore[overload-overlap]
    def sub(
        self: Pattern[bytes],
        repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
        string: ReadableBuffer,
@@ -172,7 +176,7 @@ class Pattern(Generic[AnyStr]):
    @overload
    def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = 0) -> tuple[str, int]: ...
    @overload
    def subn(  # type: ignore[overload-overlap]
    def subn(
        self: Pattern[bytes],
        repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer],
        string: ReadableBuffer,

@@ -29,7 +29,10 @@ def DateFromTicks(ticks: float) -> Date: ...
def TimeFromTicks(ticks: float) -> Time: ...
def TimestampFromTicks(ticks: float) -> Timestamp: ...

version_info: tuple[int, int, int]
if sys.version_info < (3, 14):
    # Deprecated in 3.12, removed in 3.14.
    version_info: tuple[int, int, int]

sqlite_version_info: tuple[int, int, int]
Binary = memoryview

@@ -90,7 +93,10 @@ SQLITE_UPDATE: Final[int]
adapters: dict[tuple[type[Any], type[Any]], _Adapter[Any]]
converters: dict[str, _Converter]
sqlite_version: str
version: str

if sys.version_info < (3, 14):
    # Deprecated in 3.12, removed in 3.14.
    version: str

if sys.version_info >= (3, 11):
    SQLITE_ABORT: Final[int]

@@ -2,6 +2,7 @@ import sys
from _collections_abc import dict_keys
from collections.abc import Sequence
from typing import Any
from typing_extensions import deprecated

__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"]

@@ -51,7 +52,9 @@ class Function(SymbolTable):
    def get_nonlocals(self) -> tuple[str, ...]: ...

class Class(SymbolTable):
    def get_methods(self) -> tuple[str, ...]: ...
    if sys.version_info < (3, 16):
        @deprecated("deprecated in Python 3.14, will be removed in Python 3.16")
        def get_methods(self) -> tuple[str, ...]: ...

class Symbol:
    def __init__(

@@ -423,7 +423,7 @@ class TarInfo:
    name: str
    path: str
    size: int
    mtime: int
    mtime: int | float
    chksum: int
    devmajor: int
    devminor: int

@@ -463,7 +463,7 @@ class TemporaryDirectory(Generic[AnyStr]):

# The overloads overlap, but they should still work fine.
@overload
def mkstemp(  # type: ignore[overload-overlap]
def mkstemp(
    suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None, text: bool = False
) -> tuple[int, str]: ...
@overload
@@ -473,7 +473,7 @@ def mkstemp(

# The overloads overlap, but they should still work fine.
@overload
def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...  # type: ignore[overload-overlap]
def mkdtemp(suffix: str | None = None, prefix: str | None = None, dir: StrPath | None = None) -> str: ...
@overload
def mkdtemp(suffix: bytes | None = None, prefix: bytes | None = None, dir: BytesPath | None = None) -> bytes: ...
def mktemp(suffix: str = "", prefix: str = "tmp", dir: StrPath | None = None) -> str: ...

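For context (not part of the diff): the `mkstemp`/`mkdtemp` overloads overlap because `str` and `bytes` arguments select different return types. A small runnable sketch:

```python
import os
import tempfile

fd, path = tempfile.mkstemp(suffix=".txt")   # str arguments -> str path
os.close(fd)
os.remove(path)
print(type(path).__name__)  # str

bdir = tempfile.mkdtemp(suffix=b"-work")     # bytes arguments -> bytes path
print(type(bdir).__name__)  # bytes
os.rmdir(bdir)
```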
||||
@@ -2148,11 +2148,12 @@ class Listbox(Widget, XView, YView):
|
||||
selectborderwidth: _ScreenUnits = 0,
|
||||
selectforeground: str = ...,
|
||||
# from listbox man page: "The value of the [selectmode] option may be
|
||||
# arbitrary, but the default bindings expect it to be ..."
|
||||
# arbitrary, but the default bindings expect it to be either single,
|
||||
# browse, multiple, or extended"
|
||||
#
|
||||
# I have never seen anyone setting this to something else than what
|
||||
# "the default bindings expect", but let's support it anyway.
|
||||
selectmode: str = "browse",
|
||||
selectmode: str | Literal["single", "browse", "multiple", "extended"] = "browse", # noqa: Y051
|
||||
setgrid: bool = False,
|
||||
state: Literal["normal", "disabled"] = "normal",
|
||||
takefocus: _TakeFocusValue = "",
|
||||
@@ -2187,7 +2188,7 @@ class Listbox(Widget, XView, YView):
|
||||
selectbackground: str = ...,
|
||||
selectborderwidth: _ScreenUnits = ...,
|
||||
selectforeground: str = ...,
|
||||
selectmode: str = ...,
|
||||
selectmode: str | Literal["single", "browse", "multiple", "extended"] = ..., # noqa: Y051
|
||||
setgrid: bool = ...,
|
||||
state: Literal["normal", "disabled"] = ...,
|
||||
takefocus: _TakeFocusValue = ...,
|
||||
@@ -2907,6 +2908,9 @@ class Scrollbar(Widget):
|
||||
def set(self, first: float | str, last: float | str) -> None: ...
|
||||
|
||||
_TextIndex: TypeAlias = _tkinter.Tcl_Obj | str | float | Misc
|
||||
_WhatToCount: TypeAlias = Literal[
|
||||
"chars", "displaychars", "displayindices", "displaylines", "indices", "lines", "xpixels", "ypixels"
|
||||
]
|
||||
|
||||
class Text(Widget, XView, YView):
|
||||
def __init__(
|
||||
@@ -3021,7 +3025,27 @@ class Text(Widget, XView, YView):
|
||||
config = configure
|
||||
def bbox(self, index: _TextIndex) -> tuple[int, int, int, int] | None: ... # type: ignore[override]
|
||||
def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ...
|
||||
def count(self, index1, index2, *args): ... # TODO
|
||||
@overload
|
||||
def count(self, index1: _TextIndex, index2: _TextIndex) -> tuple[int] | None: ...
|
||||
@overload
|
||||
def count(self, index1: _TextIndex, index2: _TextIndex, arg: _WhatToCount | Literal["update"], /) -> tuple[int] | None: ...
|
||||
@overload
|
||||
def count(self, index1: _TextIndex, index2: _TextIndex, arg1: Literal["update"], arg2: _WhatToCount, /) -> int | None: ...
|
||||
@overload
|
||||
def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: Literal["update"], /) -> int | None: ...
|
||||
@overload
|
||||
def count(self, index1: _TextIndex, index2: _TextIndex, arg1: _WhatToCount, arg2: _WhatToCount, /) -> tuple[int, int]: ...
|
||||
@overload
|
||||
def count(
|
||||
self,
|
||||
index1: _TextIndex,
|
||||
index2: _TextIndex,
|
||||
arg1: _WhatToCount | Literal["update"],
|
||||
arg2: _WhatToCount | Literal["update"],
|
||||
arg3: _WhatToCount | Literal["update"],
|
||||
/,
|
||||
*args: _WhatToCount | Literal["update"],
|
||||
) -> tuple[int, ...]: ...
|
||||
@overload
|
||||
def debug(self, boolean: None = None) -> bool: ...
|
||||
@overload
|
||||
@@ -3564,7 +3588,7 @@ class Spinbox(Widget, XView):
|
||||
def scan_dragto(self, x): ...
|
||||
def selection(self, *args) -> tuple[int, ...]: ...
|
||||
def selection_adjust(self, index): ...
|
||||
def selection_clear(self): ...
|
||||
def selection_clear(self): ... # type: ignore[override]
|
||||
def selection_element(self, element: Incomplete | None = None): ...
|
||||
def selection_from(self, index: int) -> None: ...
|
||||
def selection_present(self) -> None: ...
|
||||
|
||||
@@ -1040,7 +1040,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
|
||||
@overload
|
||||
def heading(self, column: str | int, option: str) -> Any: ...
|
||||
@overload
|
||||
def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ... # type: ignore[overload-overlap]
|
||||
def heading(self, column: str | int, option: None = None) -> _TreeviewHeaderDict: ...
|
||||
@overload
|
||||
def heading(
|
||||
self,
|
||||
@@ -1052,7 +1052,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
|
||||
anchor: tkinter._Anchor = ...,
|
||||
command: str | Callable[[], object] = ...,
|
||||
) -> None: ...
|
||||
def identify(self, component, x, y): ... # Internal Method. Leave untyped
|
||||
# Internal Method. Leave untyped:
|
||||
def identify(self, component, x, y): ... # type: ignore[override]
|
||||
def identify_row(self, y: int) -> str: ...
|
||||
def identify_column(self, x: int) -> str: ...
|
||||
def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ...
|
||||
@@ -1084,7 +1085,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
|
||||
@overload
|
||||
def item(self, item: str | int, option: str) -> Any: ...
|
||||
@overload
|
||||
def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ... # type: ignore[overload-overlap]
|
||||
def item(self, item: str | int, option: None = None) -> _TreeviewItemDict: ...
|
||||
@overload
|
||||
def item(
|
||||
self,
|
||||
|
||||
@@ -338,7 +338,7 @@ class TPen:
|
||||
def isvisible(self) -> bool: ...
|
||||
# Note: signatures 1 and 2 overlap unsafely when no arguments are provided
|
||||
@overload
|
||||
def pen(self) -> _PenState: ... # type: ignore[overload-overlap]
|
||||
def pen(self) -> _PenState: ...
|
||||
@overload
|
||||
def pen(
|
||||
self,
|
||||
@@ -384,7 +384,7 @@ class RawTurtle(TPen, TNavigator):
|
||||
def shape(self, name: str) -> None: ...
|
||||
# Unsafely overlaps when no arguments are provided
|
||||
@overload
|
||||
def shapesize(self) -> tuple[float, float, float]: ... # type: ignore[overload-overlap]
|
||||
def shapesize(self) -> tuple[float, float, float]: ...
|
||||
@overload
|
||||
def shapesize(
|
||||
self, stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None
|
||||
@@ -395,7 +395,7 @@ class RawTurtle(TPen, TNavigator):
|
||||
def shearfactor(self, shear: float) -> None: ...
|
||||
# Unsafely overlaps when no arguments are provided
|
||||
@overload
|
||||
def shapetransform(self) -> tuple[float, float, float, float]: ... # type: ignore[overload-overlap]
|
||||
def shapetransform(self) -> tuple[float, float, float, float]: ...
|
||||
@overload
|
||||
def shapetransform(
|
||||
self, t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
|
||||
@@ -622,7 +622,7 @@ def isvisible() -> bool: ...
|
||||
|
||||
# Note: signatures 1 and 2 overlap unsafely when no arguments are provided
|
||||
@overload
|
||||
def pen() -> _PenState: ... # type: ignore[overload-overlap]
|
||||
def pen() -> _PenState: ...
|
||||
@overload
|
||||
def pen(
|
||||
pen: _PenState | None = None,
|
||||
@@ -661,7 +661,7 @@ if sys.version_info >= (3, 12):
|
||||
|
||||
# Unsafely overlaps when no arguments are provided
|
||||
@overload
|
||||
def shapesize() -> tuple[float, float, float]: ... # type: ignore[overload-overlap]
|
||||
def shapesize() -> tuple[float, float, float]: ...
|
||||
@overload
|
||||
def shapesize(stretch_wid: float | None = None, stretch_len: float | None = None, outline: float | None = None) -> None: ...
|
||||
@overload
|
||||
@@ -671,7 +671,7 @@ def shearfactor(shear: float) -> None: ...
|
||||
|
||||
# Unsafely overlaps when no arguments are provided
|
||||
@overload
|
||||
def shapetransform() -> tuple[float, float, float, float]: ... # type: ignore[overload-overlap]
|
||||
def shapetransform() -> tuple[float, float, float, float]: ...
|
||||
@overload
|
||||
def shapetransform(
|
||||
t11: float | None = None, t12: float | None = None, t21: float | None = None, t22: float | None = None
|
||||
|
||||
@@ -305,9 +305,9 @@ class MappingProxyType(Mapping[_KT, _VT_co]):
|
||||
def values(self) -> ValuesView[_VT_co]: ...
|
||||
def items(self) -> ItemsView[_KT, _VT_co]: ...
|
||||
@overload
|
||||
def get(self, key: _KT, /) -> _VT_co | None: ... # type: ignore[override]
|
||||
def get(self, key: _KT, /) -> _VT_co | None: ...
|
||||
@overload
|
||||
def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ... # type: ignore[override]
|
||||
def get(self, key: _KT, default: _VT_co | _T2, /) -> _VT_co | _T2: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
def __reversed__(self) -> Iterator[_KT]: ...
|
||||
@@ -583,7 +583,7 @@ _P = ParamSpec("_P")
|
||||
|
||||
# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable
|
||||
@overload
|
||||
def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[overload-overlap]
|
||||
def coroutine(func: Callable[_P, Generator[Any, Any, _R]]) -> Callable[_P, Awaitable[_R]]: ...
|
||||
@overload
|
||||
def coroutine(func: _Fn) -> _Fn: ...
|
||||
|
||||
|
||||
@@ -846,7 +846,8 @@ class TextIO(IO[str]):
    @abstractmethod
    def __enter__(self) -> TextIO: ...

ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview
if sys.version_info < (3, 14):
    ByteString: typing_extensions.TypeAlias = bytes | bytearray | memoryview

# Functions

@@ -299,7 +299,7 @@ class _patcher:
    # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock],
    # but that's impossible with the current type system.
    @overload
    def __call__(  # type: ignore[overload-overlap]
    def __call__(
        self,
        target: str,
        new: _T,

@@ -198,13 +198,13 @@ else:

# Requires an iterable of length 6
@overload
def urlunparse(components: Iterable[None]) -> Literal[b""]: ...
def urlunparse(components: Iterable[None]) -> Literal[b""]: ...  # type: ignore[overload-overlap]
@overload
def urlunparse(components: Iterable[AnyStr | None]) -> AnyStr: ...

# Requires an iterable of length 5
@overload
def urlunsplit(components: Iterable[None]) -> Literal[b""]: ...
def urlunsplit(components: Iterable[None]) -> Literal[b""]: ...  # type: ignore[overload-overlap]
@overload
def urlunsplit(components: Iterable[AnyStr | None]) -> AnyStr: ...
def unwrap(url: str) -> str: ...
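The `Iterable[None]` overloads annotate a genuine quirk: when every component passed to `urlunparse`/`urlunsplit` is `None`, the argument-coercion machinery treats the input as the bytes flavor of the API and the result is empty bytes. Both overloads in action (my example, not part of the diff):

from urllib.parse import urlunparse

urlunparse(("https", "example.com", "/path", "", "q=1", ""))
# -> 'https://example.com/path?q=1'

urlunparse((None, None, None, None, None, None))
# -> b'' (hence the Literal[b""] return type in the stub)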
@@ -79,6 +79,7 @@ else:
    def pathname2url(pathname: str) -> str: ...

def getproxies() -> dict[str, str]: ...
def getproxies_environment() -> dict[str, str]: ...
def parse_http_list(s: str) -> list[str]: ...
def parse_keqv_list(l: list[str]) -> dict[str, str]: ...

@@ -1,4 +1,4 @@
from typing import Any
from typing import Any, Final

from .domreg import getDOMImplementation as getDOMImplementation, registerDOMImplementation as registerDOMImplementation

@@ -17,22 +17,22 @@ class Node:
    NOTATION_NODE: int

# ExceptionCode
INDEX_SIZE_ERR: int
DOMSTRING_SIZE_ERR: int
HIERARCHY_REQUEST_ERR: int
WRONG_DOCUMENT_ERR: int
INVALID_CHARACTER_ERR: int
NO_DATA_ALLOWED_ERR: int
NO_MODIFICATION_ALLOWED_ERR: int
NOT_FOUND_ERR: int
NOT_SUPPORTED_ERR: int
INUSE_ATTRIBUTE_ERR: int
INVALID_STATE_ERR: int
SYNTAX_ERR: int
INVALID_MODIFICATION_ERR: int
NAMESPACE_ERR: int
INVALID_ACCESS_ERR: int
VALIDATION_ERR: int
INDEX_SIZE_ERR: Final[int]
DOMSTRING_SIZE_ERR: Final[int]
HIERARCHY_REQUEST_ERR: Final[int]
WRONG_DOCUMENT_ERR: Final[int]
INVALID_CHARACTER_ERR: Final[int]
NO_DATA_ALLOWED_ERR: Final[int]
NO_MODIFICATION_ALLOWED_ERR: Final[int]
NOT_FOUND_ERR: Final[int]
NOT_SUPPORTED_ERR: Final[int]
INUSE_ATTRIBUTE_ERR: Final[int]
INVALID_STATE_ERR: Final[int]
SYNTAX_ERR: Final[int]
INVALID_MODIFICATION_ERR: Final[int]
NAMESPACE_ERR: Final[int]
INVALID_ACCESS_ERR: Final[int]
VALIDATION_ERR: Final[int]

class DOMException(Exception):
    code: int
@@ -62,8 +62,8 @@ class UserDataHandler:
    NODE_DELETED: int
    NODE_RENAMED: int

XML_NAMESPACE: str
XMLNS_NAMESPACE: str
XHTML_NAMESPACE: str
EMPTY_NAMESPACE: None
EMPTY_PREFIX: None
XML_NAMESPACE: Final[str]
XMLNS_NAMESPACE: Final[str]
XHTML_NAMESPACE: Final[str]
EMPTY_NAMESPACE: Final[None]
EMPTY_PREFIX: Final[None]

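Marking these constants `Final` tells type checkers that they may not be reassigned or redeclared, without changing anything at runtime. The effect in two lines (illustration only, not from the stubs):

from typing import Final

INDEX_SIZE_ERR: Final[int] = 1
INDEX_SIZE_ERR = 2  # flagged by a type checker: cannot assign to a Final name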
@@ -1,14 +1,15 @@
import sys
from _typeshed import FileDescriptorOrPath
from collections.abc import Callable
from typing import Final
from xml.etree.ElementTree import Element

XINCLUDE: str
XINCLUDE_INCLUDE: str
XINCLUDE_FALLBACK: str
XINCLUDE: Final[str]
XINCLUDE_INCLUDE: Final[str]
XINCLUDE_FALLBACK: Final[str]

if sys.version_info >= (3, 9):
    DEFAULT_MAX_INCLUSION_DEPTH: int
    DEFAULT_MAX_INCLUSION_DEPTH: Final = 6

class FatalIncludeError(SyntaxError): ...

@@ -2,7 +2,7 @@ import sys
from _collections_abc import dict_keys
from _typeshed import FileDescriptorOrPath, ReadableBuffer, SupportsRead, SupportsWrite
from collections.abc import Callable, Generator, ItemsView, Iterable, Iterator, Mapping, Sequence
from typing import Any, Literal, SupportsIndex, TypeVar, overload
from typing import Any, Final, Literal, SupportsIndex, TypeVar, overload
from typing_extensions import TypeAlias, TypeGuard, deprecated

__all__ = [
@@ -41,7 +41,7 @@ _FileRead: TypeAlias = FileDescriptorOrPath | SupportsRead[bytes] | SupportsRead
_FileWriteC14N: TypeAlias = FileDescriptorOrPath | SupportsWrite[bytes]
_FileWrite: TypeAlias = _FileWriteC14N | SupportsWrite[str]

VERSION: str
VERSION: Final[str]

class ParseError(SyntaxError):
    code: int

@@ -94,6 +94,20 @@ class ZipExtFile(io.BufferedIOBase):
class _Writer(Protocol):
    def write(self, s: str, /) -> object: ...

class _ZipReadable(Protocol):
    def seek(self, offset: int, whence: int = 0, /) -> int: ...
    def read(self, n: int = -1, /) -> bytes: ...

class _ZipTellable(Protocol):
    def tell(self) -> int: ...

class _ZipReadableTellable(_ZipReadable, _ZipTellable, Protocol): ...

class _ZipWritable(Protocol):
    def flush(self) -> None: ...
    def close(self) -> None: ...
    def write(self, b: bytes, /) -> int: ...

class ZipFile:
    filename: str | None
    debug: int
@@ -106,24 +120,50 @@ class ZipFile:
    compresslevel: int | None  # undocumented
    mode: _ZipFileMode  # undocumented
    pwd: bytes | None  # undocumented
    # metadata_encoding is new in 3.11
    if sys.version_info >= (3, 11):
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],
            mode: _ZipFileMode = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: str | None = None,
        ) -> None: ...
        # metadata_encoding is only allowed for read mode
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadable,
            mode: Literal["r"] = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: str | None,
            metadata_encoding: str | None = None,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],
            mode: _ZipFileMode = "r",
            file: StrPath | _ZipWritable,
            mode: Literal["w", "x"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
            metadata_encoding: None = None,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadableTellable,
            mode: Literal["a"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
@@ -132,6 +172,7 @@ class ZipFile:
            metadata_encoding: None = None,
        ) -> None: ...
    else:
        @overload
        def __init__(
            self,
            file: StrPath | IO[bytes],
@@ -142,6 +183,39 @@ class ZipFile:
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadable,
            mode: Literal["r"] = "r",
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipWritable,
            mode: Literal["w", "x"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...
        @overload
        def __init__(
            self,
            file: StrPath | _ZipReadableTellable,
            mode: Literal["a"] = ...,
            compression: int = 0,
            allowZip64: bool = True,
            compresslevel: int | None = None,
            *,
            strict_timestamps: bool = True,
        ) -> None: ...

    def __enter__(self) -> Self: ...
    def __exit__(

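The new protocols let `ZipFile` accept duck-typed streams rather than full file objects; for `"w"`/`"x"` mode, `write`, `flush`, and `close` are enough. A rough sketch of a conforming sink, assuming CPython's documented support for non-seekable output streams (the class name is made up for this note):

import io
import zipfile

class Sink:
    """Write-only stream: just enough surface for ZipFile's write mode."""

    def __init__(self) -> None:
        self.buffer = io.BytesIO()

    def write(self, b: bytes) -> int:
        return self.buffer.write(b)

    def flush(self) -> None:
        pass

    def close(self) -> None:
        pass

sink = Sink()
with zipfile.ZipFile(sink, mode="w") as zf:  # matches the _ZipWritable overload
    zf.writestr("hello.txt", "hi")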
@@ -1,5 +1,6 @@
use anyhow::Ok;
use lsp_types::NotebookCellKind;
use ruff_notebook::CellMetadata;
use rustc_hash::{FxBuildHasher, FxHashMap};

use crate::{PositionEncoding, TextDocument};
@@ -65,7 +66,7 @@ impl NotebookDocument {
                NotebookCellKind::Code => ruff_notebook::Cell::Code(ruff_notebook::CodeCell {
                    execution_count: None,
                    id: None,
                    metadata: serde_json::Value::Null,
                    metadata: CellMetadata::default(),
                    outputs: vec![],
                    source: ruff_notebook::SourceValue::String(
                        cell.document.contents().to_string(),
@@ -75,7 +76,7 @@ impl NotebookDocument {
                ruff_notebook::Cell::Markdown(ruff_notebook::MarkdownCell {
                    attachments: None,
                    id: None,
                    metadata: serde_json::Value::Null,
                    metadata: CellMetadata::default(),
                    source: ruff_notebook::SourceValue::String(
                        cell.document.contents().to_string(),
                    ),

@@ -2,8 +2,9 @@ use std::borrow::Cow;

use lsp_types::request::DocumentDiagnosticRequest;
use lsp_types::{
    Diagnostic, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
    FullDocumentDiagnosticReport, Range, RelatedFullDocumentDiagnosticReport, Url,
    Diagnostic, DiagnosticSeverity, DocumentDiagnosticParams, DocumentDiagnosticReport,
    DocumentDiagnosticReportResult, FullDocumentDiagnosticReport, Position, Range,
    RelatedFullDocumentDiagnosticReport, Url,
};

use red_knot_workspace::db::RootDatabase;
@@ -56,16 +57,37 @@ fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Di
    diagnostics
        .as_slice()
        .iter()
        .map(|message| Diagnostic {
            range: Range::default(),
            severity: None,
            tags: None,
            code: None,
            code_description: None,
            source: Some("red-knot".into()),
            message: message.to_string(),
            related_information: None,
            data: None,
        })
        .map(|message| to_lsp_diagnostic(message))
        .collect()
}

fn to_lsp_diagnostic(message: &str) -> Diagnostic {
    let words = message.split(':').collect::<Vec<_>>();

    let (range, message) = match words.as_slice() {
        [_filename, line, column, message] => {
            let line = line.parse::<u32>().unwrap_or_default();
            let column = column.parse::<u32>().unwrap_or_default();
            (
                Range::new(
                    Position::new(line.saturating_sub(1), column.saturating_sub(1)),
                    Position::new(line, column),
                ),
                message.trim(),
            )
        }
        _ => (Range::default(), message),
    };

    Diagnostic {
        range,
        severity: Some(DiagnosticSeverity::ERROR),
        tags: None,
        code: None,
        code_description: None,
        source: Some("red-knot".into()),
        message: message.to_string(),
        related_information: None,
        data: None,
    }
}

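`to_lsp_diagnostic` recovers an LSP range from the `path:row:column: message` strings the linter currently emits, converting one-based rows and columns into the zero-based positions LSP expects. A rough Python restatement of that parse, written for this note rather than taken from the crate:

def parse_diagnostic(message: str) -> tuple[tuple[int, int], str]:
    parts = message.split(":")
    if len(parts) != 4:
        # Anything else (including Windows paths, whose drive letter adds a
        # colon) falls back to a default position, as the Rust code does.
        return (0, 0), message
    _filename, row, column, text = parts
    # 1-based row/column -> 0-based LSP position
    return (max(int(row) - 1, 0), max(int(column) - 1, 0)), text.strip()

parse_diagnostic("/test.py:1:8: Unresolved import 'random22'")
# -> ((0, 7), "Unresolved import 'random22'")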
@@ -78,7 +78,8 @@ impl Session {
                custom_typeshed: None,
            },
        };
        workspaces.insert(path, RootDatabase::new(metadata, program_settings, system));
        // TODO(micha): Handle the case where the program settings are incorrect more gracefully.
        workspaces.insert(path, RootDatabase::new(metadata, program_settings, system)?);
    }

    Ok(Self {

@@ -49,7 +49,8 @@ impl Workspace {
            search_paths: SearchPathSettings::default(),
        };

        let db = RootDatabase::new(workspace, program_settings, system.clone());
        let db =
            RootDatabase::new(workspace, program_settings, system.clone()).map_err(into_error)?;

        Ok(Self { db, system })
    }

@@ -17,5 +17,5 @@ fn check() {

    let result = workspace.check_file(&test).expect("Check to succeed");

    assert_eq!(result, vec!["Unresolved import 'random22'"]);
    assert_eq!(result, vec!["/test.py:1:8: Unresolved import 'random22'"]);
}

@@ -17,6 +17,7 @@ red_knot_python_semantic = { workspace = true }
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["os", "cache"] }
ruff_python_ast = { workspace = true }
ruff_text_size = { workspace = true }

anyhow = { workspace = true }
crossbeam = { workspace = true }

@@ -28,7 +28,11 @@ pub struct RootDatabase {
}

impl RootDatabase {
    pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
    pub fn new<S>(
        workspace: WorkspaceMetadata,
        settings: ProgramSettings,
        system: S,
    ) -> anyhow::Result<Self>
    where
        S: System + 'static + Send + Sync + RefUnwindSafe,
    {
@@ -41,10 +45,10 @@ impl RootDatabase {

        let workspace = Workspace::from_metadata(&db, workspace);
        // Initialize the `Program` singleton
        Program::from_settings(&db, settings);
        Program::from_settings(&db, settings)?;

        db.workspace = Some(workspace);
        db
        Ok(db)
    }

    pub fn workspace(&self) -> Workspace {
@@ -150,6 +154,7 @@ impl Db for RootDatabase {}
#[cfg(test)]
pub(crate) mod tests {
    use salsa::Event;
    use std::sync::Arc;

    use red_knot_python_semantic::{vendored_typeshed_stubs, Db as SemanticDb};
    use ruff_db::files::Files;
@@ -162,6 +167,7 @@ pub(crate) mod tests {
    #[salsa::db]
    pub(crate) struct TestDb {
        storage: salsa::Storage<Self>,
        events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
        files: Files,
        system: TestSystem,
        vendored: VendoredFileSystem,
@@ -174,10 +180,24 @@ pub(crate) mod tests {
                system: TestSystem::default(),
                vendored: vendored_typeshed_stubs().clone(),
                files: Files::default(),
                events: Arc::default(),
            }
        }
    }

    impl TestDb {
        /// Takes the salsa events.
        ///
        /// ## Panics
        /// If there are any pending salsa snapshots.
        pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
            let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");

            let events = inner.get_mut().unwrap();
            std::mem::take(&mut *events)
        }
    }

    impl DbWithTestSystem for TestDb {
        fn test_system(&self) -> &TestSystem {
            &self.system
@@ -228,6 +248,9 @@ pub(crate) mod tests {

    #[salsa::db]
    impl salsa::Database for TestDb {
        fn salsa_event(&self, _event: &dyn Fn() -> Event) {}
        fn salsa_event(&self, event: &dyn Fn() -> Event) {
            let mut events = self.events.lock().unwrap();
            events.push(event());
        }
    }
}

@@ -8,9 +8,10 @@ use red_knot_python_semantic::types::Type;
use red_knot_python_semantic::{HasTy, ModuleName, SemanticModel};
use ruff_db::files::File;
use ruff_db::parsed::{parsed_module, ParsedModule};
use ruff_db::source::{source_text, SourceText};
use ruff_db::source::{line_index, source_text, SourceText};
use ruff_python_ast as ast;
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
use ruff_text_size::{Ranged, TextSize};

use crate::db::Db;

@@ -49,7 +50,18 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
        visitor.visit_body(&ast.body);
        diagnostics = visitor.diagnostics;
    } else {
        diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
        let path = file_id.path(db);
        let line_index = line_index(db.upcast(), file_id);
        diagnostics.extend(parsed.errors().iter().map(|err| {
            let source_location = line_index.source_location(err.location.start(), source.as_str());
            format!(
                "{}:{}:{}: {}",
                path.as_str(),
                source_location.row,
                source_location.column,
                err,
            )
        }));
    }

    Diagnostics::from(diagnostics)
@@ -97,6 +109,20 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
    Diagnostics::from(context.diagnostics.take())
}

fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSize) -> String {
    let source_location = context
        .semantic
        .line_index()
        .source_location(start, context.source_text());
    format!(
        "{}:{}:{}: {}",
        context.semantic.file_path().as_str(),
        source_location.row,
        source_location.column,
        message,
    )
}

fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
    match import {
        AnyImportRef::Import(import) => {
@@ -104,7 +130,11 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
            let ty = alias.ty(&context.semantic);

            if ty.is_unbound() {
                context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
                context.push_diagnostic(format_diagnostic(
                    context,
                    &format!("Unresolved import '{}'", &alias.name),
                    alias.start(),
                ));
            }
        }
    }
@@ -113,7 +143,11 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
            let ty = alias.ty(&context.semantic);

            if ty.is_unbound() {
                context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
                context.push_diagnostic(format_diagnostic(
                    context,
                    &format!("Unresolved import '{}'", &alias.name),
                    alias.start(),
                ));
            }
        }
    }
@@ -127,12 +161,17 @@ fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
    let semantic = &context.semantic;
    match name.ty(semantic) {
        Type::Unbound => {
            context.push_diagnostic(format!("Name '{}' used when not defined.", &name.id));
            context.push_diagnostic(format_diagnostic(
                context,
                &format!("Name '{}' used when not defined.", &name.id),
                name.start(),
            ));
        }
        Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
            context.push_diagnostic(format!(
                "Name '{}' used when possibly not defined.",
                &name.id
            context.push_diagnostic(format_diagnostic(
                context,
                &format!("Name '{}' used when possibly not defined.", &name.id),
                name.start(),
            ));
        }
        _ => {}
@@ -303,9 +342,18 @@ enum AnyImportRef<'a> {
    ImportFrom(&'a ast::StmtImportFrom),
}

impl Ranged for AnyImportRef<'_> {
    fn range(&self) -> ruff_text_size::TextRange {
        match self {
            AnyImportRef::Import(import) => import.range(),
            AnyImportRef::ImportFrom(import) => import.range(),
        }
    }
}

#[cfg(test)]
mod tests {
    use red_knot_python_semantic::{Program, PythonVersion, SearchPathSettings};
    use red_knot_python_semantic::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
    use ruff_db::files::system_path_to_file;
    use ruff_db::system::{DbWithTestSystem, SystemPathBuf};

@@ -320,16 +368,23 @@ mod tests {
    fn setup_db_with_root(src_root: SystemPathBuf) -> TestDb {
        let db = TestDb::new();

        Program::new(
        db.memory_file_system()
            .create_directory_all(&src_root)
            .unwrap();

        Program::from_settings(
            &db,
            PythonVersion::default(),
            SearchPathSettings {
                extra_paths: Vec::new(),
                src_root,
                site_packages: vec![],
                custom_typeshed: None,
            ProgramSettings {
                target_version: PythonVersion::default(),
                search_paths: SearchPathSettings {
                    extra_paths: Vec::new(),
                    src_root,
                    site_packages: vec![],
                    custom_typeshed: None,
                },
            },
        );
        )
        .expect("Valid program settings");

        db
    }
@@ -356,10 +411,17 @@ mod tests {

        assert_eq!(
            *messages,
            vec![
                "Name 'flag' used when not defined.",
                "Name 'y' used when possibly not defined."
            ]
            if cfg!(windows) {
                vec![
                    "\\src\\a.py:3:4: Name 'flag' used when not defined.",
                    "\\src\\a.py:5:1: Name 'y' used when possibly not defined.",
                ]
            } else {
                vec![
                    "/src/a.py:3:4: Name 'flag' used when not defined.",
                    "/src/a.py:5:1: Name 'y' used when possibly not defined.",
                ]
            }
        );
    }
}

@@ -1,9 +1,10 @@
use salsa::{Durability, Setter as _};
use std::{collections::BTreeMap, sync::Arc};

use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};

pub use metadata::{PackageMetadata, WorkspaceMetadata};
use ruff_db::source::{source_text, SourceDiagnostic};
use ruff_db::{
    files::{system_path_to_file, File},
    system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
@@ -345,12 +346,27 @@ impl Package {
    }
}

#[salsa::tracked]
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
    let path = file.path(db);
    let _span = tracing::debug_span!("check_file", file=%path).entered();
    tracing::debug!("Checking file {path}");

    let mut diagnostics = Vec::new();

    let source_diagnostics = source_text::accumulated::<SourceDiagnostic>(db.upcast(), file);
    // TODO(micha): Consider using a single accumulator for all diagnostics
    diagnostics.extend(
        source_diagnostics
            .iter()
            .map(std::string::ToString::to_string),
    );

    // Abort checking if there are IO errors.
    if source_text(db.upcast(), file).has_read_error() {
        return Diagnostics::from(diagnostics);
    }

    diagnostics.extend_from_slice(lint_syntax(db, file));
    diagnostics.extend_from_slice(lint_semantic(db, file));
    Diagnostics::from(diagnostics)
@@ -398,3 +414,48 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {

    files
}

#[cfg(test)]
mod tests {
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::source_text;
    use ruff_db::system::{DbWithTestSystem, SystemPath};
    use ruff_db::testing::assert_function_query_was_not_run;

    use crate::db::tests::TestDb;
    use crate::lint::{lint_syntax, Diagnostics};
    use crate::workspace::check_file;

    #[test]
    fn check_file_skips_linting_when_file_cant_be_read() -> ruff_db::system::Result<()> {
        let mut db = TestDb::new();
        let path = SystemPath::new("test.py");

        db.write_file(path, "x = 10")?;
        let file = system_path_to_file(&db, path).unwrap();

        // Now the file gets deleted before we had a chance to read its source text.
        db.memory_file_system().remove_file(path)?;
        file.sync(&mut db);

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file),
            Diagnostics::List(vec![
                "Failed to read file: No such file or directory".to_string()
            ])
        );

        let events = db.take_salsa_events();
        assert_function_query_was_not_run(&db, lint_syntax, file, &events);

        // The user now creates a new file with an empty text. The source text
        // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(check_file(&db, file), Diagnostics::Empty);

        Ok(())
    }
}

@@ -20,8 +20,7 @@ fn setup_db(workspace_root: SystemPathBuf) -> anyhow::Result<RootDatabase> {
        target_version: PythonVersion::default(),
        search_paths,
    };
    let db = RootDatabase::new(workspace, settings, system);
    Ok(db)
    RootDatabase::new(workspace, settings, system)
}

/// Test that all snippets in the test corpus can be checked without panicking

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.5.7"
version = "0.6.0"
publish = true
authors = { workspace = true }
edition = { workspace = true }

@@ -268,8 +268,7 @@ mod test {

    // Run
    let diagnostics = check(
        // Notebooks are not included by default
        &[tempdir.path().to_path_buf(), notebook],
        &[tempdir.path().to_path_buf()],
        &pyproject_config,
        &ConfigArguments::default(),
        flags::Cache::Disabled,

@@ -1806,7 +1806,7 @@ select = ["UP006"]
}

#[test]
fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
fn checks_notebooks_in_stable() -> anyhow::Result<()> {
    let tempdir = TempDir::new()?;
    std::fs::write(
        tempdir.path().join("main.ipynb"),
@@ -1853,7 +1853,6 @@ fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
        .args(STDIN_BASE_OPTIONS)
        .arg("--select")
        .arg("F401")
        .arg("--preview")
        .current_dir(&tempdir)
        , @r###"
    success: false
@@ -1867,64 +1866,3 @@ fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
    "###);
    Ok(())
}

#[test]
fn ignores_notebooks_in_stable() -> anyhow::Result<()> {
    let tempdir = TempDir::new()?;
    std::fs::write(
        tempdir.path().join("main.ipynb"),
        r#"
{
  "cells": [
    {
      "cell_type": "code",
      "execution_count": null,
      "id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
      "metadata": {},
      "outputs": [],
      "source": [
        "import random"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3 (ipykernel)",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.12.0"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 5
}
"#,
    )?;

    assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
        .args(STDIN_BASE_OPTIONS)
        .arg("--select")
        .arg("F401")
        .current_dir(&tempdir)
        , @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    All checks passed!

    ----- stderr -----
    warning: No Python files found under the given path(s)
    "###);
    Ok(())
}

@@ -60,6 +60,7 @@ file_resolver.force_exclude = false
file_resolver.include = [
    "*.py",
    "*.pyi",
    "*.ipynb",
    "**/pyproject.toml",
]
file_resolver.extend_include = []
@@ -209,6 +210,7 @@ linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
    "[BASEPATH]",
    "[BASEPATH]/src",
]
linter.tab_size = 4
linter.line_length = 88
@@ -260,10 +262,11 @@ linter.flake8_import_conventions.aliases = {
    seaborn = sns,
    tensorflow = tf,
    tkinter = tk,
    xml.etree.ElementTree = ET,
}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = true
linter.flake8_pytest_style.fixture_parentheses = false
linter.flake8_pytest_style.parametrize_names_type = tuple
linter.flake8_pytest_style.parametrize_values_type = list
linter.flake8_pytest_style.parametrize_values_row_type = tuple
@@ -277,7 +280,7 @@ linter.flake8_pytest_style.raises_require_match_for = [
    socket.error,
]
linter.flake8_pytest_style.raises_extend_require_match_for = []
linter.flake8_pytest_style.mark_parentheses = true
linter.flake8_pytest_style.mark_parentheses = false
linter.flake8_quotes.inline_quotes = double
linter.flake8_quotes.multiline_quotes = double
linter.flake8_quotes.docstring_quotes = double

@@ -52,7 +52,7 @@ fn setup_case() -> Case {
        },
    };

    let mut db = RootDatabase::new(metadata, settings, system);
    let mut db = RootDatabase::new(metadata, settings, system).unwrap();
    let parser = system_path_to_file(&db, parser_path).unwrap();

    db.workspace().open_file(&mut db, parser);
@@ -89,7 +89,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
            let Case { db, parser, .. } = case;
            let result = db.check_file(*parser).unwrap();

            assert_eq!(result.len(), 403);
            assert_eq!(result.len(), 402);
        },
        BatchSize::SmallInput,
    );
@@ -104,7 +104,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
            let Case { db, parser, .. } = case;
            let result = db.check_file(*parser).unwrap();

            assert_eq!(result.len(), 403);
            assert_eq!(result.len(), 402);
        },
        BatchSize::SmallInput,
    );

@@ -27,6 +27,7 @@ ignore = { workspace = true, optional = true }
matchit = { workspace = true }
salsa = { workspace = true }
path-slash = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
tracing-tree = { workspace = true, optional = true }

@@ -1,7 +1,9 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;

use countme::Count;
use salsa::Accumulator;

use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
@@ -15,8 +17,42 @@ use crate::Db;
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let path = file.path(db);
    let _span = tracing::trace_span!("source_text", file = %path).entered();
    let mut has_read_error = false;

    let is_notebook = match path {
    let kind = if is_notebook(file.path(db)) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read notebook {path}: {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
                    .accumulate(db);
                Notebook::empty()
            })
            .into()
    } else {
        file.read_to_string(db)
            .unwrap_or_else(|error| {
                tracing::debug!("Failed to read file {path}: {error}");

                has_read_error = true;
                SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);
                String::new()
            })
            .into()
    };

    SourceText {
        inner: Arc::new(SourceTextInner {
            kind,
            has_read_error,
            count: Count::new(),
        }),
    }
}

fn is_notebook(path: &FilePath) -> bool {
    match path {
        FilePath::System(system) => system.extension().is_some_and(|extension| {
            PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
        }),
@@ -26,33 +62,6 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
        })
    }
    FilePath::Vendored(_) => false,
    };

    if is_notebook {
        // TODO(micha): Proper error handling and emit a diagnostic. Tackle it together with `source_text`.
        let notebook = file.read_to_notebook(db).unwrap_or_else(|error| {
            tracing::error!("Failed to load notebook: {error}");
            Notebook::empty()
        });

        return SourceText {
            inner: Arc::new(SourceTextInner {
                kind: SourceTextKind::Notebook(notebook),
                count: Count::new(),
            }),
        };
    }

    let content = file.read_to_string(db).unwrap_or_else(|error| {
        tracing::error!("Failed to load file: {error}");
        String::default()
    });

    SourceText {
        inner: Arc::new(SourceTextInner {
            kind: SourceTextKind::Text(content),
            count: Count::new(),
        }),
    }
}

@@ -87,6 +96,11 @@ impl SourceText {
    pub fn is_notebook(&self) -> bool {
        matches!(&self.inner.kind, SourceTextKind::Notebook(_))
    }

    /// Returns `true` if there was an error when reading the content of the file.
    pub fn has_read_error(&self) -> bool {
        self.inner.has_read_error
    }
}

impl Deref for SourceText {
@@ -118,6 +132,7 @@ impl std::fmt::Debug for SourceText {
struct SourceTextInner {
    count: Count<SourceText>,
    kind: SourceTextKind,
    has_read_error: bool,
}

#[derive(Eq, PartialEq)]
@@ -126,6 +141,35 @@ enum SourceTextKind {
    Notebook(Notebook),
}

impl From<String> for SourceTextKind {
    fn from(value: String) -> Self {
        SourceTextKind::Text(value)
    }
}

impl From<Notebook> for SourceTextKind {
    fn from(notebook: Notebook) -> Self {
        SourceTextKind::Notebook(notebook)
    }
}

#[salsa::accumulator]
pub struct SourceDiagnostic(Arc<SourceTextError>);

impl std::fmt::Display for SourceDiagnostic {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}

#[derive(Debug, thiserror::Error)]
pub enum SourceTextError {
    #[error("Failed to read notebook: {0}`")]
    FailedToReadNotebook(#[from] ruff_notebook::NotebookError),
    #[error("Failed to read file: {0}")]
    FailedToReadFile(#[from] std::io::Error),
}

/// Computes the [`LineIndex`] for `file`.
#[salsa::tracked]
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {

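The rewritten `source_text` no longer just logs read failures: it pushes a `SourceDiagnostic` into a salsa accumulator, remembers `has_read_error`, and falls back to empty content so later queries stay total while `check_file` can bail out early. The shape of that pattern, restated as a small Python sketch with invented names:

def read_source(path: str, diagnostics: list[str]) -> tuple[str, bool]:
    """Return (content, has_read_error), accumulating a diagnostic on failure."""
    try:
        with open(path, encoding="utf-8") as f:
            return f.read(), False
    except OSError as error:
        diagnostics.append(f"Failed to read file: {error}")
        return "", True  # empty fallback keeps downstream consumers working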
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.5.7"
version = "0.6.0"
publish = false
authors = { workspace = true }
edition = { workspace = true }

@@ -89,3 +89,26 @@ async def func():
async def func():
    async with asyncio.timeout(delay=0.2), asyncio.timeout(delay=0.2):
        ...


# Don't trigger for blocks with a yield statement
async def foo():
    with trio.fail_after(1):
        yield


async def foo():  # even if only one branch contains a yield, we skip the lint
    with trio.fail_after(1):
        if something:
            ...
        else:
            yield


# https://github.com/astral-sh/ruff/issues/12873
@asynccontextmanager
async def good_code():
    with anyio.fail_after(10):
        # There's no await keyword here, but we presume that there
        # will be in the caller we yield to, so this is safe.
        yield

@@ -55,3 +55,14 @@ max({x.id for x in bar})

# should not be linted...
sum({x.id for x in bar})


# https://github.com/astral-sh/ruff/issues/12891
from collections.abc import AsyncGenerator


async def test() -> None:
    async def async_gen() -> AsyncGenerator[bool, None]:
        yield True

    assert all([v async for v in async_gen()])  # OK

@@ -368,3 +368,11 @@ def foo() -> int:
    if baz() > 3:
        return 1
    bar()


def f():
    if a:
        return b
    else:
        with c:
            d

@@ -244,3 +244,11 @@ def f():
        return True
    else:
        return False


def has_untracted_files():
    if b'Untracked files' in result.stdout:
        return True
    else:
        \
        return False

crates/ruff_linter/resources/test/fixtures/flake8_type_checking/kw_only.py (new file, 18 lines)
@@ -0,0 +1,18 @@
"""Test: avoid marking a `KW_ONLY` annotation as typing-only."""

from __future__ import annotations

from dataclasses import KW_ONLY, dataclass, Field


@dataclass
class Test1:
    a: int
    _: KW_ONLY
    b: str


@dataclass
class Test2:
    a: int
    b: Field

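The fixture exercises a `dataclasses.KW_ONLY` edge case: the sentinel is resolved when the class is created, so the import has to survive at runtime and must not be moved into an `if TYPE_CHECKING:` block. A quick refresher on what the sentinel does at runtime (my example, not part of the fixture):

from dataclasses import KW_ONLY, dataclass

@dataclass
class Point:
    x: float
    _: KW_ONLY      # pseudo-field: every field after it becomes keyword-only
    y: float = 0.0

Point(1.0, y=2.0)   # OK
# Point(1.0, 2.0)  -> TypeError: y is now keyword-only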
@@ -111,3 +111,7 @@ def can_access_inside_nested[T](t: T) -> T:  # OK
        return x

    bar(t)


def cannot_access_in_default[T](t: T = T):  # F821
    pass

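The added case pins down a PEP 695 scoping rule: type parameters are visible in annotation positions, but default-value expressions are evaluated in the enclosing scope, where the parameter name is unbound. A minimal contrast, written for this note and requiring Python 3.12 syntax:

def ok[T](t: T) -> T:        # T is fine in annotation positions
    return t

# def bad[T](t: T = T): ...  # NameError at definition time: defaults are
#                            # evaluated in the enclosing scope, hence F821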
@@ -139,3 +139,33 @@ print("%.20X" % 1)

print("%2X" % 1)
print("%02X" % 1)

# UP031 (no longer false negatives, but no fix is offered because of the more complex syntax)

"%d.%d" % (a, b)

"%*s" % (5, "hi")

"%d" % (flt,)

"%c" % (some_string,)

"%.2r" % (1.25)

"%.*s" % (5, "hi")

"%i" % (flt,)

"%()s" % {"": "empty"}

"%s" % {"k": "v"}

"%()s" % {"": "bar"}

"%(1)s" % {"1": "bar"}

"%(a)s" % {"a": 1, "a": 2}

"%(1)s" % {1: 2, "1": 2}

"%(and)s" % {"and": 2}

@@ -1,34 +1,8 @@
# OK
b"%s" % (b"bytestring",)

"%*s" % (5, "hi")

"%d" % (flt,)

"%c" % (some_string,)

"%4%" % ()

"%.2r" % (1.25)

i % 3

"%.*s" % (5, "hi")

"%i" % (flt,)

"%()s" % {"": "empty"}

"%s" % {"k": "v"}

"%(1)s" % {"1": "bar"}

"%(a)s" % {"a": 1, "a": 2}

pytest.param('"%8s" % (None,)', id="unsafe width-string conversion"),

"%()s" % {"": "bar"}

"%(1)s" % {1: 2, "1": 2}

"%(and)s" % {"and": 2}

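These fixtures document why UP031 now flags the patterns without offering an autofix: several `%` conversions change the value, so a mechanical rewrite to an f-string would not be behavior-preserving. One concrete case, worked by hand:

"%d" % (3.7,)   # -> '3'   (%d truncates the float toward zero)
f"{3.7}"        # -> '3.7' (a naive rewrite changes the output)
f"{int(3.7)}"   # -> '3'   (a faithful fix would have to insert a conversion)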
@@ -45,3 +45,17 @@ def negative_cases():

    import django.utils.translations
    y = django.utils.translations.gettext("This {should} be understood as a translation string too!")

    # Calling `gettext.install()` literally monkey-patches `builtins._ = ...`,
    # so even the fully qualified access of `builtins._()` should be considered
    # a possible `gettext` call.
    import builtins
    another = 42
    z = builtins._("{another} translation string")

    # Usually logging strings use `%`-style string interpolation,
    # but `logging` can be configured to use `{}` the same as f-strings,
    # so these should also be ignored.
    # See https://docs.python.org/3/howto/logging-cookbook.html#formatting-styles
    import logging
    logging.info("yet {another} non-f-string")

@@ -1077,12 +1077,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
        }
        if checker.enabled(Rule::MissingFStringSyntax) {
            for string_literal in value.literals() {
                ruff::rules::missing_fstring_syntax(
                    &mut checker.diagnostics,
                    string_literal,
                    checker.locator,
                    &checker.semantic,
                );
                ruff::rules::missing_fstring_syntax(checker, string_literal);
            }
        }
    }
@@ -1378,12 +1373,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
        }
        if checker.enabled(Rule::MissingFStringSyntax) {
            for string_literal in value.as_slice() {
                ruff::rules::missing_fstring_syntax(
                    &mut checker.diagnostics,
                    string_literal,
                    checker.locator,
                    &checker.semantic,
                );
                ruff::rules::missing_fstring_syntax(checker, string_literal);
            }
        }
    }
@@ -1498,7 +1488,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
        }

        if checker.enabled(Rule::UnnecessaryDictComprehensionForIterable) {
            ruff::rules::unnecessary_dict_comprehension_for_iterable(checker, dict_comp);
            flake8_comprehensions::rules::unnecessary_dict_comprehension_for_iterable(
                checker, dict_comp,
            );
        }

        if checker.enabled(Rule::FunctionUsesLoopVariable) {

@@ -229,12 +229,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
            Rule::SuperfluousElseContinue,
            Rule::SuperfluousElseBreak,
        ]) {
            flake8_return::rules::function(
                checker,
                body,
                decorator_list,
                returns.as_ref().map(AsRef::as_ref),
            );
            flake8_return::rules::function(checker, function_def);
        }
        if checker.enabled(Rule::UselessReturn) {
            pylint::rules::useless_return(
@@ -268,8 +263,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
        if checker.enabled(Rule::TooManyArguments) {
            pylint::rules::too_many_arguments(checker, function_def);
        }
        if checker.enabled(Rule::TooManyPositional) {
            pylint::rules::too_many_positional(checker, function_def);
        if checker.enabled(Rule::TooManyPositionalArguments) {
            pylint::rules::too_many_positional_arguments(checker, function_def);
        }
        if checker.enabled(Rule::TooManyReturnStatements) {
            if let Some(diagnostic) = pylint::rules::too_many_return_statements(

@@ -691,6 +691,14 @@ impl<'a> Visitor<'a> for Checker<'a> {
                    self.semantic(),
                );

                // The default values of the parameters need to be evaluated in the enclosing
                // scope.
                for parameter in &**parameters {
                    if let Some(expr) = parameter.default() {
                        self.visit_expr(expr);
                    }
                }

                self.semantic.push_scope(ScopeKind::Type);

                if let Some(type_params) = type_params {
@@ -715,9 +723,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
                        }
                    }
                }
                if let Some(expr) = parameter.default() {
                    self.visit_expr(expr);
                }
            }
            if let Some(expr) = returns {
                match annotation {
@@ -1290,8 +1295,8 @@ impl<'a> Visitor<'a> for Checker<'a> {
                let Keyword { arg, value, .. } = keyword;
                match (arg.as_ref(), value) {
                    // Ex) NamedTuple("a", **{"a": int})
                    (None, Expr::Dict(ast::ExprDict { items, .. })) => {
                        for ast::DictItem { key, value } in items {
                    (None, Expr::Dict(dict)) => {
                        for ast::DictItem { key, value } in dict {
                            if let Some(key) = key.as_ref() {
                                self.visit_non_type_definition(key);
                                self.visit_type_definition(value);

@@ -4,12 +4,12 @@
/// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
use std::fmt::Formatter;

use strum_macros::{AsRefStr, EnumIter};

use crate::registry::{AsRule, Linter};
use crate::rule_selector::is_single_rule_selector;
use crate::rules;

use strum_macros::{AsRefStr, EnumIter};

#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str, &'static str);

@@ -206,12 +206,12 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "E0237") => (RuleGroup::Stable, rules::pylint::rules::NonSlotAssignment),
        (Pylint, "E0241") => (RuleGroup::Stable, rules::pylint::rules::DuplicateBases),
        (Pylint, "E0302") => (RuleGroup::Stable, rules::pylint::rules::UnexpectedSpecialMethodSignature),
        (Pylint, "E0303") => (RuleGroup::Preview, rules::pylint::rules::InvalidLengthReturnType),
        (Pylint, "E0303") => (RuleGroup::Stable, rules::pylint::rules::InvalidLengthReturnType),
        (Pylint, "E0304") => (RuleGroup::Preview, rules::pylint::rules::InvalidBoolReturnType),
        (Pylint, "E0305") => (RuleGroup::Preview, rules::pylint::rules::InvalidIndexReturnType),
        (Pylint, "E0305") => (RuleGroup::Stable, rules::pylint::rules::InvalidIndexReturnType),
        (Pylint, "E0307") => (RuleGroup::Stable, rules::pylint::rules::InvalidStrReturnType),
        (Pylint, "E0308") => (RuleGroup::Preview, rules::pylint::rules::InvalidBytesReturnType),
        (Pylint, "E0309") => (RuleGroup::Preview, rules::pylint::rules::InvalidHashReturnType),
        (Pylint, "E0308") => (RuleGroup::Stable, rules::pylint::rules::InvalidBytesReturnType),
        (Pylint, "E0309") => (RuleGroup::Stable, rules::pylint::rules::InvalidHashReturnType),
        (Pylint, "E0604") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllObject),
        (Pylint, "E0605") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllFormat),
        (Pylint, "E0643") => (RuleGroup::Stable, rules::pylint::rules::PotentialIndexError),
@@ -225,8 +225,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "E1307") => (RuleGroup::Stable, rules::pylint::rules::BadStringFormatType),
        (Pylint, "E1310") => (RuleGroup::Stable, rules::pylint::rules::BadStrStripCall),
        (Pylint, "E1507") => (RuleGroup::Stable, rules::pylint::rules::InvalidEnvvarValue),
        (Pylint, "E1519") => (RuleGroup::Preview, rules::pylint::rules::SingledispatchMethod),
        (Pylint, "E1520") => (RuleGroup::Preview, rules::pylint::rules::SingledispatchmethodFunction),
        (Pylint, "E1519") => (RuleGroup::Stable, rules::pylint::rules::SingledispatchMethod),
        (Pylint, "E1520") => (RuleGroup::Stable, rules::pylint::rules::SingledispatchmethodFunction),
        (Pylint, "E1700") => (RuleGroup::Stable, rules::pylint::rules::YieldFromInAsyncFunction),
        (Pylint, "E2502") => (RuleGroup::Stable, rules::pylint::rules::BidirectionalUnicode),
        (Pylint, "E2510") => (RuleGroup::Stable, rules::pylint::rules::InvalidCharacterBackspace),
@@ -248,7 +248,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "R0914") => (RuleGroup::Preview, rules::pylint::rules::TooManyLocals),
        (Pylint, "R0915") => (RuleGroup::Stable, rules::pylint::rules::TooManyStatements),
        (Pylint, "R0916") => (RuleGroup::Preview, rules::pylint::rules::TooManyBooleanExpressions),
        (Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositional),
        (Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositionalArguments),
        (Pylint, "R1701") => (RuleGroup::Removed, rules::pylint::rules::RepeatedIsinstanceCalls),
        (Pylint, "R1702") => (RuleGroup::Preview, rules::pylint::rules::TooManyNestedBlocks),
        (Pylint, "R1704") => (RuleGroup::Stable, rules::pylint::rules::RedefinedArgumentFromLocal),
@@ -256,7 +256,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "R1711") => (RuleGroup::Stable, rules::pylint::rules::UselessReturn),
        (Pylint, "R1714") => (RuleGroup::Stable, rules::pylint::rules::RepeatedEqualityComparison),
        (Pylint, "R1722") => (RuleGroup::Stable, rules::pylint::rules::SysExitAlias),
        (Pylint, "R1730") => (RuleGroup::Preview, rules::pylint::rules::IfStmtMinMax),
        (Pylint, "R1730") => (RuleGroup::Stable, rules::pylint::rules::IfStmtMinMax),
        (Pylint, "R1733") => (RuleGroup::Preview, rules::pylint::rules::UnnecessaryDictIndexLookup),
        (Pylint, "R1736") => (RuleGroup::Stable, rules::pylint::rules::UnnecessaryListIndexLookup),
        (Pylint, "R2004") => (RuleGroup::Stable, rules::pylint::rules::MagicValueComparison),
@@ -273,13 +273,13 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "W0129") => (RuleGroup::Stable, rules::pylint::rules::AssertOnStringLiteral),
        (Pylint, "W0131") => (RuleGroup::Stable, rules::pylint::rules::NamedExprWithoutContext),
        (Pylint, "W0133") => (RuleGroup::Stable, rules::pylint::rules::UselessExceptionStatement),
        (Pylint, "W0211") => (RuleGroup::Preview, rules::pylint::rules::BadStaticmethodArgument),
        (Pylint, "W0211") => (RuleGroup::Stable, rules::pylint::rules::BadStaticmethodArgument),
        (Pylint, "W0245") => (RuleGroup::Stable, rules::pylint::rules::SuperWithoutBrackets),
        (Pylint, "W0406") => (RuleGroup::Stable, rules::pylint::rules::ImportSelf),
        (Pylint, "W0602") => (RuleGroup::Stable, rules::pylint::rules::GlobalVariableNotAssigned),
        (Pylint, "W0603") => (RuleGroup::Stable, rules::pylint::rules::GlobalStatement),
        (Pylint, "W0604") => (RuleGroup::Stable, rules::pylint::rules::GlobalAtModuleLevel),
        (Pylint, "W0642") => (RuleGroup::Preview, rules::pylint::rules::SelfOrClsAssignment),
        (Pylint, "W0642") => (RuleGroup::Stable, rules::pylint::rules::SelfOrClsAssignment),
        (Pylint, "W0711") => (RuleGroup::Stable, rules::pylint::rules::BinaryOpException),
        (Pylint, "W1501") => (RuleGroup::Stable, rules::pylint::rules::BadOpenMode),
        (Pylint, "W1508") => (RuleGroup::Stable, rules::pylint::rules::InvalidEnvvarDefault),
@@ -378,6 +378,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Flake8Comprehensions, "17") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryMap),
        (Flake8Comprehensions, "18") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinDictCall),
        (Flake8Comprehensions, "19") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryComprehensionInCall),
        (Flake8Comprehensions, "20") => (RuleGroup::Preview, rules::flake8_comprehensions::rules::UnnecessaryDictComprehensionForIterable),

        // flake8-debugger
        (Flake8Debugger, "0") => (RuleGroup::Stable, rules::flake8_debugger::rules::Debugger),
@@ -509,7 +510,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pyupgrade, "024") => (RuleGroup::Stable, rules::pyupgrade::rules::OSErrorAlias),
        (Pyupgrade, "025") => (RuleGroup::Stable, rules::pyupgrade::rules::UnicodeKindPrefix),
        (Pyupgrade, "026") => (RuleGroup::Stable, rules::pyupgrade::rules::DeprecatedMockImport),
        (Pyupgrade, "027") => (RuleGroup::Stable, rules::pyupgrade::rules::UnpackedListComprehension),
        (Pyupgrade, "027") => (RuleGroup::Deprecated, rules::pyupgrade::rules::UnpackedListComprehension),
        (Pyupgrade, "028") => (RuleGroup::Stable, rules::pyupgrade::rules::YieldInForLoop),
        (Pyupgrade, "029") => (RuleGroup::Stable, rules::pyupgrade::rules::UnnecessaryBuiltinImport),
        (Pyupgrade, "030") => (RuleGroup::Stable, rules::pyupgrade::rules::FormatLiterals),
@@ -778,9 +779,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Flake8Pyi, "055") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnnecessaryTypeUnion),
        (Flake8Pyi, "056") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnsupportedMethodCallOnAll),
        (Flake8Pyi, "058") => (RuleGroup::Stable, rules::flake8_pyi::rules::GeneratorReturnFromIterMethod),
        (Flake8Pyi, "057") => (RuleGroup::Preview, rules::flake8_pyi::rules::ByteStringUsage),
        (Flake8Pyi, "057") => (RuleGroup::Stable, rules::flake8_pyi::rules::ByteStringUsage),
        (Flake8Pyi, "059") => (RuleGroup::Preview, rules::flake8_pyi::rules::GenericNotLastBaseClass),
        (Flake8Pyi, "062") => (RuleGroup::Preview, rules::flake8_pyi::rules::DuplicateLiteralMember),
        (Flake8Pyi, "062") => (RuleGroup::Stable, rules::flake8_pyi::rules::DuplicateLiteralMember),
        (Flake8Pyi, "063") => (RuleGroup::Preview, rules::flake8_pyi::rules::PrePep570PositionalArgument),
        (Flake8Pyi, "064") => (RuleGroup::Preview, rules::flake8_pyi::rules::RedundantFinalLiteral),
        (Flake8Pyi, "066") => (RuleGroup::Preview, rules::flake8_pyi::rules::BadVersionInfoOrder),
@@ -789,8 +790,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Flake8PytestStyle, "001") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestFixtureIncorrectParenthesesStyle),
        (Flake8PytestStyle, "002") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestFixturePositionalArgs),
        (Flake8PytestStyle, "003") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestExtraneousScopeFunction),
        (Flake8PytestStyle, "004") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore),
        (Flake8PytestStyle, "005") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore),
        (Flake8PytestStyle, "004") => (RuleGroup::Deprecated, rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore),
        (Flake8PytestStyle, "005") => (RuleGroup::Deprecated, rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore),
        (Flake8PytestStyle, "006") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestParametrizeNamesWrongType),
        (Flake8PytestStyle, "007") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestParametrizeValuesWrongType),
        (Flake8PytestStyle, "008") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestPatchWithLambda),
@@ -951,7 +952,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Ruff, "022") => (RuleGroup::Preview, rules::ruff::rules::UnsortedDunderAll),
        (Ruff, "023") => (RuleGroup::Preview, rules::ruff::rules::UnsortedDunderSlots),
        (Ruff, "024") => (RuleGroup::Stable, rules::ruff::rules::MutableFromkeysValue),
        (Ruff, "025") => (RuleGroup::Preview, rules::ruff::rules::UnnecessaryDictComprehensionForIterable),
        (Ruff, "026") => (RuleGroup::Stable, rules::ruff::rules::DefaultFactoryKwarg),
        (Ruff, "027") => (RuleGroup::Preview, rules::ruff::rules::MissingFStringSyntax),
        (Ruff, "028") => (RuleGroup::Preview, rules::ruff::rules::InvalidFormatterSuppressionComment),
@@ -959,7 +959,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Ruff, "030") => (RuleGroup::Preview, rules::ruff::rules::AssertWithPrintMessage),
        (Ruff, "031") => (RuleGroup::Preview, rules::ruff::rules::IncorrectlyParenthesizedTupleInSubscript),
        (Ruff, "100") => (RuleGroup::Stable, rules::ruff::rules::UnusedNOQA),
        (Ruff, "101") => (RuleGroup::Preview, rules::ruff::rules::RedirectedNOQA),
        (Ruff, "101") => (RuleGroup::Stable, rules::ruff::rules::RedirectedNOQA),

        (Ruff, "200") => (RuleGroup::Stable, rules::ruff::rules::InvalidPyprojectToml),
        #[cfg(any(feature = "test-rules", test))]

@@ -151,16 +151,15 @@ pub(crate) fn add_to_dunder_all<'a>(
    stylist: &Stylist,
) -> Vec<Edit> {
    let (insertion_point, export_prefix_length) = match expr {
        Expr::List(ExprList { elts, range, .. }) => (
            elts.last()
                .map_or(range.end() - "]".text_len(), Ranged::end),
        Expr::List(ExprList { elts, .. }) => (
            elts.last().map_or(expr.end() - "]".text_len(), Ranged::end),
            elts.len(),
        ),
        Expr::Tuple(tup) if tup.parenthesized => (
            tup.elts
                .last()
                .map_or(tup.end() - ")".text_len(), Ranged::end),
            tup.elts.len(),
            tup.len(),
        ),
        Expr::Tuple(tup) if !tup.parenthesized => (
            tup.elts
@@ -168,7 +167,7 @@ pub(crate) fn add_to_dunder_all<'a>(
                .expect("unparenthesized empty tuple is not possible")
                .range()
                .end(),
            tup.elts.len(),
            tup.len(),
        ),
        _ => {
            // we don't know how to insert into this expression
@@ -318,26 +317,28 @@ pub(crate) fn adjust_indentation(
        line_indentation.contains('\t') && line_indentation.contains(' ')
    });

    if contains_multiline_string || mixed_indentation {
        let module_text = format!("def f():{}{contents}", stylist.line_ending().as_str());

        let mut tree = match_statement(&module_text)?;

        let embedding = match_function_def(&mut tree)?;

        let indented_block = match_indented_block(&mut embedding.body)?;
        indented_block.indent = Some(indentation);

        let module_text = indented_block.codegen_stylist(stylist);
        let module_text = module_text
            .strip_prefix(stylist.line_ending().as_str())
            .unwrap()
            .to_string();
        Ok(module_text)
    } else {
        // Otherwise, we can do a simple adjustment ourselves.
        Ok(dedent_to(contents, indentation))
    // For simple cases, try to do a manual dedent.
    if !contains_multiline_string && !mixed_indentation {
        if let Some(dedent) = dedent_to(contents, indentation) {
            return Ok(dedent);
        }
    }

    let module_text = format!("def f():{}{contents}", stylist.line_ending().as_str());

    let mut tree = match_statement(&module_text)?;

    let embedding = match_function_def(&mut tree)?;

    let indented_block = match_indented_block(&mut embedding.body)?;
    indented_block.indent = Some(indentation);

    let module_text = indented_block.codegen_stylist(stylist);
    let module_text = module_text
        .strip_prefix(stylist.line_ending().as_str())
        .unwrap()
        .to_string();
    Ok(module_text)
}

/// Determine if a vector contains only one, specific element.

Some files were not shown because too many files have changed in this diff.