Compare commits
131 Commits
PYI034...zb/fuzz-ca
| SHA1 |
|---|
| 48eb23b488 |
| 7f624cd0bb |
| dbbe7a773c |
| 5f09d4a90a |
| f8c20258ae |
| d8538d8c98 |
| 3642381489 |
| 1f07880d5c |
| d81b6cd334 |
| d99210c049 |
| 577653551c |
| 38a385fb6f |
| cd2ae5aa2d |
| 41694f21c6 |
| fccbe56d23 |
| c46555da41 |
| 0a27c9dabd |
| 3c9e76eb66 |
| 80f5cdcf66 |
| 35fe0e90da |
| 157b49a8ee |
| 8a6e223df5 |
| 5a48da53da |
| 58005b590c |
| 884835e386 |
| efd4407f7f |
| 761588a60e |
| e1eb188049 |
| ff19629b11 |
| cd80c9d907 |
| abb34828bd |
| cab7caf80b |
| d470f29093 |
| 1fbed6c325 |
| 4dcb7ddafe |
| 5be90c3a67 |
| d0dca7bfcf |
| 78210b198b |
| 4a2310b595 |
| fc392c663a |
| 81d3c419e9 |
| a6a3d3f656 |
| c847cad389 |
| 81e5830585 |
| 2b58705cc1 |
| 9f3235a37f |
| 62d650226b |
| 5d8a391a3e |
| ed7b98cf9b |
| 6591775cd9 |
| 1f82731856 |
| 874da9c400 |
| 375cead202 |
| 9ec690b8f8 |
| a48d779c4e |
| ba6c7f6897 |
| 8095ff0e55 |
| 24cd592a1d |
| a40bc6a460 |
| 577de6c599 |
| d8b1afbc6e |
| 9a3001b571 |
| ec2c7cad0e |
| 924741cb11 |
| 77e8da7497 |
| 5e64863895 |
| 78e4753d74 |
| eb55b9b5a0 |
| 0eb36e4345 |
| 5fcf0afff4 |
| b946cfd1f7 |
| 95c8f5fd0f |
| 89aa804b2d |
| f789b12705 |
| 3e36a7ab81 |
| 5c548dcc04 |
| bd30701980 |
| 2b6d66b793 |
| 147ea399fd |
| 907047bf4b |
| 13a1483f1e |
| be69f61b3e |
| f1f3bd1cd3 |
| 3bef23669f |
| f82ee8ea59 |
| b8a65182dd |
| fc15d8a3bd |
| b3b5c19105 |
| f8aae9b1d6 |
| 9180635171 |
| 3ef4b3bf32 |
| 5a3886c8b5 |
| 813ec23ecd |
| 13883414af |
| 84d4f114ef |
| 1c586b29e2 |
| d76a8518c2 |
| 5f0ee2670a |
| f8ca6c3316 |
| ba7b023f26 |
| e947d163b2 |
| 1cf4d2ff69 |
| 2308522f38 |
| 438f3d967b |
| 5bf4759cff |
| 2e9e96338e |
| 5fa7ace1f5 |
| 704868ca83 |
| dc71c8a484 |
| 2499297392 |
| 7b9189bb2c |
| d4cf61d98b |
| 5d91ba0b10 |
| a7e9f0c4b9 |
| c7d48e10e6 |
| 94dee2a36d |
| 555a5c9319 |
| 1279c20ee1 |
| ce3af27f59 |
| 71da1d6df5 |
| e598240f04 |
| c9b84e2a85 |
| d3f1c8e536 |
| eea6b31980 |
| b8dc780bdc |
| 93fdf7ed36 |
| b19f388249 |
| de947deee7 |
| c0c4ae14ac |
| 645ce7e5ec |
| 1430f21283 |
@@ -17,4 +17,7 @@ indent_size = 4
trim_trailing_whitespace = false

[*.md]
max_line_length = 100
max_line_length = 100

[*.toml]
indent_size = 4
1 .github/workflows/ci.yaml vendored
@@ -268,6 +268,7 @@ jobs:
       - uses: Swatinem/rust-cache@v2
         with:
           workspaces: "fuzz -> target"
+          cache-all-crates: "true"
       - name: "Install cargo-binstall"
         uses: cargo-bins/cargo-binstall@main
         with:
2 .github/workflows/publish-playground.yml vendored
@@ -47,7 +47,7 @@ jobs:
           working-directory: playground
       - name: "Deploy to Cloudflare Pages"
         if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
-        uses: cloudflare/wrangler-action@v3.11.0
+        uses: cloudflare/wrangler-action@v3.12.1
         with:
           apiToken: ${{ secrets.CF_API_TOKEN }}
           accountId: ${{ secrets.CF_ACCOUNT_ID }}
@@ -17,7 +17,7 @@ exclude: |

 repos:
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.22
+    rev: v0.23
     hooks:
       - id: validate-pyproject

@@ -53,13 +53,13 @@ repos:
         files: '^crates/.*/resources/mdtest/.*\.md'
         exclude: |
           (?x)^(
-            .*?invalid(_.+)_syntax.md
+            .*?invalid(_.+)*_syntax\.md
           )$
         additional_dependencies:
           - black==24.10.0

   - repo: https://github.com/crate-ci/typos
-    rev: v1.27.0
+    rev: v1.27.3
     hooks:
       - id: typos

@@ -73,7 +73,7 @@ repos:
     pass_filenames: false # This makes it a lot faster

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.2
+    rev: v0.7.4
     hooks:
       - id: ruff-format
       - id: ruff
49 CHANGELOG.md
@@ -1,5 +1,54 @@
# Changelog

## 0.7.4

### Preview features

- \[`flake8-datetimez`\] Detect usages of `datetime.max`/`datetime.min` (`DTZ901`) ([#14288](https://github.com/astral-sh/ruff/pull/14288))
- \[`flake8-logging`\] Implement `root-logger-calls` (`LOG015`) ([#14302](https://github.com/astral-sh/ruff/pull/14302))
- \[`flake8-no-pep420`\] Detect empty implicit namespace packages (`INP001`) ([#14236](https://github.com/astral-sh/ruff/pull/14236))
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI019`) ([#14238](https://github.com/astral-sh/ruff/pull/14238))
- \[`perflint`\] Implement quick-fix for `manual-list-comprehension` (`PERF401`) ([#13919](https://github.com/astral-sh/ruff/pull/13919))
- \[`pylint`\] Implement `shallow-copy-environ` (`W1507`) ([#14241](https://github.com/astral-sh/ruff/pull/14241))
- \[`ruff`\] Implement `none-not-at-end-of-union` (`RUF036`) ([#14314](https://github.com/astral-sh/ruff/pull/14314)); see the sketch after this list
- \[`ruff`\] Implement `unsafe-markup-call` from the `flake8-markupsafe` plugin (`RUF035`) ([#14224](https://github.com/astral-sh/ruff/pull/14224))
- \[`ruff`\] Report problems for `attrs` dataclasses (`RUF008`, `RUF009`) ([#14327](https://github.com/astral-sh/ruff/pull/14327))
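A minimal, hypothetical sketch of the annotation pattern that `none-not-at-end-of-union` targets; the function names below are placeholders and the snippet is not taken from Ruff's test suite.

```py
from __future__ import annotations

from typing import Union


# RUF036 prefers `None` to be the last member of a union annotation.
def parse(raw: None | str) -> Union[None, int]:  # both unions put None first
    return int(raw) if raw is not None else None


# Preferred spelling: `None` last.
def parse_preferred(raw: str | None) -> Union[int, None]:
    return int(raw) if raw is not None else None
```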
### Rule changes

- \[`flake8-boolean-trap`\] Exclude dunder methods that define operators (`FBT001`) ([#14203](https://github.com/astral-sh/ruff/pull/14203))
- \[`flake8-pyi`\] Add "replace with `Self`" fix (`PYI034`) ([#14217](https://github.com/astral-sh/ruff/pull/14217))
- \[`flake8-pyi`\] Always autofix `duplicate-union-members` (`PYI016`) ([#14270](https://github.com/astral-sh/ruff/pull/14270)); see the sketch after this list
- \[`flake8-pyi`\] Improve autofix for nested and mixed type unions for `unnecessary-type-union` (`PYI055`) ([#14272](https://github.com/astral-sh/ruff/pull/14272))
- \[`flake8-pyi`\] Mark fix as unsafe when type annotation contains comments for `duplicate-literal-member` (`PYI062`) ([#14268](https://github.com/astral-sh/ruff/pull/14268))
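A minimal, hypothetical sketch of the `duplicate-union-members` pattern that `PYI016` now always autofixes; the stub-style functions and names here are assumed for illustration only.

```py
from __future__ import annotations


# Repeating a union member is redundant; PYI016's fix removes the duplicate
# so each member appears exactly once.
def read(data: bytes | str | str) -> int | int: ...  # would be flagged


# Equivalent annotation after the autofix.
def read_fixed(data: bytes | str) -> int: ...
```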
### Server

- Use the current working directory to resolve settings from `ruff.configuration` ([#14352](https://github.com/astral-sh/ruff/pull/14352))

### Bug fixes

- Avoid conflicts between `PLC0414` (`useless-import-alias`) and `I002` (`missing-required-import`) by considering `lint.isort.required-imports` for `PLC0414` ([#14287](https://github.com/astral-sh/ruff/pull/14287))
- \[`flake8-type-checking`\] Skip quoting annotation if it becomes invalid syntax (`TCH001`)
- \[`flake8-pyi`\] Avoid using `typing.Self` in stub files pre-Python 3.11 (`PYI034`) ([#14230](https://github.com/astral-sh/ruff/pull/14230))
- \[`flake8-pytest-style`\] Flag `pytest.raises` call with keyword argument `expected_exception` (`PT011`) ([#14298](https://github.com/astral-sh/ruff/pull/14298))
- \[`flake8-simplify`\] Infer "unknown" truthiness for literal iterables whose items are all unpacks (`SIM222`) ([#14263](https://github.com/astral-sh/ruff/pull/14263))
- \[`flake8-type-checking`\] Fix false positives for `typing.Annotated` (`TCH001`) ([#14311](https://github.com/astral-sh/ruff/pull/14311))
- \[`pylint`\] Allow `await` at the top-level scope of a notebook (`PLE1142`) ([#14225](https://github.com/astral-sh/ruff/pull/14225))
- \[`pylint`\] Fix miscellaneous issues in `await-outside-async` detection (`PLE1142`) ([#14218](https://github.com/astral-sh/ruff/pull/14218))
- \[`pyupgrade`\] Avoid applying PEP 646 rewrites in invalid contexts (`UP044`) ([#14234](https://github.com/astral-sh/ruff/pull/14234))
- \[`pyupgrade`\] Detect permutations in redundant open modes (`UP015`) ([#14255](https://github.com/astral-sh/ruff/pull/14255)); see the sketch after this list
- \[`refurb`\] Avoid triggering `hardcoded-string-charset` for reordered sets (`FURB156`) ([#14233](https://github.com/astral-sh/ruff/pull/14233))
- \[`refurb`\] Further special cases added to `verbose-decimal-constructor` (`FURB157`) ([#14216](https://github.com/astral-sh/ruff/pull/14216))
- \[`refurb`\] Use `UserString` instead of non-existent `UserStr` (`FURB189`) ([#14209](https://github.com/astral-sh/ruff/pull/14209))
- \[`ruff`\] Avoid treating lowercase letters as `# noqa` codes (`RUF100`) ([#14229](https://github.com/astral-sh/ruff/pull/14229))
- \[`ruff`\] Do not report when `Optional` has no type arguments (`RUF013`) ([#14181](https://github.com/astral-sh/ruff/pull/14181))
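As a hedged illustration of the `UP015` entry above: `open` defaults to text read mode, so modes that merely restate it (including permutations such as `"tr"`) are redundant. The file name below is a placeholder and the snippet is not taken from the Ruff documentation.

```py
from pathlib import Path

Path("example.txt").write_text("hello\n")  # create a placeholder file so this runs

# "r", "rt", and permutations like "tr" restate open()'s default mode,
# so UP015's fix simply drops the mode argument.
with open("example.txt", "rt") as f:  # would be flagged
    first = f.read()

with open("example.txt") as f:  # preferred, equivalent form
    second = f.read()

assert first == second
```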
### Documentation

- Add "Notebook behavior" section for `F704`, `PLE1142` ([#14266](https://github.com/astral-sh/ruff/pull/14266))
- Document comment policy around fix safety ([#14300](https://github.com/astral-sh/ruff/pull/14300))

## 0.7.3

### Preview features
435 Cargo.lock generated
@@ -2,12 +2,6 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "Inflector"
|
||||
version = "0.11.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
@@ -123,9 +117,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.92"
|
||||
version = "1.0.93"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74f37166d7d48a0284b99dd824694c26119c700b53bf0d1540cdb147dbdaaf13"
|
||||
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
|
||||
|
||||
[[package]]
|
||||
name = "append-only-vec"
|
||||
@@ -176,6 +170,12 @@ version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.0"
|
||||
@@ -214,9 +214,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.10.0"
|
||||
version = "1.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
|
||||
checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"regex-automata 0.4.8",
|
||||
@@ -347,9 +347,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.20"
|
||||
version = "4.5.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8"
|
||||
checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -357,9 +357,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.20"
|
||||
version = "4.5.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54"
|
||||
checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -424,7 +424,7 @@ checksum = "2f8c93eb5f77c9050c7750e14f13ef1033a40a0aac70c6371535b6763a01438c"
|
||||
dependencies = [
|
||||
"nix 0.28.0",
|
||||
"terminfo",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
"which",
|
||||
"winapi",
|
||||
]
|
||||
@@ -812,6 +812,17 @@ dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "displaydoc"
|
||||
version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "doc-comment"
|
||||
version = "0.3.3"
|
||||
@@ -824,6 +835,12 @@ version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
|
||||
|
||||
[[package]]
|
||||
name = "dunce"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
|
||||
|
||||
[[package]]
|
||||
name = "dyn-clone"
|
||||
version = "1.0.17"
|
||||
@@ -1045,9 +1062,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.0"
|
||||
version = "0.15.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
|
||||
checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3"
|
||||
|
||||
[[package]]
|
||||
name = "hashlink"
|
||||
@@ -1108,6 +1125,124 @@ dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_collections"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"yoke",
|
||||
"zerofrom",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_locid"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"litemap",
|
||||
"tinystr",
|
||||
"writeable",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_locid_transform"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"icu_locid",
|
||||
"icu_locid_transform_data",
|
||||
"icu_provider",
|
||||
"tinystr",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_locid_transform_data"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
|
||||
|
||||
[[package]]
|
||||
name = "icu_normalizer"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"icu_collections",
|
||||
"icu_normalizer_data",
|
||||
"icu_properties",
|
||||
"icu_provider",
|
||||
"smallvec",
|
||||
"utf16_iter",
|
||||
"utf8_iter",
|
||||
"write16",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_normalizer_data"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
|
||||
|
||||
[[package]]
|
||||
name = "icu_properties"
|
||||
version = "1.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"icu_collections",
|
||||
"icu_locid_transform",
|
||||
"icu_properties_data",
|
||||
"icu_provider",
|
||||
"tinystr",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_properties_data"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
|
||||
|
||||
[[package]]
|
||||
name = "icu_provider"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"icu_locid",
|
||||
"icu_provider_macros",
|
||||
"stable_deref_trait",
|
||||
"tinystr",
|
||||
"writeable",
|
||||
"yoke",
|
||||
"zerofrom",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "icu_provider_macros"
|
||||
version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ident_case"
|
||||
version = "1.0.1"
|
||||
@@ -1116,12 +1251,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "0.5.0"
|
||||
version = "1.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
|
||||
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
|
||||
dependencies = [
|
||||
"unicode-bidi",
|
||||
"unicode-normalization",
|
||||
"idna_adapter",
|
||||
"smallvec",
|
||||
"utf8_iter",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna_adapter"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
|
||||
dependencies = [
|
||||
"icu_normalizer",
|
||||
"icu_properties",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1167,22 +1313,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.15.0",
|
||||
"hashbrown 0.15.1",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "indicatif"
|
||||
version = "0.17.8"
|
||||
version = "0.17.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
|
||||
checksum = "cbf675b85ed934d3c67b5c5469701eec7db22689d0a2139d856e0925fa28b281"
|
||||
dependencies = [
|
||||
"console",
|
||||
"instant",
|
||||
"number_prefix",
|
||||
"portable-atomic",
|
||||
"unicode-width 0.1.13",
|
||||
"unicode-width 0.2.0",
|
||||
"vt100",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1224,6 +1370,7 @@ dependencies = [
|
||||
"pest",
|
||||
"pest_derive",
|
||||
"regex",
|
||||
"ron",
|
||||
"serde",
|
||||
"similar",
|
||||
"walkdir",
|
||||
@@ -1260,11 +1407,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "is-macro"
|
||||
version = "0.3.6"
|
||||
version = "0.3.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2069faacbe981460232f880d26bf3c7634e322d49053aa48c27e3ae642f728f1"
|
||||
checksum = "1d57a3e447e24c22647738e4607f1df1e0ec6f72e16182c4cd199f647cdfb0e4"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
@@ -1367,9 +1514,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.161"
|
||||
version = "0.2.164"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
|
||||
checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
@@ -1383,7 +1530,7 @@ dependencies = [
|
||||
"paste",
|
||||
"peg",
|
||||
"regex",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1429,6 +1576,12 @@ version = "0.4.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
|
||||
|
||||
[[package]]
|
||||
name = "litemap"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.11"
|
||||
@@ -1486,9 +1639,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5"
|
||||
|
||||
[[package]]
|
||||
name = "matchit"
|
||||
version = "0.8.4"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3"
|
||||
checksum = "bd0aa4b8ca861b08d68afc8702af3250776898c1508b278e1da9d01e01d4b45c"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
@@ -1815,7 +1968,7 @@ dependencies = [
|
||||
"pep440_rs 0.4.0",
|
||||
"regex",
|
||||
"serde",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
"tracing",
|
||||
"unicode-width 0.1.13",
|
||||
"url",
|
||||
@@ -1834,7 +1987,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
"ucd-trie",
|
||||
]
|
||||
|
||||
@@ -2004,7 +2157,7 @@ dependencies = [
|
||||
"newtype-uuid",
|
||||
"quick-xml",
|
||||
"strip-ansi-escapes",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2111,7 +2264,7 @@ dependencies = [
|
||||
"compact_str",
|
||||
"countme",
|
||||
"dir-test",
|
||||
"hashbrown 0.15.0",
|
||||
"hashbrown 0.15.1",
|
||||
"indexmap",
|
||||
"insta",
|
||||
"itertools 0.13.0",
|
||||
@@ -2129,11 +2282,12 @@ dependencies = [
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"serde",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2213,7 +2367,10 @@ version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"crossbeam",
|
||||
"glob",
|
||||
"insta",
|
||||
"notify",
|
||||
"pep440_rs 0.7.2",
|
||||
"rayon",
|
||||
"red_knot_python_semantic",
|
||||
"red_knot_vendored",
|
||||
@@ -2223,7 +2380,9 @@ dependencies = [
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"serde",
|
||||
"thiserror 2.0.3",
|
||||
"toml",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2253,7 +2412,7 @@ checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"libredox",
|
||||
"thiserror",
|
||||
"thiserror 1.0.67",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2315,9 +2474,20 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ron"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a"
|
||||
dependencies = [
|
||||
"base64 0.13.1",
|
||||
"bitflags 1.3.2",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.7.3"
|
||||
version = "0.7.4"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
@@ -2364,7 +2534,7 @@ dependencies = [
|
||||
"strum",
|
||||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"tikv-jemallocator",
|
||||
"toml",
|
||||
"tracing",
|
||||
@@ -2416,7 +2586,9 @@ dependencies = [
|
||||
"camino",
|
||||
"countme",
|
||||
"dashmap 6.1.0",
|
||||
"dunce",
|
||||
"filetime",
|
||||
"glob",
|
||||
"ignore",
|
||||
"insta",
|
||||
"matchit",
|
||||
@@ -2432,7 +2604,7 @@ dependencies = [
|
||||
"salsa",
|
||||
"serde",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
@@ -2534,7 +2706,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.7.3"
|
||||
version = "0.7.4"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
@@ -2584,7 +2756,7 @@ dependencies = [
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"test-case",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-normalization",
|
||||
@@ -2618,7 +2790,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"test-case",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
@@ -2638,7 +2810,6 @@ dependencies = [
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
]
|
||||
@@ -2691,7 +2862,7 @@ dependencies = [
|
||||
"similar",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
@@ -2771,6 +2942,7 @@ dependencies = [
|
||||
name = "ruff_python_stdlib"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
@@ -2823,7 +2995,7 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"thiserror",
|
||||
"thiserror 2.0.3",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
@@ -2849,7 +3021,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.7.3"
|
||||
version = "0.7.4"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
@@ -2932,9 +3104,9 @@ checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.37"
|
||||
version = "0.38.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811"
|
||||
checksum = "99e4ea3e1cdc4b559b8e5650f9c8e5998e3e5c1343b4eaf034565f32318d63c0"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"errno",
|
||||
@@ -3077,9 +3249,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.214"
|
||||
version = "1.0.215"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
|
||||
checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -3097,9 +3269,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.214"
|
||||
version = "1.0.215"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
|
||||
checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3119,9 +3291,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.132"
|
||||
version = "1.0.133"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
|
||||
checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
@@ -3234,6 +3406,12 @@ version = "0.9.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
|
||||
|
||||
[[package]]
|
||||
name = "stable_deref_trait"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
|
||||
|
||||
[[package]]
|
||||
name = "static_assertions"
|
||||
version = "1.1.0"
|
||||
@@ -3324,9 +3502,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.13.0"
|
||||
version = "3.14.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b"
|
||||
checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
@@ -3403,7 +3581,16 @@ version = "1.0.67"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b3c6efbfc763e64eb85c11c25320f0737cb7364c4b6336db90aa9ebe27a0bbd"
|
||||
dependencies = [
|
||||
"thiserror-impl",
|
||||
"thiserror-impl 1.0.67",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "2.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c006c85c7651b3cf2ada4584faa36773bd07bac24acfb39f3c431b36d7e667aa"
|
||||
dependencies = [
|
||||
"thiserror-impl 2.0.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3417,6 +3604,17 @@ dependencies = [
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror-impl"
|
||||
version = "2.0.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f077553d607adc1caf65430528a576c757a71ed73944b66ebb58ef2bbd243568"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "1.1.8"
|
||||
@@ -3447,6 +3645,16 @@ dependencies = [
|
||||
"tikv-jemalloc-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinystr"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinytemplate"
|
||||
version = "1.2.1"
|
||||
@@ -3663,12 +3871,6 @@ dependencies = [
|
||||
"unic-common",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-bidi"
|
||||
version = "0.3.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.13"
|
||||
@@ -3736,7 +3938,7 @@ version = "2.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.0",
|
||||
"flate2",
|
||||
"log",
|
||||
"once_cell",
|
||||
@@ -3748,9 +3950,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.5.2"
|
||||
version = "2.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
|
||||
checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada"
|
||||
dependencies = [
|
||||
"form_urlencoded",
|
||||
"idna",
|
||||
@@ -3758,6 +3960,18 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "utf16_iter"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"
|
||||
|
||||
[[package]]
|
||||
name = "utf8_iter"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
|
||||
|
||||
[[package]]
|
||||
name = "utf8parse"
|
||||
version = "0.2.1"
|
||||
@@ -4194,6 +4408,18 @@ version = "0.0.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
|
||||
|
||||
[[package]]
|
||||
name = "write16"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"
|
||||
|
||||
[[package]]
|
||||
name = "writeable"
|
||||
version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
|
||||
|
||||
[[package]]
|
||||
name = "yansi"
|
||||
version = "1.0.1"
|
||||
@@ -4209,6 +4435,30 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yoke"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"stable_deref_trait",
|
||||
"yoke-derive",
|
||||
"zerofrom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "yoke-derive"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.32"
|
||||
@@ -4229,12 +4479,55 @@ dependencies = [
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerofrom"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55"
|
||||
dependencies = [
|
||||
"zerofrom-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerofrom-derive"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
"synstructure",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zeroize"
|
||||
version = "1.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d"
|
||||
|
||||
[[package]]
|
||||
name = "zerovec"
|
||||
version = "0.10.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
|
||||
dependencies = [
|
||||
"yoke",
|
||||
"zerofrom",
|
||||
"zerovec-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerovec-derive"
|
||||
version = "0.10.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.87",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "0.6.6"
|
||||
|
||||
@@ -66,6 +66,7 @@ criterion = { version = "0.5.1", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
 dir-test = { version = "0.3.0" }
+dunce = { version = "1.0.5" }
 drop_bomb = { version = "0.1.5" }
 env_logger = { version = "0.11.0" }
 etcetera = { version = "0.8.0" }
@@ -81,7 +82,7 @@ hashbrown = { version = "0.15.0", default-features = false, features = [
 ignore = { version = "0.4.22" }
 imara-diff = { version = "0.1.5" }
 imperative = { version = "1.0.4" }
-indexmap = {version = "2.6.0" }
+indexmap = { version = "2.6.0" }
 indicatif = { version = "0.17.8" }
 indoc = { version = "2.0.4" }
 insta = { version = "1.35.1" }
@@ -136,7 +137,7 @@ strum_macros = { version = "0.26.0" }
 syn = { version = "2.0.55" }
 tempfile = { version = "3.9.0" }
 test-case = { version = "3.3.1" }
-thiserror = { version = "1.0.58" }
+thiserror = { version = "2.0.0" }
 tikv-jemallocator = { version = "0.6.0" }
 toml = { version = "0.8.11" }
 tracing = { version = "0.1.40" }
@@ -136,8 +136,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.7.3/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.7.3/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.7.4/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.7.4/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -170,7 +170,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.7.3
+  rev: v0.7.4
   hooks:
     # Run the linter.
     - id: ruff
@@ -1,6 +1,11 @@
[files]
# https://github.com/crate-ci/typos/issues/868
extend-exclude = ["crates/red_knot_vendored/vendor/**/*", "**/resources/**/*", "**/snapshots/**/*"]
extend-exclude = [
    "crates/red_knot_vendored/vendor/**/*",
    "**/resources/**/*",
    "**/snapshots/**/*",
    "crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
]

[default.extend-words]
"arange" = "arange" # e.g. `numpy.arange`
|
||||
@@ -34,6 +34,7 @@ tracing-tree = { workspace = true }
[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }
ruff_db = { workspace = true, features = ["testing"] }

[lints]
workspace = true
@@ -183,10 +183,10 @@ fn run() -> anyhow::Result<ExitStatus> {

     let system = OsSystem::new(cwd.clone());
     let cli_configuration = args.to_configuration(&cwd);
-    let workspace_metadata = WorkspaceMetadata::from_path(
+    let workspace_metadata = WorkspaceMetadata::discover(
         system.current_directory(),
         &system,
-        Some(cli_configuration.clone()),
+        Some(&cli_configuration),
     )?;

     // TODO: Use the `program_settings` to compute the key for the database's persistent
@@ -4,8 +4,8 @@
 #[derive(Copy, Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord, Default, clap::ValueEnum)]
 pub enum TargetVersion {
     Py37,
-    #[default]
     Py38,
+    #[default]
     Py39,
     Py310,
     Py311,
@@ -46,3 +46,17 @@ impl From<TargetVersion> for red_knot_python_semantic::PythonVersion {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::target_version::TargetVersion;
+    use red_knot_python_semantic::PythonVersion;
+
+    #[test]
+    fn same_default_as_python_version() {
+        assert_eq!(
+            PythonVersion::from(TargetVersion::default()),
+            PythonVersion::default()
+        );
+    }
+}
@@ -6,7 +6,7 @@ use std::time::Duration;
|
||||
use anyhow::{anyhow, Context};
|
||||
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
use red_knot_workspace::db::{Db, RootDatabase};
|
||||
use red_knot_workspace::watch;
|
||||
use red_knot_workspace::watch::{directory_watcher, WorkspaceWatcher};
|
||||
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration};
|
||||
@@ -14,6 +14,7 @@ use red_knot_workspace::workspace::WorkspaceMetadata;
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_db::testing::setup_logging;
|
||||
use ruff_db::Upcast;
|
||||
|
||||
struct TestCase {
|
||||
@@ -69,7 +70,6 @@ impl TestCase {
|
||||
Some(all_events)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn take_watch_changes(&self) -> Vec<watch::ChangeEvent> {
|
||||
self.try_take_watch_changes(Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
@@ -110,8 +110,8 @@ impl TestCase {
|
||||
) -> anyhow::Result<()> {
|
||||
let program = Program::get(self.db());
|
||||
|
||||
self.configuration.search_paths = configuration.clone();
|
||||
let new_settings = configuration.into_settings(self.db.workspace().root(&self.db));
|
||||
let new_settings = configuration.to_settings(self.db.workspace().root(&self.db));
|
||||
self.configuration.search_paths = configuration;
|
||||
|
||||
program.update_search_paths(&mut self.db, &new_settings)?;
|
||||
|
||||
@@ -204,7 +204,9 @@ where
|
||||
.as_utf8_path()
|
||||
.canonicalize_utf8()
|
||||
.with_context(|| "Failed to canonicalize root path.")?,
|
||||
);
|
||||
)
|
||||
.simplified()
|
||||
.to_path_buf();
|
||||
|
||||
let workspace_path = root_path.join("workspace");
|
||||
|
||||
@@ -241,8 +243,7 @@ where
|
||||
search_paths,
|
||||
};
|
||||
|
||||
let workspace =
|
||||
WorkspaceMetadata::from_path(&workspace_path, &system, Some(configuration.clone()))?;
|
||||
let workspace = WorkspaceMetadata::discover(&workspace_path, &system, Some(&configuration))?;
|
||||
|
||||
let db = RootDatabase::new(workspace, system)?;
|
||||
|
||||
@@ -1311,3 +1312,138 @@ mod unix {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_packages_delete_root() -> anyhow::Result<()> {
|
||||
let mut case = setup(|root: &SystemPath, workspace_root: &SystemPath| {
|
||||
std::fs::write(
|
||||
workspace_root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
std::fs::write(
|
||||
root.join("pyproject.toml").as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "outer"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
assert_eq!(
|
||||
case.db().workspace().root(case.db()),
|
||||
&*case.workspace_path("")
|
||||
);
|
||||
|
||||
std::fs::remove_file(case.workspace_path("pyproject.toml").as_std_path())?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
// It should now pick up the outer workspace.
|
||||
assert_eq!(case.db().workspace().root(case.db()), case.root_path());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn added_package() -> anyhow::Result<()> {
|
||||
let _ = setup_logging();
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
std::fs::create_dir(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to create folder for package 'b'")?;
|
||||
|
||||
// It seems that the file watcher won't pick up on file changes shortly after the folder
|
||||
// was created... I suspect this is because most file watchers don't support recursive
|
||||
// file watching. Instead, file-watching libraries manually implement recursive file watching
|
||||
// by setting a watcher for each directory. But doing this obviously "lags" behind.
|
||||
case.take_watch_changes();
|
||||
|
||||
std::fs::write(
|
||||
case.workspace_path("packages/b/pyproject.toml")
|
||||
.as_std_path(),
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
)
|
||||
.context("failed to write pyproject.toml for package b")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn removed_package() -> anyhow::Result<()> {
|
||||
let mut case = setup([
|
||||
(
|
||||
"pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "inner"
|
||||
|
||||
[tool.knot.workspace]
|
||||
members = ["packages/*"]
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/a/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "a"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"packages/b/pyproject.toml",
|
||||
r#"
|
||||
[project]
|
||||
name = "b"
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
|
||||
|
||||
std::fs::remove_dir_all(case.workspace_path("packages/b").as_std_path())
|
||||
.context("failed to remove package 'b'")?;
|
||||
|
||||
let changes = case.stop_watch();
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -13,7 +13,8 @@ license = { workspace = true }
[dependencies]
ruff_db = { workspace = true }
ruff_index = { workspace = true }
ruff_python_ast = { workspace = true, features = ["salsa"] }
ruff_python_ast = { workspace = true }
ruff_python_parser = { workspace = true }
ruff_python_stdlib = { workspace = true }
ruff_source_file = { workspace = true }
ruff_text_size = { workspace = true }
@@ -32,6 +33,7 @@ thiserror = { workspace = true }
tracing = { workspace = true }
rustc-hash = { workspace = true }
hashbrown = { workspace = true }
serde = { workspace = true, optional = true }
smallvec = { workspace = true }
static_assertions = { workspace = true }
test-case = { workspace = true }
@@ -0,0 +1,47 @@
# Optional

## Annotation

`typing.Optional[T]` is equivalent to a union of `T` with `None`, i.e. `T | None`.

```py
from typing import Optional

a: Optional[int]
a1: Optional[bool]
a2: Optional[Optional[bool]]
a3: Optional[None]

def f():
    # revealed: int | None
    reveal_type(a)
    # revealed: bool | None
    reveal_type(a1)
    # revealed: bool | None
    reveal_type(a2)
    # revealed: None
    reveal_type(a3)
```

## Assignment

```py
from typing import Optional

a: Optional[int] = 1
a = None
# error: [invalid-assignment] "Object of type `Literal[""]` is not assignable to `int | None`"
a = ""
```

## Typing Extensions

```py
from typing_extensions import Optional

a: Optional[int]

def f():
    # revealed: int | None
    reveal_type(a)
```
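As a quick, illustrative runtime check of the `Optional`/union equivalence exercised above (this snippet is an editorial aside, not part of the mdtest suite):

```py
from typing import Optional, Union, get_args

# Optional[T] is spelled internally as Union[T, None].
assert Optional[int] == Union[int, None]
assert get_args(Optional[int]) == (int, type(None))

# Nested Optional collapses, matching the `bool | None` revealed above.
assert Optional[Optional[bool]] == Union[bool, None]
```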
@@ -0,0 +1,191 @@
|
||||
# String annotations
|
||||
|
||||
## Simple
|
||||
|
||||
```py
|
||||
def f() -> "int":
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
## Nested
|
||||
|
||||
```py
|
||||
def f() -> "'int'":
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: int
|
||||
```
|
||||
|
||||
## Type expression
|
||||
|
||||
```py
|
||||
def f1() -> "int | str":
|
||||
return 1
|
||||
|
||||
def f2() -> "tuple[int, str]":
|
||||
return 1
|
||||
|
||||
reveal_type(f1()) # revealed: int | str
|
||||
reveal_type(f2()) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Partial
|
||||
|
||||
```py
|
||||
def f() -> tuple[int, "str"]:
|
||||
return 1
|
||||
|
||||
reveal_type(f()) # revealed: tuple[int, str]
|
||||
```
|
||||
|
||||
## Deferred
|
||||
|
||||
```py
|
||||
def f() -> "Foo":
|
||||
return Foo()
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: Foo
|
||||
```
|
||||
|
||||
## Deferred (undefined)
|
||||
|
||||
```py
|
||||
# error: [unresolved-reference]
|
||||
def f() -> "Foo":
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Partial deferred
|
||||
|
||||
```py
|
||||
def f() -> int | "Foo":
|
||||
return 1
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f()) # revealed: int | Foo
|
||||
```
|
||||
|
||||
## `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f1() -> Literal["Foo", "Bar"]:
|
||||
return "Foo"
|
||||
|
||||
def f2() -> 'Literal["Foo", "Bar"]':
|
||||
return "Foo"
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
reveal_type(f1()) # revealed: Literal["Foo", "Bar"]
|
||||
reveal_type(f2()) # revealed: Literal["Foo", "Bar"]
|
||||
```
|
||||
|
||||
## Various string kinds
|
||||
|
||||
```py
|
||||
# error: [annotation-raw-string] "Type expressions cannot use raw string literal"
|
||||
def f1() -> r"int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-f-string] "Type expressions cannot use f-strings"
|
||||
def f2() -> f"int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-byte-string] "Type expressions cannot use bytes literal"
|
||||
def f3() -> b"int":
|
||||
return 1
|
||||
|
||||
def f4() -> "int":
|
||||
return 1
|
||||
|
||||
# error: [annotation-implicit-concat] "Type expressions cannot span multiple string literals"
|
||||
def f5() -> "in" "t":
|
||||
return 1
|
||||
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f6() -> "\N{LATIN SMALL LETTER I}nt":
|
||||
return 1
|
||||
|
||||
# error: [annotation-escape-character] "Type expressions cannot contain escape characters"
|
||||
def f7() -> "\x69nt":
|
||||
return 1
|
||||
|
||||
def f8() -> """int""":
|
||||
return 1
|
||||
|
||||
# error: [annotation-byte-string] "Type expressions cannot use bytes literal"
|
||||
def f9() -> "b'int'":
|
||||
return 1
|
||||
|
||||
reveal_type(f1()) # revealed: Unknown
|
||||
reveal_type(f2()) # revealed: Unknown
|
||||
reveal_type(f3()) # revealed: Unknown
|
||||
reveal_type(f4()) # revealed: int
|
||||
reveal_type(f5()) # revealed: Unknown
|
||||
reveal_type(f6()) # revealed: Unknown
|
||||
reveal_type(f7()) # revealed: Unknown
|
||||
reveal_type(f8()) # revealed: int
|
||||
reveal_type(f9()) # revealed: Unknown
|
||||
```
|
||||
|
||||
## Various string kinds in `typing.Literal`
|
||||
|
||||
```py
|
||||
from typing import Literal
|
||||
|
||||
def f() -> Literal["a", r"b", b"c", "d" "e", "\N{LATIN SMALL LETTER F}", "\x67", """h"""]:
|
||||
return "normal"
|
||||
|
||||
reveal_type(f()) # revealed: Literal["a", "b", "de", "f", "g", "h"] | Literal[b"c"]
|
||||
```
|
||||
|
||||
## Class variables
|
||||
|
||||
```py
|
||||
MyType = int
|
||||
|
||||
class Aliases:
|
||||
MyType = str
|
||||
|
||||
forward: "MyType"
|
||||
not_forward: MyType
|
||||
|
||||
reveal_type(Aliases.forward) # revealed: str
|
||||
reveal_type(Aliases.not_forward) # revealed: str
|
||||
```
|
||||
|
||||
## Annotated assignment
|
||||
|
||||
```py
|
||||
a: "int" = 1
|
||||
b: "'int'" = 1
|
||||
c: "Foo"
|
||||
# error: [invalid-assignment] "Object of type `Literal[1]` is not assignable to `Foo`"
|
||||
d: "Foo" = 1
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
c = Foo()
|
||||
|
||||
reveal_type(a) # revealed: Literal[1]
|
||||
reveal_type(b) # revealed: Literal[1]
|
||||
reveal_type(c) # revealed: Foo
|
||||
reveal_type(d) # revealed: Foo
|
||||
```
|
||||
|
||||
## Parameter
|
||||
|
||||
TODO: Add tests once parameter inference is supported
|
||||
@@ -1,9 +0,0 @@
# String annotations

```py
def f() -> "int":
    return 1

# TODO: We do not support string annotations, but we should not panic if we encounter them
reveal_type(f()) # revealed: @Todo
```
@@ -110,3 +110,29 @@ c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = ((
|
||||
# error: [invalid-assignment] "Object of type `Literal["foo"]` is not assignable to `tuple[tuple[int, int], int]`"
|
||||
c: builtins.tuple[builtins.tuple[builtins.int, builtins.int], builtins.int] = "foo"
|
||||
```
|
||||
|
||||
## Future annotations are deferred
|
||||
|
||||
```py
|
||||
from __future__ import annotations
|
||||
|
||||
x: Foo
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
## Annotations in stub files are deferred
|
||||
|
||||
```pyi path=main.pyi
|
||||
x: Foo
|
||||
|
||||
class Foo:
|
||||
pass
|
||||
|
||||
x = Foo()
|
||||
reveal_type(x) # revealed: Foo
|
||||
```
|
||||
|
||||
@@ -35,6 +35,7 @@ class C:
     if flag:
         x = 2

+# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C]` is possibly unbound"
 reveal_type(C.x) # revealed: Literal[2]
 reveal_type(C.y) # revealed: Literal[1]
 ```
@@ -9,14 +9,21 @@ def bool_instance() -> bool:
|
||||
flag = bool_instance()
|
||||
|
||||
if flag:
|
||||
class C:
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
else:
|
||||
class C:
|
||||
class C1:
|
||||
x = 2
|
||||
|
||||
reveal_type(C.x) # revealed: Literal[1, 2]
|
||||
class C2:
|
||||
if flag:
|
||||
x = 3
|
||||
else:
|
||||
x = 4
|
||||
|
||||
reveal_type(C1.x) # revealed: Literal[1, 2]
|
||||
reveal_type(C2.x) # revealed: Literal[3, 4]
|
||||
```
|
||||
|
||||
## Inherited attributes
|
||||
@@ -53,3 +60,77 @@ reveal_type(A.__mro__)
|
||||
# `E` is earlier in the MRO than `F`, so we should use the type of `E.X`
|
||||
reveal_type(A.X) # revealed: Literal[42]
|
||||
```
|
||||
|
||||
## Unions with possibly unbound paths
|
||||
|
||||
### Definite boundness within a class
|
||||
|
||||
In this example, the `x` attribute is not defined in the `C2` element of the union:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2: ...
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
flag1 = bool_instance()
|
||||
flag2 = bool_instance()
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 3]
|
||||
```
|
||||
|
||||
### Possibly-unbound within a class
|
||||
|
||||
We raise the same diagnostic if the attribute is possibly-unbound in at least one element of the
|
||||
union:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1:
|
||||
x = 1
|
||||
|
||||
class C2:
|
||||
if bool_instance():
|
||||
x = 2
|
||||
|
||||
class C3:
|
||||
x = 3
|
||||
|
||||
flag1 = bool_instance()
|
||||
flag2 = bool_instance()
|
||||
|
||||
C = C1 if flag1 else C2 if flag2 else C3
|
||||
|
||||
# error: [possibly-unbound-attribute] "Attribute `x` on type `Literal[C1, C2, C3]` is possibly unbound"
|
||||
reveal_type(C.x) # revealed: Literal[1, 2, 3]
|
||||
```
|
||||
|
||||
## Unions with all paths unbound
|
||||
|
||||
If the symbol is unbound in all elements of the union, we detect that:
|
||||
|
||||
```py
|
||||
def bool_instance() -> bool:
|
||||
return True
|
||||
|
||||
class C1: ...
|
||||
class C2: ...
|
||||
|
||||
flag = bool_instance()
|
||||
|
||||
C = C1 if flag else C2
|
||||
|
||||
# error: [unresolved-attribute] "Type `Literal[C1, C2]` has no attribute `x`"
|
||||
reveal_type(C.x) # revealed: Unknown
|
||||
```
|
||||
|
||||
@@ -19,6 +19,15 @@ async def get_int_async() -> int:
reveal_type(get_int_async())  # revealed: @Todo
```

## Generic

```py
def get_int[T]() -> int:
    return 42

reveal_type(get_int())  # revealed: int
```

## Decorated

```py

@@ -41,11 +41,10 @@ except EXCEPTIONS as f:
## Dynamic exception types

```py
# TODO: we should not emit these `call-possibly-unbound-method` errors for `tuple.__class_getitem__`
def foo(
    x: type[AttributeError],
    y: tuple[type[OSError], type[RuntimeError]],  # error: [call-possibly-unbound-method]
    z: tuple[type[BaseException], ...],  # error: [call-possibly-unbound-method]
    y: tuple[type[OSError], type[RuntimeError]],
    z: tuple[type[BaseException], ...],
):
    try:
        help()

@@ -0,0 +1,28 @@
# Attribute access

## Boundness

```py
def flag() -> bool: ...

class A:
    always_bound = 1

    if flag():
        union = 1
    else:
        union = "abc"

    if flag():
        possibly_unbound = "abc"

reveal_type(A.always_bound)  # revealed: Literal[1]

reveal_type(A.union)  # revealed: Literal[1] | Literal["abc"]

# error: [possibly-unbound-attribute] "Attribute `possibly_unbound` on type `Literal[A]` is possibly unbound"
reveal_type(A.possibly_unbound)  # revealed: Literal["abc"]

# error: [unresolved-attribute] "Type `Literal[A]` has no attribute `non_existent`"
reveal_type(A.non_existent)  # revealed: Unknown
```
@@ -6,13 +6,9 @@ Basic PEP 695 generics

```py
class MyBox[T]:
    # TODO: `T` is defined here
    # error: [unresolved-reference] "Name `T` used when not defined"
    data: T
    box_model_number = 695

    # TODO: `T` is defined here
    # error: [unresolved-reference] "Name `T` used when not defined"
    def __init__(self, data: T):
        self.data = data

@@ -31,17 +27,12 @@ reveal_type(MyBox.box_model_number)  # revealed: Literal[695]

```py
class MyBox[T]:
    # TODO: `T` is defined here
    # error: [unresolved-reference] "Name `T` used when not defined"
    data: T

    # TODO: `T` is defined here
    # error: [unresolved-reference] "Name `T` used when not defined"
    def __init__(self, data: T):
        self.data = data

# TODO not error on the subscripting or the use of type param
# error: [unresolved-reference] "Name `T` used when not defined"
# TODO not error on the subscripting
# error: [non-subscriptable]
class MySecureBox[T](MyBox[T]): ...

@@ -66,3 +57,55 @@ class S[T](Seq[S]): ...  # error: [non-subscriptable]

reveal_type(S)  # revealed: Literal[S]
```

## Type params

A PEP 695 type variable defines a value of type `typing.TypeVar` with attributes `__name__`,
`__bound__`, `__constraints__`, and `__default__` (the latter three all lazily evaluated):

```py
def f[T, U: A, V: (A, B), W = A, X: A = A1]():
    reveal_type(T)  # revealed: T
    reveal_type(T.__name__)  # revealed: Literal["T"]
    reveal_type(T.__bound__)  # revealed: None
    reveal_type(T.__constraints__)  # revealed: tuple[()]
    reveal_type(T.__default__)  # revealed: NoDefault

    reveal_type(U)  # revealed: U
    reveal_type(U.__name__)  # revealed: Literal["U"]
    reveal_type(U.__bound__)  # revealed: type[A]
    reveal_type(U.__constraints__)  # revealed: tuple[()]
    reveal_type(U.__default__)  # revealed: NoDefault

    reveal_type(V)  # revealed: V
    reveal_type(V.__name__)  # revealed: Literal["V"]
    reveal_type(V.__bound__)  # revealed: None
    reveal_type(V.__constraints__)  # revealed: tuple[type[A], type[B]]
    reveal_type(V.__default__)  # revealed: NoDefault

    reveal_type(W)  # revealed: W
    reveal_type(W.__name__)  # revealed: Literal["W"]
    reveal_type(W.__bound__)  # revealed: None
    reveal_type(W.__constraints__)  # revealed: tuple[()]
    reveal_type(W.__default__)  # revealed: type[A]

    reveal_type(X)  # revealed: X
    reveal_type(X.__name__)  # revealed: Literal["X"]
    reveal_type(X.__bound__)  # revealed: type[A]
    reveal_type(X.__constraints__)  # revealed: tuple[()]
    reveal_type(X.__default__)  # revealed: type[A1]

class A: ...
class B: ...
class A1(A): ...
```

## Minimum two constraints

A typevar with fewer than two constraints emits a diagnostic and is treated as unconstrained:

```py
# error: [invalid-typevar-constraints] "TypeVar must have at least two constrained types"
def f[T: (int,)]():
    reveal_type(T.__constraints__)  # revealed: tuple[()]
```

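A small runtime sketch (plain Python 3.12+, not part of the test suite) of the attributes modelled above; the printed values reflect CPython's runtime behaviour for implicitly created `TypeVar` objects:

```py
def f[T, U: int, V: (int, str)]():
    # The type parameters are visible inside the function body at runtime.
    return T, U, V

T, U, V = f()
print(T.__name__, T.__bound__, T.__constraints__)  # T None ()
print(U.__name__, U.__bound__, U.__constraints__)  # U <class 'int'> ()
print(V.__name__, V.__bound__, V.__constraints__)  # V None (<class 'int'>, <class 'str'>)
```
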
@@ -21,6 +21,7 @@ reveal_type(y)
```

```py
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound` is possibly unbound"
from maybe_unbound import x, y

reveal_type(x)  # revealed: Literal[3]
@@ -50,6 +51,7 @@ reveal_type(y)
Importing an annotated name prefers the declared type over the inferred type:

```py
# error: [possibly-unbound-import] "Member `y` of module `maybe_unbound_annotated` is possibly unbound"
from maybe_unbound_annotated import x, y

reveal_type(x)  # revealed: Literal[3]

@@ -51,6 +51,8 @@ invalid1: Literal[3 + 4]
invalid2: Literal[4 + 3j]
# error: [invalid-literal-parameter]
invalid3: Literal[(3, 4)]

hello = "hello"
invalid4: Literal[
    1 + 2,  # error: [invalid-literal-parameter]
    "foo",

@@ -102,6 +102,9 @@ else:
### Handling of `None`

```py
# TODO: this error should ideally go away once we (1) understand `sys.version_info` branches,
# and (2) set the target Python version for this test to 3.10.
# error: [possibly-unbound-import] "Member `NoneType` of module `types` is possibly unbound"
from types import NoneType

def flag() -> bool: ...

152  crates/red_knot_python_semantic/resources/mdtest/narrow/type.md  (normal file)
@@ -0,0 +1,152 @@
# Narrowing for checks involving `type(x)`

## `type(x) is C`

```py
class A: ...
class B: ...

def get_a_or_b() -> A | B:
    return A()

x = get_a_or_b()

if type(x) is A:
    reveal_type(x)  # revealed: A
else:
    # It would be wrong to infer `B` here. The type
    # of `x` could be a subclass of `A`, so we need
    # to infer the full union type:
    reveal_type(x)  # revealed: A | B
```

## `type(x) is not C`

```py
class A: ...
class B: ...

def get_a_or_b() -> A | B:
    return A()

x = get_a_or_b()

if type(x) is not A:
    # Same reasoning as above: no narrowing should occur here.
    reveal_type(x)  # revealed: A | B
else:
    reveal_type(x)  # revealed: A
```

## `type(x) == C`, `type(x) != C`

No narrowing can occur for equality comparisons, since there might be a custom `__eq__`
implementation on the metaclass.

TODO: Narrowing might be possible in some cases where the classes themselves are `@final` or their
metaclass is `@final`.

```py
class IsEqualToEverything(type):
    def __eq__(cls, other):
        return True

class A(metaclass=IsEqualToEverything): ...
class B(metaclass=IsEqualToEverything): ...

def get_a_or_b() -> A | B:
    return B()

x = get_a_or_b()

if type(x) == A:
    reveal_type(x)  # revealed: A | B

if type(x) != A:
    reveal_type(x)  # revealed: A | B
```

## No narrowing for custom `type` callable

```py
class A: ...
class B: ...

def type(x):
    return int

def get_a_or_b() -> A | B:
    return A()

x = get_a_or_b()

if type(x) is A:
    reveal_type(x)  # revealed: A | B
else:
    reveal_type(x)  # revealed: A | B
```

## No narrowing for multiple arguments

No narrowing should occur if `type` is used to dynamically create a class:

```py
def get_str_or_int() -> str | int:
    return "test"

x = get_str_or_int()

if type(x, (), {}) is str:
    reveal_type(x)  # revealed: str | int
else:
    reveal_type(x)  # revealed: str | int
```

## No narrowing for keyword arguments

`type` can't be used with a keyword argument:

```py
def get_str_or_int() -> str | int:
    return "test"

x = get_str_or_int()

# TODO: we could issue a diagnostic here
if type(object=x) is str:
    reveal_type(x)  # revealed: str | int
```

## Narrowing if `type` is aliased

```py
class A: ...
class B: ...

alias_for_type = type

def get_a_or_b() -> A | B:
    return A()

x = get_a_or_b()

if alias_for_type(x) is A:
    reveal_type(x)  # revealed: A
```

## Limitations

```py
class Base: ...
class Derived(Base): ...

def get_base() -> Base:
    return Base()

x = get_base()

if type(x) is Base:
    # Ideally, this could be narrower, but there is no way to
    # express a constraint like `Base & ~ProperSubtypeOf[Base]`.
    reveal_type(x)  # revealed: Base
```
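A plain-Python illustration (not part of the new test file) of why the negative branches above cannot be narrowed: `type(x) is Base` is `False` for instances of subclasses, even though those values are still instances of `Base`.

```py
class Base: ...
class Derived(Base): ...

x = Derived()
print(type(x) is Base)      # False: the check only matches the exact runtime class
print(isinstance(x, Base))  # True: the value is still a `Base` instance
```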
@@ -0,0 +1,13 @@
# Regression test for #14334

Regression test for [this issue](https://github.com/astral-sh/ruff/issues/14334).

```py path=base.py
# error: [invalid-base]
class Base(2): ...
```

```py path=a.py
# No error here
from base import Base
```
@@ -74,6 +74,7 @@ we're dealing with:
```py path=__getattr__.py
import typing

# error: [unresolved-attribute]
reveal_type(typing.__getattr__)  # revealed: Unknown
```

@@ -0,0 +1,139 @@
# `sys.version_info`

## The type of `sys.version_info`

The type of `sys.version_info` is `sys._version_info`, at least according to typeshed's stubs (which
we treat as the single source of truth for the standard library). This is quite a complicated type
in typeshed, so there are many things we don't fully understand about the type yet; this is the
source of several TODOs in this test file. Many of these TODOs should be naturally fixed as we
implement more type-system features in the future.

```py
import sys

reveal_type(sys.version_info)  # revealed: _version_info
```

## Literal types from comparisons

Comparing `sys.version_info` with a 2-element tuple of literal integers always produces a `Literal`
type:

```py
import sys

reveal_type(sys.version_info >= (3, 9))  # revealed: Literal[True]
reveal_type((3, 9) <= sys.version_info)  # revealed: Literal[True]

reveal_type(sys.version_info > (3, 9))  # revealed: Literal[True]
reveal_type((3, 9) < sys.version_info)  # revealed: Literal[True]

reveal_type(sys.version_info < (3, 9))  # revealed: Literal[False]
reveal_type((3, 9) > sys.version_info)  # revealed: Literal[False]

reveal_type(sys.version_info <= (3, 9))  # revealed: Literal[False]
reveal_type((3, 9) >= sys.version_info)  # revealed: Literal[False]

reveal_type(sys.version_info == (3, 9))  # revealed: Literal[False]
reveal_type((3, 9) == sys.version_info)  # revealed: Literal[False]

reveal_type(sys.version_info != (3, 9))  # revealed: Literal[True]
reveal_type((3, 9) != sys.version_info)  # revealed: Literal[True]
```

## Non-literal types from comparisons

Comparing `sys.version_info` with tuples of other lengths will sometimes produce `Literal` types,
sometimes not:

```py
import sys

reveal_type(sys.version_info >= (3, 9, 1))  # revealed: bool
reveal_type(sys.version_info >= (3, 9, 1, "final", 0))  # revealed: bool

# TODO: While this won't fail at runtime, the user has probably made a mistake
# if they're comparing a tuple of length >5 with `sys.version_info`
# (`sys.version_info` is a tuple of length 5). It might be worth
# emitting a lint diagnostic of some kind warning them about the probable error?
reveal_type(sys.version_info >= (3, 9, 1, "final", 0, 5))  # revealed: bool

# TODO: this should be `Literal[False]`; see #14279
reveal_type(sys.version_info == (3, 9, 1, "finallllll", 0))  # revealed: bool
```

## Imports and aliases

Comparisons with `sys.version_info` still produce literal types, even if the symbol is aliased to
another name:

```py
from sys import version_info
from sys import version_info as foo

reveal_type(version_info >= (3, 9))  # revealed: Literal[True]
reveal_type(foo >= (3, 9))  # revealed: Literal[True]

bar = version_info
reveal_type(bar >= (3, 9))  # revealed: Literal[True]
```

## Non-stdlib modules named `sys`

Only comparisons with the symbol `version_info` from the `sys` module produce literal types:

```py path=package/__init__.py
```

```py path=package/sys.py
version_info: tuple[int, int] = (4, 2)
```

```py path=package/script.py
from .sys import version_info

reveal_type(version_info >= (3, 9))  # revealed: bool
```

## Accessing fields by name

The fields of `sys.version_info` can be accessed by name:

```py path=a.py
import sys

reveal_type(sys.version_info.major >= 3)  # revealed: Literal[True]
reveal_type(sys.version_info.minor >= 9)  # revealed: Literal[True]
reveal_type(sys.version_info.minor >= 10)  # revealed: Literal[False]
```

But the `micro`, `releaselevel` and `serial` fields are inferred as `@Todo` until we support
properties on instance types:

```py path=b.py
import sys

reveal_type(sys.version_info.micro)  # revealed: @Todo
reveal_type(sys.version_info.releaselevel)  # revealed: @Todo
reveal_type(sys.version_info.serial)  # revealed: @Todo
```

## Accessing fields by index/slice

The fields of `sys.version_info` can be accessed by index or by slice:

```py
import sys

reveal_type(sys.version_info[0] < 3)  # revealed: Literal[False]
reveal_type(sys.version_info[1] > 9)  # revealed: Literal[False]

# revealed: tuple[Literal[3], Literal[9], int, Literal["alpha", "beta", "candidate", "final"], int]
reveal_type(sys.version_info[:5])

reveal_type(sys.version_info[:2] >= (3, 9))  # revealed: Literal[True]
reveal_type(sys.version_info[0:2] >= (3, 10))  # revealed: Literal[False]
reveal_type(sys.version_info[:3] >= (3, 10, 1))  # revealed: Literal[False]
reveal_type(sys.version_info[3] == "final")  # revealed: bool
reveal_type(sys.version_info[3] == "finalllllll")  # revealed: Literal[False]
```
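A small runtime sketch (plain Python, outside the test file) of the idiom these comparisons model: gating on a two-element prefix of `sys.version_info`, which is why the 2-tuple comparisons above are the ones inferred as literal booleans.

```py
import sys

# The common version-gate pattern: compare a 2-tuple prefix against literal integers.
if sys.version_info[:2] >= (3, 9):
    has_pep_585_generics = True
else:
    has_pep_585_generics = False

print(has_pep_585_generics)
```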
@@ -1,4 +1,6 @@
# Unary Operations
# Invert, UAdd, USub

## Instance

```py
from typing import Literal
@@ -10,8 +10,6 @@ reveal_type(not not None)  # revealed: Literal[False]
## Function

```py
from typing import reveal_type

def f():
    return 1

@@ -115,3 +113,101 @@ reveal_type(not ())  # revealed: Literal[True]
reveal_type(not ("hello",))  # revealed: Literal[False]
reveal_type(not (1, "hello"))  # revealed: Literal[False]
```

## Instance

The `not` operator is inferred based on
<https://docs.python.org/3/library/stdtypes.html#truth-value-testing>. An instance is True or False
if its `__bool__` method says so.

At runtime, the `__len__` method is a fallback for `__bool__`, but we can't make use of that. If we
have a class that defines `__len__` but not `__bool__`, it is possible that any subclass could add a
`__bool__` method that would invalidate whatever conclusion we drew from `__len__`. So instances of
classes without a `__bool__` method, with or without `__len__`, must be inferred as unknown
truthiness.

```py
class AlwaysTrue:
    def __bool__(self) -> Literal[True]:
        return True

# revealed: Literal[False]
reveal_type(not AlwaysTrue())

class AlwaysFalse:
    def __bool__(self) -> Literal[False]:
        return False

# revealed: Literal[True]
reveal_type(not AlwaysFalse())

# We don't get into a cycle if someone sets their `__bool__` method to the `bool` builtin:
class BoolIsBool:
    __bool__ = bool

# revealed: bool
reveal_type(not BoolIsBool())

# At runtime, no `__bool__` and no `__len__` means truthy, but we can't rely on that, because
# a subclass could add a `__bool__` method.
class NoBoolMethod: ...

# revealed: bool
reveal_type(not NoBoolMethod())

# And we can't rely on `__len__` for the same reason: a subclass could add `__bool__`.
class LenZero:
    def __len__(self) -> Literal[0]:
        return 0

# revealed: bool
reveal_type(not LenZero())

class LenNonZero:
    def __len__(self) -> Literal[1]:
        return 1

# revealed: bool
reveal_type(not LenNonZero())

class WithBothLenAndBool1:
    def __bool__(self) -> Literal[False]:
        return False

    def __len__(self) -> Literal[2]:
        return 2

# revealed: Literal[True]
reveal_type(not WithBothLenAndBool1())

class WithBothLenAndBool2:
    def __bool__(self) -> Literal[True]:
        return True

    def __len__(self) -> Literal[0]:
        return 0

# revealed: Literal[False]
reveal_type(not WithBothLenAndBool2())

# TODO: raise diagnostic when __bool__ method is not valid: [unsupported-operator] "Method __bool__ for type `MethodBoolInvalid` should return `bool`, returned type `int`"
# https://docs.python.org/3/reference/datamodel.html#object.__bool__
class MethodBoolInvalid:
    def __bool__(self) -> int:
        return 0

# revealed: bool
reveal_type(not MethodBoolInvalid())

# Don't trust a possibly-unbound `__bool__` method:
def get_flag() -> bool:
    return True

class PossiblyUnboundBool:
    if get_flag():
        def __bool__(self) -> Literal[False]:
            return False

# revealed: bool
reveal_type(not PossiblyUnboundBool())
```

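A short runtime demonstration (plain Python, not an mdtest) of the `__len__` fallback described above, and of why a subclass can invalidate any conclusion drawn from `__len__` alone:

```py
class LenOnly:
    def __len__(self) -> int:
        return 0

class BoolAndLen(LenOnly):
    def __bool__(self) -> bool:
        return True

# Without `__bool__`, truthiness falls back to `__len__` ...
print(bool(LenOnly()))     # False
# ... but a subclass can add `__bool__`, which takes priority over the inherited `__len__`.
print(bool(BoolAndLen()))  # True
```
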
@@ -459,11 +459,11 @@ foo: 3.8- # trailing comment
        ";
        let parsed_versions = TypeshedVersions::from_str(VERSIONS).unwrap();
        assert_eq!(parsed_versions.len(), 3);
        assert_snapshot!(parsed_versions.to_string(), @r###"
        assert_snapshot!(parsed_versions.to_string(), @r"
        bar: 2.7-3.10
        bar.baz: 3.1-3.9
        foo: 3.8-
        "###
        "
        );
    }

@@ -1,14 +1,14 @@
use ruff_python_ast::{AnyNodeRef, NodeKind};
use ruff_text_size::{Ranged, TextRange};
use ruff_python_ast::AnyNodeRef;

/// Compact key for a node for use in a hash map.
///
/// Compares two nodes by their kind and text range.
/// Stores the memory address of the node, because using the range and the kind
/// of the node is not enough to uniquely identify them in ASTs resulting from
/// invalid syntax. For example, parsing the input `for` results in a `StmtFor`
/// AST node where both the `target` and the `iter` field are `ExprName` nodes
/// with the same (empty) range `3..3`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub(super) struct NodeKey {
    kind: NodeKind,
    range: TextRange,
}
pub(super) struct NodeKey(usize);

impl NodeKey {
    pub(super) fn from_node<'a, N>(node: N) -> Self
@@ -16,9 +16,6 @@ impl NodeKey {
        N: Into<AnyNodeRef<'a>>,
    {
        let node = node.into();
        NodeKey {
            kind: node.kind(),
            range: node.range(),
        }
        NodeKey(node.as_ptr().as_ptr() as usize)
    }
}

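A plain-Python analogue (illustration only, not ruff code) of the reasoning in the doc comment above: two distinct nodes produced from invalid syntax can share the same kind and the same empty range, so a `(kind, range)` key collides while an identity-based key does not.

```py
class ExprName:
    def __init__(self, kind: str, start: int, end: int) -> None:
        self.kind, self.start, self.end = kind, start, end

# Both synthesized nodes have kind "ExprName" and the empty range 3..3.
target = ExprName("ExprName", 3, 3)
iter_node = ExprName("ExprName", 3, 3)

by_kind_and_range = {(n.kind, n.start, n.end): n for n in (target, iter_node)}
by_identity = {id(n): n for n in (target, iter_node)}

print(len(by_kind_and_range))  # 1 -- the two nodes collapse onto a single key
print(len(by_identity))        # 2 -- each node keeps its own entry
```
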
@@ -54,6 +54,7 @@ impl Program {
}

#[derive(Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct ProgramSettings {
    pub target_version: PythonVersion,
    pub search_paths: SearchPathSettings,
@@ -61,6 +62,7 @@ pub struct ProgramSettings {

/// Configures the search paths for module resolution.
#[derive(Eq, PartialEq, Debug, Clone)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct SearchPathSettings {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
@@ -91,6 +93,7 @@ impl SearchPathSettings {
}

#[derive(Debug, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub enum SitePackages {
    Derived {
        venv_path: SystemPathBuf,

@@ -5,6 +5,7 @@ use std::fmt;
/// Unlike the `TargetVersion` enums in the CLI crates,
/// this does not necessarily represent a Python version that we actually support.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
pub struct PythonVersion {
    pub major: u8,
    pub minor: u8,
@@ -38,7 +39,7 @@ impl PythonVersion {

impl Default for PythonVersion {
    fn default() -> Self {
        Self::PY38
        Self::PY39
    }
}

@@ -49,64 +49,50 @@ fn ast_ids<'db>(db: &'db dyn Db, scope: ScopeId) -> &'db AstIds {
|
||||
semantic_index(db, scope.file(db)).ast_ids(scope.file_scope_id(db))
|
||||
}
|
||||
|
||||
pub trait HasScopedUseId {
|
||||
/// The type of the ID uniquely identifying the use.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the use in `scope`.
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
/// Uniquely identifies a use of a name in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedUseId;
|
||||
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
type Id = ScopedUseId;
|
||||
pub trait HasScopedUseId {
|
||||
/// Returns the ID that uniquely identifies the use in `scope`.
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId;
|
||||
}
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedUseId for ast::ExprName {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_use_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasScopedUseId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedUseId;
|
||||
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
fn scoped_use_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedUseId {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.use_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasScopedAstId {
|
||||
/// The type of the ID uniquely identifying the node.
|
||||
type Id: Copy;
|
||||
|
||||
/// Returns the ID that uniquely identifies the node in `scope`.
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id;
|
||||
}
|
||||
|
||||
impl<T: HasScopedAstId> HasScopedAstId for Box<T> {
|
||||
type Id = <T as HasScopedAstId>::Id;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
self.as_ref().scoped_ast_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
/// Uniquely identifies an [`ast::Expr`] in a [`crate::semantic_index::symbol::FileScopeId`].
|
||||
#[newtype_index]
|
||||
pub struct ScopedExpressionId;
|
||||
|
||||
pub trait HasScopedExpressionId {
|
||||
/// Returns the ID that uniquely identifies the node in `scope`.
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId;
|
||||
}
|
||||
|
||||
impl<T: HasScopedExpressionId> HasScopedExpressionId for Box<T> {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
self.as_ref().scoped_expression_id(db, scope)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_has_scoped_expression_id {
|
||||
($ty: ty) => {
|
||||
impl HasScopedAstId for $ty {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedExpressionId for $ty {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
let expression_ref = ExpressionRef::from(self);
|
||||
expression_ref.scoped_ast_id(db, scope)
|
||||
expression_ref.scoped_expression_id(db, scope)
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -146,29 +132,20 @@ impl_has_scoped_expression_id!(ast::ExprSlice);
|
||||
impl_has_scoped_expression_id!(ast::ExprIpyEscapeCommand);
|
||||
impl_has_scoped_expression_id!(ast::Expr);
|
||||
|
||||
impl HasScopedAstId for ast::ExpressionRef<'_> {
|
||||
type Id = ScopedExpressionId;
|
||||
|
||||
fn scoped_ast_id(&self, db: &dyn Db, scope: ScopeId) -> Self::Id {
|
||||
impl HasScopedExpressionId for ast::ExpressionRef<'_> {
|
||||
fn scoped_expression_id(&self, db: &dyn Db, scope: ScopeId) -> ScopedExpressionId {
|
||||
let ast_ids = ast_ids(db, scope);
|
||||
ast_ids.expression_id(*self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct AstIdsBuilder {
|
||||
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
|
||||
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
|
||||
}
|
||||
|
||||
impl AstIdsBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
expressions_map: FxHashMap::default(),
|
||||
uses_map: FxHashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds `expr` to the expression ids map and returns its id.
|
||||
pub(super) fn record_expression(&mut self, expr: &ast::Expr) -> ScopedExpressionId {
|
||||
let expression_id = self.expressions_map.len().into();
|
||||
|
||||
@@ -124,9 +124,9 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
self.try_node_context_stack_manager.enter_nested_scope();
|
||||
|
||||
let file_scope_id = self.scopes.push(scope);
|
||||
self.symbol_tables.push(SymbolTableBuilder::new());
|
||||
self.use_def_maps.push(UseDefMapBuilder::new());
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
|
||||
self.symbol_tables.push(SymbolTableBuilder::default());
|
||||
self.use_def_maps.push(UseDefMapBuilder::default());
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::default());
|
||||
|
||||
let scope_id = ScopeId::new(self.db, self.file, file_scope_id, countme::Count::default());
|
||||
|
||||
@@ -373,6 +373,11 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
if let Some(default) = default {
|
||||
self.visit_expr(default);
|
||||
}
|
||||
match type_param {
|
||||
ast::TypeParam::TypeVar(node) => self.add_definition(symbol, node),
|
||||
ast::TypeParam::ParamSpec(node) => self.add_definition(symbol, node),
|
||||
ast::TypeParam::TypeVarTuple(node) => self.add_definition(symbol, node),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -671,9 +676,18 @@ where
|
||||
if let Some(value) = &node.value {
|
||||
self.visit_expr(value);
|
||||
}
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://docs.python.org/3/library/ast.html#ast.AnnAssign
|
||||
if matches!(
|
||||
*node.target,
|
||||
ast::Expr::Attribute(_) | ast::Expr::Subscript(_) | ast::Expr::Name(_)
|
||||
) {
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(&node.target);
|
||||
}
|
||||
}
|
||||
ast::Stmt::AugAssign(
|
||||
aug_assign @ ast::StmtAugAssign {
|
||||
@@ -685,9 +699,18 @@ where
|
||||
) => {
|
||||
debug_assert_eq!(&self.current_assignments, &[]);
|
||||
self.visit_expr(value);
|
||||
self.push_assignment(aug_assign.into());
|
||||
self.visit_expr(target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://docs.python.org/3/library/ast.html#ast.AugAssign
|
||||
if matches!(
|
||||
**target,
|
||||
ast::Expr::Attribute(_) | ast::Expr::Subscript(_) | ast::Expr::Name(_)
|
||||
) {
|
||||
self.push_assignment(aug_assign.into());
|
||||
self.visit_expr(target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(target);
|
||||
}
|
||||
}
|
||||
ast::Stmt::If(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
@@ -1067,9 +1090,15 @@ where
|
||||
ast::Expr::Named(node) => {
|
||||
// TODO walrus in comprehensions is implicitly nonlocal
|
||||
self.visit_expr(&node.value);
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
|
||||
// See https://peps.python.org/pep-0572/#differences-between-assignment-expressions-and-assignment-statements
|
||||
if node.target.is_name_expr() {
|
||||
self.push_assignment(node.into());
|
||||
self.visit_expr(&node.target);
|
||||
self.pop_assignment();
|
||||
} else {
|
||||
self.visit_expr(&node.target);
|
||||
}
|
||||
}
|
||||
ast::Expr::Lambda(lambda) => {
|
||||
if let Some(parameters) = &lambda.parameters {
|
||||
|
||||
@@ -92,6 +92,9 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
WithItem(WithItemDefinitionNodeRef<'a>),
|
||||
MatchPattern(MatchPatternDefinitionNodeRef<'a>),
|
||||
ExceptHandler(ExceptHandlerDefinitionNodeRef<'a>),
|
||||
TypeVar(&'a ast::TypeParamTypeVar),
|
||||
ParamSpec(&'a ast::TypeParamParamSpec),
|
||||
TypeVarTuple(&'a ast::TypeParamTypeVarTuple),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
@@ -130,6 +133,24 @@ impl<'a> From<&'a ast::Alias> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamTypeVar> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamTypeVar) -> Self {
|
||||
Self::TypeVar(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamParamSpec> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamParamSpec) -> Self {
|
||||
Self::ParamSpec(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::TypeParamTypeVarTuple> for DefinitionNodeRef<'a> {
|
||||
fn from(value: &'a ast::TypeParamTypeVarTuple) -> Self {
|
||||
Self::TypeVarTuple(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ImportFromDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node_ref: ImportFromDefinitionNodeRef<'a>) -> Self {
|
||||
Self::ImportFrom(node_ref)
|
||||
@@ -317,6 +338,15 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
handler: AstNodeRef::new(parsed, handler),
|
||||
is_star,
|
||||
}),
|
||||
DefinitionNodeRef::TypeVar(node) => {
|
||||
DefinitionKind::TypeVar(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
DefinitionNodeRef::ParamSpec(node) => {
|
||||
DefinitionKind::ParamSpec(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
DefinitionNodeRef::TypeVarTuple(node) => {
|
||||
DefinitionKind::TypeVarTuple(AstNodeRef::new(parsed, node))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,6 +386,9 @@ impl<'db> DefinitionNodeRef<'db> {
|
||||
identifier.into()
|
||||
}
|
||||
Self::ExceptHandler(ExceptHandlerDefinitionNodeRef { handler, .. }) => handler.into(),
|
||||
Self::TypeVar(node) => node.into(),
|
||||
Self::ParamSpec(node) => node.into(),
|
||||
Self::TypeVarTuple(node) => node.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -412,6 +445,9 @@ pub enum DefinitionKind<'db> {
|
||||
WithItem(WithItemDefinitionKind),
|
||||
MatchPattern(MatchPatternDefinitionKind),
|
||||
ExceptHandler(ExceptHandlerDefinitionKind),
|
||||
TypeVar(AstNodeRef<ast::TypeParamTypeVar>),
|
||||
ParamSpec(AstNodeRef<ast::TypeParamParamSpec>),
|
||||
TypeVarTuple(AstNodeRef<ast::TypeParamTypeVarTuple>),
|
||||
}
|
||||
|
||||
impl DefinitionKind<'_> {
|
||||
@@ -421,7 +457,10 @@ impl DefinitionKind<'_> {
|
||||
DefinitionKind::Function(_)
|
||||
| DefinitionKind::Class(_)
|
||||
| DefinitionKind::Import(_)
|
||||
| DefinitionKind::ImportFrom(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
| DefinitionKind::ImportFrom(_)
|
||||
| DefinitionKind::TypeVar(_)
|
||||
| DefinitionKind::ParamSpec(_)
|
||||
| DefinitionKind::TypeVarTuple(_) => DefinitionCategory::DeclarationAndBinding,
|
||||
// a parameter always binds a value, but is only a declaration if annotated
|
||||
DefinitionKind::Parameter(parameter) => {
|
||||
if parameter.annotation.is_some() {
|
||||
@@ -696,3 +735,21 @@ impl From<&ast::ExceptHandlerExceptHandler> for DefinitionNodeKey {
|
||||
Self(NodeKey::from_node(handler))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamTypeVar> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamTypeVar) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamParamSpec> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamParamSpec) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::TypeParamTypeVarTuple> for DefinitionNodeKey {
|
||||
fn from(value: &ast::TypeParamTypeVarTuple) -> Self {
|
||||
Self(NodeKey::from_node(value))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -210,7 +210,7 @@ impl ScopeKind {
|
||||
}
|
||||
|
||||
/// Symbol table for a specific [`Scope`].
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SymbolTable {
|
||||
/// The symbols in this scope.
|
||||
symbols: IndexVec<ScopedSymbolId, Symbol>,
|
||||
@@ -220,13 +220,6 @@ pub struct SymbolTable {
|
||||
}
|
||||
|
||||
impl SymbolTable {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
symbols: IndexVec::new(),
|
||||
symbols_by_name: SymbolMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn shrink_to_fit(&mut self) {
|
||||
self.symbols.shrink_to_fit();
|
||||
}
|
||||
@@ -278,18 +271,12 @@ impl PartialEq for SymbolTable {
|
||||
|
||||
impl Eq for SymbolTable {}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Default)]
|
||||
pub(super) struct SymbolTableBuilder {
|
||||
table: SymbolTable,
|
||||
}
|
||||
|
||||
impl SymbolTableBuilder {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
table: SymbolTable::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, name: Name) -> (ScopedSymbolId, bool) {
|
||||
let hash = SymbolTable::hash_name(&name);
|
||||
let entry = self
|
||||
|
||||
@@ -277,7 +277,7 @@ impl<'db> UseDefMap<'db> {
|
||||
|
||||
pub(crate) fn use_boundness(&self, use_id: ScopedUseId) -> Boundness {
|
||||
if self.bindings_by_use[use_id].may_be_unbound() {
|
||||
Boundness::MayBeUnbound
|
||||
Boundness::PossiblyUnbound
|
||||
} else {
|
||||
Boundness::Bound
|
||||
}
|
||||
@@ -292,7 +292,7 @@ impl<'db> UseDefMap<'db> {
|
||||
|
||||
pub(crate) fn public_boundness(&self, symbol: ScopedSymbolId) -> Boundness {
|
||||
if self.public_symbols[symbol].may_be_unbound() {
|
||||
Boundness::MayBeUnbound
|
||||
Boundness::PossiblyUnbound
|
||||
} else {
|
||||
Boundness::Bound
|
||||
}
|
||||
@@ -459,10 +459,6 @@ pub(super) struct UseDefMapBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> UseDefMapBuilder<'db> {
|
||||
pub(super) fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
|
||||
let new_symbol = self.symbol_states.push(SymbolState::undefined());
|
||||
debug_assert_eq!(symbol, new_symbol);
|
||||
|
||||
@@ -6,7 +6,7 @@ use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::types::{binding_ty, infer_scope_types, Type};
|
||||
use crate::Db;
|
||||
@@ -54,7 +54,7 @@ impl HasTy for ast::ExpressionRef<'_> {
|
||||
let file_scope = index.expression_scope_id(*self);
|
||||
let scope = file_scope.to_scope_id(model.db, model.file);
|
||||
|
||||
let expression_id = self.scoped_ast_id(model.db, scope);
|
||||
let expression_id = self.scoped_expression_id(model.db, scope);
|
||||
infer_scope_types(model.db, scope).expression_ty(expression_id)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -732,7 +732,20 @@ mod tests {
|
||||
let system = TestSystem::default();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(_))
|
||||
Err(SitePackagesDiscoveryError::VenvDirCanonicalizationError(..))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reject_venv_that_is_not_a_directory() {
|
||||
let system = TestSystem::default();
|
||||
system
|
||||
.memory_file_system()
|
||||
.write_file("/.venv", "")
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
VirtualEnvironment::new("/.venv", &system),
|
||||
Err(SitePackagesDiscoveryError::VenvDirIsNotADirectory(..))
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
@@ -8,32 +8,38 @@ use crate::Db;
|
||||
|
||||
/// Enumeration of various core stdlib modules, for which we have dedicated Salsa queries.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum CoreStdlibModule {
|
||||
pub(crate) enum CoreStdlibModule {
|
||||
Builtins,
|
||||
Types,
|
||||
Typeshed,
|
||||
TypingExtensions,
|
||||
Typing,
|
||||
Sys,
|
||||
}
|
||||
|
||||
impl CoreStdlibModule {
|
||||
fn name(self) -> ModuleName {
|
||||
let module_name = match self {
|
||||
pub(crate) const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Builtins => "builtins",
|
||||
Self::Types => "types",
|
||||
Self::Typing => "typing",
|
||||
Self::Typeshed => "_typeshed",
|
||||
Self::TypingExtensions => "typing_extensions",
|
||||
};
|
||||
ModuleName::new_static(module_name)
|
||||
.unwrap_or_else(|| panic!("{module_name} should be a valid module name!"))
|
||||
Self::Sys => "sys",
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn name(self) -> ModuleName {
|
||||
let self_as_str = self.as_str();
|
||||
ModuleName::new_static(self_as_str)
|
||||
.unwrap_or_else(|| panic!("{self_as_str} should be a valid module name!"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in a given core module
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the given core module cannot be resolved for some reason
|
||||
fn core_module_symbol<'db>(
|
||||
pub(crate) fn core_module_symbol<'db>(
|
||||
db: &'db dyn Db,
|
||||
core_module: CoreStdlibModule,
|
||||
symbol: &str,
|
||||
@@ -51,29 +57,14 @@ pub(crate) fn builtins_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db>
|
||||
core_module_symbol(db, CoreStdlibModule::Builtins, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `types` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `types` module isn't available for some reason.
|
||||
#[inline]
|
||||
pub(crate) fn types_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Types, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `typing` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `typing` module isn't available for some reason.
|
||||
#[inline]
|
||||
#[allow(dead_code)] // currently only used in tests
|
||||
#[cfg(test)]
|
||||
pub(crate) fn typing_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Typing, symbol)
|
||||
}
|
||||
/// Lookup the type of `symbol` in the `_typeshed` module namespace.
|
||||
///
|
||||
/// Returns `Symbol::Unbound` if the `_typeshed` module isn't available for some reason.
|
||||
#[inline]
|
||||
pub(crate) fn typeshed_symbol<'db>(db: &'db dyn Db, symbol: &str) -> Symbol<'db> {
|
||||
core_module_symbol(db, CoreStdlibModule::Typeshed, symbol)
|
||||
}
|
||||
|
||||
/// Lookup the type of `symbol` in the `typing_extensions` module namespace.
|
||||
///
|
||||
|
||||
@@ -6,7 +6,16 @@ use crate::{
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum Boundness {
    Bound,
    MayBeUnbound,
    PossiblyUnbound,
}

impl Boundness {
    pub(crate) fn or(self, other: Boundness) -> Boundness {
        match (self, other) {
            (Boundness::Bound, _) | (_, Boundness::Bound) => Boundness::Bound,
            (Boundness::PossiblyUnbound, Boundness::PossiblyUnbound) => Boundness::PossiblyUnbound,
        }
    }
}

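For reference, a Python analogue (illustration only) of the join implemented by `Boundness::or` above: the combined state is `Bound` as soon as either input is `Bound`, and stays `PossiblyUnbound` otherwise.

```py
from enum import Enum

class Boundness(Enum):
    BOUND = "bound"
    POSSIBLY_UNBOUND = "possibly-unbound"

    def or_(self, other: "Boundness") -> "Boundness":
        # Bound wins over possibly-unbound, mirroring the Rust match above.
        if Boundness.BOUND in (self, other):
            return Boundness.BOUND
        return Boundness.POSSIBLY_UNBOUND

assert Boundness.POSSIBLY_UNBOUND.or_(Boundness.BOUND) is Boundness.BOUND
assert Boundness.POSSIBLY_UNBOUND.or_(Boundness.POSSIBLY_UNBOUND) is Boundness.POSSIBLY_UNBOUND
```
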
/// The result of a symbol lookup, which can either be a (possibly unbound) type
|
||||
@@ -17,14 +26,14 @@ pub(crate) enum Boundness {
|
||||
/// bound = 1
|
||||
///
|
||||
/// if flag:
|
||||
/// maybe_unbound = 2
|
||||
/// possibly_unbound = 2
|
||||
/// ```
|
||||
///
|
||||
/// If we look up symbols in this scope, we would get the following results:
|
||||
/// ```rs
|
||||
/// bound: Symbol::Type(Type::IntLiteral(1), Boundness::Bound),
|
||||
/// maybe_unbound: Symbol::Type(Type::IntLiteral(2), Boundness::MayBeUnbound),
|
||||
/// non_existent: Symbol::Unbound,
|
||||
/// bound: Symbol::Type(Type::IntLiteral(1), Boundness::Bound),
|
||||
/// possibly_unbound: Symbol::Type(Type::IntLiteral(2), Boundness::PossiblyUnbound),
|
||||
/// non_existent: Symbol::Unbound,
|
||||
/// ```
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub(crate) enum Symbol<'db> {
|
||||
@@ -37,21 +46,18 @@ impl<'db> Symbol<'db> {
|
||||
matches!(self, Symbol::Unbound)
|
||||
}
|
||||
|
||||
pub(crate) fn may_be_unbound(&self) -> bool {
|
||||
pub(crate) fn possibly_unbound(&self) -> bool {
|
||||
match self {
|
||||
Symbol::Type(_, Boundness::MayBeUnbound) | Symbol::Unbound => true,
|
||||
Symbol::Type(_, Boundness::PossiblyUnbound) | Symbol::Unbound => true,
|
||||
Symbol::Type(_, Boundness::Bound) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn unwrap_or_unknown(&self) -> Type<'db> {
|
||||
match self {
|
||||
Symbol::Type(ty, _) => *ty,
|
||||
Symbol::Unbound => Type::Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_type(&self) -> Option<Type<'db>> {
|
||||
/// Returns the type of the symbol, ignoring possible unboundness.
|
||||
///
|
||||
/// If the symbol is *definitely* unbound, this function will return `None`. Otherwise,
|
||||
/// if there is at least one control-flow path where the symbol is bound, return the type.
|
||||
pub(crate) fn ignore_possibly_unbound(&self) -> Option<Type<'db>> {
|
||||
match self {
|
||||
Symbol::Type(ty, _) => Some(*ty),
|
||||
Symbol::Unbound => None,
|
||||
@@ -61,28 +67,80 @@ impl<'db> Symbol<'db> {
|
||||
#[cfg(test)]
|
||||
#[track_caller]
|
||||
pub(crate) fn expect_type(self) -> Type<'db> {
|
||||
self.as_type()
|
||||
self.ignore_possibly_unbound()
|
||||
.expect("Expected a (possibly unbound) type, not an unbound symbol")
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn replace_unbound_with(
|
||||
self,
|
||||
db: &'db dyn Db,
|
||||
replacement: &Symbol<'db>,
|
||||
) -> Symbol<'db> {
|
||||
match replacement {
|
||||
Symbol::Type(replacement, _) => Symbol::Type(
|
||||
match self {
|
||||
Symbol::Type(ty, Boundness::Bound) => ty,
|
||||
Symbol::Type(ty, Boundness::MayBeUnbound) => {
|
||||
UnionType::from_elements(db, [*replacement, ty])
|
||||
}
|
||||
Symbol::Unbound => *replacement,
|
||||
},
|
||||
Boundness::Bound,
|
||||
),
|
||||
pub(crate) fn or_fall_back_to(self, db: &'db dyn Db, fallback: &Symbol<'db>) -> Symbol<'db> {
|
||||
match fallback {
|
||||
Symbol::Type(fallback_ty, fallback_boundness) => match self {
|
||||
Symbol::Type(_, Boundness::Bound) => self,
|
||||
Symbol::Type(ty, boundness @ Boundness::PossiblyUnbound) => Symbol::Type(
|
||||
UnionType::from_elements(db, [*fallback_ty, ty]),
|
||||
fallback_boundness.or(boundness),
|
||||
),
|
||||
Symbol::Unbound => fallback.clone(),
|
||||
},
|
||||
Symbol::Unbound => self,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::types::tests::setup_db;
|
||||
|
||||
#[test]
|
||||
fn test_symbol_or_fall_back_to() {
|
||||
use Boundness::{Bound, PossiblyUnbound};
|
||||
|
||||
let db = setup_db();
|
||||
let ty1 = Type::IntLiteral(1);
|
||||
let ty2 = Type::IntLiteral(2);
|
||||
|
||||
// Start from an unbound symbol
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Unbound
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Type(ty1, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Unbound.or_fall_back_to(&db, &Symbol::Type(ty1, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
|
||||
// Start from a possibly unbound symbol
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound)
|
||||
.or_fall_back_to(&db, &Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty2, ty1]), PossiblyUnbound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, PossiblyUnbound).or_fall_back_to(&db, &Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(UnionType::from_elements(&db, [ty2, ty1]), Bound)
|
||||
);
|
||||
|
||||
// Start from a definitely bound symbol
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Unbound),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Type(ty2, PossiblyUnbound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
assert_eq!(
|
||||
Symbol::Type(ty1, Bound).or_fall_back_to(&db, &Symbol::Type(ty2, Bound)),
|
||||
Symbol::Type(ty1, Bound)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -128,7 +128,7 @@ impl<'db> IntersectionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
intersections: vec![InnerIntersectionBuilder::new()],
|
||||
intersections: vec![InnerIntersectionBuilder::default()],
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,10 +231,6 @@ struct InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
|
||||
impl<'db> InnerIntersectionBuilder<'db> {
|
||||
fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Adds a positive type to this intersection.
|
||||
fn add_positive(&mut self, db: &'db dyn Db, new_positive: Type<'db>) {
|
||||
if let Type::Intersection(other) = new_positive {
|
||||
@@ -253,7 +249,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.find(|element| element.is_boolean_literal())
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!value));
|
||||
return;
|
||||
}
|
||||
@@ -272,7 +268,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
}
|
||||
// A & B = Never if A and B are disjoint
|
||||
if new_positive.is_disjoint_from(db, *existing_positive) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -285,7 +281,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for (index, existing_negative) in self.negative.iter().enumerate() {
|
||||
// S & ~T = Never if S <: T
|
||||
if new_positive.is_subtype_of(db, *existing_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -326,7 +322,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
.iter()
|
||||
.any(|pos| *pos == KnownClass::Bool.to_instance(db)) =>
|
||||
{
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::BooleanLiteral(!bool));
|
||||
}
|
||||
_ => {
|
||||
@@ -348,7 +344,7 @@ impl<'db> InnerIntersectionBuilder<'db> {
|
||||
for existing_positive in &self.positive {
|
||||
// S & ~T = Never if S <: T
|
||||
if existing_positive.is_subtype_of(db, new_negative) {
|
||||
*self = Self::new();
|
||||
*self = Self::default();
|
||||
self.positive.insert(Type::Never);
|
||||
return;
|
||||
}
|
||||
@@ -383,7 +379,7 @@ mod tests {
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::stdlib::typing_symbol;
|
||||
use crate::types::{global_symbol, KnownClass, StringLiteralType, UnionBuilder};
|
||||
use crate::types::{global_symbol, KnownClass, UnionBuilder};
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
@@ -775,7 +771,7 @@ mod tests {
|
||||
.build();
|
||||
assert_eq!(ty, s);
|
||||
|
||||
let literal = Type::StringLiteral(StringLiteralType::new(&db, "a"));
|
||||
let literal = Type::string_literal(&db, "a");
|
||||
let expected = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(literal)
|
||||
@@ -878,7 +874,7 @@ mod tests {
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(s)
|
||||
.add_negative(Type::StringLiteral(StringLiteralType::new(&db, "a")))
|
||||
.add_negative(Type::string_literal(&db, "a"))
|
||||
.add_negative(t)
|
||||
.build();
|
||||
assert_eq!(ty, Type::Never);
|
||||
@@ -912,7 +908,7 @@ mod tests {
|
||||
let db = setup_db();
|
||||
|
||||
let t_p = KnownClass::Int.to_instance(&db);
|
||||
let t_n = Type::StringLiteral(StringLiteralType::new(&db, "t_n"));
|
||||
let t_n = Type::string_literal(&db, "t_n");
|
||||
|
||||
let ty = IntersectionBuilder::new(&db)
|
||||
.add_positive(t_p)
|
||||
|
||||
@@ -73,10 +73,6 @@ pub struct TypeCheckDiagnostics {
|
||||
}
|
||||
|
||||
impl TypeCheckDiagnostics {
|
||||
pub fn new() -> Self {
|
||||
Self { inner: Vec::new() }
|
||||
}
|
||||
|
||||
pub(super) fn push(&mut self, diagnostic: TypeCheckDiagnostic) {
|
||||
self.inner.push(Arc::new(diagnostic));
|
||||
}
|
||||
@@ -148,7 +144,7 @@ impl<'db> TypeCheckDiagnosticsBuilder<'db> {
|
||||
Self {
|
||||
db,
|
||||
file,
|
||||
diagnostics: TypeCheckDiagnostics::new(),
|
||||
diagnostics: TypeCheckDiagnostics::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
//! Display implementations for types.
|
||||
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::fmt::{self, Display, Formatter, Write};
|
||||
|
||||
use ruff_db::display::FormatterJoinExtension;
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_python_literal::escape::AsciiEscape;
|
||||
|
||||
use crate::types::{
|
||||
ClassLiteralType, InstanceType, IntersectionType, KnownClass, SubclassOfType, Type, UnionType,
|
||||
ClassLiteralType, InstanceType, IntersectionType, KnownClass, StringLiteralType,
|
||||
SubclassOfType, Type, UnionType,
|
||||
};
|
||||
use crate::Db;
|
||||
use rustc_hash::FxHashMap;
|
||||
@@ -66,10 +67,13 @@ impl Display for DisplayRepresentation<'_> {
|
||||
Type::Any => f.write_str("Any"),
|
||||
Type::Never => f.write_str("Never"),
|
||||
Type::Unknown => f.write_str("Unknown"),
|
||||
Type::Instance(InstanceType { class })
|
||||
if class.is_known(self.db, KnownClass::NoneType) =>
|
||||
{
|
||||
f.write_str("None")
|
||||
Type::Instance(InstanceType { class }) => {
|
||||
let representation = match class.known(self.db) {
|
||||
Some(KnownClass::NoneType) => "None",
|
||||
Some(KnownClass::NoDefaultType) => "NoDefault",
|
||||
_ => class.name(self.db),
|
||||
};
|
||||
f.write_str(representation)
|
||||
}
|
||||
// `[Type::Todo]`'s display should be explicit that is not a valid display of
|
||||
// any other type
|
||||
@@ -82,16 +86,13 @@ impl Display for DisplayRepresentation<'_> {
|
||||
Type::SubclassOf(SubclassOfType { class }) => {
|
||||
write!(f, "type[{}]", class.name(self.db))
|
||||
}
|
||||
Type::Instance(InstanceType { class }) => f.write_str(class.name(self.db)),
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.as_str()),
|
||||
Type::KnownInstance(known_instance) => f.write_str(known_instance.repr(self.db)),
|
||||
Type::FunctionLiteral(function) => f.write_str(function.name(self.db)),
|
||||
Type::Union(union) => union.display(self.db).fmt(f),
|
||||
Type::Intersection(intersection) => intersection.display(self.db).fmt(f),
|
||||
Type::IntLiteral(n) => n.fmt(f),
|
||||
Type::BooleanLiteral(boolean) => f.write_str(if boolean { "True" } else { "False" }),
|
||||
Type::StringLiteral(string) => {
|
||||
write!(f, r#""{}""#, string.value(self.db).replace('"', r#"\""#))
|
||||
}
|
||||
Type::StringLiteral(string) => string.display(self.db).fmt(f),
|
||||
Type::LiteralString => f.write_str("LiteralString"),
|
||||
Type::BytesLiteral(bytes) => {
|
||||
let escape =
|
||||
@@ -326,15 +327,40 @@ impl<'db> Display for DisplayTypeArray<'_, 'db> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> StringLiteralType<'db> {
|
||||
fn display(&'db self, db: &'db dyn Db) -> DisplayStringLiteralType<'db> {
|
||||
DisplayStringLiteralType { db, ty: self }
|
||||
}
|
||||
}
|
||||
|
||||
struct DisplayStringLiteralType<'db> {
|
||||
ty: &'db StringLiteralType<'db>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl Display for DisplayStringLiteralType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
let value = self.ty.value(self.db);
|
||||
f.write_char('"')?;
|
||||
for ch in value.chars() {
|
||||
match ch {
|
||||
// `escape_debug` will escape even single quotes, which is not necessary for our
|
||||
// use case as we are already using double quotes to wrap the string.
|
||||
'\'' => f.write_char('\'')?,
|
||||
_ => write!(f, "{}", ch.escape_debug())?,
|
||||
}
|
||||
}
|
||||
f.write_char('"')
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::types::{
|
||||
global_symbol, BytesLiteralType, SliceLiteralType, StringLiteralType, Type, UnionType,
|
||||
};
|
||||
use crate::types::{global_symbol, SliceLiteralType, StringLiteralType, Type, UnionType};
|
||||
use crate::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
@@ -380,12 +406,12 @@ mod tests {
Type::Unknown,
Type::IntLiteral(-1),
global_symbol(&db, mod_file, "A").expect_type(),
Type::StringLiteral(StringLiteralType::new(&db, "A")),
Type::BytesLiteral(BytesLiteralType::new(&db, [0u8].as_slice())),
Type::BytesLiteral(BytesLiteralType::new(&db, [7u8].as_slice())),
Type::string_literal(&db, "A"),
Type::bytes_literal(&db, &[0u8]),
Type::bytes_literal(&db, &[7u8]),
Type::IntLiteral(0),
Type::IntLiteral(1),
Type::StringLiteral(StringLiteralType::new(&db, "B")),
Type::string_literal(&db, "B"),
global_symbol(&db, mod_file, "foo").expect_type(),
global_symbol(&db, mod_file, "bar").expect_type(),
global_symbol(&db, mod_file, "B").expect_type(),
@@ -451,4 +477,28 @@ mod tests {
"slice[None, None, Literal[2]]"
);
}

#[test]
fn string_literal_display() {
let db = setup_db();

assert_eq!(
Type::StringLiteral(StringLiteralType::new(&db, r"\n"))
.display(&db)
.to_string(),
r#"Literal["\\n"]"#
);
assert_eq!(
Type::StringLiteral(StringLiteralType::new(&db, "'"))
.display(&db)
.to_string(),
r#"Literal["'"]"#
);
assert_eq!(
Type::StringLiteral(StringLiteralType::new(&db, r#"""#))
.display(&db)
.to_string(),
r#"Literal["\""]"#
);
}
}
File diff suppressed because it is too large
@@ -1,7 +1,6 @@
|
||||
use std::collections::VecDeque;
|
||||
use std::ops::Deref;
|
||||
|
||||
use indexmap::IndexSet;
|
||||
use itertools::Either;
|
||||
use rustc_hash::FxHashSet;
|
||||
|
||||
@@ -26,22 +25,30 @@ impl<'db> Mro<'db> {
|
||||
/// (We emit a diagnostic warning about the runtime `TypeError` in
|
||||
/// [`super::infer::TypeInferenceBuilder::infer_region_scope`].)
|
||||
pub(super) fn of_class(db: &'db dyn Db, class: Class<'db>) -> Result<Self, MroError<'db>> {
|
||||
Self::of_class_impl(db, class).map_err(|error_kind| {
|
||||
let fallback_mro = Self::from([
|
||||
ClassBase::Class(class),
|
||||
ClassBase::Unknown,
|
||||
ClassBase::object(db),
|
||||
]);
|
||||
MroError {
|
||||
kind: error_kind,
|
||||
fallback_mro,
|
||||
}
|
||||
Self::of_class_impl(db, class).map_err(|error_kind| MroError {
|
||||
kind: error_kind,
|
||||
fallback_mro: Self::from_error(db, class),
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn from_error(db: &'db dyn Db, class: Class<'db>) -> Self {
|
||||
Self::from([
|
||||
ClassBase::Class(class),
|
||||
ClassBase::Unknown,
|
||||
ClassBase::object(db),
|
||||
])
|
||||
}
|
||||
|
||||
fn of_class_impl(db: &'db dyn Db, class: Class<'db>) -> Result<Self, MroErrorKind<'db>> {
|
||||
let class_bases = class.explicit_bases(db);
|
||||
|
||||
if !class_bases.is_empty() && class.is_cyclically_defined(db) {
|
||||
// We emit errors for cyclically defined classes elsewhere.
|
||||
// It's important that we don't even try to infer the MRO for a cyclically defined class,
|
||||
// or we'll end up in an infinite loop.
|
||||
return Ok(Mro::from_error(db, class));
|
||||
}
|
||||
|
||||
match class_bases {
|
||||
// `builtins.object` is the special case:
|
||||
// the only class in Python that has an MRO with length <2
|
||||
@@ -77,11 +84,6 @@ impl<'db> Mro<'db> {
|
||||
Err(MroErrorKind::InvalidBases(bases_info))
|
||||
},
|
||||
|single_base| {
|
||||
if let ClassBase::Class(class_base) = single_base {
|
||||
if class_is_cyclically_defined(db, class_base) {
|
||||
return Err(MroErrorKind::CyclicClassDefinition);
|
||||
}
|
||||
}
|
||||
let mro = std::iter::once(ClassBase::Class(class))
|
||||
.chain(single_base.mro(db))
|
||||
.collect();
|
||||
@@ -96,10 +98,6 @@ impl<'db> Mro<'db> {
|
||||
// what MRO Python will give this class at runtime
|
||||
// (if an MRO is indeed resolvable at all!)
|
||||
multiple_bases => {
|
||||
if class_is_cyclically_defined(db, class) {
|
||||
return Err(MroErrorKind::CyclicClassDefinition);
|
||||
}
|
||||
|
||||
let mut valid_bases = vec![];
|
||||
let mut invalid_bases = vec![];
|
||||
|
||||
@@ -282,13 +280,6 @@ pub(super) enum MroErrorKind<'db> {
|
||||
/// Each index is the index of a node representing an invalid base.
|
||||
InvalidBases(Box<[(usize, Type<'db>)]>),
|
||||
|
||||
/// The class inherits from itself!
|
||||
///
|
||||
/// This is very unlikely to happen in working real-world code,
|
||||
/// but it's important to explicitly account for it.
|
||||
/// If we don't, there's a possibility of an infinite loop and a panic.
|
||||
CyclicClassDefinition,
|
||||
|
||||
/// The class has one or more duplicate bases.
|
||||
///
|
||||
/// This variant records the indices and [`Class`]es
|
||||
@@ -349,7 +340,7 @@ impl<'db> ClassBase<'db> {
|
||||
/// Return a `ClassBase` representing the class `builtins.object`
|
||||
fn object(db: &'db dyn Db) -> Self {
|
||||
KnownClass::Object
|
||||
.to_class(db)
|
||||
.to_class_literal(db)
|
||||
.into_class_literal()
|
||||
.map_or(Self::Unknown, |ClassLiteralType { class }| {
|
||||
Self::Class(class)
|
||||
@@ -380,7 +371,9 @@ impl<'db> ClassBase<'db> {
|
||||
| Type::ModuleLiteral(_)
|
||||
| Type::SubclassOf(_) => None,
|
||||
Type::KnownInstance(known_instance) => match known_instance {
|
||||
KnownInstanceType::Literal => None,
|
||||
KnownInstanceType::TypeVar(_)
|
||||
| KnownInstanceType::Literal
|
||||
| KnownInstanceType::Optional => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -414,7 +407,7 @@ impl<'db> From<ClassBase<'db>> for Type<'db> {
|
||||
ClassBase::Any => Type::Any,
|
||||
ClassBase::Todo => Type::Todo,
|
||||
ClassBase::Unknown => Type::Unknown,
|
||||
ClassBase::Class(class) => Type::ClassLiteral(ClassLiteralType { class }),
|
||||
ClassBase::Class(class) => Type::class_literal(class),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -460,46 +453,3 @@ fn c3_merge(mut sequences: Vec<VecDeque<ClassBase>>) -> Option<Mro> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return `true` if this class appears to be a cyclic definition,
|
||||
/// i.e., it inherits either directly or indirectly from itself.
|
||||
///
|
||||
/// A class definition like this will fail at runtime,
|
||||
/// but we must be resilient to it or we could panic.
|
||||
fn class_is_cyclically_defined(db: &dyn Db, class: Class) -> bool {
|
||||
fn is_cyclically_defined_recursive<'db>(
|
||||
db: &'db dyn Db,
|
||||
class: Class<'db>,
|
||||
classes_to_watch: &mut IndexSet<Class<'db>>,
|
||||
) -> bool {
|
||||
if !classes_to_watch.insert(class) {
|
||||
return true;
|
||||
}
|
||||
for explicit_base_class in class
|
||||
.explicit_bases(db)
|
||||
.iter()
|
||||
.copied()
|
||||
.filter_map(Type::into_class_literal)
|
||||
.map(|ClassLiteralType { class }| class)
|
||||
{
|
||||
// Each base must be considered in isolation.
|
||||
// This is due to the fact that if a class uses multiple inheritance,
|
||||
// there could easily be a situation where two bases have the same class in their MROs;
|
||||
// that isn't enough to constitute the class being cyclically defined.
|
||||
let classes_to_watch_len = classes_to_watch.len();
|
||||
if is_cyclically_defined_recursive(db, explicit_base_class, classes_to_watch) {
|
||||
return true;
|
||||
}
|
||||
classes_to_watch.truncate(classes_to_watch_len);
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
class
|
||||
.explicit_bases(db)
|
||||
.iter()
|
||||
.copied()
|
||||
.filter_map(Type::into_class_literal)
|
||||
.map(|ClassLiteralType { class }| class)
|
||||
.any(|base_class| is_cyclically_defined_recursive(db, base_class, &mut IndexSet::default()))
|
||||
}
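As a rough illustration (not taken from this diff's test suite), the kind of Python code this cycle check guards against looks something like the following; at runtime it fails, but a type checker still has to resolve the bases without recursing forever:

class Foo(Foo): ...  # the base list refers (statically) back to the class being defined;
                     # at runtime this raises NameError, since `Foo` is unbound when the
                     # bases are evaluated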
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use crate::semantic_index::ast_ids::HasScopedAstId;
|
||||
use crate::semantic_index::ast_ids::HasScopedExpressionId;
|
||||
use crate::semantic_index::constraint::{Constraint, ConstraintNode, PatternConstraint};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId, SymbolTable};
|
||||
use crate::semantic_index::symbol_table;
|
||||
use crate::types::{
|
||||
infer_expression_types, ClassLiteralType, InstanceType, IntersectionBuilder, KnownClass,
|
||||
infer_expression_types, ClassLiteralType, IntersectionBuilder, KnownClass,
|
||||
KnownConstraintFunction, KnownFunction, Truthiness, Type, UnionBuilder,
|
||||
};
|
||||
use crate::Db;
|
||||
@@ -257,17 +257,26 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
expression: Expression<'db>,
|
||||
is_positive: bool,
|
||||
) -> Option<NarrowingConstraints<'db>> {
|
||||
fn is_narrowing_target_candidate(expr: &ast::Expr) -> bool {
|
||||
matches!(expr, ast::Expr::Name(_) | ast::Expr::Call(_))
|
||||
}
|
||||
|
||||
let ast::ExprCompare {
|
||||
range: _,
|
||||
left,
|
||||
ops,
|
||||
comparators,
|
||||
} = expr_compare;
|
||||
if !left.is_name_expr() && comparators.iter().all(|c| !c.is_name_expr()) {
|
||||
// If none of the comparators are name expressions,
|
||||
// we have no symbol to narrow down the type of.
|
||||
|
||||
// Performance optimization: early return if there are no potential narrowing targets.
|
||||
if !is_narrowing_target_candidate(left)
|
||||
&& comparators
|
||||
.iter()
|
||||
.all(|c| !is_narrowing_target_candidate(c))
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
if !is_positive && comparators.len() > 1 {
|
||||
// We can't negate a constraint made by a multi-comparator expression, since we can't
|
||||
// know which comparison part is the one being negated.
|
||||
@@ -283,42 +292,85 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
.tuple_windows::<(&ruff_python_ast::Expr, &ruff_python_ast::Expr)>();
|
||||
let mut constraints = NarrowingConstraints::default();
|
||||
for (op, (left, right)) in std::iter::zip(&**ops, comparator_tuples) {
|
||||
if let ast::Expr::Name(ast::ExprName {
|
||||
range: _,
|
||||
id,
|
||||
ctx: _,
|
||||
}) = left
|
||||
{
|
||||
// SAFETY: we should always have a symbol for every Name node.
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
let rhs_ty = inference.expression_ty(right.scoped_ast_id(self.db, scope));
|
||||
let rhs_ty = inference.expression_ty(right.scoped_expression_id(self.db, scope));
|
||||
|
||||
match if is_positive { *op } else { op.negate() } {
|
||||
ast::CmpOp::IsNot => {
|
||||
if rhs_ty.is_singleton(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
} else {
|
||||
// Non-singletons cannot be safely narrowed using `is not`
|
||||
match left {
|
||||
ast::Expr::Name(ast::ExprName {
|
||||
range: _,
|
||||
id,
|
||||
ctx: _,
|
||||
}) => {
|
||||
let symbol = self
|
||||
.symbols()
|
||||
.symbol_id_by_name(id)
|
||||
.expect("Should always have a symbol for every Name node");
|
||||
|
||||
match if is_positive { *op } else { op.negate() } {
|
||||
ast::CmpOp::IsNot => {
|
||||
if rhs_ty.is_singleton(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
} else {
|
||||
// Non-singletons cannot be safely narrowed using `is not`
|
||||
}
|
||||
}
|
||||
}
|
||||
ast::CmpOp::Is => {
|
||||
constraints.insert(symbol, rhs_ty);
|
||||
}
|
||||
ast::CmpOp::NotEq => {
|
||||
if rhs_ty.is_single_valued(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
ast::CmpOp::Is => {
|
||||
constraints.insert(symbol, rhs_ty);
|
||||
}
|
||||
ast::CmpOp::NotEq => {
|
||||
if rhs_ty.is_single_valued(self.db) {
|
||||
let ty = IntersectionBuilder::new(self.db)
|
||||
.add_negative(rhs_ty)
|
||||
.build();
|
||||
constraints.insert(symbol, ty);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// TODO other comparison types
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// TODO other comparison types
|
||||
}
|
||||
}
|
||||
ast::Expr::Call(ast::ExprCall {
|
||||
range: _,
|
||||
func: callable,
|
||||
arguments:
|
||||
ast::Arguments {
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
},
|
||||
}) if rhs_ty.is_class_literal() && keywords.is_empty() => {
|
||||
let [ast::Expr::Name(ast::ExprName { id, .. })] = &**args else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let is_valid_constraint = if is_positive {
|
||||
op == &ast::CmpOp::Is
|
||||
} else {
|
||||
op == &ast::CmpOp::IsNot
|
||||
};
|
||||
|
||||
if !is_valid_constraint {
|
||||
continue;
|
||||
}
|
||||
|
||||
let callable_ty =
|
||||
inference.expression_ty(callable.scoped_expression_id(self.db, scope));
|
||||
|
||||
if callable_ty
|
||||
.into_class_literal()
|
||||
.is_some_and(|c| c.class.is_known(self.db, KnownClass::Type))
|
||||
{
|
||||
let symbol = self
|
||||
.symbols()
|
||||
.symbol_id_by_name(id)
|
||||
.expect("Should always have a symbol for every Name node");
|
||||
constraints.insert(symbol, rhs_ty.to_instance(self.db));
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
Some(constraints)
|
||||
@@ -336,7 +388,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
// TODO: add support for PEP 604 union types on the right hand side of `isinstance`
|
||||
// and `issubclass`, for example `isinstance(x, str | (int | float))`.
|
||||
match inference
|
||||
.expression_ty(expr_call.func.scoped_ast_id(self.db, scope))
|
||||
.expression_ty(expr_call.func.scoped_expression_id(self.db, scope))
|
||||
.into_function_literal()
|
||||
.and_then(|f| f.known(self.db))
|
||||
.and_then(KnownFunction::constraint_function)
|
||||
@@ -348,19 +400,17 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
let symbol = self.symbols().symbol_id_by_name(id).unwrap();
|
||||
|
||||
let class_info_ty =
|
||||
inference.expression_ty(class_info.scoped_ast_id(self.db, scope));
|
||||
inference.expression_ty(class_info.scoped_expression_id(self.db, scope));
|
||||
|
||||
let to_constraint = match function {
|
||||
KnownConstraintFunction::IsInstance => {
|
||||
|class_literal: ClassLiteralType<'db>| {
|
||||
Type::Instance(InstanceType {
|
||||
class: class_literal.class,
|
||||
})
|
||||
Type::instance(class_literal.class)
|
||||
}
|
||||
}
|
||||
KnownConstraintFunction::IsSubclass => {
|
||||
|class_literal: ClassLiteralType<'db>| {
|
||||
Type::SubclassOf(class_literal.to_subclass_of_type())
|
||||
Type::subclass_of(class_literal.class)
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -416,7 +466,7 @@ impl<'db> NarrowingConstraintsBuilder<'db> {
|
||||
// filter out arms with statically known truthiness
|
||||
.filter(|expr| {
|
||||
inference
|
||||
.expression_ty(expr.scoped_ast_id(self.db, scope))
|
||||
.expression_ty(expr.scoped_expression_id(self.db, scope))
|
||||
.bool(self.db)
|
||||
!= match expr_bool_op.op {
|
||||
BoolOp::And => Truthiness::AlwaysTrue,
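The Rust above implements narrowing for `is`, `is not`, `!=` and `type(...)` comparisons. In Python terms, the behaviour it targets looks roughly like this sketch; the function and variable names are hypothetical, not taken from this repository:

def f(x: int | None, y: object) -> None:
    if x is None:
        ...  # x is narrowed to None here
    if x is not None:
        ...  # None is a singleton, so `is not` can safely remove it from x's type
    if x != 0:
        ...  # `!=` narrows only against single-valued types such as literals
    if type(y) is bool:
        ...  # the `type(y) is <class>` form narrows y to an instance of that class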
|
||||
|
||||
crates/red_knot_python_semantic/src/types/signatures.rs (new file, 480 lines)
@@ -0,0 +1,480 @@
|
||||
#![allow(dead_code)]
|
||||
use super::{definition_expression_ty, Type};
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::Db;
|
||||
use ruff_python_ast::{self as ast, name::Name};
|
||||
|
||||
/// A typed callable signature.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct Signature<'db> {
|
||||
parameters: Parameters<'db>,
|
||||
|
||||
/// Annotated return type (Unknown if no annotation.)
|
||||
pub(crate) return_ty: Type<'db>,
|
||||
}
|
||||
|
||||
impl<'db> Signature<'db> {
|
||||
/// Return a todo signature: (*args: Todo, **kwargs: Todo) -> Todo
|
||||
pub(crate) fn todo() -> Self {
|
||||
Self {
|
||||
parameters: Parameters::todo(),
|
||||
return_ty: Type::Todo,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a typed signature from a function definition.
|
||||
pub(super) fn from_function(
|
||||
db: &'db dyn Db,
|
||||
definition: Definition<'db>,
|
||||
function_node: &'db ast::StmtFunctionDef,
|
||||
) -> Self {
|
||||
let return_ty = function_node
|
||||
.returns
|
||||
.as_ref()
|
||||
.map(|returns| {
|
||||
if function_node.is_async {
|
||||
// TODO: generic `types.CoroutineType`!
|
||||
Type::Todo
|
||||
} else {
|
||||
definition_expression_ty(db, definition, returns.as_ref())
|
||||
}
|
||||
})
|
||||
.unwrap_or(Type::Unknown);
|
||||
|
||||
Self {
|
||||
parameters: Parameters::from_parameters(
|
||||
db,
|
||||
definition,
|
||||
function_node.parameters.as_ref(),
|
||||
),
|
||||
return_ty,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The parameters portion of a typed signature.
|
||||
///
|
||||
/// The ordering of parameters is always as given in this struct: first positional-only parameters,
|
||||
/// then positional-or-keyword, then optionally the variadic parameter, then keyword-only
|
||||
/// parameters, and last, optionally the variadic keywords parameter.
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq)]
|
||||
pub(super) struct Parameters<'db> {
|
||||
/// Parameters which may only be filled by positional arguments.
|
||||
positional_only: Box<[ParameterWithDefault<'db>]>,
|
||||
|
||||
/// Parameters which may be filled by positional or keyword arguments.
|
||||
positional_or_keyword: Box<[ParameterWithDefault<'db>]>,
|
||||
|
||||
/// The `*args` variadic parameter, if any.
|
||||
variadic: Option<Parameter<'db>>,
|
||||
|
||||
/// Parameters which may only be filled by keyword arguments.
|
||||
keyword_only: Box<[ParameterWithDefault<'db>]>,
|
||||
|
||||
/// The `**kwargs` variadic keywords parameter, if any.
|
||||
keywords: Option<Parameter<'db>>,
|
||||
}
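For orientation, the five buckets in the struct above correspond to the standard Python parameter kinds; a small illustrative signature (not from this diff):

def example(a, b, /, c, d=0, *args, e, f=1, **kwargs) -> None:
    # a, b      -> positional-only
    # c, d      -> positional-or-keyword
    # *args     -> the variadic parameter
    # e, f      -> keyword-only
    # **kwargs  -> the variadic keywords parameter
    ...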
|
||||
|
||||
impl<'db> Parameters<'db> {
|
||||
/// Return todo parameters: (*args: Todo, **kwargs: Todo)
|
||||
fn todo() -> Self {
|
||||
Self {
|
||||
variadic: Some(Parameter {
|
||||
name: Some(Name::new_static("args")),
|
||||
annotated_ty: Type::Todo,
|
||||
}),
|
||||
keywords: Some(Parameter {
|
||||
name: Some(Name::new_static("kwargs")),
|
||||
annotated_ty: Type::Todo,
|
||||
}),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
fn from_parameters(
|
||||
db: &'db dyn Db,
|
||||
definition: Definition<'db>,
|
||||
parameters: &'db ast::Parameters,
|
||||
) -> Self {
|
||||
let ast::Parameters {
|
||||
posonlyargs,
|
||||
args,
|
||||
vararg,
|
||||
kwonlyargs,
|
||||
kwarg,
|
||||
range: _,
|
||||
} = parameters;
|
||||
let positional_only = posonlyargs
|
||||
.iter()
|
||||
.map(|arg| ParameterWithDefault::from_node(db, definition, arg))
|
||||
.collect();
|
||||
let positional_or_keyword = args
|
||||
.iter()
|
||||
.map(|arg| ParameterWithDefault::from_node(db, definition, arg))
|
||||
.collect();
|
||||
let variadic = vararg
|
||||
.as_ref()
|
||||
.map(|arg| Parameter::from_node(db, definition, arg));
|
||||
let keyword_only = kwonlyargs
|
||||
.iter()
|
||||
.map(|arg| ParameterWithDefault::from_node(db, definition, arg))
|
||||
.collect();
|
||||
let keywords = kwarg
|
||||
.as_ref()
|
||||
.map(|arg| Parameter::from_node(db, definition, arg));
|
||||
Self {
|
||||
positional_only,
|
||||
positional_or_keyword,
|
||||
variadic,
|
||||
keyword_only,
|
||||
keywords,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single parameter of a typed signature, with optional default value.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct ParameterWithDefault<'db> {
|
||||
parameter: Parameter<'db>,
|
||||
|
||||
/// Type of the default value, if any.
|
||||
default_ty: Option<Type<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> ParameterWithDefault<'db> {
|
||||
fn from_node(
|
||||
db: &'db dyn Db,
|
||||
definition: Definition<'db>,
|
||||
parameter_with_default: &'db ast::ParameterWithDefault,
|
||||
) -> Self {
|
||||
Self {
|
||||
default_ty: parameter_with_default
|
||||
.default
|
||||
.as_deref()
|
||||
.map(|default| definition_expression_ty(db, definition, default)),
|
||||
parameter: Parameter::from_node(db, definition, ¶meter_with_default.parameter),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A single parameter of a typed signature.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(super) struct Parameter<'db> {
|
||||
/// Parameter name.
|
||||
///
|
||||
/// It is possible for signatures to be defined in ways that leave positional-only parameters
|
||||
/// nameless (e.g. via `Callable` annotations).
|
||||
name: Option<Name>,
|
||||
|
||||
/// Annotated type of the parameter (Unknown if no annotation.)
|
||||
annotated_ty: Type<'db>,
|
||||
}
|
||||
|
||||
impl<'db> Parameter<'db> {
|
||||
fn from_node(
|
||||
db: &'db dyn Db,
|
||||
definition: Definition<'db>,
|
||||
parameter: &'db ast::Parameter,
|
||||
) -> Self {
|
||||
Parameter {
|
||||
name: Some(parameter.name.id.clone()),
|
||||
annotated_ty: parameter
|
||||
.annotation
|
||||
.as_deref()
|
||||
.map(|annotation| definition_expression_ty(db, definition, annotation))
|
||||
.unwrap_or(Type::Unknown),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::types::{global_symbol, FunctionType};
|
||||
use crate::ProgramSettings;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
pub(crate) fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
&ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings::new(src_root),
|
||||
},
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_function_f<'db>(db: &'db TestDb, file: &'static str) -> FunctionType<'db> {
|
||||
let module = ruff_db::files::system_path_to_file(db, file).unwrap();
|
||||
global_symbol(db, module, "f")
|
||||
.expect_type()
|
||||
.expect_function_literal()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_param_with_default<'db>(
|
||||
db: &'db TestDb,
|
||||
param_with_default: &ParameterWithDefault<'db>,
|
||||
expected_name: &'static str,
|
||||
expected_annotation_ty_display: &'static str,
|
||||
expected_default_ty_display: Option<&'static str>,
|
||||
) {
|
||||
assert_eq!(
|
||||
param_with_default
|
||||
.default_ty
|
||||
.map(|ty| ty.display(db).to_string()),
|
||||
expected_default_ty_display.map(ToString::to_string)
|
||||
);
|
||||
assert_param(
|
||||
db,
|
||||
¶m_with_default.parameter,
|
||||
expected_name,
|
||||
expected_annotation_ty_display,
|
||||
);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_param<'db>(
|
||||
db: &'db TestDb,
|
||||
param: &Parameter<'db>,
|
||||
expected_name: &'static str,
|
||||
expected_annotation_ty_display: &'static str,
|
||||
) {
|
||||
assert_eq!(param.name.as_ref().unwrap(), expected_name);
|
||||
assert_eq!(
|
||||
param.annotated_ty.display(db).to_string(),
|
||||
expected_annotation_ty_display
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn empty() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented("/src/a.py", "def f(): ...").unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
assert_eq!(sig.return_ty.display(&db).to_string(), "Unknown");
|
||||
let params = sig.parameters;
|
||||
assert!(params.positional_only.is_empty());
|
||||
assert!(params.positional_or_keyword.is_empty());
|
||||
assert!(params.variadic.is_none());
|
||||
assert!(params.keyword_only.is_empty());
|
||||
assert!(params.keywords.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(clippy::many_single_char_names)]
|
||||
fn full() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
def f(a, b: int, c = 1, d: int = 2, /,
|
||||
e = 3, f: Literal[4] = 4, *args: object,
|
||||
g = 5, h: Literal[6] = 6, **kwargs: str) -> bytes: ...
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
assert_eq!(sig.return_ty.display(&db).to_string(), "bytes");
|
||||
let params = sig.parameters;
|
||||
let [a, b, c, d] = ¶ms.positional_only[..] else {
|
||||
panic!("expected four positional-only parameters");
|
||||
};
|
||||
let [e, f] = ¶ms.positional_or_keyword[..] else {
|
||||
panic!("expected two positional-or-keyword parameters");
|
||||
};
|
||||
let Some(args) = params.variadic else {
|
||||
panic!("expected a variadic parameter");
|
||||
};
|
||||
let [g, h] = ¶ms.keyword_only[..] else {
|
||||
panic!("expected two keyword-only parameters");
|
||||
};
|
||||
let Some(kwargs) = params.keywords else {
|
||||
panic!("expected a kwargs parameter");
|
||||
};
|
||||
|
||||
assert_param_with_default(&db, a, "a", "Unknown", None);
|
||||
assert_param_with_default(&db, b, "b", "int", None);
|
||||
assert_param_with_default(&db, c, "c", "Unknown", Some("Literal[1]"));
|
||||
assert_param_with_default(&db, d, "d", "int", Some("Literal[2]"));
|
||||
assert_param_with_default(&db, e, "e", "Unknown", Some("Literal[3]"));
|
||||
assert_param_with_default(&db, f, "f", "Literal[4]", Some("Literal[4]"));
|
||||
assert_param_with_default(&db, g, "g", "Unknown", Some("Literal[5]"));
|
||||
assert_param_with_default(&db, h, "h", "Literal[6]", Some("Literal[6]"));
|
||||
assert_param(&db, &args, "args", "object");
|
||||
assert_param(&db, &kwargs, "kwargs", "str");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn not_deferred() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias = A
|
||||
|
||||
def f(a: alias): ...
|
||||
|
||||
alias = B
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
let [a] = &sig.parameters.positional_or_keyword[..] else {
|
||||
panic!("expected one positional-or-keyword parameter");
|
||||
};
|
||||
// Parameter resolution not deferred; we should see A not B
|
||||
assert_param_with_default(&db, a, "a", "A", None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deferred_in_stub() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.pyi",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias = A
|
||||
|
||||
def f(a: alias): ...
|
||||
|
||||
alias = B
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.pyi");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
let [a] = &sig.parameters.positional_or_keyword[..] else {
|
||||
panic!("expected one positional-or-keyword parameter");
|
||||
};
|
||||
// Parameter resolution deferred; we should see B
|
||||
assert_param_with_default(&db, a, "a", "B", None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_not_deferred() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias = A
|
||||
|
||||
def f[T](a: alias, b: T) -> T: ...
|
||||
|
||||
alias = B
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
let [a, b] = &sig.parameters.positional_or_keyword[..] else {
|
||||
panic!("expected two positional-or-keyword parameters");
|
||||
};
|
||||
// TODO resolution should not be deferred; we should see A not B
|
||||
assert_param_with_default(&db, a, "a", "B", None);
|
||||
assert_param_with_default(&db, b, "b", "T", None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generic_deferred_in_stub() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.pyi",
|
||||
"
|
||||
class A: ...
|
||||
class B: ...
|
||||
|
||||
alias = A
|
||||
|
||||
def f[T](a: alias, b: T) -> T: ...
|
||||
|
||||
alias = B
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.pyi");
|
||||
|
||||
let sig = func.internal_signature(&db);
|
||||
|
||||
let [a, b] = &sig.parameters.positional_or_keyword[..] else {
|
||||
panic!("expected two positional-or-keyword parameters");
|
||||
};
|
||||
// Parameter resolution deferred; we should see B
|
||||
assert_param_with_default(&db, a, "a", "B", None);
|
||||
assert_param_with_default(&db, b, "b", "T", None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_signature_no_decorator() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
def f(a: int) -> int: ...
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let expected_sig = func.internal_signature(&db);
|
||||
|
||||
// With no decorators, internal and external signature are the same
|
||||
assert_eq!(func.signature(&db), &expected_sig);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_signature_decorated() {
|
||||
let mut db = setup_db();
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
def deco(func): ...
|
||||
|
||||
@deco
|
||||
def f(a: int) -> int: ...
|
||||
",
|
||||
)
|
||||
.unwrap();
|
||||
let func = get_function_f(&db, "/src/a.py");
|
||||
|
||||
let expected_sig = Signature::todo();
|
||||
|
||||
// With an unrecognized decorator, the external signature falls back to the todo signature
|
||||
assert_eq!(func.signature(&db), &expected_sig);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_python_ast::str::raw_contents;
|
||||
use ruff_python_ast::{self as ast, ModExpression, StringFlags};
|
||||
use ruff_python_parser::{parse_expression_range, Parsed};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::types::diagnostic::{TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::Db;
|
||||
|
||||
type AnnotationParseResult = Result<Parsed<ModExpression>, TypeCheckDiagnostics>;
|
||||
|
||||
/// Parses the given expression as a string annotation.
|
||||
pub(crate) fn parse_string_annotation(
|
||||
db: &dyn Db,
|
||||
file: File,
|
||||
string_expr: &ast::ExprStringLiteral,
|
||||
) -> AnnotationParseResult {
|
||||
let _span = tracing::trace_span!("parse_string_annotation", string=?string_expr.range(), file=%file.path(db)).entered();
|
||||
|
||||
let source = source_text(db.upcast(), file);
|
||||
let node_text = &source[string_expr.range()];
|
||||
let mut diagnostics = TypeCheckDiagnosticsBuilder::new(db, file);
|
||||
|
||||
if let [string_literal] = string_expr.value.as_slice() {
|
||||
let prefix = string_literal.flags.prefix();
|
||||
if prefix.is_raw() {
|
||||
diagnostics.add(
|
||||
string_literal.into(),
|
||||
"annotation-raw-string",
|
||||
format_args!("Type expressions cannot use raw string literal"),
|
||||
);
|
||||
// Compare the raw contents (without quotes) of the expression with the parsed contents
|
||||
// contained in the string literal.
|
||||
} else if raw_contents(node_text)
|
||||
.is_some_and(|raw_contents| raw_contents == string_literal.as_str())
|
||||
{
|
||||
let range_excluding_quotes = string_literal
|
||||
.range()
|
||||
.add_start(string_literal.flags.opener_len())
|
||||
.sub_end(string_literal.flags.closer_len());
|
||||
|
||||
// TODO: Support multiline strings like:
|
||||
// ```py
|
||||
// x: """
|
||||
// int
|
||||
// | float
|
||||
// """ = 1
|
||||
// ```
|
||||
match parse_expression_range(source.as_str(), range_excluding_quotes) {
|
||||
Ok(parsed) => return Ok(parsed),
|
||||
Err(parse_error) => diagnostics.add(
|
||||
string_literal.into(),
|
||||
"forward-annotation-syntax-error",
|
||||
format_args!("Syntax error in forward annotation: {}", parse_error.error),
|
||||
),
|
||||
}
|
||||
} else {
|
||||
// The raw contents of the string don't match the parsed content. This could be the
|
||||
// case for annotations that contain escape sequences.
|
||||
diagnostics.add(
|
||||
string_expr.into(),
|
||||
"annotation-escape-character",
|
||||
format_args!("Type expressions cannot contain escape characters"),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// String is implicitly concatenated.
|
||||
diagnostics.add(
|
||||
string_expr.into(),
|
||||
"annotation-implicit-concat",
|
||||
format_args!("Type expressions cannot span multiple string literals"),
|
||||
);
|
||||
}
|
||||
|
||||
Err(diagnostics.finish())
|
||||
}
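In Python terms, the cases the function above accepts or rejects look roughly like this; the annotations are hypothetical and only illustrate the diagnostics named in the code:

a: "int | None"     # plain string annotation: parsed as a forward reference
b: r"int"           # raw string: rejected with "annotation-raw-string"
c: "int" " | None"  # implicitly concatenated literals: rejected with "annotation-implicit-concat"
d: "\x69nt"         # contains escape characters: rejected with "annotation-escape-character"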
|
||||
@@ -4,9 +4,9 @@ use ruff_db::files::File;
|
||||
use ruff_python_ast::{self as ast, AnyNodeRef};
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use crate::semantic_index::ast_ids::{HasScopedAstId, ScopedExpressionId};
|
||||
use crate::semantic_index::ast_ids::{HasScopedExpressionId, ScopedExpressionId};
|
||||
use crate::semantic_index::symbol::ScopeId;
|
||||
use crate::types::{TupleType, Type, TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::types::{Type, TypeCheckDiagnostics, TypeCheckDiagnosticsBuilder};
|
||||
use crate::Db;
|
||||
|
||||
/// Unpacks the value expression type to their respective targets.
|
||||
@@ -29,7 +29,7 @@ impl<'db> Unpacker<'db> {
|
||||
match target {
|
||||
ast::Expr::Name(target_name) => {
|
||||
self.targets
|
||||
.insert(target_name.scoped_ast_id(self.db, scope), value_ty);
|
||||
.insert(target_name.scoped_expression_id(self.db, scope), value_ty);
|
||||
}
|
||||
ast::Expr::Starred(ast::ExprStarred { value, .. }) => {
|
||||
self.unpack(value, value_ty, scope);
|
||||
@@ -93,11 +93,10 @@ impl<'db> Unpacker<'db> {
|
||||
// further and deconstruct to an array of `StringLiteral` with each
|
||||
// individual character, instead of just an array of `LiteralString`, but
|
||||
// there would be a cost and it's not clear that it's worth it.
|
||||
let value_ty = Type::Tuple(TupleType::new(
|
||||
let value_ty = Type::tuple(
|
||||
self.db,
|
||||
vec![Type::LiteralString; string_literal_ty.len(self.db)]
|
||||
.into_boxed_slice(),
|
||||
));
|
||||
&vec![Type::LiteralString; string_literal_ty.len(self.db)],
|
||||
);
|
||||
self.unpack(target, value_ty, scope);
|
||||
}
|
||||
_ => {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use dir_test::{dir_test, Fixture};
|
||||
use red_knot_test::run;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::Path;
|
||||
|
||||
use dir_test::{dir_test, Fixture};
|
||||
|
||||
/// See `crates/red_knot_test/README.md` for documentation on these tests.
|
||||
#[dir_test(
|
||||
dir: "$CARGO_MANIFEST_DIR/resources/mdtest",
|
||||
@@ -9,16 +10,16 @@ use std::path::Path;
|
||||
)]
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn mdtest(fixture: Fixture<&str>) {
|
||||
let path = fixture.path();
|
||||
let fixture_path = Path::new(fixture.path());
|
||||
let crate_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
|
||||
let workspace_root = crate_dir.parent().and_then(Path::parent).unwrap();
|
||||
|
||||
let crate_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("resources/mdtest")
|
||||
.canonicalize()
|
||||
let long_title = fixture_path
|
||||
.strip_prefix(workspace_root)
|
||||
.unwrap()
|
||||
.to_str()
|
||||
.unwrap();
|
||||
let short_title = fixture_path.file_name().and_then(OsStr::to_str).unwrap();
|
||||
|
||||
let relative_path = path
|
||||
.strip_prefix(crate_dir.to_str().unwrap())
|
||||
.unwrap_or(path);
|
||||
|
||||
run(Path::new(path), relative_path);
|
||||
red_knot_test::run(fixture_path, long_title, short_title);
|
||||
}
|
||||
|
||||
@@ -68,7 +68,7 @@ impl Session {
|
||||
let system = LSPSystem::new(index.clone());
|
||||
|
||||
// TODO(dhruvmanila): Get the values from the client settings
|
||||
let metadata = WorkspaceMetadata::from_path(system_path, &system, None)?;
|
||||
let metadata = WorkspaceMetadata::discover(system_path, &system, None)?;
|
||||
// TODO(micha): Handle the case where the program settings are incorrect more gracefully.
|
||||
workspaces.insert(path, RootDatabase::new(metadata, system)?);
|
||||
}
|
||||
|
||||
@@ -7,8 +7,8 @@ use lsp_types::Url;
|
||||
use ruff_db::file_revision::FileRevision;
|
||||
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
|
||||
use ruff_db::system::{
|
||||
DirectoryEntry, FileType, Metadata, OsSystem, Result, System, SystemPath, SystemPathBuf,
|
||||
SystemVirtualPath, SystemVirtualPathBuf,
|
||||
DirectoryEntry, FileType, GlobError, Metadata, OsSystem, PatternError, Result, System,
|
||||
SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf,
|
||||
};
|
||||
use ruff_notebook::{Notebook, NotebookError};
|
||||
|
||||
@@ -198,6 +198,16 @@ impl System for LSPSystem {
|
||||
self.os_system.walk_directory(path)
|
||||
}
|
||||
|
||||
fn glob(
|
||||
&self,
|
||||
pattern: &str,
|
||||
) -> std::result::Result<
|
||||
Box<dyn Iterator<Item = std::result::Result<SystemPathBuf, GlobError>>>,
|
||||
PatternError,
|
||||
> {
|
||||
self.os_system.glob(pattern)
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn Any {
|
||||
self
|
||||
}
|
||||
|
||||
@@ -19,9 +19,9 @@ mod parser;
|
||||
///
|
||||
/// Panic on test failure, and print failure details.
|
||||
#[allow(clippy::print_stdout)]
|
||||
pub fn run(path: &Path, title: &str) {
|
||||
pub fn run(path: &Path, long_title: &str, short_title: &str) {
|
||||
let source = std::fs::read_to_string(path).unwrap();
|
||||
let suite = match test_parser::parse(title, &source) {
|
||||
let suite = match test_parser::parse(short_title, &source) {
|
||||
Ok(suite) => suite,
|
||||
Err(err) => {
|
||||
panic!("Error parsing `{}`: {err}", path.to_str().unwrap())
|
||||
@@ -49,8 +49,8 @@ pub fn run(path: &Path, title: &str) {
|
||||
for failure in failures {
|
||||
let absolute_line_number =
|
||||
backtick_line.checked_add(relative_line_number).unwrap();
|
||||
let line_info = format!("{title}:{absolute_line_number}").cyan();
|
||||
println!(" {line_info} {failure}");
|
||||
let line_info = format!("{long_title}:{absolute_line_number}").cyan();
|
||||
println!(" {line_info} {failure}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
d262beb07502cda412db2179fb406d45d1a9486f
|
||||
5052fa2f18db4493892e0f2775030683c9d06531
|
||||
|
||||
@@ -24,18 +24,22 @@ _asyncio: 3.0-
|
||||
_bisect: 3.0-
|
||||
_blake2: 3.6-
|
||||
_bootlocale: 3.4-3.9
|
||||
_bz2: 3.3-
|
||||
_codecs: 3.0-
|
||||
_collections_abc: 3.3-
|
||||
_compat_pickle: 3.1-
|
||||
_compression: 3.5-
|
||||
_contextvars: 3.7-
|
||||
_csv: 3.0-
|
||||
_ctypes: 3.0-
|
||||
_curses: 3.0-
|
||||
_dbm: 3.0-
|
||||
_decimal: 3.3-
|
||||
_dummy_thread: 3.0-3.8
|
||||
_dummy_threading: 3.0-3.8
|
||||
_frozen_importlib: 3.0-
|
||||
_frozen_importlib_external: 3.5-
|
||||
_gdbm: 3.0-
|
||||
_heapq: 3.0-
|
||||
_imp: 3.0-
|
||||
_interpchannels: 3.13-
|
||||
@@ -45,6 +49,7 @@ _io: 3.0-
|
||||
_json: 3.0-
|
||||
_locale: 3.0-
|
||||
_lsprof: 3.0-
|
||||
_lzma: 3.3-
|
||||
_markupbase: 3.0-
|
||||
_msi: 3.0-3.12
|
||||
_operator: 3.4-
|
||||
@@ -52,12 +57,14 @@ _osx_support: 3.0-
|
||||
_posixsubprocess: 3.2-
|
||||
_py_abc: 3.7-
|
||||
_pydecimal: 3.5-
|
||||
_queue: 3.7-
|
||||
_random: 3.0-
|
||||
_sitebuiltins: 3.4-
|
||||
_socket: 3.0- # present in 3.0 at runtime, but not in typeshed
|
||||
_sqlite3: 3.0-
|
||||
_ssl: 3.0-
|
||||
_stat: 3.4-
|
||||
_struct: 3.0-
|
||||
_thread: 3.0-
|
||||
_threading_local: 3.0-
|
||||
_tkinter: 3.0-
|
||||
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_bz2.pyi (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
from _typeshed import ReadableBuffer
|
||||
from typing import final
|
||||
|
||||
@final
|
||||
class BZ2Compressor:
|
||||
def __init__(self, compresslevel: int = 9) -> None: ...
|
||||
def compress(self, data: ReadableBuffer, /) -> bytes: ...
|
||||
def flush(self) -> bytes: ...
|
||||
|
||||
@final
|
||||
class BZ2Decompressor:
|
||||
def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ...
|
||||
@property
|
||||
def eof(self) -> bool: ...
|
||||
@property
|
||||
def needs_input(self) -> bool: ...
|
||||
@property
|
||||
def unused_data(self) -> bytes: ...
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_contextvars.pyi (vendored, new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
import sys
|
||||
from collections.abc import Callable, Iterator, Mapping
|
||||
from typing import Any, ClassVar, Generic, TypeVar, final, overload
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_D = TypeVar("_D")
|
||||
_P = ParamSpec("_P")
|
||||
|
||||
@final
|
||||
class ContextVar(Generic[_T]):
|
||||
@overload
|
||||
def __init__(self, name: str) -> None: ...
|
||||
@overload
|
||||
def __init__(self, name: str, *, default: _T) -> None: ...
|
||||
def __hash__(self) -> int: ...
|
||||
@property
|
||||
def name(self) -> str: ...
|
||||
@overload
|
||||
def get(self) -> _T: ...
|
||||
@overload
|
||||
def get(self, default: _T, /) -> _T: ...
|
||||
@overload
|
||||
def get(self, default: _D, /) -> _D | _T: ...
|
||||
def set(self, value: _T, /) -> Token[_T]: ...
|
||||
def reset(self, token: Token[_T], /) -> None: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
|
||||
@final
|
||||
class Token(Generic[_T]):
|
||||
@property
|
||||
def var(self) -> ContextVar[_T]: ...
|
||||
@property
|
||||
def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express
|
||||
MISSING: ClassVar[object]
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
|
||||
|
||||
def copy_context() -> Context: ...
|
||||
|
||||
# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have
|
||||
# a different value.
|
||||
@final
|
||||
class Context(Mapping[ContextVar[Any], Any]):
|
||||
def __init__(self) -> None: ...
|
||||
@overload
|
||||
def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: ...
|
||||
@overload
|
||||
def get(self, key: ContextVar[_T], default: _T, /) -> _T: ...
|
||||
@overload
|
||||
def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ...
|
||||
def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
|
||||
def copy(self) -> Context: ...
|
||||
def __getitem__(self, key: ContextVar[_T], /) -> _T: ...
|
||||
def __iter__(self) -> Iterator[ContextVar[Any]]: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __eq__(self, value: object, /) -> bool: ...
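A small usage example of the API stubbed above (standard `contextvars` behaviour, shown here only for context):

from contextvars import ContextVar, copy_context

var: ContextVar[int] = ContextVar("var", default=0)

token = var.set(42)   # set() returns a Token remembering the previous state
ctx = copy_context()  # a Context maps each set ContextVar to its current value
assert ctx[var] == 42
var.reset(token)      # restore the previous state (the default here)
assert var.get() == 0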
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_dbm.pyi (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
import sys
|
||||
from _typeshed import ReadOnlyBuffer, StrOrBytesPath
|
||||
from types import TracebackType
|
||||
from typing import TypeVar, overload
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
if sys.platform != "win32":
|
||||
_T = TypeVar("_T")
|
||||
_KeyType: TypeAlias = str | ReadOnlyBuffer
|
||||
_ValueType: TypeAlias = str | ReadOnlyBuffer
|
||||
|
||||
class error(OSError): ...
|
||||
library: str
|
||||
|
||||
# Actual typename dbm, not exposed by the implementation
|
||||
class _dbm:
|
||||
def close(self) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def clear(self) -> None: ...
|
||||
|
||||
def __getitem__(self, item: _KeyType) -> bytes: ...
|
||||
def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
|
||||
def __delitem__(self, key: _KeyType) -> None: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __del__(self) -> None: ...
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
||||
) -> None: ...
|
||||
@overload
|
||||
def get(self, k: _KeyType) -> bytes | None: ...
|
||||
@overload
|
||||
def get(self, k: _KeyType, default: _T) -> bytes | _T: ...
|
||||
def keys(self) -> list[bytes]: ...
|
||||
def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ...
|
||||
# Don't exist at runtime
|
||||
__new__: None # type: ignore[assignment]
|
||||
__init__: None # type: ignore[assignment]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
|
||||
else:
|
||||
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
|
||||
@@ -107,9 +107,9 @@ class FileLoader:
|
||||
def get_filename(self, name: str | None = None) -> str: ...
|
||||
def load_module(self, name: str | None = None) -> types.ModuleType: ...
|
||||
if sys.version_info >= (3, 10):
|
||||
def get_resource_reader(self, module: types.ModuleType) -> importlib.readers.FileReader: ...
|
||||
def get_resource_reader(self, name: str | None = None) -> importlib.readers.FileReader: ...
|
||||
else:
|
||||
def get_resource_reader(self, module: types.ModuleType) -> Self | None: ...
|
||||
def get_resource_reader(self, name: str | None = None) -> Self | None: ...
|
||||
def open_resource(self, resource: str) -> _io.FileIO: ...
|
||||
def resource_path(self, resource: str) -> str: ...
|
||||
def is_resource(self, name: str) -> bool: ...
|
||||
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_gdbm.pyi (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
import sys
|
||||
from _typeshed import ReadOnlyBuffer, StrOrBytesPath
|
||||
from types import TracebackType
|
||||
from typing import TypeVar, overload
|
||||
from typing_extensions import Self, TypeAlias
|
||||
|
||||
if sys.platform != "win32":
|
||||
_T = TypeVar("_T")
|
||||
_KeyType: TypeAlias = str | ReadOnlyBuffer
|
||||
_ValueType: TypeAlias = str | ReadOnlyBuffer
|
||||
|
||||
open_flags: str
|
||||
|
||||
class error(OSError): ...
|
||||
# Actual typename gdbm, not exposed by the implementation
|
||||
class _gdbm:
|
||||
def firstkey(self) -> bytes | None: ...
|
||||
def nextkey(self, key: _KeyType) -> bytes | None: ...
|
||||
def reorganize(self) -> None: ...
|
||||
def sync(self) -> None: ...
|
||||
def close(self) -> None: ...
|
||||
if sys.version_info >= (3, 13):
|
||||
def clear(self) -> None: ...
|
||||
|
||||
def __getitem__(self, item: _KeyType) -> bytes: ...
|
||||
def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
|
||||
def __delitem__(self, key: _KeyType) -> None: ...
|
||||
def __contains__(self, key: _KeyType) -> bool: ...
|
||||
def __len__(self) -> int: ...
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(
|
||||
self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
|
||||
) -> None: ...
|
||||
@overload
|
||||
def get(self, k: _KeyType) -> bytes | None: ...
|
||||
@overload
|
||||
def get(self, k: _KeyType, default: _T) -> bytes | _T: ...
|
||||
def keys(self) -> list[bytes]: ...
|
||||
def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ...
|
||||
# Don't exist at runtime
|
||||
__new__: None # type: ignore[assignment]
|
||||
__init__: None # type: ignore[assignment]
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
|
||||
else:
|
||||
def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
|
||||
@@ -33,10 +33,10 @@ class _IOBase:
|
||||
def readable(self) -> bool: ...
|
||||
read: Callable[..., Any]
|
||||
def readlines(self, hint: int = -1, /) -> list[bytes]: ...
|
||||
def seek(self, offset: int, whence: int = ..., /) -> int: ...
|
||||
def seek(self, offset: int, whence: int = 0, /) -> int: ...
|
||||
def seekable(self) -> bool: ...
|
||||
def tell(self) -> int: ...
|
||||
def truncate(self, size: int | None = ..., /) -> int: ...
|
||||
def truncate(self, size: int | None = None, /) -> int: ...
|
||||
def writable(self) -> bool: ...
|
||||
write: Callable[..., Any]
|
||||
def writelines(self, lines: Iterable[ReadableBuffer], /) -> None: ...
|
||||
@@ -59,8 +59,8 @@ class _BufferedIOBase(_IOBase):
|
||||
def readinto(self, buffer: WriteableBuffer, /) -> int: ...
|
||||
def write(self, buffer: ReadableBuffer, /) -> int: ...
|
||||
def readinto1(self, buffer: WriteableBuffer, /) -> int: ...
|
||||
def read(self, size: int | None = ..., /) -> bytes: ...
|
||||
def read1(self, size: int = ..., /) -> bytes: ...
|
||||
def read(self, size: int | None = -1, /) -> bytes: ...
|
||||
def read1(self, size: int = -1, /) -> bytes: ...
|
||||
|
||||
class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes
|
||||
mode: str
|
||||
@@ -69,13 +69,15 @@ class FileIO(RawIOBase, _RawIOBase, BinaryIO): # type: ignore[misc] # incompat
|
||||
# "name" is a str. In the future, making FileIO generic might help.
|
||||
name: Any
|
||||
def __init__(
|
||||
self, file: FileDescriptorOrPath, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ...
|
||||
self, file: FileDescriptorOrPath, mode: str = "r", closefd: bool = True, opener: _Opener | None = None
|
||||
) -> None: ...
|
||||
@property
|
||||
def closefd(self) -> bool: ...
|
||||
def seek(self, pos: int, whence: int = 0, /) -> int: ...
|
||||
def read(self, size: int | None = -1, /) -> bytes | MaybeNone: ...
|
||||
|
||||
class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes
|
||||
def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ...
|
||||
def __init__(self, initial_bytes: ReadableBuffer = b"") -> None: ...
|
||||
# BytesIO does not contain a "name" field. This workaround is necessary
|
||||
# to allow BytesIO sub-classes to add this field, as it is defined
|
||||
# as a read-only property on IO[].
|
||||
@@ -83,16 +85,22 @@ class BytesIO(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc]
|
||||
def getvalue(self) -> bytes: ...
|
||||
def getbuffer(self) -> memoryview: ...
|
||||
def read1(self, size: int | None = -1, /) -> bytes: ...
|
||||
def readlines(self, size: int | None = None, /) -> list[bytes]: ...
|
||||
def seek(self, pos: int, whence: int = 0, /) -> int: ...
|
||||
|
||||
class BufferedReader(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes
|
||||
raw: RawIOBase
|
||||
def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ...
|
||||
def peek(self, size: int = 0, /) -> bytes: ...
|
||||
def seek(self, target: int, whence: int = 0, /) -> int: ...
|
||||
def truncate(self, pos: int | None = None, /) -> int: ...
|
||||
|
||||
class BufferedWriter(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of writelines in the base classes
|
||||
raw: RawIOBase
|
||||
def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ...
|
||||
def write(self, buffer: ReadableBuffer, /) -> int: ...
|
||||
def seek(self, target: int, whence: int = 0, /) -> int: ...
|
||||
def truncate(self, pos: int | None = None, /) -> int: ...
|
||||
|
||||
class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore[misc] # incompatible definitions of methods in the base classes
|
||||
mode: str
|
||||
@@ -101,10 +109,11 @@ class BufferedRandom(BufferedIOBase, _BufferedIOBase, BinaryIO): # type: ignore
|
||||
def __init__(self, raw: RawIOBase, buffer_size: int = 8192) -> None: ...
|
||||
def seek(self, target: int, whence: int = 0, /) -> int: ... # stubtest needs this
|
||||
def peek(self, size: int = 0, /) -> bytes: ...
|
||||
def truncate(self, pos: int | None = None, /) -> int: ...
|
||||
|
||||
class BufferedRWPair(BufferedIOBase, _BufferedIOBase):
|
||||
def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = 8192) -> None: ...
|
||||
def peek(self, size: int = ..., /) -> bytes: ...
|
||||
def peek(self, size: int = 0, /) -> bytes: ...
|
||||
|
||||
class _TextIOBase(_IOBase):
|
||||
encoding: str
|
||||
@@ -115,9 +124,9 @@ class _TextIOBase(_IOBase):
|
||||
def detach(self) -> BinaryIO: ...
|
||||
def write(self, s: str, /) -> int: ...
|
||||
def writelines(self, lines: Iterable[str], /) -> None: ... # type: ignore[override]
|
||||
def readline(self, size: int = ..., /) -> str: ... # type: ignore[override]
|
||||
def readline(self, size: int = -1, /) -> str: ... # type: ignore[override]
|
||||
def readlines(self, hint: int = -1, /) -> list[str]: ... # type: ignore[override]
|
||||
def read(self, size: int | None = ..., /) -> str: ...
|
||||
def read(self, size: int | None = -1, /) -> str: ...
|
||||
|
||||
@type_check_only
|
||||
class _WrappedBuffer(Protocol):
|
||||
@@ -177,9 +186,10 @@ class TextIOWrapper(TextIOBase, _TextIOBase, TextIO, Generic[_BufferT_co]): # t
|
||||
# TextIOWrapper's version of seek only supports a limited subset of
|
||||
# operations.
|
||||
def seek(self, cookie: int, whence: int = 0, /) -> int: ...
|
||||
def truncate(self, pos: int | None = None, /) -> int: ...
|
||||
|
||||
class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incompatible definitions of write in the base classes
|
||||
def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ...
|
||||
def __init__(self, initial_value: str | None = "", newline: str | None = "\n") -> None: ...
|
||||
# StringIO does not contain a "name" field. This workaround is necessary
|
||||
# to allow StringIO sub-classes to add this field, as it is defined
|
||||
# as a read-only property on IO[].
|
||||
@@ -187,9 +197,11 @@ class StringIO(TextIOBase, _TextIOBase, TextIO): # type: ignore[misc] # incomp
|
||||
def getvalue(self) -> str: ...
|
||||
@property
|
||||
def line_buffering(self) -> bool: ...
|
||||
def seek(self, pos: int, whence: int = 0, /) -> int: ...
|
||||
def truncate(self, pos: int | None = None, /) -> int: ...
|
||||
|
||||
class IncrementalNewlineDecoder:
|
||||
def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ...
|
||||
def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = "strict") -> None: ...
|
||||
def decode(self, input: ReadableBuffer | str, final: bool = False) -> str: ...
|
||||
@property
|
||||
def newlines(self) -> str | tuple[str, ...] | None: ...
|
||||
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_lzma.pyi (vendored, new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
from _typeshed import ReadableBuffer
|
||||
from collections.abc import Mapping, Sequence
|
||||
from typing import Any, Final, final
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
_FilterChain: TypeAlias = Sequence[Mapping[str, Any]]
|
||||
|
||||
FORMAT_AUTO: Final = 0
|
||||
FORMAT_XZ: Final = 1
|
||||
FORMAT_ALONE: Final = 2
|
||||
FORMAT_RAW: Final = 3
|
||||
CHECK_NONE: Final = 0
|
||||
CHECK_CRC32: Final = 1
|
||||
CHECK_CRC64: Final = 4
|
||||
CHECK_SHA256: Final = 10
|
||||
CHECK_ID_MAX: Final = 15
|
||||
CHECK_UNKNOWN: Final = 16
|
||||
FILTER_LZMA1: int # v big number
|
||||
FILTER_LZMA2: Final = 33
|
||||
FILTER_DELTA: Final = 3
|
||||
FILTER_X86: Final = 4
|
||||
FILTER_IA64: Final = 6
|
||||
FILTER_ARM: Final = 7
|
||||
FILTER_ARMTHUMB: Final = 8
|
||||
FILTER_SPARC: Final = 9
|
||||
FILTER_POWERPC: Final = 5
|
||||
MF_HC3: Final = 3
|
||||
MF_HC4: Final = 4
|
||||
MF_BT2: Final = 18
|
||||
MF_BT3: Final = 19
|
||||
MF_BT4: Final = 20
|
||||
MODE_FAST: Final = 1
|
||||
MODE_NORMAL: Final = 2
|
||||
PRESET_DEFAULT: Final = 6
|
||||
PRESET_EXTREME: int # v big number
|
||||
|
||||
@final
|
||||
class LZMADecompressor:
|
||||
def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ...
|
||||
def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ...
|
||||
@property
|
||||
def check(self) -> int: ...
|
||||
@property
|
||||
def eof(self) -> bool: ...
|
||||
@property
|
||||
def unused_data(self) -> bytes: ...
|
||||
@property
|
||||
def needs_input(self) -> bool: ...
|
||||
|
||||
@final
|
||||
class LZMACompressor:
|
||||
def __init__(
|
||||
self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ...
|
||||
) -> None: ...
|
||||
def compress(self, data: ReadableBuffer, /) -> bytes: ...
|
||||
def flush(self) -> bytes: ...
|
||||
|
||||
class LZMAError(Exception): ...
|
||||
|
||||
def is_check_supported(check_id: int, /) -> bool: ...
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_queue.pyi (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
import sys
|
||||
from typing import Any, Generic, TypeVar
|
||||
|
||||
if sys.version_info >= (3, 9):
|
||||
from types import GenericAlias
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
class Empty(Exception): ...
|
||||
|
||||
class SimpleQueue(Generic[_T]):
|
||||
def __init__(self) -> None: ...
|
||||
def empty(self) -> bool: ...
|
||||
def get(self, block: bool = True, timeout: float | None = None) -> _T: ...
|
||||
def get_nowait(self) -> _T: ...
|
||||
def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ...
|
||||
def put_nowait(self, item: _T) -> None: ...
|
||||
def qsize(self) -> int: ...
|
||||
if sys.version_info >= (3, 9):
|
||||
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
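For context, the class stubbed above backs `queue.SimpleQueue`; minimal standard-library usage:

from queue import SimpleQueue

q: "SimpleQueue[int]" = SimpleQueue()
q.put(1)             # blocking put (SimpleQueue is unbounded, so it never actually blocks)
q.put_nowait(2)
assert q.qsize() == 2
assert q.get() == 1  # FIFO order
assert q.get_nowait() == 2
assert q.empty()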
|
||||
crates/red_knot_vendored/vendor/typeshed/stdlib/_struct.pyi (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
from _typeshed import ReadableBuffer, WriteableBuffer
from collections.abc import Iterator
from typing import Any

def pack(fmt: str | bytes, /, *v: Any) -> bytes: ...
def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, /, *v: Any) -> None: ...
def unpack(format: str | bytes, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ...
def unpack_from(format: str | bytes, /, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ...
def iter_unpack(format: str | bytes, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ...
def calcsize(format: str | bytes, /) -> int: ...

class Struct:
    @property
    def format(self) -> str: ...
    @property
    def size(self) -> int: ...
    def __init__(self, format: str | bytes) -> None: ...
    def pack(self, *v: Any) -> bytes: ...
    def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ...
    def unpack(self, buffer: ReadableBuffer, /) -> tuple[Any, ...]: ...
    def unpack_from(self, buffer: ReadableBuffer, offset: int = 0) -> tuple[Any, ...]: ...
    def iter_unpack(self, buffer: ReadableBuffer, /) -> Iterator[tuple[Any, ...]]: ...
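These are the C-level functions and the Struct class that the struct module re-exports; a small pack/unpack round trip:

import struct

s = struct.Struct("<IHq")  # little-endian: uint32, uint16, int64
print(s.size)              # 14
packed = s.pack(1, 2, -3)
print(s.unpack(packed))    # (1, 2, -3)

buf = bytearray(s.size)
struct.pack_into("<IHq", buf, 0, 7, 8, 9)
print(struct.unpack_from("<IHq", buf))  # (7, 8, 9)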
@@ -1,5 +1,5 @@
from typing import Any
-from typing_extensions import TypeAlias
+from typing_extensions import Self, TypeAlias
from weakref import ReferenceType

__all__ = ["local"]

@@ -12,6 +12,7 @@ class _localimpl:
    def create_dict(self) -> _LocalDict: ...

class local:
+    def __new__(cls, /, *args: Any, **kw: Any) -> Self: ...
    def __getattribute__(self, name: str) -> Any: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __delattr__(self, name: str) -> None: ...
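_threading_local.local is the pure-Python fallback behind threading.local; a short sketch of per-thread attribute storage:

import threading

data = threading.local()
data.value = "main"

def worker() -> None:
    # Each thread sees its own attribute namespace on the same object.
    data.value = "worker"
    print("in worker:", data.value)  # worker

t = threading.Thread(target=worker)
t.start()
t.join()
print("in main:", data.value)  # still "main"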
@@ -1284,9 +1284,7 @@ class property:

@final
class _NotImplementedType(Any):
-    # A little weird, but typing the __call__ as NotImplemented makes the error message
-    # for NotImplemented() much better
-    __call__: NotImplemented  # type: ignore[valid-type]  # pyright: ignore[reportInvalidTypeForm]
+    __call__: None

NotImplemented: _NotImplementedType
@@ -1917,7 +1915,7 @@ class StopIteration(Exception):

class OSError(Exception):
    errno: int | None
-    strerror: str
+    strerror: str | None
    # filename, filename2 are actually str | bytes | None
    filename: Any
    filename2: Any
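A tiny illustration of why strerror is now typed str | None: OSError only fills errno/strerror when given two or more arguments.

err = OSError(2, "No such file or directory", "missing.txt")
print(err.errno, err.strerror, err.filename)  # 2 No such file or directory missing.txt

bare = OSError("something went wrong")
print(bare.errno, bare.strerror)              # None None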
@@ -1,9 +1,10 @@
import _compression
import sys
+from _bz2 import BZ2Compressor as BZ2Compressor, BZ2Decompressor as BZ2Decompressor
from _compression import BaseStream
from _typeshed import ReadableBuffer, StrOrBytesPath, WriteableBuffer
from collections.abc import Iterable
-from typing import IO, Any, Literal, Protocol, SupportsIndex, TextIO, final, overload
+from typing import IO, Any, Literal, Protocol, SupportsIndex, TextIO, overload
from typing_extensions import Self, TypeAlias

__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"]
@@ -128,19 +129,3 @@ class BZ2File(BaseStream, IO[bytes]):
    def seek(self, offset: int, whence: int = 0) -> int: ...
    def write(self, data: ReadableBuffer) -> int: ...
    def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ...
-
-@final
-class BZ2Compressor:
-    def __init__(self, compresslevel: int = 9) -> None: ...
-    def compress(self, data: ReadableBuffer, /) -> bytes: ...
-    def flush(self) -> bytes: ...
-
-@final
-class BZ2Decompressor:
-    def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ...
-    @property
-    def eof(self) -> bool: ...
-    @property
-    def needs_input(self) -> bool: ...
-    @property
-    def unused_data(self) -> bytes: ...
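BZ2Compressor and BZ2Decompressor now come from the _bz2 stub but are used exactly as before; a minimal round trip through the public bz2 module:

import bz2

data = b"red knot " * 500
blob = bz2.compress(data, compresslevel=9)
assert bz2.decompress(blob) == data

# The incremental API mirrors the stubs above.
d = bz2.BZ2Decompressor()
assert d.decompress(blob) == data and d.eof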
@@ -1,63 +1,3 @@
-import sys
-from collections.abc import Callable, Iterator, Mapping
-from typing import Any, ClassVar, Generic, TypeVar, final, overload
-from typing_extensions import ParamSpec
-
-if sys.version_info >= (3, 9):
-    from types import GenericAlias
+from _contextvars import Context as Context, ContextVar as ContextVar, Token as Token, copy_context as copy_context

__all__ = ("Context", "ContextVar", "Token", "copy_context")
-
-_T = TypeVar("_T")
-_D = TypeVar("_D")
-_P = ParamSpec("_P")
-
-@final
-class ContextVar(Generic[_T]):
-    @overload
-    def __init__(self, name: str) -> None: ...
-    @overload
-    def __init__(self, name: str, *, default: _T) -> None: ...
-    def __hash__(self) -> int: ...
-    @property
-    def name(self) -> str: ...
-    @overload
-    def get(self) -> _T: ...
-    @overload
-    def get(self, default: _T, /) -> _T: ...
-    @overload
-    def get(self, default: _D, /) -> _D | _T: ...
-    def set(self, value: _T, /) -> Token[_T]: ...
-    def reset(self, token: Token[_T], /) -> None: ...
-    if sys.version_info >= (3, 9):
-        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
-
-@final
-class Token(Generic[_T]):
-    @property
-    def var(self) -> ContextVar[_T]: ...
-    @property
-    def old_value(self) -> Any: ...  # returns either _T or MISSING, but that's hard to express
-    MISSING: ClassVar[object]
-    if sys.version_info >= (3, 9):
-        def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
-
-def copy_context() -> Context: ...
-
-# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have
-# a different value.
-@final
-class Context(Mapping[ContextVar[Any], Any]):
-    def __init__(self) -> None: ...
-    @overload
-    def get(self, key: ContextVar[_T], default: None = None, /) -> _T | None: ...
-    @overload
-    def get(self, key: ContextVar[_T], default: _T, /) -> _T: ...
-    @overload
-    def get(self, key: ContextVar[_T], default: _D, /) -> _T | _D: ...
-    def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
-    def copy(self) -> Context: ...
-    def __getitem__(self, key: ContextVar[_T], /) -> _T: ...
-    def __iter__(self) -> Iterator[ContextVar[Any]]: ...
-    def __len__(self) -> int: ...
-    def __eq__(self, value: object, /) -> bool: ...
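The contextvars stub is reduced to a re-export of the C implementation in _contextvars; runtime usage is unchanged, as in this sketch:

from contextvars import ContextVar, copy_context

request_id: ContextVar[str] = ContextVar("request_id", default="-")

def handler() -> str:
    return f"handling {request_id.get()}"

token = request_id.set("abc123")
print(handler())           # handling abc123
request_id.reset(token)    # restore the previous value
print(request_id.get())    # -

ctx = copy_context()
print(ctx.run(handler))    # handling -  (runs in the copied context)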
@@ -1,47 +1 @@
-import sys
-from _typeshed import ReadOnlyBuffer, StrOrBytesPath
-from types import TracebackType
-from typing import TypeVar, overload
-from typing_extensions import Self, TypeAlias
-
-if sys.platform != "win32":
-    _T = TypeVar("_T")
-    _KeyType: TypeAlias = str | ReadOnlyBuffer
-    _ValueType: TypeAlias = str | ReadOnlyBuffer
-
-    open_flags: str
-
-    class error(OSError): ...
-    # Actual typename gdbm, not exposed by the implementation
-    class _gdbm:
-        def firstkey(self) -> bytes | None: ...
-        def nextkey(self, key: _KeyType) -> bytes | None: ...
-        def reorganize(self) -> None: ...
-        def sync(self) -> None: ...
-        def close(self) -> None: ...
-        if sys.version_info >= (3, 13):
-            def clear(self) -> None: ...
-
-        def __getitem__(self, item: _KeyType) -> bytes: ...
-        def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
-        def __delitem__(self, key: _KeyType) -> None: ...
-        def __contains__(self, key: _KeyType) -> bool: ...
-        def __len__(self) -> int: ...
-        def __enter__(self) -> Self: ...
-        def __exit__(
-            self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
-        ) -> None: ...
-        @overload
-        def get(self, k: _KeyType) -> bytes | None: ...
-        @overload
-        def get(self, k: _KeyType, default: _T) -> bytes | _T: ...
-        def keys(self) -> list[bytes]: ...
-        def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ...
-        # Don't exist at runtime
-        __new__: None  # type: ignore[assignment]
-        __init__: None  # type: ignore[assignment]
-
-    if sys.version_info >= (3, 11):
-        def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
-    else:
-        def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _gdbm: ...
+from _gdbm import *
@@ -1,43 +1 @@
-import sys
-from _typeshed import ReadOnlyBuffer, StrOrBytesPath
-from types import TracebackType
-from typing import TypeVar, overload
-from typing_extensions import Self, TypeAlias
-
-if sys.platform != "win32":
-    _T = TypeVar("_T")
-    _KeyType: TypeAlias = str | ReadOnlyBuffer
-    _ValueType: TypeAlias = str | ReadOnlyBuffer
-
-    class error(OSError): ...
-    library: str
-
-    # Actual typename dbm, not exposed by the implementation
-    class _dbm:
-        def close(self) -> None: ...
-        if sys.version_info >= (3, 13):
-            def clear(self) -> None: ...
-
-        def __getitem__(self, item: _KeyType) -> bytes: ...
-        def __setitem__(self, key: _KeyType, value: _ValueType) -> None: ...
-        def __delitem__(self, key: _KeyType) -> None: ...
-        def __len__(self) -> int: ...
-        def __del__(self) -> None: ...
-        def __enter__(self) -> Self: ...
-        def __exit__(
-            self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
-        ) -> None: ...
-        @overload
-        def get(self, k: _KeyType) -> bytes | None: ...
-        @overload
-        def get(self, k: _KeyType, default: _T) -> bytes | _T: ...
-        def keys(self) -> list[bytes]: ...
-        def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ...
-        # Don't exist at runtime
-        __new__: None  # type: ignore[assignment]
-        __init__: None  # type: ignore[assignment]
-
-    if sys.version_info >= (3, 11):
-        def open(filename: StrOrBytesPath, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
-    else:
-        def open(filename: str, flags: str = "r", mode: int = 0o666, /) -> _dbm: ...
+from _dbm import *
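Both stubs are reduced to `from _gdbm import *` / `from _dbm import *` re-exports; the usual entry point is the dbm package. A hedged sketch (requires a build where the GNU dbm extension is available; the file path is illustrative):

import os
import tempfile
import dbm.gnu  # raises ImportError where the _gdbm extension is unavailable

path = os.path.join(tempfile.mkdtemp(), "example.gdbm")
with dbm.gnu.open(path, "c") as db:  # "c": create the database if it does not exist
    db[b"key"] = b"value"
    print(db.keys())           # [b'key']
    print(db.get(b"missing"))  # None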
@@ -1,5 +1,5 @@
from _typeshed import BytesPath, StrPath, Unused
-from collections.abc import Callable, Iterable
+from collections.abc import Callable, Iterable, Sequence
from distutils.file_util import _BytesPathT, _StrPathT
from typing import Literal, overload
from typing_extensions import TypeAlias, TypeVarTuple, Unpack

@@ -63,7 +63,7 @@ class CCompiler:
    def set_executables(self, **args: str) -> None: ...
    def compile(
        self,
-        sources: list[str],
+        sources: Sequence[StrPath],
        output_dir: str | None = None,
        macros: list[_Macro] | None = None,
        include_dirs: list[str] | None = None,
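Widening sources to Sequence[StrPath] means tuples of path-like objects type-check; a hedged sketch (distutils is gone from the 3.12+ stdlib, so this assumes an older interpreter or setuptools' vendored copy, and the source paths are illustrative):

from pathlib import Path
from distutils.ccompiler import new_compiler

cc = new_compiler()
# A tuple of Path objects is now an acceptable Sequence[StrPath]
# ("src/foo.c" and "src/bar.c" are hypothetical inputs).
objects = cc.compile((Path("src/foo.c"), Path("src/bar.c")), output_dir="build")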
@@ -2,7 +2,7 @@ import sys
from collections.abc import Callable
from decimal import Decimal
from numbers import Integral, Rational, Real
-from typing import Any, Literal, SupportsIndex, overload
+from typing import Any, Literal, Protocol, SupportsIndex, overload
from typing_extensions import Self, TypeAlias

_ComparableNum: TypeAlias = int | float | Decimal | Real

@@ -20,11 +20,19 @@ else:
    @overload
    def gcd(a: Integral, b: Integral) -> Integral: ...

+class _ConvertibleToIntegerRatio(Protocol):
+    def as_integer_ratio(self) -> tuple[int | Rational, int | Rational]: ...
+
class Fraction(Rational):
    @overload
    def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: ...
    @overload
    def __new__(cls, value: float | Decimal | str, /) -> Self: ...
+
+    if sys.version_info >= (3, 14):
+        @overload
+        def __new__(cls, value: _ConvertibleToIntegerRatio) -> Self: ...
+
    @classmethod
    def from_float(cls, f: float) -> Self: ...
    @classmethod
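The protocol overload models Python 3.14's planned support for constructing a Fraction from any object exposing as_integer_ratio(); on current versions the existing constructors behave as below:

from decimal import Decimal
from fractions import Fraction

print(Fraction(3, 4))              # 3/4
print(Fraction("1.25"))            # 5/4
print(Fraction(Decimal("0.5")))    # 1/2
print(Fraction.from_float(0.125))  # 1/8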
@@ -1,7 +1,11 @@
+from collections.abc import Iterable
+
__all__ = ["GetoptError", "error", "getopt", "gnu_getopt"]

-def getopt(args: list[str], shortopts: str, longopts: list[str] = []) -> tuple[list[tuple[str, str]], list[str]]: ...
-def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = []) -> tuple[list[tuple[str, str]], list[str]]: ...
+def getopt(args: list[str], shortopts: str, longopts: Iterable[str] | str = []) -> tuple[list[tuple[str, str]], list[str]]: ...
+def gnu_getopt(
+    args: list[str], shortopts: str, longopts: Iterable[str] | str = []
+) -> tuple[list[tuple[str, str]], list[str]]: ...

class GetoptError(Exception):
    msg: str
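longopts may now be any iterable of strings (or a single string), which matches the runtime behaviour; a short sketch:

import getopt

args = ["-v", "--output", "out.txt", "positional"]
# A tuple of long option names is fine now; previously the stub required a list.
opts, rest = getopt.getopt(args, "v", ("output=", "help"))
print(opts)  # [('-v', ''), ('--output', 'out.txt')]
print(rest)  # ['positional']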
@@ -128,19 +128,6 @@ class _BaseNetwork(_IPAddressBase, Generic[_A]):
    @property
    def hostmask(self) -> _A: ...

-class _BaseInterface(_BaseAddress, Generic[_A, _N]):
-    hostmask: _A
-    netmask: _A
-    network: _N
-    @property
-    def ip(self) -> _A: ...
-    @property
-    def with_hostmask(self) -> str: ...
-    @property
-    def with_netmask(self) -> str: ...
-    @property
-    def with_prefixlen(self) -> str: ...
-
class _BaseV4:
    @property
    def version(self) -> Literal[4]: ...

@@ -154,9 +141,21 @@ class IPv4Address(_BaseV4, _BaseAddress):

class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ...

-class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]):
+class IPv4Interface(IPv4Address):
    netmask: IPv4Address
    network: IPv4Network
+    def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
+    @property
+    def hostmask(self) -> IPv4Address: ...
+    @property
+    def ip(self) -> IPv4Address: ...
+    @property
+    def with_hostmask(self) -> str: ...
+    @property
+    def with_netmask(self) -> str: ...
+    @property
+    def with_prefixlen(self) -> str: ...

class _BaseV6:
    @property

@@ -184,9 +183,21 @@ class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]):
    @property
    def is_site_local(self) -> bool: ...

-class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]):
+class IPv6Interface(IPv6Address):
    netmask: IPv6Address
    network: IPv6Network
+    def __eq__(self, other: object) -> bool: ...
+    def __hash__(self) -> int: ...
+    @property
+    def hostmask(self) -> IPv6Address: ...
+    @property
+    def ip(self) -> IPv6Address: ...
+    @property
+    def with_hostmask(self) -> str: ...
+    @property
+    def with_netmask(self) -> str: ...
+    @property
+    def with_prefixlen(self) -> str: ...

def v4_int_to_packed(address: int) -> bytes: ...
def v6_int_to_packed(address: int) -> bytes: ...
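IPv4Interface and IPv6Interface keep the same public surface after inlining what the shared _BaseInterface mix-in used to provide; a quick sketch:

from ipaddress import IPv4Interface

iface = IPv4Interface("192.168.1.10/24")
print(iface.ip)              # 192.168.1.10
print(iface.network)         # 192.168.1.0/24
print(iface.netmask)         # 255.255.255.0
print(iface.with_prefixlen)  # 192.168.1.10/24
print(iface.with_hostmask)   # 192.168.1.10/0.0.0.255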
@@ -1,6 +1,6 @@
import sys
from _typeshed import StrOrBytesPath
-from collections.abc import Callable, Hashable, Iterable, Sequence
+from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence
from configparser import RawConfigParser
from re import Pattern
from threading import Thread

@@ -63,7 +63,7 @@ def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ...
if sys.version_info >= (3, 10):
    def fileConfig(
        fname: StrOrBytesPath | IO[str] | RawConfigParser,
-        defaults: dict[str, str] | None = None,
+        defaults: Mapping[str, str] | None = None,
        disable_existing_loggers: bool = True,
        encoding: str | None = None,
    ) -> None: ...

@@ -71,7 +71,7 @@ if sys.version_info >= (3, 10):
else:
    def fileConfig(
        fname: StrOrBytesPath | IO[str] | RawConfigParser,
-        defaults: dict[str, str] | None = None,
+        defaults: Mapping[str, str] | None = None,
        disable_existing_loggers: bool = True,
    ) -> None: ...

@@ -260,6 +260,8 @@ class QueueHandler(Handler):
    def __init__(self, queue: _QueueLike[Any]) -> None: ...
    def prepare(self, record: LogRecord) -> Any: ...
    def enqueue(self, record: LogRecord) -> None: ...
+    if sys.version_info >= (3, 12):
+        listener: QueueListener | None

class QueueListener:
    handlers: tuple[Handler, ...]  # undocumented
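defaults for fileConfig now accepts any Mapping, and QueueHandler gains a listener attribute on 3.12+; a sketch of the queue-based handler pair those stubs describe:

import logging
import logging.handlers
import queue

q: queue.SimpleQueue[logging.LogRecord] = queue.SimpleQueue()
qh = logging.handlers.QueueHandler(q)
listener = logging.handlers.QueueListener(q, logging.StreamHandler())

logging.getLogger("demo").addHandler(qh)
listener.start()
logging.getLogger("demo").warning("routed through the queue")
listener.stop()
# On 3.12+, QueueHandlers created via dictConfig also expose the paired
# QueueListener as qh.listener.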
@@ -1,7 +1,41 @@
from _compression import BaseStream
+from _lzma import (
+    CHECK_CRC32 as CHECK_CRC32,
+    CHECK_CRC64 as CHECK_CRC64,
+    CHECK_ID_MAX as CHECK_ID_MAX,
+    CHECK_NONE as CHECK_NONE,
+    CHECK_SHA256 as CHECK_SHA256,
+    CHECK_UNKNOWN as CHECK_UNKNOWN,
+    FILTER_ARM as FILTER_ARM,
+    FILTER_ARMTHUMB as FILTER_ARMTHUMB,
+    FILTER_DELTA as FILTER_DELTA,
+    FILTER_IA64 as FILTER_IA64,
+    FILTER_LZMA1 as FILTER_LZMA1,
+    FILTER_LZMA2 as FILTER_LZMA2,
+    FILTER_POWERPC as FILTER_POWERPC,
+    FILTER_SPARC as FILTER_SPARC,
+    FILTER_X86 as FILTER_X86,
+    FORMAT_ALONE as FORMAT_ALONE,
+    FORMAT_AUTO as FORMAT_AUTO,
+    FORMAT_RAW as FORMAT_RAW,
+    FORMAT_XZ as FORMAT_XZ,
+    MF_BT2 as MF_BT2,
+    MF_BT3 as MF_BT3,
+    MF_BT4 as MF_BT4,
+    MF_HC3 as MF_HC3,
+    MF_HC4 as MF_HC4,
+    MODE_FAST as MODE_FAST,
+    MODE_NORMAL as MODE_NORMAL,
+    PRESET_DEFAULT as PRESET_DEFAULT,
+    PRESET_EXTREME as PRESET_EXTREME,
+    LZMACompressor as LZMACompressor,
+    LZMADecompressor as LZMADecompressor,
+    LZMAError as LZMAError,
+    _FilterChain,
+    is_check_supported as is_check_supported,
+)
from _typeshed import ReadableBuffer, StrOrBytesPath
-from collections.abc import Mapping, Sequence
-from typing import IO, Any, Final, Literal, TextIO, final, overload
+from typing import IO, Literal, TextIO, overload
from typing_extensions import Self, TypeAlias

__all__ = [

@@ -48,62 +82,6 @@ _OpenTextWritingMode: TypeAlias = Literal["wt", "xt", "at"]

_PathOrFile: TypeAlias = StrOrBytesPath | IO[bytes]

-_FilterChain: TypeAlias = Sequence[Mapping[str, Any]]
-
-FORMAT_AUTO: Final = 0
-FORMAT_XZ: Final = 1
-FORMAT_ALONE: Final = 2
-FORMAT_RAW: Final = 3
-CHECK_NONE: Final = 0
-CHECK_CRC32: Final = 1
-CHECK_CRC64: Final = 4
-CHECK_SHA256: Final = 10
-CHECK_ID_MAX: Final = 15
-CHECK_UNKNOWN: Final = 16
-FILTER_LZMA1: int  # v big number
-FILTER_LZMA2: Final = 33
-FILTER_DELTA: Final = 3
-FILTER_X86: Final = 4
-FILTER_IA64: Final = 6
-FILTER_ARM: Final = 7
-FILTER_ARMTHUMB: Final = 8
-FILTER_SPARC: Final = 9
-FILTER_POWERPC: Final = 5
-MF_HC3: Final = 3
-MF_HC4: Final = 4
-MF_BT2: Final = 18
-MF_BT3: Final = 19
-MF_BT4: Final = 20
-MODE_FAST: Final = 1
-MODE_NORMAL: Final = 2
-PRESET_DEFAULT: Final = 6
-PRESET_EXTREME: int  # v big number
-
-# from _lzma.c
-@final
-class LZMADecompressor:
-    def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ...
-    def decompress(self, data: ReadableBuffer, max_length: int = -1) -> bytes: ...
-    @property
-    def check(self) -> int: ...
-    @property
-    def eof(self) -> bool: ...
-    @property
-    def unused_data(self) -> bytes: ...
-    @property
-    def needs_input(self) -> bool: ...
-
-# from _lzma.c
-@final
-class LZMACompressor:
-    def __init__(
-        self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ...
-    ) -> None: ...
-    def compress(self, data: ReadableBuffer, /) -> bytes: ...
-    def flush(self) -> bytes: ...
-
-class LZMAError(Exception): ...
-
class LZMAFile(BaseStream, IO[bytes]):  # type: ignore[misc]  # incompatible definitions of writelines in the base classes
    def __init__(
        self,

@@ -194,4 +172,3 @@ def compress(
def decompress(
    data: ReadableBuffer, format: int = 0, memlimit: int | None = None, filters: _FilterChain | None = None
) -> bytes: ...
-def is_check_supported(check_id: int, /) -> bool: ...
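With the duplicated declarations dropped, lzma simply re-exports _lzma; the high-level helpers are unchanged. A small sketch (the output path is illustrative):

import lzma

path = "/tmp/example.xz"
with lzma.open(path, "wt", encoding="utf-8") as f:
    f.write("compressed text\n")

with lzma.open(path, "rt", encoding="utf-8") as f:
    print(f.read(), end="")  # compressed text

print(lzma.is_check_supported(lzma.CHECK_SHA256))  # usually True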
@@ -34,9 +34,22 @@ class mmap:
    if sys.platform == "win32":
        def __init__(self, fileno: int, length: int, tagname: str | None = ..., access: int = ..., offset: int = ...) -> None: ...
    else:
-        def __init__(
-            self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...
-        ) -> None: ...
+        if sys.version_info >= (3, 13):
+            def __init__(
+                self,
+                fileno: int,
+                length: int,
+                flags: int = ...,
+                prot: int = ...,
+                access: int = ...,
+                offset: int = ...,
+                *,
+                trackfd: bool = True,
+            ) -> None: ...
+        else:
+            def __init__(
+                self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ...
+            ) -> None: ...

    def close(self) -> None: ...
    def flush(self, offset: int = ..., size: int = ...) -> None: ...
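trackfd is new in Python 3.13 on POSIX: True keeps the old behaviour, while False detaches the mapping from the file descriptor. A hedged sketch of the basic mapping the stub covers:

import mmap
import tempfile

with tempfile.NamedTemporaryFile() as f:
    f.write(b"hello mmap")
    f.flush()
    # On POSIX with Python 3.13+, mmap.mmap(f.fileno(), 0, trackfd=False) would
    # additionally detach the mapping from the descriptor (keyword-only argument).
    with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as m:
        print(m[:5])  # b'hello'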
Some files were not shown because too many files have changed in this diff.