Compare commits
55 Commits
charlie/wa
...
cjm/phis
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bccc33c6fe | ||
|
|
73160dc8b6 | ||
|
|
15aa5a6d57 | ||
|
|
33512a4249 | ||
|
|
d8ebb03591 | ||
|
|
2e211c5c22 | ||
|
|
9fd8aaaf29 | ||
|
|
d110bd4e60 | ||
|
|
eb9c7ae869 | ||
|
|
7defc0d136 | ||
|
|
45f459bafd | ||
|
|
99e946a005 | ||
|
|
78a7ac0722 | ||
|
|
fa2f3f9f2f | ||
|
|
3898d737d8 | ||
|
|
c487149b7d | ||
|
|
bebed67bf1 | ||
|
|
3ddcad64f5 | ||
|
|
05c35b6975 | ||
|
|
7fc39ad624 | ||
|
|
2520ebb145 | ||
|
|
89c8b49027 | ||
|
|
e05953a991 | ||
|
|
d0ac38f9d3 | ||
|
|
ff53db3d99 | ||
|
|
899a52390b | ||
|
|
82a3e69b8a | ||
|
|
7027344dfc | ||
|
|
fb9f0c448f | ||
|
|
75131c6f4a | ||
|
|
4b9ddc4a06 | ||
|
|
99dc208b00 | ||
|
|
540023262e | ||
|
|
2ea79572ae | ||
|
|
aa0db338d9 | ||
|
|
a99a45868c | ||
|
|
fabf19fdc9 | ||
|
|
59f712a566 | ||
|
|
1d080465de | ||
|
|
3481e16cdf | ||
|
|
d7e9280e1e | ||
|
|
f237d36d2f | ||
|
|
12f22b1fdd | ||
|
|
47d05ee9ea | ||
|
|
9caec36b59 | ||
|
|
cb364780b3 | ||
|
|
71b8bf211f | ||
|
|
109b9cc4f9 | ||
|
|
5d02627794 | ||
|
|
65444bb00e | ||
|
|
8822a79b4d | ||
|
|
2df4d23113 | ||
|
|
603b62607a | ||
|
|
2b71fc4510 | ||
|
|
1b78d872ec |
16
.github/renovate.json5
vendored
16
.github/renovate.json5
vendored
@@ -14,12 +14,26 @@
|
||||
rangeStrategy: "update-lockfile",
|
||||
},
|
||||
pep621: {
|
||||
// The default for this package manager is to only search for `pyproject.toml` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
|
||||
fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
|
||||
},
|
||||
pip_requirements: {
|
||||
fileMatch: ["^docs/requirements.*\\.txt$"],
|
||||
// The default for this package manager is to run on all requirements.txt files:
|
||||
// https://docs.renovatebot.com/modules/manager/pip_requirements/#file-matching
|
||||
// `fileMatch` doesn't work for excluding files; to exclude `requirements.txt` files
|
||||
// outside the `doc/` directory, we instead have to use `ignorePaths`. Unlike `fileMatch`,
|
||||
// which takes a regex string, `ignorePaths` takes a glob string, so we have to use
|
||||
// a "negative glob pattern".
|
||||
// See:
|
||||
// - https://docs.renovatebot.com/modules/manager/#ignoring-files-that-match-the-default-filematch
|
||||
// - https://docs.renovatebot.com/configuration-options/#ignorepaths
|
||||
// - https://docs.renovatebot.com/string-pattern-matching/#negative-matching
|
||||
ignorePaths: ["!docs/requirements*.txt"]
|
||||
},
|
||||
npm: {
|
||||
// The default for this package manager is to only search for `package.json` files
|
||||
// found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
|
||||
fileMatch: ["^playground/.*package\\.json$"],
|
||||
},
|
||||
"pre-commit": {
|
||||
|
||||
7
.github/workflows/ci.yaml
vendored
7
.github/workflows/ci.yaml
vendored
@@ -142,6 +142,13 @@ jobs:
|
||||
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
# Use --document-private-items so that all our doc comments are kept in
|
||||
# sync, not just public items. Eventually we should do this for all
|
||||
# crates; for now add crates here as they are warning-clean to prevent
|
||||
# regression.
|
||||
- run: cargo doc --no-deps -p red_knot_python_semantic -p red_knot -p ruff_db --document-private-items
|
||||
env:
|
||||
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
|
||||
RUSTDOCFLAGS: "-D warnings"
|
||||
|
||||
@@ -14,6 +14,9 @@ MD041: false
|
||||
# MD013/line-length
|
||||
MD013: false
|
||||
|
||||
# MD014/commands-show-output
|
||||
MD014: false
|
||||
|
||||
# MD024/no-duplicate-heading
|
||||
MD024:
|
||||
# Allow when nested under different parents e.g. CHANGELOG.md
|
||||
|
||||
@@ -57,7 +57,7 @@ repos:
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.5.6
|
||||
rev: v0.5.7
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
|
||||
@@ -911,9 +911,5 @@ There are three ways in which an import can be categorized as "first-party":
|
||||
the `src` setting and, for each directory, check for the existence of a subdirectory `foo` or a
|
||||
file `foo.py`.
|
||||
|
||||
By default, `src` is set to the project root. In the above example, we'd want to set
|
||||
`src = ["./src"]` to ensure that we locate `./my_project/src/foo` and thus categorize `import foo`
|
||||
as first-party in `baz.py`. In practice, for this limited example, setting `src = ["./src"]` is
|
||||
unnecessary, as all imports within `./my_project/src/foo` would be categorized as first-party via
|
||||
the same-package heuristic; but if your project contains multiple packages, you'll want to set `src`
|
||||
explicitly.
|
||||
By default, `src` is set to the project root, along with `"src"` subdirectory in the project root.
|
||||
This ensures that Ruff supports both flat and "src" layouts out of the box.
|
||||
|
||||
135
Cargo.lock
generated
135
Cargo.lock
generated
@@ -95,9 +95,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.6"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
|
||||
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
@@ -288,7 +288,7 @@ dependencies = [
|
||||
"android-tzdata",
|
||||
"iana-time-zone",
|
||||
"num-traits",
|
||||
"windows-targets 0.52.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -320,9 +320,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.13"
|
||||
version = "4.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc"
|
||||
checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
@@ -330,9 +330,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.13"
|
||||
version = "4.5.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99"
|
||||
checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
@@ -820,14 +820,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.23"
|
||||
version = "0.2.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
|
||||
checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"windows-sys 0.52.0",
|
||||
"libredox",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1143,9 +1143,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "is-macro"
|
||||
version = "0.3.5"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "59a85abdc13717906baccb5a1e435556ce0df215f242892f721dff62bf25288f"
|
||||
checksum = "2069faacbe981460232f880d26bf3c7634e322d49053aa48c27e3ae642f728f1"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"proc-macro2",
|
||||
@@ -1297,6 +1297,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
"libc",
|
||||
"redox_syscall 0.5.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1564,7 +1565,7 @@ checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"redox_syscall 0.4.1",
|
||||
"smallvec",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
@@ -1899,9 +1900,11 @@ dependencies = [
|
||||
"ruff_python_ast",
|
||||
"ruff_python_parser",
|
||||
"ruff_python_stdlib",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"smallvec",
|
||||
"tempfile",
|
||||
"tracing",
|
||||
"walkdir",
|
||||
@@ -1961,6 +1964,7 @@ dependencies = [
|
||||
"ruff_cache",
|
||||
"ruff_db",
|
||||
"ruff_python_ast",
|
||||
"ruff_text_size",
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"thiserror",
|
||||
@@ -1976,6 +1980,15 @@ dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
|
||||
dependencies = [
|
||||
"bitflags 2.6.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.4.5"
|
||||
@@ -2156,6 +2169,7 @@ dependencies = [
|
||||
"rustc-hash 2.0.0",
|
||||
"salsa",
|
||||
"tempfile",
|
||||
"thiserror",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"tracing-tree",
|
||||
@@ -2786,9 +2800,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.204"
|
||||
version = "1.0.206"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
|
||||
checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
@@ -2806,9 +2820,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.204"
|
||||
version = "1.0.206"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
|
||||
checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2828,9 +2842,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.122"
|
||||
version = "1.0.124"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
|
||||
checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
@@ -2860,9 +2874,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "serde_test"
|
||||
version = "1.0.176"
|
||||
version = "1.0.177"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab"
|
||||
checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
@@ -2989,9 +3003,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.72"
|
||||
version = "2.0.74"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
|
||||
checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -3011,15 +3025,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.11.0"
|
||||
version = "3.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53"
|
||||
checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"fastrand",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3395,9 +3409,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "2.10.0"
|
||||
version = "2.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72139d247e5f97a3eff96229a7ae85ead5328a39efe76f8bf5a06313d505b6ea"
|
||||
checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"flate2",
|
||||
@@ -3689,7 +3703,7 @@ version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3707,7 +3721,16 @@ version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.5",
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3727,18 +3750,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.52.5",
|
||||
"windows_aarch64_msvc 0.52.5",
|
||||
"windows_i686_gnu 0.52.5",
|
||||
"windows_aarch64_gnullvm 0.52.6",
|
||||
"windows_aarch64_msvc 0.52.6",
|
||||
"windows_i686_gnu 0.52.6",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc 0.52.5",
|
||||
"windows_x86_64_gnu 0.52.5",
|
||||
"windows_x86_64_gnullvm 0.52.5",
|
||||
"windows_x86_64_msvc 0.52.5",
|
||||
"windows_i686_msvc 0.52.6",
|
||||
"windows_x86_64_gnu 0.52.6",
|
||||
"windows_x86_64_gnullvm 0.52.6",
|
||||
"windows_x86_64_msvc 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3749,9 +3772,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
@@ -3761,9 +3784,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
@@ -3773,15 +3796,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
@@ -3791,9 +3814,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
@@ -3803,9 +3826,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
@@ -3815,9 +3838,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
@@ -3827,9 +3850,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.5"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
||||
[[package]]
|
||||
name = "winnow"
|
||||
|
||||
@@ -184,7 +184,7 @@ fn run() -> anyhow::Result<ExitStatus> {
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system);
|
||||
let mut db = RootDatabase::new(workspace_metadata, program_settings, system)?;
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) = MainLoop::new();
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ use std::io::Write;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use salsa::Setter;
|
||||
|
||||
use red_knot_python_semantic::{
|
||||
resolve_module, ModuleName, Program, ProgramSettings, PythonVersion, SearchPathSettings,
|
||||
@@ -26,6 +25,7 @@ struct TestCase {
|
||||
/// We need to hold on to it in the test case or the temp files get deleted.
|
||||
_temp_dir: tempfile::TempDir,
|
||||
root_dir: SystemPathBuf,
|
||||
search_path_settings: SearchPathSettings,
|
||||
}
|
||||
|
||||
impl TestCase {
|
||||
@@ -108,18 +108,20 @@ impl TestCase {
|
||||
fn update_search_path_settings(
|
||||
&mut self,
|
||||
f: impl FnOnce(&SearchPathSettings) -> SearchPathSettings,
|
||||
) {
|
||||
) -> anyhow::Result<()> {
|
||||
let program = Program::get(self.db());
|
||||
let search_path_settings = program.search_paths(self.db());
|
||||
|
||||
let new_settings = f(search_path_settings);
|
||||
let new_settings = f(&self.search_path_settings);
|
||||
|
||||
program.set_search_paths(&mut self.db).to(new_settings);
|
||||
program.update_search_paths(&mut self.db, new_settings.clone())?;
|
||||
self.search_path_settings = new_settings;
|
||||
|
||||
if let Some(watcher) = &mut self.watcher {
|
||||
watcher.update(&self.db);
|
||||
assert!(!watcher.has_errored_paths());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn collect_package_files(&self, path: &SystemPath) -> Vec<File> {
|
||||
@@ -221,13 +223,13 @@ where
|
||||
let system = OsSystem::new(&workspace_path);
|
||||
|
||||
let workspace = WorkspaceMetadata::from_path(&workspace_path, &system)?;
|
||||
let search_paths = create_search_paths(&root_path, workspace.root());
|
||||
let search_path_settings = create_search_paths(&root_path, workspace.root());
|
||||
|
||||
for path in search_paths
|
||||
for path in search_path_settings
|
||||
.extra_paths
|
||||
.iter()
|
||||
.chain(search_paths.site_packages.iter())
|
||||
.chain(search_paths.custom_typeshed.iter())
|
||||
.chain(search_path_settings.site_packages.iter())
|
||||
.chain(search_path_settings.custom_typeshed.iter())
|
||||
{
|
||||
std::fs::create_dir_all(path.as_std_path())
|
||||
.with_context(|| format!("Failed to create search path '{path}'"))?;
|
||||
@@ -235,10 +237,10 @@ where
|
||||
|
||||
let settings = ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths,
|
||||
search_paths: search_path_settings.clone(),
|
||||
};
|
||||
|
||||
let db = RootDatabase::new(workspace, settings, system);
|
||||
let db = RootDatabase::new(workspace, settings, system)?;
|
||||
|
||||
let (sender, receiver) = crossbeam::channel::unbounded();
|
||||
let watcher = directory_watcher(move |events| sender.send(events).unwrap())
|
||||
@@ -253,6 +255,7 @@ where
|
||||
watcher: Some(watcher),
|
||||
_temp_dir: temp_dir,
|
||||
root_dir: root_path,
|
||||
search_path_settings,
|
||||
};
|
||||
|
||||
// Sometimes the file watcher reports changes for events that happened before the watcher was started.
|
||||
@@ -737,7 +740,8 @@ fn add_search_path() -> anyhow::Result<()> {
|
||||
case.update_search_path_settings(|settings| SearchPathSettings {
|
||||
site_packages: vec![site_packages.clone()],
|
||||
..settings.clone()
|
||||
});
|
||||
})
|
||||
.expect("Search path settings to be valid");
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
@@ -767,7 +771,8 @@ fn remove_search_path() -> anyhow::Result<()> {
|
||||
case.update_search_path_settings(|settings| SearchPathSettings {
|
||||
site_packages: vec![],
|
||||
..settings.clone()
|
||||
});
|
||||
})
|
||||
.expect("Search path settings to be valid");
|
||||
|
||||
std::fs::write(site_packages.join("a.py").as_std_path(), "class A: ...")?;
|
||||
|
||||
|
||||
@@ -15,8 +15,10 @@ ruff_db = { workspace = true }
|
||||
ruff_index = { workspace = true }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_python_stdlib = { workspace = true }
|
||||
ruff_source_file = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bitflags = { workspace = true }
|
||||
camino = { workspace = true }
|
||||
compact_str = { workspace = true }
|
||||
@@ -24,6 +26,7 @@ countme = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
ordermap = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
smallvec = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
rustc-hash = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
@@ -34,7 +37,7 @@ walkdir = { workspace = true }
|
||||
zip = { workspace = true, features = ["zstd", "deflate"] }
|
||||
|
||||
[dev-dependencies]
|
||||
ruff_db = { workspace = true, features = ["os", "testing"]}
|
||||
ruff_db = { workspace = true, features = ["os", "testing"] }
|
||||
ruff_python_parser = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
|
||||
@@ -2,11 +2,13 @@ use std::iter::FusedIterator;
|
||||
|
||||
pub(crate) use module::Module;
|
||||
pub use resolver::resolve_module;
|
||||
pub(crate) use resolver::SearchPaths;
|
||||
use ruff_db::system::SystemPath;
|
||||
pub use typeshed::vendored_typeshed_stubs;
|
||||
|
||||
use crate::module_resolver::resolver::search_paths;
|
||||
use crate::Db;
|
||||
use resolver::{module_resolution_settings, SearchPathIterator};
|
||||
use resolver::SearchPathIterator;
|
||||
|
||||
mod module;
|
||||
mod path;
|
||||
@@ -20,7 +22,7 @@ mod testing;
|
||||
/// Returns an iterator over all search paths pointing to a system path
|
||||
pub fn system_module_search_paths(db: &dyn Db) -> SystemModuleSearchPathsIter {
|
||||
SystemModuleSearchPathsIter {
|
||||
inner: module_resolution_settings(db).search_paths(db),
|
||||
inner: search_paths(db),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,12 +7,13 @@ use ruff_db::files::{File, FilePath, FileRootKind};
|
||||
use ruff_db::system::{DirectoryEntry, SystemPath, SystemPathBuf};
|
||||
use ruff_db::vendored::VendoredPath;
|
||||
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::{Program, SearchPathSettings};
|
||||
|
||||
use super::module::{Module, ModuleKind};
|
||||
use super::path::{ModulePath, SearchPath, SearchPathValidationError};
|
||||
use super::state::ResolverState;
|
||||
use crate::db::Db;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::{Program, PythonVersion, SearchPathSettings};
|
||||
|
||||
/// Resolves a module name to a module.
|
||||
pub fn resolve_module(db: &dyn Db, module_name: ModuleName) -> Option<Module> {
|
||||
@@ -84,9 +85,7 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
|
||||
FilePath::SystemVirtual(_) => return None,
|
||||
};
|
||||
|
||||
let settings = module_resolution_settings(db);
|
||||
|
||||
let mut search_paths = settings.search_paths(db);
|
||||
let mut search_paths = search_paths(db);
|
||||
|
||||
let module_name = loop {
|
||||
let candidate = search_paths.next()?;
|
||||
@@ -119,106 +118,122 @@ pub(crate) fn file_to_module(db: &dyn Db, file: File) -> Option<Module> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate and normalize the raw settings given by the user
|
||||
/// into settings we can use for module resolution
|
||||
///
|
||||
/// This method also implements the typing spec's [module resolution order].
|
||||
///
|
||||
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
|
||||
fn try_resolve_module_resolution_settings(
|
||||
db: &dyn Db,
|
||||
) -> Result<ModuleResolutionSettings, SearchPathValidationError> {
|
||||
let program = Program::get(db.upcast());
|
||||
|
||||
let SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root,
|
||||
custom_typeshed,
|
||||
site_packages,
|
||||
} = program.search_paths(db.upcast());
|
||||
|
||||
if !extra_paths.is_empty() {
|
||||
tracing::info!("Extra search paths: {extra_paths:?}");
|
||||
}
|
||||
|
||||
if let Some(custom_typeshed) = custom_typeshed {
|
||||
tracing::info!("Custom typeshed directory: {custom_typeshed}");
|
||||
}
|
||||
|
||||
let system = db.system();
|
||||
let files = db.files();
|
||||
|
||||
let mut static_search_paths = vec![];
|
||||
|
||||
for path in extra_paths {
|
||||
let search_path = SearchPath::extra(system, path.clone())?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
static_search_paths.push(search_path);
|
||||
}
|
||||
|
||||
static_search_paths.push(SearchPath::first_party(system, src_root.clone())?);
|
||||
|
||||
static_search_paths.push(if let Some(custom_typeshed) = custom_typeshed.as_ref() {
|
||||
let search_path = SearchPath::custom_stdlib(db, custom_typeshed.clone())?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
search_path
|
||||
} else {
|
||||
SearchPath::vendored_stdlib()
|
||||
});
|
||||
|
||||
let mut site_packages_paths: Vec<_> = Vec::with_capacity(site_packages.len());
|
||||
|
||||
for path in site_packages {
|
||||
let search_path = SearchPath::site_packages(system, path.to_path_buf())?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
site_packages_paths.push(search_path);
|
||||
}
|
||||
|
||||
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
|
||||
|
||||
let target_version = program.target_version(db.upcast());
|
||||
tracing::info!("Target version: {target_version}");
|
||||
|
||||
// Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
|
||||
// (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
|
||||
// as module resolution paths simultaneously.)
|
||||
//
|
||||
// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
|
||||
// This code doesn't use an `IndexSet` because the key is the system path and not the search root.
|
||||
let mut seen_paths =
|
||||
FxHashSet::with_capacity_and_hasher(static_search_paths.len(), FxBuildHasher);
|
||||
|
||||
static_search_paths.retain(|path| {
|
||||
if let Some(path) = path.as_system_path() {
|
||||
seen_paths.insert(path.to_path_buf())
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
Ok(ModuleResolutionSettings {
|
||||
target_version,
|
||||
static_search_paths,
|
||||
site_packages_paths,
|
||||
})
|
||||
pub(crate) fn search_paths(db: &dyn Db) -> SearchPathIterator {
|
||||
Program::get(db).search_paths(db).iter(db)
|
||||
}
|
||||
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSettings {
|
||||
// TODO proper error handling if this returns an error:
|
||||
try_resolve_module_resolution_settings(db).unwrap()
|
||||
#[derive(Debug, PartialEq, Eq, Default)]
|
||||
pub(crate) struct SearchPaths {
|
||||
/// Search paths that have been statically determined purely from reading Ruff's configuration settings.
|
||||
/// These shouldn't ever change unless the config settings themselves change.
|
||||
static_paths: Vec<SearchPath>,
|
||||
|
||||
/// site-packages paths are not included in the above field:
|
||||
/// if there are multiple site-packages paths, editable installations can appear
|
||||
/// *between* the site-packages paths on `sys.path` at runtime.
|
||||
/// That means we can't know where a second or third `site-packages` path should sit
|
||||
/// in terms of module-resolution priority until we've discovered the editable installs
|
||||
/// for the first `site-packages` path
|
||||
site_packages: Vec<SearchPath>,
|
||||
}
|
||||
|
||||
impl SearchPaths {
|
||||
/// Validate and normalize the raw settings given by the user
|
||||
/// into settings we can use for module resolution
|
||||
///
|
||||
/// This method also implements the typing spec's [module resolution order].
|
||||
///
|
||||
/// [module resolution order]: https://typing.readthedocs.io/en/latest/spec/distributing.html#import-resolution-ordering
|
||||
pub(crate) fn from_settings(
|
||||
db: &dyn Db,
|
||||
settings: SearchPathSettings,
|
||||
) -> Result<Self, SearchPathValidationError> {
|
||||
let SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root,
|
||||
custom_typeshed,
|
||||
site_packages: site_packages_paths,
|
||||
} = settings;
|
||||
|
||||
let system = db.system();
|
||||
let files = db.files();
|
||||
|
||||
let mut static_paths = vec![];
|
||||
|
||||
for path in extra_paths {
|
||||
tracing::debug!("Adding static extra search-path '{path}'");
|
||||
|
||||
let search_path = SearchPath::extra(system, path)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
static_paths.push(search_path);
|
||||
}
|
||||
|
||||
tracing::debug!("Adding static search path '{src_root}'");
|
||||
static_paths.push(SearchPath::first_party(system, src_root)?);
|
||||
|
||||
static_paths.push(if let Some(custom_typeshed) = custom_typeshed {
|
||||
tracing::debug!("Adding static custom-sdtlib search-path '{custom_typeshed}'");
|
||||
|
||||
let search_path = SearchPath::custom_stdlib(db, custom_typeshed)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
search_path
|
||||
} else {
|
||||
SearchPath::vendored_stdlib()
|
||||
});
|
||||
|
||||
let mut site_packages: Vec<_> = Vec::with_capacity(site_packages_paths.len());
|
||||
|
||||
for path in site_packages_paths {
|
||||
tracing::debug!("Adding site-package path '{path}'");
|
||||
let search_path = SearchPath::site_packages(system, path)?;
|
||||
files.try_add_root(
|
||||
db.upcast(),
|
||||
search_path.as_system_path().unwrap(),
|
||||
FileRootKind::LibrarySearchPath,
|
||||
);
|
||||
site_packages.push(search_path);
|
||||
}
|
||||
|
||||
// TODO vendor typeshed's third-party stubs as well as the stdlib and fallback to them as a final step
|
||||
|
||||
// Filter out module resolution paths that point to the same directory on disk (the same invariant maintained by [`sys.path` at runtime]).
|
||||
// (Paths may, however, *overlap* -- e.g. you could have both `src/` and `src/foo`
|
||||
// as module resolution paths simultaneously.)
|
||||
//
|
||||
// This code doesn't use an `IndexSet` because the key is the system path and not the search root.
|
||||
//
|
||||
// [`sys.path` at runtime]: https://docs.python.org/3/library/site.html#module-site
|
||||
let mut seen_paths = FxHashSet::with_capacity_and_hasher(static_paths.len(), FxBuildHasher);
|
||||
|
||||
static_paths.retain(|path| {
|
||||
if let Some(path) = path.as_system_path() {
|
||||
seen_paths.insert(path.to_path_buf())
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
|
||||
Ok(SearchPaths {
|
||||
static_paths,
|
||||
site_packages,
|
||||
})
|
||||
}
|
||||
|
||||
pub(crate) fn iter<'a>(&'a self, db: &'a dyn Db) -> SearchPathIterator<'a> {
|
||||
SearchPathIterator {
|
||||
db,
|
||||
static_paths: self.static_paths.iter(),
|
||||
dynamic_paths: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect all dynamic search paths. For each `site-packages` path:
|
||||
@@ -231,19 +246,20 @@ pub(crate) fn module_resolution_settings(db: &dyn Db) -> ModuleResolutionSetting
|
||||
/// module-resolution priority.
|
||||
#[salsa::tracked(return_ref)]
|
||||
pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
|
||||
let ModuleResolutionSettings {
|
||||
target_version: _,
|
||||
static_search_paths,
|
||||
site_packages_paths,
|
||||
} = module_resolution_settings(db);
|
||||
tracing::debug!("Resolving dynamic module resolution paths");
|
||||
|
||||
let SearchPaths {
|
||||
static_paths,
|
||||
site_packages,
|
||||
} = Program::get(db).search_paths(db);
|
||||
|
||||
let mut dynamic_paths = Vec::new();
|
||||
|
||||
if site_packages_paths.is_empty() {
|
||||
if site_packages.is_empty() {
|
||||
return dynamic_paths;
|
||||
}
|
||||
|
||||
let mut existing_paths: FxHashSet<_> = static_search_paths
|
||||
let mut existing_paths: FxHashSet<_> = static_paths
|
||||
.iter()
|
||||
.filter_map(|path| path.as_system_path())
|
||||
.map(Cow::Borrowed)
|
||||
@@ -252,7 +268,7 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
|
||||
let files = db.files();
|
||||
let system = db.system();
|
||||
|
||||
for site_packages_search_path in site_packages_paths {
|
||||
for site_packages_search_path in site_packages {
|
||||
let site_packages_dir = site_packages_search_path
|
||||
.as_system_path()
|
||||
.expect("Expected site package path to be a system path");
|
||||
@@ -302,6 +318,10 @@ pub(crate) fn dynamic_resolution_paths(db: &dyn Db) -> Vec<SearchPath> {
|
||||
if existing_paths.insert(Cow::Owned(installation.clone())) {
|
||||
match SearchPath::editable(system, installation) {
|
||||
Ok(search_path) => {
|
||||
tracing::debug!(
|
||||
"Adding editable installation to module resolution path {path}",
|
||||
path = search_path.as_system_path().unwrap()
|
||||
);
|
||||
dynamic_paths.push(search_path);
|
||||
}
|
||||
|
||||
@@ -448,38 +468,6 @@ impl<'db> Iterator for PthFileIterator<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validated and normalized module-resolution settings.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub(crate) struct ModuleResolutionSettings {
|
||||
target_version: PythonVersion,
|
||||
|
||||
/// Search paths that have been statically determined purely from reading Ruff's configuration settings.
|
||||
/// These shouldn't ever change unless the config settings themselves change.
|
||||
static_search_paths: Vec<SearchPath>,
|
||||
|
||||
/// site-packages paths are not included in the above field:
|
||||
/// if there are multiple site-packages paths, editable installations can appear
|
||||
/// *between* the site-packages paths on `sys.path` at runtime.
|
||||
/// That means we can't know where a second or third `site-packages` path should sit
|
||||
/// in terms of module-resolution priority until we've discovered the editable installs
|
||||
/// for the first `site-packages` path
|
||||
site_packages_paths: Vec<SearchPath>,
|
||||
}
|
||||
|
||||
impl ModuleResolutionSettings {
|
||||
fn target_version(&self) -> PythonVersion {
|
||||
self.target_version
|
||||
}
|
||||
|
||||
pub(crate) fn search_paths<'db>(&'db self, db: &'db dyn Db) -> SearchPathIterator<'db> {
|
||||
SearchPathIterator {
|
||||
db,
|
||||
static_paths: self.static_search_paths.iter(),
|
||||
dynamic_paths: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A thin wrapper around `ModuleName` to make it a Salsa ingredient.
|
||||
///
|
||||
/// This is needed because Salsa requires that all query arguments are salsa ingredients.
|
||||
@@ -492,13 +480,13 @@ struct ModuleNameIngredient<'db> {
|
||||
/// Given a module name and a list of search paths in which to lookup modules,
|
||||
/// attempt to resolve the module name
|
||||
fn resolve_name(db: &dyn Db, name: &ModuleName) -> Option<(SearchPath, File, ModuleKind)> {
|
||||
let resolver_settings = module_resolution_settings(db);
|
||||
let target_version = resolver_settings.target_version();
|
||||
let program = Program::get(db);
|
||||
let target_version = program.target_version(db);
|
||||
let resolver_state = ResolverState::new(db, target_version);
|
||||
let is_builtin_module =
|
||||
ruff_python_stdlib::sys::is_builtin_module(target_version.minor, name.as_str());
|
||||
|
||||
for search_path in resolver_settings.search_paths(db) {
|
||||
for search_path in search_paths(db) {
|
||||
// When a builtin module is imported, standard module resolution is bypassed:
|
||||
// the module name always resolves to the stdlib module,
|
||||
// even if there's a module of the same name in the first-party root
|
||||
@@ -652,6 +640,8 @@ mod tests {
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::module::ModuleKind;
|
||||
use crate::module_resolver::testing::{FileSpec, MockedTypeshed, TestCase, TestCaseBuilder};
|
||||
use crate::ProgramSettings;
|
||||
use crate::PythonVersion;
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -1202,14 +1192,19 @@ mod tests {
|
||||
std::fs::write(foo.as_std_path(), "")?;
|
||||
std::os::unix::fs::symlink(foo.as_std_path(), bar.as_std_path())?;
|
||||
|
||||
let search_paths = SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(custom_typeshed.clone()),
|
||||
site_packages: vec![site_packages],
|
||||
};
|
||||
|
||||
Program::new(&db, PythonVersion::PY38, search_paths);
|
||||
Program::from_settings(
|
||||
&db,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::PY38,
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(custom_typeshed.clone()),
|
||||
site_packages: vec![site_packages],
|
||||
},
|
||||
},
|
||||
)
|
||||
.context("Invalid program settings")?;
|
||||
|
||||
let foo_module = resolve_module(&db, ModuleName::new_static("foo").unwrap()).unwrap();
|
||||
let bar_module = resolve_module(&db, ModuleName::new_static("bar").unwrap()).unwrap();
|
||||
@@ -1673,8 +1668,7 @@ not_a_directory
|
||||
.with_site_packages_files(&[("_foo.pth", "/src")])
|
||||
.build();
|
||||
|
||||
let search_paths: Vec<&SearchPath> =
|
||||
module_resolution_settings(&db).search_paths(&db).collect();
|
||||
let search_paths: Vec<&SearchPath> = search_paths(&db).collect();
|
||||
|
||||
assert!(search_paths.contains(
|
||||
&&SearchPath::first_party(db.system(), SystemPathBuf::from("/src")).unwrap()
|
||||
@@ -1703,16 +1697,19 @@ not_a_directory
|
||||
])
|
||||
.unwrap();
|
||||
|
||||
Program::new(
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![venv_site_packages, system_site_packages],
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![venv_site_packages, system_site_packages],
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid program settings");
|
||||
|
||||
// The editable installs discovered from the `.pth` file in the first `site-packages` directory
|
||||
// take precedence over the second `site-packages` directory...
|
||||
|
||||
@@ -4,6 +4,7 @@ use ruff_db::vendored::VendoredPathBuf;
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::ProgramSettings;
|
||||
|
||||
/// A test case for the module resolver.
|
||||
///
|
||||
@@ -220,16 +221,19 @@ impl TestCaseBuilder<MockedTypeshed> {
|
||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
||||
let typeshed = Self::build_typeshed_mock(&mut db, &typeshed_option);
|
||||
|
||||
Program::new(
|
||||
Program::from_settings(
|
||||
&db,
|
||||
target_version,
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(typeshed.clone()),
|
||||
site_packages: vec![site_packages.clone()],
|
||||
ProgramSettings {
|
||||
target_version,
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: Some(typeshed.clone()),
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid program settings");
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
@@ -273,16 +277,19 @@ impl TestCaseBuilder<VendoredTypeshed> {
|
||||
Self::write_mock_directory(&mut db, "/site-packages", site_packages_files);
|
||||
let src = Self::write_mock_directory(&mut db, "/src", first_party_files);
|
||||
|
||||
Program::new(
|
||||
Program::from_settings(
|
||||
&db,
|
||||
target_version,
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![site_packages.clone()],
|
||||
ProgramSettings {
|
||||
target_version,
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
custom_typeshed: None,
|
||||
site_packages: vec![site_packages.clone()],
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
TestCase {
|
||||
db,
|
||||
|
||||
@@ -1,21 +1,53 @@
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::Db;
|
||||
use ruff_db::system::SystemPathBuf;
|
||||
use anyhow::Context;
|
||||
use salsa::Durability;
|
||||
use salsa::Setter;
|
||||
|
||||
use ruff_db::system::SystemPathBuf;
|
||||
|
||||
use crate::module_resolver::SearchPaths;
|
||||
use crate::Db;
|
||||
|
||||
#[salsa::input(singleton)]
|
||||
pub struct Program {
|
||||
pub target_version: PythonVersion,
|
||||
|
||||
#[default]
|
||||
#[return_ref]
|
||||
pub search_paths: SearchPathSettings,
|
||||
pub(crate) search_paths: SearchPaths,
|
||||
}
|
||||
|
||||
impl Program {
|
||||
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> Self {
|
||||
Program::builder(settings.target_version, settings.search_paths)
|
||||
pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
|
||||
let ProgramSettings {
|
||||
target_version,
|
||||
search_paths,
|
||||
} = settings;
|
||||
|
||||
tracing::info!("Target version: {target_version}");
|
||||
|
||||
let search_paths = SearchPaths::from_settings(db, search_paths)
|
||||
.with_context(|| "Invalid search path settings")?;
|
||||
|
||||
Ok(Program::builder(settings.target_version)
|
||||
.durability(Durability::HIGH)
|
||||
.new(db)
|
||||
.search_paths(search_paths)
|
||||
.new(db))
|
||||
}
|
||||
|
||||
pub fn update_search_paths(
|
||||
&self,
|
||||
db: &mut dyn Db,
|
||||
search_path_settings: SearchPathSettings,
|
||||
) -> anyhow::Result<()> {
|
||||
let search_paths = SearchPaths::from_settings(db, search_path_settings)?;
|
||||
|
||||
if self.search_paths(db) != &search_paths {
|
||||
tracing::debug!("Update search paths");
|
||||
self.set_search_paths(db).to(search_paths);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -89,8 +89,6 @@ pub(crate) struct SemanticIndex<'db> {
|
||||
scopes: IndexVec<FileScopeId, Scope>,
|
||||
|
||||
/// Map expressions to their corresponding scope.
|
||||
/// We can't use [`ExpressionId`] here, because the challenge is how to get from
|
||||
/// an [`ast::Expr`] to an [`ExpressionId`] (which requires knowing the scope).
|
||||
scopes_by_expression: FxHashMap<ExpressionNodeKey, FileScopeId>,
|
||||
|
||||
/// Map from a node creating a definition to its definition.
|
||||
@@ -118,7 +116,7 @@ pub(crate) struct SemanticIndex<'db> {
|
||||
impl<'db> SemanticIndex<'db> {
|
||||
/// Returns the symbol table for a specific scope.
|
||||
///
|
||||
/// Use the Salsa cached [`symbol_table`] query if you only need the
|
||||
/// Use the Salsa cached [`symbol_table()`] query if you only need the
|
||||
/// symbol table for a single scope.
|
||||
pub(super) fn symbol_table(&self, scope_id: FileScopeId) -> Arc<SymbolTable> {
|
||||
self.symbol_tables[scope_id].clone()
|
||||
@@ -126,9 +124,9 @@ impl<'db> SemanticIndex<'db> {
|
||||
|
||||
/// Returns the use-def map for a specific scope.
|
||||
///
|
||||
/// Use the Salsa cached [`use_def_map`] query if you only need the
|
||||
/// Use the Salsa cached [`use_def_map()`] query if you only need the
|
||||
/// use-def map for a single scope.
|
||||
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap> {
|
||||
pub(super) fn use_def_map(&self, scope_id: FileScopeId) -> Arc<UseDefMap<'db>> {
|
||||
self.use_def_maps[scope_id].clone()
|
||||
}
|
||||
|
||||
@@ -309,10 +307,11 @@ mod tests {
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::DbWithTestSystem;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::semantic_index::ast_ids::HasScopedUseId;
|
||||
use crate::semantic_index::definition::DefinitionKind;
|
||||
use crate::semantic_index::definition::{DefinitionKind, DefinitionNode};
|
||||
use crate::semantic_index::symbol::{FileScopeId, Scope, ScopeKind, SymbolTable};
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::Db;
|
||||
@@ -375,10 +374,11 @@ mod tests {
|
||||
let foo = global_table.symbol_id_by_name("foo").unwrap();
|
||||
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.public_definitions(foo) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Import(_)));
|
||||
let definition = use_def.public_definition(foo).unwrap();
|
||||
assert!(matches!(
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::Import(_))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -412,16 +412,16 @@ mod tests {
|
||||
);
|
||||
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.public_definitions(
|
||||
global_table
|
||||
.symbol_id_by_name("foo")
|
||||
.expect("symbol to exist"),
|
||||
) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def
|
||||
.public_definition(
|
||||
global_table
|
||||
.symbol_id_by_name("foo")
|
||||
.expect("symbol to exist"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::ImportFrom(_)
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::ImportFrom(_))
|
||||
));
|
||||
}
|
||||
|
||||
@@ -439,14 +439,12 @@ mod tests {
|
||||
"a symbol used but not defined in a scope should have only the used flag"
|
||||
);
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] =
|
||||
use_def.public_definitions(global_table.symbol_id_by_name("x").expect("symbol exists"))
|
||||
else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def
|
||||
.public_definition(global_table.symbol_id_by_name("x").expect("symbol exists"))
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::Assignment(_)
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::Assignment(_))
|
||||
));
|
||||
}
|
||||
|
||||
@@ -478,14 +476,12 @@ y = 2
|
||||
assert_eq!(names(&class_table), vec!["x"]);
|
||||
|
||||
let use_def = index.use_def_map(class_scope_id);
|
||||
let [definition] =
|
||||
use_def.public_definitions(class_table.symbol_id_by_name("x").expect("symbol exists"))
|
||||
else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def
|
||||
.public_definition(class_table.symbol_id_by_name("x").expect("symbol exists"))
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::Assignment(_)
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::Assignment(_))
|
||||
));
|
||||
}
|
||||
|
||||
@@ -516,19 +512,151 @@ y = 2
|
||||
assert_eq!(names(&function_table), vec!["x"]);
|
||||
|
||||
let use_def = index.use_def_map(function_scope_id);
|
||||
let [definition] = use_def.public_definitions(
|
||||
function_table
|
||||
.symbol_id_by_name("x")
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let definition = use_def
|
||||
.public_definition(
|
||||
function_table
|
||||
.symbol_id_by_name("x")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.node(&db),
|
||||
DefinitionKind::Assignment(_)
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::Assignment(_))
|
||||
));
|
||||
}
|
||||
|
||||
/// Test case to validate that the comprehension scope is correctly identified and that the target
|
||||
/// variable is defined only in the comprehension scope and not in the global scope.
|
||||
#[test]
|
||||
fn comprehension_scope() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[x for x in iter1]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&global_table), vec!["iter1"]);
|
||||
|
||||
let [(comprehension_scope_id, comprehension_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
comprehension_scope_id.to_scope_id(&db, file).name(&db),
|
||||
"<listcomp>"
|
||||
);
|
||||
|
||||
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&comprehension_symbol_table), vec!["x"]);
|
||||
}
|
||||
|
||||
/// Test case to validate that the `x` variable used in the comprehension is referencing the
|
||||
/// `x` variable defined by the inner generator (`for x in iter2`) and not the outer one.
|
||||
#[test]
|
||||
fn multiple_generators() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[x for x in iter1 for x in iter2]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let [(comprehension_scope_id, _)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
let use_def = index.use_def_map(comprehension_scope_id);
|
||||
|
||||
let module = parsed_module(&db, file).syntax();
|
||||
let element = module.body[0]
|
||||
.as_expr_stmt()
|
||||
.unwrap()
|
||||
.value
|
||||
.as_list_comp_expr()
|
||||
.unwrap()
|
||||
.elt
|
||||
.as_name_expr()
|
||||
.unwrap();
|
||||
let element_use_id =
|
||||
element.scoped_use_id(&db, comprehension_scope_id.to_scope_id(&db, file));
|
||||
|
||||
let definition = use_def.definition_for_use(element_use_id).unwrap();
|
||||
let DefinitionKind::Node(DefinitionNode::Comprehension(comprehension)) =
|
||||
definition.kind(&db)
|
||||
else {
|
||||
panic!("expected generator definition")
|
||||
};
|
||||
let ast::Comprehension { target, .. } = comprehension.node();
|
||||
let name = target.as_name_expr().unwrap().id().as_str();
|
||||
|
||||
assert_eq!(name, "x");
|
||||
assert_eq!(target.range(), TextRange::new(23.into(), 24.into()));
|
||||
}
|
||||
|
||||
/// Test case to validate that the nested comprehension creates a new scope which is a child of
|
||||
/// the outer comprehension scope and the variables are correctly defined in the respective
|
||||
/// scopes.
|
||||
#[test]
|
||||
fn nested_comprehensions() {
|
||||
let TestCase { db, file } = test_case(
|
||||
"
|
||||
[{x for x in iter2} for y in iter1]
|
||||
",
|
||||
);
|
||||
|
||||
let index = semantic_index(&db, file);
|
||||
let global_table = index.symbol_table(FileScopeId::global());
|
||||
|
||||
assert_eq!(names(&global_table), vec!["iter1"]);
|
||||
|
||||
let [(comprehension_scope_id, comprehension_scope)] = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one child scope")
|
||||
};
|
||||
|
||||
assert_eq!(comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
comprehension_scope_id.to_scope_id(&db, file).name(&db),
|
||||
"<listcomp>"
|
||||
);
|
||||
|
||||
let comprehension_symbol_table = index.symbol_table(comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&comprehension_symbol_table), vec!["y", "iter2"]);
|
||||
|
||||
let [(inner_comprehension_scope_id, inner_comprehension_scope)] = index
|
||||
.child_scopes(comprehension_scope_id)
|
||||
.collect::<Vec<_>>()[..]
|
||||
else {
|
||||
panic!("expected one inner comprehension scope")
|
||||
};
|
||||
|
||||
assert_eq!(inner_comprehension_scope.kind(), ScopeKind::Comprehension);
|
||||
assert_eq!(
|
||||
inner_comprehension_scope_id
|
||||
.to_scope_id(&db, file)
|
||||
.name(&db),
|
||||
"<setcomp>"
|
||||
);
|
||||
|
||||
let inner_comprehension_symbol_table = index.symbol_table(inner_comprehension_scope_id);
|
||||
|
||||
assert_eq!(names(&inner_comprehension_symbol_table), vec!["x"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dupes() {
|
||||
let TestCase { db, file } = test_case(
|
||||
@@ -562,14 +690,17 @@ def func():
|
||||
assert_eq!(names(&func2_table), vec!["y"]);
|
||||
|
||||
let use_def = index.use_def_map(FileScopeId::global());
|
||||
let [definition] = use_def.public_definitions(
|
||||
global_table
|
||||
.symbol_id_by_name("func")
|
||||
.expect("symbol exists"),
|
||||
) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
assert!(matches!(definition.node(&db), DefinitionKind::Function(_)));
|
||||
let definition = use_def
|
||||
.public_definition(
|
||||
global_table
|
||||
.symbol_id_by_name("func")
|
||||
.expect("symbol exists"),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
definition.kind(&db),
|
||||
DefinitionKind::Node(DefinitionNode::Function(_))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -669,10 +800,9 @@ class C[T]:
|
||||
};
|
||||
let x_use_id = x_use_expr_name.scoped_use_id(&db, scope);
|
||||
let use_def = use_def_map(&db, scope);
|
||||
let [definition] = use_def.use_definitions(x_use_id) else {
|
||||
panic!("expected one definition");
|
||||
};
|
||||
let DefinitionKind::Assignment(assignment) = definition.node(&db) else {
|
||||
let definition = use_def.definition_for_use(x_use_id).unwrap();
|
||||
let DefinitionKind::Node(DefinitionNode::Assignment(assignment)) = definition.kind(&db)
|
||||
else {
|
||||
panic!("should be an assignment definition")
|
||||
};
|
||||
let ast::Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
|
||||
@@ -26,9 +26,9 @@ use crate::Db;
|
||||
/// ```
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct AstIds {
|
||||
/// Maps expressions to their expression id. Uses `NodeKey` because it avoids cloning [`Parsed`].
|
||||
/// Maps expressions to their expression id.
|
||||
expressions_map: FxHashMap<ExpressionNodeKey, ScopedExpressionId>,
|
||||
/// Maps expressions which "use" a symbol (that is, [`ExprName`]) to a use id.
|
||||
/// Maps expressions which "use" a symbol (that is, [`ast::ExprName`]) to a use id.
|
||||
uses_map: FxHashMap<ExpressionNodeKey, ScopedUseId>,
|
||||
}
|
||||
|
||||
|
||||
@@ -13,15 +13,15 @@ use crate::ast_node_ref::AstNodeRef;
|
||||
use crate::semantic_index::ast_ids::node_key::ExpressionNodeKey;
|
||||
use crate::semantic_index::ast_ids::AstIdsBuilder;
|
||||
use crate::semantic_index::definition::{
|
||||
AssignmentDefinitionNodeRef, Definition, DefinitionNodeKey, DefinitionNodeRef,
|
||||
ImportFromDefinitionNodeRef,
|
||||
AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionKind,
|
||||
DefinitionNodeKey, DefinitionNodeRef, ImportFromDefinitionNodeRef,
|
||||
};
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::symbol::{
|
||||
FileScopeId, NodeWithScopeKey, NodeWithScopeRef, Scope, ScopeId, ScopedSymbolId, SymbolFlags,
|
||||
SymbolTableBuilder,
|
||||
};
|
||||
use crate::semantic_index::use_def::{FlowSnapshot, UseDefMapBuilder};
|
||||
use crate::semantic_index::use_def::{BasicBlockId, UseDefMapBuilder};
|
||||
use crate::semantic_index::SemanticIndex;
|
||||
use crate::Db;
|
||||
|
||||
@@ -33,8 +33,8 @@ pub(super) struct SemanticIndexBuilder<'db> {
|
||||
scope_stack: Vec<FileScopeId>,
|
||||
/// The assignment we're currently visiting.
|
||||
current_assignment: Option<CurrentAssignment<'db>>,
|
||||
/// Flow states at each `break` in the current loop.
|
||||
loop_break_states: Vec<FlowSnapshot>,
|
||||
/// Basic block ending at each `break` in the current loop.
|
||||
loop_breaks: Vec<BasicBlockId>,
|
||||
|
||||
// Semantic Index fields
|
||||
scopes: IndexVec<FileScopeId, Scope>,
|
||||
@@ -56,7 +56,7 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
module: parsed,
|
||||
scope_stack: Vec::new(),
|
||||
current_assignment: None,
|
||||
loop_break_states: vec![],
|
||||
loop_breaks: vec![],
|
||||
|
||||
scopes: IndexVec::new(),
|
||||
symbol_tables: IndexVec::new(),
|
||||
@@ -98,7 +98,8 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
|
||||
let file_scope_id = self.scopes.push(scope);
|
||||
self.symbol_tables.push(SymbolTableBuilder::new());
|
||||
self.use_def_maps.push(UseDefMapBuilder::new());
|
||||
self.use_def_maps
|
||||
.push(UseDefMapBuilder::new(self.db, self.file, file_scope_id));
|
||||
let ast_id_scope = self.ast_ids.push(AstIdsBuilder::new());
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
@@ -132,41 +133,50 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
&mut self.symbol_tables[scope_id]
|
||||
}
|
||||
|
||||
fn current_use_def_map_mut(&mut self) -> &mut UseDefMapBuilder<'db> {
|
||||
fn current_use_def_map(&mut self) -> &mut UseDefMapBuilder<'db> {
|
||||
let scope_id = self.current_scope();
|
||||
&mut self.use_def_maps[scope_id]
|
||||
}
|
||||
|
||||
fn current_use_def_map(&self) -> &UseDefMapBuilder<'db> {
|
||||
let scope_id = self.current_scope();
|
||||
&self.use_def_maps[scope_id]
|
||||
}
|
||||
|
||||
fn current_ast_ids(&mut self) -> &mut AstIdsBuilder {
|
||||
let scope_id = self.current_scope();
|
||||
&mut self.ast_ids[scope_id]
|
||||
}
|
||||
|
||||
fn flow_snapshot(&self) -> FlowSnapshot {
|
||||
self.current_use_def_map().snapshot()
|
||||
/// Start a new basic block and return the previous block's ID.
|
||||
fn next_block(&mut self) -> BasicBlockId {
|
||||
self.current_use_def_map().next_block(/* sealed */ true)
|
||||
}
|
||||
|
||||
fn flow_restore(&mut self, state: FlowSnapshot) {
|
||||
self.current_use_def_map_mut().restore(state);
|
||||
/// Start a new unsealed basic block and return the previous block's ID.
|
||||
fn next_block_unsealed(&mut self) -> BasicBlockId {
|
||||
self.current_use_def_map().next_block(/* sealed */ false)
|
||||
}
|
||||
|
||||
fn flow_merge(&mut self, state: &FlowSnapshot) {
|
||||
self.current_use_def_map_mut().merge(state);
|
||||
/// Seal an unsealed basic block.
|
||||
fn seal_block(&mut self) {
|
||||
self.current_use_def_map().seal_current_block();
|
||||
}
|
||||
|
||||
/// Start a new basic block with the given block as predecessor.
|
||||
fn new_block_from(&mut self, predecessor: BasicBlockId) {
|
||||
self.current_use_def_map()
|
||||
.new_block_from(predecessor, /* sealed */ true);
|
||||
}
|
||||
|
||||
/// Add a predecessor to the current block.
|
||||
fn merge_block(&mut self, predecessor: BasicBlockId) {
|
||||
self.current_use_def_map().merge_block(predecessor);
|
||||
}
|
||||
|
||||
/// Add predecessors to the current block.
|
||||
fn merge_blocks(&mut self, predecessors: Vec<BasicBlockId>) {
|
||||
self.current_use_def_map().merge_blocks(predecessors);
|
||||
}
|
||||
|
||||
fn add_or_update_symbol(&mut self, name: Name, flags: SymbolFlags) -> ScopedSymbolId {
|
||||
let symbol_table = self.current_symbol_table();
|
||||
let (symbol_id, added) = symbol_table.add_or_update_symbol(name, flags);
|
||||
if added {
|
||||
let use_def_map = self.current_use_def_map_mut();
|
||||
use_def_map.add_symbol(symbol_id);
|
||||
}
|
||||
symbol_id
|
||||
symbol_table.add_or_update_symbol(name, flags)
|
||||
}
|
||||
|
||||
fn add_definition<'a>(
|
||||
@@ -174,22 +184,20 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
symbol: ScopedSymbolId,
|
||||
definition_node: impl Into<DefinitionNodeRef<'a>>,
|
||||
) -> Definition<'db> {
|
||||
let definition_node = definition_node.into();
|
||||
let definition_node: DefinitionNodeRef<'_> = definition_node.into();
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.current_scope(),
|
||||
symbol,
|
||||
#[allow(unsafe_code)]
|
||||
unsafe {
|
||||
definition_node.into_owned(self.module.clone())
|
||||
},
|
||||
DefinitionKind::Node(unsafe { definition_node.into_owned(self.module.clone()) }),
|
||||
countme::Count::default(),
|
||||
);
|
||||
|
||||
self.definitions_by_node
|
||||
.insert(definition_node.key(), definition);
|
||||
self.current_use_def_map_mut()
|
||||
self.current_use_def_map()
|
||||
.record_definition(symbol, definition);
|
||||
|
||||
definition
|
||||
@@ -258,6 +266,49 @@ impl<'db> SemanticIndexBuilder<'db> {
|
||||
nested_scope
|
||||
}
|
||||
|
||||
/// Visit a list of [`Comprehension`] nodes, assumed to be the "generators" that compose a
|
||||
/// comprehension (that is, the `for x in y` and `for y in z` parts of `x for x in y for y in z`.)
|
||||
///
|
||||
/// [`Comprehension`]: ast::Comprehension
|
||||
fn visit_generators(&mut self, scope: NodeWithScopeRef, generators: &'db [ast::Comprehension]) {
|
||||
let mut generators_iter = generators.iter();
|
||||
|
||||
let Some(generator) = generators_iter.next() else {
|
||||
unreachable!("Expression must contain at least one generator");
|
||||
};
|
||||
|
||||
// The `iter` of the first generator is evaluated in the outer scope, while all subsequent
|
||||
// nodes are evaluated in the inner scope.
|
||||
self.visit_expr(&generator.iter);
|
||||
self.push_scope(scope);
|
||||
|
||||
self.current_assignment = Some(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: true,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.current_assignment = None;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
|
||||
for generator in generators_iter {
|
||||
self.visit_expr(&generator.iter);
|
||||
|
||||
self.current_assignment = Some(CurrentAssignment::Comprehension {
|
||||
node: generator,
|
||||
first: false,
|
||||
});
|
||||
self.visit_expr(&generator.target);
|
||||
self.current_assignment = None;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn build(mut self) -> SemanticIndex<'db> {
|
||||
let module = self.module;
|
||||
self.visit_body(module.suite());
|
||||
@@ -412,21 +463,19 @@ where
|
||||
}
|
||||
ast::Stmt::If(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
let pre_if = self.next_block();
|
||||
self.visit_body(&node.body);
|
||||
let mut post_clauses: Vec<FlowSnapshot> = vec![];
|
||||
let mut post_clauses: Vec<BasicBlockId> = vec![];
|
||||
for clause in &node.elif_else_clauses {
|
||||
// snapshot after every block except the last; the last one will just become
|
||||
// the state that we merge the other snapshots into
|
||||
post_clauses.push(self.flow_snapshot());
|
||||
post_clauses.push(self.next_block());
|
||||
// we can only take an elif/else branch if none of the previous ones were
|
||||
// taken, so the block entry state is always `pre_if`
|
||||
self.flow_restore(pre_if.clone());
|
||||
self.new_block_from(pre_if);
|
||||
self.visit_elif_else_clause(clause);
|
||||
}
|
||||
for post_clause_state in post_clauses {
|
||||
self.flow_merge(&post_clause_state);
|
||||
}
|
||||
self.next_block_unsealed();
|
||||
let has_else = node
|
||||
.elif_else_clauses
|
||||
.last()
|
||||
@@ -434,35 +483,39 @@ where
|
||||
if !has_else {
|
||||
// if there's no else clause, then it's possible we took none of the branches,
|
||||
// and the pre_if state can reach here
|
||||
self.flow_merge(&pre_if);
|
||||
self.merge_block(pre_if);
|
||||
}
|
||||
self.merge_blocks(post_clauses);
|
||||
self.seal_block();
|
||||
}
|
||||
ast::Stmt::While(node) => {
|
||||
self.visit_expr(&node.test);
|
||||
|
||||
let pre_loop = self.flow_snapshot();
|
||||
let pre_loop = self.next_block();
|
||||
|
||||
// Save aside any break states from an outer loop
|
||||
let saved_break_states = std::mem::take(&mut self.loop_break_states);
|
||||
let saved_break_states = std::mem::take(&mut self.loop_breaks);
|
||||
self.visit_body(&node.body);
|
||||
// Get the break states from the body of this loop, and restore the saved outer
|
||||
// ones.
|
||||
let break_states =
|
||||
std::mem::replace(&mut self.loop_break_states, saved_break_states);
|
||||
let break_states = std::mem::replace(&mut self.loop_breaks, saved_break_states);
|
||||
|
||||
// We may execute the `else` clause without ever executing the body, so merge in
|
||||
// the pre-loop state before visiting `else`.
|
||||
self.flow_merge(&pre_loop);
|
||||
self.next_block_unsealed();
|
||||
self.merge_block(pre_loop);
|
||||
self.seal_block();
|
||||
self.visit_body(&node.orelse);
|
||||
|
||||
// Breaking out of a while loop bypasses the `else` clause, so merge in the break
|
||||
// states after visiting `else`.
|
||||
for break_state in break_states {
|
||||
self.flow_merge(&break_state);
|
||||
}
|
||||
self.next_block_unsealed();
|
||||
self.merge_blocks(break_states);
|
||||
self.seal_block();
|
||||
}
|
||||
ast::Stmt::Break(_) => {
|
||||
self.loop_break_states.push(self.flow_snapshot());
|
||||
let block_id = self.next_block();
|
||||
self.loop_breaks.push(block_id);
|
||||
}
|
||||
_ => {
|
||||
walk_stmt(self, stmt);
|
||||
@@ -476,8 +529,7 @@ where
|
||||
self.current_ast_ids().record_expression(expr);
|
||||
|
||||
match expr {
|
||||
ast::Expr::Name(name_node) => {
|
||||
let ast::ExprName { id, ctx, .. } = name_node;
|
||||
ast::Expr::Name(name_node @ ast::ExprName { id, ctx, .. }) => {
|
||||
let flags = match ctx {
|
||||
ast::ExprContext::Load => SymbolFlags::IS_USED,
|
||||
ast::ExprContext::Store => SymbolFlags::IS_DEFINED,
|
||||
@@ -500,15 +552,24 @@ where
|
||||
self.add_definition(symbol, ann_assign);
|
||||
}
|
||||
Some(CurrentAssignment::Named(named)) => {
|
||||
// TODO(dhruvmanila): If the current scope is a comprehension, then the
|
||||
// named expression is implicitly nonlocal. This is yet to be
|
||||
// implemented.
|
||||
self.add_definition(symbol, named);
|
||||
}
|
||||
Some(CurrentAssignment::Comprehension { node, first }) => {
|
||||
self.add_definition(
|
||||
symbol,
|
||||
ComprehensionDefinitionNodeRef { node, first },
|
||||
);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
|
||||
if flags.contains(SymbolFlags::IS_USED) {
|
||||
let use_id = self.current_ast_ids().record_use(expr);
|
||||
self.current_use_def_map_mut().record_use(symbol, use_id);
|
||||
self.current_use_def_map().record_use(symbol, use_id);
|
||||
}
|
||||
|
||||
walk_expr(self, expr);
|
||||
@@ -527,7 +588,6 @@ where
|
||||
}
|
||||
self.push_scope(NodeWithScopeRef::Lambda(lambda));
|
||||
self.visit_expr(lambda.body.as_ref());
|
||||
self.pop_scope();
|
||||
}
|
||||
ast::Expr::If(ast::ExprIf {
|
||||
body, test, orelse, ..
|
||||
@@ -536,17 +596,75 @@ where
|
||||
// AST inspection, so we can't simplify here, need to record test expression for
|
||||
// later checking)
|
||||
self.visit_expr(test);
|
||||
let pre_if = self.flow_snapshot();
|
||||
let pre_if = self.next_block();
|
||||
self.visit_expr(body);
|
||||
let post_body = self.flow_snapshot();
|
||||
self.flow_restore(pre_if);
|
||||
let post_body = self.next_block();
|
||||
self.new_block_from(pre_if);
|
||||
self.visit_expr(orelse);
|
||||
self.flow_merge(&post_body);
|
||||
self.next_block_unsealed();
|
||||
self.merge_block(post_body);
|
||||
self.seal_block();
|
||||
}
|
||||
ast::Expr::ListComp(
|
||||
list_comprehension @ ast::ExprListComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::ListComprehension(list_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::SetComp(
|
||||
set_comprehension @ ast::ExprSetComp {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::SetComprehension(set_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::Generator(
|
||||
generator @ ast::ExprGenerator {
|
||||
elt, generators, ..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(NodeWithScopeRef::GeneratorExpression(generator), generators);
|
||||
self.visit_expr(elt);
|
||||
}
|
||||
ast::Expr::DictComp(
|
||||
dict_comprehension @ ast::ExprDictComp {
|
||||
key,
|
||||
value,
|
||||
generators,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
self.visit_generators(
|
||||
NodeWithScopeRef::DictComprehension(dict_comprehension),
|
||||
generators,
|
||||
);
|
||||
self.visit_expr(key);
|
||||
self.visit_expr(value);
|
||||
}
|
||||
_ => {
|
||||
walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(
|
||||
expr,
|
||||
ast::Expr::Lambda(_)
|
||||
| ast::Expr::ListComp(_)
|
||||
| ast::Expr::SetComp(_)
|
||||
| ast::Expr::Generator(_)
|
||||
| ast::Expr::DictComp(_)
|
||||
) {
|
||||
self.pop_scope();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -555,6 +673,10 @@ enum CurrentAssignment<'a> {
|
||||
Assign(&'a ast::StmtAssign),
|
||||
AnnAssign(&'a ast::StmtAnnAssign),
|
||||
Named(&'a ast::ExprNamed),
|
||||
Comprehension {
|
||||
node: &'a ast::Comprehension,
|
||||
first: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtAssign> for CurrentAssignment<'a> {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::ParsedModule;
|
||||
use ruff_index::newtype_index;
|
||||
use ruff_python_ast as ast;
|
||||
|
||||
use crate::ast_node_ref::AstNodeRef;
|
||||
@@ -8,7 +9,7 @@ use crate::semantic_index::symbol::{FileScopeId, ScopeId, ScopedSymbolId};
|
||||
use crate::Db;
|
||||
|
||||
#[salsa::tracked]
|
||||
pub struct Definition<'db> {
|
||||
pub(crate) struct Definition<'db> {
|
||||
/// The file in which the definition occurs.
|
||||
#[id]
|
||||
pub(crate) file: File,
|
||||
@@ -23,7 +24,7 @@ pub struct Definition<'db> {
|
||||
|
||||
#[no_eq]
|
||||
#[return_ref]
|
||||
pub(crate) node: DefinitionKind,
|
||||
pub(crate) kind: DefinitionKind,
|
||||
|
||||
#[no_eq]
|
||||
count: countme::Count<Definition<'static>>,
|
||||
@@ -35,6 +36,22 @@ impl<'db> Definition<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum DefinitionKind {
|
||||
/// Inserted at control-flow merge points, if multiple definitions can reach the merge point.
|
||||
///
|
||||
/// Operands are not kept inline, since it's not possible to construct cyclically-referential
|
||||
/// Salsa tracked structs; they are kept instead in the
|
||||
/// [`UseDefMap`](super::use_def::UseDefMap).
|
||||
Phi(ScopedPhiId),
|
||||
|
||||
/// An assignment to the symbol.
|
||||
Node(DefinitionNode),
|
||||
}
|
||||
|
||||
#[newtype_index]
|
||||
pub(crate) struct ScopedPhiId;
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) enum DefinitionNodeRef<'a> {
|
||||
Import(&'a ast::Alias),
|
||||
@@ -44,6 +61,7 @@ pub(crate) enum DefinitionNodeRef<'a> {
|
||||
NamedExpression(&'a ast::ExprNamed),
|
||||
Assignment(AssignmentDefinitionNodeRef<'a>),
|
||||
AnnotatedAssignment(&'a ast::StmtAnnAssign),
|
||||
Comprehension(ComprehensionDefinitionNodeRef<'a>),
|
||||
}
|
||||
|
||||
impl<'a> From<&'a ast::StmtFunctionDef> for DefinitionNodeRef<'a> {
|
||||
@@ -88,6 +106,12 @@ impl<'a> From<AssignmentDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<ComprehensionDefinitionNodeRef<'a>> for DefinitionNodeRef<'a> {
|
||||
fn from(node: ComprehensionDefinitionNodeRef<'a>) -> Self {
|
||||
Self::Comprehension(node)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ImportFromDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::StmtImportFrom,
|
||||
@@ -100,36 +124,48 @@ pub(crate) struct AssignmentDefinitionNodeRef<'a> {
|
||||
pub(crate) target: &'a ast::ExprName,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub(crate) struct ComprehensionDefinitionNodeRef<'a> {
|
||||
pub(crate) node: &'a ast::Comprehension,
|
||||
pub(crate) first: bool,
|
||||
}
|
||||
|
||||
impl DefinitionNodeRef<'_> {
|
||||
#[allow(unsafe_code)]
|
||||
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionKind {
|
||||
pub(super) unsafe fn into_owned(self, parsed: ParsedModule) -> DefinitionNode {
|
||||
match self {
|
||||
DefinitionNodeRef::Import(alias) => {
|
||||
DefinitionKind::Import(AstNodeRef::new(parsed, alias))
|
||||
DefinitionNode::Import(AstNodeRef::new(parsed, alias))
|
||||
}
|
||||
DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index }) => {
|
||||
DefinitionKind::ImportFrom(ImportFromDefinitionKind {
|
||||
DefinitionNode::ImportFrom(ImportFromDefinitionNode {
|
||||
node: AstNodeRef::new(parsed, node),
|
||||
alias_index,
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::Function(function) => {
|
||||
DefinitionKind::Function(AstNodeRef::new(parsed, function))
|
||||
DefinitionNode::Function(AstNodeRef::new(parsed, function))
|
||||
}
|
||||
DefinitionNodeRef::Class(class) => {
|
||||
DefinitionKind::Class(AstNodeRef::new(parsed, class))
|
||||
DefinitionNode::Class(AstNodeRef::new(parsed, class))
|
||||
}
|
||||
DefinitionNodeRef::NamedExpression(named) => {
|
||||
DefinitionKind::NamedExpression(AstNodeRef::new(parsed, named))
|
||||
DefinitionNode::NamedExpression(AstNodeRef::new(parsed, named))
|
||||
}
|
||||
DefinitionNodeRef::Assignment(AssignmentDefinitionNodeRef { assignment, target }) => {
|
||||
DefinitionKind::Assignment(AssignmentDefinitionKind {
|
||||
DefinitionNode::Assignment(AssignmentDefinitionNode {
|
||||
assignment: AstNodeRef::new(parsed.clone(), assignment),
|
||||
target: AstNodeRef::new(parsed, target),
|
||||
})
|
||||
}
|
||||
DefinitionNodeRef::AnnotatedAssignment(assign) => {
|
||||
DefinitionKind::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
|
||||
DefinitionNode::AnnotatedAssignment(AstNodeRef::new(parsed, assign))
|
||||
}
|
||||
DefinitionNodeRef::Comprehension(ComprehensionDefinitionNodeRef { node, first }) => {
|
||||
DefinitionNode::Comprehension(ComprehensionDefinitionNode {
|
||||
node: AstNodeRef::new(parsed, node),
|
||||
first,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -148,28 +184,46 @@ impl DefinitionNodeRef<'_> {
|
||||
target,
|
||||
}) => target.into(),
|
||||
Self::AnnotatedAssignment(node) => node.into(),
|
||||
Self::Comprehension(ComprehensionDefinitionNodeRef { node, first: _ }) => node.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum DefinitionKind {
|
||||
pub enum DefinitionNode {
|
||||
Import(AstNodeRef<ast::Alias>),
|
||||
ImportFrom(ImportFromDefinitionKind),
|
||||
ImportFrom(ImportFromDefinitionNode),
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Class(AstNodeRef<ast::StmtClassDef>),
|
||||
NamedExpression(AstNodeRef<ast::ExprNamed>),
|
||||
Assignment(AssignmentDefinitionKind),
|
||||
Assignment(AssignmentDefinitionNode),
|
||||
AnnotatedAssignment(AstNodeRef<ast::StmtAnnAssign>),
|
||||
Comprehension(ComprehensionDefinitionNode),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImportFromDefinitionKind {
|
||||
pub struct ComprehensionDefinitionNode {
|
||||
node: AstNodeRef<ast::Comprehension>,
|
||||
first: bool,
|
||||
}
|
||||
|
||||
impl ComprehensionDefinitionNode {
|
||||
pub(crate) fn node(&self) -> &ast::Comprehension {
|
||||
self.node.node()
|
||||
}
|
||||
|
||||
pub(crate) fn is_first(&self) -> bool {
|
||||
self.first
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ImportFromDefinitionNode {
|
||||
node: AstNodeRef<ast::StmtImportFrom>,
|
||||
alias_index: usize,
|
||||
}
|
||||
|
||||
impl ImportFromDefinitionKind {
|
||||
impl ImportFromDefinitionNode {
|
||||
pub(crate) fn import(&self) -> &ast::StmtImportFrom {
|
||||
self.node.node()
|
||||
}
|
||||
@@ -181,12 +235,12 @@ impl ImportFromDefinitionKind {
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub struct AssignmentDefinitionKind {
|
||||
pub struct AssignmentDefinitionNode {
|
||||
assignment: AstNodeRef<ast::StmtAssign>,
|
||||
target: AstNodeRef<ast::ExprName>,
|
||||
}
|
||||
|
||||
impl AssignmentDefinitionKind {
|
||||
impl AssignmentDefinitionNode {
|
||||
pub(crate) fn assignment(&self) -> &ast::StmtAssign {
|
||||
self.assignment.node()
|
||||
}
|
||||
@@ -230,3 +284,9 @@ impl From<&ast::StmtAnnAssign> for DefinitionNodeKey {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ast::Comprehension> for DefinitionNodeKey {
|
||||
fn from(node: &ast::Comprehension) -> Self {
|
||||
Self(NodeKey::from_node(node))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,6 +114,10 @@ impl<'db> ScopeId<'db> {
|
||||
NodeWithScopeKind::ClassTypeParameters(_)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(_)
|
||||
| NodeWithScopeKind::Function(_)
|
||||
| NodeWithScopeKind::ListComprehension(_)
|
||||
| NodeWithScopeKind::SetComprehension(_)
|
||||
| NodeWithScopeKind::DictComprehension(_)
|
||||
| NodeWithScopeKind::GeneratorExpression(_)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -127,6 +131,10 @@ impl<'db> ScopeId<'db> {
|
||||
NodeWithScopeKind::Function(function)
|
||||
| NodeWithScopeKind::FunctionTypeParameters(function) => function.name.as_str(),
|
||||
NodeWithScopeKind::Lambda(_) => "<lambda>",
|
||||
NodeWithScopeKind::ListComprehension(_) => "<listcomp>",
|
||||
NodeWithScopeKind::SetComprehension(_) => "<setcomp>",
|
||||
NodeWithScopeKind::DictComprehension(_) => "<dictcomp>",
|
||||
NodeWithScopeKind::GeneratorExpression(_) => "<generator>",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -170,6 +178,13 @@ pub enum ScopeKind {
|
||||
Annotation,
|
||||
Class,
|
||||
Function,
|
||||
Comprehension,
|
||||
}
|
||||
|
||||
impl ScopeKind {
|
||||
pub const fn is_comprehension(self) -> bool {
|
||||
matches!(self, ScopeKind::Comprehension)
|
||||
}
|
||||
}
|
||||
|
||||
/// Symbol table for a specific [`Scope`].
|
||||
@@ -257,7 +272,7 @@ impl SymbolTableBuilder {
|
||||
&mut self,
|
||||
name: Name,
|
||||
flags: SymbolFlags,
|
||||
) -> (ScopedSymbolId, bool) {
|
||||
) -> ScopedSymbolId {
|
||||
let hash = SymbolTable::hash_name(&name);
|
||||
let entry = self
|
||||
.table
|
||||
@@ -270,7 +285,7 @@ impl SymbolTableBuilder {
|
||||
let symbol = &mut self.table.symbols[*entry.key()];
|
||||
symbol.insert_flags(flags);
|
||||
|
||||
(*entry.key(), false)
|
||||
*entry.key()
|
||||
}
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
let mut symbol = Symbol::new(name);
|
||||
@@ -280,7 +295,7 @@ impl SymbolTableBuilder {
|
||||
entry.insert_with_hasher(hash, id, (), |id| {
|
||||
SymbolTable::hash_name(self.table.symbols[*id].name().as_str())
|
||||
});
|
||||
(id, true)
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -300,6 +315,10 @@ pub(crate) enum NodeWithScopeRef<'a> {
|
||||
Lambda(&'a ast::ExprLambda),
|
||||
FunctionTypeParameters(&'a ast::StmtFunctionDef),
|
||||
ClassTypeParameters(&'a ast::StmtClassDef),
|
||||
ListComprehension(&'a ast::ExprListComp),
|
||||
SetComprehension(&'a ast::ExprSetComp),
|
||||
DictComprehension(&'a ast::ExprDictComp),
|
||||
GeneratorExpression(&'a ast::ExprGenerator),
|
||||
}
|
||||
|
||||
impl NodeWithScopeRef<'_> {
|
||||
@@ -326,6 +345,18 @@ impl NodeWithScopeRef<'_> {
|
||||
NodeWithScopeRef::ClassTypeParameters(class) => {
|
||||
NodeWithScopeKind::ClassTypeParameters(AstNodeRef::new(module, class))
|
||||
}
|
||||
NodeWithScopeRef::ListComprehension(comprehension) => {
|
||||
NodeWithScopeKind::ListComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::SetComprehension(comprehension) => {
|
||||
NodeWithScopeKind::SetComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::DictComprehension(comprehension) => {
|
||||
NodeWithScopeKind::DictComprehension(AstNodeRef::new(module, comprehension))
|
||||
}
|
||||
NodeWithScopeRef::GeneratorExpression(generator) => {
|
||||
NodeWithScopeKind::GeneratorExpression(AstNodeRef::new(module, generator))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -337,6 +368,10 @@ impl NodeWithScopeRef<'_> {
|
||||
NodeWithScopeRef::Lambda(_) => ScopeKind::Function,
|
||||
NodeWithScopeRef::FunctionTypeParameters(_)
|
||||
| NodeWithScopeRef::ClassTypeParameters(_) => ScopeKind::Annotation,
|
||||
NodeWithScopeRef::ListComprehension(_)
|
||||
| NodeWithScopeRef::SetComprehension(_)
|
||||
| NodeWithScopeRef::DictComprehension(_)
|
||||
| NodeWithScopeRef::GeneratorExpression(_) => ScopeKind::Comprehension,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,6 +391,18 @@ impl NodeWithScopeRef<'_> {
|
||||
NodeWithScopeRef::ClassTypeParameters(class) => {
|
||||
NodeWithScopeKey::ClassTypeParameters(NodeKey::from_node(class))
|
||||
}
|
||||
NodeWithScopeRef::ListComprehension(comprehension) => {
|
||||
NodeWithScopeKey::ListComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::SetComprehension(comprehension) => {
|
||||
NodeWithScopeKey::SetComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::DictComprehension(comprehension) => {
|
||||
NodeWithScopeKey::DictComprehension(NodeKey::from_node(comprehension))
|
||||
}
|
||||
NodeWithScopeRef::GeneratorExpression(generator) => {
|
||||
NodeWithScopeKey::GeneratorExpression(NodeKey::from_node(generator))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -369,6 +416,10 @@ pub enum NodeWithScopeKind {
|
||||
Function(AstNodeRef<ast::StmtFunctionDef>),
|
||||
FunctionTypeParameters(AstNodeRef<ast::StmtFunctionDef>),
|
||||
Lambda(AstNodeRef<ast::ExprLambda>),
|
||||
ListComprehension(AstNodeRef<ast::ExprListComp>),
|
||||
SetComprehension(AstNodeRef<ast::ExprSetComp>),
|
||||
DictComprehension(AstNodeRef<ast::ExprDictComp>),
|
||||
GeneratorExpression(AstNodeRef<ast::ExprGenerator>),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
|
||||
@@ -379,4 +430,8 @@ pub(crate) enum NodeWithScopeKey {
|
||||
Function(NodeKey),
|
||||
FunctionTypeParameters(NodeKey),
|
||||
Lambda(NodeKey),
|
||||
ListComprehension(NodeKey),
|
||||
SetComprehension(NodeKey),
|
||||
DictComprehension(NodeKey),
|
||||
GeneratorExpression(NodeKey),
|
||||
}
|
||||
|
||||
@@ -56,299 +56,323 @@
|
||||
//! visible at the end of the scope.
|
||||
//!
|
||||
//! The data structure we build to answer these two questions is the `UseDefMap`. It has a
|
||||
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` vector
|
||||
//! indexed by [`ScopedSymbolId`]. The values in each of these vectors are (in principle) a list of
|
||||
//! visible definitions at that use, or at the end of the scope for that symbol.
|
||||
//! `definitions_by_use` vector indexed by [`ScopedUseId`] and a `public_definitions` map
|
||||
//! indexed by [`ScopedSymbolId`]. The values in each are the visible definition of a symbol at
|
||||
//! that use, or at the end of the scope.
|
||||
//!
|
||||
//! In order to avoid vectors-of-vectors and all the allocations that would entail, we don't
|
||||
//! actually store these "list of visible definitions" as a vector of [`Definition`] IDs. Instead,
|
||||
//! the values in `definitions_by_use` and `public_definitions` are a [`Definitions`] struct that
|
||||
//! keeps a [`Range`] into a third vector of [`Definition`] IDs, `all_definitions`. The trick with
|
||||
//! this representation is that it requires that the definitions visible at any given use of a
|
||||
//! symbol are stored sequentially in `all_definitions`.
|
||||
//!
|
||||
//! There is another special kind of possible "definition" for a symbol: it might be unbound in the
|
||||
//! scope. (This isn't equivalent to "zero visible definitions", since we may go through an `if`
|
||||
//! that has a definition for the symbol, leaving us with one visible definition, but still also
|
||||
//! the "unbound" possibility, since we might not have taken the `if` branch.)
|
||||
//!
|
||||
//! The simplest way to model "unbound" would be as an actual [`Definition`] itself: the initial
|
||||
//! visible [`Definition`] for each symbol in a scope. But actually modeling it this way would
|
||||
//! dramatically increase the number of [`Definition`] that Salsa must track. Since "unbound" is a
|
||||
//! special definition in that all symbols share it, and it doesn't have any additional per-symbol
|
||||
//! state, we can represent it more efficiently: we use the `may_be_unbound` boolean on the
|
||||
//! [`Definitions`] struct. If this flag is `true`, it means the symbol/use really has one
|
||||
//! additional visible "definition", which is the unbound state. If this flag is `false`, it means
|
||||
//! we've eliminated the possibility of unbound: every path we've followed includes a definition
|
||||
//! for this symbol.
|
||||
//!
|
||||
//! To build a [`UseDefMap`], the [`UseDefMapBuilder`] is notified of each new use and definition
|
||||
//! as they are encountered by the
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder) AST visit. For
|
||||
//! each symbol, the builder tracks the currently-visible definitions for that symbol. When we hit
|
||||
//! a use of a symbol, it records the currently-visible definitions for that symbol as the visible
|
||||
//! definitions for that use. When we reach the end of the scope, it records the currently-visible
|
||||
//! definitions for each symbol as the public definitions of that symbol.
|
||||
//!
|
||||
//! Let's walk through the above example. Initially we record for `x` that it has no visible
|
||||
//! definitions, and may be unbound. When we see `x = 1`, we record that as the sole visible
|
||||
//! definition of `x`, and flip `may_be_unbound` to `false`. Then we see `x = 2`, and it replaces
|
||||
//! `x = 1` as the sole visible definition of `x`. When we get to `y = x`, we record that the
|
||||
//! visible definitions for that use of `x` are just the `x = 2` definition.
|
||||
//!
|
||||
//! Then we hit the `if` branch. We visit the `test` node (`flag` in this case), since that will
|
||||
//! happen regardless. Then we take a pre-branch snapshot of the currently visible definitions for
|
||||
//! all symbols, which we'll need later. Then we go ahead and visit the `if` body. When we see `x =
|
||||
//! 3`, it replaces `x = 2` as the sole visible definition of `x`. At the end of the `if` body, we
|
||||
//! take another snapshot of the currently-visible definitions; we'll call this the post-if-body
|
||||
//! snapshot.
|
||||
//!
|
||||
//! Now we need to visit the `else` clause. The conditions when entering the `else` clause should
|
||||
//! be the pre-if conditions; if we are entering the `else` clause, we know that the `if` test
|
||||
//! failed and we didn't execute the `if` body. So we first reset the builder to the pre-if state,
|
||||
//! using the snapshot we took previously (meaning we now have `x = 2` as the sole visible
|
||||
//! definition for `x` again), then visit the `else` clause, where `x = 4` replaces `x = 2` as the
|
||||
//! sole visible definition of `x`.
|
||||
//!
|
||||
//! Now we reach the end of the if/else, and want to visit the following code. The state here needs
|
||||
//! to reflect that we might have gone through the `if` branch, or we might have gone through the
|
||||
//! `else` branch, and we don't know which. So we need to "merge" our current builder state
|
||||
//! (reflecting the end-of-else state, with `x = 4` as the only visible definition) with our
|
||||
//! post-if-body snapshot (which has `x = 3` as the only visible definition). The result of this
|
||||
//! merge is that we now have two visible definitions of `x`: `x = 3` and `x = 4`.
|
||||
//!
|
||||
//! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a
|
||||
//! snapshot, and merging a snapshot into the current state. The logic using these methods lives in
|
||||
//! [`SemanticIndexBuilder`](crate::semantic_index::builder::SemanticIndexBuilder), e.g. where it
|
||||
//! visits a `StmtIf` node.
|
||||
//!
|
||||
//! (In the future we may have some other questions we want to answer as well, such as "is this
|
||||
//! definition used?", which will require tracking a bit more info in our map, e.g. a "used" bit
|
||||
//! for each [`Definition`] which is flipped to true when we record that definition for a use.)
|
||||
//! Rather than have multiple definitions, we use a Phi definition at control flow join points to
|
||||
//! merge the visible definition in each path. This means at any given point we always have exactly
|
||||
//! one definition for a symbol. (This is analogous to static-single-assignment, or SSA, form, and
|
||||
//! in fact we use the algorithm from [Simple and efficient construction of static single
|
||||
//! assignment form](https://dl.acm.org/doi/10.1007/978-3-642-37051-9_6) here.)
|
||||
use crate::semantic_index::ast_ids::ScopedUseId;
|
||||
use crate::semantic_index::definition::Definition;
|
||||
use crate::semantic_index::symbol::ScopedSymbolId;
|
||||
use ruff_index::IndexVec;
|
||||
use std::ops::Range;
|
||||
use crate::semantic_index::definition::{Definition, DefinitionKind, ScopedPhiId};
|
||||
use crate::semantic_index::symbol::{FileScopeId, ScopedSymbolId};
|
||||
use crate::Db;
|
||||
use ruff_db::files::File;
|
||||
use ruff_index::{newtype_index, IndexVec};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
/// All definitions that can reach a given use of a name.
|
||||
/// Number of basic block predecessors we store inline.
|
||||
const PREDECESSORS: usize = 2;
|
||||
|
||||
/// Input operands (definitions) for a Phi definition. None means not defined.
|
||||
// TODO would like to use SmallVec here but can't due to lifetime invariance issue.
|
||||
type PhiOperands<'db> = Vec<Option<Definition<'db>>>;
|
||||
|
||||
/// Definition for each use of a name.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub(crate) struct UseDefMap<'db> {
|
||||
// TODO store constraints with definitions for type narrowing
|
||||
/// Definition IDs array for `definitions_by_use` and `public_definitions` to slice into.
|
||||
all_definitions: Vec<Definition<'db>>,
|
||||
/// Definition that reaches each [`ScopedUseId`].
|
||||
definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,
|
||||
|
||||
/// Definitions that can reach a [`ScopedUseId`].
|
||||
definitions_by_use: IndexVec<ScopedUseId, Definitions>,
|
||||
/// Definition of each symbol visible at end of scope.
|
||||
///
|
||||
/// Sparse, because it only includes symbols defined in the scope.
|
||||
public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>>,
|
||||
|
||||
/// Definitions of each symbol visible at end of scope.
|
||||
public_definitions: IndexVec<ScopedSymbolId, Definitions>,
|
||||
/// Operands for each Phi definition in this scope.
|
||||
phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,
|
||||
}
|
||||
|
||||
impl<'db> UseDefMap<'db> {
|
||||
pub(crate) fn use_definitions(&self, use_id: ScopedUseId) -> &[Definition<'db>] {
|
||||
&self.all_definitions[self.definitions_by_use[use_id].definitions_range.clone()]
|
||||
/// Return the dominating definition for a given use of a name; None means not-defined.
|
||||
pub(crate) fn definition_for_use(&self, use_id: ScopedUseId) -> Option<Definition<'db>> {
|
||||
self.definitions_by_use[use_id]
|
||||
}
|
||||
|
||||
pub(crate) fn use_may_be_unbound(&self, use_id: ScopedUseId) -> bool {
|
||||
self.definitions_by_use[use_id].may_be_unbound
|
||||
/// Return the definition visible at end of scope for a symbol.
|
||||
///
|
||||
/// Return None if the symbol is never defined in the scope.
|
||||
pub(crate) fn public_definition(&self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
|
||||
self.public_definitions.get(&symbol_id).copied()
|
||||
}
|
||||
|
||||
pub(crate) fn public_definitions(&self, symbol: ScopedSymbolId) -> &[Definition<'db>] {
|
||||
&self.all_definitions[self.public_definitions[symbol].definitions_range.clone()]
|
||||
}
|
||||
|
||||
pub(crate) fn public_may_be_unbound(&self, symbol: ScopedSymbolId) -> bool {
|
||||
self.public_definitions[symbol].may_be_unbound
|
||||
/// Return the operands for a Phi in this scope; a None means not-defined.
|
||||
pub(crate) fn phi_operands<'s>(&'s self, phi_id: ScopedPhiId) -> &'s [Option<Definition<'db>>] {
|
||||
self.phi_operands[phi_id].as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
/// Definitions visible for a symbol at a particular use (or end-of-scope).
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
struct Definitions {
|
||||
/// [`Range`] in `all_definitions` of the visible definition IDs.
|
||||
definitions_range: Range<usize>,
|
||||
/// Is the symbol possibly unbound at this point?
|
||||
may_be_unbound: bool,
|
||||
}
|
||||
type PredecessorBlocks = SmallVec<[BasicBlockId; PREDECESSORS]>;
|
||||
|
||||
impl Definitions {
|
||||
/// The default state of a symbol is "no definitions, may be unbound", aka definitely-unbound.
|
||||
fn unbound() -> Self {
|
||||
Self {
|
||||
definitions_range: Range::default(),
|
||||
may_be_unbound: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
/// A basic block is a linear region of code (no branches.)
|
||||
#[newtype_index]
|
||||
pub(super) struct BasicBlockId;
|
||||
|
||||
impl Default for Definitions {
|
||||
fn default() -> Self {
|
||||
Definitions::unbound()
|
||||
}
|
||||
}
|
||||
|
||||
/// A snapshot of the visible definitions for each symbol at a particular point in control flow.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(super) struct FlowSnapshot {
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(super) struct UseDefMapBuilder<'db> {
|
||||
/// Definition IDs array for `definitions_by_use` and `definitions_by_symbol` to slice into.
|
||||
all_definitions: Vec<Definition<'db>>,
|
||||
db: &'db dyn Db,
|
||||
file: File,
|
||||
file_scope: FileScopeId,
|
||||
|
||||
/// Visible definitions at each so-far-recorded use.
|
||||
definitions_by_use: IndexVec<ScopedUseId, Definitions>,
|
||||
/// Predecessor blocks for each basic block.
|
||||
///
|
||||
/// Entry block has none, all other blocks have at least one, blocks that join control flow can
|
||||
/// have two or more.
|
||||
predecessors: IndexVec<BasicBlockId, PredecessorBlocks>,
|
||||
|
||||
/// Currently visible definitions for each symbol.
|
||||
definitions_by_symbol: IndexVec<ScopedSymbolId, Definitions>,
|
||||
/// The definition of each symbol which dominates each basic block.
|
||||
///
|
||||
/// No entry means "lazily unfilled"; we haven't had to query for it yet, and we may never have
|
||||
/// to, if the symbol isn't used in this block or any successor block.
|
||||
///
|
||||
/// Each block has an [`FxHashMap`] of symbols instead of an [`IndexVec`] because it is lazy
|
||||
/// and potentially sparse; it will only include a definition for a symbol that is actually
|
||||
/// used in that block or a successor. An [`IndexVec`] would have to be eagerly filled with
|
||||
/// placeholders.
|
||||
definitions_per_block:
|
||||
IndexVec<BasicBlockId, FxHashMap<ScopedSymbolId, Option<Definition<'db>>>>,
|
||||
|
||||
/// Incomplete Phi definitions in each block.
|
||||
///
|
||||
/// An incomplete Phi is used when we don't know, while processing a block's body, what new
|
||||
/// predecessors it may later gain (that is, backward jumps.)
|
||||
///
|
||||
/// Sparse, because relative few blocks (just loop headers) will have any incomplete Phis.
|
||||
incomplete_phis: FxHashMap<BasicBlockId, Vec<Definition<'db>>>,
|
||||
|
||||
/// Operands for each Phi definition in this scope.
|
||||
phi_operands: IndexVec<ScopedPhiId, PhiOperands<'db>>,
|
||||
|
||||
/// Are this block's predecessors fully populated?
|
||||
///
|
||||
/// If not, it isn't safe to recurse to predecessors yet; we might miss a predecessor block.
|
||||
sealed_blocks: IndexVec<BasicBlockId, bool>,
|
||||
|
||||
/// Definition for each so-far-recorded use.
|
||||
definitions_by_use: IndexVec<ScopedUseId, Option<Definition<'db>>>,
|
||||
|
||||
/// All symbols defined in this scope.
|
||||
defined_symbols: FxHashSet<ScopedSymbolId>,
|
||||
}
|
||||
|
||||
impl<'db> UseDefMapBuilder<'db> {
|
||||
pub(super) fn new() -> Self {
|
||||
Self {
|
||||
all_definitions: Vec::new(),
|
||||
pub(super) fn new(db: &'db dyn Db, file: File, file_scope: FileScopeId) -> Self {
|
||||
let mut new = Self {
|
||||
db,
|
||||
file,
|
||||
file_scope,
|
||||
predecessors: IndexVec::new(),
|
||||
definitions_per_block: IndexVec::new(),
|
||||
incomplete_phis: FxHashMap::default(),
|
||||
sealed_blocks: IndexVec::new(),
|
||||
definitions_by_use: IndexVec::new(),
|
||||
definitions_by_symbol: IndexVec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn add_symbol(&mut self, symbol: ScopedSymbolId) {
|
||||
let new_symbol = self.definitions_by_symbol.push(Definitions::unbound());
|
||||
debug_assert_eq!(symbol, new_symbol);
|
||||
phi_operands: IndexVec::new(),
|
||||
defined_symbols: FxHashSet::default(),
|
||||
};
|
||||
|
||||
// create the entry basic block
|
||||
new.predecessors.push(PredecessorBlocks::default());
|
||||
new.definitions_per_block.push(FxHashMap::default());
|
||||
new.sealed_blocks.push(true);
|
||||
|
||||
new
|
||||
}
|
||||
|
||||
/// Record a definition for a symbol.
|
||||
pub(super) fn record_definition(
|
||||
&mut self,
|
||||
symbol: ScopedSymbolId,
|
||||
symbol_id: ScopedSymbolId,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
// We have a new definition of a symbol; this replaces any previous definitions in this
|
||||
// path.
|
||||
let def_idx = self.all_definitions.len();
|
||||
self.all_definitions.push(definition);
|
||||
self.definitions_by_symbol[symbol] = Definitions {
|
||||
#[allow(clippy::range_plus_one)]
|
||||
definitions_range: def_idx..(def_idx + 1),
|
||||
may_be_unbound: false,
|
||||
};
|
||||
self.memoize(self.current_block_id(), symbol_id, Some(definition));
|
||||
self.defined_symbols.insert(symbol_id);
|
||||
}
|
||||
|
||||
pub(super) fn record_use(&mut self, symbol: ScopedSymbolId, use_id: ScopedUseId) {
|
||||
// We have a use of a symbol; clone the currently visible definitions for that symbol, and
|
||||
// record them as the visible definitions for this use.
|
||||
let new_use = self
|
||||
.definitions_by_use
|
||||
.push(self.definitions_by_symbol[symbol].clone());
|
||||
/// Record a use of a symbol.
|
||||
pub(super) fn record_use(&mut self, symbol_id: ScopedSymbolId, use_id: ScopedUseId) {
|
||||
let definition_id = self.lookup(symbol_id);
|
||||
let new_use = self.definitions_by_use.push(definition_id);
|
||||
debug_assert_eq!(use_id, new_use);
|
||||
}
|
||||
|
||||
/// Take a snapshot of the current visible-symbols state.
|
||||
pub(super) fn snapshot(&self) -> FlowSnapshot {
|
||||
FlowSnapshot {
|
||||
definitions_by_symbol: self.definitions_by_symbol.clone(),
|
||||
/// Get the id of the current basic block.
|
||||
pub(super) fn current_block_id(&self) -> BasicBlockId {
|
||||
BasicBlockId::from(self.definitions_per_block.len() - 1)
|
||||
}
|
||||
|
||||
/// Push a new basic block, with given block as predecessor.
|
||||
pub(super) fn new_block_from(&mut self, block_id: BasicBlockId, sealed: bool) {
|
||||
self.new_block_with_predecessors(smallvec![block_id], sealed);
|
||||
}
|
||||
|
||||
/// Push a new basic block, with current block as predecessor; return the current block's ID.
|
||||
pub(super) fn next_block(&mut self, sealed: bool) -> BasicBlockId {
|
||||
let current_block_id = self.current_block_id();
|
||||
self.new_block_from(current_block_id, sealed);
|
||||
current_block_id
|
||||
}
|
||||
|
||||
/// Add a predecessor to the current block.
|
||||
pub(super) fn merge_block(&mut self, new_predecessor: BasicBlockId) {
|
||||
let block_id = self.current_block_id();
|
||||
debug_assert!(!self.sealed_blocks[block_id]);
|
||||
self.predecessors[block_id].push(new_predecessor);
|
||||
}
|
||||
|
||||
/// Add predecessors to the current block.
|
||||
pub(super) fn merge_blocks(&mut self, new_predecessors: Vec<BasicBlockId>) {
|
||||
let block_id = self.current_block_id();
|
||||
debug_assert!(!self.sealed_blocks[block_id]);
|
||||
self.predecessors[block_id].extend(new_predecessors);
|
||||
}
|
||||
|
||||
/// Mark the current block as sealed; it cannot have any more predecessors added.
|
||||
pub(super) fn seal_current_block(&mut self) {
|
||||
self.seal_block(self.current_block_id());
|
||||
}
|
||||
|
||||
/// Mark a block as sealed; it cannot have any more predecessors added.
|
||||
pub(super) fn seal_block(&mut self, block_id: BasicBlockId) {
|
||||
debug_assert!(!self.sealed_blocks[block_id]);
|
||||
if let Some(phis) = self.incomplete_phis.get(&block_id) {
|
||||
for phi in phis.clone() {
|
||||
self.add_phi_operands(block_id, phi);
|
||||
}
|
||||
self.incomplete_phis.remove(&block_id);
|
||||
}
|
||||
self.sealed_blocks[block_id] = true;
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> UseDefMap<'db> {
|
||||
debug_assert!(self.incomplete_phis.is_empty());
|
||||
debug_assert!(self.sealed_blocks.iter().all(|&b| b));
|
||||
self.definitions_by_use.shrink_to_fit();
|
||||
self.phi_operands.shrink_to_fit();
|
||||
|
||||
let mut public_definitions: FxHashMap<ScopedSymbolId, Definition<'db>> =
|
||||
FxHashMap::default();
|
||||
|
||||
for symbol_id in self.defined_symbols.clone() {
|
||||
// SAFETY: We are only looking up defined symbols here, can't get None.
|
||||
public_definitions.insert(symbol_id, self.lookup(symbol_id).unwrap());
|
||||
}
|
||||
|
||||
UseDefMap {
|
||||
definitions_by_use: self.definitions_by_use,
|
||||
public_definitions,
|
||||
phi_operands: self.phi_operands,
|
||||
}
|
||||
}
|
||||
|
||||
/// Restore the current builder visible-definitions state to the given snapshot.
|
||||
pub(super) fn restore(&mut self, snapshot: FlowSnapshot) {
|
||||
// We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
|
||||
// IDs must line up), so the current number of known symbols must always be equal to or
|
||||
// greater than the number of known symbols in a previously-taken snapshot.
|
||||
let num_symbols = self.definitions_by_symbol.len();
|
||||
debug_assert!(num_symbols >= snapshot.definitions_by_symbol.len());
|
||||
/// Push a new basic block (with given predecessors) and return its ID.
|
||||
fn new_block_with_predecessors(
|
||||
&mut self,
|
||||
predecessors: PredecessorBlocks,
|
||||
sealed: bool,
|
||||
) -> BasicBlockId {
|
||||
let new_block_id = self.predecessors.push(predecessors);
|
||||
self.definitions_per_block.push(FxHashMap::default());
|
||||
self.sealed_blocks.push(sealed);
|
||||
|
||||
// Restore the current visible-definitions state to the given snapshot.
|
||||
self.definitions_by_symbol = snapshot.definitions_by_symbol;
|
||||
|
||||
// If the snapshot we are restoring is missing some symbols we've recorded since, we need
|
||||
// to fill them in so the symbol IDs continue to line up. Since they don't exist in the
|
||||
// snapshot, the correct state to fill them in with is "unbound", the default.
|
||||
self.definitions_by_symbol
|
||||
.resize(num_symbols, Definitions::unbound());
|
||||
new_block_id
|
||||
}
|
||||
|
||||
/// Merge the given snapshot into the current state, reflecting that we might have taken either
|
||||
/// path to get here. The new visible-definitions state for each symbol should include
|
||||
/// definitions from both the prior state and the snapshot.
|
||||
pub(super) fn merge(&mut self, snapshot: &FlowSnapshot) {
|
||||
// The tricky thing about merging two Ranges pointing into `all_definitions` is that if the
|
||||
// two Ranges aren't already adjacent in `all_definitions`, we will have to copy at least
|
||||
// one or the other of the ranges to the end of `all_definitions` so as to make them
|
||||
// adjacent. We can't ever move things around in `all_definitions` because previously
|
||||
// recorded uses may still have ranges pointing to any part of it; all we can do is append.
|
||||
// It's possible we may end up with some old entries in `all_definitions` that nobody is
|
||||
// pointing to, but that's OK.
|
||||
/// Look up the dominating definition for a symbol in the current block.
|
||||
///
|
||||
/// If there isn't a local definition, recursively look up the symbol in predecessor blocks,
|
||||
/// memoizing the found symbol in each block.
|
||||
fn lookup(&mut self, symbol_id: ScopedSymbolId) -> Option<Definition<'db>> {
|
||||
self.lookup_impl(self.current_block_id(), symbol_id)
|
||||
}
|
||||
|
||||
// We never remove symbols from `definitions_by_symbol` (it's an IndexVec, and the symbol
|
||||
// IDs must line up), so the current number of known symbols must always be equal to or
|
||||
// greater than the number of known symbols in a previously-taken snapshot.
|
||||
debug_assert!(self.definitions_by_symbol.len() >= snapshot.definitions_by_symbol.len());
|
||||
|
||||
for (symbol_id, current) in self.definitions_by_symbol.iter_mut_enumerated() {
|
||||
let Some(snapshot) = snapshot.definitions_by_symbol.get(symbol_id) else {
|
||||
// Symbol not present in snapshot, so it's unbound from that path.
|
||||
current.may_be_unbound = true;
|
||||
continue;
|
||||
};
|
||||
|
||||
// If the symbol can be unbound in either predecessor, it can be unbound post-merge.
|
||||
current.may_be_unbound |= snapshot.may_be_unbound;
|
||||
|
||||
// Merge the definition ranges.
|
||||
let current = &mut current.definitions_range;
|
||||
let snapshot = &snapshot.definitions_range;
|
||||
|
||||
// We never create reversed ranges.
|
||||
debug_assert!(current.end >= current.start);
|
||||
debug_assert!(snapshot.end >= snapshot.start);
|
||||
|
||||
if current == snapshot {
|
||||
// Ranges already identical, nothing to do.
|
||||
} else if snapshot.is_empty() {
|
||||
// Merging from an empty range; nothing to do.
|
||||
} else if (*current).is_empty() {
|
||||
// Merging to an empty range; just use the incoming range.
|
||||
*current = snapshot.clone();
|
||||
} else if snapshot.end >= current.start && snapshot.start <= current.end {
|
||||
// Ranges are adjacent or overlapping, merge them in-place.
|
||||
*current = current.start.min(snapshot.start)..current.end.max(snapshot.end);
|
||||
} else if current.end == self.all_definitions.len() {
|
||||
// Ranges are not adjacent or overlapping, `current` is at the end of
|
||||
// `all_definitions`, we need to copy `snapshot` to the end so they are adjacent
|
||||
// and can be merged into one range.
|
||||
self.all_definitions.extend_from_within(snapshot.clone());
|
||||
current.end = self.all_definitions.len();
|
||||
} else if snapshot.end == self.all_definitions.len() {
|
||||
// Ranges are not adjacent or overlapping, `snapshot` is at the end of
|
||||
// `all_definitions`, we need to copy `current` to the end so they are adjacent and
|
||||
// can be merged into one range.
|
||||
self.all_definitions.extend_from_within(current.clone());
|
||||
current.start = snapshot.start;
|
||||
current.end = self.all_definitions.len();
|
||||
} else {
|
||||
// Ranges are not adjacent and neither one is at the end of `all_definitions`, we
|
||||
// have to copy both to the end so they are adjacent and we can merge them.
|
||||
let start = self.all_definitions.len();
|
||||
self.all_definitions.extend_from_within(current.clone());
|
||||
self.all_definitions.extend_from_within(snapshot.clone());
|
||||
current.start = start;
|
||||
current.end = self.all_definitions.len();
|
||||
fn lookup_impl(
|
||||
&mut self,
|
||||
block_id: BasicBlockId,
|
||||
symbol_id: ScopedSymbolId,
|
||||
) -> Option<Definition<'db>> {
|
||||
if let Some(local) = self.definitions_per_block[block_id].get(&symbol_id) {
|
||||
return *local;
|
||||
}
|
||||
if !self.sealed_blocks[block_id] {
|
||||
// we may still be missing predecessors; insert an incomplete Phi.
|
||||
let definition = self.create_incomplete_phi(block_id, symbol_id);
|
||||
self.incomplete_phis
|
||||
.entry(block_id)
|
||||
.or_default()
|
||||
.push(definition);
|
||||
return Some(definition);
|
||||
}
|
||||
match self.predecessors[block_id].as_slice() {
|
||||
// entry block, no definition found: return None
|
||||
[] => None,
|
||||
// single predecessor, recurse
|
||||
&[single_predecessor_id] => {
|
||||
let definition = self.lookup_impl(single_predecessor_id, symbol_id);
|
||||
self.memoize(block_id, symbol_id, definition);
|
||||
definition
|
||||
}
|
||||
// multiple predecessors: create and memoize an incomplete Phi to break cycles, then
|
||||
// recurse into predecessors and fill the Phi operands.
|
||||
_ => {
|
||||
let phi = self.create_incomplete_phi(block_id, symbol_id);
|
||||
self.add_phi_operands(block_id, phi);
|
||||
Some(phi)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn finish(mut self) -> UseDefMap<'db> {
|
||||
self.all_definitions.shrink_to_fit();
|
||||
self.definitions_by_symbol.shrink_to_fit();
|
||||
self.definitions_by_use.shrink_to_fit();
|
||||
/// Recurse into predecessors to add operands for an incomplete Phi.
|
||||
fn add_phi_operands(&mut self, block_id: BasicBlockId, phi: Definition<'db>) {
|
||||
let predecessors: PredecessorBlocks = self.predecessors[block_id].clone();
|
||||
let operands: PhiOperands = predecessors
|
||||
.iter()
|
||||
.map(|pred_id| self.lookup_impl(*pred_id, phi.symbol(self.db)))
|
||||
.collect();
|
||||
let DefinitionKind::Phi(phi_id) = phi.kind(self.db) else {
|
||||
unreachable!("add_phi_operands called with non-Phi");
|
||||
};
|
||||
self.phi_operands[*phi_id] = operands;
|
||||
}
|
||||
|
||||
UseDefMap {
|
||||
all_definitions: self.all_definitions,
|
||||
definitions_by_use: self.definitions_by_use,
|
||||
public_definitions: self.definitions_by_symbol,
|
||||
}
|
||||
/// Remember a given definition for a given symbol in the given block.
|
||||
fn memoize(
|
||||
&mut self,
|
||||
block_id: BasicBlockId,
|
||||
symbol_id: ScopedSymbolId,
|
||||
definition_id: Option<Definition<'db>>,
|
||||
) {
|
||||
self.definitions_per_block[block_id].insert(symbol_id, definition_id);
|
||||
}
|
||||
|
||||
/// Create an incomplete Phi for the given block and symbol, memoize it, and return its ID.
|
||||
fn create_incomplete_phi(
|
||||
&mut self,
|
||||
block_id: BasicBlockId,
|
||||
symbol_id: ScopedSymbolId,
|
||||
) -> Definition<'db> {
|
||||
let phi_id = self.phi_operands.push(vec![]);
|
||||
let definition = Definition::new(
|
||||
self.db,
|
||||
self.file,
|
||||
self.file_scope,
|
||||
symbol_id,
|
||||
DefinitionKind::Phi(phi_id),
|
||||
countme::Count::default(),
|
||||
);
|
||||
self.memoize(block_id, symbol_id, Some(definition));
|
||||
definition
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::files::{File, FilePath};
|
||||
use ruff_db::source::line_index;
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::{Expr, ExpressionRef, StmtClassDef};
|
||||
use ruff_source_file::LineIndex;
|
||||
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::{resolve_module, Module};
|
||||
@@ -25,6 +27,14 @@ impl<'db> SemanticModel<'db> {
|
||||
self.db
|
||||
}
|
||||
|
||||
pub fn file_path(&self) -> &FilePath {
|
||||
self.file.path(self.db)
|
||||
}
|
||||
|
||||
pub fn line_index(&self) -> LineIndex {
|
||||
line_index(self.db.upcast(), self.file)
|
||||
}
|
||||
|
||||
pub fn resolve_module(&self, module_name: ModuleName) -> Option<Module> {
|
||||
resolve_module(self.db, module_name)
|
||||
}
|
||||
@@ -141,7 +151,7 @@ impl HasTy for ast::StmtFunctionDef {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
definition_ty(model.db, Some(definition))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -149,7 +159,7 @@ impl HasTy for StmtClassDef {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
definition_ty(model.db, Some(definition))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,7 +167,7 @@ impl HasTy for ast::Alias {
|
||||
fn ty<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
|
||||
let index = semantic_index(model.db, model.file);
|
||||
let definition = index.definition(self);
|
||||
definition_ty(model.db, definition)
|
||||
definition_ty(model.db, Some(definition))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,29 +181,32 @@ mod tests {
|
||||
use crate::program::{Program, SearchPathSettings};
|
||||
use crate::python_version::PythonVersion;
|
||||
use crate::types::Type;
|
||||
use crate::{HasTy, SemanticModel};
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
Program::new(
|
||||
fn setup_db<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
db.write_files(files)?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
);
|
||||
)?;
|
||||
|
||||
db
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn function_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
let db = setup_db([("/src/foo.py", "def test(): pass")])?;
|
||||
|
||||
db.write_file("/src/foo.py", "def test(): pass")?;
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
@@ -209,9 +222,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn class_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
let db = setup_db([("/src/foo.py", "class Test: pass")])?;
|
||||
|
||||
db.write_file("/src/foo.py", "class Test: pass")?;
|
||||
let foo = system_path_to_file(&db, "/src/foo.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, foo);
|
||||
@@ -227,12 +239,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn alias_ty() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_files([
|
||||
let db = setup_db([
|
||||
("/src/foo.py", "class Test: pass"),
|
||||
("/src/bar.py", "from foo import Test"),
|
||||
])?;
|
||||
|
||||
let bar = system_path_to_file(&db, "/src/bar.py").unwrap();
|
||||
|
||||
let ast = parsed_module(&db, bar);
|
||||
|
||||
@@ -7,9 +7,11 @@ use crate::semantic_index::symbol::{ScopeId, ScopedSymbolId};
|
||||
use crate::semantic_index::{global_scope, symbol_table, use_def_map};
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
mod builder;
|
||||
mod display;
|
||||
mod infer;
|
||||
|
||||
pub(crate) use self::builder::UnionBuilder;
|
||||
pub(crate) use self::infer::{infer_definition_types, infer_scope_types};
|
||||
|
||||
/// Infer the public type of a symbol (its type as seen from outside its scope).
|
||||
@@ -21,13 +23,7 @@ pub(crate) fn symbol_ty<'db>(
|
||||
let _span = tracing::trace_span!("symbol_ty", ?symbol).entered();
|
||||
|
||||
let use_def = use_def_map(db, scope);
|
||||
definitions_ty(
|
||||
db,
|
||||
use_def.public_definitions(symbol),
|
||||
use_def
|
||||
.public_may_be_unbound(symbol)
|
||||
.then_some(Type::Unbound),
|
||||
)
|
||||
definition_ty(db, use_def.public_definition(symbol))
|
||||
}
|
||||
|
||||
/// Shorthand for `symbol_ty` that takes a symbol name instead of an ID.
|
||||
@@ -58,49 +54,16 @@ pub(crate) fn builtins_symbol_ty_by_name<'db>(db: &'db dyn Db, name: &str) -> Ty
|
||||
}
|
||||
|
||||
/// Infer the type of a [`Definition`].
|
||||
pub(crate) fn definition_ty<'db>(db: &'db dyn Db, definition: Definition<'db>) -> Type<'db> {
|
||||
let inference = infer_definition_types(db, definition);
|
||||
inference.definition_ty(definition)
|
||||
}
|
||||
|
||||
/// Infer the combined type of an array of [`Definition`]s, plus one optional "unbound type".
|
||||
///
|
||||
/// Will return a union if there is more than one definition, or at least one plus an unbound
|
||||
/// type.
|
||||
///
|
||||
/// The "unbound type" represents the type in case control flow may not have passed through any
|
||||
/// definitions in this scope. If this isn't possible, then it will be `None`. If it is possible,
|
||||
/// and the result in that case should be Unbound (e.g. an unbound function local), then it will be
|
||||
/// `Some(Type::Unbound)`. If it is possible and the result should be something else (e.g. an
|
||||
/// implicit global lookup), then `unbound_type` will be `Some(the_global_symbol_type)`.
|
||||
///
|
||||
/// # Panics
|
||||
/// Will panic if called with zero definitions and no `unbound_ty`. This is a logic error,
|
||||
/// as any symbol with zero visible definitions clearly may be unbound, and the caller should
|
||||
/// provide an `unbound_ty`.
|
||||
pub(crate) fn definitions_ty<'db>(
|
||||
pub(crate) fn definition_ty<'db>(
|
||||
db: &'db dyn Db,
|
||||
definitions: &[Definition<'db>],
|
||||
unbound_ty: Option<Type<'db>>,
|
||||
definition: Option<Definition<'db>>,
|
||||
) -> Type<'db> {
|
||||
let def_types = definitions.iter().map(|def| definition_ty(db, *def));
|
||||
let mut all_types = unbound_ty.into_iter().chain(def_types);
|
||||
|
||||
let Some(first) = all_types.next() else {
|
||||
panic!("definitions_ty should never be called with zero definitions and no unbound_ty.")
|
||||
};
|
||||
|
||||
if let Some(second) = all_types.next() {
|
||||
let mut builder = UnionTypeBuilder::new(db);
|
||||
builder = builder.add(first).add(second);
|
||||
|
||||
for variant in all_types {
|
||||
builder = builder.add(variant);
|
||||
match definition {
|
||||
Some(definition) => {
|
||||
let inference = infer_definition_types(db, definition);
|
||||
inference.definition_ty(definition)
|
||||
}
|
||||
|
||||
Type::Union(builder.build())
|
||||
} else {
|
||||
first
|
||||
None => Type::Unbound,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,7 +80,7 @@ pub enum Type<'db> {
|
||||
/// name does not exist or is not bound to any value (this represents an error, but with some
|
||||
/// leniency options it could be silently resolved to Unknown in some cases)
|
||||
Unbound,
|
||||
/// the None object (TODO remove this in favor of Instance(types.NoneType)
|
||||
/// the None object -- TODO remove this in favor of Instance(types.NoneType)
|
||||
None,
|
||||
/// a specific function object
|
||||
Function(FunctionType<'db>),
|
||||
@@ -127,8 +90,11 @@ pub enum Type<'db> {
|
||||
Class(ClassType<'db>),
|
||||
/// the set of Python objects with the given class in their __class__'s method resolution order
|
||||
Instance(ClassType<'db>),
|
||||
/// the set of objects in any of the types in the union
|
||||
Union(UnionType<'db>),
|
||||
/// the set of objects in all of the types in the intersection
|
||||
Intersection(IntersectionType<'db>),
|
||||
/// An integer literal
|
||||
IntLiteral(i64),
|
||||
/// A boolean literal, either `True` or `False`.
|
||||
BooleanLiteral(bool),
|
||||
@@ -140,8 +106,27 @@ impl<'db> Type<'db> {
|
||||
matches!(self, Type::Unbound)
|
||||
}
|
||||
|
||||
pub const fn is_unknown(&self) -> bool {
|
||||
matches!(self, Type::Unknown)
|
||||
pub fn may_be_unbound(&self, db: &'db dyn Db) -> bool {
|
||||
match self {
|
||||
Type::Unbound => true,
|
||||
Type::Union(union) => union.contains(db, Type::Unbound),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn replace_unbound_with(&self, db: &'db dyn Db, replacement: Type<'db>) -> Type<'db> {
|
||||
match self {
|
||||
Type::Unbound => replacement,
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.into_iter()
|
||||
.fold(UnionBuilder::new(db), |builder, ty| {
|
||||
builder.add(ty.replace_unbound_with(db, replacement))
|
||||
})
|
||||
.build(),
|
||||
ty => *ty,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
@@ -159,15 +144,13 @@ impl<'db> Type<'db> {
|
||||
// TODO MRO? get_own_instance_member, get_instance_member
|
||||
todo!("attribute lookup on Instance type")
|
||||
}
|
||||
Type::Union(union) => Type::Union(
|
||||
union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.fold(UnionTypeBuilder::new(db), |builder, element_ty| {
|
||||
builder.add(element_ty.member(db, name))
|
||||
})
|
||||
.build(),
|
||||
),
|
||||
Type::Union(union) => union
|
||||
.elements(db)
|
||||
.iter()
|
||||
.fold(UnionBuilder::new(db), |builder, element_ty| {
|
||||
builder.add(element_ty.member(db, name))
|
||||
})
|
||||
.build(),
|
||||
Type::Intersection(_) => {
|
||||
// TODO perform the get_member on each type in the intersection
|
||||
// TODO return the intersection of those results
|
||||
@@ -251,7 +234,7 @@ impl<'db> ClassType<'db> {
|
||||
|
||||
#[salsa::interned]
|
||||
pub struct UnionType<'db> {
|
||||
/// the union type includes values in any of these types
|
||||
/// The union type includes values in any of these types.
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
}
|
||||
|
||||
@@ -261,48 +244,15 @@ impl<'db> UnionType<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
struct UnionTypeBuilder<'db> {
|
||||
elements: FxOrderSet<Type<'db>>,
|
||||
db: &'db dyn Db,
|
||||
}
|
||||
|
||||
impl<'db> UnionTypeBuilder<'db> {
|
||||
fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
elements: FxOrderSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a type to this union.
|
||||
fn add(mut self, ty: Type<'db>) -> Self {
|
||||
match ty {
|
||||
Type::Union(union) => {
|
||||
self.elements.extend(&union.elements(self.db));
|
||||
}
|
||||
_ => {
|
||||
self.elements.insert(ty);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
fn build(self) -> UnionType<'db> {
|
||||
UnionType::new(self.db, self.elements)
|
||||
}
|
||||
}
|
||||
|
||||
// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
// directly in intersections rather than as a separate type. This sacrifices some efficiency in the
|
||||
// case where a Not appears outside an intersection (unclear when that could even happen, but we'd
|
||||
// have to represent it as a single-element intersection if it did) in exchange for better
|
||||
// efficiency in the within-intersection case.
|
||||
#[salsa::interned]
|
||||
pub struct IntersectionType<'db> {
|
||||
// the intersection type includes only values in all of these types
|
||||
/// The intersection type includes only values in all of these types.
|
||||
positive: FxOrderSet<Type<'db>>,
|
||||
// the intersection type does not include any value in any of these types
|
||||
|
||||
/// The intersection type does not include any value in any of these types.
|
||||
///
|
||||
/// Negation types aren't expressible in annotations, and are most likely to arise from type
|
||||
/// narrowing along with intersections (e.g. `if not isinstance(...)`), so we represent them
|
||||
/// directly in intersections rather than as a separate type.
|
||||
negative: FxOrderSet<Type<'db>>,
|
||||
}
|
||||
|
||||
429
crates/red_knot_python_semantic/src/types/builder.rs
Normal file
429
crates/red_knot_python_semantic/src/types/builder.rs
Normal file
@@ -0,0 +1,429 @@
|
||||
//! Smart builders for union and intersection types.
|
||||
//!
|
||||
//! Invariants we maintain here:
|
||||
//! * No single-element union types (should just be the contained type instead.)
|
||||
//! * No single-positive-element intersection types. Single-negative-element are OK, we don't
|
||||
//! have a standalone negation type so there's no other representation for this.
|
||||
//! * The same type should never appear more than once in a union or intersection. (This should
|
||||
//! be expanded to cover subtyping -- see below -- but for now we only implement it for type
|
||||
//! identity.)
|
||||
//! * Disjunctive normal form (DNF): the tree of unions and intersections can never be deeper
|
||||
//! than a union-of-intersections. Unions cannot contain other unions (the inner union just
|
||||
//! flattens into the outer one), intersections cannot contain other intersections (also
|
||||
//! flattens), and intersections cannot contain unions (the intersection distributes over the
|
||||
//! union, inverting it into a union-of-intersections).
|
||||
//!
|
||||
//! The implication of these invariants is that a [`UnionBuilder`] does not necessarily build a
|
||||
//! [`Type::Union`]. For example, if only one type is added to the [`UnionBuilder`], `build()` will
|
||||
//! just return that type directly. The same is true for [`IntersectionBuilder`]; for example, if a
|
||||
//! union type is added to the intersection, it will distribute and [`IntersectionBuilder::build`]
|
||||
//! may end up returning a [`Type::Union`] of intersections.
|
||||
//!
|
||||
//! In the future we should have these additional invariants, but they aren't implemented yet:
|
||||
//! * No type in a union can be a subtype of any other type in the union (just eliminate the
|
||||
//! subtype from the union).
|
||||
//! * No type in an intersection can be a supertype of any other type in the intersection (just
|
||||
//! eliminate the supertype from the intersection).
|
||||
//! * An intersection containing two non-overlapping types should simplify to [`Type::Never`].
|
||||
use crate::types::{IntersectionType, Type, UnionType};
|
||||
use crate::{Db, FxOrderSet};
|
||||
|
||||
/// Incrementally builds a union of types, maintaining the invariants documented in the module
/// docs (no nested unions, no duplicate elements, no single-element or empty unions).
pub(crate) struct UnionBuilder<'db> {
    /// Elements collected so far; `FxOrderSet` deduplicates while preserving insertion order.
    elements: FxOrderSet<Type<'db>>,
    db: &'db dyn Db,
}
|
||||
|
||||
impl<'db> UnionBuilder<'db> {
|
||||
pub(crate) fn new(db: &'db dyn Db) -> Self {
|
||||
Self {
|
||||
db,
|
||||
elements: FxOrderSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a type to this union.
|
||||
pub(crate) fn add(mut self, ty: Type<'db>) -> Self {
|
||||
match ty {
|
||||
Type::Union(union) => {
|
||||
self.elements.extend(&union.elements(self.db));
|
||||
}
|
||||
Type::Never => {}
|
||||
_ => {
|
||||
self.elements.insert(ty);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn build(self) -> Type<'db> {
|
||||
match self.elements.len() {
|
||||
0 => Type::Never,
|
||||
1 => self.elements[0],
|
||||
_ => Type::Union(UnionType::new(self.db, self.elements)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Incrementally builds an intersection of types in disjunctive normal form.
#[allow(unused)]
#[derive(Clone)]
pub(crate) struct IntersectionBuilder<'db> {
    // Really this builds a union-of-intersections, because we always keep our set-theoretic types
    // in disjunctive normal form (DNF), a union of intersections. In the simplest case there's
    // just a single intersection in this vector, and we are building a single intersection type,
    // but if a union is added to the intersection, we'll distribute ourselves over that union and
    // create a union of intersections.
    intersections: Vec<InnerIntersectionBuilder<'db>>,
    db: &'db dyn Db,
}
|
||||
|
||||
impl<'db> IntersectionBuilder<'db> {
    /// Creates a builder holding one empty inner intersection; with nothing added, `build()`
    /// yields `Type::Never` (the inner builder's empty result).
    #[allow(dead_code)]
    fn new(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![InnerIntersectionBuilder::new()],
        }
    }

    /// Creates a builder with *no* inner intersections, used as the fold seed when distributing
    /// over a union: the distributed sub-builders' intersections are collected into it.
    fn empty(db: &'db dyn Db) -> Self {
        Self {
            db,
            intersections: vec![],
        }
    }

    /// Intersects `ty` into the builder. Adding a union distributes this (possibly already
    /// union-of-intersections) builder over the union's elements to stay in DNF.
    #[allow(dead_code)]
    fn add_positive(mut self, ty: Type<'db>) -> Self {
        if let Type::Union(union) = ty {
            // Distribute ourself over this union: for each union element, clone ourself and
            // intersect with that union element, then create a new union-of-intersections with all
            // of those sub-intersections in it. E.g. if `self` is a simple intersection `T1 & T2`
            // and we add `T3 | T4` to the intersection, we don't get `T1 & T2 & (T3 | T4)` (that's
            // not in DNF), we distribute the union and get `(T1 & T3) | (T2 & T3) | (T1 & T4) |
            // (T2 & T4)`. If `self` is already a union-of-intersections `(T1 & T2) | (T3 & T4)`
            // and we add `T5 | T6` to it, that flattens all the way out to `(T1 & T2 & T5) | (T1 &
            // T2 & T6) | (T3 & T4 & T5) ...` -- you get the idea.
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_positive(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            // If we are already a union-of-intersections, distribute the new intersected element
            // across all of those intersections.
            for inner in &mut self.intersections {
                inner.add_positive(self.db, ty);
            }
            self
        }
    }

    /// Intersects the *negation* of `ty` into the builder; distributes over unions exactly like
    /// `add_positive` (since `~(A | B)` narrows every branch of the DNF).
    #[allow(dead_code)]
    fn add_negative(mut self, ty: Type<'db>) -> Self {
        // See comments above in `add_positive`; this is just the negated version.
        if let Type::Union(union) = ty {
            union
                .elements(self.db)
                .iter()
                .map(|elem| self.clone().add_negative(*elem))
                .fold(IntersectionBuilder::empty(self.db), |mut builder, sub| {
                    builder.intersections.extend(sub.intersections);
                    builder
                })
        } else {
            for inner in &mut self.intersections {
                inner.add_negative(self.db, ty);
            }
            self
        }
    }

    /// Finishes building: a single inner intersection builds directly to its type; multiple
    /// inner intersections build to a union of their results (the DNF top level).
    #[allow(dead_code)]
    fn build(mut self) -> Type<'db> {
        // Avoid allocating the UnionBuilder unnecessarily if we have just one intersection:
        if self.intersections.len() == 1 {
            self.intersections.pop().unwrap().build(self.db)
        } else {
            let mut builder = UnionBuilder::new(self.db);
            for inner in self.intersections {
                builder = builder.add(inner.build(self.db));
            }
            builder.build()
        }
    }
}
|
||||
|
||||
/// Builds one conjunct of the DNF: a single intersection, as a positive set and a negative set
/// of types (mirroring the fields of [`IntersectionType`]).
#[allow(unused)]
#[derive(Debug, Clone, Default)]
struct InnerIntersectionBuilder<'db> {
    // Types the intersection must include (conjunction of memberships).
    positive: FxOrderSet<Type<'db>>,
    // Types the intersection must exclude (conjunction of non-memberships).
    negative: FxOrderSet<Type<'db>>,
}
|
||||
|
||||
impl<'db> InnerIntersectionBuilder<'db> {
    /// Creates an empty inner intersection (both sets empty); building it yields `Type::Never`.
    fn new() -> Self {
        Self::default()
    }

    /// Adds a positive type to this intersection.
    fn add_positive(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        match ty {
            // A nested intersection flattens into this one (intersections never contain
            // intersections); elements appearing on opposite sides cancel (`T & ~T`).
            Type::Intersection(inter) => {
                let pos = inter.positive(db);
                let neg = inter.negative(db);
                // NOTE(review): the second `extend` filters against `self.positive` *after* the
                // first `extend` has already mutated it, so these four lines are order-sensitive;
                // the `retain` calls then re-cancel against the incoming sets. Confirm this is
                // the intended cancellation semantics for all overlap cases.
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            _ => {
                // If `ty` was previously negated, the pair cancels: remove the negation instead
                // of inserting positively.
                if !self.negative.remove(&ty) {
                    self.positive.insert(ty);
                };
            }
        }
    }

    /// Adds a negative type to this intersection.
    fn add_negative(&mut self, db: &'db dyn Db, ty: Type<'db>) {
        // TODO Any/Unknown actually should not self-cancel
        match ty {
            Type::Intersection(intersection) => {
                // NOTE(review): negating a nested intersection is handled by swapping its
                // positive and negative sets, i.e. `~(A & ~B)` is folded in as `~A & B`. By
                // De Morgan, `~(A & ~B)` is the *union* `~A | B`, so this looks like an
                // over-narrowing — confirm whether this case should instead distribute into a
                // union at the outer `IntersectionBuilder` level.
                let pos = intersection.negative(db);
                let neg = intersection.positive(db);
                self.positive.extend(pos.difference(&self.negative));
                self.negative.extend(neg.difference(&self.positive));
                self.positive.retain(|elem| !neg.contains(elem));
                self.negative.retain(|elem| !pos.contains(elem));
            }
            // Subtracting `Never` removes no values; drop it entirely.
            Type::Never => {}
            _ => {
                // Mirror of `add_positive`: a prior positive occurrence cancels the negation.
                if !self.positive.remove(&ty) {
                    self.negative.insert(ty);
                };
            }
        }
    }

    /// Applies ad-hoc simplifications before building.
    fn simplify(&mut self) {
        // TODO this should be generalized based on subtyping, for now we just handle a few cases

        // Never is a subtype of all types
        if self.positive.contains(&Type::Never) {
            self.positive.clear();
            self.negative.clear();
            self.positive.insert(Type::Never);
        }
    }

    /// Finishes the intersection, collapsing degenerate cases: empty builds to `Never`, a single
    /// positive type builds to that type itself. A lone *negative* element still builds to an
    /// intersection, since there is no standalone negation type.
    fn build(mut self, db: &'db dyn Db) -> Type<'db> {
        self.simplify();
        match (self.positive.len(), self.negative.len()) {
            (0, 0) => Type::Never,
            (1, 0) => self.positive[0],
            _ => {
                // Shrink before interning so the stored sets carry no excess capacity.
                self.positive.shrink_to_fit();
                self.negative.shrink_to_fit();
                Type::Intersection(IntersectionType::new(db, self.positive, self.negative))
            }
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::{IntersectionBuilder, IntersectionType, Type, UnionBuilder, UnionType};
    use crate::db::tests::TestDb;

    fn setup_db() -> TestDb {
        TestDb::new()
    }

    // Test-only accessor: materialize a union's elements as a Vec for order-sensitive asserts.
    impl<'db> UnionType<'db> {
        fn elements_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.elements(db).into_iter().collect()
        }
    }

    // Two distinct types build to a two-element union, in insertion order.
    #[test]
    fn build_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let Type::Union(union) = UnionBuilder::new(&db).add(t0).add(t1).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1]);
    }

    // A single element is returned as-is, not wrapped in a one-element union.
    #[test]
    fn build_union_single() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).build();

        assert_eq!(ty, t0);
    }

    // An empty builder collapses to Never.
    #[test]
    fn build_union_empty() {
        let db = setup_db();
        let ty = UnionBuilder::new(&db).build();

        assert_eq!(ty, Type::Never);
    }

    // Never is dropped from unions (it is the identity element).
    #[test]
    fn build_union_never() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ty = UnionBuilder::new(&db).add(t0).add(Type::Never).build();

        assert_eq!(ty, t0);
    }

    // Adding a union to a union flattens it: no nested unions.
    #[test]
    fn build_union_flatten() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let u1 = UnionBuilder::new(&db).add(t0).add(t1).build();
        let Type::Union(union) = UnionBuilder::new(&db).add(u1).add(t2).build() else {
            panic!("expected a union");
        };

        assert_eq!(union.elements_vec(&db), &[t0, t1, t2]);
    }

    // Test-only accessors: materialize an intersection's sides as Vecs for ordered asserts.
    impl<'db> IntersectionType<'db> {
        fn pos_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.positive(db).into_iter().collect()
        }

        fn neg_vec(self, db: &'db TestDb) -> Vec<Type<'db>> {
            self.negative(db).into_iter().collect()
        }
    }

    // One positive plus one negative element builds to an intersection carrying both.
    #[test]
    fn build_intersection() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let ta = Type::Any;
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[ta]);
        assert_eq!(inter.neg_vec(&db), &[t0]);
    }

    // A nested intersection added positively flattens into the outer one.
    #[test]
    fn build_intersection_flatten_positive() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_positive(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, ta]);
        assert_eq!(inter.neg_vec(&db), &[t1]);
    }

    // A nested intersection added negatively flattens with its sides swapped
    // (see the NOTE(review) on `InnerIntersectionBuilder::add_negative`).
    #[test]
    fn build_intersection_flatten_negative() {
        let db = setup_db();
        let ta = Type::Any;
        let t1 = Type::IntLiteral(1);
        let t2 = Type::IntLiteral(2);
        let i0 = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_negative(t1)
            .build();
        let Type::Intersection(inter) = IntersectionBuilder::new(&db)
            .add_positive(t2)
            .add_negative(i0)
            .build()
        else {
            panic!("expected to be an intersection");
        };

        assert_eq!(inter.pos_vec(&db), &[t2, t1]);
        assert_eq!(inter.neg_vec(&db), &[ta]);
    }

    // Adding a union to an intersection distributes: `Any & (0 | 1)` becomes
    // `(Any & 0) | (Any & 1)`, keeping the result in DNF.
    #[test]
    fn intersection_distributes_over_union() {
        let db = setup_db();
        let t0 = Type::IntLiteral(0);
        let t1 = Type::IntLiteral(1);
        let ta = Type::Any;
        let u0 = UnionBuilder::new(&db).add(t0).add(t1).build();

        let Type::Union(union) = IntersectionBuilder::new(&db)
            .add_positive(ta)
            .add_positive(u0)
            .build()
        else {
            panic!("expected a union");
        };
        let [Type::Intersection(i0), Type::Intersection(i1)] = union.elements_vec(&db)[..] else {
            panic!("expected a union of two intersections");
        };
        assert_eq!(i0.pos_vec(&db), &[ta, t0]);
        assert_eq!(i1.pos_vec(&db), &[ta, t1]);
    }

    // A type intersected with its own negation cancels to Never.
    #[test]
    fn build_intersection_self_negation() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::None)
            .build();

        assert_eq!(ty, Type::Never);
    }

    // Subtracting Never is a no-op: `None & ~Never` simplifies to `None`.
    #[test]
    fn build_intersection_simplify_negative_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_negative(Type::Never)
            .build();

        assert_eq!(ty, Type::None);
    }

    // A positive Never absorbs the whole intersection (Never is a subtype of everything).
    #[test]
    fn build_intersection_simplify_positive_never() {
        let db = setup_db();
        let ty = IntersectionBuilder::new(&db)
            .add_positive(Type::None)
            .add_positive(Type::Never)
            .build();

        assert_eq!(ty, Type::Never);
    }
}
|
||||
@@ -33,14 +33,18 @@ use crate::builtins::builtins_scope;
|
||||
use crate::module_name::ModuleName;
|
||||
use crate::module_resolver::resolve_module;
|
||||
use crate::semantic_index::ast_ids::{HasScopedAstId, HasScopedUseId, ScopedExpressionId};
|
||||
use crate::semantic_index::definition::{Definition, DefinitionKind, DefinitionNodeKey};
|
||||
use crate::semantic_index::definition::{
|
||||
Definition, DefinitionKind, DefinitionNode, DefinitionNodeKey, ScopedPhiId,
|
||||
};
|
||||
use crate::semantic_index::expression::Expression;
|
||||
use crate::semantic_index::semantic_index;
|
||||
use crate::semantic_index::symbol::{FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId};
|
||||
use crate::semantic_index::symbol::{
|
||||
FileScopeId, NodeWithScopeKind, NodeWithScopeRef, ScopeId, Symbol,
|
||||
};
|
||||
use crate::semantic_index::SemanticIndex;
|
||||
use crate::types::{
|
||||
builtins_symbol_ty_by_name, definitions_ty, global_symbol_ty_by_name, ClassType, FunctionType,
|
||||
Name, Type, UnionTypeBuilder,
|
||||
builtins_symbol_ty_by_name, definition_ty, global_symbol_ty_by_name, ClassType, FunctionType,
|
||||
Name, Type, UnionBuilder,
|
||||
};
|
||||
use crate::Db;
|
||||
|
||||
@@ -61,7 +65,7 @@ pub(crate) fn infer_scope_types<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Ty
|
||||
TypeInferenceBuilder::new(db, InferenceRegion::Scope(scope), index).finish()
|
||||
}
|
||||
|
||||
/// Cycle recovery for [`infer_definition_types`]: for now, just [`Type::Unknown`]
|
||||
/// Cycle recovery for [`infer_definition_types()`]: for now, just [`Type::Unknown`]
|
||||
/// TODO fixpoint iteration
|
||||
fn infer_definition_types_cycle_recovery<'db>(
|
||||
_db: &'db dyn Db,
|
||||
@@ -260,34 +264,61 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
NodeWithScopeKind::FunctionTypeParameters(function) => {
|
||||
self.infer_function_type_params(function.node());
|
||||
}
|
||||
NodeWithScopeKind::ListComprehension(comprehension) => {
|
||||
self.infer_list_comprehension_expression_scope(comprehension.node());
|
||||
}
|
||||
NodeWithScopeKind::SetComprehension(comprehension) => {
|
||||
self.infer_set_comprehension_expression_scope(comprehension.node());
|
||||
}
|
||||
NodeWithScopeKind::DictComprehension(comprehension) => {
|
||||
self.infer_dict_comprehension_expression_scope(comprehension.node());
|
||||
}
|
||||
NodeWithScopeKind::GeneratorExpression(generator) => {
|
||||
self.infer_generator_expression_scope(generator.node());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_region_definition(&mut self, definition: Definition<'db>) {
|
||||
match definition.node(self.db) {
|
||||
DefinitionKind::Function(function) => {
|
||||
self.infer_function_definition(function.node(), definition);
|
||||
}
|
||||
DefinitionKind::Class(class) => self.infer_class_definition(class.node(), definition),
|
||||
DefinitionKind::Import(import) => {
|
||||
self.infer_import_definition(import.node(), definition);
|
||||
}
|
||||
DefinitionKind::ImportFrom(import_from) => {
|
||||
self.infer_import_from_definition(
|
||||
import_from.import(),
|
||||
import_from.alias(),
|
||||
definition,
|
||||
);
|
||||
}
|
||||
DefinitionKind::Assignment(assignment) => {
|
||||
self.infer_assignment_definition(assignment.assignment(), definition);
|
||||
}
|
||||
DefinitionKind::AnnotatedAssignment(annotated_assignment) => {
|
||||
self.infer_annotated_assignment_definition(annotated_assignment.node(), definition);
|
||||
}
|
||||
DefinitionKind::NamedExpression(named_expression) => {
|
||||
self.infer_named_expression_definition(named_expression.node(), definition);
|
||||
}
|
||||
match definition.kind(self.db) {
|
||||
DefinitionKind::Phi(phi_id) => self.infer_phi_definition(*phi_id, definition),
|
||||
DefinitionKind::Node(node) => match node {
|
||||
DefinitionNode::Function(function) => {
|
||||
self.infer_function_definition(function.node(), definition);
|
||||
}
|
||||
DefinitionNode::Class(class) => {
|
||||
self.infer_class_definition(class.node(), definition);
|
||||
}
|
||||
DefinitionNode::Import(import) => {
|
||||
self.infer_import_definition(import.node(), definition);
|
||||
}
|
||||
DefinitionNode::ImportFrom(import_from) => {
|
||||
self.infer_import_from_definition(
|
||||
import_from.import(),
|
||||
import_from.alias(),
|
||||
definition,
|
||||
);
|
||||
}
|
||||
DefinitionNode::Assignment(assignment) => {
|
||||
self.infer_assignment_definition(assignment.assignment(), definition);
|
||||
}
|
||||
DefinitionNode::AnnotatedAssignment(annotated_assignment) => {
|
||||
self.infer_annotated_assignment_definition(
|
||||
annotated_assignment.node(),
|
||||
definition,
|
||||
);
|
||||
}
|
||||
DefinitionNode::NamedExpression(named_expression) => {
|
||||
self.infer_named_expression_definition(named_expression.node(), definition);
|
||||
}
|
||||
DefinitionNode::Comprehension(comprehension) => {
|
||||
self.infer_comprehension_definition(
|
||||
comprehension.node(),
|
||||
comprehension.is_first(),
|
||||
definition,
|
||||
);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -377,6 +408,18 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
self.extend(result);
|
||||
}
|
||||
|
||||
fn infer_phi_definition(&mut self, phi_id: ScopedPhiId, definition: Definition<'db>) {
|
||||
let file_scope_id = self.scope.file_scope_id(self.db);
|
||||
let use_def = self.index.use_def_map(file_scope_id);
|
||||
let ty = use_def
|
||||
.phi_operands(phi_id)
|
||||
.iter()
|
||||
.map(|&definition| definition_ty(self.db, definition))
|
||||
.fold(UnionBuilder::new(self.db), UnionBuilder::add)
|
||||
.build();
|
||||
self.types.definitions.insert(definition, ty);
|
||||
}
|
||||
|
||||
fn infer_function_definition_statement(&mut self, function: &ast::StmtFunctionDef) {
|
||||
self.infer_definition(function);
|
||||
}
|
||||
@@ -923,7 +966,6 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
ty
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_self)]
|
||||
fn infer_number_literal_expression(&mut self, literal: &ast::ExprNumberLiteral) -> Type<'db> {
|
||||
let ast::ExprNumberLiteral { range: _, value } = literal;
|
||||
|
||||
@@ -1054,18 +1096,24 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
builtins_symbol_ty_by_name(self.db, "dict").instance()
|
||||
}
|
||||
|
||||
/// Infer the type of the `iter` expression of the first comprehension.
|
||||
fn infer_first_comprehension_iter(&mut self, comprehensions: &[ast::Comprehension]) {
|
||||
let mut generators_iter = comprehensions.iter();
|
||||
let Some(first_generator) = generators_iter.next() else {
|
||||
unreachable!("Comprehension must contain at least one generator");
|
||||
};
|
||||
self.infer_expression(&first_generator.iter);
|
||||
}
|
||||
|
||||
fn infer_generator_expression(&mut self, generator: &ast::ExprGenerator) -> Type<'db> {
|
||||
let ast::ExprGenerator {
|
||||
range: _,
|
||||
elt,
|
||||
elt: _,
|
||||
generators,
|
||||
parenthesized: _,
|
||||
} = generator;
|
||||
|
||||
self.infer_expression(elt);
|
||||
for generator in generators {
|
||||
self.infer_comprehension(generator);
|
||||
}
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO generator type
|
||||
Type::Unknown
|
||||
@@ -1074,20 +1122,71 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> {
|
||||
let ast::ExprListComp {
|
||||
range: _,
|
||||
elt,
|
||||
elt: _,
|
||||
generators,
|
||||
} = listcomp;
|
||||
|
||||
self.infer_expression(elt);
|
||||
for generator in generators {
|
||||
self.infer_comprehension(generator);
|
||||
}
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO list type
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> {
|
||||
let ast::ExprDictComp {
|
||||
range: _,
|
||||
key: _,
|
||||
value: _,
|
||||
generators,
|
||||
} = dictcomp;
|
||||
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO dict type
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
|
||||
let ast::ExprSetComp {
|
||||
range: _,
|
||||
elt: _,
|
||||
generators,
|
||||
} = setcomp;
|
||||
|
||||
self.infer_first_comprehension_iter(generators);
|
||||
|
||||
// TODO set type
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) {
|
||||
let ast::ExprGenerator {
|
||||
range: _,
|
||||
elt,
|
||||
generators,
|
||||
parenthesized: _,
|
||||
} = generator;
|
||||
|
||||
self.infer_expression(elt);
|
||||
for comprehension in generators {
|
||||
self.infer_comprehension(comprehension);
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_list_comprehension_expression_scope(&mut self, listcomp: &ast::ExprListComp) {
|
||||
let ast::ExprListComp {
|
||||
range: _,
|
||||
elt,
|
||||
generators,
|
||||
} = listcomp;
|
||||
|
||||
self.infer_expression(elt);
|
||||
for comprehension in generators {
|
||||
self.infer_comprehension(comprehension);
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_dict_comprehension_expression_scope(&mut self, dictcomp: &ast::ExprDictComp) {
|
||||
let ast::ExprDictComp {
|
||||
range: _,
|
||||
key,
|
||||
@@ -1097,46 +1196,51 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
self.infer_expression(key);
|
||||
self.infer_expression(value);
|
||||
for generator in generators {
|
||||
self.infer_comprehension(generator);
|
||||
for comprehension in generators {
|
||||
self.infer_comprehension(comprehension);
|
||||
}
|
||||
|
||||
// TODO dict type
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> {
|
||||
fn infer_set_comprehension_expression_scope(&mut self, setcomp: &ast::ExprSetComp) {
|
||||
let ast::ExprSetComp {
|
||||
range: _,
|
||||
elt,
|
||||
generators,
|
||||
} = setcomp;
|
||||
self.infer_expression(elt);
|
||||
for generator in generators {
|
||||
self.infer_comprehension(generator);
|
||||
}
|
||||
|
||||
// TODO set type
|
||||
Type::Unknown
|
||||
self.infer_expression(elt);
|
||||
for comprehension in generators {
|
||||
self.infer_comprehension(comprehension);
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_comprehension(&mut self, comprehension: &ast::Comprehension) -> Type<'db> {
|
||||
fn infer_comprehension(&mut self, comprehension: &ast::Comprehension) {
|
||||
self.infer_definition(comprehension);
|
||||
for expr in &comprehension.ifs {
|
||||
self.infer_expression(expr);
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_comprehension_definition(
|
||||
&mut self,
|
||||
comprehension: &ast::Comprehension,
|
||||
is_first: bool,
|
||||
definition: Definition<'db>,
|
||||
) {
|
||||
let ast::Comprehension {
|
||||
range: _,
|
||||
target,
|
||||
iter,
|
||||
ifs,
|
||||
ifs: _,
|
||||
is_async: _,
|
||||
} = comprehension;
|
||||
|
||||
self.infer_expression(target);
|
||||
self.infer_expression(iter);
|
||||
for if_clause in ifs {
|
||||
self.infer_expression(if_clause);
|
||||
if !is_first {
|
||||
self.infer_expression(iter);
|
||||
}
|
||||
|
||||
// TODO comprehension type
|
||||
Type::Unknown
|
||||
// TODO(dhruvmanila): The target type should be inferred based on the iter type instead.
|
||||
let target_ty = self.infer_expression(target);
|
||||
self.types.definitions.insert(definition, target_ty);
|
||||
}
|
||||
|
||||
fn infer_named_expression(&mut self, named: &ast::ExprNamed) -> Type<'db> {
|
||||
@@ -1179,12 +1283,10 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let body_ty = self.infer_expression(body);
|
||||
let orelse_ty = self.infer_expression(orelse);
|
||||
|
||||
let union = UnionTypeBuilder::new(self.db)
|
||||
UnionBuilder::new(self.db)
|
||||
.add(body_ty)
|
||||
.add(orelse_ty)
|
||||
.build();
|
||||
|
||||
Type::Union(union)
|
||||
.build()
|
||||
}
|
||||
|
||||
fn infer_lambda_body(&mut self, lambda_expression: &ast::ExprLambda) {
|
||||
@@ -1260,6 +1362,22 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
Type::Unknown
|
||||
}
|
||||
|
||||
fn infer_global_name_reference(&self, symbol: &Symbol) -> Type<'db> {
|
||||
let file_scope_id = self.scope.file_scope_id(self.db);
|
||||
// implicit global
|
||||
let mut ty = if file_scope_id == FileScopeId::global() {
|
||||
Type::Unbound
|
||||
} else {
|
||||
global_symbol_ty_by_name(self.db, self.file, symbol.name())
|
||||
};
|
||||
// fallback to builtins
|
||||
if ty.may_be_unbound(self.db) && Some(self.scope) != builtins_scope(self.db) {
|
||||
ty = ty
|
||||
.replace_unbound_with(self.db, builtins_symbol_ty_by_name(self.db, symbol.name()));
|
||||
}
|
||||
ty
|
||||
}
|
||||
|
||||
fn infer_name_expression(&mut self, name: &ast::ExprName) -> Type<'db> {
|
||||
let ast::ExprName { range: _, id, ctx } = name;
|
||||
|
||||
@@ -1268,34 +1386,18 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
let file_scope_id = self.scope.file_scope_id(self.db);
|
||||
let use_def = self.index.use_def_map(file_scope_id);
|
||||
let use_id = name.scoped_use_id(self.db, self.scope);
|
||||
let may_be_unbound = use_def.use_may_be_unbound(use_id);
|
||||
|
||||
let unbound_ty = if may_be_unbound {
|
||||
let mut ty = definition_ty(self.db, use_def.definition_for_use(use_id));
|
||||
if ty.may_be_unbound(self.db) {
|
||||
let symbols = self.index.symbol_table(file_scope_id);
|
||||
// SAFETY: the symbol table always creates a symbol for every Name node.
|
||||
let symbol = symbols.symbol_by_name(id).unwrap();
|
||||
if !symbol.is_defined() || !self.scope.is_function_like(self.db) {
|
||||
// implicit global
|
||||
let mut unbound_ty = if file_scope_id == FileScopeId::global() {
|
||||
Type::Unbound
|
||||
} else {
|
||||
global_symbol_ty_by_name(self.db, self.file, id)
|
||||
};
|
||||
// fallback to builtins
|
||||
if matches!(unbound_ty, Type::Unbound)
|
||||
&& Some(self.scope) != builtins_scope(self.db)
|
||||
{
|
||||
unbound_ty = builtins_symbol_ty_by_name(self.db, id);
|
||||
}
|
||||
Some(unbound_ty)
|
||||
} else {
|
||||
Some(Type::Unbound)
|
||||
ty = ty.replace_unbound_with(
|
||||
self.db,
|
||||
self.infer_global_name_reference(symbol),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
definitions_ty(self.db, use_def.use_definitions(use_id), unbound_ty)
|
||||
}
|
||||
ty
|
||||
}
|
||||
ExprContext::Store | ExprContext::Del => Type::None,
|
||||
ExprContext::Invalid => Type::Unknown,
|
||||
@@ -1494,6 +1596,7 @@ impl<'db> TypeInferenceBuilder<'db> {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Context;
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
@@ -1508,40 +1611,58 @@ mod tests {
|
||||
use crate::semantic_index::symbol::FileScopeId;
|
||||
use crate::semantic_index::{global_scope, semantic_index, symbol_table, use_def_map};
|
||||
use crate::types::{global_symbol_ty_by_name, infer_definition_types, symbol_ty_by_name, Type};
|
||||
use crate::{HasTy, SemanticModel};
|
||||
use crate::{HasTy, ProgramSettings, SemanticModel};
|
||||
|
||||
fn setup_db() -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
Program::new(
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root,
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid search path settings");
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
fn setup_db_with_custom_typeshed(typeshed: &str) -> TestDb {
|
||||
let db = TestDb::new();
|
||||
fn setup_db_with_custom_typeshed<'a>(
|
||||
typeshed: &str,
|
||||
files: impl IntoIterator<Item = (&'a str, &'a str)>,
|
||||
) -> anyhow::Result<TestDb> {
|
||||
let mut db = TestDb::new();
|
||||
let src_root = SystemPathBuf::from("/src");
|
||||
|
||||
Program::new(
|
||||
db.write_files(files)
|
||||
.context("Failed to write test files")?;
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
site_packages: vec![],
|
||||
custom_typeshed: Some(SystemPathBuf::from(typeshed)),
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root,
|
||||
site_packages: vec![],
|
||||
custom_typeshed: Some(SystemPathBuf::from(typeshed)),
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.context("Failed to create Program")?;
|
||||
|
||||
db
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
fn assert_public_ty(db: &TestDb, file_name: &str, symbol_name: &str, expected: &str) {
|
||||
@@ -2066,6 +2187,38 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn conditionally_global_or_builtin() -> anyhow::Result<()> {
|
||||
let mut db = setup_db();
|
||||
|
||||
db.write_dedented(
|
||||
"/src/a.py",
|
||||
"
|
||||
if flag:
|
||||
copyright = 1
|
||||
def f():
|
||||
y = copyright
|
||||
",
|
||||
)?;
|
||||
|
||||
let file = system_path_to_file(&db, "src/a.py").expect("Expected file to exist.");
|
||||
let index = semantic_index(&db, file);
|
||||
let function_scope = index
|
||||
.child_scopes(FileScopeId::global())
|
||||
.next()
|
||||
.unwrap()
|
||||
.0
|
||||
.to_scope_id(&db, file);
|
||||
let y_ty = symbol_ty_by_name(&db, function_scope, "y");
|
||||
|
||||
assert_eq!(
|
||||
y_ty.display(&db).to_string(),
|
||||
"Literal[1] | Literal[copyright]"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Class name lookups do fall back to globals, but the public type never does.
|
||||
#[test]
|
||||
fn unbound_class_local() -> anyhow::Result<()> {
|
||||
@@ -2131,16 +2284,17 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn builtin_symbol_custom_stdlib() -> anyhow::Result<()> {
|
||||
let mut db = setup_db_with_custom_typeshed("/typeshed");
|
||||
|
||||
db.write_files([
|
||||
("/src/a.py", "c = copyright"),
|
||||
(
|
||||
"/typeshed/stdlib/builtins.pyi",
|
||||
"def copyright() -> None: ...",
|
||||
),
|
||||
("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
|
||||
])?;
|
||||
let db = setup_db_with_custom_typeshed(
|
||||
"/typeshed",
|
||||
[
|
||||
("/src/a.py", "c = copyright"),
|
||||
(
|
||||
"/typeshed/stdlib/builtins.pyi",
|
||||
"def copyright() -> None: ...",
|
||||
),
|
||||
("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
|
||||
],
|
||||
)?;
|
||||
|
||||
assert_public_ty(&db, "/src/a.py", "c", "Literal[copyright]");
|
||||
|
||||
@@ -2160,13 +2314,14 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unknown_builtin_later_defined() -> anyhow::Result<()> {
|
||||
let mut db = setup_db_with_custom_typeshed("/typeshed");
|
||||
|
||||
db.write_files([
|
||||
("/src/a.py", "x = foo"),
|
||||
("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
|
||||
("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
|
||||
])?;
|
||||
let db = setup_db_with_custom_typeshed(
|
||||
"/typeshed",
|
||||
[
|
||||
("/src/a.py", "x = foo"),
|
||||
("/typeshed/stdlib/builtins.pyi", "foo = bar; bar = 1"),
|
||||
("/typeshed/stdlib/VERSIONS", "builtins: 3.8-"),
|
||||
],
|
||||
)?;
|
||||
|
||||
assert_public_ty(&db, "/src/a.py", "x", "Unbound");
|
||||
|
||||
@@ -2287,11 +2442,10 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn first_public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
|
||||
fn public_def<'db>(db: &'db TestDb, file: File, name: &str) -> Definition<'db> {
|
||||
let scope = global_scope(db, file);
|
||||
*use_def_map(db, scope)
|
||||
.public_definitions(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
|
||||
.first()
|
||||
use_def_map(db, scope)
|
||||
.public_definition(symbol_table(db, scope).symbol_id_by_name(name).unwrap())
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
@@ -2434,7 +2588,7 @@ mod tests {
|
||||
assert_function_query_was_not_run(
|
||||
&db,
|
||||
infer_definition_types,
|
||||
first_public_def(&db, a, "x"),
|
||||
public_def(&db, a, "x"),
|
||||
&events,
|
||||
);
|
||||
|
||||
@@ -2470,7 +2624,7 @@ mod tests {
|
||||
assert_function_query_was_not_run(
|
||||
&db,
|
||||
infer_definition_types,
|
||||
first_public_def(&db, a, "x"),
|
||||
public_def(&db, a, "x"),
|
||||
&events,
|
||||
);
|
||||
Ok(())
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use anyhow::Ok;
|
||||
use lsp_types::NotebookCellKind;
|
||||
use ruff_notebook::CellMetadata;
|
||||
use rustc_hash::{FxBuildHasher, FxHashMap};
|
||||
|
||||
use crate::{PositionEncoding, TextDocument};
|
||||
@@ -65,7 +66,7 @@ impl NotebookDocument {
|
||||
NotebookCellKind::Code => ruff_notebook::Cell::Code(ruff_notebook::CodeCell {
|
||||
execution_count: None,
|
||||
id: None,
|
||||
metadata: serde_json::Value::Null,
|
||||
metadata: CellMetadata::default(),
|
||||
outputs: vec![],
|
||||
source: ruff_notebook::SourceValue::String(
|
||||
cell.document.contents().to_string(),
|
||||
@@ -75,7 +76,7 @@ impl NotebookDocument {
|
||||
ruff_notebook::Cell::Markdown(ruff_notebook::MarkdownCell {
|
||||
attachments: None,
|
||||
id: None,
|
||||
metadata: serde_json::Value::Null,
|
||||
metadata: CellMetadata::default(),
|
||||
source: ruff_notebook::SourceValue::String(
|
||||
cell.document.contents().to_string(),
|
||||
),
|
||||
|
||||
@@ -2,8 +2,9 @@ use std::borrow::Cow;
|
||||
|
||||
use lsp_types::request::DocumentDiagnosticRequest;
|
||||
use lsp_types::{
|
||||
Diagnostic, DocumentDiagnosticParams, DocumentDiagnosticReport, DocumentDiagnosticReportResult,
|
||||
FullDocumentDiagnosticReport, Range, RelatedFullDocumentDiagnosticReport, Url,
|
||||
Diagnostic, DiagnosticSeverity, DocumentDiagnosticParams, DocumentDiagnosticReport,
|
||||
DocumentDiagnosticReportResult, FullDocumentDiagnosticReport, Position, Range,
|
||||
RelatedFullDocumentDiagnosticReport, Url,
|
||||
};
|
||||
|
||||
use red_knot_workspace::db::RootDatabase;
|
||||
@@ -56,16 +57,37 @@ fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Di
|
||||
diagnostics
|
||||
.as_slice()
|
||||
.iter()
|
||||
.map(|message| Diagnostic {
|
||||
range: Range::default(),
|
||||
severity: None,
|
||||
tags: None,
|
||||
code: None,
|
||||
code_description: None,
|
||||
source: Some("red-knot".into()),
|
||||
message: message.to_string(),
|
||||
related_information: None,
|
||||
data: None,
|
||||
})
|
||||
.map(|message| to_lsp_diagnostic(message))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn to_lsp_diagnostic(message: &str) -> Diagnostic {
|
||||
let words = message.split(':').collect::<Vec<_>>();
|
||||
|
||||
let (range, message) = match words.as_slice() {
|
||||
[_filename, line, column, message] => {
|
||||
let line = line.parse::<u32>().unwrap_or_default();
|
||||
let column = column.parse::<u32>().unwrap_or_default();
|
||||
(
|
||||
Range::new(
|
||||
Position::new(line.saturating_sub(1), column.saturating_sub(1)),
|
||||
Position::new(line, column),
|
||||
),
|
||||
message.trim(),
|
||||
)
|
||||
}
|
||||
_ => (Range::default(), message),
|
||||
};
|
||||
|
||||
Diagnostic {
|
||||
range,
|
||||
severity: Some(DiagnosticSeverity::ERROR),
|
||||
tags: None,
|
||||
code: None,
|
||||
code_description: None,
|
||||
source: Some("red-knot".into()),
|
||||
message: message.to_string(),
|
||||
related_information: None,
|
||||
data: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,7 +78,8 @@ impl Session {
|
||||
custom_typeshed: None,
|
||||
},
|
||||
};
|
||||
workspaces.insert(path, RootDatabase::new(metadata, program_settings, system));
|
||||
// TODO(micha): Handle the case where the program settings are incorrect more gracefully.
|
||||
workspaces.insert(path, RootDatabase::new(metadata, program_settings, system)?);
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
|
||||
@@ -49,7 +49,8 @@ impl Workspace {
|
||||
search_paths: SearchPathSettings::default(),
|
||||
};
|
||||
|
||||
let db = RootDatabase::new(workspace, program_settings, system.clone());
|
||||
let db =
|
||||
RootDatabase::new(workspace, program_settings, system.clone()).map_err(into_error)?;
|
||||
|
||||
Ok(Self { db, system })
|
||||
}
|
||||
|
||||
@@ -17,5 +17,5 @@ fn check() {
|
||||
|
||||
let result = workspace.check_file(&test).expect("Check to succeed");
|
||||
|
||||
assert_eq!(result, vec!["Unresolved import 'random22'"]);
|
||||
assert_eq!(result, vec!["/test.py:1:8: Unresolved import 'random22'"]);
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ red_knot_python_semantic = { workspace = true }
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache"] }
|
||||
ruff_python_ast = { workspace = true }
|
||||
ruff_text_size = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
crossbeam = { workspace = true }
|
||||
|
||||
@@ -28,7 +28,11 @@ pub struct RootDatabase {
|
||||
}
|
||||
|
||||
impl RootDatabase {
|
||||
pub fn new<S>(workspace: WorkspaceMetadata, settings: ProgramSettings, system: S) -> Self
|
||||
pub fn new<S>(
|
||||
workspace: WorkspaceMetadata,
|
||||
settings: ProgramSettings,
|
||||
system: S,
|
||||
) -> anyhow::Result<Self>
|
||||
where
|
||||
S: System + 'static + Send + Sync + RefUnwindSafe,
|
||||
{
|
||||
@@ -41,10 +45,10 @@ impl RootDatabase {
|
||||
|
||||
let workspace = Workspace::from_metadata(&db, workspace);
|
||||
// Initialize the `Program` singleton
|
||||
Program::from_settings(&db, settings);
|
||||
Program::from_settings(&db, settings)?;
|
||||
|
||||
db.workspace = Some(workspace);
|
||||
db
|
||||
Ok(db)
|
||||
}
|
||||
|
||||
pub fn workspace(&self) -> Workspace {
|
||||
@@ -150,6 +154,7 @@ impl Db for RootDatabase {}
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use salsa::Event;
|
||||
use std::sync::Arc;
|
||||
|
||||
use red_knot_python_semantic::{vendored_typeshed_stubs, Db as SemanticDb};
|
||||
use ruff_db::files::Files;
|
||||
@@ -162,6 +167,7 @@ pub(crate) mod tests {
|
||||
#[salsa::db]
|
||||
pub(crate) struct TestDb {
|
||||
storage: salsa::Storage<Self>,
|
||||
events: std::sync::Arc<std::sync::Mutex<Vec<salsa::Event>>>,
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
vendored: VendoredFileSystem,
|
||||
@@ -174,10 +180,24 @@ pub(crate) mod tests {
|
||||
system: TestSystem::default(),
|
||||
vendored: vendored_typeshed_stubs().clone(),
|
||||
files: Files::default(),
|
||||
events: Arc::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TestDb {
|
||||
/// Takes the salsa events.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If there are any pending salsa snapshots.
|
||||
pub(crate) fn take_salsa_events(&mut self) -> Vec<salsa::Event> {
|
||||
let inner = Arc::get_mut(&mut self.events).expect("no pending salsa snapshots");
|
||||
|
||||
let events = inner.get_mut().unwrap();
|
||||
std::mem::take(&mut *events)
|
||||
}
|
||||
}
|
||||
|
||||
impl DbWithTestSystem for TestDb {
|
||||
fn test_system(&self) -> &TestSystem {
|
||||
&self.system
|
||||
@@ -228,6 +248,9 @@ pub(crate) mod tests {
|
||||
|
||||
#[salsa::db]
|
||||
impl salsa::Database for TestDb {
|
||||
fn salsa_event(&self, _event: &dyn Fn() -> Event) {}
|
||||
fn salsa_event(&self, event: &dyn Fn() -> Event) {
|
||||
let mut events = self.events.lock().unwrap();
|
||||
events.push(event());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,9 +8,10 @@ use red_knot_python_semantic::types::Type;
|
||||
use red_knot_python_semantic::{HasTy, ModuleName, SemanticModel};
|
||||
use ruff_db::files::File;
|
||||
use ruff_db::parsed::{parsed_module, ParsedModule};
|
||||
use ruff_db::source::{source_text, SourceText};
|
||||
use ruff_db::source::{line_index, source_text, SourceText};
|
||||
use ruff_python_ast as ast;
|
||||
use ruff_python_ast::visitor::{walk_expr, walk_stmt, Visitor};
|
||||
use ruff_text_size::{Ranged, TextSize};
|
||||
|
||||
use crate::db::Db;
|
||||
|
||||
@@ -49,7 +50,18 @@ pub(crate) fn lint_syntax(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
visitor.visit_body(&ast.body);
|
||||
diagnostics = visitor.diagnostics;
|
||||
} else {
|
||||
diagnostics.extend(parsed.errors().iter().map(ToString::to_string));
|
||||
let path = file_id.path(db);
|
||||
let line_index = line_index(db.upcast(), file_id);
|
||||
diagnostics.extend(parsed.errors().iter().map(|err| {
|
||||
let source_location = line_index.source_location(err.location.start(), source.as_str());
|
||||
format!(
|
||||
"{}:{}:{}: {}",
|
||||
path.as_str(),
|
||||
source_location.row,
|
||||
source_location.column,
|
||||
err,
|
||||
)
|
||||
}));
|
||||
}
|
||||
|
||||
Diagnostics::from(diagnostics)
|
||||
@@ -97,6 +109,20 @@ pub fn lint_semantic(db: &dyn Db, file_id: File) -> Diagnostics {
|
||||
Diagnostics::from(context.diagnostics.take())
|
||||
}
|
||||
|
||||
fn format_diagnostic(context: &SemanticLintContext, message: &str, start: TextSize) -> String {
|
||||
let source_location = context
|
||||
.semantic
|
||||
.line_index()
|
||||
.source_location(start, context.source_text());
|
||||
format!(
|
||||
"{}:{}:{}: {}",
|
||||
context.semantic.file_path().as_str(),
|
||||
source_location.row,
|
||||
source_location.column,
|
||||
message,
|
||||
)
|
||||
}
|
||||
|
||||
fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef) {
|
||||
match import {
|
||||
AnyImportRef::Import(import) => {
|
||||
@@ -104,7 +130,11 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
alias.start(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -113,7 +143,11 @@ fn lint_unresolved_imports(context: &SemanticLintContext, import: AnyImportRef)
|
||||
let ty = alias.ty(&context.semantic);
|
||||
|
||||
if ty.is_unbound() {
|
||||
context.push_diagnostic(format!("Unresolved import '{}'", &alias.name));
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Unresolved import '{}'", &alias.name),
|
||||
alias.start(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -127,12 +161,17 @@ fn lint_maybe_undefined(context: &SemanticLintContext, name: &ast::ExprName) {
|
||||
let semantic = &context.semantic;
|
||||
match name.ty(semantic) {
|
||||
Type::Unbound => {
|
||||
context.push_diagnostic(format!("Name '{}' used when not defined.", &name.id));
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Name '{}' used when not defined.", &name.id),
|
||||
name.start(),
|
||||
));
|
||||
}
|
||||
Type::Union(union) if union.contains(semantic.db(), Type::Unbound) => {
|
||||
context.push_diagnostic(format!(
|
||||
"Name '{}' used when possibly not defined.",
|
||||
&name.id
|
||||
context.push_diagnostic(format_diagnostic(
|
||||
context,
|
||||
&format!("Name '{}' used when possibly not defined.", &name.id),
|
||||
name.start(),
|
||||
));
|
||||
}
|
||||
_ => {}
|
||||
@@ -303,9 +342,18 @@ enum AnyImportRef<'a> {
|
||||
ImportFrom(&'a ast::StmtImportFrom),
|
||||
}
|
||||
|
||||
impl Ranged for AnyImportRef<'_> {
|
||||
fn range(&self) -> ruff_text_size::TextRange {
|
||||
match self {
|
||||
AnyImportRef::Import(import) => import.range(),
|
||||
AnyImportRef::ImportFrom(import) => import.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use red_knot_python_semantic::{Program, PythonVersion, SearchPathSettings};
|
||||
use red_knot_python_semantic::{Program, ProgramSettings, PythonVersion, SearchPathSettings};
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||
|
||||
@@ -320,16 +368,23 @@ mod tests {
|
||||
fn setup_db_with_root(src_root: SystemPathBuf) -> TestDb {
|
||||
let db = TestDb::new();
|
||||
|
||||
Program::new(
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&src_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
&db,
|
||||
PythonVersion::default(),
|
||||
SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root,
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
ProgramSettings {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: Vec::new(),
|
||||
src_root,
|
||||
site_packages: vec![],
|
||||
custom_typeshed: None,
|
||||
},
|
||||
},
|
||||
);
|
||||
)
|
||||
.expect("Valid program settings");
|
||||
|
||||
db
|
||||
}
|
||||
@@ -356,10 +411,17 @@ mod tests {
|
||||
|
||||
assert_eq!(
|
||||
*messages,
|
||||
vec![
|
||||
"Name 'flag' used when not defined.",
|
||||
"Name 'y' used when possibly not defined."
|
||||
]
|
||||
if cfg!(windows) {
|
||||
vec![
|
||||
"\\src\\a.py:3:4: Name 'flag' used when not defined.",
|
||||
"\\src\\a.py:5:1: Name 'y' used when possibly not defined.",
|
||||
]
|
||||
} else {
|
||||
vec![
|
||||
"/src/a.py:3:4: Name 'flag' used when not defined.",
|
||||
"/src/a.py:5:1: Name 'y' used when possibly not defined.",
|
||||
]
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
use salsa::{Durability, Setter as _};
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use salsa::{Durability, Setter as _};
|
||||
|
||||
pub use metadata::{PackageMetadata, WorkspaceMetadata};
|
||||
use ruff_db::source::{source_text, SourceDiagnostic};
|
||||
use ruff_db::{
|
||||
files::{system_path_to_file, File},
|
||||
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
|
||||
@@ -345,12 +346,27 @@ impl Package {
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::tracked]
|
||||
pub(super) fn check_file(db: &dyn Db, file: File) -> Diagnostics {
|
||||
let path = file.path(db);
|
||||
let _span = tracing::debug_span!("check_file", file=%path).entered();
|
||||
tracing::debug!("Checking file {path}");
|
||||
|
||||
let mut diagnostics = Vec::new();
|
||||
|
||||
let source_diagnostics = source_text::accumulated::<SourceDiagnostic>(db.upcast(), file);
|
||||
// TODO(micha): Consider using a single accumulator for all diagnostics
|
||||
diagnostics.extend(
|
||||
source_diagnostics
|
||||
.iter()
|
||||
.map(std::string::ToString::to_string),
|
||||
);
|
||||
|
||||
// Abort checking if there are IO errors.
|
||||
if source_text(db.upcast(), file).has_read_error() {
|
||||
return Diagnostics::from(diagnostics);
|
||||
}
|
||||
|
||||
diagnostics.extend_from_slice(lint_syntax(db, file));
|
||||
diagnostics.extend_from_slice(lint_semantic(db, file));
|
||||
Diagnostics::from(diagnostics)
|
||||
@@ -398,3 +414,48 @@ fn discover_package_files(db: &dyn Db, path: &SystemPath) -> FxHashSet<File> {
|
||||
|
||||
files
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ruff_db::files::system_path_to_file;
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{DbWithTestSystem, SystemPath};
|
||||
use ruff_db::testing::assert_function_query_was_not_run;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::lint::{lint_syntax, Diagnostics};
|
||||
use crate::workspace::check_file;
|
||||
|
||||
#[test]
|
||||
fn check_file_skips_linting_when_file_cant_be_read() -> ruff_db::system::Result<()> {
|
||||
let mut db = TestDb::new();
|
||||
let path = SystemPath::new("test.py");
|
||||
|
||||
db.write_file(path, "x = 10")?;
|
||||
let file = system_path_to_file(&db, path).unwrap();
|
||||
|
||||
// Now the file gets deleted before we had a chance to read its source text.
|
||||
db.memory_file_system().remove_file(path)?;
|
||||
file.sync(&mut db);
|
||||
|
||||
assert_eq!(source_text(&db, file).as_str(), "");
|
||||
assert_eq!(
|
||||
check_file(&db, file),
|
||||
Diagnostics::List(vec![
|
||||
"Failed to read file: No such file or directory".to_string()
|
||||
])
|
||||
);
|
||||
|
||||
let events = db.take_salsa_events();
|
||||
assert_function_query_was_not_run(&db, lint_syntax, file, &events);
|
||||
|
||||
// The user now creates a new file with an empty text. The source text
|
||||
// content returned by `source_text` remains unchanged, but the diagnostics should get updated.
|
||||
db.write_file(path, "").unwrap();
|
||||
|
||||
assert_eq!(source_text(&db, file).as_str(), "");
|
||||
assert_eq!(check_file(&db, file), Diagnostics::Empty);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,8 +20,7 @@ fn setup_db(workspace_root: SystemPathBuf) -> anyhow::Result<RootDatabase> {
|
||||
target_version: PythonVersion::default(),
|
||||
search_paths,
|
||||
};
|
||||
let db = RootDatabase::new(workspace, settings, system);
|
||||
Ok(db)
|
||||
RootDatabase::new(workspace, settings, system)
|
||||
}
|
||||
|
||||
/// Test that all snippets in testcorpus can be checked without panic
|
||||
|
||||
@@ -268,8 +268,7 @@ mod test {
|
||||
|
||||
// Run
|
||||
let diagnostics = check(
|
||||
// Notebooks are not included by default
|
||||
&[tempdir.path().to_path_buf(), notebook],
|
||||
&[tempdir.path().to_path_buf()],
|
||||
&pyproject_config,
|
||||
&ConfigArguments::default(),
|
||||
flags::Cache::Disabled,
|
||||
|
||||
@@ -1806,7 +1806,7 @@ select = ["UP006"]
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
|
||||
fn checks_notebooks_in_stable() -> anyhow::Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
std::fs::write(
|
||||
tempdir.path().join("main.ipynb"),
|
||||
@@ -1853,7 +1853,6 @@ fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--select")
|
||||
.arg("F401")
|
||||
.arg("--preview")
|
||||
.current_dir(&tempdir)
|
||||
, @r###"
|
||||
success: false
|
||||
@@ -1867,64 +1866,3 @@ fn checks_notebooks_in_preview_mode() -> anyhow::Result<()> {
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignores_notebooks_in_stable() -> anyhow::Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
std::fs::write(
|
||||
tempdir.path().join("main.ipynb"),
|
||||
r#"
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "ad6f36d9-4b7d-4562-8d00-f15a0f1fbb6d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import random"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.0"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(STDIN_BASE_OPTIONS)
|
||||
.arg("--select")
|
||||
.arg("F401")
|
||||
.current_dir(&tempdir)
|
||||
, @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: No Python files found under the given path(s)
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -60,6 +60,7 @@ file_resolver.force_exclude = false
|
||||
file_resolver.include = [
|
||||
"*.py",
|
||||
"*.pyi",
|
||||
"*.ipynb",
|
||||
"**/pyproject.toml",
|
||||
]
|
||||
file_resolver.extend_include = []
|
||||
@@ -209,6 +210,7 @@ linter.logger_objects = []
|
||||
linter.namespace_packages = []
|
||||
linter.src = [
|
||||
"[BASEPATH]",
|
||||
"[BASEPATH]/src",
|
||||
]
|
||||
linter.tab_size = 4
|
||||
linter.line_length = 88
|
||||
@@ -260,10 +262,11 @@ linter.flake8_import_conventions.aliases = {
|
||||
seaborn = sns,
|
||||
tensorflow = tf,
|
||||
tkinter = tk,
|
||||
xml.etree.ElementTree = ET,
|
||||
}
|
||||
linter.flake8_import_conventions.banned_aliases = {}
|
||||
linter.flake8_import_conventions.banned_from = []
|
||||
linter.flake8_pytest_style.fixture_parentheses = true
|
||||
linter.flake8_pytest_style.fixture_parentheses = false
|
||||
linter.flake8_pytest_style.parametrize_names_type = tuple
|
||||
linter.flake8_pytest_style.parametrize_values_type = list
|
||||
linter.flake8_pytest_style.parametrize_values_row_type = tuple
|
||||
@@ -277,7 +280,7 @@ linter.flake8_pytest_style.raises_require_match_for = [
|
||||
socket.error,
|
||||
]
|
||||
linter.flake8_pytest_style.raises_extend_require_match_for = []
|
||||
linter.flake8_pytest_style.mark_parentheses = true
|
||||
linter.flake8_pytest_style.mark_parentheses = false
|
||||
linter.flake8_quotes.inline_quotes = double
|
||||
linter.flake8_quotes.multiline_quotes = double
|
||||
linter.flake8_quotes.docstring_quotes = double
|
||||
|
||||
@@ -52,7 +52,7 @@ fn setup_case() -> Case {
|
||||
},
|
||||
};
|
||||
|
||||
let mut db = RootDatabase::new(metadata, settings, system);
|
||||
let mut db = RootDatabase::new(metadata, settings, system).unwrap();
|
||||
let parser = system_path_to_file(&db, parser_path).unwrap();
|
||||
|
||||
db.workspace().open_file(&mut db, parser);
|
||||
@@ -89,7 +89,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
|
||||
let Case { db, parser, .. } = case;
|
||||
let result = db.check_file(*parser).unwrap();
|
||||
|
||||
assert_eq!(result.len(), 403);
|
||||
assert_eq!(result.len(), 402);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
@@ -104,7 +104,7 @@ fn benchmark_cold(criterion: &mut Criterion) {
|
||||
let Case { db, parser, .. } = case;
|
||||
let result = db.check_file(*parser).unwrap();
|
||||
|
||||
assert_eq!(result.len(), 403);
|
||||
assert_eq!(result.len(), 402);
|
||||
},
|
||||
BatchSize::SmallInput,
|
||||
);
|
||||
|
||||
@@ -27,6 +27,7 @@ ignore = { workspace = true, optional = true }
|
||||
matchit = { workspace = true }
|
||||
salsa = { workspace = true }
|
||||
path-slash = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
tracing-subscriber = { workspace = true, optional = true }
|
||||
tracing-tree = { workspace = true, optional = true }
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use std::fmt::Formatter;
|
||||
use std::ops::Deref;
|
||||
use std::sync::Arc;
|
||||
|
||||
use countme::Count;
|
||||
use salsa::Accumulator;
|
||||
|
||||
use ruff_notebook::Notebook;
|
||||
use ruff_python_ast::PySourceType;
|
||||
@@ -15,8 +17,42 @@ use crate::Db;
|
||||
pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
let path = file.path(db);
|
||||
let _span = tracing::trace_span!("source_text", file = %path).entered();
|
||||
let mut has_read_error = false;
|
||||
|
||||
let is_notebook = match path {
|
||||
let kind = if is_notebook(file.path(db)) {
|
||||
file.read_to_notebook(db)
|
||||
.unwrap_or_else(|error| {
|
||||
tracing::debug!("Failed to read notebook {path}: {error}");
|
||||
|
||||
has_read_error = true;
|
||||
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadNotebook(error)))
|
||||
.accumulate(db);
|
||||
Notebook::empty()
|
||||
})
|
||||
.into()
|
||||
} else {
|
||||
file.read_to_string(db)
|
||||
.unwrap_or_else(|error| {
|
||||
tracing::debug!("Failed to read file {path}: {error}");
|
||||
|
||||
has_read_error = true;
|
||||
SourceDiagnostic(Arc::new(SourceTextError::FailedToReadFile(error))).accumulate(db);
|
||||
String::new()
|
||||
})
|
||||
.into()
|
||||
};
|
||||
|
||||
SourceText {
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind,
|
||||
has_read_error,
|
||||
count: Count::new(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_notebook(path: &FilePath) -> bool {
|
||||
match path {
|
||||
FilePath::System(system) => system.extension().is_some_and(|extension| {
|
||||
PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
|
||||
}),
|
||||
@@ -26,33 +62,6 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
|
||||
})
|
||||
}
|
||||
FilePath::Vendored(_) => false,
|
||||
};
|
||||
|
||||
if is_notebook {
|
||||
// TODO(micha): Proper error handling and emit a diagnostic. Tackle it together with `source_text`.
|
||||
let notebook = file.read_to_notebook(db).unwrap_or_else(|error| {
|
||||
tracing::error!("Failed to load notebook: {error}");
|
||||
Notebook::empty()
|
||||
});
|
||||
|
||||
return SourceText {
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind: SourceTextKind::Notebook(notebook),
|
||||
count: Count::new(),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
let content = file.read_to_string(db).unwrap_or_else(|error| {
|
||||
tracing::error!("Failed to load file: {error}");
|
||||
String::default()
|
||||
});
|
||||
|
||||
SourceText {
|
||||
inner: Arc::new(SourceTextInner {
|
||||
kind: SourceTextKind::Text(content),
|
||||
count: Count::new(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,6 +96,11 @@ impl SourceText {
|
||||
pub fn is_notebook(&self) -> bool {
|
||||
matches!(&self.inner.kind, SourceTextKind::Notebook(_))
|
||||
}
|
||||
|
||||
/// Returns `true` if there was an error when reading the content of the file.
|
||||
pub fn has_read_error(&self) -> bool {
|
||||
self.inner.has_read_error
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for SourceText {
|
||||
@@ -118,6 +132,7 @@ impl std::fmt::Debug for SourceText {
|
||||
struct SourceTextInner {
|
||||
count: Count<SourceText>,
|
||||
kind: SourceTextKind,
|
||||
has_read_error: bool,
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq)]
|
||||
@@ -126,6 +141,35 @@ enum SourceTextKind {
|
||||
Notebook(Notebook),
|
||||
}
|
||||
|
||||
impl From<String> for SourceTextKind {
|
||||
fn from(value: String) -> Self {
|
||||
SourceTextKind::Text(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Notebook> for SourceTextKind {
|
||||
fn from(notebook: Notebook) -> Self {
|
||||
SourceTextKind::Notebook(notebook)
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::accumulator]
|
||||
pub struct SourceDiagnostic(Arc<SourceTextError>);
|
||||
|
||||
impl std::fmt::Display for SourceDiagnostic {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
std::fmt::Display::fmt(&self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum SourceTextError {
|
||||
#[error("Failed to read notebook: {0}`")]
|
||||
FailedToReadNotebook(#[from] ruff_notebook::NotebookError),
|
||||
#[error("Failed to read file: {0}")]
|
||||
FailedToReadFile(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
/// Computes the [`LineIndex`] for `file`.
|
||||
#[salsa::tracked]
|
||||
pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
|
||||
|
||||
@@ -368,3 +368,11 @@ def foo() -> int:
|
||||
if baz() > 3:
|
||||
return 1
|
||||
bar()
|
||||
|
||||
|
||||
def f():
|
||||
if a:
|
||||
return b
|
||||
else:
|
||||
with c:
|
||||
d
|
||||
|
||||
@@ -244,3 +244,11 @@ def f():
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def has_untracted_files():
|
||||
if b'Untracked files' in result.stdout:
|
||||
return True
|
||||
else:
|
||||
\
|
||||
return False
|
||||
|
||||
18
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/kw_only.py
vendored
Normal file
18
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/kw_only.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Test: avoid marking a `KW_ONLY` annotation as typing-only."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import KW_ONLY, dataclass, Field
|
||||
|
||||
|
||||
@dataclass
|
||||
class Test1:
|
||||
a: int
|
||||
_: KW_ONLY
|
||||
b: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class Test2:
|
||||
a: int
|
||||
b: Field
|
||||
@@ -111,3 +111,7 @@ def can_access_inside_nested[T](t: T) -> T: # OK
|
||||
return x
|
||||
|
||||
bar(t)
|
||||
|
||||
|
||||
def cannot_access_in_default[T](t: T = T): # F821
|
||||
pass
|
||||
|
||||
@@ -139,3 +139,33 @@ print("%.20X" % 1)
|
||||
|
||||
print("%2X" % 1)
|
||||
print("%02X" % 1)
|
||||
|
||||
# UP031 (no longer false negatives, but offer no fix because of more complex syntax)
|
||||
|
||||
"%d.%d" % (a, b)
|
||||
|
||||
"%*s" % (5, "hi")
|
||||
|
||||
"%d" % (flt,)
|
||||
|
||||
"%c" % (some_string,)
|
||||
|
||||
"%.2r" % (1.25)
|
||||
|
||||
"%.*s" % (5, "hi")
|
||||
|
||||
"%i" % (flt,)
|
||||
|
||||
"%()s" % {"": "empty"}
|
||||
|
||||
"%s" % {"k": "v"}
|
||||
|
||||
"%()s" % {"": "bar"}
|
||||
|
||||
"%(1)s" % {"1": "bar"}
|
||||
|
||||
"%(a)s" % {"a": 1, "a": 2}
|
||||
|
||||
"%(1)s" % {1: 2, "1": 2}
|
||||
|
||||
"%(and)s" % {"and": 2}
|
||||
|
||||
@@ -1,34 +1,8 @@
|
||||
# OK
|
||||
b"%s" % (b"bytestring",)
|
||||
|
||||
"%*s" % (5, "hi")
|
||||
|
||||
"%d" % (flt,)
|
||||
|
||||
"%c" % (some_string,)
|
||||
|
||||
"%4%" % ()
|
||||
|
||||
"%.2r" % (1.25)
|
||||
|
||||
i % 3
|
||||
|
||||
"%.*s" % (5, "hi")
|
||||
|
||||
"%i" % (flt,)
|
||||
|
||||
"%()s" % {"": "empty"}
|
||||
|
||||
"%s" % {"k": "v"}
|
||||
|
||||
"%(1)s" % {"1": "bar"}
|
||||
|
||||
"%(a)s" % {"a": 1, "a": 2}
|
||||
|
||||
pytest.param('"%8s" % (None,)', id="unsafe width-string conversion"),
|
||||
|
||||
"%()s" % {"": "bar"}
|
||||
|
||||
"%(1)s" % {1: 2, "1": 2}
|
||||
|
||||
"%(and)s" % {"and": 2}
|
||||
|
||||
@@ -45,3 +45,17 @@ def negative_cases():
|
||||
|
||||
import django.utils.translations
|
||||
y = django.utils.translations.gettext("This {should} be understood as a translation string too!")
|
||||
|
||||
# Calling `gettext.install()` literall monkey-patches `builtins._ = ...`,
|
||||
# so even the fully qualified access of `builtins._()` should be considered
|
||||
# a possible `gettext` call.
|
||||
import builtins
|
||||
another = 42
|
||||
z = builtins._("{another} translation string")
|
||||
|
||||
# Usually logging strings use `%`-style string interpolation,
|
||||
# but `logging` can be configured to use `{}` the same as f-strings,
|
||||
# so these should also be ignored.
|
||||
# See https://docs.python.org/3/howto/logging-cookbook.html#formatting-styles
|
||||
import logging
|
||||
logging.info("yet {another} non-f-string")
|
||||
|
||||
@@ -1077,12 +1077,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
}
|
||||
if checker.enabled(Rule::MissingFStringSyntax) {
|
||||
for string_literal in value.literals() {
|
||||
ruff::rules::missing_fstring_syntax(
|
||||
&mut checker.diagnostics,
|
||||
string_literal,
|
||||
checker.locator,
|
||||
&checker.semantic,
|
||||
);
|
||||
ruff::rules::missing_fstring_syntax(checker, string_literal);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1378,12 +1373,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
}
|
||||
if checker.enabled(Rule::MissingFStringSyntax) {
|
||||
for string_literal in value.as_slice() {
|
||||
ruff::rules::missing_fstring_syntax(
|
||||
&mut checker.diagnostics,
|
||||
string_literal,
|
||||
checker.locator,
|
||||
&checker.semantic,
|
||||
);
|
||||
ruff::rules::missing_fstring_syntax(checker, string_literal);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1498,7 +1488,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::UnnecessaryDictComprehensionForIterable) {
|
||||
ruff::rules::unnecessary_dict_comprehension_for_iterable(checker, dict_comp);
|
||||
flake8_comprehensions::rules::unnecessary_dict_comprehension_for_iterable(
|
||||
checker, dict_comp,
|
||||
);
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::FunctionUsesLoopVariable) {
|
||||
|
||||
@@ -229,12 +229,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
Rule::SuperfluousElseContinue,
|
||||
Rule::SuperfluousElseBreak,
|
||||
]) {
|
||||
flake8_return::rules::function(
|
||||
checker,
|
||||
body,
|
||||
decorator_list,
|
||||
returns.as_ref().map(AsRef::as_ref),
|
||||
);
|
||||
flake8_return::rules::function(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::UselessReturn) {
|
||||
pylint::rules::useless_return(
|
||||
|
||||
@@ -691,6 +691,14 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
self.semantic(),
|
||||
);
|
||||
|
||||
// The default values of the parameters needs to be evaluated in the enclosing
|
||||
// scope.
|
||||
for parameter in &**parameters {
|
||||
if let Some(expr) = parameter.default() {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
|
||||
self.semantic.push_scope(ScopeKind::Type);
|
||||
|
||||
if let Some(type_params) = type_params {
|
||||
@@ -715,9 +723,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(expr) = parameter.default() {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
if let Some(expr) = returns {
|
||||
match annotation {
|
||||
@@ -1290,8 +1295,8 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
let Keyword { arg, value, .. } = keyword;
|
||||
match (arg.as_ref(), value) {
|
||||
// Ex) NamedTuple("a", **{"a": int})
|
||||
(None, Expr::Dict(ast::ExprDict { items, .. })) => {
|
||||
for ast::DictItem { key, value } in items {
|
||||
(None, Expr::Dict(dict)) => {
|
||||
for ast::DictItem { key, value } in dict {
|
||||
if let Some(key) = key.as_ref() {
|
||||
self.visit_non_type_definition(key);
|
||||
self.visit_type_definition(value);
|
||||
|
||||
@@ -4,12 +4,12 @@
|
||||
/// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
|
||||
use std::fmt::Formatter;
|
||||
|
||||
use strum_macros::{AsRefStr, EnumIter};
|
||||
|
||||
use crate::registry::{AsRule, Linter};
|
||||
use crate::rule_selector::is_single_rule_selector;
|
||||
use crate::rules;
|
||||
|
||||
use strum_macros::{AsRefStr, EnumIter};
|
||||
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct NoqaCode(&'static str, &'static str);
|
||||
|
||||
@@ -206,12 +206,12 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "E0237") => (RuleGroup::Stable, rules::pylint::rules::NonSlotAssignment),
|
||||
(Pylint, "E0241") => (RuleGroup::Stable, rules::pylint::rules::DuplicateBases),
|
||||
(Pylint, "E0302") => (RuleGroup::Stable, rules::pylint::rules::UnexpectedSpecialMethodSignature),
|
||||
(Pylint, "E0303") => (RuleGroup::Preview, rules::pylint::rules::InvalidLengthReturnType),
|
||||
(Pylint, "E0303") => (RuleGroup::Stable, rules::pylint::rules::InvalidLengthReturnType),
|
||||
(Pylint, "E0304") => (RuleGroup::Preview, rules::pylint::rules::InvalidBoolReturnType),
|
||||
(Pylint, "E0305") => (RuleGroup::Preview, rules::pylint::rules::InvalidIndexReturnType),
|
||||
(Pylint, "E0305") => (RuleGroup::Stable, rules::pylint::rules::InvalidIndexReturnType),
|
||||
(Pylint, "E0307") => (RuleGroup::Stable, rules::pylint::rules::InvalidStrReturnType),
|
||||
(Pylint, "E0308") => (RuleGroup::Preview, rules::pylint::rules::InvalidBytesReturnType),
|
||||
(Pylint, "E0309") => (RuleGroup::Preview, rules::pylint::rules::InvalidHashReturnType),
|
||||
(Pylint, "E0308") => (RuleGroup::Stable, rules::pylint::rules::InvalidBytesReturnType),
|
||||
(Pylint, "E0309") => (RuleGroup::Stable, rules::pylint::rules::InvalidHashReturnType),
|
||||
(Pylint, "E0604") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllObject),
|
||||
(Pylint, "E0605") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllFormat),
|
||||
(Pylint, "E0643") => (RuleGroup::Stable, rules::pylint::rules::PotentialIndexError),
|
||||
@@ -225,8 +225,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "E1307") => (RuleGroup::Stable, rules::pylint::rules::BadStringFormatType),
|
||||
(Pylint, "E1310") => (RuleGroup::Stable, rules::pylint::rules::BadStrStripCall),
|
||||
(Pylint, "E1507") => (RuleGroup::Stable, rules::pylint::rules::InvalidEnvvarValue),
|
||||
(Pylint, "E1519") => (RuleGroup::Preview, rules::pylint::rules::SingledispatchMethod),
|
||||
(Pylint, "E1520") => (RuleGroup::Preview, rules::pylint::rules::SingledispatchmethodFunction),
|
||||
(Pylint, "E1519") => (RuleGroup::Stable, rules::pylint::rules::SingledispatchMethod),
|
||||
(Pylint, "E1520") => (RuleGroup::Stable, rules::pylint::rules::SingledispatchmethodFunction),
|
||||
(Pylint, "E1700") => (RuleGroup::Stable, rules::pylint::rules::YieldFromInAsyncFunction),
|
||||
(Pylint, "E2502") => (RuleGroup::Stable, rules::pylint::rules::BidirectionalUnicode),
|
||||
(Pylint, "E2510") => (RuleGroup::Stable, rules::pylint::rules::InvalidCharacterBackspace),
|
||||
@@ -256,7 +256,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "R1711") => (RuleGroup::Stable, rules::pylint::rules::UselessReturn),
|
||||
(Pylint, "R1714") => (RuleGroup::Stable, rules::pylint::rules::RepeatedEqualityComparison),
|
||||
(Pylint, "R1722") => (RuleGroup::Stable, rules::pylint::rules::SysExitAlias),
|
||||
(Pylint, "R1730") => (RuleGroup::Preview, rules::pylint::rules::IfStmtMinMax),
|
||||
(Pylint, "R1730") => (RuleGroup::Stable, rules::pylint::rules::IfStmtMinMax),
|
||||
(Pylint, "R1733") => (RuleGroup::Preview, rules::pylint::rules::UnnecessaryDictIndexLookup),
|
||||
(Pylint, "R1736") => (RuleGroup::Stable, rules::pylint::rules::UnnecessaryListIndexLookup),
|
||||
(Pylint, "R2004") => (RuleGroup::Stable, rules::pylint::rules::MagicValueComparison),
|
||||
@@ -273,13 +273,13 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "W0129") => (RuleGroup::Stable, rules::pylint::rules::AssertOnStringLiteral),
|
||||
(Pylint, "W0131") => (RuleGroup::Stable, rules::pylint::rules::NamedExprWithoutContext),
|
||||
(Pylint, "W0133") => (RuleGroup::Stable, rules::pylint::rules::UselessExceptionStatement),
|
||||
(Pylint, "W0211") => (RuleGroup::Preview, rules::pylint::rules::BadStaticmethodArgument),
|
||||
(Pylint, "W0211") => (RuleGroup::Stable, rules::pylint::rules::BadStaticmethodArgument),
|
||||
(Pylint, "W0245") => (RuleGroup::Stable, rules::pylint::rules::SuperWithoutBrackets),
|
||||
(Pylint, "W0406") => (RuleGroup::Stable, rules::pylint::rules::ImportSelf),
|
||||
(Pylint, "W0602") => (RuleGroup::Stable, rules::pylint::rules::GlobalVariableNotAssigned),
|
||||
(Pylint, "W0603") => (RuleGroup::Stable, rules::pylint::rules::GlobalStatement),
|
||||
(Pylint, "W0604") => (RuleGroup::Stable, rules::pylint::rules::GlobalAtModuleLevel),
|
||||
(Pylint, "W0642") => (RuleGroup::Preview, rules::pylint::rules::SelfOrClsAssignment),
|
||||
(Pylint, "W0642") => (RuleGroup::Stable, rules::pylint::rules::SelfOrClsAssignment),
|
||||
(Pylint, "W0711") => (RuleGroup::Stable, rules::pylint::rules::BinaryOpException),
|
||||
(Pylint, "W1501") => (RuleGroup::Stable, rules::pylint::rules::BadOpenMode),
|
||||
(Pylint, "W1508") => (RuleGroup::Stable, rules::pylint::rules::InvalidEnvvarDefault),
|
||||
@@ -378,6 +378,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Comprehensions, "17") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryMap),
|
||||
(Flake8Comprehensions, "18") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryLiteralWithinDictCall),
|
||||
(Flake8Comprehensions, "19") => (RuleGroup::Stable, rules::flake8_comprehensions::rules::UnnecessaryComprehensionInCall),
|
||||
(Flake8Comprehensions, "20") => (RuleGroup::Preview, rules::flake8_comprehensions::rules::UnnecessaryDictComprehensionForIterable),
|
||||
|
||||
// flake8-debugger
|
||||
(Flake8Debugger, "0") => (RuleGroup::Stable, rules::flake8_debugger::rules::Debugger),
|
||||
@@ -509,7 +510,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pyupgrade, "024") => (RuleGroup::Stable, rules::pyupgrade::rules::OSErrorAlias),
|
||||
(Pyupgrade, "025") => (RuleGroup::Stable, rules::pyupgrade::rules::UnicodeKindPrefix),
|
||||
(Pyupgrade, "026") => (RuleGroup::Stable, rules::pyupgrade::rules::DeprecatedMockImport),
|
||||
(Pyupgrade, "027") => (RuleGroup::Stable, rules::pyupgrade::rules::UnpackedListComprehension),
|
||||
(Pyupgrade, "027") => (RuleGroup::Deprecated, rules::pyupgrade::rules::UnpackedListComprehension),
|
||||
(Pyupgrade, "028") => (RuleGroup::Stable, rules::pyupgrade::rules::YieldInForLoop),
|
||||
(Pyupgrade, "029") => (RuleGroup::Stable, rules::pyupgrade::rules::UnnecessaryBuiltinImport),
|
||||
(Pyupgrade, "030") => (RuleGroup::Stable, rules::pyupgrade::rules::FormatLiterals),
|
||||
@@ -778,9 +779,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8Pyi, "055") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnnecessaryTypeUnion),
|
||||
(Flake8Pyi, "056") => (RuleGroup::Stable, rules::flake8_pyi::rules::UnsupportedMethodCallOnAll),
|
||||
(Flake8Pyi, "058") => (RuleGroup::Stable, rules::flake8_pyi::rules::GeneratorReturnFromIterMethod),
|
||||
(Flake8Pyi, "057") => (RuleGroup::Preview, rules::flake8_pyi::rules::ByteStringUsage),
|
||||
(Flake8Pyi, "057") => (RuleGroup::Stable, rules::flake8_pyi::rules::ByteStringUsage),
|
||||
(Flake8Pyi, "059") => (RuleGroup::Preview, rules::flake8_pyi::rules::GenericNotLastBaseClass),
|
||||
(Flake8Pyi, "062") => (RuleGroup::Preview, rules::flake8_pyi::rules::DuplicateLiteralMember),
|
||||
(Flake8Pyi, "062") => (RuleGroup::Stable, rules::flake8_pyi::rules::DuplicateLiteralMember),
|
||||
(Flake8Pyi, "063") => (RuleGroup::Preview, rules::flake8_pyi::rules::PrePep570PositionalArgument),
|
||||
(Flake8Pyi, "064") => (RuleGroup::Preview, rules::flake8_pyi::rules::RedundantFinalLiteral),
|
||||
(Flake8Pyi, "066") => (RuleGroup::Preview, rules::flake8_pyi::rules::BadVersionInfoOrder),
|
||||
@@ -789,8 +790,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Flake8PytestStyle, "001") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestFixtureIncorrectParenthesesStyle),
|
||||
(Flake8PytestStyle, "002") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestFixturePositionalArgs),
|
||||
(Flake8PytestStyle, "003") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestExtraneousScopeFunction),
|
||||
(Flake8PytestStyle, "004") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore),
|
||||
(Flake8PytestStyle, "005") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore),
|
||||
(Flake8PytestStyle, "004") => (RuleGroup::Deprecated, rules::flake8_pytest_style::rules::PytestMissingFixtureNameUnderscore),
|
||||
(Flake8PytestStyle, "005") => (RuleGroup::Deprecated, rules::flake8_pytest_style::rules::PytestIncorrectFixtureNameUnderscore),
|
||||
(Flake8PytestStyle, "006") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestParametrizeNamesWrongType),
|
||||
(Flake8PytestStyle, "007") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestParametrizeValuesWrongType),
|
||||
(Flake8PytestStyle, "008") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestPatchWithLambda),
|
||||
@@ -951,7 +952,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "022") => (RuleGroup::Preview, rules::ruff::rules::UnsortedDunderAll),
|
||||
(Ruff, "023") => (RuleGroup::Preview, rules::ruff::rules::UnsortedDunderSlots),
|
||||
(Ruff, "024") => (RuleGroup::Stable, rules::ruff::rules::MutableFromkeysValue),
|
||||
(Ruff, "025") => (RuleGroup::Preview, rules::ruff::rules::UnnecessaryDictComprehensionForIterable),
|
||||
(Ruff, "026") => (RuleGroup::Stable, rules::ruff::rules::DefaultFactoryKwarg),
|
||||
(Ruff, "027") => (RuleGroup::Preview, rules::ruff::rules::MissingFStringSyntax),
|
||||
(Ruff, "028") => (RuleGroup::Preview, rules::ruff::rules::InvalidFormatterSuppressionComment),
|
||||
@@ -959,7 +959,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Ruff, "030") => (RuleGroup::Preview, rules::ruff::rules::AssertWithPrintMessage),
|
||||
(Ruff, "031") => (RuleGroup::Preview, rules::ruff::rules::IncorrectlyParenthesizedTupleInSubscript),
|
||||
(Ruff, "100") => (RuleGroup::Stable, rules::ruff::rules::UnusedNOQA),
|
||||
(Ruff, "101") => (RuleGroup::Preview, rules::ruff::rules::RedirectedNOQA),
|
||||
(Ruff, "101") => (RuleGroup::Stable, rules::ruff::rules::RedirectedNOQA),
|
||||
|
||||
(Ruff, "200") => (RuleGroup::Stable, rules::ruff::rules::InvalidPyprojectToml),
|
||||
#[cfg(any(feature = "test-rules", test))]
|
||||
|
||||
@@ -151,16 +151,15 @@ pub(crate) fn add_to_dunder_all<'a>(
|
||||
stylist: &Stylist,
|
||||
) -> Vec<Edit> {
|
||||
let (insertion_point, export_prefix_length) = match expr {
|
||||
Expr::List(ExprList { elts, range, .. }) => (
|
||||
elts.last()
|
||||
.map_or(range.end() - "]".text_len(), Ranged::end),
|
||||
Expr::List(ExprList { elts, .. }) => (
|
||||
elts.last().map_or(expr.end() - "]".text_len(), Ranged::end),
|
||||
elts.len(),
|
||||
),
|
||||
Expr::Tuple(tup) if tup.parenthesized => (
|
||||
tup.elts
|
||||
.last()
|
||||
.map_or(tup.end() - ")".text_len(), Ranged::end),
|
||||
tup.elts.len(),
|
||||
tup.len(),
|
||||
),
|
||||
Expr::Tuple(tup) if !tup.parenthesized => (
|
||||
tup.elts
|
||||
@@ -168,7 +167,7 @@ pub(crate) fn add_to_dunder_all<'a>(
|
||||
.expect("unparenthesized empty tuple is not possible")
|
||||
.range()
|
||||
.end(),
|
||||
tup.elts.len(),
|
||||
tup.len(),
|
||||
),
|
||||
_ => {
|
||||
// we don't know how to insert into this expression
|
||||
@@ -318,26 +317,28 @@ pub(crate) fn adjust_indentation(
|
||||
line_indentation.contains('\t') && line_indentation.contains(' ')
|
||||
});
|
||||
|
||||
if contains_multiline_string || mixed_indentation {
|
||||
let module_text = format!("def f():{}{contents}", stylist.line_ending().as_str());
|
||||
|
||||
let mut tree = match_statement(&module_text)?;
|
||||
|
||||
let embedding = match_function_def(&mut tree)?;
|
||||
|
||||
let indented_block = match_indented_block(&mut embedding.body)?;
|
||||
indented_block.indent = Some(indentation);
|
||||
|
||||
let module_text = indented_block.codegen_stylist(stylist);
|
||||
let module_text = module_text
|
||||
.strip_prefix(stylist.line_ending().as_str())
|
||||
.unwrap()
|
||||
.to_string();
|
||||
Ok(module_text)
|
||||
} else {
|
||||
// Otherwise, we can do a simple adjustment ourselves.
|
||||
Ok(dedent_to(contents, indentation))
|
||||
// For simple cases, try to do a manual dedent.
|
||||
if !contains_multiline_string && !mixed_indentation {
|
||||
if let Some(dedent) = dedent_to(contents, indentation) {
|
||||
return Ok(dedent);
|
||||
}
|
||||
}
|
||||
|
||||
let module_text = format!("def f():{}{contents}", stylist.line_ending().as_str());
|
||||
|
||||
let mut tree = match_statement(&module_text)?;
|
||||
|
||||
let embedding = match_function_def(&mut tree)?;
|
||||
|
||||
let indented_block = match_indented_block(&mut embedding.body)?;
|
||||
indented_block.indent = Some(indentation);
|
||||
|
||||
let module_text = indented_block.codegen_stylist(stylist);
|
||||
let module_text = module_text
|
||||
.strip_prefix(stylist.line_ending().as_str())
|
||||
.unwrap()
|
||||
.to_string();
|
||||
Ok(module_text)
|
||||
}
|
||||
|
||||
/// Determine if a vector contains only one, specific element.
|
||||
|
||||
@@ -791,6 +791,23 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_vscode_language_id() -> Result<()> {
|
||||
let actual = notebook_path("vscode_language_id.ipynb");
|
||||
let expected = notebook_path("vscode_language_id_expected.ipynb");
|
||||
let TestedNotebook {
|
||||
messages,
|
||||
source_notebook,
|
||||
..
|
||||
} = assert_notebook_path(
|
||||
&actual,
|
||||
expected,
|
||||
&settings::LinterSettings::for_rule(Rule::UnusedImport),
|
||||
)?;
|
||||
assert_messages!(messages, actual, source_notebook);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("before_fix.ipynb"), true; "trailing_newline")]
|
||||
#[test_case(Path::new("no_trailing_newline.ipynb"), false; "no_trailing_newline")]
|
||||
fn test_trailing_newline(path: &Path, trailing_newline: bool) -> Result<()> {
|
||||
|
||||
@@ -123,5 +123,7 @@ static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
|
||||
("RUF96", "RUF95"),
|
||||
// See: https://github.com/astral-sh/ruff/issues/10791
|
||||
("PLW0117", "PLW0177"),
|
||||
// See: https://github.com/astral-sh/ruff/issues/12110
|
||||
("RUF025", "C420"),
|
||||
])
|
||||
});
|
||||
|
||||
@@ -122,17 +122,15 @@ fn is_identical_types(
|
||||
return_value: &Expr,
|
||||
semantic: &SemanticModel,
|
||||
) -> bool {
|
||||
if let (Some(response_mode_name_expr), Some(return_value_name_expr)) = (
|
||||
response_model_arg.as_name_expr(),
|
||||
return_value.as_name_expr(),
|
||||
) {
|
||||
if let (Expr::Name(response_mode_name_expr), Expr::Name(return_value_name_expr)) =
|
||||
(response_model_arg, return_value)
|
||||
{
|
||||
return semantic.resolve_name(response_mode_name_expr)
|
||||
== semantic.resolve_name(return_value_name_expr);
|
||||
}
|
||||
if let (Some(response_mode_subscript), Some(return_value_subscript)) = (
|
||||
response_model_arg.as_subscript_expr(),
|
||||
return_value.as_subscript_expr(),
|
||||
) {
|
||||
if let (Expr::Subscript(response_mode_subscript), Expr::Subscript(return_value_subscript)) =
|
||||
(response_model_arg, return_value)
|
||||
{
|
||||
return is_identical_types(
|
||||
&response_mode_subscript.value,
|
||||
&return_value_subscript.value,
|
||||
@@ -143,15 +141,13 @@ fn is_identical_types(
|
||||
semantic,
|
||||
);
|
||||
}
|
||||
if let (Some(response_mode_tuple), Some(return_value_tuple)) = (
|
||||
response_model_arg.as_tuple_expr(),
|
||||
return_value.as_tuple_expr(),
|
||||
) {
|
||||
return response_mode_tuple.elts.len() == return_value_tuple.elts.len()
|
||||
if let (Expr::Tuple(response_mode_tuple), Expr::Tuple(return_value_tuple)) =
|
||||
(response_model_arg, return_value)
|
||||
{
|
||||
return response_mode_tuple.len() == return_value_tuple.len()
|
||||
&& response_mode_tuple
|
||||
.elts
|
||||
.iter()
|
||||
.zip(return_value_tuple.elts.iter())
|
||||
.zip(return_value_tuple)
|
||||
.all(|(x, y)| is_identical_types(x, y, semantic));
|
||||
}
|
||||
false
|
||||
|
||||
@@ -115,44 +115,6 @@ impl MethodName {
|
||||
| MethodName::TrioCancelScope
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns associated module
|
||||
pub(super) fn module(self) -> AsyncModule {
|
||||
match self {
|
||||
MethodName::AsyncIoTimeout | MethodName::AsyncIoTimeoutAt => AsyncModule::AsyncIo,
|
||||
MethodName::AnyIoMoveOnAfter
|
||||
| MethodName::AnyIoFailAfter
|
||||
| MethodName::AnyIoCancelScope => AsyncModule::AnyIo,
|
||||
MethodName::TrioAcloseForcefully
|
||||
| MethodName::TrioCancelScope
|
||||
| MethodName::TrioCancelShieldedCheckpoint
|
||||
| MethodName::TrioCheckpoint
|
||||
| MethodName::TrioCheckpointIfCancelled
|
||||
| MethodName::TrioFailAfter
|
||||
| MethodName::TrioFailAt
|
||||
| MethodName::TrioMoveOnAfter
|
||||
| MethodName::TrioMoveOnAt
|
||||
| MethodName::TrioOpenFile
|
||||
| MethodName::TrioOpenProcess
|
||||
| MethodName::TrioOpenSslOverTcpListeners
|
||||
| MethodName::TrioOpenSslOverTcpStream
|
||||
| MethodName::TrioOpenTcpListeners
|
||||
| MethodName::TrioOpenTcpStream
|
||||
| MethodName::TrioOpenUnixSocket
|
||||
| MethodName::TrioPermanentlyDetachCoroutineObject
|
||||
| MethodName::TrioReattachDetachedCoroutineObject
|
||||
| MethodName::TrioRunProcess
|
||||
| MethodName::TrioServeListeners
|
||||
| MethodName::TrioServeSslOverTcp
|
||||
| MethodName::TrioServeTcp
|
||||
| MethodName::TrioSleep
|
||||
| MethodName::TrioSleepForever
|
||||
| MethodName::TrioTemporarilyDetachCoroutineObject
|
||||
| MethodName::TrioWaitReadable
|
||||
| MethodName::TrioWaitTaskRescheduled
|
||||
| MethodName::TrioWaitWritable => AsyncModule::Trio,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MethodName {
|
||||
|
||||
@@ -9,11 +9,10 @@ mod tests {
|
||||
use anyhow::Result;
|
||||
use test_case::test_case;
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
use crate::settings::types::PreviewMode;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::test::test_path;
|
||||
use crate::{assert_messages, settings};
|
||||
|
||||
#[test_case(Rule::CancelScopeNoCheckpoint, Path::new("ASYNC100.py"))]
|
||||
#[test_case(Rule::TrioSyncCall, Path::new("ASYNC105.py"))]
|
||||
@@ -37,27 +36,4 @@ mod tests {
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::CancelScopeNoCheckpoint, Path::new("ASYNC100.py"))]
|
||||
#[test_case(Rule::AsyncFunctionWithTimeout, Path::new("ASYNC109_0.py"))]
|
||||
#[test_case(Rule::AsyncFunctionWithTimeout, Path::new("ASYNC109_1.py"))]
|
||||
#[test_case(Rule::AsyncBusyWait, Path::new("ASYNC110.py"))]
|
||||
#[test_case(Rule::AsyncZeroSleep, Path::new("ASYNC115.py"))]
|
||||
#[test_case(Rule::LongSleepNotForever, Path::new("ASYNC116.py"))]
|
||||
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!(
|
||||
"preview__{}_{}",
|
||||
rule_code.noqa_code(),
|
||||
path.to_string_lossy()
|
||||
);
|
||||
let diagnostics = test_path(
|
||||
Path::new("flake8_async").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
)?;
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_async::helpers::AsyncModule;
|
||||
use crate::settings::types::PreviewMode;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for the use of an async sleep function in a `while` loop.
|
||||
@@ -71,26 +70,15 @@ pub(crate) fn async_busy_wait(checker: &mut Checker, while_stmt: &ast::StmtWhile
|
||||
return;
|
||||
};
|
||||
|
||||
if matches!(checker.settings.preview, PreviewMode::Disabled) {
|
||||
if matches!(qualified_name.segments(), ["trio", "sleep" | "sleep_until"]) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncBusyWait {
|
||||
module: AsyncModule::Trio,
|
||||
},
|
||||
while_stmt.range(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
if matches!(
|
||||
qualified_name.segments(),
|
||||
["trio" | "anyio", "sleep" | "sleep_until"] | ["asyncio", "sleep"]
|
||||
) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncBusyWait {
|
||||
module: AsyncModule::try_from(&qualified_name).unwrap(),
|
||||
},
|
||||
while_stmt.range(),
|
||||
));
|
||||
}
|
||||
if matches!(
|
||||
qualified_name.segments(),
|
||||
["trio" | "anyio", "sleep" | "sleep_until"] | ["asyncio", "sleep"]
|
||||
) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncBusyWait {
|
||||
module: AsyncModule::try_from(&qualified_name).unwrap(),
|
||||
},
|
||||
while_stmt.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_async::helpers::AsyncModule;
|
||||
use crate::settings::types::PreviewMode;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `async` functions with a `timeout` argument.
|
||||
@@ -87,17 +86,8 @@ pub(crate) fn async_function_with_timeout(
|
||||
AsyncModule::AsyncIo
|
||||
};
|
||||
|
||||
if matches!(checker.settings.preview, PreviewMode::Disabled) {
|
||||
if matches!(module, AsyncModule::Trio) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncFunctionWithTimeout { module },
|
||||
timeout.range(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncFunctionWithTimeout { module },
|
||||
timeout.range(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
AsyncFunctionWithTimeout { module },
|
||||
timeout.range(),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -83,11 +83,7 @@ pub(crate) fn async_zero_sleep(checker: &mut Checker, call: &ExprCall) {
|
||||
};
|
||||
|
||||
if let Some(module) = AsyncModule::try_from(&qualified_name) {
|
||||
let is_relevant_module = if checker.settings.preview.is_enabled() {
|
||||
matches!(module, AsyncModule::Trio | AsyncModule::AnyIo)
|
||||
} else {
|
||||
matches!(module, AsyncModule::Trio)
|
||||
};
|
||||
let is_relevant_module = matches!(module, AsyncModule::Trio | AsyncModule::AnyIo);
|
||||
|
||||
let is_sleep = is_relevant_module && matches!(qualified_name.segments(), [_, "sleep"]);
|
||||
|
||||
|
||||
@@ -5,8 +5,7 @@ use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{StmtWith, WithItem};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::rules::flake8_async::helpers::{AsyncModule, MethodName};
|
||||
use crate::settings::types::PreviewMode;
|
||||
use crate::rules::flake8_async::helpers::MethodName;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for timeout context managers which do not contain a checkpoint.
|
||||
@@ -88,17 +87,8 @@ pub(crate) fn cancel_scope_no_checkpoint(
|
||||
return;
|
||||
}
|
||||
|
||||
if matches!(checker.settings.preview, PreviewMode::Disabled) {
|
||||
if matches!(method_name.module(), AsyncModule::Trio) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
CancelScopeNoCheckpoint { method_name },
|
||||
with_stmt.range,
|
||||
));
|
||||
}
|
||||
} else {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
CancelScopeNoCheckpoint { method_name },
|
||||
with_stmt.range,
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
CancelScopeNoCheckpoint { method_name },
|
||||
with_stmt.range,
|
||||
));
|
||||
}
|
||||
|
||||
@@ -107,11 +107,7 @@ pub(crate) fn long_sleep_not_forever(checker: &mut Checker, call: &ExprCall) {
|
||||
return;
|
||||
};
|
||||
|
||||
let is_relevant_module = if checker.settings.preview.is_enabled() {
|
||||
matches!(module, AsyncModule::AnyIo | AsyncModule::Trio)
|
||||
} else {
|
||||
matches!(module, AsyncModule::Trio)
|
||||
};
|
||||
let is_relevant_module = matches!(module, AsyncModule::AnyIo | AsyncModule::Trio);
|
||||
|
||||
let is_sleep = is_relevant_module && matches!(qualified_name.segments(), [_, "sleep"]);
|
||||
|
||||
|
||||
@@ -18,3 +18,84 @@ ASYNC100.py:18:5: ASYNC100 A `with trio.move_on_after(...):` context does not co
|
||||
19 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:40:5: ASYNC100 A `with anyio.move_on_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
39 | async def func():
|
||||
40 | with anyio.move_on_after(delay=0.2):
|
||||
| _____^
|
||||
41 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:45:5: ASYNC100 A `with anyio.fail_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
44 | async def func():
|
||||
45 | with anyio.fail_after():
|
||||
| _____^
|
||||
46 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:50:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
49 | async def func():
|
||||
50 | with anyio.CancelScope():
|
||||
| _____^
|
||||
51 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:55:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
54 | async def func():
|
||||
55 | with anyio.CancelScope(), nullcontext():
|
||||
| _____^
|
||||
56 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:60:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
59 | async def func():
|
||||
60 | with nullcontext(), anyio.CancelScope():
|
||||
| _____^
|
||||
61 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:65:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
64 | async def func():
|
||||
65 | async with asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
66 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:70:5: ASYNC100 A `with asyncio.timeout_at(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
69 | async def func():
|
||||
70 | async with asyncio.timeout_at(when=0.2):
|
||||
| _____^
|
||||
71 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:80:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
79 | async def func():
|
||||
80 | async with asyncio.timeout(delay=0.2), asyncio.TaskGroup(), asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
81 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:90:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
89 | async def func():
|
||||
90 | async with asyncio.timeout(delay=0.2), asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
91 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
@@ -1,4 +1,18 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC109_1.py:5:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
5 | async def func(timeout):
|
||||
| ^^^^^^^ ASYNC109
|
||||
6 | ...
|
||||
|
|
||||
= help: Use `asyncio.timeout` instead
|
||||
|
||||
ASYNC109_1.py:9:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
9 | async def func(timeout=10):
|
||||
| ^^^^^^^^^^ ASYNC109
|
||||
10 | ...
|
||||
|
|
||||
= help: Use `asyncio.timeout` instead
|
||||
|
||||
@@ -18,3 +18,30 @@ ASYNC110.py:12:5: ASYNC110 Use `trio.Event` instead of awaiting `trio.sleep` in
|
||||
13 | | await trio.sleep_until(10)
|
||||
| |__________________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:22:5: ASYNC110 Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop
|
||||
|
|
||||
21 | async def func():
|
||||
22 | while True:
|
||||
| _____^
|
||||
23 | | await anyio.sleep(10)
|
||||
| |_____________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:27:5: ASYNC110 Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop
|
||||
|
|
||||
26 | async def func():
|
||||
27 | while True:
|
||||
| _____^
|
||||
28 | | await anyio.sleep_until(10)
|
||||
| |___________________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:37:5: ASYNC110 Use `anyio.Event` instead of awaiting `anyio.sleep` in a `while` loop
|
||||
|
|
||||
36 | async def func():
|
||||
37 | while True:
|
||||
| _____^
|
||||
38 | | await asyncio.sleep(10)
|
||||
| |_______________________________^ ASYNC110
|
||||
|
|
||||
|
||||
@@ -132,3 +132,117 @@ ASYNC115.py:59:11: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `tri
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | def func():
|
||||
|
||||
ASYNC115.py:85:11: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
83 | from anyio import sleep
|
||||
84 |
|
||||
85 | await anyio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
86 | await anyio.sleep(1) # OK
|
||||
87 | await anyio.sleep(0, 1) # OK
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
82 83 | import anyio
|
||||
83 84 | from anyio import sleep
|
||||
84 85 |
|
||||
85 |- await anyio.sleep(0) # ASYNC115
|
||||
86 |+ await lowlevel.checkpoint() # ASYNC115
|
||||
86 87 | await anyio.sleep(1) # OK
|
||||
87 88 | await anyio.sleep(0, 1) # OK
|
||||
88 89 | await anyio.sleep(...) # OK
|
||||
|
||||
ASYNC115.py:91:5: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
89 | await anyio.sleep() # OK
|
||||
90 |
|
||||
91 | anyio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
92 | foo = 0
|
||||
93 | anyio.sleep(foo) # OK
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
88 89 | await anyio.sleep(...) # OK
|
||||
89 90 | await anyio.sleep() # OK
|
||||
90 91 |
|
||||
91 |- anyio.sleep(0) # ASYNC115
|
||||
92 |+ lowlevel.checkpoint() # ASYNC115
|
||||
92 93 | foo = 0
|
||||
93 94 | anyio.sleep(foo) # OK
|
||||
94 95 | anyio.sleep(1) # OK
|
||||
|
||||
ASYNC115.py:97:5: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
95 | time.sleep(0) # OK
|
||||
96 |
|
||||
97 | sleep(0) # ASYNC115
|
||||
| ^^^^^^^^ ASYNC115
|
||||
98 |
|
||||
99 | bar = "bar"
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
94 95 | anyio.sleep(1) # OK
|
||||
95 96 | time.sleep(0) # OK
|
||||
96 97 |
|
||||
97 |- sleep(0) # ASYNC115
|
||||
98 |+ lowlevel.checkpoint() # ASYNC115
|
||||
98 99 |
|
||||
99 100 | bar = "bar"
|
||||
100 101 | anyio.sleep(bar)
|
||||
|
||||
ASYNC115.py:128:15: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
126 | import anyio
|
||||
127 |
|
||||
128 | anyio.run(anyio.sleep(0)) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
125 126 | def func():
|
||||
126 127 | import anyio
|
||||
127 128 |
|
||||
128 |- anyio.run(anyio.sleep(0)) # ASYNC115
|
||||
129 |+ anyio.run(lowlevel.checkpoint()) # ASYNC115
|
||||
129 130 |
|
||||
130 131 |
|
||||
131 132 | def func():
|
||||
|
||||
@@ -146,3 +146,194 @@ ASYNC116.py:57:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usu
|
||||
58 59 |
|
||||
59 60 |
|
||||
60 61 | async def import_anyio():
|
||||
|
||||
ASYNC116.py:64:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
63 | # These examples are probably not meant to ever wake up:
|
||||
64 | await anyio.sleep(100000) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
65 |
|
||||
66 | # 'inf literal' overflow trick
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
61 62 | import anyio
|
||||
62 63 |
|
||||
63 64 | # These examples are probably not meant to ever wake up:
|
||||
64 |- await anyio.sleep(100000) # error: 116, "async"
|
||||
65 |+ await sleep_forever() # error: 116, "async"
|
||||
65 66 |
|
||||
66 67 | # 'inf literal' overflow trick
|
||||
67 68 | await anyio.sleep(1e999) # error: 116, "async"
|
||||
|
||||
ASYNC116.py:67:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
66 | # 'inf literal' overflow trick
|
||||
67 | await anyio.sleep(1e999) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
68 |
|
||||
69 | await anyio.sleep(86399)
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
64 65 | await anyio.sleep(100000) # error: 116, "async"
|
||||
65 66 |
|
||||
66 67 | # 'inf literal' overflow trick
|
||||
67 |- await anyio.sleep(1e999) # error: 116, "async"
|
||||
68 |+ await sleep_forever() # error: 116, "async"
|
||||
68 69 |
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
|
||||
ASYNC116.py:71:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
69 | await anyio.sleep(86399)
|
||||
70 | await anyio.sleep(86400)
|
||||
71 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
72 | await anyio.sleep(86401) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
68 69 |
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
71 |- await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 |+ await sleep_forever() # error: 116, "async"
|
||||
72 73 | await anyio.sleep(86401) # error: 116, "async"
|
||||
73 74 |
|
||||
74 75 | await anyio.sleep(-1) # will raise a runtime error
|
||||
|
||||
ASYNC116.py:72:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
70 | await anyio.sleep(86400)
|
||||
71 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 | await anyio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
73 |
|
||||
74 | await anyio.sleep(-1) # will raise a runtime error
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
71 72 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 |- await anyio.sleep(86401) # error: 116, "async"
|
||||
73 |+ await sleep_forever() # error: 116, "async"
|
||||
73 74 |
|
||||
74 75 | await anyio.sleep(-1) # will raise a runtime error
|
||||
75 76 | await anyio.sleep(0) # handled by different check
|
||||
|
||||
ASYNC116.py:101:5: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
100 | # does not require the call to be awaited, nor in an async fun
|
||||
101 | anyio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
102 | # also checks that we don't break visit_Call
|
||||
103 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
98 99 | import anyio
|
||||
99 100 |
|
||||
100 101 | # does not require the call to be awaited, nor in an async fun
|
||||
101 |- anyio.sleep(86401) # error: 116, "async"
|
||||
102 |+ sleep_forever() # error: 116, "async"
|
||||
102 103 | # also checks that we don't break visit_Call
|
||||
103 104 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
104 105 |
|
||||
|
||||
ASYNC116.py:103:15: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
101 | anyio.sleep(86401) # error: 116, "async"
|
||||
102 | # also checks that we don't break visit_Call
|
||||
103 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
100 101 | # does not require the call to be awaited, nor in an async fun
|
||||
101 102 | anyio.sleep(86401) # error: 116, "async"
|
||||
102 103 | # also checks that we don't break visit_Call
|
||||
103 |- anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
104 |+ anyio.run(sleep_forever()) # error: 116, "async"
|
||||
104 105 |
|
||||
105 106 |
|
||||
106 107 | async def import_from_anyio():
|
||||
|
||||
ASYNC116.py:110:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
109 | # catch from import
|
||||
110 | await sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
107 108 | from anyio import sleep
|
||||
108 109 |
|
||||
109 110 | # catch from import
|
||||
110 |- await sleep(86401) # error: 116, "async"
|
||||
111 |+ await sleep_forever() # error: 116, "async"
|
||||
|
||||
@@ -1,101 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC100.py:8:5: ASYNC100 A `with trio.fail_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
7 | async def func():
|
||||
8 | with trio.fail_after():
|
||||
| _____^
|
||||
9 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:18:5: ASYNC100 A `with trio.move_on_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
17 | async def func():
|
||||
18 | with trio.move_on_after():
|
||||
| _____^
|
||||
19 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:40:5: ASYNC100 A `with anyio.move_on_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
39 | async def func():
|
||||
40 | with anyio.move_on_after(delay=0.2):
|
||||
| _____^
|
||||
41 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:45:5: ASYNC100 A `with anyio.fail_after(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
44 | async def func():
|
||||
45 | with anyio.fail_after():
|
||||
| _____^
|
||||
46 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:50:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
49 | async def func():
|
||||
50 | with anyio.CancelScope():
|
||||
| _____^
|
||||
51 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:55:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
54 | async def func():
|
||||
55 | with anyio.CancelScope(), nullcontext():
|
||||
| _____^
|
||||
56 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:60:5: ASYNC100 A `with anyio.CancelScope(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
59 | async def func():
|
||||
60 | with nullcontext(), anyio.CancelScope():
|
||||
| _____^
|
||||
61 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:65:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
64 | async def func():
|
||||
65 | async with asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
66 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:70:5: ASYNC100 A `with asyncio.timeout_at(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
69 | async def func():
|
||||
70 | async with asyncio.timeout_at(when=0.2):
|
||||
| _____^
|
||||
71 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:80:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
79 | async def func():
|
||||
80 | async with asyncio.timeout(delay=0.2), asyncio.TaskGroup(), asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
81 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
|
||||
ASYNC100.py:90:5: ASYNC100 A `with asyncio.timeout(...):` context does not contain any `await` statements. This makes it pointless, as the timeout can only be triggered by a checkpoint.
|
||||
|
|
||||
89 | async def func():
|
||||
90 | async with asyncio.timeout(delay=0.2), asyncio.timeout(delay=0.2):
|
||||
| _____^
|
||||
91 | | ...
|
||||
| |___________^ ASYNC100
|
||||
|
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC109_0.py:8:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
8 | async def func(timeout):
|
||||
| ^^^^^^^ ASYNC109
|
||||
9 | ...
|
||||
|
|
||||
= help: Use `trio.fail_after` instead
|
||||
|
||||
ASYNC109_0.py:12:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
12 | async def func(timeout=10):
|
||||
| ^^^^^^^^^^ ASYNC109
|
||||
13 | ...
|
||||
|
|
||||
= help: Use `trio.fail_after` instead
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC109_1.py:5:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
5 | async def func(timeout):
|
||||
| ^^^^^^^ ASYNC109
|
||||
6 | ...
|
||||
|
|
||||
= help: Use `asyncio.timeout` instead
|
||||
|
||||
ASYNC109_1.py:9:16: ASYNC109 Async function definition with a `timeout` parameter
|
||||
|
|
||||
9 | async def func(timeout=10):
|
||||
| ^^^^^^^^^^ ASYNC109
|
||||
10 | ...
|
||||
|
|
||||
= help: Use `asyncio.timeout` instead
|
||||
@@ -1,47 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC110.py:7:5: ASYNC110 Use `trio.Event` instead of awaiting `trio.sleep` in a `while` loop
|
||||
|
|
||||
6 | async def func():
|
||||
7 | while True:
|
||||
| _____^
|
||||
8 | | await trio.sleep(10)
|
||||
| |____________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:12:5: ASYNC110 Use `trio.Event` instead of awaiting `trio.sleep` in a `while` loop
|
||||
|
|
||||
11 | async def func():
|
||||
12 | while True:
|
||||
| _____^
|
||||
13 | | await trio.sleep_until(10)
|
||||
| |__________________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:22:5: ASYNC110 Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop
|
||||
|
|
||||
21 | async def func():
|
||||
22 | while True:
|
||||
| _____^
|
||||
23 | | await anyio.sleep(10)
|
||||
| |_____________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:27:5: ASYNC110 Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop
|
||||
|
|
||||
26 | async def func():
|
||||
27 | while True:
|
||||
| _____^
|
||||
28 | | await anyio.sleep_until(10)
|
||||
| |___________________________________^ ASYNC110
|
||||
|
|
||||
|
||||
ASYNC110.py:37:5: ASYNC110 Use `anyio.Event` instead of awaiting `anyio.sleep` in a `while` loop
|
||||
|
|
||||
36 | async def func():
|
||||
37 | while True:
|
||||
| _____^
|
||||
38 | | await asyncio.sleep(10)
|
||||
| |_______________________________^ ASYNC110
|
||||
|
|
||||
@@ -1,248 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC115.py:5:11: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
3 | from trio import sleep
|
||||
4 |
|
||||
5 | await trio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^ ASYNC115
|
||||
6 | await trio.sleep(1) # OK
|
||||
7 | await trio.sleep(0, 1) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
2 2 | import trio
|
||||
3 3 | from trio import sleep
|
||||
4 4 |
|
||||
5 |- await trio.sleep(0) # ASYNC115
|
||||
5 |+ await trio.lowlevel.checkpoint() # ASYNC115
|
||||
6 6 | await trio.sleep(1) # OK
|
||||
7 7 | await trio.sleep(0, 1) # OK
|
||||
8 8 | await trio.sleep(...) # OK
|
||||
|
||||
ASYNC115.py:11:5: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
9 | await trio.sleep() # OK
|
||||
10 |
|
||||
11 | trio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^ ASYNC115
|
||||
12 | foo = 0
|
||||
13 | trio.sleep(foo) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
8 8 | await trio.sleep(...) # OK
|
||||
9 9 | await trio.sleep() # OK
|
||||
10 10 |
|
||||
11 |- trio.sleep(0) # ASYNC115
|
||||
11 |+ trio.lowlevel.checkpoint() # ASYNC115
|
||||
12 12 | foo = 0
|
||||
13 13 | trio.sleep(foo) # OK
|
||||
14 14 | trio.sleep(1) # OK
|
||||
|
||||
ASYNC115.py:17:5: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
15 | time.sleep(0) # OK
|
||||
16 |
|
||||
17 | sleep(0) # ASYNC115
|
||||
| ^^^^^^^^ ASYNC115
|
||||
18 |
|
||||
19 | bar = "bar"
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
14 14 | trio.sleep(1) # OK
|
||||
15 15 | time.sleep(0) # OK
|
||||
16 16 |
|
||||
17 |- sleep(0) # ASYNC115
|
||||
17 |+ trio.lowlevel.checkpoint() # ASYNC115
|
||||
18 18 |
|
||||
19 19 | bar = "bar"
|
||||
20 20 | trio.sleep(bar)
|
||||
|
||||
ASYNC115.py:48:14: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
46 | import trio
|
||||
47 |
|
||||
48 | trio.run(trio.sleep(0)) # ASYNC115
|
||||
| ^^^^^^^^^^^^^ ASYNC115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
45 45 | def func():
|
||||
46 46 | import trio
|
||||
47 47 |
|
||||
48 |- trio.run(trio.sleep(0)) # ASYNC115
|
||||
48 |+ trio.run(trio.lowlevel.checkpoint()) # ASYNC115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
|
||||
ASYNC115.py:55:5: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
54 | def func():
|
||||
55 | sleep(0) # ASYNC115
|
||||
| ^^^^^^^^ ASYNC115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
48 48 | trio.run(trio.sleep(0)) # ASYNC115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 |-from trio import Event, sleep
|
||||
51 |+from trio import Event, sleep, lowlevel
|
||||
52 52 |
|
||||
53 53 |
|
||||
54 54 | def func():
|
||||
55 |- sleep(0) # ASYNC115
|
||||
55 |+ lowlevel.checkpoint() # ASYNC115
|
||||
56 56 |
|
||||
57 57 |
|
||||
58 58 | async def func():
|
||||
|
||||
ASYNC115.py:59:11: ASYNC115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
58 | async def func():
|
||||
59 | await sleep(seconds=0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^^^ ASYNC115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
48 48 | trio.run(trio.sleep(0)) # ASYNC115
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 |-from trio import Event, sleep
|
||||
51 |+from trio import Event, sleep, lowlevel
|
||||
52 52 |
|
||||
53 53 |
|
||||
54 54 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
56 56 |
|
||||
57 57 |
|
||||
58 58 | async def func():
|
||||
59 |- await sleep(seconds=0) # ASYNC115
|
||||
59 |+ await lowlevel.checkpoint() # ASYNC115
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | def func():
|
||||
|
||||
ASYNC115.py:85:11: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
83 | from anyio import sleep
|
||||
84 |
|
||||
85 | await anyio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
86 | await anyio.sleep(1) # OK
|
||||
87 | await anyio.sleep(0, 1) # OK
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
82 83 | import anyio
|
||||
83 84 | from anyio import sleep
|
||||
84 85 |
|
||||
85 |- await anyio.sleep(0) # ASYNC115
|
||||
86 |+ await lowlevel.checkpoint() # ASYNC115
|
||||
86 87 | await anyio.sleep(1) # OK
|
||||
87 88 | await anyio.sleep(0, 1) # OK
|
||||
88 89 | await anyio.sleep(...) # OK
|
||||
|
||||
ASYNC115.py:91:5: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
89 | await anyio.sleep() # OK
|
||||
90 |
|
||||
91 | anyio.sleep(0) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
92 | foo = 0
|
||||
93 | anyio.sleep(foo) # OK
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
88 89 | await anyio.sleep(...) # OK
|
||||
89 90 | await anyio.sleep() # OK
|
||||
90 91 |
|
||||
91 |- anyio.sleep(0) # ASYNC115
|
||||
92 |+ lowlevel.checkpoint() # ASYNC115
|
||||
92 93 | foo = 0
|
||||
93 94 | anyio.sleep(foo) # OK
|
||||
94 95 | anyio.sleep(1) # OK
|
||||
|
||||
ASYNC115.py:97:5: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
95 | time.sleep(0) # OK
|
||||
96 |
|
||||
97 | sleep(0) # ASYNC115
|
||||
| ^^^^^^^^ ASYNC115
|
||||
98 |
|
||||
99 | bar = "bar"
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
94 95 | anyio.sleep(1) # OK
|
||||
95 96 | time.sleep(0) # OK
|
||||
96 97 |
|
||||
97 |- sleep(0) # ASYNC115
|
||||
98 |+ lowlevel.checkpoint() # ASYNC115
|
||||
98 99 |
|
||||
99 100 | bar = "bar"
|
||||
100 101 | anyio.sleep(bar)
|
||||
|
||||
ASYNC115.py:128:15: ASYNC115 [*] Use `asyncio.lowlevel.checkpoint()` instead of `asyncio.sleep(0)`
|
||||
|
|
||||
126 | import anyio
|
||||
127 |
|
||||
128 | anyio.run(anyio.sleep(0)) # ASYNC115
|
||||
| ^^^^^^^^^^^^^^ ASYNC115
|
||||
|
|
||||
= help: Replace with `asyncio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
49 49 |
|
||||
50 50 |
|
||||
51 51 | from trio import Event, sleep
|
||||
52 |+from asyncio import lowlevel
|
||||
52 53 |
|
||||
53 54 |
|
||||
54 55 | def func():
|
||||
--------------------------------------------------------------------------------
|
||||
125 126 | def func():
|
||||
126 127 | import anyio
|
||||
127 128 |
|
||||
128 |- anyio.run(anyio.sleep(0)) # ASYNC115
|
||||
129 |+ anyio.run(lowlevel.checkpoint()) # ASYNC115
|
||||
129 130 |
|
||||
130 131 |
|
||||
131 132 | def func():
|
||||
@@ -1,339 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_async/mod.rs
|
||||
---
|
||||
ASYNC116.py:11:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
10 | # These examples are probably not meant to ever wake up:
|
||||
11 | await trio.sleep(100000) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
12 |
|
||||
13 | # 'inf literal' overflow trick
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
8 8 | import trio
|
||||
9 9 |
|
||||
10 10 | # These examples are probably not meant to ever wake up:
|
||||
11 |- await trio.sleep(100000) # error: 116, "async"
|
||||
11 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
12 12 |
|
||||
13 13 | # 'inf literal' overflow trick
|
||||
14 14 | await trio.sleep(1e999) # error: 116, "async"
|
||||
|
||||
ASYNC116.py:14:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
13 | # 'inf literal' overflow trick
|
||||
14 | await trio.sleep(1e999) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
15 |
|
||||
16 | await trio.sleep(86399)
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
11 11 | await trio.sleep(100000) # error: 116, "async"
|
||||
12 12 |
|
||||
13 13 | # 'inf literal' overflow trick
|
||||
14 |- await trio.sleep(1e999) # error: 116, "async"
|
||||
14 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
15 15 |
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
|
||||
ASYNC116.py:18:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
16 | await trio.sleep(86399)
|
||||
17 | await trio.sleep(86400)
|
||||
18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
19 | await trio.sleep(86401) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
15 15 |
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
18 |- await trio.sleep(86400.01) # error: 116, "async"
|
||||
18 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
19 19 | await trio.sleep(86401) # error: 116, "async"
|
||||
20 20 |
|
||||
21 21 | await trio.sleep(-1) # will raise a runtime error
|
||||
|
||||
ASYNC116.py:19:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
17 | await trio.sleep(86400)
|
||||
18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
19 | await trio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
20 |
|
||||
21 | await trio.sleep(-1) # will raise a runtime error
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
16 16 | await trio.sleep(86399)
|
||||
17 17 | await trio.sleep(86400)
|
||||
18 18 | await trio.sleep(86400.01) # error: 116, "async"
|
||||
19 |- await trio.sleep(86401) # error: 116, "async"
|
||||
19 |+ await trio.sleep_forever() # error: 116, "async"
|
||||
20 20 |
|
||||
21 21 | await trio.sleep(-1) # will raise a runtime error
|
||||
22 22 | await trio.sleep(0) # handled by different check
|
||||
|
||||
ASYNC116.py:48:5: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 | trio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
49 | # also checks that we don't break visit_Call
|
||||
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
45 45 | import trio
|
||||
46 46 |
|
||||
47 47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 |- trio.sleep(86401) # error: 116, "async"
|
||||
48 |+ trio.sleep_forever() # error: 116, "async"
|
||||
49 49 | # also checks that we don't break visit_Call
|
||||
50 50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
51 51 |
|
||||
|
||||
ASYNC116.py:50:14: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
48 | trio.sleep(86401) # error: 116, "async"
|
||||
49 | # also checks that we don't break visit_Call
|
||||
50 | trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
47 47 | # does not require the call to be awaited, nor in an async fun
|
||||
48 48 | trio.sleep(86401) # error: 116, "async"
|
||||
49 49 | # also checks that we don't break visit_Call
|
||||
50 |- trio.run(trio.sleep(86401)) # error: 116, "async"
|
||||
50 |+ trio.run(trio.sleep_forever()) # error: 116, "async"
|
||||
51 51 |
|
||||
52 52 |
|
||||
53 53 | async def import_from_trio():
|
||||
|
||||
ASYNC116.py:57:11: ASYNC116 [*] `trio.sleep()` with >24 hour interval should usually be `trio.sleep_forever()`
|
||||
|
|
||||
56 | # catch from import
|
||||
57 | await sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `trio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from trio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
54 55 | from trio import sleep
|
||||
55 56 |
|
||||
56 57 | # catch from import
|
||||
57 |- await sleep(86401) # error: 116, "async"
|
||||
58 |+ await sleep_forever() # error: 116, "async"
|
||||
58 59 |
|
||||
59 60 |
|
||||
60 61 | async def import_anyio():
|
||||
|
||||
ASYNC116.py:64:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
63 | # These examples are probably not meant to ever wake up:
|
||||
64 | await anyio.sleep(100000) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
65 |
|
||||
66 | # 'inf literal' overflow trick
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
61 62 | import anyio
|
||||
62 63 |
|
||||
63 64 | # These examples are probably not meant to ever wake up:
|
||||
64 |- await anyio.sleep(100000) # error: 116, "async"
|
||||
65 |+ await sleep_forever() # error: 116, "async"
|
||||
65 66 |
|
||||
66 67 | # 'inf literal' overflow trick
|
||||
67 68 | await anyio.sleep(1e999) # error: 116, "async"
|
||||
|
||||
ASYNC116.py:67:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
66 | # 'inf literal' overflow trick
|
||||
67 | await anyio.sleep(1e999) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
68 |
|
||||
69 | await anyio.sleep(86399)
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
64 65 | await anyio.sleep(100000) # error: 116, "async"
|
||||
65 66 |
|
||||
66 67 | # 'inf literal' overflow trick
|
||||
67 |- await anyio.sleep(1e999) # error: 116, "async"
|
||||
68 |+ await sleep_forever() # error: 116, "async"
|
||||
68 69 |
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
|
||||
ASYNC116.py:71:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
69 | await anyio.sleep(86399)
|
||||
70 | await anyio.sleep(86400)
|
||||
71 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
72 | await anyio.sleep(86401) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
68 69 |
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
71 |- await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 |+ await sleep_forever() # error: 116, "async"
|
||||
72 73 | await anyio.sleep(86401) # error: 116, "async"
|
||||
73 74 |
|
||||
74 75 | await anyio.sleep(-1) # will raise a runtime error
|
||||
|
||||
ASYNC116.py:72:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
70 | await anyio.sleep(86400)
|
||||
71 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 | await anyio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
73 |
|
||||
74 | await anyio.sleep(-1) # will raise a runtime error
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
69 70 | await anyio.sleep(86399)
|
||||
70 71 | await anyio.sleep(86400)
|
||||
71 72 | await anyio.sleep(86400.01) # error: 116, "async"
|
||||
72 |- await anyio.sleep(86401) # error: 116, "async"
|
||||
73 |+ await sleep_forever() # error: 116, "async"
|
||||
73 74 |
|
||||
74 75 | await anyio.sleep(-1) # will raise a runtime error
|
||||
75 76 | await anyio.sleep(0) # handled by different check
|
||||
|
||||
ASYNC116.py:101:5: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
100 | # does not require the call to be awaited, nor in an async fun
|
||||
101 | anyio.sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
102 | # also checks that we don't break visit_Call
|
||||
103 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
98 99 | import anyio
|
||||
99 100 |
|
||||
100 101 | # does not require the call to be awaited, nor in an async fun
|
||||
101 |- anyio.sleep(86401) # error: 116, "async"
|
||||
102 |+ sleep_forever() # error: 116, "async"
|
||||
102 103 | # also checks that we don't break visit_Call
|
||||
103 104 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
104 105 |
|
||||
|
||||
ASYNC116.py:103:15: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
101 | anyio.sleep(86401) # error: 116, "async"
|
||||
102 | # also checks that we don't break visit_Call
|
||||
103 | anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
100 101 | # does not require the call to be awaited, nor in an async fun
|
||||
101 102 | anyio.sleep(86401) # error: 116, "async"
|
||||
102 103 | # also checks that we don't break visit_Call
|
||||
103 |- anyio.run(anyio.sleep(86401)) # error: 116, "async"
|
||||
104 |+ anyio.run(sleep_forever()) # error: 116, "async"
|
||||
104 105 |
|
||||
105 106 |
|
||||
106 107 | async def import_from_anyio():
|
||||
|
||||
ASYNC116.py:110:11: ASYNC116 [*] `asyncio.sleep()` with >24 hour interval should usually be `asyncio.sleep_forever()`
|
||||
|
|
||||
109 | # catch from import
|
||||
110 | await sleep(86401) # error: 116, "async"
|
||||
| ^^^^^^^^^^^^ ASYNC116
|
||||
|
|
||||
= help: Replace with `asyncio.sleep_forever()`
|
||||
|
||||
ℹ Unsafe fix
|
||||
2 2 | # ASYNCIO_NO_ERROR - no asyncio.sleep_forever, so check intentionally doesn't trigger.
|
||||
3 3 | import math
|
||||
4 4 | from math import inf
|
||||
5 |+from asyncio import sleep_forever
|
||||
5 6 |
|
||||
6 7 |
|
||||
7 8 | async def import_trio():
|
||||
--------------------------------------------------------------------------------
|
||||
107 108 | from anyio import sleep
|
||||
108 109 |
|
||||
109 110 | # catch from import
|
||||
110 |- await sleep(86401) # error: 116, "async"
|
||||
111 |+ await sleep_forever() # error: 116, "async"
|
||||
@@ -1,6 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprAttribute, ExprDict, ExprList};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprAttribute};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -65,8 +65,8 @@ fn is_call_insecure(call: &ast::ExprCall) -> bool {
|
||||
if let Some(argument) = call.arguments.find_argument(argument_name, position) {
|
||||
match argument_name {
|
||||
"select" => match argument {
|
||||
Expr::Dict(ExprDict { items, .. }) => {
|
||||
if items.iter().any(|ast::DictItem { key, value }| {
|
||||
Expr::Dict(dict) => {
|
||||
if dict.iter().any(|ast::DictItem { key, value }| {
|
||||
key.as_ref()
|
||||
.is_some_and(|key| !key.is_string_literal_expr())
|
||||
|| !value.is_string_literal_expr()
|
||||
@@ -77,8 +77,8 @@ fn is_call_insecure(call: &ast::ExprCall) -> bool {
|
||||
_ => return true,
|
||||
},
|
||||
"where" | "tables" => match argument {
|
||||
Expr::List(ExprList { elts, .. }) => {
|
||||
if !elts.iter().all(Expr::is_string_literal_expr) {
|
||||
Expr::List(list) => {
|
||||
if !list.iter().all(Expr::is_string_literal_expr) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -530,11 +530,11 @@ fn is_partial_path(expr: &Expr) -> bool {
|
||||
/// subprocess.Popen(["/usr/local/bin/rsync", "*", "some_where:"], shell=True)
|
||||
/// ```
|
||||
fn is_wildcard_command(expr: &Expr) -> bool {
|
||||
if let Expr::List(ast::ExprList { elts, .. }) = expr {
|
||||
if let Expr::List(list) = expr {
|
||||
let mut has_star = false;
|
||||
let mut has_command = false;
|
||||
for elt in elts {
|
||||
if let Some(text) = string_literal(elt) {
|
||||
for item in list {
|
||||
if let Some(text) = string_literal(item) {
|
||||
has_star |= text.contains('*');
|
||||
has_command |= text.contains("chown")
|
||||
|| text.contains("chmod")
|
||||
|
||||
@@ -49,16 +49,16 @@ impl Violation for DuplicateValue {
|
||||
/// B033
|
||||
pub(crate) fn duplicate_value(checker: &mut Checker, set: &ast::ExprSet) {
|
||||
let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
|
||||
for (index, elt) in set.elts.iter().enumerate() {
|
||||
if elt.is_literal_expr() {
|
||||
let comparable_value: ComparableExpr = elt.into();
|
||||
for (index, value) in set.iter().enumerate() {
|
||||
if value.is_literal_expr() {
|
||||
let comparable_value = ComparableExpr::from(value);
|
||||
|
||||
if !seen_values.insert(comparable_value) {
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
DuplicateValue {
|
||||
value: checker.generator().expr(elt),
|
||||
value: checker.generator().expr(value),
|
||||
},
|
||||
elt.range(),
|
||||
value.range(),
|
||||
);
|
||||
|
||||
diagnostic.try_set_fix(|| {
|
||||
@@ -73,7 +73,7 @@ pub(crate) fn duplicate_value(checker: &mut Checker, set: &ast::ExprSet) {
|
||||
|
||||
/// Remove the member at the given index from the [`ast::ExprSet`].
|
||||
fn remove_member(set: &ast::ExprSet, index: usize, source: &str) -> Result<Edit> {
|
||||
if index < set.elts.len() - 1 {
|
||||
if index < set.len() - 1 {
|
||||
// Case 1: the expression is _not_ the last node, so delete from the start of the
|
||||
// expression to the end of the subsequent comma.
|
||||
// Ex) Delete `"a"` in `{"a", "b", "c"}`.
|
||||
|
||||
@@ -315,15 +315,15 @@ pub(crate) fn reuse_of_groupby_generator(
|
||||
let Expr::Call(ast::ExprCall { func, .. }) = &iter else {
|
||||
return;
|
||||
};
|
||||
let Expr::Tuple(ast::ExprTuple { elts, .. }) = target else {
|
||||
let Expr::Tuple(tuple) = target else {
|
||||
// Ignore any `groupby()` invocation that isn't unpacked
|
||||
return;
|
||||
};
|
||||
if elts.len() != 2 {
|
||||
if tuple.len() != 2 {
|
||||
return;
|
||||
}
|
||||
// We have an invocation of groupby which is a simple unpacking
|
||||
let Expr::Name(ast::ExprName { id: group_name, .. }) = &elts[1] else {
|
||||
let Expr::Name(ast::ExprName { id: group_name, .. }) = &tuple.elts[1] else {
|
||||
return;
|
||||
};
|
||||
// Check if the function call is `itertools.groupby`
|
||||
|
||||
@@ -72,7 +72,7 @@ pub(crate) fn static_key_dict_comprehension(checker: &mut Checker, dict_comp: &a
|
||||
/// comprehension.
|
||||
fn is_constant(key: &Expr, names: &FxHashMap<&str, &ast::ExprName>) -> bool {
|
||||
match key {
|
||||
Expr::Tuple(ast::ExprTuple { elts, .. }) => elts.iter().all(|elt| is_constant(elt, names)),
|
||||
Expr::Tuple(tuple) => tuple.iter().all(|elem| is_constant(elem, names)),
|
||||
Expr::Name(ast::ExprName { id, .. }) => !names.contains_key(id.as_str()),
|
||||
Expr::Attribute(ast::ExprAttribute { value, .. }) => is_constant(value, names),
|
||||
Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
|
||||
|
||||
@@ -25,6 +25,11 @@ use super::super::helpers::at_last_top_level_expression_in_cell;
|
||||
/// assert foo == bar, "`foo` and `bar` should be equal."
|
||||
/// ```
|
||||
///
|
||||
/// ## Notebook behavior
|
||||
/// For Jupyter Notebooks, this rule is not applied to the last top-level expression in a cell.
|
||||
/// This is because it's common to have a notebook cell that ends with an expression,
|
||||
/// which will result in the `repr` of the evaluated expression being printed as the cell's output.
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: `assert` statement](https://docs.python.org/3/reference/simple_stmts.html#the-assert-statement)
|
||||
#[violation]
|
||||
@@ -43,9 +48,6 @@ impl Violation for UselessComparison {
|
||||
/// B015
|
||||
pub(crate) fn useless_comparison(checker: &mut Checker, expr: &Expr) {
|
||||
if expr.is_compare_expr() {
|
||||
// For Jupyter Notebooks, ignore the last top-level expression for each cell.
|
||||
// This is because it's common to have a cell that ends with an expression
|
||||
// to display it's value.
|
||||
if checker.source_type.is_ipynb()
|
||||
&& at_last_top_level_expression_in_cell(
|
||||
checker.semantic(),
|
||||
|
||||
@@ -26,6 +26,11 @@ use super::super::helpers::at_last_top_level_expression_in_cell;
|
||||
/// foo = 1 + 1
|
||||
/// ```
|
||||
///
|
||||
/// ## Notebook behavior
|
||||
/// For Jupyter Notebooks, this rule is not applied to the last top-level expression in a cell.
|
||||
/// This is because it's common to have a notebook cell that ends with an expression,
|
||||
/// which will result in the `repr` of the evaluated expression being printed as the cell's output.
|
||||
///
|
||||
/// ## Known problems
|
||||
/// This rule ignores expression types that are commonly used for their side
|
||||
/// effects, such as function calls.
|
||||
@@ -81,9 +86,6 @@ pub(crate) fn useless_expression(checker: &mut Checker, value: &Expr) {
|
||||
return;
|
||||
}
|
||||
|
||||
// For Jupyter Notebooks, ignore the last top-level expression for each cell.
|
||||
// This is because it's common to have a cell that ends with an expression
|
||||
// to display it's value.
|
||||
if checker.source_type.is_ipynb()
|
||||
&& at_last_top_level_expression_in_cell(
|
||||
checker.semantic(),
|
||||
|
||||
@@ -21,6 +21,7 @@ mod tests {
|
||||
#[test_case(Rule::UnnecessaryComprehension, Path::new("C416.py"))]
|
||||
#[test_case(Rule::UnnecessaryComprehensionInCall, Path::new("C419.py"))]
|
||||
#[test_case(Rule::UnnecessaryComprehensionInCall, Path::new("C419_2.py"))]
|
||||
#[test_case(Rule::UnnecessaryDictComprehensionForIterable, Path::new("C420.py"))]
|
||||
#[test_case(Rule::UnnecessaryDoubleCastOrProcess, Path::new("C414.py"))]
|
||||
#[test_case(Rule::UnnecessaryGeneratorDict, Path::new("C402.py"))]
|
||||
#[test_case(Rule::UnnecessaryGeneratorList, Path::new("C400.py"))]
|
||||
|
||||
@@ -2,6 +2,7 @@ pub(crate) use unnecessary_call_around_sorted::*;
|
||||
pub(crate) use unnecessary_collection_call::*;
|
||||
pub(crate) use unnecessary_comprehension::*;
|
||||
pub(crate) use unnecessary_comprehension_in_call::*;
|
||||
pub(crate) use unnecessary_dict_comprehension_for_iterable::*;
|
||||
pub(crate) use unnecessary_double_cast_or_process::*;
|
||||
pub(crate) use unnecessary_generator_dict::*;
|
||||
pub(crate) use unnecessary_generator_list::*;
|
||||
@@ -22,6 +23,7 @@ mod unnecessary_call_around_sorted;
|
||||
mod unnecessary_collection_call;
|
||||
mod unnecessary_comprehension;
|
||||
mod unnecessary_comprehension_in_call;
|
||||
mod unnecessary_dict_comprehension_for_iterable;
|
||||
mod unnecessary_double_cast_or_process;
|
||||
mod unnecessary_generator_dict;
|
||||
mod unnecessary_generator_list;
|
||||
|
||||
@@ -61,13 +61,13 @@ pub(crate) fn unnecessary_generator_dict(
|
||||
let Expr::Generator(ast::ExprGenerator { elt, .. }) = argument else {
|
||||
return;
|
||||
};
|
||||
let Expr::Tuple(ast::ExprTuple { elts, .. }) = elt.as_ref() else {
|
||||
let Expr::Tuple(tuple) = &**elt else {
|
||||
return;
|
||||
};
|
||||
if elts.len() != 2 {
|
||||
if tuple.len() != 2 {
|
||||
return;
|
||||
}
|
||||
if elts.iter().any(Expr::is_starred_expr) {
|
||||
if tuple.iter().any(Expr::is_starred_expr) {
|
||||
return;
|
||||
}
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryGeneratorDict, expr.range());
|
||||
|
||||
@@ -62,10 +62,10 @@ pub(crate) fn unnecessary_list_comprehension_dict(
|
||||
let Expr::ListComp(ast::ExprListComp { elt, .. }) = argument else {
|
||||
return;
|
||||
};
|
||||
let Expr::Tuple(ast::ExprTuple { elts, .. }) = elt.as_ref() else {
|
||||
let Expr::Tuple(tuple) = &**elt else {
|
||||
return;
|
||||
};
|
||||
if elts.len() != 2 {
|
||||
if tuple.len() != 2 {
|
||||
return;
|
||||
}
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryListComprehensionDict, expr.range());
|
||||
|
||||
@@ -74,7 +74,7 @@ pub(crate) fn unnecessary_literal_dict(
|
||||
// Accept `dict((1, 2), ...))` `dict([(1, 2), ...])`.
|
||||
if !elts
|
||||
.iter()
|
||||
.all(|elt| matches!(&elt, Expr::Tuple(ast::ExprTuple { elts, .. }) if elts.len() == 2))
|
||||
.all(|elt| matches!(&elt, Expr::Tuple(tuple) if tuple.len() == 2))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/ruff/mod.rs
|
||||
source: crates/ruff_linter/src/rules/flake8_comprehensions/mod.rs
|
||||
---
|
||||
RUF025.py:6:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:6:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
4 | def func():
|
||||
5 | numbers = [1, 2, 3]
|
||||
6 | {n: None for n in numbers} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable, value)`)
|
||||
|
||||
@@ -20,11 +20,11 @@ RUF025.py:6:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict
|
||||
8 8 |
|
||||
9 9 | def func():
|
||||
|
||||
RUF025.py:10:23: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:10:23: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
9 | def func():
|
||||
10 | for key, value in {n: 1 for n in [1, 2, 3]}.items(): # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
11 | pass
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
@@ -39,11 +39,11 @@ RUF025.py:10:23: RUF025 [*] Unnecessary dict comprehension for iterable; use `di
|
||||
12 12 |
|
||||
13 13 |
|
||||
|
||||
RUF025.py:15:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:15:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
14 | def func():
|
||||
15 | {n: 1.1 for n in [1, 2, 3]} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -57,12 +57,12 @@ RUF025.py:15:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
17 17 |
|
||||
18 18 | def func():
|
||||
|
||||
RUF025.py:26:7: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:26:7: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
24 | return data
|
||||
25 |
|
||||
26 | f({c: "a" for c in "12345"}) # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -76,11 +76,11 @@ RUF025.py:26:7: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
28 28 |
|
||||
29 29 | def func():
|
||||
|
||||
RUF025.py:30:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:30:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
29 | def func():
|
||||
30 | {n: True for n in [1, 2, 2]} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -94,11 +94,11 @@ RUF025.py:30:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
32 32 |
|
||||
33 33 | def func():
|
||||
|
||||
RUF025.py:34:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:34:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
33 | def func():
|
||||
34 | {n: b"hello" for n in (1, 2, 2)} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -112,11 +112,11 @@ RUF025.py:34:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
36 36 |
|
||||
37 37 | def func():
|
||||
|
||||
RUF025.py:38:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:38:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
37 | def func():
|
||||
38 | {n: ... for n in [1, 2, 3]} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -130,11 +130,11 @@ RUF025.py:38:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
40 40 |
|
||||
41 41 | def func():
|
||||
|
||||
RUF025.py:42:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:42:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
41 | def func():
|
||||
42 | {n: False for n in {1: "a", 2: "b"}} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -148,11 +148,11 @@ RUF025.py:42:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
44 44 |
|
||||
45 45 | def func():
|
||||
|
||||
RUF025.py:46:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:46:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
45 | def func():
|
||||
46 | {(a, b): 1 for (a, b) in [(1, 2), (3, 4)]} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -166,11 +166,11 @@ RUF025.py:46:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
48 48 |
|
||||
49 49 | def func():
|
||||
|
||||
RUF025.py:54:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:54:5: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
53 | a = f()
|
||||
54 | {n: a for n in [1, 2, 3]} # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -184,12 +184,12 @@ RUF025.py:54:5: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
56 56 |
|
||||
57 57 | def func():
|
||||
|
||||
RUF025.py:59:6: RUF025 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
C420.py:59:6: C420 [*] Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
|
||||
|
|
||||
57 | def func():
|
||||
58 | values = ["a", "b", "c"]
|
||||
59 | [{n: values for n in [1, 2, 3]}] # RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF025
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ C420
|
||||
|
|
||||
= help: Replace with `dict.fromkeys(iterable)`)
|
||||
|
||||
@@ -202,5 +202,3 @@ RUF025.py:59:6: RUF025 [*] Unnecessary dict comprehension for iterable; use `dic
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | # Non-violation cases: RUF025
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("plotly.express", "px"),
|
||||
("polars", "pl"),
|
||||
("pyarrow", "pa"),
|
||||
("xml.etree.ElementTree", "ET"),
|
||||
];
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
|
||||
|
||||
@@ -163,8 +163,8 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
|
||||
elts: words
|
||||
.iter()
|
||||
.flat_map(|value| {
|
||||
if let Expr::Tuple(ast::ExprTuple { elts, .. }) = value {
|
||||
Left(elts.iter())
|
||||
if let Expr::Tuple(tuple) = value {
|
||||
Left(tuple.iter())
|
||||
} else {
|
||||
Right(iter::once(*value))
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_python_ast::{self as ast, Expr, ExprLambda};
|
||||
use ruff_python_ast::{Expr, ExprLambda};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
use ruff_diagnostics::{FixAvailability, Violation};
|
||||
@@ -70,8 +70,8 @@ pub(crate) fn reimplemented_container_builtin(checker: &mut Checker, expr: &Expr
|
||||
}
|
||||
|
||||
let container = match &**body {
|
||||
Expr::List(ast::ExprList { elts, .. }) if elts.is_empty() => Container::List,
|
||||
Expr::Dict(ast::ExprDict { items, .. }) if items.is_empty() => Container::Dict,
|
||||
Expr::List(list) if list.is_empty() => Container::List,
|
||||
Expr::Dict(dict) if dict.is_empty() => Container::Dict,
|
||||
_ => return,
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(ReimplementedContainerBuiltin { container }, expr.range());
|
||||
|
||||
@@ -87,13 +87,13 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCal
|
||||
.iter_keys()
|
||||
.filter_map(|key| key.and_then(as_kwarg))
|
||||
.collect();
|
||||
if kwargs.len() != dict.items.len() {
|
||||
if kwargs.len() != dict.len() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range());
|
||||
|
||||
if dict.items.is_empty() {
|
||||
if dict.is_empty() {
|
||||
diagnostic.try_set_fix(|| {
|
||||
remove_argument(
|
||||
keyword,
|
||||
|
||||
@@ -49,7 +49,7 @@ impl Violation for UnnecessarySpread {
|
||||
pub(crate) fn unnecessary_spread(checker: &mut Checker, dict: &ast::ExprDict) {
|
||||
// The first "end" is the start of the dictionary, immediately following the open bracket.
|
||||
let mut prev_end = dict.start() + TextSize::from(1);
|
||||
for ast::DictItem { key, value } in &dict.items {
|
||||
for ast::DictItem { key, value } in dict {
|
||||
if key.is_none() {
|
||||
// We only care about when the key is None which indicates a spread `**`
|
||||
// inside a dict.
|
||||
|
||||
@@ -162,12 +162,11 @@ pub(crate) fn bad_generator_return_type(
|
||||
// - if not, don't emit the diagnostic
|
||||
let yield_type_info = match returns {
|
||||
ast::Expr::Subscript(ast::ExprSubscript { slice, .. }) => match slice.as_ref() {
|
||||
ast::Expr::Tuple(slice_tuple @ ast::ExprTuple { .. }) => {
|
||||
ast::Expr::Tuple(slice_tuple) => {
|
||||
if !slice_tuple
|
||||
.elts
|
||||
.iter()
|
||||
.skip(1)
|
||||
.all(|elt| is_any_or_none(elt, semantic))
|
||||
.all(|element| is_any_or_none(element, semantic))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -67,8 +67,8 @@ pub(crate) fn redundant_literal_union<'a>(checker: &mut Checker, union: &'a Expr
|
||||
let mut func = |expr: &'a Expr, _parent: &'a Expr| {
|
||||
if let Expr::Subscript(ast::ExprSubscript { value, slice, .. }) = expr {
|
||||
if checker.semantic().match_typing_expr(value, "Literal") {
|
||||
if let Expr::Tuple(ast::ExprTuple { elts, .. }) = slice.as_ref() {
|
||||
typing_literal_exprs.extend(elts.iter());
|
||||
if let Expr::Tuple(tuple) = &**slice {
|
||||
typing_literal_exprs.extend(tuple);
|
||||
} else {
|
||||
typing_literal_exprs.push(slice);
|
||||
}
|
||||
|
||||
@@ -298,10 +298,10 @@ fn is_valid_default_value_with_annotation(
|
||||
.iter()
|
||||
.all(|e| is_valid_default_value_with_annotation(e, false, locator, semantic));
|
||||
}
|
||||
Expr::Dict(ast::ExprDict { items, range: _ }) => {
|
||||
Expr::Dict(dict) => {
|
||||
return allow_container
|
||||
&& items.len() <= 10
|
||||
&& items.iter().all(|ast::DictItem { key, value }| {
|
||||
&& dict.len() <= 10
|
||||
&& dict.iter().all(|ast::DictItem { key, value }| {
|
||||
key.as_ref().is_some_and(|key| {
|
||||
is_valid_default_value_with_annotation(key, false, locator, semantic)
|
||||
}) && is_valid_default_value_with_annotation(value, false, locator, semantic)
|
||||
|
||||
@@ -70,19 +70,15 @@ pub(crate) fn unnecessary_literal_union<'a>(checker: &mut Checker, expr: &'a Exp
|
||||
literal_subscript = Some(value.as_ref());
|
||||
}
|
||||
|
||||
let slice = &**slice;
|
||||
|
||||
// flatten already-unioned literals to later union again
|
||||
if let Expr::Tuple(ast::ExprTuple {
|
||||
elts,
|
||||
range: _,
|
||||
ctx: _,
|
||||
parenthesized: _,
|
||||
}) = slice.as_ref()
|
||||
{
|
||||
for expr in elts {
|
||||
literal_exprs.push(expr);
|
||||
if let Expr::Tuple(tuple) = slice {
|
||||
for item in tuple {
|
||||
literal_exprs.push(item);
|
||||
}
|
||||
} else {
|
||||
literal_exprs.push(slice.as_ref());
|
||||
literal_exprs.push(slice);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -181,7 +181,7 @@ fn version_check(
|
||||
}
|
||||
|
||||
// Tuple comparison, e.g., `sys.version_info == (3, 4)`.
|
||||
let Expr::Tuple(ast::ExprTuple { elts, .. }) = comparator else {
|
||||
let Expr::Tuple(tuple) = comparator else {
|
||||
if checker.enabled(Rule::UnrecognizedVersionInfoCheck) {
|
||||
checker
|
||||
.diagnostics
|
||||
@@ -190,7 +190,7 @@ fn version_check(
|
||||
return;
|
||||
};
|
||||
|
||||
if !elts.iter().all(is_int_constant) {
|
||||
if !tuple.iter().all(is_int_constant) {
|
||||
// All tuple elements must be integers, e.g., `sys.version_info == (3, 4)` instead of
|
||||
// `sys.version_info == (3.0, 4)`.
|
||||
if checker.enabled(Rule::UnrecognizedVersionInfoCheck) {
|
||||
@@ -198,7 +198,7 @@ fn version_check(
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(UnrecognizedVersionInfoCheck, test.range()));
|
||||
}
|
||||
} else if elts.len() > 2 {
|
||||
} else if tuple.len() > 2 {
|
||||
// Must compare against major and minor version only, e.g., `sys.version_info == (3, 4)`
|
||||
// instead of `sys.version_info == (3, 4, 0)`.
|
||||
if checker.enabled(Rule::PatchVersionComparison) {
|
||||
@@ -216,7 +216,7 @@ fn version_check(
|
||||
_ => return,
|
||||
};
|
||||
|
||||
if elts.len() != expected_length {
|
||||
if tuple.len() != expected_length {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
WrongTupleLengthVersionComparison { expected_length },
|
||||
test.range(),
|
||||
|
||||
@@ -28,10 +28,10 @@ mod tests {
|
||||
Rule::PytestFixtureIncorrectParenthesesStyle,
|
||||
Path::new("PT001.py"),
|
||||
Settings {
|
||||
fixture_parentheses: false,
|
||||
fixture_parentheses: true,
|
||||
..Settings::default()
|
||||
},
|
||||
"PT001_no_parentheses"
|
||||
"PT001_parentheses"
|
||||
)]
|
||||
#[test_case(
|
||||
Rule::PytestFixturePositionalArgs,
|
||||
@@ -252,10 +252,10 @@ mod tests {
|
||||
Rule::PytestIncorrectMarkParenthesesStyle,
|
||||
Path::new("PT023.py"),
|
||||
Settings {
|
||||
mark_parentheses: false,
|
||||
mark_parentheses: true,
|
||||
..Settings::default()
|
||||
},
|
||||
"PT023_no_parentheses"
|
||||
"PT023_parentheses"
|
||||
)]
|
||||
#[test_case(
|
||||
Rule::PytestUnnecessaryAsyncioMarkOnFixture,
|
||||
|
||||
@@ -32,10 +32,9 @@ use super::helpers::{
|
||||
/// optional.
|
||||
///
|
||||
/// Either removing those unnecessary parentheses _or_ requiring them for all
|
||||
/// fixtures is fine, but it's best to be consistent.
|
||||
///
|
||||
/// In [preview], this rule defaults to removing unnecessary parentheses, to match
|
||||
/// the behavior of official pytest projects.
|
||||
/// fixtures is fine, but it's best to be consistent. The rule defaults to
|
||||
/// removing unnecessary parentheses, to match the documentation of the
|
||||
/// official pytest projects.
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
@@ -62,8 +61,6 @@ use super::helpers::{
|
||||
///
|
||||
/// ## References
|
||||
/// - [`pytest` documentation: API Reference: Fixtures](https://docs.pytest.org/en/latest/reference/reference.html#fixtures-api)
|
||||
///
|
||||
/// [preview]: https://docs.astral.sh/ruff/preview/
|
||||
#[violation]
|
||||
pub struct PytestFixtureIncorrectParenthesesStyle {
|
||||
expected: Parentheses,
|
||||
@@ -170,6 +167,10 @@ impl AlwaysFixableViolation for PytestExtraneousScopeFunction {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## Deprecation
|
||||
/// Marking fixtures that do not return a value with an underscore
|
||||
/// isn't a practice recommended by the pytest community.
|
||||
///
|
||||
/// ## What it does
|
||||
/// Checks for `pytest` fixtures that do not return a value, but are not named
|
||||
/// with a leading underscore.
|
||||
@@ -227,6 +228,10 @@ impl Violation for PytestMissingFixtureNameUnderscore {
|
||||
}
|
||||
}
|
||||
|
||||
/// ## Deprecation
|
||||
/// Marking fixtures that do not return a value with an underscore
|
||||
/// isn't a practice recommended by the pytest community.
|
||||
///
|
||||
/// ## What it does
|
||||
/// Checks for `pytest` fixtures that return a value, but are named with a
|
||||
/// leading underscore.
|
||||
@@ -930,9 +935,7 @@ pub(crate) fn fixture(
|
||||
check_fixture_decorator(checker, name, decorator);
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::PytestDeprecatedYieldFixture)
|
||||
&& checker.settings.flake8_pytest_style.fixture_parentheses
|
||||
{
|
||||
if checker.enabled(Rule::PytestDeprecatedYieldFixture) {
|
||||
check_fixture_decorator_name(checker, decorator);
|
||||
}
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ use super::helpers::get_mark_decorators;
|
||||
/// without parentheses, depending on the [`lint.flake8-pytest-style.mark-parentheses`]
|
||||
/// setting.
|
||||
///
|
||||
/// In [preview], this rule defaults to removing unnecessary parentheses, to match
|
||||
/// the behavior of official pytest projects.
|
||||
/// The rule defaults to removing unnecessary parentheses,
|
||||
/// to match the documentation of the official pytest projects.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// If a `@pytest.mark.<marker>()` doesn't take any arguments, the parentheses are
|
||||
@@ -49,8 +49,6 @@ use super::helpers::get_mark_decorators;
|
||||
///
|
||||
/// ## References
|
||||
/// - [`pytest` documentation: Marks](https://docs.pytest.org/en/latest/reference/reference.html#marks)
|
||||
///
|
||||
/// [preview]: https://docs.astral.sh/ruff/preview/
|
||||
#[violation]
|
||||
pub struct PytestIncorrectMarkParenthesesStyle {
|
||||
mark_name: String,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user