Compare commits
92 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
386ca7c9a1 | ||
|
|
40c5abf16e | ||
|
|
7e7aec7d74 | ||
|
|
4b5538f74e | ||
|
|
36d134fd41 | ||
|
|
0b7d6b9097 | ||
|
|
994e2e0903 | ||
|
|
bc79f540e4 | ||
|
|
3a78b59314 | ||
|
|
5f83851329 | ||
|
|
484ce7b8fc | ||
|
|
1c75071136 | ||
|
|
51bca19c1d | ||
|
|
a8a312e862 | ||
|
|
33c31cda27 | ||
|
|
cd9fbeb560 | ||
|
|
84e96cdcd9 | ||
|
|
248590224a | ||
|
|
bbc55cdb04 | ||
|
|
2792439eac | ||
|
|
0694aee1b6 | ||
|
|
a17b5c134a | ||
|
|
42f61535b5 | ||
|
|
1c01b3c934 | ||
|
|
39b9a1637f | ||
|
|
2c692e3acf | ||
|
|
24add5f56c | ||
|
|
0b7736ad79 | ||
|
|
eef85067c8 | ||
|
|
da98fab4ae | ||
|
|
0f37a98d91 | ||
|
|
f38624824d | ||
|
|
159422071e | ||
|
|
6eaacf96be | ||
|
|
eb15371453 | ||
|
|
198b301baf | ||
|
|
c8c575dd43 | ||
|
|
6e54cd8233 | ||
|
|
a688a237d7 | ||
|
|
bda2a0007a | ||
|
|
32d165b7ad | ||
|
|
ac79bf4ee9 | ||
|
|
376eab3a53 | ||
|
|
08be7bd285 | ||
|
|
f5241451d8 | ||
|
|
c9fe0708cb | ||
|
|
09f8c487ea | ||
|
|
1e7233a8eb | ||
|
|
f967f344fc | ||
|
|
095f005bf4 | ||
|
|
0f04aa2a5f | ||
|
|
ad7ba77fff | ||
|
|
77d43795f8 | ||
|
|
4357f2be0f | ||
|
|
e5c1f95545 | ||
|
|
227ff62a4e | ||
|
|
d8e4902516 | ||
|
|
e66739884f | ||
|
|
5fd827545b | ||
|
|
c1ddcb8a60 | ||
|
|
48a317d5f6 | ||
|
|
74e18b6cff | ||
|
|
21d02cd51f | ||
|
|
049e77b939 | ||
|
|
b9bfb81e36 | ||
|
|
2d4fae45d9 | ||
|
|
726adb7efc | ||
|
|
dbdfdeb0e1 | ||
|
|
1c41789c2a | ||
|
|
2f9de335db | ||
|
|
ba61bb6a6c | ||
|
|
17ab71ff75 | ||
|
|
4ad4e3e091 | ||
|
|
6ced5122e4 | ||
|
|
7d55b417f7 | ||
|
|
f0e0efc46f | ||
|
|
1efa2e07ad | ||
|
|
df3932f750 | ||
|
|
817d0b4902 | ||
|
|
ffd8e958fc | ||
|
|
ed33b75bad | ||
|
|
262e768fd3 | ||
|
|
bc3a9ce003 | ||
|
|
48005d87f8 | ||
|
|
e37e9c2ca3 | ||
|
|
8fde63b323 | ||
|
|
97338e4cd6 | ||
|
|
9645790a8b | ||
|
|
18800c6884 | ||
|
|
fd638a2e54 | ||
|
|
fa1459d56e | ||
|
|
d93c5811ea |
@@ -13,7 +13,6 @@ repos:
|
||||
- --disable
|
||||
- MD013 # line-length
|
||||
- MD033 # no-inline-html
|
||||
- MD041 # first-line-h1
|
||||
- --
|
||||
|
||||
- repo: local
|
||||
@@ -46,5 +45,16 @@ repos:
|
||||
pass_filenames: false
|
||||
exclude: target
|
||||
|
||||
# Black
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, clippy, dev-generate-all]
|
||||
|
||||
@@ -56,9 +56,9 @@ Prior to opening a pull request, ensure that your code has been auto-formatted,
|
||||
and that it passes both the lint and test validation checks:
|
||||
|
||||
```shell
|
||||
cargo fmt --all # Auto-formatting...
|
||||
cargo fmt # Auto-formatting...
|
||||
cargo clippy --fix --workspace --all-targets --all-features # Linting...
|
||||
cargo test --all # Testing...
|
||||
cargo test # Testing...
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
|
||||
@@ -135,7 +135,7 @@ Run `cargo dev generate-all` to generate the code for your new fixture. Then run
|
||||
locally with (e.g.) `cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
|
||||
|
||||
Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
|
||||
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test --all`.
|
||||
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test`.
|
||||
Your test will fail, but you'll be prompted to follow-up with `cargo insta review`. Accept the
|
||||
generated snapshot, then commit the snapshot file alongside the rest of your changes.
|
||||
|
||||
@@ -152,6 +152,9 @@ This implies that rule names:
|
||||
* should not contain instructions on what you what you should use instead
|
||||
(these belong in the rule documentation and the `autofix_title` for rules that have autofix)
|
||||
|
||||
When re-implementing rules from other linters, this convention is given more importance than
|
||||
preserving the original rule name.
|
||||
|
||||
### Example: Adding a new configuration option
|
||||
|
||||
Ruff's user-facing settings live in a few different places.
|
||||
|
||||
195
Cargo.lock
generated
195
Cargo.lock
generated
@@ -2,6 +2,12 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "Inflector"
|
||||
version = "0.11.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
@@ -86,7 +92,7 @@ version = "2.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9834fcc22e0874394a010230586367d4a3e9f11b560f469262678547e1d2575e"
|
||||
dependencies = [
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"doc-comment",
|
||||
"predicates",
|
||||
"predicates-core",
|
||||
@@ -175,9 +181,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832"
|
||||
checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"once_cell",
|
||||
@@ -298,9 +304,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.1.1"
|
||||
version = "4.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d6540eedc41f8a5a76cf3d8d458057dcdf817be4158a55b5f861f7a5483de75"
|
||||
checksum = "bd125be87bf4c255ebc50de0b7f4d2a6201e8ac3dc86e39c0ad081dc5e7236fe"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
]
|
||||
@@ -318,9 +324,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete_fig"
|
||||
version = "4.1.0"
|
||||
version = "4.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf0c76d8fcf782a1102ccfcd10ca8246e7fdd609c1cd6deddbb96cb638e9bb5c"
|
||||
checksum = "63a06158a72dbb088f864887b4409fd22600ba379f3cee3040f842234cc5c2d0"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
"clap_complete",
|
||||
@@ -550,9 +556,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8"
|
||||
checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cxxbridge-flags",
|
||||
@@ -562,9 +568,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx-build"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38"
|
||||
checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"codespan-reporting",
|
||||
@@ -577,15 +583,26 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-flags"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03"
|
||||
checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-macro"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263"
|
||||
checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derivative"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -753,7 +770,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.250"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
@@ -840,7 +857,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"fnv",
|
||||
"log",
|
||||
"regex",
|
||||
@@ -990,9 +1007,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "insta"
|
||||
version = "1.26.0"
|
||||
version = "1.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff"
|
||||
checksum = "fea5b3894afe466b4bcf0388630fc15e11938a6074af0cd637c825ba2ec8a099"
|
||||
dependencies = [
|
||||
"console",
|
||||
"lazy_static",
|
||||
@@ -1024,10 +1041,23 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.3"
|
||||
name = "is-macro"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef"
|
||||
checksum = "8a7d079e129b77477a49c5c4f1cfe9ce6c2c909ef52520693e8e811a714c7b20"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"io-lifetimes",
|
||||
@@ -1035,15 +1065,6 @@ dependencies = [
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is_executable"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa9acdc6d67b75e626ad644734e8bc6df893d9cd2a834129065d3dd6158ea9c8"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.10.5"
|
||||
@@ -1171,7 +1192,7 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"chic",
|
||||
"itertools",
|
||||
@@ -1186,7 +1207,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -1292,14 +1313,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
|
||||
checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
"windows-sys 0.42.0",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1428,18 +1449,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num_enum"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d829733185c1ca374f17e52b762f24f535ec625d2cc1f070e34c8a9068f341b"
|
||||
checksum = "3e0072973714303aa6e3631c7e8e777970cf4bdd25dc4932e41031027b8bcc4e"
|
||||
dependencies = [
|
||||
"num_enum_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_enum_derive"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2be1598bf1c313dcdd12092e3f1920f463462525a21b7b4e11b4168353d0123e"
|
||||
checksum = "0629cbd6b897944899b1f10496d9c4a7ac5878d45fd61bc22e9e79bfbbc29597"
|
||||
dependencies = [
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
@@ -1449,9 +1470,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.17.0"
|
||||
version = "1.17.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
|
||||
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
|
||||
|
||||
[[package]]
|
||||
name = "oorandom"
|
||||
@@ -1686,6 +1707,17 @@ dependencies = [
|
||||
"plotters-backend",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pmutil"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.17"
|
||||
@@ -1910,6 +1942,28 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccc7ce6435c33898517a30e85578cd204cbb696875efb93dec19a2d31294f810"
|
||||
dependencies = [
|
||||
"result-like-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like-derive"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fabf0a2e54f711c68c50d49f648a1a8a37adcb57353f518ac4df374f0788f42"
|
||||
dependencies = [
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn-ext",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
version = "0.16.20"
|
||||
@@ -1927,7 +1981,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.250"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bisection",
|
||||
@@ -1939,6 +1993,7 @@ dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"criterion",
|
||||
"derivative",
|
||||
"dirs",
|
||||
"fern",
|
||||
"getrandom",
|
||||
@@ -1947,7 +2002,7 @@ dependencies = [
|
||||
"ignore",
|
||||
"imperative",
|
||||
"insta",
|
||||
"is_executable",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
"js-sys",
|
||||
"libcst",
|
||||
@@ -1959,8 +2014,10 @@ dependencies = [
|
||||
"once_cell",
|
||||
"path-absolutize",
|
||||
"regex",
|
||||
"result-like",
|
||||
"ruff_macros",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
@@ -1983,7 +2040,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.0.250"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -2012,6 +2069,7 @@ dependencies = [
|
||||
"rustc-hash",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"similar",
|
||||
"strum",
|
||||
"textwrap",
|
||||
@@ -2082,15 +2140,41 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
"insta",
|
||||
"is-macro",
|
||||
"once_cell",
|
||||
"ruff_formatter",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"ruff_testing_macros",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
"similar",
|
||||
"test-case",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_rustpython"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"once_cell",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_testing_macros"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_text_size"
|
||||
version = "0.0.0"
|
||||
@@ -2146,7 +2230,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-ast"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=c4b67896662b16b5699a779c0e52aa0ca2587fec#c4b67896662b16b5699a779c0e52aa0ca2587fec"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"rustpython-compiler-core",
|
||||
@@ -2155,7 +2239,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-common"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=c4b67896662b16b5699a779c0e52aa0ca2587fec#c4b67896662b16b5699a779c0e52aa0ca2587fec"
|
||||
dependencies = [
|
||||
"ascii",
|
||||
"bitflags",
|
||||
@@ -2180,7 +2264,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-compiler-core"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=c4b67896662b16b5699a779c0e52aa0ca2587fec#c4b67896662b16b5699a779c0e52aa0ca2587fec"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bitflags",
|
||||
@@ -2197,7 +2281,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-parser"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=c4b67896662b16b5699a779c0e52aa0ca2587fec#c4b67896662b16b5699a779c0e52aa0ca2587fec"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"anyhow",
|
||||
@@ -2468,15 +2552,24 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.107"
|
||||
version = "1.0.108"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
|
||||
checksum = "d56e159d99e6c2b93995d171050271edb50ecc5288fbc7cc17de8fdce4e58c14"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn-ext"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b86cb2b68c5b3c078cac02588bc23f3c04bb828c5d3aedd17980876ec6a7be6"
|
||||
dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.3.0"
|
||||
|
||||
@@ -3,18 +3,19 @@ members = ["crates/*"]
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
rust-version = "1.65.0"
|
||||
rust-version = "1.67.0"
|
||||
|
||||
[workspace.dependencies]
|
||||
anyhow = { version = "1.0.66" }
|
||||
clap = { version = "4.0.1", features = ["derive"] }
|
||||
itertools = { version = "0.10.5" }
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a8725d161fe8b3ed73a6758b21e177" }
|
||||
is-macro = { version = "0.2.2" }
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
|
||||
once_cell = { version = "1.16.0" }
|
||||
regex = { version = "1.6.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "ddf497623ae56d21aa4166ff1c0725a7db67e955" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "ddf497623ae56d21aa4166ff1c0725a7db67e955" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "c4b67896662b16b5699a779c0e52aa0ca2587fec" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "c4b67896662b16b5699a779c0e52aa0ca2587fec" }
|
||||
schemars = { version = "0.8.11" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = { version = "1.0.87" }
|
||||
|
||||
496
README.md
496
README.md
@@ -30,11 +30,11 @@ An extremely fast Python linter, written in Rust.
|
||||
* 🤝 Python 3.11 compatibility
|
||||
* 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
* 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
* 📏 Over [400 built-in rules](https://beta.ruff.rs/docs/rules/) (and growing)
|
||||
* 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/) (and growing)
|
||||
* ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
|
||||
* 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
|
||||
* ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
|
||||
* 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](#pyprojecttoml-discovery)
|
||||
* 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
functionality behind a single, common interface.
|
||||
@@ -97,21 +97,19 @@ developer of [Zulip](https://github.com/zulip/zulip):
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
1. [Installation and Usage](#installation-and-usage)
|
||||
1. [Getting Started](#getting-started)
|
||||
2. [Configuration](#configuration)
|
||||
3. [Supported Rules](#supported-rules)
|
||||
3. [Rules](#rules)
|
||||
4. [Contributing](#contributing)
|
||||
5. [Support](#support)
|
||||
6. [Acknowledgements](#acknowledgements)
|
||||
7. [Who's Using Ruff?](#whos-using-ruff)
|
||||
8. [License](#license)
|
||||
|
||||
## Installation and Usage
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
<!-- Begin section: Installation and Usage -->
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
@@ -120,31 +118,8 @@ Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
pip install ruff
|
||||
```
|
||||
|
||||
For **macOS Homebrew** and **Linuxbrew** users, Ruff is also available as [`ruff`](https://formulae.brew.sh/formula/ruff) on Homebrew:
|
||||
|
||||
```shell
|
||||
brew install ruff
|
||||
```
|
||||
|
||||
For **Conda** users, Ruff is also available as [`ruff`](https://anaconda.org/conda-forge/ruff) on `conda-forge`:
|
||||
|
||||
```shell
|
||||
conda install -c conda-forge ruff
|
||||
```
|
||||
|
||||
For **Arch Linux** users, Ruff is also available as [`ruff`](https://archlinux.org/packages/community/x86_64/ruff/) on the official repositories:
|
||||
|
||||
```shell
|
||||
pacman -S ruff
|
||||
```
|
||||
|
||||
For **Alpine** users, Ruff is also available as [`ruff`](https://pkgs.alpinelinux.org/package/edge/testing/x86_64/ruff) on the testing repositories:
|
||||
|
||||
```shell
|
||||
apk add ruff
|
||||
```
|
||||
|
||||
[](https://repology.org/project/ruff-python-linter/versions)
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
and with [a variety of other package managers](https://beta.ruff.rs/docs/installation/).
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -157,51 +132,30 @@ ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
|
||||
ruff check path/to/code/to/file.py # Lint `file.py`
|
||||
```
|
||||
|
||||
You can run Ruff in `--watch` mode to automatically re-run on-change:
|
||||
|
||||
```shell
|
||||
ruff check path/to/code/ --watch
|
||||
```
|
||||
|
||||
Ruff also works with [pre-commit](https://pre-commit.com):
|
||||
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
|
||||
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.250'
|
||||
rev: 'v0.0.253'
|
||||
hooks:
|
||||
- id: ruff
|
||||
```
|
||||
|
||||
Or, to enable autofix:
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/charliermarsh/ruff-vscode) or
|
||||
alongside any other editor through the [Ruff LSP](https://github.com/charliermarsh/ruff-lsp).
|
||||
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.250'
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
```
|
||||
### Configuration
|
||||
|
||||
<!-- End section: Installation and Usage -->
|
||||
|
||||
## Configuration
|
||||
|
||||
<!-- Begin section: Configuration -->
|
||||
|
||||
Ruff can be configured via a `pyproject.toml` file, a `ruff.toml` file, or through the command line.
|
||||
|
||||
For a complete enumeration of the available configuration options, see the
|
||||
[documentation](https://beta.ruff.rs/docs/settings/).
|
||||
|
||||
### Configure via `pyproject.toml`
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://beta.ruff.rs/docs/configuration/), or [_Settings_](https://beta.ruff.rs/docs/settings/)
|
||||
for a complete list of all configuration options).
|
||||
|
||||
If left unspecified, the default configuration is equivalent to:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Enable Pyflakes `E` and `F` codes by default.
|
||||
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
|
||||
select = ["E", "F"]
|
||||
ignore = []
|
||||
|
||||
@@ -248,428 +202,39 @@ target-version = "py310"
|
||||
max-complexity = 10
|
||||
```
|
||||
|
||||
As an example, the following would configure Ruff to: (1) enforce flake8-bugbear rules, in addition
|
||||
to the defaults; (2) avoid enforcing line-length violations (`E501`); (3) avoid attempting to fix
|
||||
flake8-bugbear (`B`) violations; and (3) ignore import-at-top-of-file violations (`E402`) in
|
||||
`__init__.py` files:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Enable flake8-bugbear (`B`) rules.
|
||||
select = ["E", "F", "B"]
|
||||
|
||||
# Never enforce `E501` (line length violations).
|
||||
ignore = ["E501"]
|
||||
|
||||
# Avoid trying to fix flake8-bugbear (`B`) violations.
|
||||
unfixable = ["B"]
|
||||
|
||||
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
|
||||
[tool.ruff.per-file-ignores]
|
||||
"__init__.py" = ["E402"]
|
||||
"path/to/file.py" = ["E402"]
|
||||
```
|
||||
|
||||
Plugin configurations should be expressed as subsections, e.g.:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Add "Q" to the list of enabled codes.
|
||||
select = ["E", "F", "Q"]
|
||||
|
||||
[tool.ruff.flake8-quotes]
|
||||
docstring-quotes = "double"
|
||||
```
|
||||
|
||||
Ruff mirrors Flake8's rule code system, in which each rule code consists of a one-to-three letter
|
||||
prefix, followed by three digits (e.g., `F401`). The prefix indicates that "source" of the rule
|
||||
(e.g., `F` for Pyflakes, `E` for pycodestyle, `ANN` for flake8-annotations). The set of enabled
|
||||
rules is determined by the `select` and `ignore` options, which support both the full code (e.g.,
|
||||
`F401`) and the prefix (e.g., `F`).
|
||||
|
||||
As a special-case, Ruff also supports the `ALL` code, which enables all rules. Note that some of the
|
||||
pydocstyle rules conflict (e.g., `D203` and `D211`) as they represent alternative docstring
|
||||
formats. Enabling `ALL` without further configuration may result in suboptimal behavior, especially
|
||||
for the pydocstyle plugin.
|
||||
|
||||
If you're wondering how to configure Ruff, here are some **recommended guidelines**:
|
||||
|
||||
* Prefer `select` and `ignore` over `extend-select` and `extend-ignore`, to make your rule set
|
||||
explicit.
|
||||
* Use `ALL` with discretion. Enabling `ALL` will implicitly enable new rules whenever you upgrade.
|
||||
* Start with a small set of rules (`select = ["E", "F"]`) and add a category at-a-time. For example,
|
||||
you might consider expanding to `select = ["E", "F", "B"]` to enable the popular flake8-bugbear
|
||||
extension.
|
||||
* By default, Ruff's autofix is aggressive. If you find that it's too aggressive for your liking,
|
||||
consider turning off autofix for specific rules or categories (see: [FAQ](https://beta.ruff.rs/docs/faq/#ruff-tried-to-fix-something-but-it-broke-my-code-what-should-i-do)).
|
||||
|
||||
### Configure via `ruff.toml`
|
||||
|
||||
As an alternative to `pyproject.toml`, Ruff will also respect a `ruff.toml` file, which implements
|
||||
an equivalent schema (though the `[tool.ruff]` hierarchy can be omitted). For example, the
|
||||
`pyproject.toml` described above would be represented via the following `ruff.toml`:
|
||||
|
||||
```toml
|
||||
# Enable flake8-bugbear (`B`) rules.
|
||||
select = ["E", "F", "B"]
|
||||
|
||||
# Never enforce `E501` (line length violations).
|
||||
ignore = ["E501"]
|
||||
|
||||
# Avoid trying to fix flake8-bugbear (`B`) violations.
|
||||
unfixable = ["B"]
|
||||
|
||||
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
|
||||
[per-file-ignores]
|
||||
"__init__.py" = ["E402"]
|
||||
"path/to/file.py" = ["E402"]
|
||||
```
|
||||
|
||||
For a full list of configurable options, see the [list of all options](https://beta.ruff.rs/docs/settings/).
|
||||
|
||||
### Command-line interface
|
||||
|
||||
Some configuration settings can be provided via the command-line, such as those related to
|
||||
Some configuration options can be provided via the command-line, such as those related to
|
||||
rule enablement and disablement, file discovery, logging level, and more:
|
||||
|
||||
```shell
|
||||
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||
```
|
||||
|
||||
See `ruff help` for more on Ruff's top-level commands:
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
|
||||
linting command.
|
||||
|
||||
<!-- Begin auto-generated command help. -->
|
||||
|
||||
```text
|
||||
Ruff: An extremely fast Python linter.
|
||||
|
||||
Usage: ruff [OPTIONS] <COMMAND>
|
||||
|
||||
Commands:
|
||||
check Run Ruff on the given files or directories (default)
|
||||
rule Explain a rule
|
||||
config List or describe the available configuration options
|
||||
linter List all supported upstream linters
|
||||
clean Clear any caches in the current directory and any subdirectories
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
|
||||
Options:
|
||||
-h, --help Print help
|
||||
-V, --version Print version
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
|
||||
For help with a specific command, see: `ruff help <command>`.
|
||||
```
|
||||
|
||||
<!-- End auto-generated command help. -->
|
||||
|
||||
Or `ruff help check` for more on the linting command:
|
||||
|
||||
<!-- Begin auto-generated subcommand help. -->
|
||||
|
||||
```text
|
||||
Run Ruff on the given files or directories (default)
|
||||
|
||||
Usage: ruff check [OPTIONS] [FILES]...
|
||||
|
||||
Arguments:
|
||||
[FILES]... List of files or directories to check
|
||||
|
||||
Options:
|
||||
--fix
|
||||
Attempt to automatically fix lint violations
|
||||
--show-source
|
||||
Show violations with source code
|
||||
--show-fixes
|
||||
Show an enumeration of all autofixed lint violations
|
||||
--diff
|
||||
Avoid writing any fixed files back; instead, output a diff for each changed file to stdout
|
||||
-w, --watch
|
||||
Run in watch mode by re-running whenever files change
|
||||
--fix-only
|
||||
Fix any fixable lint violations, but don't report on leftover violations. Implies `--fix`
|
||||
--format <FORMAT>
|
||||
Output serialization format for violations [env: RUFF_FORMAT=] [possible values: text, json, junit, grouped, github, gitlab, pylint]
|
||||
--target-version <TARGET_VERSION>
|
||||
The minimum Python version that should be supported
|
||||
--config <CONFIG>
|
||||
Path to the `pyproject.toml` or `ruff.toml` file to use for configuration
|
||||
--statistics
|
||||
Show counts for every rule with at least one violation
|
||||
--add-noqa
|
||||
Enable automatic additions of `noqa` directives to failing lines
|
||||
--show-files
|
||||
See the files Ruff will be run against with the current settings
|
||||
--show-settings
|
||||
See the settings Ruff will use to lint a given Python file
|
||||
-h, --help
|
||||
Print help
|
||||
|
||||
Rule selection:
|
||||
--select <RULE_CODE>
|
||||
Comma-separated list of rule codes to enable (or ALL, to enable all rules)
|
||||
--ignore <RULE_CODE>
|
||||
Comma-separated list of rule codes to disable
|
||||
--extend-select <RULE_CODE>
|
||||
Like --select, but adds additional rule codes on top of the selected ones
|
||||
--per-file-ignores <PER_FILE_IGNORES>
|
||||
List of mappings from file pattern to code to exclude
|
||||
--fixable <RULE_CODE>
|
||||
List of rule codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
--unfixable <RULE_CODE>
|
||||
List of rule codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
|
||||
File selection:
|
||||
--exclude <FILE_PATTERN> List of paths, used to omit files and/or directories from analysis
|
||||
--extend-exclude <FILE_PATTERN> Like --exclude, but adds additional files and directories on top of those already excluded
|
||||
--respect-gitignore Respect file exclusions via `.gitignore` and other standard ignore files
|
||||
--force-exclude Enforce exclusions, even for paths passed to Ruff directly on the command-line
|
||||
|
||||
Miscellaneous:
|
||||
-n, --no-cache
|
||||
Disable cache reads
|
||||
--isolated
|
||||
Ignore all configuration files
|
||||
--cache-dir <CACHE_DIR>
|
||||
Path to the cache directory [env: RUFF_CACHE_DIR=]
|
||||
--stdin-filename <STDIN_FILENAME>
|
||||
The name of the file when passing it through stdin
|
||||
-e, --exit-zero
|
||||
Exit with status code "0", even upon detecting lint violations
|
||||
--exit-non-zero-on-fix
|
||||
Exit with a non-zero status code if any files were modified via autofix, even if no lint violations remain
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
```
|
||||
|
||||
<!-- End auto-generated subcommand help. -->
|
||||
|
||||
### `pyproject.toml` discovery
|
||||
|
||||
Similar to [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff supports hierarchical configuration, such that the "closest" `pyproject.toml` file in the
|
||||
directory hierarchy is used for every individual file, with all paths in the `pyproject.toml` file
|
||||
(e.g., `exclude` globs, `src` paths) being resolved relative to the directory containing the
|
||||
`pyproject.toml` file.
|
||||
|
||||
There are a few exceptions to these rules:
|
||||
|
||||
1. In locating the "closest" `pyproject.toml` file for a given path, Ruff ignores any
|
||||
`pyproject.toml` files that lack a `[tool.ruff]` section.
|
||||
2. If a configuration file is passed directly via `--config`, those settings are used for across
|
||||
files. Any relative paths in that configuration file (like `exclude` globs or `src` paths) are
|
||||
resolved relative to the _current working directory_.
|
||||
3. If no `pyproject.toml` file is found in the filesystem hierarchy, Ruff will fall back to using
|
||||
a default configuration. If a user-specific configuration file exists
|
||||
at `${config_dir}/ruff/pyproject.toml`, that file will be used instead of the default
|
||||
configuration, with `${config_dir}` being determined via the [`dirs`](https://docs.rs/dirs/4.0.0/dirs/fn.config_dir.html)
|
||||
crate, and all relative paths being again resolved relative to the _current working directory_.
|
||||
4. Any `pyproject.toml`-supported settings that are provided on the command-line (e.g., via
|
||||
`--select`) will override the settings in _every_ resolved configuration file.
|
||||
|
||||
Unlike [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff does not merge settings across configuration files; instead, the "closest" configuration file
|
||||
is used, and any parent configuration files are ignored. In lieu of this implicit cascade, Ruff
|
||||
supports an [`extend`](https://beta.ruff.rs/docs/settings#extend) field, which allows you to inherit the settings from another
|
||||
`pyproject.toml` file, like so:
|
||||
|
||||
```toml
|
||||
# Extend the `pyproject.toml` file in the parent directory.
|
||||
extend = "../pyproject.toml"
|
||||
# But use a different line length.
|
||||
line-length = 100
|
||||
```
|
||||
|
||||
All of the above rules apply equivalently to `ruff.toml` files. If Ruff detects both a `ruff.toml`
|
||||
and `pyproject.toml` file, it will defer to the `ruff.toml`.
|
||||
|
||||
### Python file discovery
|
||||
|
||||
When passed a path on the command-line, Ruff will automatically discover all Python files in that
|
||||
path, taking into account the [`exclude`](https://beta.ruff.rs/docs/settings#exclude) and
|
||||
[`extend-exclude`](https://beta.ruff.rs/docs/settings#extend-exclude) settings in each directory's
|
||||
`pyproject.toml` file.
|
||||
|
||||
By default, Ruff will also skip any files that are omitted via `.ignore`, `.gitignore`,
|
||||
`.git/info/exclude`, and global `gitignore` files (see: [`respect-gitignore`](https://beta.ruff.rs/docs/settings#respect-gitignore)).
|
||||
|
||||
Files that are passed to `ruff` directly are always linted, regardless of the above criteria.
|
||||
For example, `ruff check /path/to/excluded/file.py` will always lint `file.py`.
|
||||
|
||||
### Rule resolution
|
||||
|
||||
The set of enabled rules is controlled via the [`select`](https://beta.ruff.rs/docs/settings#select)
|
||||
and [`ignore`](https://beta.ruff.rs/docs/settings#ignore) settings, along with the
|
||||
[`extend-select`](https://beta.ruff.rs/docs/settings#extend-select) and
|
||||
[`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore) modifiers.
|
||||
|
||||
To resolve the enabled rule set, Ruff may need to reconcile `select` and `ignore` from a variety
|
||||
of sources, including the current `pyproject.toml`, any inherited `pyproject.toml` files, and the
|
||||
CLI (e.g., `--select`).
|
||||
|
||||
In those scenarios, Ruff uses the "highest-priority" `select` as the basis for the rule set, and
|
||||
then applies any `extend-select`, `ignore`, and `extend-ignore` adjustments. CLI options are given
|
||||
higher priority than `pyproject.toml` options, and the current `pyproject.toml` file is given higher
|
||||
priority than any inherited `pyproject.toml` files.
|
||||
|
||||
For example, given the following `pyproject.toml` file:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
select = ["E", "F"]
|
||||
ignore = ["F401"]
|
||||
```
|
||||
|
||||
Running `ruff check --select F401` would result in Ruff enforcing `F401`, and no other rules.
|
||||
|
||||
Running `ruff check --extend-select B` would result in Ruff enforcing the `E`, `F`, and `B` rules, with
|
||||
the exception of `F401`.
|
||||
|
||||
### Suppressing errors
|
||||
|
||||
To omit a lint rule entirely, add it to the "ignore" list via [`ignore`](https://beta.ruff.rs/docs/settings#ignore)
|
||||
or [`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore), either on the command-line
|
||||
or in your `pyproject.toml` file.
|
||||
|
||||
To ignore a violation inline, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
|
||||
To ignore an individual violation, add `# noqa: {code}` to the end of the line, like so:
|
||||
|
||||
```python
|
||||
# Ignore F841.
|
||||
x = 1 # noqa: F841
|
||||
|
||||
# Ignore E741 and F841.
|
||||
i = 1 # noqa: E741, F841
|
||||
|
||||
# Ignore _all_ violations.
|
||||
x = 1 # noqa
|
||||
```
|
||||
|
||||
Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
|
||||
will apply to the entire string, like so:
|
||||
|
||||
```python
|
||||
"""Lorem ipsum dolor sit amet.
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
|
||||
""" # noqa: E501
|
||||
```
|
||||
|
||||
To ignore all violations across an entire file, add `# ruff: noqa` to any line in the file, like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa
|
||||
```
|
||||
|
||||
To ignore a specific rule across an entire file, add `# ruff: noqa: {code}` to any line in the file,
|
||||
like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa: F841
|
||||
```
|
||||
|
||||
Or see the [`per-file-ignores`](https://beta.ruff.rs/docs/settings#per-file-ignores) configuration
|
||||
setting, which enables the same functionality via a `pyproject.toml` file.
|
||||
|
||||
Note that Ruff will also respect Flake8's `# flake8: noqa` directive, and will treat it as
|
||||
equivalent to `# ruff: noqa`.
|
||||
|
||||
#### Automatic error suppression
|
||||
|
||||
Ruff supports several workflows to aid in `noqa` management.
|
||||
|
||||
First, Ruff provides a special rule code, `RUF100`, to enforce that your `noqa` directives are
|
||||
"valid", in that the violations they _say_ they ignore are actually being triggered on that line (and
|
||||
thus suppressed). You can run `ruff check /path/to/file.py --extend-select RUF100` to flag unused `noqa`
|
||||
directives.
|
||||
|
||||
Second, Ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
|
||||
You can run `ruff check /path/to/file.py --extend-select RUF100 --fix` to automatically remove unused
|
||||
`noqa` directives.
|
||||
|
||||
Third, Ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
|
||||
migrating a new codebase to Ruff. You can run `ruff check /path/to/file.py --add-noqa` to automatically
|
||||
add `noqa` directives to all failing lines, with the appropriate rule codes.
|
||||
|
||||
#### Action comments
|
||||
|
||||
Ruff respects `isort`'s [action comments](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
(`# isort: skip_file`, `# isort: on`, `# isort: off`, `# isort: skip`, and `# isort: split`), which
|
||||
enable selectively enabling and disabling import sorting for blocks of code and other inline
|
||||
configuration.
|
||||
|
||||
See the [`isort` documentation](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
for more.
|
||||
|
||||
### Exit codes
|
||||
|
||||
By default, Ruff exits with the following status codes:
|
||||
|
||||
* `0` if no violations were found, or if all present violations were fixed automatically.
|
||||
* `1` if violations were found.
|
||||
* `2` if Ruff terminates abnormally due to invalid configuration, invalid CLI options, or an internal error.
|
||||
|
||||
This convention mirrors that of tools like ESLint, Prettier, and RuboCop.
|
||||
|
||||
Ruff supports two command-line flags that alter its exit code behavior:
|
||||
|
||||
* `--exit-zero` will cause Ruff to exit with a status code of `0` even if violations were found.
|
||||
Note that Ruff will still exit with a status code of `2` if it terminates abnormally.
|
||||
* `--exit-non-zero-on-fix` will cause Ruff to exit with a status code of `1` if violations were
|
||||
found, _even if_ all such violations were fixed automatically. Note that the use of
|
||||
`--exit-non-zero-on-fix` can result in a non-zero exit code even if no violations remain after
|
||||
autofixing.
|
||||
|
||||
### Autocompletion
|
||||
|
||||
Ruff supports autocompletion for most shells. A shell-specific completion script can be generated
|
||||
by `ruff generate-shell-completion <SHELL>`, where `<SHELL>` is one of `bash`, `elvish`, `fig`, `fish`,
|
||||
`powershell`, or `zsh`.
|
||||
|
||||
The exact steps required to enable autocompletion will vary by shell. For example instructions,
|
||||
see the [Poetry](https://python-poetry.org/docs/#enable-tab-completion-for-bash-fish-or-zsh) or
|
||||
[ripgrep](https://github.com/BurntSushi/ripgrep/blob/master/FAQ.md#complete) documentation.
|
||||
|
||||
As an example: to enable autocompletion for Zsh, run
|
||||
`ruff generate-shell-completion zsh > ~/.zfunc/_ruff`. Then add the following line to your
|
||||
`~/.zshrc` file, if they're not already present:
|
||||
|
||||
```zsh
|
||||
fpath+=~/.zfunc
|
||||
autoload -Uz compinit && compinit
|
||||
```
|
||||
|
||||
<!-- End section: Configuration -->
|
||||
|
||||
## Supported Rules
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
Ruff supports over 400 lint rules, many of which are inspired by popular tools like Flake8, isort,
|
||||
pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
**Ruff supports over 500 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
Rust as a first-party feature.
|
||||
|
||||
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
|
||||
and a [subset](https://beta.ruff.rs/docs/rules/#error-e) of the `E` category, omitting those
|
||||
stylistic rules made obsolete by the use of an autoformatter, like [Black](https://github.com/psf/black).
|
||||
stylistic rules made obsolete by the use of an autoformatter, like
|
||||
[Black](https://github.com/psf/black).
|
||||
|
||||
<!-- End section: Rules -->
|
||||
|
||||
For a complete enumeration, see the [list of rules](https://beta.ruff.rs/docs/rules/) in the
|
||||
Ruff documentation.
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md). You
|
||||
can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
[**contributing guidelines**](https://beta.ruff.rs/docs/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Support
|
||||
|
||||
@@ -678,8 +243,6 @@ or feel free to [**open a new one**](https://github.com/charliermarsh/ruff/issue
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
<!-- Begin section: Acknowledgements -->
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
@@ -702,8 +265,6 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/c
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
<!-- End section: Acknowledgements -->
|
||||
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used in a number of major open-source projects, including:
|
||||
@@ -742,6 +303,7 @@ Ruff is used in a number of major open-source projects, including:
|
||||
* [featuretools](https://github.com/alteryx/featuretools)
|
||||
* [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
* [ZenML](https://github.com/zenml-io/zenml)
|
||||
* [delta-rs](https://github.com/delta-io/delta-rs)
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.250"
|
||||
version = "0.0.253"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.250"
|
||||
version = "0.0.253"
|
||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
@@ -16,6 +16,10 @@ crate-type = ["cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
ruff_rustpython = { path = "../ruff_rustpython" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bisection = { version = "0.1.0" }
|
||||
bitflags = { version = "1.3.2" }
|
||||
@@ -23,12 +27,14 @@ cfg-if = { version = "1.0.0" }
|
||||
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
|
||||
clap = { workspace = true, features = ["derive", "env", "string"] }
|
||||
colored = { version = "2.0.0" }
|
||||
derivative = { version = "2.2.0" }
|
||||
dirs = { version = "4.0.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
glob = { version = "0.3.0" }
|
||||
globset = { version = "0.4.9" }
|
||||
ignore = { version = "0.4.18" }
|
||||
imperative = { version = "1.0.3" }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
libcst = { workspace = true }
|
||||
log = { version = "0.4.17" }
|
||||
@@ -39,8 +45,7 @@ num-traits = "0.2.15"
|
||||
once_cell = { workspace = true }
|
||||
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
|
||||
regex = { workspace = true }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
result-like = "0.4.6"
|
||||
rustc-hash = { workspace = true }
|
||||
rustpython-common = { workspace = true }
|
||||
rustpython-parser = { workspace = true }
|
||||
@@ -57,7 +62,6 @@ titlecase = { version = "2.2.1" }
|
||||
toml = { workspace = true }
|
||||
|
||||
# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
|
||||
# For (future) wasm-pack support
|
||||
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]
|
||||
getrandom = { version = "0.2.7", features = ["js"] }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
@@ -66,9 +70,6 @@ serde-wasm-bindgen = { version = "0.4" }
|
||||
js-sys = { version = "0.3.60" }
|
||||
wasm-bindgen = { version = "0.2.83" }
|
||||
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies]
|
||||
is_executable = "1.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { version = "1.19.0", features = ["yaml", "redactions"] }
|
||||
test-case = { version = "2.2.2" }
|
||||
|
||||
@@ -107,3 +107,7 @@ class Foo:
|
||||
# ANN101
|
||||
def foo(self, /, a: int, b: int) -> int:
|
||||
pass
|
||||
|
||||
|
||||
# OK
|
||||
def f(*args: *tuple[int]) -> None: ...
|
||||
|
||||
@@ -61,3 +61,13 @@ if token == "3\t4":
|
||||
|
||||
if token == "5\r6":
|
||||
pass
|
||||
|
||||
|
||||
# These should not be flagged
|
||||
passed_msg = "You have passed!"
|
||||
compassion = "Please don't match!"
|
||||
impassable = "You shall not pass!"
|
||||
passwords = ""
|
||||
passphrases = ""
|
||||
tokens = ""
|
||||
secrets = ""
|
||||
|
||||
@@ -59,6 +59,10 @@ getattr(someobj, attrname, False)
|
||||
mylist.index(True)
|
||||
int(True)
|
||||
str(int(False))
|
||||
cfg.get("hello", True)
|
||||
cfg.getint("hello", True)
|
||||
cfg.getfloat("hello", True)
|
||||
cfg.getboolean("hello", True)
|
||||
|
||||
|
||||
class Registry:
|
||||
|
||||
@@ -626,3 +626,8 @@ result = function(
|
||||
bar,
|
||||
**{'ham': spam}
|
||||
)
|
||||
|
||||
# Make sure the COM812 and UP034 rules don't autofix simultaneously and cause a syntax error.
|
||||
the_first_one = next(
|
||||
(i for i in range(10) if i // 2 == 0) # COM812 fix should include the final bracket
|
||||
)
|
||||
|
||||
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
from django.shortcuts import render
|
||||
|
||||
|
||||
def test_view1(request):
|
||||
return render(request, "index.html", locals())
|
||||
|
||||
|
||||
def test_view2(request):
|
||||
return render(request, "index.html", context=locals())
|
||||
|
||||
|
||||
def test_view3(request):
|
||||
return render(request, "index.html")
|
||||
|
||||
|
||||
def test_view4(request):
|
||||
return render(request, "index.html", {})
|
||||
|
||||
|
||||
def test_view5(request):
|
||||
return render(request, "index.html", context={})
|
||||
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
exclude = ["bar"]
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = b"__all__"
|
||||
|
||||
|
||||
class TestModelForm3(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
# no error
|
||||
all((x.id for x in bar))
|
||||
all(x.id for x in bar)
|
||||
all(x.id for x in bar)
|
||||
any(x.id for x in bar)
|
||||
any({x.id for x in bar})
|
||||
|
||||
# PIE 802
|
||||
any([x.id for x in bar])
|
||||
all([x.id for x in bar])
|
||||
@@ -18,3 +18,10 @@ class Foo:
|
||||
|
||||
class FooTable(BaseTable):
|
||||
bar = fields.ListField(list)
|
||||
|
||||
|
||||
lambda *args, **kwargs: []
|
||||
|
||||
lambda *args: []
|
||||
|
||||
lambda **kwargs: []
|
||||
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
pass # OK, since we're not in a stub file
|
||||
|
||||
|
||||
class Bar:
|
||||
... # OK
|
||||
|
||||
|
||||
class Foo:
|
||||
pass # OK, since we're not in a stub file
|
||||
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
|
||||
class Bar: ... # OK
|
||||
|
||||
class Foo:
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK
|
||||
|
||||
|
||||
def buzz():
|
||||
print("buzz") # OK, not in stub file
|
||||
|
||||
|
||||
def foo2():
|
||||
123 # OK, not in a stub file
|
||||
|
||||
|
||||
def bizz():
|
||||
x = 123 # OK, not in a stub file
|
||||
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
"""foo""" # OK, strings are handled by another rule
|
||||
|
||||
def buzz():
|
||||
print("buzz") # ERROR PYI010
|
||||
|
||||
def foo2():
|
||||
123 # ERROR PYI010
|
||||
|
||||
def bizz():
|
||||
x = 123 # ERROR PYI010
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x: str = "x") -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f13(
|
||||
x: list[str] = [
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
] # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x: tuple[str, ...] = (
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
) # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x: set[str] = {
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
} # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x: frozenset[bytes] = frozenset({b"foo", b"bar", b"baz"})) -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f17(
|
||||
x: str = "foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x: str = b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x: object = "foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x: int = 5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x: complex = 3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x: complex = -42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f11(*, x: str = "x") -> None: ... # OK
|
||||
def f13(
|
||||
x: list[
|
||||
str
|
||||
] = [ # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x: tuple[
|
||||
str, ...
|
||||
] = ( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x: set[
|
||||
str
|
||||
] = { # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(
|
||||
x: frozenset[
|
||||
bytes
|
||||
] = frozenset( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
{b"foo", b"bar", b"baz"}
|
||||
)
|
||||
) -> None: ...
|
||||
def f17(
|
||||
x: str = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ "bar",
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x: str = b"foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ b"bar",
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x: object = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4,
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x: int = 5
|
||||
+ 5, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x: complex = 3j
|
||||
- 3j, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x: complex = -42.5j # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4.3j,
|
||||
) -> None: ...
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x="x") -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f13(
|
||||
x=[ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x=( # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x={ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f17(
|
||||
x="foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x="foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x=5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x=3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # Error PYI014
|
||||
) -> None: ...
|
||||
def f11(*, x="x") -> None: ... # OK
|
||||
def f13(
|
||||
x=[ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x=( # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x={ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None: ... # Error PYI014
|
||||
def f17(
|
||||
x="foo" + "bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x="foo" + 4, # Error PYI014
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x=5 + 5, # Error PYI014
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x=3j - 3j, # Error PYI014
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # Error PYI014
|
||||
) -> None: ...
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""foo""" # OK, not in stub
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
class Bar:
|
||||
"""bar""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
def foo():
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
class Bar:
|
||||
"""bar""" # ERROR PYI021
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
@@ -23,7 +23,7 @@ def test_error():
|
||||
"""
|
||||
|
||||
# recursive case
|
||||
assert not (a or not (b or c)) # note that we only reduce once here
|
||||
assert not (a or not (b or c))
|
||||
assert not (a or not (b and c))
|
||||
|
||||
# detected, but no autofix for messages
|
||||
|
||||
@@ -3,6 +3,8 @@ import os
|
||||
import posix
|
||||
from posix import abort
|
||||
import sys as std_sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
import _thread
|
||||
import _winapi
|
||||
|
||||
@@ -211,6 +213,18 @@ def noreturn_sys_exit():
|
||||
std_sys.exit(0)
|
||||
|
||||
|
||||
def noreturn_typing_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing.assert_never(0)
|
||||
|
||||
|
||||
def noreturn_typing_extensions_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing_extensions.assert_never(0)
|
||||
|
||||
|
||||
def noreturn__thread_exit():
|
||||
if x > 0:
|
||||
return 1
|
||||
@@ -275,3 +289,7 @@ def x(y):
|
||||
return 1
|
||||
case 1:
|
||||
print() # error
|
||||
|
||||
|
||||
def foo(baz: str) -> str:
|
||||
return baz
|
||||
|
||||
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
from foo import ( # Comment on `foo`
|
||||
Member as Alias, # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bar import ( # Comment on `bar`
|
||||
Member, # Comment on `Member`
|
||||
)
|
||||
|
||||
from baz import ( # Comment on `baz`
|
||||
Member as Alias # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bop import ( # Comment on `bop`
|
||||
Member # Comment on `Member`
|
||||
)
|
||||
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
from __future__ import annotations
|
||||
from typing import Any
|
||||
from . import my_local_folder_object
|
||||
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
@@ -57,3 +57,6 @@ class C: ...; ...
|
||||
#: E701:2:12
|
||||
match *0, 1, *2:
|
||||
case 0,: y = 0
|
||||
#:
|
||||
class Foo:
|
||||
match: Optional[Match] = None
|
||||
|
||||
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
#: Okay
|
||||
# 情
|
||||
#: W291:1:6
|
||||
print
|
||||
#: W293:2:1
|
||||
class Foo(object):
|
||||
|
||||
bang = 12
|
||||
#: W291:2:35
|
||||
'''multiline
|
||||
string with trailing whitespace'''
|
||||
#: W291 W292 noeol
|
||||
x = 1
|
||||
#: W191 W292 noeol
|
||||
if False:
|
||||
pass # indented with tabs
|
||||
#: W292:1:36 noeol
|
||||
# This line doesn't have a linefeed
|
||||
#: W292:1:5 E225:1:2 noeol
|
||||
1+ 1
|
||||
#: W292:1:27 E261:1:12 noeol
|
||||
import this # no line feed
|
||||
#: W292:3:22 noeol
|
||||
class Test(object):
|
||||
def __repr__(self):
|
||||
return 'test'
|
||||
@@ -16,21 +16,17 @@ if False == None: # E711, E712 (fix)
|
||||
if None == False: # E711, E712 (fix)
|
||||
pass
|
||||
|
||||
###
|
||||
# Unfixable errors
|
||||
###
|
||||
if "abc" == None: # E711
|
||||
pass
|
||||
if None == "abc": # E711
|
||||
pass
|
||||
if "abc" == False: # E712
|
||||
pass
|
||||
if False == "abc": # E712
|
||||
pass
|
||||
|
||||
###
|
||||
# Non-errors
|
||||
###
|
||||
if "abc" == None:
|
||||
pass
|
||||
if None == "abc":
|
||||
pass
|
||||
if "abc" == False:
|
||||
pass
|
||||
if False == "abc":
|
||||
pass
|
||||
if "def" == "abc":
|
||||
pass
|
||||
if False is None:
|
||||
|
||||
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
try:
|
||||
pass
|
||||
except ExceptionGroup:
|
||||
pass
|
||||
46
crates/ruff/resources/test/fixtures/pyflakes/F821_9.py
vendored
Normal file
46
crates/ruff/resources/test/fixtures/pyflakes/F821_9.py
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Test: match statements."""
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Car:
|
||||
make: str
|
||||
model: str
|
||||
|
||||
|
||||
def f():
|
||||
match Car("Toyota", "Corolla"):
|
||||
case Car("Toyota", model):
|
||||
print(model)
|
||||
case Car(make, "Corolla"):
|
||||
print(make)
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case True:
|
||||
return captured # F821
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case captured:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [captured, *_]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [*captured]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case {**captured}:
|
||||
return captured
|
||||
@@ -119,3 +119,5 @@ def f(x: int):
|
||||
print("A")
|
||||
case [Bar.A, *_]:
|
||||
print("A")
|
||||
case y:
|
||||
pass
|
||||
|
||||
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
Test for else-if-used
|
||||
"""
|
||||
|
||||
|
||||
def ok0():
|
||||
"""Should not trigger on elif"""
|
||||
if 1:
|
||||
pass
|
||||
elif 2:
|
||||
pass
|
||||
|
||||
|
||||
def ok1():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
print()
|
||||
if 1:
|
||||
pass
|
||||
|
||||
|
||||
def ok2():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 1:
|
||||
pass
|
||||
print()
|
||||
|
||||
|
||||
def not_ok0():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
|
||||
|
||||
def not_ok1():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
# Adapted from:
|
||||
# https://github.com/PyCQA/pylint/blob/b70d2abd7fabe9bfd735a30b593b9cd5eaa36194/tests/functional/g/globals.py
|
||||
|
||||
CONSTANT = 1
|
||||
|
||||
|
||||
def FUNC():
|
||||
pass
|
||||
|
||||
|
||||
class CLASS:
|
||||
pass
|
||||
|
||||
|
||||
def fix_constant(value):
|
||||
"""All this is ok, but try not to use `global` ;)"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
CONSTANT = value
|
||||
|
||||
|
||||
def global_with_import():
|
||||
"""Should only warn for global-statement when using `Import` node"""
|
||||
global sys # [global-statement]
|
||||
import sys
|
||||
|
||||
|
||||
def global_with_import_from():
|
||||
"""Should only warn for global-statement when using `ImportFrom` node"""
|
||||
global namedtuple # [global-statement]
|
||||
from collections import namedtuple
|
||||
|
||||
|
||||
def global_del():
|
||||
"""Deleting the global name prevents `global-variable-not-assigned`"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
del CONSTANT
|
||||
|
||||
|
||||
def global_operator_assign():
|
||||
"""Operator assigns should only throw a global statement error"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
CONSTANT += 1
|
||||
|
||||
|
||||
def global_function_assign():
|
||||
"""Function assigns should only throw a global statement error"""
|
||||
global CONSTANT # [global-statement]
|
||||
|
||||
def CONSTANT():
|
||||
pass
|
||||
|
||||
CONSTANT()
|
||||
|
||||
|
||||
def override_func():
|
||||
"""Overriding a function should only throw a global statement error"""
|
||||
global FUNC # [global-statement]
|
||||
|
||||
def FUNC():
|
||||
pass
|
||||
|
||||
FUNC()
|
||||
|
||||
|
||||
def override_class():
|
||||
"""Overriding a class should only throw a global statement error"""
|
||||
global CLASS # [global-statement]
|
||||
|
||||
class CLASS:
|
||||
pass
|
||||
|
||||
CLASS()
|
||||
21
crates/ruff/resources/test/fixtures/pylint/logging_too_few_args.py
vendored
Normal file
21
crates/ruff/resources/test/fixtures/pylint/logging_too_few_args.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import logging
|
||||
|
||||
logging.warning("Hello %s %s", "World!") # [logging-too-few-args]
|
||||
|
||||
# do not handle calls with kwargs (like pylint)
|
||||
logging.warning("Hello %s", "World!", "again", something="else")
|
||||
|
||||
logging.warning("Hello %s", "World!")
|
||||
|
||||
# do not handle calls without any args
|
||||
logging.info("100% dynamic")
|
||||
|
||||
# do not handle calls with *args
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)
|
||||
|
||||
# do not handle calls with **kwargs
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)
|
||||
|
||||
import warning
|
||||
|
||||
warning.warning("Hello %s %s", "World!")
|
||||
17
crates/ruff/resources/test/fixtures/pylint/logging_too_many_args.py
vendored
Normal file
17
crates/ruff/resources/test/fixtures/pylint/logging_too_many_args.py
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
import logging
|
||||
|
||||
logging.warning("Hello %s", "World!", "again") # [logging-too-many-args]
|
||||
|
||||
logging.warning("Hello %s", "World!", "again", something="else")
|
||||
|
||||
logging.warning("Hello %s", "World!")
|
||||
|
||||
# do not handle calls with *args
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)
|
||||
|
||||
# do not handle calls with **kwargs
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)
|
||||
|
||||
import warning
|
||||
|
||||
warning.warning("Hello %s", "World!", "again")
|
||||
155
crates/ruff/resources/test/fixtures/pylint/redefined_loop_name.py
vendored
Normal file
155
crates/ruff/resources/test/fixtures/pylint/redefined_loop_name.py
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
# For -> for, variable reused
|
||||
for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# With -> for, variable reused
|
||||
with None as i:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> with, variable reused
|
||||
for i in []:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# With -> with, variable reused
|
||||
with None as i:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# For -> for, different variable
|
||||
for i in []:
|
||||
for j in []: # ok
|
||||
pass
|
||||
|
||||
# With -> with, different variable
|
||||
with None as i:
|
||||
with None as j: # ok
|
||||
pass
|
||||
|
||||
# For -> for -> for, doubly nested variable reuse
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for -> for -> for, doubly nested variable reuse x2
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> assignment
|
||||
for i in []:
|
||||
i = 5 # error
|
||||
|
||||
# For -> augmented assignment
|
||||
for i in []:
|
||||
i += 5 # error
|
||||
|
||||
# For -> annotated assignment
|
||||
for i in []:
|
||||
i: int = 5 # error
|
||||
|
||||
# Async for -> for, variable reused
|
||||
async for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> async for, variable reused
|
||||
for i in []:
|
||||
async for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks tuple
|
||||
for i, j in enumerate([]):
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, inner loop unpacks tuple
|
||||
for i in []:
|
||||
for i, j in enumerate([]): # error
|
||||
pass
|
||||
|
||||
# For -> for, both loops unpack tuple
|
||||
for (i, (j, k)) in []:
|
||||
for i, j in enumerate([]): # two errors
|
||||
pass
|
||||
|
||||
# For else -> for, variable reused in else
|
||||
for i in []:
|
||||
pass
|
||||
else:
|
||||
for i in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, ignore dummy variables
|
||||
for _ in []:
|
||||
for _ in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks with asterisk
|
||||
for i, *j in []:
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> function definition
|
||||
for i in []:
|
||||
def f():
|
||||
i = 2 # no error
|
||||
|
||||
# For -> class definition
|
||||
for i in []:
|
||||
class A:
|
||||
i = 2 # no error
|
||||
|
||||
# For -> function definition -> for -> assignment
|
||||
for i in []:
|
||||
def f():
|
||||
for i in []: # no error
|
||||
i = 2 # error
|
||||
|
||||
# For -> class definition -> for -> for
|
||||
for i in []:
|
||||
class A:
|
||||
for i in []: # no error
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> use in assignment target without actual assignment; subscript
|
||||
for i in []:
|
||||
a[i] = 2 # no error
|
||||
i[a] = 2 # no error
|
||||
|
||||
# For -> use in assignment target without actual assignment; attribute
|
||||
for i in []:
|
||||
a.i = 2 # no error
|
||||
i.a = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a[0] in []:
|
||||
a[0] = 2 # error
|
||||
a[1] = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a['i'] in []:
|
||||
a['i'] = 2 # error
|
||||
a['j'] = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment
|
||||
for a.i in []:
|
||||
a.i = 2 # error
|
||||
a.j = 2 # no error
|
||||
|
||||
# For target with double nested attribute -> assignment
|
||||
for a.i.j in []:
|
||||
a.i.j = 2 # error
|
||||
a.j.i = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment with different spacing
|
||||
for a.i in []:
|
||||
a. i = 2 # error
|
||||
for a. i in []:
|
||||
a.i = 2 # error
|
||||
@@ -124,3 +124,14 @@ def test_break_in_with():
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def test_break_in_match():
|
||||
"""no false positive for break in match"""
|
||||
for name in ["demo"]:
|
||||
match name:
|
||||
case "demo":
|
||||
break
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -41,3 +41,8 @@ def f(x: Union["str", int]) -> None:
|
||||
|
||||
def f(x: Union[("str", "int"), float]) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f() -> None:
|
||||
x: Optional[str]
|
||||
x = Optional[str]
|
||||
|
||||
@@ -2,36 +2,36 @@ from typing import TypedDict, NotRequired, Literal
|
||||
import typing
|
||||
|
||||
# dict literal
|
||||
MyType1 = TypedDict("MyType1", {"a": int, "b": str})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str})
|
||||
|
||||
# dict call
|
||||
MyType2 = TypedDict("MyType2", dict(a=int, b=str))
|
||||
MyType = TypedDict("MyType", dict(a=int, b=str))
|
||||
|
||||
# kwargs
|
||||
MyType3 = TypedDict("MyType3", a=int, b=str)
|
||||
MyType = TypedDict("MyType", a=int, b=str)
|
||||
|
||||
# Empty TypedDict
|
||||
MyType4 = TypedDict("MyType4")
|
||||
MyType = TypedDict("MyType")
|
||||
|
||||
# Literal values
|
||||
MyType5 = TypedDict("MyType5", {"a": "hello"})
|
||||
MyType6 = TypedDict("MyType6", a="hello")
|
||||
MyType = TypedDict("MyType", {"a": "hello"})
|
||||
MyType = TypedDict("MyType", a="hello")
|
||||
|
||||
# NotRequired
|
||||
MyType7 = TypedDict("MyType7", {"a": NotRequired[dict]})
|
||||
MyType = TypedDict("MyType", {"a": NotRequired[dict]})
|
||||
|
||||
# total
|
||||
MyType8 = TypedDict("MyType8", {"x": int, "y": int}, total=False)
|
||||
|
||||
# invalid identifiers
|
||||
MyType9 = TypedDict("MyType9", {"in": int, "x-y": int})
|
||||
MyType = TypedDict("MyType", {"x": int, "y": int}, total=False)
|
||||
|
||||
# using Literal type
|
||||
MyType10 = TypedDict("MyType10", {"key": Literal["value"]})
|
||||
MyType = TypedDict("MyType", {"key": Literal["value"]})
|
||||
|
||||
# using namespace TypedDict
|
||||
MyType11 = typing.TypedDict("MyType11", {"key": int})
|
||||
MyType = typing.TypedDict("MyType", {"key": int})
|
||||
|
||||
# unpacking
|
||||
# invalid identifiers (OK)
|
||||
MyType = TypedDict("MyType", {"in": int, "x-y": int})
|
||||
|
||||
# unpacking (OK)
|
||||
c = {"c": float}
|
||||
MyType12 = TypedDict("MyType1", {"a": int, "b": str, **c})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str, **c})
|
||||
|
||||
@@ -2,21 +2,24 @@ from typing import NamedTuple
|
||||
import typing
|
||||
|
||||
# with complex annotations
|
||||
NT1 = NamedTuple("NT1", [("a", int), ("b", tuple[str, ...])])
|
||||
MyType = NamedTuple("MyType", [("a", int), ("b", tuple[str, ...])])
|
||||
|
||||
# with default values as list
|
||||
NT2 = NamedTuple(
|
||||
"NT2",
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str), ("c", list[bool])],
|
||||
defaults=["foo", [True]],
|
||||
)
|
||||
|
||||
# with namespace
|
||||
NT3 = typing.NamedTuple("NT3", [("a", int), ("b", str)])
|
||||
MyType = typing.NamedTuple("MyType", [("a", int), ("b", str)])
|
||||
|
||||
# with too many default values
|
||||
NT4 = NamedTuple(
|
||||
"NT4",
|
||||
# too many default values (OK)
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str)],
|
||||
defaults=[1, "bar", "baz"],
|
||||
)
|
||||
|
||||
# invalid identifiers (OK)
|
||||
MyType = NamedTuple("MyType", [("x-y", int), ("b", tuple[str, ...])])
|
||||
|
||||
@@ -9,9 +9,7 @@ use rustpython_parser::ast::{
|
||||
Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword, KeywordData,
|
||||
Located, Location, MatchCase, Pattern, PatternKind, Stmt, StmtKind,
|
||||
};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::token::StringKind;
|
||||
use rustpython_parser::{lexer, Mode, StringKind, Tok};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
use crate::ast::types::{Binding, BindingKind, CallPath, Range};
|
||||
@@ -655,7 +653,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
|
||||
|
||||
/// Returns `true` if a [`Range`] includes at least one comment.
|
||||
pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
|
||||
for tok in lexer::make_tokenizer(locator.slice(&range)) {
|
||||
for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
|
||||
match tok {
|
||||
Ok((_, tok, _)) => {
|
||||
if matches!(tok, Tok::Comment(..)) {
|
||||
@@ -870,7 +868,7 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
|
||||
let mut fix_start = None;
|
||||
let mut fix_end = None;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, start).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, start).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(start);
|
||||
@@ -902,7 +900,8 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
) {
|
||||
let contents = locator.slice(&Range::from_located(stmt));
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt.location).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
|
||||
{
|
||||
if matches!(tok, Tok::Name { .. }) {
|
||||
return Range::new(start, end);
|
||||
}
|
||||
@@ -928,16 +927,18 @@ pub fn binding_range(binding: &Binding, locator: &Locator) -> Range {
|
||||
}
|
||||
|
||||
// Return the ranges of `Name` tokens within a specified node.
|
||||
pub fn find_names<T>(located: &Located<T>, locator: &Locator) -> Vec<Range> {
|
||||
pub fn find_names<'a, T, U>(
|
||||
located: &'a Located<T, U>,
|
||||
locator: &'a Locator,
|
||||
) -> impl Iterator<Item = Range> + 'a {
|
||||
let contents = locator.slice(&Range::from_located(located));
|
||||
lexer::make_tokenizer_located(contents, located.location)
|
||||
lexer::lex_located(contents, Mode::Module, located.location)
|
||||
.flatten()
|
||||
.filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
|
||||
.map(|(start, _, end)| Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Return the `Range` of `name` in `Excepthandler`.
|
||||
@@ -949,7 +950,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
|
||||
(Some(_), Some(type_)) => {
|
||||
let type_end_location = type_.end_location.unwrap();
|
||||
let contents = locator.slice(&Range::new(type_end_location, body[0].location));
|
||||
let range = lexer::make_tokenizer_located(contents, type_end_location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, type_end_location)
|
||||
.flatten()
|
||||
.tuple_windows()
|
||||
.find(|(tok, next_tok)| {
|
||||
@@ -976,7 +977,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
location: handler.location,
|
||||
end_location: end,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, handler.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, handler.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -990,7 +991,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
|
||||
pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
|
||||
let contents = locator.slice(&Range::from_located(expr));
|
||||
lexer::make_tokenizer_located(contents, expr.location)
|
||||
lexer::lex_located(contents, Mode::Module, expr.location)
|
||||
.flatten()
|
||||
.filter_map(|(location, tok, end_location)| match tok {
|
||||
Tok::String {
|
||||
@@ -1044,7 +1045,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
.expect("Expected orelse to be non-empty")
|
||||
.location,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, body_end)
|
||||
let range = lexer::lex_located(contents, Mode::Module, body_end)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1060,7 +1061,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
|
||||
pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
|
||||
let contents = locator.slice(&range);
|
||||
let range = lexer::make_tokenizer_located(contents, range.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, range.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Colon))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1090,7 +1091,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
_ => return None,
|
||||
};
|
||||
let contents = locator.slice(&Range::new(start, end));
|
||||
let range = lexer::make_tokenizer_located(contents, start)
|
||||
let range = lexer::lex_located(contents, Mode::Module, start)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1206,8 +1207,8 @@ pub fn is_logger_candidate(func: &Expr) -> bool {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::ast::helpers::{
|
||||
elif_else_range, else_range, first_colon_range, identifier_range, match_trailing_content,
|
||||
|
||||
24
crates/ruff/src/ast/logging.rs
Normal file
24
crates/ruff/src/ast/logging.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
pub enum LoggingLevel {
|
||||
Debug,
|
||||
Critical,
|
||||
Error,
|
||||
Exception,
|
||||
Info,
|
||||
Warn,
|
||||
Warning,
|
||||
}
|
||||
|
||||
impl LoggingLevel {
|
||||
pub fn from_str(level: &str) -> Option<Self> {
|
||||
match level {
|
||||
"debug" => Some(LoggingLevel::Debug),
|
||||
"critical" => Some(LoggingLevel::Critical),
|
||||
"error" => Some(LoggingLevel::Error),
|
||||
"exception" => Some(LoggingLevel::Exception),
|
||||
"info" => Some(LoggingLevel::Info),
|
||||
"warn" => Some(LoggingLevel::Warn),
|
||||
"warning" => Some(LoggingLevel::Warning),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ pub mod comparable;
|
||||
pub mod function_type;
|
||||
pub mod hashable;
|
||||
pub mod helpers;
|
||||
pub mod logging;
|
||||
pub mod operations;
|
||||
pub mod relocate;
|
||||
pub mod types;
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use bitflags::bitflags;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers::any_over_expr;
|
||||
use crate::ast::types::{BindingKind, Scope};
|
||||
@@ -205,6 +204,7 @@ pub fn in_nested_block<'a>(mut parents: impl Iterator<Item = &'a Stmt>) -> bool
|
||||
| StmtKind::TryStar { .. }
|
||||
| StmtKind::If { .. }
|
||||
| StmtKind::With { .. }
|
||||
| StmtKind::Match { .. }
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -283,7 +283,7 @@ pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
|
||||
/// `CPython` doesn't either. This method iterates over the token stream and
|
||||
/// re-identifies [`Cmpop`] nodes, annotating them with valid ranges.
|
||||
pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
|
||||
let mut tok_iter = lexer::make_tokenizer(contents).flatten().peekable();
|
||||
let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
|
||||
let mut ops: Vec<LocatedCmpop> = vec![];
|
||||
let mut count: usize = 0;
|
||||
loop {
|
||||
|
||||
@@ -29,7 +29,7 @@ impl Range {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_located<T>(located: &Located<T>) -> Self {
|
||||
pub fn from_located<T, U>(located: &Located<T, U>) -> Self {
|
||||
Range::new(located.location, located.end_location.unwrap())
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ impl<'a> Scope<'a> {
|
||||
// StarImportation
|
||||
// FutureImportation
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, is_macro::Is)]
|
||||
pub enum BindingKind<'a> {
|
||||
Annotation,
|
||||
Argument,
|
||||
|
||||
@@ -4,8 +4,7 @@ use libcst_native::{
|
||||
Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
|
||||
};
|
||||
use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers;
|
||||
use crate::ast::helpers::to_absolute;
|
||||
@@ -371,7 +370,7 @@ pub fn remove_argument(
|
||||
if n_arguments == 1 {
|
||||
// Case 1: there is only one argument.
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(if remove_parentheses {
|
||||
@@ -403,7 +402,7 @@ pub fn remove_argument(
|
||||
{
|
||||
// Case 2: argument or keyword is _not_ the last node.
|
||||
let mut seen_comma = false;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if seen_comma {
|
||||
if matches!(tok, Tok::NonLogicalNewline) {
|
||||
// Also delete any non-logical newlines after the comma.
|
||||
@@ -426,7 +425,7 @@ pub fn remove_argument(
|
||||
} else {
|
||||
// Case 3: argument or keyword is the last node, so we have to find the last
|
||||
// comma in the stmt.
|
||||
for (start, tok, _) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if start == expr_at {
|
||||
fix_end = Some(expr_end);
|
||||
break;
|
||||
@@ -448,8 +447,8 @@ pub fn remove_argument(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
|
||||
use crate::source_code::Locator;
|
||||
|
||||
@@ -6,17 +6,17 @@ use std::path::Path;
|
||||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use nohash_hasher::IntMap;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_common::cformat::{CFormatError, CFormatErrorType};
|
||||
use rustpython_parser::ast::{
|
||||
Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
|
||||
ExprKind, KeywordData, Located, Location, Operator, Stmt, StmtKind, Suite,
|
||||
};
|
||||
use rustpython_parser::parser;
|
||||
use smallvec::smallvec;
|
||||
|
||||
use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
|
||||
use ruff_python::typing::TYPING_EXTENSIONS;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_common::cformat::{CFormatError, CFormatErrorType};
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::{
|
||||
Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
|
||||
ExprKind, KeywordData, Located, Location, Operator, Pattern, PatternKind, Stmt, StmtKind,
|
||||
Suite,
|
||||
};
|
||||
use smallvec::smallvec;
|
||||
|
||||
use crate::ast::helpers::{
|
||||
binding_range, collect_call_path, extract_handler_names, from_relative_import, to_module_path,
|
||||
@@ -28,7 +28,7 @@ use crate::ast::types::{
|
||||
RefEquality, Scope, ScopeKind,
|
||||
};
|
||||
use crate::ast::typing::{match_annotated_subscript, Callable, SubscriptKind};
|
||||
use crate::ast::visitor::{walk_excepthandler, Visitor};
|
||||
use crate::ast::visitor::{walk_excepthandler, walk_pattern, Visitor};
|
||||
use crate::ast::{branch_detection, cast, helpers, operations, typing, visitor};
|
||||
use crate::docstrings::definition::{Definition, DefinitionKind, Docstring, Documentable};
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
@@ -187,7 +187,7 @@ impl<'a> Checker<'a> {
|
||||
/// Return `true` if a patch should be generated under the given autofix
|
||||
/// `Mode`.
|
||||
pub fn patch(&self, code: &Rule) -> bool {
|
||||
matches!(self.autofix, flags::Autofix::Enabled) && self.settings.rules.should_fix(code)
|
||||
self.autofix.into() && self.settings.rules.should_fix(code)
|
||||
}
|
||||
|
||||
/// Return `true` if the `Expr` is a reference to `typing.${target}`.
|
||||
@@ -229,9 +229,8 @@ impl<'a> Checker<'a> {
|
||||
|
||||
/// Return `true` if `member` is bound as a builtin.
|
||||
pub fn is_builtin(&self, member: &str) -> bool {
|
||||
self.find_binding(member).map_or(false, |binding| {
|
||||
matches!(binding.kind, BindingKind::Builtin)
|
||||
})
|
||||
self.find_binding(member)
|
||||
.map_or(false, |binding| binding.kind.is_builtin())
|
||||
}
|
||||
|
||||
pub fn resolve_call_path<'b>(&'a self, value: &'b Expr) -> Option<CallPath<'a>>
|
||||
@@ -295,7 +294,7 @@ impl<'a> Checker<'a> {
|
||||
// members from the fix that will eventually be excluded by a `noqa`.
|
||||
// Unfortunately, we _do_ want to register a `Diagnostic` for each
|
||||
// eventually-ignored import, so that our `noqa` counts are accurate.
|
||||
if matches!(self.noqa, flags::Noqa::Disabled) {
|
||||
if !self.noqa.to_bool() {
|
||||
return false;
|
||||
}
|
||||
noqa::rule_is_ignored(code, lineno, self.noqa_line_for, self.locator)
|
||||
@@ -341,7 +340,7 @@ where
|
||||
match &stmt.node {
|
||||
StmtKind::Global { names } => {
|
||||
let scope_index = *self.scope_stack.last().expect("No current scope found");
|
||||
let ranges = helpers::find_names(stmt, self.locator);
|
||||
let ranges: Vec<Range> = helpers::find_names(stmt, self.locator).collect();
|
||||
if scope_index != GLOBAL_SCOPE_INDEX {
|
||||
// Add the binding to the current scope.
|
||||
let context = self.execution_context();
|
||||
@@ -371,7 +370,7 @@ where
|
||||
}
|
||||
StmtKind::Nonlocal { names } => {
|
||||
let scope_index = *self.scope_stack.last().expect("No current scope found");
|
||||
let ranges = helpers::find_names(stmt, self.locator);
|
||||
let ranges: Vec<Range> = helpers::find_names(stmt, self.locator).collect();
|
||||
if scope_index != GLOBAL_SCOPE_INDEX {
|
||||
let context = self.execution_context();
|
||||
let scope = &mut self.scopes[scope_index];
|
||||
@@ -537,6 +536,15 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(self, body);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::NonEmptyStubBody) {
|
||||
flake8_pyi::rules::non_empty_stub_body(self, body);
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::DunderFunctionName) {
|
||||
if let Some(diagnostic) = pep8_naming::rules::dunder_function_name(
|
||||
self.current_scope(),
|
||||
@@ -548,6 +556,10 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
pylint::rules::global_statement(self, name);
|
||||
}
|
||||
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
@@ -804,6 +816,21 @@ where
|
||||
self, body,
|
||||
));
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::ExcludeWithModelForm) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::exclude_with_model_form(self, bases, body)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AllWithModelForm) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::all_with_model_form(self, bases, body)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::ModelWithoutDunderStr) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::model_without_dunder_str(self, bases, body, stmt)
|
||||
@@ -811,6 +838,9 @@ where
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
pylint::rules::global_statement(self, name);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::UselessObjectInheritance) {
|
||||
pyupgrade::rules::useless_object_inheritance(self, stmt, name, bases, keywords);
|
||||
}
|
||||
@@ -865,6 +895,11 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(self, body);
|
||||
}
|
||||
}
|
||||
|
||||
if self
|
||||
.settings
|
||||
@@ -909,6 +944,17 @@ where
|
||||
{
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt);
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
if let Some(asname) = name.node.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
pylint::rules::global_statement(self, &name.node.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::RewriteCElementTree) {
|
||||
pyupgrade::rules::replace_c_element_tree(self, stmt);
|
||||
}
|
||||
@@ -1166,6 +1212,16 @@ where
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt);
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
if let Some(asname) = name.node.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
pylint::rules::global_statement(self, &name.node.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::UnnecessaryFutureImport)
|
||||
&& self.settings.target_version >= PythonVersion::Py37
|
||||
{
|
||||
@@ -1548,6 +1604,12 @@ where
|
||||
}
|
||||
StmtKind::AugAssign { target, .. } => {
|
||||
self.handle_node_load(target);
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::If { test, body, orelse } => {
|
||||
if self.settings.rules.enabled(&Rule::IfTuple) {
|
||||
@@ -1571,7 +1633,7 @@ where
|
||||
);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::NeedlessBool) {
|
||||
flake8_simplify::rules::return_bool_condition_directly(self, stmt);
|
||||
flake8_simplify::rules::needless_bool(self, stmt);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::ManualDictLookup) {
|
||||
flake8_simplify::rules::manual_dict_lookup(self, stmt, test, body, orelse);
|
||||
@@ -1605,6 +1667,13 @@ where
|
||||
if self.settings.rules.enabled(&Rule::OutdatedVersionBlock) {
|
||||
pyupgrade::rules::outdated_version_block(self, stmt, test, body, orelse);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::CollapsibleElseIf) {
|
||||
if let Some(diagnostic) =
|
||||
pylint::rules::collapsible_else_if(orelse, self.locator)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::Assert { test, msg } => {
|
||||
if self.settings.rules.enabled(&Rule::AssertTuple) {
|
||||
@@ -1650,6 +1719,9 @@ where
|
||||
self.current_stmt_parent().map(Into::into),
|
||||
);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::RedefinedLoopName) {
|
||||
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
|
||||
}
|
||||
}
|
||||
StmtKind::While { body, orelse, .. } => {
|
||||
if self.settings.rules.enabled(&Rule::FunctionUsesLoopVariable) {
|
||||
@@ -1694,6 +1766,9 @@ where
|
||||
if self.settings.rules.enabled(&Rule::UselessElseOnLoop) {
|
||||
pylint::rules::useless_else_on_loop(self, stmt, body, orelse);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::RedefinedLoopName) {
|
||||
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
|
||||
}
|
||||
if matches!(stmt.node, StmtKind::For { .. }) {
|
||||
if self.settings.rules.enabled(&Rule::ReimplementedBuiltin) {
|
||||
flake8_simplify::rules::convert_for_loop_to_any_all(
|
||||
@@ -1805,6 +1880,14 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::UselessMetaclassType) {
|
||||
pyupgrade::rules::useless_metaclass_type(self, stmt, value, targets);
|
||||
}
|
||||
@@ -1855,7 +1938,15 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
StmtKind::Delete { .. } => {}
|
||||
StmtKind::Delete { targets } => {
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::Expr { value, .. } => {
|
||||
if self.settings.rules.enabled(&Rule::UselessComparison) {
|
||||
flake8_bugbear::rules::useless_comparison(self, value);
|
||||
@@ -1922,9 +2013,7 @@ where
|
||||
if self.scopes[GLOBAL_SCOPE_INDEX]
|
||||
.bindings
|
||||
.get(name)
|
||||
.map_or(true, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
})
|
||||
.map_or(true, |index| self.bindings[*index].kind.is_annotation())
|
||||
{
|
||||
let index = self.bindings.len();
|
||||
self.bindings.push(Binding {
|
||||
@@ -1986,9 +2075,7 @@ where
|
||||
if self.scopes[GLOBAL_SCOPE_INDEX]
|
||||
.bindings
|
||||
.get(name)
|
||||
.map_or(true, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
})
|
||||
.map_or(true, |index| self.bindings[*index].kind.is_annotation())
|
||||
{
|
||||
let index = self.bindings.len();
|
||||
self.bindings.push(Binding {
|
||||
@@ -2053,8 +2140,8 @@ where
|
||||
value,
|
||||
..
|
||||
} => {
|
||||
// If we're in a class or module scope, then the annotation needs to be available
|
||||
// at runtime.
|
||||
// If we're in a class or module scope, then the annotation needs to be
|
||||
// available at runtime.
|
||||
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
|
||||
if !self.annotations_future_enabled
|
||||
&& matches!(
|
||||
@@ -2171,8 +2258,9 @@ where
|
||||
// Pre-visit.
|
||||
match &expr.node {
|
||||
ExprKind::Subscript { value, slice, .. } => {
|
||||
// Ex) Optional[...]
|
||||
if !self.in_deferred_string_type_definition
|
||||
// Ex) Optional[...], Union[...]
|
||||
if self.in_type_definition
|
||||
&& !self.in_deferred_string_type_definition
|
||||
&& !self.settings.pyupgrade.keep_runtime_typing
|
||||
&& self.settings.rules.enabled(&Rule::TypingUnion)
|
||||
&& (self.settings.target_version >= PythonVersion::Py310
|
||||
@@ -2484,6 +2572,13 @@ where
|
||||
if self.settings.rules.enabled(&Rule::UnnecessaryDictKwargs) {
|
||||
flake8_pie::rules::no_unnecessary_dict_kwargs(self, expr, keywords);
|
||||
}
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
.enabled(&Rule::UnnecessaryComprehensionAnyAll)
|
||||
{
|
||||
flake8_pie::rules::unnecessary_comprehension_any_all(self, expr, func, args);
|
||||
}
|
||||
|
||||
// flake8-bandit
|
||||
if self.settings.rules.enabled(&Rule::ExecBuiltin) {
|
||||
@@ -2911,6 +3006,18 @@ where
|
||||
{
|
||||
flake8_logging_format::rules::logging_call(self, func, args, keywords);
|
||||
}
|
||||
|
||||
// pylint logging checker
|
||||
if self.settings.rules.enabled(&Rule::LoggingTooFewArgs)
|
||||
|| self.settings.rules.enabled(&Rule::LoggingTooManyArgs)
|
||||
{
|
||||
pylint::rules::logging_call(self, func, args, keywords);
|
||||
}
|
||||
|
||||
// flake8-django
|
||||
if self.settings.rules.enabled(&Rule::LocalsInRenderFunction) {
|
||||
flake8_django::rules::locals_in_render_function(self, func, args, keywords);
|
||||
}
|
||||
}
|
||||
ExprKind::Dict { keys, values } => {
|
||||
if self
|
||||
@@ -3404,17 +3511,17 @@ where
|
||||
if self.settings.rules.enabled(&Rule::CompareWithTuple) {
|
||||
flake8_simplify::rules::compare_with_tuple(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AAndNotA) {
|
||||
flake8_simplify::rules::a_and_not_a(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprAndNotExpr) {
|
||||
flake8_simplify::rules::expr_and_not_expr(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AOrNotA) {
|
||||
flake8_simplify::rules::a_or_not_a(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprOrNotExpr) {
|
||||
flake8_simplify::rules::expr_or_not_expr(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::OrTrue) {
|
||||
flake8_simplify::rules::or_true(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprOrTrue) {
|
||||
flake8_simplify::rules::expr_or_true(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AndFalse) {
|
||||
flake8_simplify::rules::and_false(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprAndFalse) {
|
||||
flake8_simplify::rules::expr_and_false(self, expr);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -3840,6 +3947,32 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_pattern(&mut self, pattern: &'b Pattern) {
|
||||
if let PatternKind::MatchAs {
|
||||
name: Some(name), ..
|
||||
}
|
||||
| PatternKind::MatchStar { name: Some(name) }
|
||||
| PatternKind::MatchMapping {
|
||||
rest: Some(name), ..
|
||||
} = &pattern.node
|
||||
{
|
||||
self.add_binding(
|
||||
name,
|
||||
Binding {
|
||||
kind: BindingKind::Assignment,
|
||||
runtime_usage: None,
|
||||
synthetic_usage: None,
|
||||
typing_usage: None,
|
||||
range: Range::from_located(pattern),
|
||||
source: Some(self.current_stmt().clone()),
|
||||
context: self.execution_context(),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
walk_pattern(self, pattern);
|
||||
}
|
||||
|
||||
fn visit_format_spec(&mut self, format_spec: &'b Expr) {
|
||||
match &format_spec.node {
|
||||
ExprKind::JoinedStr { values } => {
|
||||
@@ -3874,6 +4007,21 @@ where
|
||||
flake8_bugbear::rules::function_call_argument_default(self, arguments);
|
||||
}
|
||||
|
||||
if self.is_interface_definition {
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
.enabled(&Rule::TypedArgumentSimpleDefaults)
|
||||
{
|
||||
flake8_pyi::rules::typed_argument_simple_defaults(self, arguments);
|
||||
}
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::ArgumentSimpleDefaults) {
|
||||
flake8_pyi::rules::argument_simple_defaults(self, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
// Bind, but intentionally avoid walking default expressions, as we handle them
|
||||
// upstream.
|
||||
for arg in &arguments.posonlyargs {
|
||||
@@ -4090,7 +4238,7 @@ impl<'a> Checker<'a> {
|
||||
let existing_binding_index = self.scopes[*scope_index].bindings.get(&name).unwrap();
|
||||
let existing = &self.bindings[*existing_binding_index];
|
||||
let in_current_scope = stack_index == 0;
|
||||
if !matches!(existing.kind, BindingKind::Builtin)
|
||||
if !existing.kind.is_builtin()
|
||||
&& existing.source.as_ref().map_or(true, |left| {
|
||||
binding.source.as_ref().map_or(true, |right| {
|
||||
!branch_detection::different_forks(
|
||||
@@ -4110,7 +4258,7 @@ impl<'a> Checker<'a> {
|
||||
| BindingKind::StarImportation(..)
|
||||
| BindingKind::FutureImportation
|
||||
);
|
||||
if matches!(binding.kind, BindingKind::LoopVar) && existing_is_import {
|
||||
if binding.kind.is_loop_var() && existing_is_import {
|
||||
if self.settings.rules.enabled(&Rule::ImportShadowedByLoopVar) {
|
||||
self.diagnostics.push(Diagnostic::new(
|
||||
pyflakes::rules::ImportShadowedByLoopVar {
|
||||
@@ -4124,7 +4272,7 @@ impl<'a> Checker<'a> {
|
||||
if !existing.used()
|
||||
&& binding.redefines(existing)
|
||||
&& (!self.settings.dummy_variable_rgx.is_match(name) || existing_is_import)
|
||||
&& !(matches!(existing.kind, BindingKind::FunctionDefinition)
|
||||
&& !(existing.kind.is_function_definition()
|
||||
&& visibility::is_overload(
|
||||
self,
|
||||
cast::decorator_list(existing.source.as_ref().unwrap()),
|
||||
@@ -4159,36 +4307,29 @@ impl<'a> Checker<'a> {
|
||||
|
||||
let scope = self.current_scope();
|
||||
let binding = if let Some(index) = scope.bindings.get(&name) {
|
||||
if matches!(self.bindings[*index].kind, BindingKind::Builtin) {
|
||||
// Avoid overriding builtins.
|
||||
binding
|
||||
} else if matches!(self.bindings[*index].kind, BindingKind::Global) {
|
||||
// If the original binding was a global, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also a global.
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
kind: BindingKind::Global,
|
||||
..binding
|
||||
let existing = &self.bindings[*index];
|
||||
match &existing.kind {
|
||||
BindingKind::Builtin => {
|
||||
// Avoid overriding builtins.
|
||||
binding
|
||||
}
|
||||
} else if matches!(self.bindings[*index].kind, BindingKind::Nonlocal) {
|
||||
// If the original binding was a nonlocal, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also a nonlocal.
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
kind: BindingKind::Nonlocal,
|
||||
..binding
|
||||
kind @ (BindingKind::Global | BindingKind::Nonlocal) => {
|
||||
// If the original binding was a global or nonlocal, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also as the same.
|
||||
Binding {
|
||||
runtime_usage: existing.runtime_usage,
|
||||
synthetic_usage: existing.synthetic_usage,
|
||||
typing_usage: existing.typing_usage,
|
||||
kind: kind.clone(),
|
||||
..binding
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
_ => Binding {
|
||||
runtime_usage: existing.runtime_usage,
|
||||
synthetic_usage: existing.synthetic_usage,
|
||||
typing_usage: existing.typing_usage,
|
||||
..binding
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
binding
|
||||
@@ -4197,7 +4338,7 @@ impl<'a> Checker<'a> {
|
||||
// Don't treat annotations as assignments if there is an existing value
|
||||
// in scope.
|
||||
let scope = &mut self.scopes[*(self.scope_stack.last().expect("No current scope found"))];
|
||||
if !(matches!(binding.kind, BindingKind::Annotation) && scope.bindings.contains_key(name)) {
|
||||
if !(binding.kind.is_annotation() && scope.bindings.contains_key(name)) {
|
||||
if let Some(rebound_index) = scope.bindings.insert(name, binding_index) {
|
||||
scope
|
||||
.rebounds
|
||||
@@ -4236,7 +4377,7 @@ impl<'a> Checker<'a> {
|
||||
let context = self.execution_context();
|
||||
self.bindings[*index].mark_used(scope_id, Range::from_located(expr), context);
|
||||
|
||||
if matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
if self.bindings[*index].kind.is_annotation()
|
||||
&& !self.in_deferred_string_type_definition
|
||||
&& !self.in_deferred_type_definition
|
||||
{
|
||||
@@ -4384,9 +4525,7 @@ impl<'a> Checker<'a> {
|
||||
.current_scope()
|
||||
.bindings
|
||||
.get(id)
|
||||
.map_or(false, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Global)
|
||||
})
|
||||
.map_or(false, |index| self.bindings[*index].kind.is_global())
|
||||
{
|
||||
pep8_naming::rules::non_lowercase_variable_in_function(self, expr, parent, id);
|
||||
}
|
||||
@@ -4889,7 +5028,7 @@ impl<'a> Checker<'a> {
|
||||
{
|
||||
for (name, index) in &scope.bindings {
|
||||
let binding = &self.bindings[*index];
|
||||
if matches!(binding.kind, BindingKind::Global) {
|
||||
if binding.kind.is_global() {
|
||||
if let Some(stmt) = &binding.source {
|
||||
if matches!(stmt.node, StmtKind::Global { .. }) {
|
||||
diagnostics.push(Diagnostic::new(
|
||||
@@ -5313,9 +5452,22 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
overloaded_name = flake8_annotations::helpers::overloaded_name(self, &definition);
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::DocstringInStub) {
|
||||
flake8_pyi::rules::docstring_in_stubs(self, definition.docstring);
|
||||
}
|
||||
}
|
||||
|
||||
// pydocstyle
|
||||
if enforce_docstrings {
|
||||
if pydocstyle::helpers::should_ignore_definition(
|
||||
self,
|
||||
&definition,
|
||||
&self.settings.pydocstyle.ignore_decorators,
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if definition.docstring.is_none() {
|
||||
pydocstyle::rules::not_missing(self, &definition, &visibility);
|
||||
continue;
|
||||
|
||||
@@ -7,8 +7,8 @@ use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::rules::pycodestyle::logical_lines::{iter_logical_lines, TokenFlags};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
extraneous_whitespace, indentation, space_around_operator, whitespace_around_keywords,
|
||||
whitespace_before_comment,
|
||||
extraneous_whitespace, indentation, missing_whitespace_after_keyword, space_around_operator,
|
||||
whitespace_around_keywords, whitespace_before_comment,
|
||||
};
|
||||
use crate::settings::Settings;
|
||||
use crate::source_code::{Locator, Stylist};
|
||||
@@ -106,6 +106,18 @@ pub fn check_logical_lines(
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (location, kind) in missing_whitespace_after_keyword(&line.tokens) {
|
||||
if settings.rules.enabled(kind.rule()) {
|
||||
diagnostics.push(Diagnostic {
|
||||
kind,
|
||||
location,
|
||||
end_location: location,
|
||||
fix: None,
|
||||
parent: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if line.flags.contains(TokenFlags::COMMENT) {
|
||||
for (range, kind) in whitespace_before_comment(&line.tokens, locator) {
|
||||
@@ -152,8 +164,8 @@ pub fn check_logical_lines(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use crate::checkers::logical_lines::iter_logical_lines;
|
||||
use crate::source_code::Locator;
|
||||
@@ -164,7 +176,7 @@ mod tests {
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -185,7 +197,7 @@ x = [
|
||||
]
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -199,7 +211,7 @@ z = x + 1"#;
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let contents = "x = 'abc'";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -212,7 +224,7 @@ z = x + 1"#;
|
||||
def f():
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -227,7 +239,7 @@ def f():
|
||||
# Comment goes here.
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
|
||||
@@ -148,9 +148,7 @@ pub fn check_noqa(
|
||||
UnusedNOQA { codes: None },
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
Location::new(row + 1, lines[row].chars().count()),
|
||||
@@ -217,9 +215,7 @@ pub fn check_noqa(
|
||||
},
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
if valid_codes.is_empty() {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
|
||||
@@ -9,6 +9,7 @@ use crate::rules::flake8_executable::rules::{
|
||||
};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
doc_line_too_long, line_too_long, mixed_spaces_and_tabs, no_newline_at_end_of_file,
|
||||
trailing_whitespace,
|
||||
};
|
||||
use crate::rules::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
|
||||
use crate::rules::pylint;
|
||||
@@ -41,11 +42,14 @@ pub fn check_physical_lines(
|
||||
let enforce_unnecessary_coding_comment = settings.rules.enabled(&Rule::UTF8EncodingDeclaration);
|
||||
let enforce_mixed_spaces_and_tabs = settings.rules.enabled(&Rule::MixedSpacesAndTabs);
|
||||
let enforce_bidirectional_unicode = settings.rules.enabled(&Rule::BidirectionalUnicode);
|
||||
let enforce_trailing_whitespace = settings.rules.enabled(&Rule::TrailingWhitespace);
|
||||
let enforce_blank_line_contains_whitespace =
|
||||
settings.rules.enabled(&Rule::BlankLineContainsWhitespace);
|
||||
|
||||
let fix_unnecessary_coding_comment = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
let fix_unnecessary_coding_comment =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
|
||||
let mut commented_lines_iter = commented_lines.iter().peekable();
|
||||
let mut doc_lines_iter = doc_lines.iter().peekable();
|
||||
@@ -139,14 +143,19 @@ pub fn check_physical_lines(
|
||||
if enforce_bidirectional_unicode {
|
||||
diagnostics.extend(pylint::rules::bidirectional_unicode(index, line));
|
||||
}
|
||||
|
||||
if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
|
||||
if let Some(diagnostic) = trailing_whitespace(index, line, settings, autofix) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if enforce_no_newline_at_end_of_file {
|
||||
if let Some(diagnostic) = no_newline_at_end_of_file(
|
||||
stylist,
|
||||
contents,
|
||||
matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
|
||||
autofix.into() && settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
|
||||
) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
//! Lint rules based on token traversal.
|
||||
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::lex::docstring_detection::StateMachine;
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
@@ -107,8 +108,7 @@ pub fn check_tokens(
|
||||
locator,
|
||||
*start,
|
||||
*end,
|
||||
matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::InvalidEscapeSequence),
|
||||
autofix.into() && settings.rules.should_fix(&Rule::InvalidEscapeSequence),
|
||||
));
|
||||
}
|
||||
}
|
||||
@@ -147,7 +147,7 @@ pub fn check_tokens(
|
||||
// COM812, COM818, COM819
|
||||
if enforce_trailing_comma {
|
||||
diagnostics.extend(
|
||||
flake8_commas::rules::trailing_commas(tokens, settings, autofix)
|
||||
flake8_commas::rules::trailing_commas(tokens, locator, settings, autofix)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
|
||||
@@ -52,6 +52,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pycodestyle, "E273") => Rule::TabAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E274") => Rule::TabBeforeKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E275") => Rule::MissingWhitespaceAfterKeyword,
|
||||
(Pycodestyle, "E401") => Rule::MultipleImportsOnOneLine,
|
||||
(Pycodestyle, "E402") => Rule::ModuleImportNotAtTopOfFile,
|
||||
(Pycodestyle, "E501") => Rule::LineTooLong,
|
||||
@@ -72,7 +74,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pycodestyle, "E999") => Rule::SyntaxError,
|
||||
|
||||
// pycodestyle warnings
|
||||
(Pycodestyle, "W291") => Rule::TrailingWhitespace,
|
||||
(Pycodestyle, "W292") => Rule::NoNewLineAtEndOfFile,
|
||||
(Pycodestyle, "W293") => Rule::BlankLineContainsWhitespace,
|
||||
(Pycodestyle, "W505") => Rule::DocLineTooLong,
|
||||
(Pycodestyle, "W605") => Rule::InvalidEscapeSequence,
|
||||
|
||||
@@ -126,6 +130,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pylint, "E0101") => Rule::ReturnInInit,
|
||||
(Pylint, "E0604") => Rule::InvalidAllObject,
|
||||
(Pylint, "E0605") => Rule::InvalidAllFormat,
|
||||
(Pylint, "E1205") => Rule::LoggingTooManyArgs,
|
||||
(Pylint, "E1206") => Rule::LoggingTooFewArgs,
|
||||
(Pylint, "E1307") => Rule::BadStringFormatType,
|
||||
(Pylint, "E2502") => Rule::BidirectionalUnicode,
|
||||
(Pylint, "E1310") => Rule::BadStrStripCall,
|
||||
@@ -139,13 +145,16 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pylint, "R0133") => Rule::ComparisonOfConstant,
|
||||
(Pylint, "R1701") => Rule::ConsiderMergingIsinstance,
|
||||
(Pylint, "R1722") => Rule::ConsiderUsingSysExit,
|
||||
(Pylint, "R5501") => Rule::CollapsibleElseIf,
|
||||
(Pylint, "R2004") => Rule::MagicValueComparison,
|
||||
(Pylint, "W0120") => Rule::UselessElseOnLoop,
|
||||
(Pylint, "W0602") => Rule::GlobalVariableNotAssigned,
|
||||
(Pylint, "W0603") => Rule::GlobalStatement,
|
||||
(Pylint, "R0911") => Rule::TooManyReturnStatements,
|
||||
(Pylint, "R0913") => Rule::TooManyArguments,
|
||||
(Pylint, "R0912") => Rule::TooManyBranches,
|
||||
(Pylint, "R0915") => Rule::TooManyStatements,
|
||||
(Pylint, "W2901") => Rule::RedefinedLoopName,
|
||||
|
||||
// flake8-builtins
|
||||
(Flake8Builtins, "001") => Rule::BuiltinVariableShadowing,
|
||||
@@ -287,10 +296,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Flake8Simplify, "210") => Rule::IfExprWithTrueFalse,
|
||||
(Flake8Simplify, "211") => Rule::IfExprWithFalseTrue,
|
||||
(Flake8Simplify, "212") => Rule::IfExprWithTwistedArms,
|
||||
(Flake8Simplify, "220") => Rule::AAndNotA,
|
||||
(Flake8Simplify, "221") => Rule::AOrNotA,
|
||||
(Flake8Simplify, "222") => Rule::OrTrue,
|
||||
(Flake8Simplify, "223") => Rule::AndFalse,
|
||||
(Flake8Simplify, "220") => Rule::ExprAndNotExpr,
|
||||
(Flake8Simplify, "221") => Rule::ExprOrNotExpr,
|
||||
(Flake8Simplify, "222") => Rule::ExprOrTrue,
|
||||
(Flake8Simplify, "223") => Rule::ExprAndFalse,
|
||||
(Flake8Simplify, "300") => Rule::YodaConditions,
|
||||
(Flake8Simplify, "401") => Rule::DictGetWithDefault,
|
||||
|
||||
@@ -480,6 +489,11 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Flake8Pyi, "001") => Rule::PrefixTypeParams,
|
||||
(Flake8Pyi, "007") => Rule::UnrecognizedPlatformCheck,
|
||||
(Flake8Pyi, "008") => Rule::UnrecognizedPlatformName,
|
||||
(Flake8Pyi, "009") => Rule::PassStatementStubBody,
|
||||
(Flake8Pyi, "010") => Rule::NonEmptyStubBody,
|
||||
(Flake8Pyi, "011") => Rule::TypedArgumentSimpleDefaults,
|
||||
(Flake8Pyi, "014") => Rule::ArgumentSimpleDefaults,
|
||||
(Flake8Pyi, "021") => Rule::DocstringInStub,
|
||||
|
||||
// flake8-pytest-style
|
||||
(Flake8PytestStyle, "001") => Rule::IncorrectFixtureParenthesesStyle,
|
||||
@@ -513,6 +527,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Flake8Pie, "794") => Rule::DupeClassFieldDefinitions,
|
||||
(Flake8Pie, "796") => Rule::PreferUniqueEnums,
|
||||
(Flake8Pie, "800") => Rule::UnnecessarySpread,
|
||||
(Flake8Pie, "802") => Rule::UnnecessaryComprehensionAnyAll,
|
||||
(Flake8Pie, "804") => Rule::UnnecessaryDictKwargs,
|
||||
(Flake8Pie, "807") => Rule::PreferListBuiltin,
|
||||
(Flake8Pie, "810") => Rule::SingleStartsEndsWith,
|
||||
@@ -608,6 +623,9 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
|
||||
// flake8-django
|
||||
(Flake8Django, "001") => Rule::NullableModelStringField,
|
||||
(Flake8Django, "003") => Rule::LocalsInRenderFunction,
|
||||
(Flake8Django, "006") => Rule::ExcludeWithModelForm,
|
||||
(Flake8Django, "007") => Rule::AllWithModelForm,
|
||||
(Flake8Django, "008") => Rule::ModelWithoutDunderStr,
|
||||
(Flake8Django, "013") => Rule::NonLeadingReceiverDecorator,
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
use bitflags::bitflags;
|
||||
use nohash_hasher::{IntMap, IntSet};
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::registry::LintSource;
|
||||
use crate::settings::Settings;
|
||||
|
||||
bitflags! {
|
||||
@@ -20,7 +20,7 @@ impl Flags {
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports))
|
||||
.any(|rule_code| rule_code.lint_source().is_imports())
|
||||
{
|
||||
Self::NOQA | Self::ISORT
|
||||
} else {
|
||||
@@ -150,56 +150,61 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use nohash_hasher::{IntMap, IntSet};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use crate::directives::{extract_isort_directives, extract_noqa_line_for};
|
||||
|
||||
#[test]
|
||||
fn noqa_extraction() {
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = 2
|
||||
z = x + 1
|
||||
",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
|
||||
y = 2
|
||||
z = x + 1
|
||||
",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = '''abc
|
||||
def
|
||||
ghi
|
||||
'''
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -207,13 +212,14 @@ z = x + 1",
|
||||
IntMap::from_iter([(1, 4), (2, 4), (3, 4)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = '''abc
|
||||
def
|
||||
ghi
|
||||
'''
|
||||
z = 2",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -221,12 +227,13 @@ z = 2",
|
||||
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = '''abc
|
||||
def
|
||||
ghi
|
||||
'''",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -234,17 +241,19 @@ ghi
|
||||
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"x = \
|
||||
1"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::from_iter([(1, 2)]));
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"from foo import \
|
||||
bar as baz, \
|
||||
qux as quux"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -252,7 +261,7 @@ ghi
|
||||
IntMap::from_iter([(1, 3), (2, 3)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"
|
||||
# Foo
|
||||
from foo import \
|
||||
@@ -262,6 +271,7 @@ x = \
|
||||
1
|
||||
y = \
|
||||
2"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -275,7 +285,7 @@ y = \
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
|
||||
let contents = "# isort: off
|
||||
@@ -283,7 +293,7 @@ x = 1
|
||||
y = 2
|
||||
# isort: on
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4])
|
||||
@@ -296,7 +306,7 @@ y = 2
|
||||
# isort: on
|
||||
z = x + 1
|
||||
# isort: on";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4, 5])
|
||||
@@ -306,7 +316,7 @@ z = x + 1
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4])
|
||||
@@ -316,7 +326,7 @@ z = x + 1";
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
|
||||
let contents = "# isort: off
|
||||
@@ -325,7 +335,7 @@ x = 1
|
||||
y = 2
|
||||
# isort: skip_file
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
}
|
||||
|
||||
@@ -334,20 +344,20 @@ z = x + 1";
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, Vec::<usize>::new());
|
||||
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
# isort: split
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, vec![3]);
|
||||
|
||||
let contents = "x = 1
|
||||
y = 2 # isort: split
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, vec![2]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,62 @@
|
||||
//! Doc line extraction. In this context, a doc line is a line consisting of a
|
||||
//! standalone comment or a constant string statement.
|
||||
|
||||
use std::iter::FusedIterator;
|
||||
|
||||
use rustpython_parser::ast::{Constant, ExprKind, Stmt, StmtKind, Suite};
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::ast::visitor;
|
||||
use crate::ast::visitor::Visitor;
|
||||
|
||||
/// Extract doc lines (standalone comments) from a token sequence.
|
||||
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> Vec<usize> {
|
||||
let mut doc_lines: Vec<usize> = Vec::default();
|
||||
let mut prev: Option<usize> = None;
|
||||
for (start, tok, end) in lxr.iter().flatten() {
|
||||
if matches!(tok, Tok::Indent | Tok::Dedent | Tok::Newline) {
|
||||
continue;
|
||||
}
|
||||
if matches!(tok, Tok::Comment(..)) {
|
||||
if let Some(prev) = prev {
|
||||
if start.row() > prev {
|
||||
doc_lines.push(start.row());
|
||||
}
|
||||
} else {
|
||||
doc_lines.push(start.row());
|
||||
}
|
||||
}
|
||||
prev = Some(end.row());
|
||||
}
|
||||
doc_lines
|
||||
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
|
||||
DocLines::new(lxr)
|
||||
}
|
||||
|
||||
pub struct DocLines<'a> {
|
||||
inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
|
||||
prev: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'a> DocLines<'a> {
|
||||
fn new(lxr: &'a [LexResult]) -> Self {
|
||||
Self {
|
||||
inner: lxr.iter().flatten(),
|
||||
prev: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for DocLines<'_> {
|
||||
type Item = usize;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let (start, tok, end) = self.inner.next()?;
|
||||
|
||||
match tok {
|
||||
Tok::Indent | Tok::Dedent | Tok::Newline => continue,
|
||||
Tok::Comment(..) => {
|
||||
if let Some(prev) = self.prev {
|
||||
if start.row() > prev {
|
||||
break Some(start.row());
|
||||
}
|
||||
} else {
|
||||
break Some(start.row());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.prev = Some(end.row());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for DocLines<'_> {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct StringLinesVisitor {
|
||||
string_lines: Vec<usize>,
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
/// See: <https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals>
|
||||
|
||||
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
|
||||
"u\"\"\"", "u'''", "r\"\"\"", "r'''", "U\"\"\"", "U'''", "R\"\"\"", "R'''", "\"\"\"", "'''",
|
||||
];
|
||||
|
||||
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &[
|
||||
"u\"", "u'", "r\"", "r'", "u\"", "u'", "r\"", "r'", "U\"", "U'", "R\"", "R'", "\"", "'",
|
||||
];
|
||||
|
||||
pub const TRIPLE_QUOTE_SUFFIXES: &[&str] = &["\"\"\"", "'''"];
|
||||
|
||||
pub const SINGLE_QUOTE_SUFFIXES: &[&str] = &["\"", "'"];
|
||||
@@ -1,4 +1,3 @@
|
||||
pub mod constants;
|
||||
pub mod definition;
|
||||
pub mod extraction;
|
||||
pub mod google;
|
||||
|
||||
@@ -576,6 +576,7 @@ mod tests {
|
||||
let expected = Pyproject::new(Options {
|
||||
pydocstyle: Some(pydocstyle::settings::Options {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: None,
|
||||
}),
|
||||
..default_options([Linter::Pydocstyle.into()])
|
||||
});
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use std::fs::File;
|
||||
use std::io::{BufReader, Read};
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
@@ -88,12 +86,3 @@ pub fn relativize_path(path: impl AsRef<Path>) -> String {
|
||||
}
|
||||
format!("{}", path.display())
|
||||
}
|
||||
|
||||
/// Read a file's contents from disk.
|
||||
pub fn read_file<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut buf_reader = BufReader::new(file);
|
||||
let mut contents = String::new();
|
||||
buf_reader.read_to_string(&mut contents)?;
|
||||
Ok(contents)
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
//!
|
||||
//! TODO(charlie): Consolidate with the existing AST-based docstring extraction.
|
||||
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
#[derive(Default)]
|
||||
enum State {
|
||||
|
||||
@@ -33,10 +33,8 @@ pub mod resolver;
|
||||
mod rule_redirects;
|
||||
mod rule_selector;
|
||||
mod rules;
|
||||
mod rustpython_helpers;
|
||||
pub mod settings;
|
||||
pub mod source_code;
|
||||
mod vendor;
|
||||
mod violation;
|
||||
mod visibility;
|
||||
|
||||
|
||||
@@ -14,7 +14,6 @@ use crate::rules::{
|
||||
flake8_quotes, flake8_self, flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments,
|
||||
isort, mccabe, pep8_naming, pycodestyle, pydocstyle, pylint, pyupgrade,
|
||||
};
|
||||
use crate::rustpython_helpers::tokenize;
|
||||
use crate::settings::configuration::Configuration;
|
||||
use crate::settings::options::Options;
|
||||
use crate::settings::{defaults, flags, Settings};
|
||||
@@ -175,7 +174,7 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
Settings::from_configuration(configuration, Path::new(".")).map_err(|e| e.to_string())?;
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(contents);
|
||||
|
||||
@@ -5,8 +5,8 @@ use anyhow::{anyhow, Result};
|
||||
use colored::Colorize;
|
||||
use log::error;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::error::ParseError;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::ParseError;
|
||||
|
||||
use crate::autofix::fix_file;
|
||||
use crate::checkers::ast::check_ast;
|
||||
@@ -20,11 +20,11 @@ use crate::directives::Directives;
|
||||
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
|
||||
use crate::message::{Message, Source};
|
||||
use crate::noqa::{add_noqa, rule_is_ignored};
|
||||
use crate::registry::{Diagnostic, LintSource, Rule};
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
use crate::rules::pycodestyle;
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code::{Indexer, Locator, Stylist};
|
||||
use crate::{directives, fs, rustpython_helpers};
|
||||
use crate::{directives, fs};
|
||||
|
||||
const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
|
||||
const CARGO_PKG_REPOSITORY: &str = env!("CARGO_PKG_REPOSITORY");
|
||||
@@ -81,7 +81,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Tokens))
|
||||
.any(|rule_code| rule_code.lint_source().is_tokens())
|
||||
{
|
||||
diagnostics.extend(check_tokens(locator, &tokens, settings, autofix));
|
||||
}
|
||||
@@ -90,7 +90,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Filesystem))
|
||||
.any(|rule_code| rule_code.lint_source().is_filesystem())
|
||||
{
|
||||
diagnostics.extend(check_file_path(path, package, settings));
|
||||
}
|
||||
@@ -99,7 +99,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::LogicalLines))
|
||||
.any(|rule_code| rule_code.lint_source().is_logical_lines())
|
||||
{
|
||||
diagnostics.extend(check_logical_lines(&tokens, locator, stylist, settings));
|
||||
}
|
||||
@@ -108,14 +108,14 @@ pub fn check_path(
|
||||
let use_ast = settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Ast));
|
||||
.any(|rule_code| rule_code.lint_source().is_ast());
|
||||
let use_imports = !directives.isort.skip_file
|
||||
&& settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
|
||||
.any(|rule_code| rule_code.lint_source().is_imports());
|
||||
if use_ast || use_imports || use_doc_lines {
|
||||
match rustpython_helpers::parse_program_tokens(tokens, &path.to_string_lossy()) {
|
||||
match ruff_rustpython::parse_program_tokens(tokens, &path.to_string_lossy()) {
|
||||
Ok(python_ast) => {
|
||||
if use_ast {
|
||||
diagnostics.extend(check_ast(
|
||||
@@ -177,7 +177,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::PhysicalLines))
|
||||
.any(|rule_code| rule_code.lint_source().is_physical_lines())
|
||||
{
|
||||
diagnostics.extend(check_physical_lines(
|
||||
path,
|
||||
@@ -199,11 +199,11 @@ pub fn check_path(
|
||||
};
|
||||
|
||||
// Enforce `noqa` directives.
|
||||
if (matches!(noqa, flags::Noqa::Enabled) && !diagnostics.is_empty())
|
||||
if (noqa.into() && !diagnostics.is_empty())
|
||||
|| settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::NoQa))
|
||||
.any(|rule_code| rule_code.lint_source().is_noqa())
|
||||
{
|
||||
check_noqa(
|
||||
&mut diagnostics,
|
||||
@@ -223,10 +223,10 @@ const MAX_ITERATIONS: usize = 100;
|
||||
/// Add any missing `# noqa` pragmas to the source code at the given `Path`.
|
||||
pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings) -> Result<usize> {
|
||||
// Read the file from disk.
|
||||
let contents = fs::read_file(path)?;
|
||||
let contents = std::fs::read_to_string(path)?;
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&contents);
|
||||
@@ -290,7 +290,7 @@ pub fn lint_only(
|
||||
autofix: flags::Autofix,
|
||||
) -> LinterResult<Vec<Message>> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(contents);
|
||||
@@ -331,7 +331,9 @@ pub fn lint_only(
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source)
|
||||
let lineno = diagnostic.location.row();
|
||||
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source, noqa_row)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
@@ -359,7 +361,7 @@ pub fn lint_fix<'a>(
|
||||
// Continuously autofix until the source code stabilizes.
|
||||
loop {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&transformed);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&transformed);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&transformed);
|
||||
@@ -469,7 +471,14 @@ This indicates a bug in `{}`. If you could open an issue at:
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source)
|
||||
let lineno = diagnostic.location.row();
|
||||
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
|
||||
Message::from_diagnostic(
|
||||
diagnostic,
|
||||
path_lossy.to_string(),
|
||||
source,
|
||||
noqa_row,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
|
||||
@@ -16,6 +16,7 @@ pub struct Message {
|
||||
pub fix: Option<Fix>,
|
||||
pub filename: String,
|
||||
pub source: Option<Source>,
|
||||
pub noqa_row: usize,
|
||||
}
|
||||
|
||||
impl Message {
|
||||
@@ -23,6 +24,7 @@ impl Message {
|
||||
diagnostic: Diagnostic,
|
||||
filename: String,
|
||||
source: Option<Source>,
|
||||
noqa_row: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
kind: diagnostic.kind,
|
||||
@@ -34,6 +36,7 @@ impl Message {
|
||||
fix: diagnostic.fix,
|
||||
filename,
|
||||
source,
|
||||
noqa_row,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,6 +53,8 @@ ruff_macros::register_rules!(
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MultipleSpacesAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MultipleSpacesBeforeKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::TabAfterKeyword,
|
||||
@@ -77,7 +79,9 @@ ruff_macros::register_rules!(
|
||||
rules::pycodestyle::rules::IOError,
|
||||
rules::pycodestyle::rules::SyntaxError,
|
||||
// pycodestyle warnings
|
||||
rules::pycodestyle::rules::TrailingWhitespace,
|
||||
rules::pycodestyle::rules::NoNewLineAtEndOfFile,
|
||||
rules::pycodestyle::rules::BlankLineContainsWhitespace,
|
||||
rules::pycodestyle::rules::DocLineTooLong,
|
||||
rules::pycodestyle::rules::InvalidEscapeSequence,
|
||||
// pyflakes
|
||||
@@ -131,6 +135,7 @@ ruff_macros::register_rules!(
|
||||
rules::pylint::rules::BadStringFormatType,
|
||||
rules::pylint::rules::BidirectionalUnicode,
|
||||
rules::pylint::rules::BadStrStripCall,
|
||||
rules::pylint::rules::CollapsibleElseIf,
|
||||
rules::pylint::rules::UselessImportAlias,
|
||||
rules::pylint::rules::UnnecessaryDirectLambdaCall,
|
||||
rules::pylint::rules::NonlocalWithoutBinding,
|
||||
@@ -144,11 +149,15 @@ ruff_macros::register_rules!(
|
||||
rules::pylint::rules::ConsiderUsingSysExit,
|
||||
rules::pylint::rules::MagicValueComparison,
|
||||
rules::pylint::rules::UselessElseOnLoop,
|
||||
rules::pylint::rules::GlobalStatement,
|
||||
rules::pylint::rules::GlobalVariableNotAssigned,
|
||||
rules::pylint::rules::TooManyReturnStatements,
|
||||
rules::pylint::rules::TooManyArguments,
|
||||
rules::pylint::rules::TooManyBranches,
|
||||
rules::pylint::rules::TooManyStatements,
|
||||
rules::pylint::rules::RedefinedLoopName,
|
||||
rules::pylint::rules::LoggingTooFewArgs,
|
||||
rules::pylint::rules::LoggingTooManyArgs,
|
||||
// flake8-builtins
|
||||
rules::flake8_builtins::rules::BuiltinVariableShadowing,
|
||||
rules::flake8_builtins::rules::BuiltinArgumentShadowing,
|
||||
@@ -275,10 +284,10 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_simplify::rules::IfExprWithTrueFalse,
|
||||
rules::flake8_simplify::rules::IfExprWithFalseTrue,
|
||||
rules::flake8_simplify::rules::IfExprWithTwistedArms,
|
||||
rules::flake8_simplify::rules::AAndNotA,
|
||||
rules::flake8_simplify::rules::AOrNotA,
|
||||
rules::flake8_simplify::rules::OrTrue,
|
||||
rules::flake8_simplify::rules::AndFalse,
|
||||
rules::flake8_simplify::rules::ExprAndNotExpr,
|
||||
rules::flake8_simplify::rules::ExprOrNotExpr,
|
||||
rules::flake8_simplify::rules::ExprOrTrue,
|
||||
rules::flake8_simplify::rules::ExprAndFalse,
|
||||
rules::flake8_simplify::rules::YodaConditions,
|
||||
rules::flake8_simplify::rules::DictGetWithDefault,
|
||||
// pyupgrade
|
||||
@@ -454,6 +463,11 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_pyi::rules::PrefixTypeParams,
|
||||
rules::flake8_pyi::rules::UnrecognizedPlatformCheck,
|
||||
rules::flake8_pyi::rules::UnrecognizedPlatformName,
|
||||
rules::flake8_pyi::rules::PassStatementStubBody,
|
||||
rules::flake8_pyi::rules::NonEmptyStubBody,
|
||||
rules::flake8_pyi::rules::DocstringInStub,
|
||||
rules::flake8_pyi::rules::TypedArgumentSimpleDefaults,
|
||||
rules::flake8_pyi::rules::ArgumentSimpleDefaults,
|
||||
// flake8-pytest-style
|
||||
rules::flake8_pytest_style::rules::IncorrectFixtureParenthesesStyle,
|
||||
rules::flake8_pytest_style::rules::FixturePositionalArgs,
|
||||
@@ -488,6 +502,7 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_pie::rules::UnnecessaryDictKwargs,
|
||||
rules::flake8_pie::rules::PreferListBuiltin,
|
||||
rules::flake8_pie::rules::SingleStartsEndsWith,
|
||||
rules::flake8_pie::rules::UnnecessaryComprehensionAnyAll,
|
||||
// flake8-commas
|
||||
rules::flake8_commas::rules::TrailingCommaMissing,
|
||||
rules::flake8_commas::rules::TrailingCommaOnBareTupleProhibited,
|
||||
@@ -568,6 +583,9 @@ ruff_macros::register_rules!(
|
||||
rules::ruff::rules::UnusedNOQA,
|
||||
// flake8-django
|
||||
rules::flake8_django::rules::NullableModelStringField,
|
||||
rules::flake8_django::rules::LocalsInRenderFunction,
|
||||
rules::flake8_django::rules::ExcludeWithModelForm,
|
||||
rules::flake8_django::rules::AllWithModelForm,
|
||||
rules::flake8_django::rules::ModelWithoutDunderStr,
|
||||
rules::flake8_django::rules::NonLeadingReceiverDecorator,
|
||||
);
|
||||
@@ -753,6 +771,7 @@ impl Linter {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(is_macro::Is)]
|
||||
pub enum LintSource {
|
||||
Ast,
|
||||
Io,
|
||||
@@ -760,7 +779,7 @@ pub enum LintSource {
|
||||
LogicalLines,
|
||||
Tokens,
|
||||
Imports,
|
||||
NoQa,
|
||||
Noqa,
|
||||
Filesystem,
|
||||
}
|
||||
|
||||
@@ -769,7 +788,7 @@ impl Rule {
|
||||
/// physical lines).
|
||||
pub const fn lint_source(&self) -> &'static LintSource {
|
||||
match self {
|
||||
Rule::UnusedNOQA => &LintSource::NoQa,
|
||||
Rule::UnusedNOQA => &LintSource::Noqa,
|
||||
Rule::BlanketNOQA
|
||||
| Rule::BlanketTypeIgnore
|
||||
| Rule::DocLineTooLong
|
||||
@@ -782,7 +801,9 @@ impl Rule {
|
||||
| Rule::ShebangNewline
|
||||
| Rule::BidirectionalUnicode
|
||||
| Rule::ShebangPython
|
||||
| Rule::ShebangWhitespace => &LintSource::PhysicalLines,
|
||||
| Rule::ShebangWhitespace
|
||||
| Rule::TrailingWhitespace
|
||||
| Rule::BlankLineContainsWhitespace => &LintSource::PhysicalLines,
|
||||
Rule::AmbiguousUnicodeCharacterComment
|
||||
| Rule::AmbiguousUnicodeCharacterDocstring
|
||||
| Rule::AmbiguousUnicodeCharacterString
|
||||
@@ -812,6 +833,7 @@ impl Rule {
|
||||
| Rule::MultipleSpacesAfterOperator
|
||||
| Rule::MultipleSpacesBeforeKeyword
|
||||
| Rule::MultipleSpacesBeforeOperator
|
||||
| Rule::MissingWhitespaceAfterKeyword
|
||||
| Rule::NoIndentedBlock
|
||||
| Rule::NoIndentedBlockComment
|
||||
| Rule::NoSpaceAfterBlockComment
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
//! Discover Python files, and their corresponding `Settings`, from the
|
||||
//! Discover Python files, and their corresponding [`Settings`], from the
|
||||
//! filesystem.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
@@ -19,7 +19,7 @@ use crate::settings::{pyproject, AllSettings, Settings};
|
||||
|
||||
/// The strategy used to discover the relevant `pyproject.toml` file for each
|
||||
/// Python file.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, is_macro::Is)]
|
||||
pub enum PyprojectDiscovery {
|
||||
/// Use a fixed `pyproject.toml` file for all Python files (i.e., one
|
||||
/// provided on the command-line).
|
||||
@@ -30,7 +30,7 @@ pub enum PyprojectDiscovery {
|
||||
}
|
||||
|
||||
impl PyprojectDiscovery {
|
||||
fn top_level_settings(&self) -> &AllSettings {
|
||||
pub fn top_level_settings(&self) -> &AllSettings {
|
||||
match self {
|
||||
PyprojectDiscovery::Fixed(settings) => settings,
|
||||
PyprojectDiscovery::Hierarchical(settings) => settings,
|
||||
@@ -65,12 +65,12 @@ pub struct Resolver {
|
||||
}
|
||||
|
||||
impl Resolver {
|
||||
/// Add a resolved `Settings` under a given `PathBuf` scope.
|
||||
/// Add a resolved [`Settings`] under a given [`PathBuf`] scope.
|
||||
pub fn add(&mut self, path: PathBuf, settings: AllSettings) {
|
||||
self.settings.insert(path, settings);
|
||||
}
|
||||
|
||||
/// Return the appropriate `AllSettings` for a given `Path`.
|
||||
/// Return the appropriate [`AllSettings`] for a given [`Path`].
|
||||
pub fn resolve_all<'a>(
|
||||
&'a self,
|
||||
path: &Path,
|
||||
@@ -82,13 +82,7 @@ impl Resolver {
|
||||
.settings
|
||||
.iter()
|
||||
.rev()
|
||||
.find_map(|(root, settings)| {
|
||||
if path.starts_with(root) {
|
||||
Some(settings)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.find_map(|(root, settings)| path.starts_with(root).then_some(settings))
|
||||
.unwrap_or(default),
|
||||
}
|
||||
}
|
||||
@@ -97,7 +91,7 @@ impl Resolver {
|
||||
&self.resolve_all(path, strategy).lib
|
||||
}
|
||||
|
||||
/// Return an iterator over the resolved `Settings` in this `Resolver`.
|
||||
/// Return an iterator over the resolved [`Settings`] in this [`Resolver`].
|
||||
pub fn iter(&self) -> impl Iterator<Item = &AllSettings> {
|
||||
self.settings.values()
|
||||
}
|
||||
@@ -112,8 +106,8 @@ impl ConfigProcessor for &NoOpProcessor {
|
||||
fn process_config(&self, _config: &mut Configuration) {}
|
||||
}
|
||||
|
||||
/// Recursively resolve a `Configuration` from a `pyproject.toml` file at the
|
||||
/// specified `Path`.
|
||||
/// Recursively resolve a [`Configuration`] from a `pyproject.toml` file at the
|
||||
/// specified [`Path`].
|
||||
// TODO(charlie): This whole system could do with some caching. Right now, if a
|
||||
// configuration file extends another in the same path, we'll re-parse the same
|
||||
// file at least twice (possibly more than twice, since we'll also parse it when
|
||||
@@ -161,26 +155,26 @@ pub fn resolve_configuration(
|
||||
Ok(configuration)
|
||||
}
|
||||
|
||||
/// Extract the project root (scope) and `Settings` from a given
|
||||
/// Extract the project root (scope) and [`Settings`] from a given
|
||||
/// `pyproject.toml`.
|
||||
pub fn resolve_scoped_settings(
|
||||
pyproject: &Path,
|
||||
relativity: &Relativity,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<(PathBuf, AllSettings)> {
|
||||
let project_root = relativity.resolve(pyproject);
|
||||
let configuration = resolve_configuration(pyproject, relativity, processor)?;
|
||||
let project_root = relativity.resolve(pyproject);
|
||||
let settings = AllSettings::from_configuration(configuration, &project_root)?;
|
||||
Ok((project_root, settings))
|
||||
}
|
||||
|
||||
/// Extract the `Settings` from a given `pyproject.toml`.
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml`.
|
||||
pub fn resolve_settings(pyproject: &Path, relativity: &Relativity) -> Result<AllSettings> {
|
||||
let (_project_root, settings) = resolve_scoped_settings(pyproject, relativity, &NoOpProcessor)?;
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
/// Extract the `Settings` from a given `pyproject.toml` and process the
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml` and process the
|
||||
/// configuration with the given [`ConfigProcessor`].
|
||||
pub fn resolve_settings_with_processor(
|
||||
pyproject: &Path,
|
||||
@@ -197,18 +191,18 @@ fn match_exclusion(file_path: &str, file_basename: &str, exclusion: &globset::Gl
|
||||
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
|
||||
}
|
||||
|
||||
/// Return `true` if the `Path` appears to be that of a Python file.
|
||||
/// Return `true` if the [`Path`] appears to be that of a Python file.
|
||||
fn is_python_path(path: &Path) -> bool {
|
||||
path.extension()
|
||||
.map_or(false, |ext| ext == "py" || ext == "pyi")
|
||||
}
|
||||
|
||||
/// Return `true` if the `Path` appears to be that of a Python interface definition file (`.pyi`).
|
||||
/// Return `true` if the [`Path`] appears to be that of a Python interface definition file (`.pyi`).
|
||||
pub fn is_interface_definition_path(path: &Path) -> bool {
|
||||
path.extension().map_or(false, |ext| ext == "pyi")
|
||||
}
|
||||
|
||||
/// Return `true` if the `Entry` appears to be that of a Python file.
|
||||
/// Return `true` if the [`DirEntry`] appears to be that of a Python file.
|
||||
pub fn is_python_entry(entry: &DirEntry) -> bool {
|
||||
is_python_path(entry.path())
|
||||
&& !entry
|
||||
@@ -228,7 +222,7 @@ pub fn python_files_in_path(
|
||||
// Search for `pyproject.toml` files in all parent directories.
|
||||
let mut resolver = Resolver::default();
|
||||
let mut seen = FxHashSet::default();
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
for path in &paths {
|
||||
for ancestor in path.ancestors() {
|
||||
if seen.insert(ancestor) {
|
||||
@@ -277,7 +271,7 @@ pub fn python_files_in_path(
|
||||
Box::new(|result| {
|
||||
// Search for the `pyproject.toml` file in this directory, before we visit any
|
||||
// of its contents.
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
if let Ok(entry) = &result {
|
||||
if entry
|
||||
.file_type()
|
||||
@@ -357,7 +351,7 @@ pub fn python_files_in_path(
|
||||
Ok((files.into_inner().unwrap(), resolver.into_inner().unwrap()))
|
||||
}
|
||||
|
||||
/// Return `true` if the Python file at `Path` is _not_ excluded.
|
||||
/// Return `true` if the Python file at [`Path`] is _not_ excluded.
|
||||
pub fn python_file_at_path(
|
||||
path: &Path,
|
||||
pyproject_strategy: &PyprojectDiscovery,
|
||||
@@ -372,7 +366,7 @@ pub fn python_file_at_path(
|
||||
|
||||
// Search for `pyproject.toml` files in all parent directories.
|
||||
let mut resolver = Resolver::default();
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
for ancestor in path.ancestors() {
|
||||
if let Some(pyproject) = settings_toml(ancestor)? {
|
||||
let (root, settings) =
|
||||
@@ -386,7 +380,7 @@ pub fn python_file_at_path(
|
||||
Ok(!is_file_excluded(&path, &resolver, pyproject_strategy))
|
||||
}
|
||||
|
||||
/// Return `true` if the given top-level `Path` should be excluded.
|
||||
/// Return `true` if the given top-level [`Path`] should be excluded.
|
||||
fn is_file_excluded(
|
||||
path: &Path,
|
||||
resolver: &Resolver,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
/// See: [eradicate.py](https://github.com/myint/eradicate/blob/98f199940979c94447a461d50d27862b118b282d/eradicate.py)
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_parser as parser;
|
||||
|
||||
static ALLOWLIST_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(
|
||||
@@ -77,7 +78,7 @@ pub fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
|
||||
}
|
||||
|
||||
// Finally, compile the source code.
|
||||
rustpython_parser::parser::parse_program(&line, "<filename>").is_ok()
|
||||
parser::parse_program(&line, "<filename>").is_ok()
|
||||
}
|
||||
|
||||
/// Returns `true` if a line is probably part of some multiline code.
|
||||
|
||||
@@ -60,9 +60,7 @@ pub fn commented_out_code(
|
||||
// Verify that the comment is on its own line, and that it contains code.
|
||||
if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
|
||||
let mut diagnostic = Diagnostic::new(CommentedOutCode, Range::new(start, end));
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::CommentedOutCode)
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(&Rule::CommentedOutCode) {
|
||||
diagnostic.amend(Fix::deletion(location, end_location));
|
||||
}
|
||||
Some(diagnostic)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use anyhow::{bail, Result};
|
||||
use rustpython_parser::ast::Stmt;
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::fix::Fix;
|
||||
@@ -16,7 +15,7 @@ pub fn add_return_none_annotation(locator: &Locator, stmt: &Stmt) -> Result<Fix>
|
||||
let mut seen_lpar = false;
|
||||
let mut seen_rpar = false;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, ..) in lexer::make_tokenizer_located(contents, range.location).flatten() {
|
||||
for (start, tok, ..) in lexer::lex_located(contents, Mode::Module, range.location).flatten() {
|
||||
if seen_lpar && seen_rpar {
|
||||
if matches!(tok, Tok::Colon) {
|
||||
return Ok(Fix::insertion(" -> None".to_string(), start));
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
const PASSWORD_NAMES: [&str; 7] = [
|
||||
"password", "pass", "passwd", "pwd", "secret", "token", "secrete",
|
||||
];
|
||||
static PASSWORD_CANDIDATE_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"(^|_)(pas+wo?r?d|pass(phrase)?|pwd|token|secrete?)($|_)").unwrap());
|
||||
|
||||
pub fn string_literal(expr: &Expr) -> Option<&str> {
|
||||
match &expr.node {
|
||||
@@ -18,9 +19,7 @@ pub fn string_literal(expr: &Expr) -> Option<&str> {
|
||||
|
||||
// Maybe use regex for this?
|
||||
pub fn matches_password_name(string: &str) -> bool {
|
||||
PASSWORD_NAMES
|
||||
.iter()
|
||||
.any(|name| string.to_lowercase().contains(name))
|
||||
PASSWORD_CANDIDATE_REGEX.is_match(string)
|
||||
}
|
||||
|
||||
pub fn is_untyped_exception(type_: Option<&Expr>, checker: &Checker) -> bool {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use rustpython_parser::ast::Stmt;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::ast::{Located, StmtKind};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
@@ -16,7 +17,7 @@ impl Violation for Assert {
|
||||
}
|
||||
|
||||
/// S101
|
||||
pub fn assert_used(stmt: &Located<StmtKind>) -> Diagnostic {
|
||||
pub fn assert_used(stmt: &Stmt) -> Diagnostic {
|
||||
Diagnostic::new(
|
||||
Assert,
|
||||
Range::new(stmt.location, stmt.location.with_col_offset("assert".len())),
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::ast::{ArgData, Arguments, Expr, Located};
|
||||
use rustpython_parser::ast::{Arg, Arguments, Expr};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violation::Violation;
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
|
||||
define_violation!(
|
||||
pub struct HardcodedPasswordDefault {
|
||||
pub string: String,
|
||||
@@ -19,7 +21,7 @@ impl Violation for HardcodedPasswordDefault {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Diagnostic> {
|
||||
fn check_password_kwarg(arg: &Arg, default: &Expr) -> Option<Diagnostic> {
|
||||
let string = string_literal(default).filter(|string| !string.is_empty())?;
|
||||
let kwarg_name = &arg.node.arg;
|
||||
if !matches_password_name(kwarg_name) {
|
||||
|
||||
@@ -56,6 +56,9 @@ const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
|
||||
"bytes",
|
||||
"int",
|
||||
"float",
|
||||
"getint",
|
||||
"getfloat",
|
||||
"getboolean",
|
||||
];
|
||||
|
||||
const FUNC_DEF_NAME_ALLOWLIST: &[&str] = &["__setitem__"];
|
||||
|
||||
@@ -85,10 +85,10 @@ expression: diagnostics
|
||||
- kind:
|
||||
BooleanPositionalArgInFunctionDefinition: ~
|
||||
location:
|
||||
row: 77
|
||||
row: 81
|
||||
column: 18
|
||||
end_location:
|
||||
row: 77
|
||||
row: 81
|
||||
column: 29
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use ruff_python::string::is_lower;
|
||||
use ruff_python::str::is_lower;
|
||||
use rustpython_parser::ast::{ExprKind, Stmt, StmtKind};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
|
||||
@@ -23,7 +23,7 @@ use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Stmt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::ast::types::{BindingKind, Range, RefEquality};
|
||||
use crate::ast::types::{Range, RefEquality};
|
||||
use crate::ast::visitor::Visitor;
|
||||
use crate::ast::{helpers, visitor};
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -31,7 +31,7 @@ use crate::fix::Fix;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violation::{AutofixKind, Availability, Violation};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, result_like::BoolLike)]
|
||||
pub enum Certainty {
|
||||
Certain,
|
||||
Uncertain,
|
||||
@@ -59,7 +59,7 @@ impl Violation for UnusedLoopControlVariable {
|
||||
let UnusedLoopControlVariable {
|
||||
name, certainty, ..
|
||||
} = self;
|
||||
if matches!(certainty, Certainty::Certain) {
|
||||
if certainty.to_bool() {
|
||||
format!("Loop control variable `{name}` not used within loop body")
|
||||
} else {
|
||||
format!("Loop control variable `{name}` may not be used within loop body")
|
||||
@@ -70,7 +70,7 @@ impl Violation for UnusedLoopControlVariable {
|
||||
let UnusedLoopControlVariable {
|
||||
certainty, rename, ..
|
||||
} = self;
|
||||
if matches!(certainty, Certainty::Certain) && rename.is_some() {
|
||||
if certainty.to_bool() && rename.is_some() {
|
||||
Some(|UnusedLoopControlVariable { name, rename, .. }| {
|
||||
let rename = rename.as_ref().unwrap();
|
||||
format!("Rename unused `{name}` to `{rename}`")
|
||||
@@ -140,25 +140,17 @@ pub fn unused_loop_control_variable(
|
||||
}
|
||||
|
||||
// Avoid fixing any variables that _may_ be used, but undetectably so.
|
||||
let certainty = if helpers::uses_magic_variable_access(checker, body) {
|
||||
Certainty::Uncertain
|
||||
} else {
|
||||
Certainty::Certain
|
||||
};
|
||||
let certainty = Certainty::from(!helpers::uses_magic_variable_access(checker, body));
|
||||
|
||||
// Attempt to rename the variable by prepending an underscore, but avoid
|
||||
// applying the fix if doing so wouldn't actually cause us to ignore the
|
||||
// violation in the next pass.
|
||||
let rename = format!("_{name}");
|
||||
let rename = if checker
|
||||
let rename = checker
|
||||
.settings
|
||||
.dummy_variable_rgx
|
||||
.is_match(rename.as_str())
|
||||
{
|
||||
Some(rename)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
.then_some(rename);
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnusedLoopControlVariable {
|
||||
@@ -169,7 +161,7 @@ pub fn unused_loop_control_variable(
|
||||
Range::from_located(expr),
|
||||
);
|
||||
if let Some(rename) = rename {
|
||||
if matches!(certainty, Certainty::Certain) && checker.patch(diagnostic.kind.rule()) {
|
||||
if certainty.into() && checker.patch(diagnostic.kind.rule()) {
|
||||
// Find the `BindingKind::LoopVar` corresponding to the name.
|
||||
let scope = checker.current_scope();
|
||||
let binding = scope
|
||||
@@ -185,7 +177,7 @@ pub fn unused_loop_control_variable(
|
||||
.and_then(|source| (source == &RefEquality(stmt)).then_some(binding))
|
||||
});
|
||||
if let Some(binding) = binding {
|
||||
if matches!(binding.kind, BindingKind::LoopVar) {
|
||||
if binding.kind.is_loop_var() {
|
||||
if !binding.used() {
|
||||
diagnostic.amend(Fix::replacement(
|
||||
rename,
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
use itertools::Itertools;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::lexer::{LexResult, Spanned};
|
||||
use rustpython_parser::token::Tok;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::fix::Fix;
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code::Locator;
|
||||
use crate::violation::{AlwaysAutofixableViolation, Violation};
|
||||
|
||||
/// Simplified token type.
|
||||
@@ -149,6 +150,7 @@ impl AlwaysAutofixableViolation for TrailingCommaProhibited {
|
||||
/// COM812, COM818, COM819
|
||||
pub fn trailing_commas(
|
||||
tokens: &[LexResult],
|
||||
locator: &Locator,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
) -> Vec<Diagnostic> {
|
||||
@@ -257,9 +259,7 @@ pub fn trailing_commas(
|
||||
end_location: comma.2,
|
||||
},
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::TrailingCommaProhibited)
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(&Rule::TrailingCommaProhibited) {
|
||||
diagnostic.amend(Fix::deletion(comma.0, comma.2));
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
@@ -303,10 +303,17 @@ pub fn trailing_commas(
|
||||
end_location: missing_comma.2,
|
||||
},
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::TrailingCommaMissing)
|
||||
{
|
||||
diagnostic.amend(Fix::insertion(",".to_owned(), missing_comma.2));
|
||||
if autofix.into() && settings.rules.should_fix(&Rule::TrailingCommaMissing) {
|
||||
// Create a replacement that includes the final bracket (or other token),
|
||||
// rather than just inserting a comma at the end. This prevents the UP034 autofix
|
||||
// removing any brackets in the same linter pass - doing both at the same time could
|
||||
// lead to a syntax error.
|
||||
let contents = locator.slice(&Range::new(missing_comma.0, missing_comma.2));
|
||||
diagnostic.amend(Fix::replacement(
|
||||
format!("{contents},"),
|
||||
missing_comma.0,
|
||||
missing_comma.2,
|
||||
));
|
||||
}
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -11,10 +11,10 @@ expression: diagnostics
|
||||
row: 4
|
||||
column: 17
|
||||
fix:
|
||||
content: ","
|
||||
content: "'test',"
|
||||
location:
|
||||
row: 4
|
||||
column: 17
|
||||
column: 11
|
||||
end_location:
|
||||
row: 4
|
||||
column: 17
|
||||
@@ -28,10 +28,10 @@ expression: diagnostics
|
||||
row: 10
|
||||
column: 5
|
||||
fix:
|
||||
content: ","
|
||||
content: "3,"
|
||||
location:
|
||||
row: 10
|
||||
column: 5
|
||||
column: 4
|
||||
end_location:
|
||||
row: 10
|
||||
column: 5
|
||||
@@ -45,10 +45,10 @@ expression: diagnostics
|
||||
row: 16
|
||||
column: 5
|
||||
fix:
|
||||
content: ","
|
||||
content: "3,"
|
||||
location:
|
||||
row: 16
|
||||
column: 5
|
||||
column: 4
|
||||
end_location:
|
||||
row: 16
|
||||
column: 5
|
||||
@@ -62,10 +62,10 @@ expression: diagnostics
|
||||
row: 23
|
||||
column: 5
|
||||
fix:
|
||||
content: ","
|
||||
content: "3,"
|
||||
location:
|
||||
row: 23
|
||||
column: 5
|
||||
column: 4
|
||||
end_location:
|
||||
row: 23
|
||||
column: 5
|
||||
@@ -149,10 +149,10 @@ expression: diagnostics
|
||||
row: 70
|
||||
column: 7
|
||||
fix:
|
||||
content: ","
|
||||
content: "bar,"
|
||||
location:
|
||||
row: 70
|
||||
column: 7
|
||||
column: 4
|
||||
end_location:
|
||||
row: 70
|
||||
column: 7
|
||||
@@ -166,10 +166,10 @@ expression: diagnostics
|
||||
row: 78
|
||||
column: 7
|
||||
fix:
|
||||
content: ","
|
||||
content: "bar,"
|
||||
location:
|
||||
row: 78
|
||||
column: 7
|
||||
column: 4
|
||||
end_location:
|
||||
row: 78
|
||||
column: 7
|
||||
@@ -183,10 +183,10 @@ expression: diagnostics
|
||||
row: 86
|
||||
column: 7
|
||||
fix:
|
||||
content: ","
|
||||
content: "bar,"
|
||||
location:
|
||||
row: 86
|
||||
column: 7
|
||||
column: 4
|
||||
end_location:
|
||||
row: 86
|
||||
column: 7
|
||||
@@ -200,10 +200,10 @@ expression: diagnostics
|
||||
row: 152
|
||||
column: 5
|
||||
fix:
|
||||
content: ","
|
||||
content: "y,"
|
||||
location:
|
||||
row: 152
|
||||
column: 5
|
||||
column: 4
|
||||
end_location:
|
||||
row: 152
|
||||
column: 5
|
||||
@@ -217,10 +217,10 @@ expression: diagnostics
|
||||
row: 158
|
||||
column: 10
|
||||
fix:
|
||||
content: ","
|
||||
content: "Anyway,"
|
||||
location:
|
||||
row: 158
|
||||
column: 10
|
||||
column: 4
|
||||
end_location:
|
||||
row: 158
|
||||
column: 10
|
||||
@@ -234,10 +234,10 @@ expression: diagnostics
|
||||
row: 293
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "123,"
|
||||
location:
|
||||
row: 293
|
||||
column: 14
|
||||
column: 11
|
||||
end_location:
|
||||
row: 293
|
||||
column: 14
|
||||
@@ -251,10 +251,10 @@ expression: diagnostics
|
||||
row: 304
|
||||
column: 13
|
||||
fix:
|
||||
content: ","
|
||||
content: "2,"
|
||||
location:
|
||||
row: 304
|
||||
column: 13
|
||||
column: 12
|
||||
end_location:
|
||||
row: 304
|
||||
column: 13
|
||||
@@ -268,10 +268,10 @@ expression: diagnostics
|
||||
row: 310
|
||||
column: 13
|
||||
fix:
|
||||
content: ","
|
||||
content: "3,"
|
||||
location:
|
||||
row: 310
|
||||
column: 13
|
||||
column: 12
|
||||
end_location:
|
||||
row: 310
|
||||
column: 13
|
||||
@@ -285,10 +285,10 @@ expression: diagnostics
|
||||
row: 316
|
||||
column: 9
|
||||
fix:
|
||||
content: ","
|
||||
content: "3,"
|
||||
location:
|
||||
row: 316
|
||||
column: 9
|
||||
column: 8
|
||||
end_location:
|
||||
row: 316
|
||||
column: 9
|
||||
@@ -302,10 +302,10 @@ expression: diagnostics
|
||||
row: 322
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "123,"
|
||||
location:
|
||||
row: 322
|
||||
column: 14
|
||||
column: 11
|
||||
end_location:
|
||||
row: 322
|
||||
column: 14
|
||||
@@ -319,10 +319,10 @@ expression: diagnostics
|
||||
row: 368
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "\"not good\","
|
||||
location:
|
||||
row: 368
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 368
|
||||
column: 14
|
||||
@@ -336,10 +336,10 @@ expression: diagnostics
|
||||
row: 375
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "\"not good\","
|
||||
location:
|
||||
row: 375
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 375
|
||||
column: 14
|
||||
@@ -353,10 +353,10 @@ expression: diagnostics
|
||||
row: 404
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "\"not fine\","
|
||||
location:
|
||||
row: 404
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 404
|
||||
column: 14
|
||||
@@ -370,10 +370,10 @@ expression: diagnostics
|
||||
row: 432
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "\"not fine\","
|
||||
location:
|
||||
row: 432
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 432
|
||||
column: 14
|
||||
@@ -557,10 +557,10 @@ expression: diagnostics
|
||||
row: 519
|
||||
column: 12
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwargs,"
|
||||
location:
|
||||
row: 519
|
||||
column: 12
|
||||
column: 6
|
||||
end_location:
|
||||
row: 519
|
||||
column: 12
|
||||
@@ -574,10 +574,10 @@ expression: diagnostics
|
||||
row: 526
|
||||
column: 9
|
||||
fix:
|
||||
content: ","
|
||||
content: "args,"
|
||||
location:
|
||||
row: 526
|
||||
column: 9
|
||||
column: 5
|
||||
end_location:
|
||||
row: 526
|
||||
column: 9
|
||||
@@ -591,10 +591,10 @@ expression: diagnostics
|
||||
row: 534
|
||||
column: 15
|
||||
fix:
|
||||
content: ","
|
||||
content: "extra_kwarg,"
|
||||
location:
|
||||
row: 534
|
||||
column: 15
|
||||
column: 4
|
||||
end_location:
|
||||
row: 534
|
||||
column: 15
|
||||
@@ -608,10 +608,10 @@ expression: diagnostics
|
||||
row: 541
|
||||
column: 12
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwargs,"
|
||||
location:
|
||||
row: 541
|
||||
column: 12
|
||||
column: 6
|
||||
end_location:
|
||||
row: 541
|
||||
column: 12
|
||||
@@ -625,10 +625,10 @@ expression: diagnostics
|
||||
row: 547
|
||||
column: 23
|
||||
fix:
|
||||
content: ","
|
||||
content: "not_called_kwargs,"
|
||||
location:
|
||||
row: 547
|
||||
column: 23
|
||||
column: 6
|
||||
end_location:
|
||||
row: 547
|
||||
column: 23
|
||||
@@ -642,10 +642,10 @@ expression: diagnostics
|
||||
row: 554
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwarg_only,"
|
||||
location:
|
||||
row: 554
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 554
|
||||
column: 14
|
||||
@@ -659,10 +659,10 @@ expression: diagnostics
|
||||
row: 561
|
||||
column: 12
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwargs,"
|
||||
location:
|
||||
row: 561
|
||||
column: 12
|
||||
column: 6
|
||||
end_location:
|
||||
row: 561
|
||||
column: 12
|
||||
@@ -676,10 +676,10 @@ expression: diagnostics
|
||||
row: 565
|
||||
column: 12
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwargs,"
|
||||
location:
|
||||
row: 565
|
||||
column: 12
|
||||
column: 6
|
||||
end_location:
|
||||
row: 565
|
||||
column: 12
|
||||
@@ -693,10 +693,10 @@ expression: diagnostics
|
||||
row: 573
|
||||
column: 9
|
||||
fix:
|
||||
content: ","
|
||||
content: "args,"
|
||||
location:
|
||||
row: 573
|
||||
column: 9
|
||||
column: 5
|
||||
end_location:
|
||||
row: 573
|
||||
column: 9
|
||||
@@ -710,10 +710,10 @@ expression: diagnostics
|
||||
row: 577
|
||||
column: 9
|
||||
fix:
|
||||
content: ","
|
||||
content: "args,"
|
||||
location:
|
||||
row: 577
|
||||
column: 9
|
||||
column: 5
|
||||
end_location:
|
||||
row: 577
|
||||
column: 9
|
||||
@@ -727,10 +727,10 @@ expression: diagnostics
|
||||
row: 583
|
||||
column: 9
|
||||
fix:
|
||||
content: ","
|
||||
content: "args,"
|
||||
location:
|
||||
row: 583
|
||||
column: 9
|
||||
column: 5
|
||||
end_location:
|
||||
row: 583
|
||||
column: 9
|
||||
@@ -744,10 +744,10 @@ expression: diagnostics
|
||||
row: 590
|
||||
column: 12
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwargs,"
|
||||
location:
|
||||
row: 590
|
||||
column: 12
|
||||
column: 6
|
||||
end_location:
|
||||
row: 590
|
||||
column: 12
|
||||
@@ -761,10 +761,10 @@ expression: diagnostics
|
||||
row: 598
|
||||
column: 14
|
||||
fix:
|
||||
content: ","
|
||||
content: "kwarg_only,"
|
||||
location:
|
||||
row: 598
|
||||
column: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 598
|
||||
column: 14
|
||||
@@ -778,12 +778,29 @@ expression: diagnostics
|
||||
row: 627
|
||||
column: 19
|
||||
fix:
|
||||
content: ","
|
||||
content: "},"
|
||||
location:
|
||||
row: 627
|
||||
column: 19
|
||||
column: 18
|
||||
end_location:
|
||||
row: 627
|
||||
column: 19
|
||||
parent: ~
|
||||
- kind:
|
||||
TrailingCommaMissing: ~
|
||||
location:
|
||||
row: 632
|
||||
column: 41
|
||||
end_location:
|
||||
row: 632
|
||||
column: 41
|
||||
fix:
|
||||
content: "),"
|
||||
location:
|
||||
row: 632
|
||||
column: 40
|
||||
end_location:
|
||||
row: 632
|
||||
column: 41
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -522,11 +522,13 @@ pub fn fix_unnecessary_collection_call(
|
||||
// Quote each argument.
|
||||
for arg in &call.args {
|
||||
let quoted = format!(
|
||||
"\"{}\"",
|
||||
"{}{}{}",
|
||||
stylist.quote(),
|
||||
arg.keyword
|
||||
.as_ref()
|
||||
.expect("Expected dictionary argument to be kwarg")
|
||||
.value
|
||||
.value,
|
||||
stylist.quote(),
|
||||
);
|
||||
arena.push(quoted);
|
||||
}
|
||||
|
||||
@@ -14,6 +14,9 @@ mod tests {
|
||||
use crate::test::test_path;
|
||||
|
||||
#[test_case(Rule::NullableModelStringField, Path::new("DJ001.py"); "DJ001")]
|
||||
#[test_case(Rule::LocalsInRenderFunction, Path::new("DJ003.py"); "DJ003")]
|
||||
#[test_case(Rule::ExcludeWithModelForm, Path::new("DJ006.py"); "DJ006")]
|
||||
#[test_case(Rule::AllWithModelForm, Path::new("DJ007.py"); "DJ007")]
|
||||
#[test_case(Rule::ModelWithoutDunderStr, Path::new("DJ008.py"); "DJ008")]
|
||||
#[test_case(Rule::NonLeadingReceiverDecorator, Path::new("DJ013.py"); "DJ013")]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::rules::flake8_django::rules::helpers::is_model_form;
|
||||
use crate::violation::Violation;
|
||||
use crate::{checkers::ast::Checker, registry::Diagnostic, Range};
|
||||
|
||||
define_violation!(
|
||||
/// ## What it does
|
||||
/// Checks for the use of `fields = "__all__"` in Django `ModelForm`
|
||||
/// classes.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// If a `ModelForm` includes the `fields = "__all__"` attribute, any new
|
||||
/// field that is added to the model will automatically be exposed for
|
||||
/// modification.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// from django.forms import ModelForm
|
||||
///
|
||||
/// class PostForm(ModelForm):
|
||||
/// class Meta:
|
||||
/// model = Post
|
||||
/// fields = "__all__"
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// from django.forms import ModelForm
|
||||
///
|
||||
/// class PostForm(ModelForm):
|
||||
/// class Meta:
|
||||
/// model = Post
|
||||
/// fields = ["title", "content"]
|
||||
/// ```
|
||||
pub struct AllWithModelForm;
|
||||
);
|
||||
impl Violation for AllWithModelForm {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Do not use `__all__` with `ModelForm`, use `fields` instead")
|
||||
}
|
||||
}
|
||||
|
||||
/// DJ007
|
||||
pub fn all_with_model_form(checker: &Checker, bases: &[Expr], body: &[Stmt]) -> Option<Diagnostic> {
|
||||
if !bases.iter().any(|base| is_model_form(checker, base)) {
|
||||
return None;
|
||||
}
|
||||
for element in body.iter() {
|
||||
let StmtKind::ClassDef { name, body, .. } = &element.node else {
|
||||
continue;
|
||||
};
|
||||
if name != "Meta" {
|
||||
continue;
|
||||
}
|
||||
for element in body.iter() {
|
||||
let StmtKind::Assign { targets, value, .. } = &element.node else {
|
||||
continue;
|
||||
};
|
||||
for target in targets.iter() {
|
||||
let ExprKind::Name { id, .. } = &target.node else {
|
||||
continue;
|
||||
};
|
||||
if id != "fields" {
|
||||
continue;
|
||||
}
|
||||
let ExprKind::Constant { value, .. } = &value.node else {
|
||||
continue;
|
||||
};
|
||||
match &value {
|
||||
Constant::Str(s) => {
|
||||
if s == "__all__" {
|
||||
return Some(Diagnostic::new(
|
||||
AllWithModelForm,
|
||||
Range::from_located(element),
|
||||
));
|
||||
}
|
||||
}
|
||||
Constant::Bytes(b) => {
|
||||
if b == "__all__".as_bytes() {
|
||||
return Some(Diagnostic::new(
|
||||
AllWithModelForm,
|
||||
Range::from_located(element),
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
@@ -0,0 +1,79 @@
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Stmt, StmtKind};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::rules::flake8_django::rules::helpers::is_model_form;
|
||||
use crate::violation::Violation;
|
||||
use crate::{checkers::ast::Checker, registry::Diagnostic, Range};
|
||||
|
||||
define_violation!(
    /// ## What it does
    /// Checks for the use of `exclude` in Django `ModelForm` classes.
    ///
    /// ## Why is this bad?
    /// If a `ModelForm` includes the `exclude` attribute, any new field that
    /// is added to the model will automatically be exposed for modification.
    ///
    /// ## Example
    /// ```python
    /// from django.forms import ModelForm
    ///
    /// class PostForm(ModelForm):
    ///     class Meta:
    ///         model = Post
    ///         exclude = ["author"]
    /// ```
    ///
    /// Use instead:
    /// ```python
    /// from django.forms import ModelForm
    ///
    /// class PostForm(ModelForm):
    ///     class Meta:
    ///         model = Post
    ///         fields = ["title", "content"]
    /// ```
    pub struct ExcludeWithModelForm;
);

// Violation message for DJ006; no autofix is provided, so only `message` is
// implemented.
impl Violation for ExcludeWithModelForm {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Do not use `exclude` with `ModelForm`, use `fields` instead")
    }
}
|
||||
|
||||
/// DJ006
|
||||
pub fn exclude_with_model_form(
|
||||
checker: &Checker,
|
||||
bases: &[Expr],
|
||||
body: &[Stmt],
|
||||
) -> Option<Diagnostic> {
|
||||
if !bases.iter().any(|base| is_model_form(checker, base)) {
|
||||
return None;
|
||||
}
|
||||
for element in body.iter() {
|
||||
let StmtKind::ClassDef { name, body, .. } = &element.node else {
|
||||
continue;
|
||||
};
|
||||
if name != "Meta" {
|
||||
continue;
|
||||
}
|
||||
for element in body.iter() {
|
||||
let StmtKind::Assign { targets, .. } = &element.node else {
|
||||
continue;
|
||||
};
|
||||
for target in targets.iter() {
|
||||
let ExprKind::Name { id, .. } = &target.node else {
|
||||
continue;
|
||||
};
|
||||
if id == "exclude" {
|
||||
return Some(Diagnostic::new(
|
||||
ExcludeWithModelForm,
|
||||
Range::from_located(target),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
@@ -2,13 +2,22 @@ use rustpython_parser::ast::Expr;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// Return `true` if a Python class appears to be a Django model based on a base class.
|
||||
/// Return `true` if a Python class appears to be a Django model, based on its base classes.
|
||||
pub fn is_model(checker: &Checker, base: &Expr) -> bool {
|
||||
checker.resolve_call_path(base).map_or(false, |call_path| {
|
||||
call_path.as_slice() == ["django", "db", "models", "Model"]
|
||||
})
|
||||
}
|
||||
|
||||
/// Return `true` if a Python class appears to be a Django model form, based on its base classes.
|
||||
pub fn is_model_form(checker: &Checker, base: &Expr) -> bool {
|
||||
checker.resolve_call_path(base).map_or(false, |call_path| {
|
||||
call_path.as_slice() == ["django", "forms", "ModelForm"]
|
||||
|| call_path.as_slice() == ["django", "forms", "models", "ModelForm"]
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the name of the field type, if the expression is constructor for a Django model field.
|
||||
pub fn get_model_field_name<'a>(checker: &'a Checker, expr: &'a Expr) -> Option<&'a str> {
|
||||
checker.resolve_call_path(expr).and_then(|call_path| {
|
||||
let call_path = call_path.as_slice();
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Keyword};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::{checkers::ast::Checker, registry::Diagnostic, violation::Violation, Range};
|
||||
|
||||
define_violation!(
    /// ## What it does
    /// Checks for the use of `locals()` in `render` functions.
    ///
    /// ## Why is this bad?
    /// Using `locals()` can expose internal variables or other unintentional
    /// data to the rendered template.
    ///
    /// ## Example
    /// ```python
    /// from django.shortcuts import render
    ///
    /// def index(request):
    ///     posts = Post.objects.all()
    ///     return render(request, "app/index.html", locals())
    /// ```
    ///
    /// Use instead:
    /// ```python
    /// from django.shortcuts import render
    ///
    /// def index(request):
    ///     posts = Post.objects.all()
    ///     context = {"posts": posts}
    ///     return render(request, "app/index.html", context)
    /// ```
    pub struct LocalsInRenderFunction;
);

// Violation message for DJ003; no autofix is provided, so only `message` is
// implemented.
impl Violation for LocalsInRenderFunction {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Avoid passing `locals()` as context to a `render` function")
    }
}
||||
|
||||
/// DJ003
|
||||
pub fn locals_in_render_function(
|
||||
checker: &mut Checker,
|
||||
func: &Expr,
|
||||
args: &[Expr],
|
||||
keywords: &[Keyword],
|
||||
) {
|
||||
if !checker.resolve_call_path(func).map_or(false, |call_path| {
|
||||
call_path.as_slice() == ["django", "shortcuts", "render"]
|
||||
}) {
|
||||
return;
|
||||
}
|
||||
|
||||
let locals = if args.len() >= 3 {
|
||||
if !is_locals_call(checker, &args[2]) {
|
||||
return;
|
||||
}
|
||||
&args[2]
|
||||
} else if let Some(keyword) = keywords.iter().find(|keyword| {
|
||||
keyword
|
||||
.node
|
||||
.arg
|
||||
.as_ref()
|
||||
.map_or(false, |arg| arg == "context")
|
||||
}) {
|
||||
if !is_locals_call(checker, &keyword.node.value) {
|
||||
return;
|
||||
}
|
||||
&keyword.node.value
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
LocalsInRenderFunction,
|
||||
Range::from_located(locals),
|
||||
));
|
||||
}
|
||||
|
||||
fn is_locals_call(checker: &Checker, expr: &Expr) -> bool {
|
||||
let ExprKind::Call { func, .. } = &expr.node else {
|
||||
return false
|
||||
};
|
||||
checker
|
||||
.resolve_call_path(func)
|
||||
.map_or(false, |call_path| call_path.as_slice() == ["", "locals"])
|
||||
}
|
||||
@@ -1,10 +1,16 @@
|
||||
pub use all_with_model_form::{all_with_model_form, AllWithModelForm};
|
||||
pub use exclude_with_model_form::{exclude_with_model_form, ExcludeWithModelForm};
|
||||
pub use locals_in_render_function::{locals_in_render_function, LocalsInRenderFunction};
|
||||
pub use model_without_dunder_str::{model_without_dunder_str, ModelWithoutDunderStr};
|
||||
pub use non_leading_receiver_decorator::{
|
||||
non_leading_receiver_decorator, NonLeadingReceiverDecorator,
|
||||
};
|
||||
pub use nullable_model_string_field::{nullable_model_string_field, NullableModelStringField};
|
||||
|
||||
mod all_with_model_form;
|
||||
mod exclude_with_model_form;
|
||||
mod helpers;
|
||||
mod locals_in_render_function;
|
||||
mod model_without_dunder_str;
|
||||
mod non_leading_receiver_decorator;
|
||||
mod nullable_model_string_field;
|
||||
|
||||
@@ -0,0 +1,24 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_django/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
LocalsInRenderFunction: ~
|
||||
location:
|
||||
row: 5
|
||||
column: 41
|
||||
end_location:
|
||||
row: 5
|
||||
column: 49
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
LocalsInRenderFunction: ~
|
||||
location:
|
||||
row: 9
|
||||
column: 49
|
||||
end_location:
|
||||
row: 9
|
||||
column: 57
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -0,0 +1,14 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_django/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
ExcludeWithModelForm: ~
|
||||
location:
|
||||
row: 6
|
||||
column: 8
|
||||
end_location:
|
||||
row: 6
|
||||
column: 15
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -0,0 +1,24 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_django/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
AllWithModelForm: ~
|
||||
location:
|
||||
row: 6
|
||||
column: 8
|
||||
end_location:
|
||||
row: 6
|
||||
column: 26
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
AllWithModelForm: ~
|
||||
location:
|
||||
row: 11
|
||||
column: 8
|
||||
end_location:
|
||||
row: 11
|
||||
column: 27
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -1,3 +1,10 @@
|
||||
#[cfg(target_family = "unix")]
|
||||
use anyhow::Result;
|
||||
#[cfg(target_family = "unix")]
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
#[cfg(target_family = "unix")]
|
||||
use std::path::Path;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
@@ -33,6 +40,15 @@ pub fn extract_shebang(line: &str) -> ShebangDirective {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn is_executable(filepath: &Path) -> Result<bool> {
|
||||
{
|
||||
let metadata = filepath.metadata()?;
|
||||
let permissions = metadata.permissions();
|
||||
Ok(permissions.mode() & 0o111 != 0)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::rules::flake8_executable::helpers::{
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
#![allow(unused_imports)]
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use is_executable::IsExecutable;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
#[cfg(target_family = "unix")]
|
||||
use crate::rules::flake8_executable::helpers::is_executable;
|
||||
use crate::violation::Violation;
|
||||
|
||||
define_violation!(
|
||||
@@ -20,17 +21,16 @@ impl Violation for ShebangMissingExecutableFile {
|
||||
}
|
||||
|
||||
/// EXE002
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn shebang_missing(filepath: &Path) -> Option<Diagnostic> {
|
||||
if filepath.is_executable() {
|
||||
if let Ok(true) = is_executable(filepath) {
|
||||
let diagnostic = Diagnostic::new(ShebangMissingExecutableFile, Range::default());
|
||||
Some(diagnostic)
|
||||
} else {
|
||||
None
|
||||
return Some(diagnostic);
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(target_family = "wasm")]
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
pub fn shebang_missing(_filepath: &Path) -> Option<Diagnostic> {
|
||||
None
|
||||
}
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
#![allow(unused_imports)]
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use is_executable::IsExecutable;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use rustpython_parser::ast::Location;
|
||||
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
#[cfg(target_family = "unix")]
|
||||
use crate::rules::flake8_executable::helpers::is_executable;
|
||||
use crate::rules::flake8_executable::helpers::ShebangDirective;
|
||||
use crate::violation::Violation;
|
||||
|
||||
@@ -23,16 +24,14 @@ impl Violation for ShebangNotExecutable {
|
||||
}
|
||||
|
||||
/// EXE001
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn shebang_not_executable(
|
||||
filepath: &Path,
|
||||
lineno: usize,
|
||||
shebang: &ShebangDirective,
|
||||
) -> Option<Diagnostic> {
|
||||
if let ShebangDirective::Match(_, start, end, _) = shebang {
|
||||
if filepath.is_executable() {
|
||||
None
|
||||
} else {
|
||||
if let Ok(false) = is_executable(filepath) {
|
||||
let diagnostic = Diagnostic::new(
|
||||
ShebangNotExecutable,
|
||||
Range::new(
|
||||
@@ -40,14 +39,13 @@ pub fn shebang_not_executable(
|
||||
Location::new(lineno + 1, *end),
|
||||
),
|
||||
);
|
||||
Some(diagnostic)
|
||||
return Some(diagnostic);
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(target_family = "wasm")]
|
||||
#[cfg(not(target_family = "unix"))]
|
||||
pub fn shebang_not_executable(
|
||||
_filepath: &Path,
|
||||
_lineno: usize,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use itertools::Itertools;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind, Operator};
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, Location, Operator};
|
||||
|
||||
use crate::ast::helpers::{find_keyword, is_logger_candidate, SimpleCallArgs};
|
||||
use crate::ast::logging::LoggingLevel;
|
||||
use crate::ast::types::Range;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::fix::Fix;
|
||||
@@ -10,31 +11,6 @@ use crate::rules::flake8_logging_format::violations::{
|
||||
LoggingRedundantExcInfo, LoggingStringConcat, LoggingStringFormat, LoggingWarn,
|
||||
};
|
||||
|
||||
// Logging levels recognized by the flake8-logging-format (G) checks.
//
// NOTE(review): this local enum appears to duplicate
// `crate::ast::logging::LoggingLevel` (imported above) — consider
// consolidating on the shared definition.
enum LoggingLevel {
    Debug,
    Critical,
    Error,
    Exception,
    Info,
    Warn,
    Warning,
}

impl LoggingLevel {
    // Map a lowercase `logging` method name (e.g. "debug") to its level;
    // returns `None` for unrecognized names.
    fn from_str(level: &str) -> Option<Self> {
        match level {
            "debug" => Some(LoggingLevel::Debug),
            "critical" => Some(LoggingLevel::Critical),
            "error" => Some(LoggingLevel::Error),
            "exception" => Some(LoggingLevel::Exception),
            "info" => Some(LoggingLevel::Info),
            "warn" => Some(LoggingLevel::Warn),
            "warning" => Some(LoggingLevel::Warning),
            _ => None,
        }
    }
}
|
||||
|
||||
const RESERVED_ATTRS: &[&str; 22] = &[
|
||||
"args",
|
||||
"asctime",
|
||||
|
||||
45
crates/ruff/src/rules/flake8_pie/fixes.rs
Normal file
45
crates/ruff/src/rules/flake8_pie/fixes.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use anyhow::{bail, Result};
|
||||
use libcst_native::{Codegen, CodegenState, Expression, GeneratorExp};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::cst::matchers::{match_expr, match_module};
|
||||
use crate::fix::Fix;
|
||||
use crate::source_code::{Locator, Stylist};
|
||||
|
||||
/// (PIE802) Convert `[i for i in a]` into `i for i in a`
|
||||
pub fn fix_unnecessary_comprehension_any_all(
|
||||
locator: &Locator,
|
||||
stylist: &Stylist,
|
||||
expr: &rustpython_parser::ast::Expr,
|
||||
) -> Result<Fix> {
|
||||
// Expr(ListComp) -> Expr(GeneratorExp)
|
||||
let module_text = locator.slice(&Range::from_located(expr));
|
||||
let mut tree = match_module(module_text)?;
|
||||
let mut body = match_expr(&mut tree)?;
|
||||
|
||||
let Expression::ListComp(list_comp) = &body.value else {
|
||||
bail!(
|
||||
"Expected Expression::ListComp"
|
||||
);
|
||||
};
|
||||
|
||||
body.value = Expression::GeneratorExp(Box::new(GeneratorExp {
|
||||
elt: list_comp.elt.clone(),
|
||||
for_in: list_comp.for_in.clone(),
|
||||
lpar: list_comp.lpar.clone(),
|
||||
rpar: list_comp.rpar.clone(),
|
||||
}));
|
||||
|
||||
let mut state = CodegenState {
|
||||
default_newline: stylist.line_ending(),
|
||||
default_indent: stylist.indentation(),
|
||||
..CodegenState::default()
|
||||
};
|
||||
tree.codegen(&mut state);
|
||||
|
||||
Ok(Fix::replacement(
|
||||
state.to_string(),
|
||||
expr.location,
|
||||
expr.end_location.unwrap(),
|
||||
))
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
//! Rules from [flake8-pie](https://pypi.org/project/flake8-pie/).
|
||||
mod fixes;
|
||||
pub(crate) mod rules;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -20,6 +21,7 @@ mod tests {
|
||||
#[test_case(Rule::UnnecessarySpread, Path::new("PIE800.py"); "PIE800")]
|
||||
#[test_case(Rule::PreferListBuiltin, Path::new("PIE807.py"); "PIE807")]
|
||||
#[test_case(Rule::PreferUniqueEnums, Path::new("PIE796.py"); "PIE796")]
|
||||
#[test_case(Rule::UnnecessaryComprehensionAnyAll, Path::new("PIE802.py"); "PIE802")]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user