Compare commits

...

23 Commits

Author SHA1 Message Date
Charlie Marsh
995994be3e Bump version to 0.0.162 2022-12-05 19:07:44 -05:00
Charlie Marsh
f001305b2e Only autofix D205 by deleting blank lines (#1091) 2022-12-05 19:01:32 -05:00
Charlie Marsh
e88093541f Avoid wrapping import-star statements (#1089) 2022-12-05 18:39:16 -05:00
Charlie Marsh
da41a495f1 Remove extraneous plugin creation script 2022-12-05 18:31:19 -05:00
Charlie Marsh
55b7ec8f85 Ignore newline enforcement when imports break indentation boundaries (#1085) 2022-12-05 18:02:41 -05:00
Charlie Marsh
4b41ae3f53 Bump version to 0.0.161 2022-12-05 17:02:05 -05:00
Charlie Marsh
f944e1e1cf Add action comments to README.md (#1082) 2022-12-05 16:56:28 -05:00
Charlie Marsh
4fbc1082de Support isort: split directive (#1081) 2022-12-05 16:48:10 -05:00
Charlie Marsh
cf2e887e38 Tweak summary message to include total error counts (#1067) 2022-12-05 16:12:12 -05:00
Charlie Marsh
ee994e8c07 Import compatibility with isort newline-insertion behavior (#1078) 2022-12-05 16:07:07 -05:00
Charlie Marsh
c69c4fd655 Support isort: skip_file directive (#1075) 2022-12-05 15:02:01 -05:00
Charlie Marsh
e01e45ca35 Remove extraneous test file 2022-12-05 14:58:54 -05:00
Charlie Marsh
4be74785fe Support unterminated isort: off directives (#1074) 2022-12-05 14:54:47 -05:00
Charlie Marsh
40b7c64f7d Bump version to 0.0.160 2022-12-05 12:56:38 -05:00
Jonathan Plasse
a76c5d1226 Add allowed-confusable settings (#1059) 2022-12-05 12:53:55 -05:00
Charlie Marsh
5aeddeb825 Include pyproject.toml path in error message (#1068) 2022-12-05 12:04:50 -05:00
Charlie Marsh
5f8294aea4 Preserve star imports when re-formatting import blocks (#1066) 2022-12-05 11:48:38 -05:00
Charlie Marsh
e07d3f6313 Fix clippy 2022-12-05 11:47:42 -05:00
Charlie Marsh
1d1662cb9c Bump version to 0.0.159 2022-12-05 11:22:02 -05:00
Charlie Marsh
55ce7bd0df Migrate invalid_literal_comparisons fix to token-based logic (#1065) 2022-12-05 11:16:59 -05:00
Charlie Marsh
e695f6eb25 Avoid false-positive on PLR1701 for multi-type isinstance calls (#1063) 2022-12-05 10:07:05 -05:00
messense
fb2c457a9b Upgrade to notify 5.0.0 (#1048) 2022-12-05 09:58:42 -05:00
Jeong YunWon
523cf62eda Style fixes (#1049) 2022-12-05 09:57:48 -05:00
100 changed files with 2957 additions and 2261 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.158
rev: v0.0.162
hooks:
- id: ruff

391
Cargo.lock generated
View File

@@ -21,9 +21,9 @@ dependencies = [
[[package]]
name = "aho-corasick"
version = "0.7.19"
version = "0.7.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac"
dependencies = [
"memchr",
]
@@ -82,9 +82,9 @@ dependencies = [
[[package]]
name = "assert_cmd"
version = "2.0.6"
version = "2.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba45b8163c49ab5f972e59a8a5a03b6d2972619d486e19ec9fe744f7c2753d3c"
checksum = "fa3d466004a8b4cb1bc34044240a2fd29d17607e2e3bd613eb44fd48e8100da3"
dependencies = [
"bstr 1.0.1",
"doc-comment",
@@ -100,9 +100,9 @@ version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"hermit-abi 0.1.19",
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -193,9 +193,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.0.75"
version = "1.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ca34107f97baef6cfb231b32f36115781856b8f8208e8c580e0bcaea374842"
checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4"
[[package]]
name = "cfg-if"
@@ -230,7 +230,7 @@ dependencies = [
"num-traits",
"time",
"wasm-bindgen",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -280,14 +280,14 @@ dependencies = [
[[package]]
name = "clap"
version = "4.0.22"
version = "4.0.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91b9970d7505127a162fdaa9b96428d28a479ba78c9ec7550a63a5d9863db682"
checksum = "4d63b9e9c07271b9957ad22c173bae2a4d9a81127680962039296abcd2f8251d"
dependencies = [
"atty",
"bitflags",
"clap_derive",
"clap_lex 0.3.0",
"is-terminal",
"once_cell",
"strsim",
"termcolor",
@@ -299,7 +299,7 @@ version = "4.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7b3c9eae0de7bf8e3f904a5e40612b21fb2e2e566456d177809a48b892d24da"
dependencies = [
"clap 4.0.22",
"clap 4.0.29",
]
[[package]]
@@ -308,7 +308,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4160b4a4f72ef58bd766bad27c09e6ef1cc9d82a22f6a0f55d152985a4a48e31"
dependencies = [
"clap 4.0.22",
"clap 4.0.29",
"clap_complete",
"clap_complete_fig",
]
@@ -319,7 +319,7 @@ version = "4.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46b30e010e669cd021e5004f3be26cff6b7c08d2a8a0d65b48d43a8cc0efd6c3"
dependencies = [
"clap 4.0.22",
"clap 4.0.29",
"clap_complete",
]
@@ -364,7 +364,7 @@ dependencies = [
"terminfo",
"thiserror",
"which",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -394,7 +394,7 @@ checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
dependencies = [
"atty",
"lazy_static",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -419,7 +419,7 @@ dependencies = [
"lazy_static",
"libc",
"terminal_size",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -496,9 +496,9 @@ dependencies = [
[[package]]
name = "crossbeam-epoch"
version = "0.9.11"
version = "0.9.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
dependencies = [
"autocfg",
"cfg-if 1.0.0",
@@ -509,9 +509,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.12"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
dependencies = [
"cfg-if 1.0.0",
]
@@ -524,9 +524,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
[[package]]
name = "cxx"
version = "1.0.81"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97abf9f0eca9e52b7f81b945524e76710e6cb2366aead23b7d4fbf72e281f888"
checksum = "bdf07d07d6531bfcdbe9b8b739b104610c6508dcc4d63b410585faf338241daf"
dependencies = [
"cc",
"cxxbridge-flags",
@@ -536,9 +536,9 @@ dependencies = [
[[package]]
name = "cxx-build"
version = "1.0.81"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cc32cc5fea1d894b77d269ddb9f192110069a8a9c1f1d441195fba90553dea3"
checksum = "d2eb5b96ecdc99f72657332953d4d9c50135af1bac34277801cc3937906ebd39"
dependencies = [
"cc",
"codespan-reporting",
@@ -551,15 +551,15 @@ dependencies = [
[[package]]
name = "cxxbridge-flags"
version = "1.0.81"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ca220e4794c934dc6b1207c3b42856ad4c302f2df1712e9f8d2eec5afaacf1f"
checksum = "ac040a39517fd1674e0f32177648334b0f4074625b5588a64519804ba0553b12"
[[package]]
name = "cxxbridge-macro"
version = "1.0.81"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b846f081361125bfc8dc9d3940c84e1fd83ba54bbca7b17cd29483c828be0704"
checksum = "1362b0ddcfc4eb0a1f57b68bd77dd99f0e826958a96abd0ae9bd092e114ffed6"
dependencies = [
"proc-macro2",
"quote",
@@ -624,7 +624,7 @@ checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -635,7 +635,7 @@ checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -665,6 +665,27 @@ version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "fastrand"
version = "1.8.0"
@@ -703,10 +724,10 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.158-dev.0"
version = "0.0.162-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
"clap 4.0.29",
"configparser",
"once_cell",
"regex",
@@ -719,9 +740,9 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.24"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -742,41 +763,15 @@ dependencies = [
"percent-encoding",
]
[[package]]
name = "fsevent"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
dependencies = [
"bitflags",
"fsevent-sys",
]
[[package]]
name = "fsevent-sys"
version = "2.0.1"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
"libc",
]
[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
dependencies = [
"bitflags",
"fuchsia-zircon-sys",
]
[[package]]
name = "fuchsia-zircon-sys"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "getrandom"
version = "0.1.16"
@@ -844,6 +839,15 @@ dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
dependencies = [
"libc",
]
[[package]]
name = "hexf-parse"
version = "0.2.1"
@@ -861,7 +865,7 @@ dependencies = [
"iana-time-zone-haiku",
"js-sys",
"wasm-bindgen",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -896,9 +900,9 @@ dependencies = [
[[package]]
name = "inotify"
version = "0.7.1"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f"
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
dependencies = [
"bitflags",
"inotify-sys",
@@ -916,9 +920,9 @@ dependencies = [
[[package]]
name = "insta"
version = "1.21.0"
version = "1.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "581d4e3314cae4536e5d22ffd23189d4a374696c5ef733eadafae0ed273fd303"
checksum = "197f4e300af8b23664d4077bf5c40e0afa9ba66a567bb5a51d3def3c7b287d1c"
dependencies = [
"console",
"lazy_static",
@@ -938,12 +942,25 @@ dependencies = [
]
[[package]]
name = "iovec"
version = "0.1.4"
name = "io-lifetimes"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c"
dependencies = [
"libc",
"windows-sys",
]
[[package]]
name = "is-terminal"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "927609f78c2913a6f6ac3c27a4fe87f43e2a35367c0c4b0f8265e8f49a104330"
dependencies = [
"hermit-abi 0.2.6",
"io-lifetimes",
"rustix",
"windows-sys",
]
[[package]]
@@ -977,13 +994,23 @@ dependencies = [
]
[[package]]
name = "kernel32-sys"
version = "0.2.2"
name = "kqueue"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
checksum = "2c8fc60ba15bf51257aa9807a48a61013db043fcf3a78cb0d916e8e396dcad98"
dependencies = [
"winapi 0.2.8",
"winapi-build",
"kqueue-sys",
"libc",
]
[[package]]
name = "kqueue-sys"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
dependencies = [
"bitflags",
"libc",
]
[[package]]
@@ -1024,12 +1051,6 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lexical-parse-float"
version = "0.8.5"
@@ -1062,9 +1083,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.137"
version = "0.2.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
[[package]]
name = "libcst"
@@ -1105,6 +1126,12 @@ version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f"
[[package]]
name = "lock_api"
version = "0.4.9"
@@ -1147,74 +1174,32 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
version = "0.6.5"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "miniz_oxide"
version = "0.5.4"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
dependencies = [
"adler",
]
[[package]]
name = "mio"
version = "0.6.23"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4"
checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
dependencies = [
"cfg-if 0.1.10",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"iovec",
"kernel32-sys",
"libc",
"log",
"miow",
"net2",
"slab",
"winapi 0.2.8",
]
[[package]]
name = "mio-extras"
version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19"
dependencies = [
"lazycell",
"log",
"mio",
"slab",
]
[[package]]
name = "miow"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d"
dependencies = [
"kernel32-sys",
"net2",
"winapi 0.2.8",
"ws2_32-sys",
]
[[package]]
name = "net2"
version = "0.2.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631"
dependencies = [
"cfg-if 0.1.10",
"libc",
"winapi 0.3.9",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys",
]
[[package]]
@@ -1231,9 +1216,9 @@ checksum = "d906846a98739ed9d73d66e62c2641eef8321f1734b7a1156ab045a0248fb2b3"
[[package]]
name = "nix"
version = "0.24.2"
version = "0.24.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "195cdbc1741b8134346d515b3a56a1c94b0912758009cfd53f99ea0f57b065fc"
checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
@@ -1258,20 +1243,20 @@ dependencies = [
[[package]]
name = "notify"
version = "4.0.17"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae03c8c853dba7bfd23e571ff0cff7bc9dceb40a4cd684cd1681824183f45257"
checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a"
dependencies = [
"bitflags",
"crossbeam-channel",
"filetime",
"fsevent",
"fsevent-sys",
"inotify",
"kqueue",
"libc",
"mio",
"mio-extras",
"walkdir",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -1321,7 +1306,7 @@ version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5"
dependencies = [
"hermit-abi",
"hermit-abi 0.1.19",
"libc",
]
@@ -1339,9 +1324,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
[[package]]
name = "os_str_bytes"
version = "6.3.1"
version = "6.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3baf96e39c5359d2eb0dd6ccb42c62b91d9678aa68160d261b9e0ccbf9e9dea9"
checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee"
[[package]]
name = "parking_lot"
@@ -1355,9 +1340,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.9.4"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"
checksum = "7ff9f3fef3968a3ec5945535ed654cb38ff72d7495a25619e2247fb15a2ed9ba"
dependencies = [
"cfg-if 1.0.0",
"libc",
@@ -1557,9 +1542,9 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "predicates"
version = "2.1.2"
version = "2.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab68289ded120dcbf9d571afcf70163233229052aec9b08ab09532f698d0e1e6"
checksum = "f54fc5dc63ed3bbf19494623db4f3af16842c0d975818e469022d09e53f0aa05"
dependencies = [
"difflib",
"itertools",
@@ -1568,15 +1553,15 @@ dependencies = [
[[package]]
name = "predicates-core"
version = "1.0.4"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6e7125585d872860e9955ca571650b27a4979c5823084168c5ed5bbfb016b56"
checksum = "72f883590242d3c6fc5bf50299011695fa6590c2c70eac95ee1bdb9a733ad1a2"
[[package]]
name = "predicates-tree"
version = "1.0.6"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad3f7fa8d61e139cbc7c3edfebf3b6678883a53f5ffac65d1259329a93ee43a5"
checksum = "54ff541861505aabf6ea722d2131ee980b8276e10a1297b94e896dd8b621850d"
dependencies = [
"predicates-core",
"termtree",
@@ -1736,11 +1721,10 @@ dependencies = [
[[package]]
name = "rayon"
version = "1.5.3"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
dependencies = [
"autocfg",
"crossbeam-deque",
"either",
"rayon-core",
@@ -1748,9 +1732,9 @@ dependencies = [
[[package]]
name = "rayon-core"
version = "1.9.3"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -1807,7 +1791,7 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -1822,7 +1806,7 @@ dependencies = [
"spin",
"untrusted",
"web-sys",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -1837,7 +1821,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.158"
version = "0.0.162"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1847,7 +1831,7 @@ dependencies = [
"bitflags",
"cachedir",
"chrono",
"clap 4.0.22",
"clap 4.0.29",
"clap_complete_command",
"clearscreen",
"colored",
@@ -1889,10 +1873,10 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.158"
version = "0.0.162"
dependencies = [
"anyhow",
"clap 4.0.22",
"clap 4.0.29",
"codegen",
"itertools",
"libcst",
@@ -1911,6 +1895,20 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "rustix"
version = "0.36.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb93e85278e08bb5788653183213d3a60fc242b10cb9be96586f5a73dcb67c23"
dependencies = [
"bitflags",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "rustls"
version = "0.20.7"
@@ -2049,18 +2047,18 @@ checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
[[package]]
name = "serde"
version = "1.0.147"
version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965"
checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.147"
version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852"
checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
dependencies = [
"proc-macro2",
"quote",
@@ -2069,9 +2067,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.87"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45"
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
dependencies = [
"itoa",
"ryu",
@@ -2080,9 +2078,9 @@ dependencies = [
[[package]]
name = "similar"
version = "2.2.0"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62ac7f900db32bf3fd12e0117dd3dc4da74bc52ebaac97f39668446d89694803"
checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf"
[[package]]
name = "siphasher"
@@ -2090,15 +2088,6 @@ version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
[[package]]
name = "slab"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef"
dependencies = [
"autocfg",
]
[[package]]
name = "smallvec"
version = "1.10.0"
@@ -2172,9 +2161,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.103"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d"
checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
dependencies = [
"proc-macro2",
"quote",
@@ -2192,7 +2181,7 @@ dependencies = [
"libc",
"redox_syscall",
"remove_dir_all",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -2203,7 +2192,7 @@ checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
dependencies = [
"dirs-next",
"rustversion",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -2222,7 +2211,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -2305,7 +2294,7 @@ checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a"
dependencies = [
"libc",
"wasi 0.10.0+wasi-snapshot-preview1",
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -2568,7 +2557,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
dependencies = [
"same-file",
"winapi 0.3.9",
"winapi",
"winapi-util",
]
@@ -2690,12 +2679,6 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]]
name = "winapi"
version = "0.3.9"
@@ -2706,12 +2689,6 @@ dependencies = [
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
@@ -2724,7 +2701,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@@ -2790,16 +2767,6 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
[[package]]
name = "ws2_32-sys"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "yaml-rust"
version = "0.4.5"
@@ -2815,5 +2782,5 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1"
dependencies = [
"winapi 0.3.9",
"winapi",
]

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.158"
version = "0.0.162"
edition = "2021"
rust-version = "1.65.0"
@@ -33,7 +33,7 @@ itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a8725d161fe8b3ed73a6758b21e177" }
log = { version = "0.4.17" }
nohash-hasher = { version = "0.2.0" }
notify = { version = "4.0.17" }
notify = { version = "5.0.0" }
num-bigint = { version = "0.4.3" }
once_cell = { version = "1.16.0" }
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }

View File

@@ -145,7 +145,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.158
rev: v0.0.162
hooks:
- id: ruff
```
@@ -350,6 +350,16 @@ error reporting for the entire file.
For targeted exclusions across entire files (e.g., "Ignore all F841 violations in
`/path/to/file.py`"), see the [`per-file-ignores`](#per-file-ignores) configuration setting.
### "Action Comments"
Ruff respects `isort`'s ["Action Comments"](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
(`# isort: skip_file`, `# isort: on`, `# isort: off`, `# isort: skip`, and `isort: split`), which
enable selectively enabling and disabling import sorting for blocks of code and other inline
configuration.
See the [`isort` documentation](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
for more.
### Automating `noqa` Directives
Ruff supports several workflows to aid in `noqa` management.
@@ -1295,7 +1305,7 @@ paths.
```toml
[tool.ruff]
exclude = [".venv"]
````
```
---
@@ -1313,7 +1323,7 @@ A list of file patterns to omit from linting, in addition to those specified by
[tool.ruff]
# In addition to the standard set of exclusions, omit all tests, plus a specific file.
extend-exclude = ["tests", "src/bad.py"]
````
```
---
@@ -1509,6 +1519,26 @@ dummy-variable-rgx = "^_$"
---
#### [`allowed-confusables`](#allowed-confusables)
A list of allowed "confusable" Unicode characters to ignore when enforcing `RUF001`, `RUF002`,
and `RUF003`.
**Default value**: `[]`
**Type**: `Vec<char>`
**Example usage**:
```toml
[tool.ruff]
# Allow minus-sign (U+2212), greek-small-letter-rho (U+03C1), and greek-small-letter-alpha (U+03B1),
# which could be confused for "-", "p", and "a", respectively.
allowed-confusables = ["−", "ρ", "α"]
```
---
#### [`ignore-init-module-imports`](#ignore-init-module-imports)
Avoid automatically removing unused imports in `__init__.py` files. Such imports will still be

View File

@@ -1,11 +0,0 @@
set -euxo pipefail
NAME=$1
mkdir -p src/$1
mkdir -p resources/test/fixtures/$1
touch src/$1/mod.rs
sed -i "" "s/mod flake8_print;/mod flake8_print; mod flake8_return;/g" src/lib.rs
sed -i "" "s|// flake8-print|// flake8-return\n// flake8-print|g" src/checks.rs

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.158"
version = "0.0.162"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.158"
version = "0.0.162"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.158-dev.0"
version = "0.0.162-dev.0"
edition = "2021"
[lib]

View File

@@ -243,6 +243,7 @@ mod tests {
fn it_converts_empty() -> Result<()> {
let actual = convert(&HashMap::from([]), None)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -286,6 +287,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -329,6 +331,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -372,6 +375,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -415,6 +419,7 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -466,6 +471,7 @@ mod tests {
Some(vec![Plugin::Flake8Docstrings]),
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -544,6 +550,7 @@ mod tests {
None,
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,

View File

@@ -0,0 +1,29 @@
import a
import b
x = 1
import os
import sys
def f():
pass
if True:
x = 1
import collections
import typing
class X: pass
y = 1
import os
import sys
"""Docstring"""
if True:
import os
def f():
pass
if True:
import os
def f():
pass

View File

@@ -0,0 +1 @@
from .subscription import * # type: ignore # some very long comment explaining why this needs a type ignore

View File

@@ -0,0 +1,6 @@
from some_other_module import some_class
from some_other_module import *
# Above
from some_module import some_class # Aside
# Above
from some_module import * # Aside

View File

@@ -0,0 +1,10 @@
# isort: skip_file
import e
import f
# isort: split
import a
import b
import c
import d

View File

@@ -0,0 +1,9 @@
import e
import f
# isort: split
import a
import b
import c
import d

View File

@@ -4,5 +4,18 @@ if x is "abc":
if 123 is not y:
pass
if 123 is \
not y:
pass
if "123" is x < 3:
pass
if "123" != x is 3:
pass
if ("123" != x) is 3:
pass
if "123" != (x is 3):
pass

View File

@@ -34,4 +34,5 @@ def isinstances():
result = isinstance(var[7], int) or not isinstance(var[7], float)
result = isinstance(var[6], int) or isinstance(var[7], float)
result = isinstance(var[6], int) or isinstance(var[7], int)
result = isinstance(var[6], (float, int)) or False
return result

View File

@@ -1,4 +1,5 @@
[tool.ruff]
allowed-confusables = ["", "ρ", ""]
line-length = 88
extend-exclude = [
"excluded_file.py",
@@ -35,13 +36,8 @@ ignore-names = [
"longMessage",
"maxDiff",
]
classmethod-decorators = [
"classmethod",
"pydantic.validator",
]
staticmethod-decorators = [
"staticmethod",
]
classmethod-decorators = ["classmethod", "pydantic.validator"]
staticmethod-decorators = ["staticmethod"]
[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "parents"

View File

@@ -1,7 +0,0 @@
x = "𝐁ad string"
def f():
"""Here's a docstring with an unusual parenthesis: """
# And here's a comment with an unusual punctuation mark:
...

View File

@@ -1,7 +0,0 @@
x = "𝐁ad string"
def f():
"""Here's a docstring with an unusual parenthesis: """
# And here's a comment with an unusual punctuation mark:
...

View File

@@ -1,7 +1,14 @@
x = "𝐁ad string"
y = ""
def f():
"""Here's a docstring with an unusual parenthesis: """
# And here's a comment with an unusual punctuation mark:
...
def g():
"""Here's a docstring with a greek rho: ρ"""
# And here's a comment with a greek alpha:
...

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.158"
version = "0.0.162"
edition = "2021"
[dependencies]

View File

@@ -301,6 +301,16 @@ pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool
false
}
/// Return the number of trailing empty lines following a statement.
pub fn count_trailing_lines(stmt: &Stmt, locator: &SourceCodeLocator) -> usize {
let suffix =
locator.slice_source_code_at(&Location::new(stmt.end_location.unwrap().row() + 1, 0));
suffix
.lines()
.take_while(|line| line.trim().is_empty())
.count()
}
#[cfg(test)]
mod tests {
use anyhow::Result;

View File

@@ -1,4 +1,7 @@
use rustpython_ast::{Cmpop, Located};
use rustpython_parser::ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;
use crate::ast::types::{BindingKind, Scope};
@@ -93,20 +96,164 @@ pub fn in_nested_block<'a>(parents: &mut impl Iterator<Item = &'a Stmt>) -> bool
/// Check if a node represents an unpacking assignment.
pub fn is_unpacking_assignment(stmt: &Stmt) -> bool {
if let StmtKind::Assign { targets, value, .. } = &stmt.node {
if !targets.iter().any(|child| {
matches!(
child.node,
ExprKind::Set { .. } | ExprKind::List { .. } | ExprKind::Tuple { .. }
)
}) {
return false;
}
match &value.node {
ExprKind::Set { .. } | ExprKind::List { .. } | ExprKind::Tuple { .. } => return false,
_ => {}
}
return true;
let StmtKind::Assign { targets, value, .. } = &stmt.node else {
return false;
};
if !targets.iter().any(|child| {
matches!(
child.node,
ExprKind::Set { .. } | ExprKind::List { .. } | ExprKind::Tuple { .. }
)
}) {
return false;
}
!matches!(
&value.node,
ExprKind::Set { .. } | ExprKind::List { .. } | ExprKind::Tuple { .. }
)
}
pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
/// Extract all `Cmpop` operators from a source code snippet, with appropriate
/// ranges.
///
/// `RustPython` doesn't include line and column information on `Cmpop` nodes.
/// `CPython` doesn't either. This method iterates over the token stream and
/// re-identifies `Cmpop` nodes, annotating them with valid ranges.
pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
let mut tok_iter = lexer::make_tokenizer(contents)
.flatten()
.into_iter()
.peekable();
let mut ops: Vec<LocatedCmpop> = vec![];
let mut count: usize = 0;
loop {
let Some((start, tok, end)) = tok_iter.next() else {
break;
};
if matches!(tok, Tok::Lpar) {
count += 1;
continue;
} else if matches!(tok, Tok::Rpar) {
count -= 1;
continue;
}
if count == 0 {
match tok {
Tok::Not => {
if let Some((_, _, end)) =
tok_iter.next_if(|(_, tok, _)| matches!(tok, Tok::In))
{
ops.push(LocatedCmpop::new(start, end, Cmpop::NotIn));
}
}
Tok::In => {
ops.push(LocatedCmpop::new(start, end, Cmpop::In));
}
Tok::Is => {
if let Some((_, _, end)) =
tok_iter.next_if(|(_, tok, _)| matches!(tok, Tok::Not))
{
ops.push(LocatedCmpop::new(start, end, Cmpop::IsNot));
} else {
ops.push(LocatedCmpop::new(start, end, Cmpop::Is));
}
}
Tok::NotEqual => {
ops.push(LocatedCmpop::new(start, end, Cmpop::NotEq));
}
Tok::EqEqual => {
ops.push(LocatedCmpop::new(start, end, Cmpop::Eq));
}
Tok::GreaterEqual => {
ops.push(LocatedCmpop::new(start, end, Cmpop::GtE));
}
Tok::Greater => {
ops.push(LocatedCmpop::new(start, end, Cmpop::Gt));
}
Tok::LessEqual => {
ops.push(LocatedCmpop::new(start, end, Cmpop::LtE));
}
Tok::Less => {
ops.push(LocatedCmpop::new(start, end, Cmpop::Lt));
}
_ => {}
}
}
}
ops
}
#[cfg(test)]
mod tests {
use rustpython_ast::{Cmpop, Location};
use crate::ast::operations::{locate_cmpops, LocatedCmpop};
#[test]
fn locates_cmpops() {
assert_eq!(
locate_cmpops("x == 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 4),
Cmpop::Eq
)]
);
assert_eq!(
locate_cmpops("x != 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 4),
Cmpop::NotEq
)]
);
assert_eq!(
locate_cmpops("x is 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 4),
Cmpop::Is
)]
);
assert_eq!(
locate_cmpops("x is not 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 8),
Cmpop::IsNot
)]
);
assert_eq!(
locate_cmpops("x in 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 4),
Cmpop::In
)]
);
assert_eq!(
locate_cmpops("x not in 1"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 8),
Cmpop::NotIn
)]
);
assert_eq!(
locate_cmpops("x != (1 is not 2)"),
vec![LocatedCmpop::new(
Location::new(1, 2),
Location::new(1, 4),
Cmpop::NotEq
)]
);
}
false
}

View File

@@ -92,7 +92,7 @@ fn apply_fixes<'a>(
}
// Add the remaining content.
let slice = locator.slice_source_code_at(last_pos);
let slice = locator.slice_source_code_at(&last_pos);
output.append(&slice);
(Cow::from(output.finish()), num_fixed)

View File

@@ -3117,7 +3117,7 @@ impl<'a> Checker<'a> {
Some(fix)
}
Err(e) => {
error!("Failed to remove unused imports: {}", e);
error!("Failed to remove unused imports: {e}");
None
}
}

View File

@@ -1,10 +1,10 @@
//! Lint rules based on import analysis.
use nohash_hasher::IntSet;
use rustpython_parser::ast::Suite;
use crate::ast::visitor::Visitor;
use crate::checks::Check;
use crate::directives::IsortDirectives;
use crate::isort;
use crate::isort::track::ImportTracker;
use crate::settings::Settings;
@@ -18,7 +18,7 @@ fn check_import_blocks(
) -> Vec<Check> {
let mut checks = vec![];
for block in tracker.into_iter() {
if !block.is_empty() {
if !block.imports.is_empty() {
if let Some(check) = isort::plugins::check_imports(&block, locator, settings, autofix) {
checks.push(check);
}
@@ -30,11 +30,11 @@ fn check_import_blocks(
pub fn check_imports(
python_ast: &Suite,
locator: &SourceCodeLocator,
exclusions: &IntSet<usize>,
directives: &IsortDirectives,
settings: &Settings,
autofix: bool,
) -> Vec<Check> {
let mut tracker = ImportTracker::new(exclusions);
let mut tracker = ImportTracker::new(directives);
for stmt in python_ast {
tracker.visit_stmt(stmt);
}

View File

@@ -2617,8 +2617,7 @@ mod tests {
for check_code in CheckCode::iter() {
assert!(
CheckCode::from_str(check_code.as_ref()).is_ok(),
"{:?} could not be round-trip serialized.",
check_code
"{check_code:?} could not be round-trip serialized."
);
}
}

View File

@@ -30,9 +30,15 @@ impl Flags {
}
}
#[derive(Default)]
pub struct IsortDirectives {
pub exclusions: IntSet<usize>,
pub splits: Vec<usize>,
}
pub struct Directives {
pub noqa_line_for: IntMap<usize, usize>,
pub isort_exclusions: IntSet<usize>,
pub isort: IsortDirectives,
}
pub fn extract_directives(
@@ -46,10 +52,10 @@ pub fn extract_directives(
} else {
IntMap::default()
},
isort_exclusions: if flags.contains(Flags::ISORT) {
extract_isort_exclusions(lxr, locator)
isort: if flags.contains(Flags::ISORT) {
extract_isort_directives(lxr, locator)
} else {
IntSet::default()
IsortDirectives::default()
},
}
}
@@ -73,17 +79,32 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
}
/// Extract a set of lines over which to disable isort.
pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
pub fn extract_isort_directives(lxr: &[LexResult], locator: &SourceCodeLocator) -> IsortDirectives {
let mut exclusions: IntSet<usize> = IntSet::default();
let mut splits: Vec<usize> = Vec::default();
let mut skip_file: bool = false;
let mut off: Option<Location> = None;
let mut last: Option<Location> = None;
for &(start, ref tok, end) in lxr.iter().flatten() {
// TODO(charlie): Modify RustPython to include the comment text in the token.
last = Some(end);
// No need to keep processing, but we do need to determine the last token.
if skip_file {
continue;
}
if matches!(tok, Tok::Comment) {
// TODO(charlie): Modify RustPython to include the comment text in the token.
let comment_text = locator.slice_source_code_range(&Range {
location: start,
end_location: end,
});
if off.is_some() {
if comment_text == "# isort: split" {
splits.push(start.row());
} else if comment_text == "# isort: skip_file" {
skip_file = true;
} else if off.is_some() {
if comment_text == "# isort: on" {
if let Some(start) = off {
for row in start.row() + 1..=end.row() {
@@ -93,43 +114,50 @@ pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator)
off = None;
}
} else {
if comment_text.contains("isort: skip") || comment_text.contains("isort:skip") {
if comment_text.contains("isort: skip") {
exclusions.insert(start.row());
} else if comment_text == "# isort: off" {
off = Some(start);
}
}
} else if matches!(tok, Tok::EndOfFile) {
if let Some(start) = off {
for row in start.row() + 1..=end.row() {
exclusions.insert(row);
}
}
break;
}
}
exclusions
if skip_file {
// Enforce `isort: skip_file`.
if let Some(end) = last {
for row in 1..=end.row() {
exclusions.insert(row);
}
}
} else if let Some(start) = off {
// Enforce unterminated `isort: off`.
if let Some(end) = last {
for row in start.row() + 1..=end.row() {
exclusions.insert(row);
}
}
}
IsortDirectives { exclusions, splits }
}
#[cfg(test)]
mod tests {
use nohash_hasher::IntMap;
use nohash_hasher::{IntMap, IntSet};
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use crate::directives::extract_noqa_line_for;
use crate::directives::{extract_isort_directives, extract_noqa_line_for};
use crate::SourceCodeLocator;
#[test]
fn extraction() {
let empty: IntMap<usize, usize> = IntMap::default();
fn noqa_extraction() {
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"
@@ -138,7 +166,7 @@ y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
@@ -147,7 +175,7 @@ z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
@@ -157,7 +185,7 @@ z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = '''abc
@@ -200,4 +228,106 @@ z = x + 1",
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
);
}
#[test]
fn isort_exclusions() {
let contents = "x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::default()
);
let contents = "# isort: off
x = 1
y = 2
# isort: on
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::from_iter([2, 3, 4])
);
let contents = "# isort: off
x = 1
# isort: off
y = 2
# isort: on
z = x + 1
# isort: on";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::from_iter([2, 3, 4, 5])
);
let contents = "# isort: off
x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::from_iter([2, 3, 4])
);
let contents = "# isort: skip_file
x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::from_iter([1, 2, 3, 4])
);
let contents = "# isort: off
x = 1
# isort: on
y = 2
# isort: skip_file
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).exclusions,
IntSet::from_iter([1, 2, 3, 4, 5, 6])
);
}
#[test]
fn isort_splits() {
let contents = "x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(
extract_isort_directives(&lxr, &locator).splits,
Vec::<usize>::new()
);
let contents = "x = 1
y = 2
# isort: split
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(extract_isort_directives(&lxr, &locator).splits, vec![3]);
let contents = "x = 1
y = 2 # isort: split
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let locator = SourceCodeLocator::new(contents);
assert_eq!(extract_isort_directives(&lxr, &locator).splits, vec![2]);
}
}

View File

@@ -7,20 +7,20 @@ use crate::visibility::{Modifier, VisibleScope};
/// Extract a docstring from a function or class body.
pub fn docstring_from(suite: &[Stmt]) -> Option<&Expr> {
if let Some(stmt) = suite.first() {
if let StmtKind::Expr { value } = &stmt.node {
if matches!(
&value.node,
ExprKind::Constant {
value: Constant::Str(_),
..
}
) {
return Some(value);
}
let stmt = suite.first()?;
let StmtKind::Expr { value } = &stmt.node else {
return None;
};
if !matches!(
&value.node,
ExprKind::Constant {
value: Constant::Str(_),
..
}
) {
return None;
}
None
Some(value)
}
/// Extract a `Definition` from the AST node defined by a `Stmt`.

View File

@@ -5,10 +5,11 @@ use crate::checks::{Check, CheckKind};
/// S102
pub fn exec_used(expr: &Expr, func: &Expr) -> Option<Check> {
if let ExprKind::Name { id, .. } = &func.node {
if id == "exec" {
return Some(Check::new(CheckKind::ExecUsed, Range::from_located(expr)));
}
let ExprKind::Name { id, .. } = &func.node else {
return None;
};
if id != "exec" {
return None;
}
None
Some(Check::new(CheckKind::ExecUsed, Range::from_located(expr)))
}

View File

@@ -5,16 +5,15 @@ use crate::checks::{Check, CheckKind};
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Check> {
if let Some(string) = string_literal(default) {
let kwarg_name = &arg.node.arg;
if matches_password_name(kwarg_name) {
return Some(Check::new(
CheckKind::HardcodedPasswordDefault(string.to_string()),
Range::from_located(default),
));
}
let string = string_literal(default)?;
let kwarg_name = &arg.node.arg;
if !matches_password_name(kwarg_name) {
return None;
}
None
Some(Check::new(
CheckKind::HardcodedPasswordDefault(string.to_string()),
Range::from_located(default),
))
}
/// S107

View File

@@ -9,17 +9,15 @@ pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Check> {
keywords
.iter()
.filter_map(|keyword| {
if let Some(string) = string_literal(&keyword.node.value) {
if let Some(arg) = &keyword.node.arg {
if matches_password_name(arg) {
return Some(Check::new(
CheckKind::HardcodedPasswordFuncArg(string.to_string()),
Range::from_located(keyword),
));
}
}
let string = string_literal(&keyword.node.value)?;
let arg = keyword.node.arg.as_ref()?;
if !matches_password_name(arg) {
return None;
}
None
Some(Check::new(
CheckKind::HardcodedPasswordFuncArg(string.to_string()),
Range::from_located(keyword),
))
})
.collect()
}

View File

@@ -29,15 +29,14 @@ pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -
comparators
.iter()
.filter_map(|comp| {
if let Some(string) = string_literal(comp) {
if is_password_target(left) {
return Some(Check::new(
CheckKind::HardcodedPasswordString(string.to_string()),
Range::from_located(comp),
));
}
let string = string_literal(comp)?;
if !is_password_target(left) {
return None;
}
None
Some(Check::new(
CheckKind::HardcodedPasswordString(string.to_string()),
Range::from_located(comp),
))
})
.collect()
}

View File

@@ -6,17 +6,18 @@ use crate::checks::{Check, CheckKind};
pub fn blind_except(checker: &mut Checker, handlers: &[Excepthandler]) {
for handler in handlers {
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
if let Some(type_) = type_ {
if let ExprKind::Name { id, .. } = &type_.node {
for exception in ["BaseException", "Exception"] {
if id == exception {
checker.add_check(Check::new(
CheckKind::BlindExcept,
Range::from_located(type_),
));
}
}
let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
continue;
};
let ExprKind::Name { id, .. } = &type_.node else {
continue;
};
for exception in ["BaseException", "Exception"] {
if id == exception {
checker.add_check(Check::new(
CheckKind::BlindExcept,
Range::from_located(type_),
));
}
}
}

View File

@@ -11,11 +11,10 @@ const FUNC_NAME_ALLOWLIST: &[&str] = &["get", "setdefault", "pop", "fromkeys"];
/// `true`, the function name must be explicitly allowed, and the argument must
/// be either the first or second argument in the call.
fn allow_boolean_trap(func: &Expr) -> bool {
if let ExprKind::Attribute { attr, .. } = &func.node {
FUNC_NAME_ALLOWLIST.contains(&attr.as_ref())
} else {
false
}
let ExprKind::Attribute { attr, .. } = &func.node else {
return false;
};
FUNC_NAME_ALLOWLIST.contains(&attr.as_ref())
}
fn is_boolean_arg(arg: &Expr) -> bool {
@@ -39,24 +38,26 @@ pub fn check_positional_boolean_in_def(checker: &mut Checker, arguments: &Argume
if arg.node.annotation.is_none() {
continue;
}
let Some(expr) = &arg.node.annotation else {
continue;
};
if let Some(expr) = &arg.node.annotation {
// check for both bool (python class) and 'bool' (string annotation)
let hint = match &expr.node {
ExprKind::Name { id, .. } => id == "bool",
ExprKind::Constant {
value: Constant::Str(value),
..
} => value == "bool",
_ => false,
};
if hint {
checker.add_check(Check::new(
CheckKind::BooleanPositionalArgInFunctionDefinition,
Range::from_located(arg),
));
}
// check for both bool (python class) and 'bool' (string annotation)
let hint = match &expr.node {
ExprKind::Name { id, .. } => id == "bool",
ExprKind::Constant {
value: Constant::Str(value),
..
} => value == "bool",
_ => false,
};
if !hint {
continue;
}
checker.add_check(Check::new(
CheckKind::BooleanPositionalArgInFunctionDefinition,
Range::from_located(arg),
));
}
}

View File

@@ -67,24 +67,28 @@ pub fn abstract_base_class(
keywords: &[Keyword],
body: &[Stmt],
) {
if bases.len() + keywords.len() == 1
&& is_abc_class(
bases,
keywords,
&checker.from_imports,
&checker.import_aliases,
)
{
let mut has_abstract_method = false;
for stmt in body {
// https://github.com/PyCQA/flake8-bugbear/issues/293
// Ignore abc's that declares a class attribute that must be set
if let StmtKind::AnnAssign { .. } | StmtKind::Assign { .. } = &stmt.node {
has_abstract_method = true;
continue;
}
if bases.len() + keywords.len() != 1 {
return;
}
if !is_abc_class(
bases,
keywords,
&checker.from_imports,
&checker.import_aliases,
) {
return;
}
if let StmtKind::FunctionDef {
let mut has_abstract_method = false;
for stmt in body {
// https://github.com/PyCQA/flake8-bugbear/issues/293
// Ignore abc's that declares a class attribute that must be set
if let StmtKind::AnnAssign { .. } | StmtKind::Assign { .. } = &stmt.node {
has_abstract_method = true;
continue;
}
let (StmtKind::FunctionDef {
decorator_list,
body,
..
@@ -93,36 +97,38 @@ pub fn abstract_base_class(
decorator_list,
body,
..
} = &stmt.node
{
let has_abstract_decorator = decorator_list
.iter()
.any(|d| is_abstractmethod(d, &checker.from_imports, &checker.import_aliases));
}) = &stmt.node else {
continue;
};
has_abstract_method |= has_abstract_decorator;
let has_abstract_decorator = decorator_list
.iter()
.any(|d| is_abstractmethod(d, &checker.from_imports, &checker.import_aliases));
if checker.settings.enabled.contains(&CheckCode::B027) {
if !has_abstract_decorator
&& is_empty_body(body)
&& !decorator_list
.iter()
.any(|d| is_overload(d, &checker.from_imports, &checker.import_aliases))
{
checker.add_check(Check::new(
CheckKind::EmptyMethodWithoutAbstractDecorator(name.to_string()),
Range::from_located(stmt),
));
}
}
}
has_abstract_method |= has_abstract_decorator;
if !checker.settings.enabled.contains(&CheckCode::B027) {
continue;
}
if checker.settings.enabled.contains(&CheckCode::B024) {
if !has_abstract_method {
checker.add_check(Check::new(
CheckKind::AbstractBaseClassWithoutAbstractMethod(name.to_string()),
Range::from_located(stmt),
));
}
if !has_abstract_decorator
&& is_empty_body(body)
&& !decorator_list
.iter()
.any(|d| is_overload(d, &checker.from_imports, &checker.import_aliases))
{
checker.add_check(Check::new(
CheckKind::EmptyMethodWithoutAbstractDecorator(name.to_string()),
Range::from_located(stmt),
));
}
}
if checker.settings.enabled.contains(&CheckCode::B024) {
if !has_abstract_method {
checker.add_check(Check::new(
CheckKind::AbstractBaseClassWithoutAbstractMethod(name.to_string()),
Range::from_located(stmt),
));
}
}
}

View File

@@ -38,23 +38,24 @@ fn assertion_error(msg: Option<&Expr>) -> Stmt {
/// B011
pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option<&Expr>) {
if let ExprKind::Constant {
let ExprKind::Constant {
value: Constant::Bool(false),
..
} = &test.node
{
let mut check = Check::new(CheckKind::DoNotAssertFalse, Range::from_located(test));
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_stmt(&assertion_error(msg));
if let Ok(content) = generator.generate() {
check.amend(Fix::replacement(
content,
stmt.location,
stmt.end_location.unwrap(),
));
}
} = &test.node else {
return;
};
let mut check = Check::new(CheckKind::DoNotAssertFalse, Range::from_located(test));
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_stmt(&assertion_error(msg));
if let Ok(content) = generator.generate() {
check.amend(Fix::replacement(
content,
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.add_check(check);
}
checker.add_check(check);
}

View File

@@ -7,25 +7,34 @@ use crate::checks::{Check, CheckKind};
/// B017
pub fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[Withitem]) {
if let Some(item) = items.first() {
let item_context = &item.context_expr;
if let ExprKind::Call { func, args, .. } = &item_context.node {
if args.len() == 1
&& item.optional_vars.is_none()
&& matches!(&func.node, ExprKind::Attribute { attr, .. } if attr == "assertRaises")
&& match_module_member(
args.first().unwrap(),
"",
"Exception",
&checker.from_imports,
&checker.import_aliases,
)
{
checker.add_check(Check::new(
CheckKind::NoAssertRaisesException,
Range::from_located(stmt),
));
}
}
let Some(item) = items.first() else {
return;
};
let item_context = &item.context_expr;
let ExprKind::Call { func, args, .. } = &item_context.node else {
return;
};
if args.len() != 1 {
return;
}
if item.optional_vars.is_some() {
return;
}
if !matches!(&func.node, ExprKind::Attribute { attr, .. } if attr == "assertRaises") {
return;
}
if !match_module_member(
args.first().unwrap(),
"",
"Exception",
&checker.from_imports,
&checker.import_aliases,
) {
return;
}
checker.add_check(Check::new(
CheckKind::NoAssertRaisesException,
Range::from_located(stmt),
));
}

View File

@@ -6,19 +6,24 @@ use crate::checks::{Check, CheckKind};
/// B003
pub fn assignment_to_os_environ(checker: &mut Checker, targets: &[Expr]) {
if targets.len() == 1 {
let target = &targets[0];
if let ExprKind::Attribute { value, attr, .. } = &target.node {
if attr == "environ" {
if let ExprKind::Name { id, .. } = &value.node {
if id == "os" {
checker.add_check(Check::new(
CheckKind::AssignmentToOsEnviron,
Range::from_located(target),
));
}
}
}
}
if targets.len() != 1 {
return;
}
let target = &targets[0];
let ExprKind::Attribute { value, attr, .. } = &target.node else {
return;
};
if attr != "environ" {
return;
}
let ExprKind::Name { id, .. } = &value.node else {
return;
};
if id != "os" {
return;
}
checker.add_check(Check::new(
CheckKind::AssignmentToOsEnviron,
Range::from_located(target),
));
}

View File

@@ -13,29 +13,30 @@ fn is_cache_func(checker: &Checker, expr: &Expr) -> bool {
/// B019
pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
if matches!(checker.current_scope().kind, ScopeKind::Class(_)) {
for decorator in decorator_list {
// TODO(charlie): This should take into account `classmethod-decorators` and
// `staticmethod-decorators`.
if let ExprKind::Name { id, .. } = &decorator.node {
if id == "classmethod" || id == "staticmethod" {
return;
}
}
}
for decorator in decorator_list {
if is_cache_func(
checker,
match &decorator.node {
ExprKind::Call { func, .. } => func,
_ => decorator,
},
) {
checker.add_check(Check::new(
CheckKind::CachedInstanceMethod,
Range::from_located(decorator),
));
if !matches!(checker.current_scope().kind, ScopeKind::Class(_)) {
return;
}
for decorator in decorator_list {
// TODO(charlie): This should take into account `classmethod-decorators` and
// `staticmethod-decorators`.
if let ExprKind::Name { id, .. } = &decorator.node {
if id == "classmethod" || id == "staticmethod" {
return;
}
}
}
for decorator in decorator_list {
if is_cache_func(
checker,
match &decorator.node {
ExprKind::Call { func, .. } => func,
_ => decorator,
},
) {
checker.add_check(Check::new(
CheckKind::CachedInstanceMethod,
Range::from_located(decorator),
));
}
}
}

View File

@@ -6,10 +6,11 @@ use crate::checks::{Check, CheckKind};
/// B016
pub fn cannot_raise_literal(checker: &mut Checker, expr: &Expr) {
if let ExprKind::Constant { .. } = &expr.node {
checker.add_check(Check::new(
CheckKind::CannotRaiseLiteral,
Range::from_located(expr),
));
}
let ExprKind::Constant { .. } = &expr.node else {
return;
};
checker.add_check(Check::new(
CheckKind::CannotRaiseLiteral,
Range::from_located(expr),
));
}

View File

@@ -79,33 +79,30 @@ pub fn duplicate_exceptions(checker: &mut Checker, stmt: &Stmt, handlers: &[Exce
let mut seen: BTreeSet<Vec<&str>> = BTreeSet::default();
let mut duplicates: BTreeSet<Vec<&str>> = BTreeSet::default();
for handler in handlers {
match &handler.node {
ExcepthandlerKind::ExceptHandler { type_, .. } => {
if let Some(type_) = type_ {
match &type_.node {
ExprKind::Attribute { .. } | ExprKind::Name { .. } => {
let call_path = helpers::collect_call_paths(type_);
if !call_path.is_empty() {
if seen.contains(&call_path) {
duplicates.insert(call_path);
} else {
seen.insert(call_path);
}
}
}
ExprKind::Tuple { elts, .. } => {
for name in duplicate_handler_exceptions(checker, type_, elts) {
if seen.contains(&name) {
duplicates.insert(name);
} else {
seen.insert(name);
}
}
}
_ => {}
let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
continue;
};
match &type_.node {
ExprKind::Attribute { .. } | ExprKind::Name { .. } => {
let call_path = helpers::collect_call_paths(type_);
if !call_path.is_empty() {
if seen.contains(&call_path) {
duplicates.insert(call_path);
} else {
seen.insert(call_path);
}
}
}
ExprKind::Tuple { elts, .. } => {
for name in duplicate_handler_exceptions(checker, type_, elts) {
if seen.contains(&name) {
duplicates.insert(name);
} else {
seen.insert(name);
}
}
}
_ => {}
}
}

View File

@@ -6,14 +6,17 @@ use crate::checks::{Check, CheckKind};
/// B021
pub fn f_string_docstring(checker: &mut Checker, body: &[Stmt]) {
if let Some(stmt) = body.first() {
if let StmtKind::Expr { value } = &stmt.node {
if let ExprKind::JoinedStr { .. } = value.node {
checker.add_check(Check::new(
CheckKind::FStringDocstring,
Range::from_located(stmt),
));
}
}
}
let Some(stmt) = body.first() else {
return;
};
let StmtKind::Expr { value } = &stmt.node else {
return;
};
let ExprKind::JoinedStr { .. } = value.node else {
return;
};
checker.add_check(Check::new(
CheckKind::FStringDocstring,
Range::from_located(stmt),
));
}

View File

@@ -71,29 +71,26 @@ where
}
fn is_nan_or_infinity(expr: &Expr, args: &[Expr]) -> bool {
if let ExprKind::Name { id, .. } = &expr.node {
if id == "float" {
if let Some(arg) = args.first() {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node
{
let lowercased = value.to_lowercase();
return lowercased == "nan"
|| lowercased == "+nan"
|| lowercased == "-nan"
|| lowercased == "inf"
|| lowercased == "+inf"
|| lowercased == "-inf"
|| lowercased == "infinity"
|| lowercased == "+infinity"
|| lowercased == "-infinity";
}
}
}
let ExprKind::Name { id, .. } = &expr.node else {
return false;
};
if id != "float" {
return false;
}
false
let Some(arg) = args.first() else {
return false;
};
let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node else {
return false;
};
let lowercased = value.to_lowercase();
matches!(
lowercased.as_str(),
"nan" | "+nan" | "-nan" | "inf" | "+inf" | "-inf" | "infinity" | "+infinity" | "-infinity"
)
}
/// B008

View File

@@ -22,32 +22,39 @@ fn attribute(value: &Expr, attr: &str) -> Expr {
/// B009
pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "getattr" {
if let [obj, arg] = args {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node
{
if IDENTIFIER_REGEX.is_match(value) && !KWLIST.contains(&value.as_str()) {
let mut check =
Check::new(CheckKind::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_expr(&attribute(obj, value), 0);
if let Ok(content) = generator.generate() {
check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
));
}
}
checker.add_check(check);
}
}
}
let ExprKind::Name { id, .. } = &func.node else {
return;
};
if id != "getattr" {
return;
}
let [obj, arg] = args else {
return;
};
let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node else {
return;
};
if !IDENTIFIER_REGEX.is_match(value) {
return;
}
if KWLIST.contains(&value.as_str()) {
return;
}
let mut check = Check::new(CheckKind::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_expr(&attribute(obj, value), 0);
if let Ok(content) = generator.generate() {
check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
));
}
}
checker.add_check(check);
}

View File

@@ -13,21 +13,18 @@ struct RaiseVisitor {
impl<'a> Visitor<'a> for RaiseVisitor {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match &stmt.node {
StmtKind::Raise { exc, cause } => {
if cause.is_none() {
if let Some(exc) = exc {
match &exc.node {
ExprKind::Name { id, .. } if is_lower(id) => {}
_ => {
self.checks.push(Check::new(
CheckKind::RaiseWithoutFromInsideExcept,
Range::from_located(stmt),
));
}
}
}
StmtKind::Raise {
exc: Some(exc),
cause: None,
} => match &exc.node {
ExprKind::Name { id, .. } if is_lower(id) => {}
_ => {
self.checks.push(Check::new(
CheckKind::RaiseWithoutFromInsideExcept,
Range::from_located(stmt),
));
}
}
},
StmtKind::ClassDef { .. }
| StmtKind::FunctionDef { .. }
| StmtKind::AsyncFunctionDef { .. }

View File

@@ -39,46 +39,47 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
}
}
}
if let (Some(location), Some(end_location)) = (location, end_location) {
Ok(Range {
location,
end_location,
})
} else {
let (Some(location), Some(end_location)) = (location, end_location) else {
bail!("Unable to find left and right parentheses");
}
};
Ok(Range {
location,
end_location,
})
}
/// B013
pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[Excepthandler]) {
for handler in handlers {
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
if let Some(type_) = type_ {
if let ExprKind::Tuple { elts, .. } = &type_.node {
if let [elt] = &elts[..] {
let mut check = Check::new(
CheckKind::RedundantTupleInExceptionHandler(elt.to_string()),
Range::from_located(type_),
);
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_expr(elt, 0);
if let Ok(content) = generator.generate() {
match match_tuple_range(handler, checker.locator) {
Ok(range) => {
check.amend(Fix::replacement(
content,
range.location,
range.end_location,
));
}
Err(e) => error!("Failed to locate parentheses: {}", e),
}
}
let ExcepthandlerKind::ExceptHandler { type_: Some(type_), .. } = &handler.node else {
continue;
};
let ExprKind::Tuple { elts, .. } = &type_.node else {
continue;
};
let [elt] = &elts[..] else {
continue;
};
let mut check = Check::new(
CheckKind::RedundantTupleInExceptionHandler(elt.to_string()),
Range::from_located(type_),
);
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
generator.unparse_expr(elt, 0);
if let Ok(content) = generator.generate() {
match match_tuple_range(handler, checker.locator) {
Ok(range) => {
check.amend(Fix::replacement(
content,
range.location,
range.end_location,
));
}
checker.add_check(check);
Err(e) => error!("Failed to locate parentheses: {e}"),
}
}
}
checker.add_check(check);
}
}

View File

@@ -35,31 +35,37 @@ fn assignment(obj: &Expr, name: &str, value: &Expr) -> Result<String> {
/// B010
pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "setattr" {
if let [obj, name, value] = args {
if let ExprKind::Constant {
value: Constant::Str(name),
..
} = &name.node
{
if IDENTIFIER_REGEX.is_match(name) && !KWLIST.contains(&name.as_str()) {
let mut check =
Check::new(CheckKind::SetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
match assignment(obj, name, value) {
Ok(content) => check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
)),
Err(e) => error!("Failed to fix invalid comparison: {}", e),
};
}
checker.add_check(check);
}
}
}
}
let ExprKind::Name { id, .. } = &func.node else {
return;
};
if id != "setattr" {
return;
}
let [obj, name, value] = args else {
return;
};
let ExprKind::Constant {
value: Constant::Str(name),
..
} = &name.node else {
return;
};
if !IDENTIFIER_REGEX.is_match(name) {
return;
}
if KWLIST.contains(&name.as_str()) {
return;
}
let mut check = Check::new(CheckKind::SetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
match assignment(obj, name, value) {
Ok(content) => check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
)),
Err(e) => error!("Failed to fix invalid comparison: {e}"),
};
}
checker.add_check(check);
}

View File

@@ -10,16 +10,19 @@ pub fn star_arg_unpacking_after_keyword_arg(
args: &[Expr],
keywords: &[Keyword],
) {
if let Some(keyword) = keywords.first() {
for arg in args {
if let ExprKind::Starred { .. } = arg.node {
if arg.location > keyword.location {
checker.add_check(Check::new(
CheckKind::StarArgUnpackingAfterKeywordArg,
Range::from_located(arg),
));
}
}
let Some(keyword) = keywords.first() else {
return;
};
for arg in args {
let ExprKind::Starred { .. } = arg.node else {
continue;
};
if arg.location <= keyword.location {
continue;
}
checker.add_check(Check::new(
CheckKind::StarArgUnpackingAfterKeywordArg,
Range::from_located(arg),
));
}
}

View File

@@ -7,22 +7,27 @@ use crate::checks::{Check, CheckKind};
/// B005
pub fn strip_with_multi_characters(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Attribute { attr, .. } = &func.node {
if attr == "strip" || attr == "lstrip" || attr == "rstrip" {
if args.len() == 1 {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &args[0].node
{
if value.len() > 1 && value.chars().unique().count() != value.len() {
checker.add_check(Check::new(
CheckKind::StripWithMultiCharacters,
Range::from_located(expr),
));
}
}
}
}
let ExprKind::Attribute { attr, .. } = &func.node else {
return;
};
if !matches!(attr.as_str(), "strip" | "lstrip" | "rstrip") {
return;
}
if args.len() != 1 {
return;
}
let ExprKind::Constant {
value: Constant::Str(value),
..
} = &args[0].node else {
return;
};
if value.len() > 1 && value.chars().unique().count() != value.len() {
checker.add_check(Check::new(
CheckKind::StripWithMultiCharacters,
Range::from_located(expr),
));
}
}

View File

@@ -6,14 +6,17 @@ use crate::checks::{Check, CheckKind};
/// B002
pub fn unary_prefix_increment(checker: &mut Checker, expr: &Expr, op: &Unaryop, operand: &Expr) {
if matches!(op, Unaryop::UAdd) {
if let ExprKind::UnaryOp { op, .. } = &operand.node {
if matches!(op, Unaryop::UAdd) {
checker.add_check(Check::new(
CheckKind::UnaryPrefixIncrement,
Range::from_located(expr),
));
}
}
if !matches!(op, Unaryop::UAdd) {
return;
}
let ExprKind::UnaryOp { op, .. } = &operand.node else {
return;
};
if !matches!(op, Unaryop::UAdd) {
return;
}
checker.add_check(Check::new(
CheckKind::UnaryPrefixIncrement,
Range::from_located(expr),
));
}

View File

@@ -6,22 +6,27 @@ use crate::checks::{Check, CheckKind};
/// B004
pub fn unreliable_callable_check(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "getattr" || id == "hasattr" {
if args.len() >= 2 {
if let ExprKind::Constant {
value: Constant::Str(s),
..
} = &args[1].node
{
if s == "__call__" {
checker.add_check(Check::new(
CheckKind::UnreliableCallableCheck,
Range::from_located(expr),
));
}
}
}
}
let ExprKind::Name { id, .. } = &func.node else {
return;
};
if id != "getattr" && id != "hasattr" {
return;
}
if args.len() < 2 {
return;
};
let ExprKind::Constant {
value: Constant::Str(s),
..
} = &args[1].node else
{
return;
};
if s != "__call__" {
return;
}
checker.add_check(Check::new(
CheckKind::UnreliableCallableCheck,
Range::from_located(expr),
));
}

View File

@@ -62,7 +62,7 @@ pub fn unnecessary_generator_list(
if fix {
match fixes::fix_unnecessary_generator_list(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
@@ -86,7 +86,7 @@ pub fn unnecessary_generator_set(
if fix {
match fixes::fix_unnecessary_generator_set(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
@@ -112,7 +112,7 @@ pub fn unnecessary_generator_dict(
if fix {
match fixes::fix_unnecessary_generator_dict(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
@@ -139,7 +139,7 @@ pub fn unnecessary_list_comprehension_set(
if fix {
match fixes::fix_unnecessary_list_comprehension_set(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
@@ -158,22 +158,23 @@ pub fn unnecessary_list_comprehension_dict(
location: Range,
) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
if let ExprKind::ListComp { elt, .. } = &argument {
match &elt.node {
ExprKind::Tuple { elts, .. } if elts.len() == 2 => {
let mut check = Check::new(CheckKind::UnnecessaryListComprehensionDict, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_dict(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
}
}
return Some(check);
}
_ => {}
let ExprKind::ListComp { elt, .. } = &argument else {
return None;
};
let ExprKind::Tuple { elts, .. } = &elt.node else {
return None;
};
if elts.len() != 2 {
return None;
}
let mut check = Check::new(CheckKind::UnnecessaryListComprehensionDict, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_dict(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
None
Some(check)
}
/// C405 (`set([1, 2])`)
@@ -196,7 +197,7 @@ pub fn unnecessary_literal_set(
if fix {
match fixes::fix_unnecessary_literal_set(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
@@ -232,7 +233,7 @@ pub fn unnecessary_literal_dict(
if fix {
match fixes::fix_unnecessary_literal_dict(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
@@ -268,7 +269,7 @@ pub fn unnecessary_collection_call(
if fix {
match fixes::fix_unnecessary_collection_call(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
@@ -296,7 +297,7 @@ pub fn unnecessary_literal_within_tuple_call(
if fix {
match fixes::fix_unnecessary_literal_within_tuple_call(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
@@ -324,7 +325,7 @@ pub fn unnecessary_literal_within_list_call(
if fix {
match fixes::fix_unnecessary_literal_within_list_call(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
@@ -340,17 +341,17 @@ pub fn unnecessary_list_call(
location: Range,
) -> Option<Check> {
let argument = first_argument_with_matching_function("list", func, args)?;
if let ExprKind::ListComp { .. } = argument {
let mut check = Check::new(CheckKind::UnnecessaryListCall, location);
if fix {
match fixes::fix_unnecessary_list_call(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
}
}
return Some(check);
if !matches!(argument, ExprKind::ListComp { .. }) {
return None;
}
None
let mut check = Check::new(CheckKind::UnnecessaryListCall, location);
if fix {
match fixes::fix_unnecessary_list_call(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
}
/// C413
@@ -366,22 +367,24 @@ pub fn unnecessary_call_around_sorted(
if !(outer == "list" || outer == "reversed") {
return None;
}
if let ExprKind::Call { func, .. } = &args.first()?.node {
if function_name(func)? == "sorted" {
let mut check = Check::new(
CheckKind::UnnecessaryCallAroundSorted(outer.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_call_around_sorted(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
}
}
return Some(check);
let ExprKind::Call { func, .. } = &args.first()?.node else {
return None;
};
if function_name(func)? != "sorted" {
return None;
}
let mut check = Check::new(
CheckKind::UnnecessaryCallAroundSorted(outer.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_call_around_sorted(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
None
Some(check)
}
/// C414
@@ -402,25 +405,28 @@ pub fn unnecessary_double_cast_or_process(
return None;
}
if let ExprKind::Call { func, .. } = &args.first()?.node {
let inner = function_name(func)?;
// Ex) set(tuple(...))
if (outer == "set" || outer == "sorted")
&& (inner == "list" || inner == "tuple" || inner == "reversed" || inner == "sorted")
{
return Some(new_check(inner, outer, location));
}
let ExprKind::Call { func, .. } = &args.first()?.node else {
return None;
};
// Ex) list(tuple(...))
if (outer == "list" || outer == "tuple") && (inner == "list" || inner == "tuple") {
return Some(new_check(inner, outer, location));
}
// Ex) set(set(...))
if outer == "set" && inner == "set" {
return Some(new_check(inner, outer, location));
}
let inner = function_name(func)?;
// Ex) set(tuple(...))
if (outer == "set" || outer == "sorted")
&& (inner == "list" || inner == "tuple" || inner == "reversed" || inner == "sorted")
{
return Some(new_check(inner, outer, location));
}
// Ex) list(tuple(...))
if (outer == "list" || outer == "tuple") && (inner == "list" || inner == "tuple") {
return Some(new_check(inner, outer, location));
}
// Ex) set(set(...))
if outer == "set" && inner == "set" {
return Some(new_check(inner, outer, location));
}
None
}
@@ -435,33 +441,34 @@ pub fn unnecessary_subscript_reversal(
if !["set", "sorted", "reversed"].contains(&id) {
return None;
}
if let ExprKind::Subscript { slice, .. } = &first_arg.node {
if let ExprKind::Slice { lower, upper, step } = &slice.node {
if lower.is_none() && upper.is_none() {
if let Some(step) = step {
if let ExprKind::UnaryOp {
op: Unaryop::USub,
operand,
} = &step.node
{
if let ExprKind::Constant {
value: Constant::Int(val),
..
} = &operand.node
{
if *val == BigInt::from(1) {
return Some(Check::new(
CheckKind::UnnecessarySubscriptReversal(id.to_string()),
location,
));
}
}
}
}
}
}
let ExprKind::Subscript { slice, .. } = &first_arg.node else {
return None;
};
let ExprKind::Slice { lower, upper, step } = &slice.node else {
return None;
};
if lower.is_some() || upper.is_some() {
return None;
}
None
let ExprKind::UnaryOp {
op: Unaryop::USub,
operand,
} = &step.as_ref()?.node else {
return None;
};
let ExprKind::Constant {
value: Constant::Int(val),
..
} = &operand.node else {
return None;
};
if *val != BigInt::from(1) {
return None;
};
Some(Check::new(
CheckKind::UnnecessarySubscriptReversal(id.to_string()),
location,
))
}
/// C416
@@ -497,7 +504,7 @@ pub fn unnecessary_comprehension(
if fix {
match fixes::fix_unnecessary_comprehension(locator, expr) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)

View File

@@ -9,39 +9,41 @@ use crate::flake8_print::checks;
/// T201, T203
pub fn print_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
if let Some(mut check) = checks::print_call(
let Some(mut check) = checks::print_call(
func,
checker.settings.enabled.contains(&CheckCode::T201),
checker.settings.enabled.contains(&CheckCode::T203),
Range::from_located(expr),
) {
if checker.patch(check.kind.code()) {
let context = checker.binding_context();
if matches!(
checker.parents[context.defined_by].node,
StmtKind::Expr { .. }
) else {
return;
};
if checker.patch(check.kind.code()) {
let context = checker.binding_context();
if matches!(
checker.parents[context.defined_by].node,
StmtKind::Expr { .. }
) {
let deleted: Vec<&Stmt> = checker
.deletions
.iter()
.map(|index| checker.parents[*index])
.collect();
match helpers::remove_stmt(
checker.parents[context.defined_by],
context.defined_in.map(|index| checker.parents[index]),
&deleted,
) {
let deleted: Vec<&Stmt> = checker
.deletions
.iter()
.map(|index| checker.parents[*index])
.collect();
match helpers::remove_stmt(
checker.parents[context.defined_by],
context.defined_in.map(|index| checker.parents[index]),
&deleted,
) {
Ok(fix) => {
if fix.content.is_empty() || fix.content == "pass" {
checker.deletions.insert(context.defined_by);
}
check.amend(fix);
Ok(fix) => {
if fix.content.is_empty() || fix.content == "pass" {
checker.deletions.insert(context.defined_by);
}
Err(e) => error!("Failed to remove print call: {}", e),
check.amend(fix);
}
Err(e) => error!("Failed to remove print call: {e}"),
}
}
checker.add_check(check);
}
checker.add_check(check);
}

View File

@@ -14,43 +14,45 @@ use crate::Check;
/// RET501
fn unnecessary_return_none(checker: &mut Checker, stack: &Stack) {
for (stmt, expr) in &stack.returns {
if let Some(expr) = expr {
if matches!(
expr.node,
ExprKind::Constant {
value: Constant::None,
..
}
) {
let mut check =
Check::new(CheckKind::UnnecessaryReturnNone, Range::from_located(stmt));
if checker.patch(&CheckCode::RET501) {
check.amend(Fix::replacement(
"return".to_string(),
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.add_check(check);
let Some(expr) = expr else {
continue;
};
if !matches!(
expr.node,
ExprKind::Constant {
value: Constant::None,
..
}
) {
continue;
}
let mut check = Check::new(CheckKind::UnnecessaryReturnNone, Range::from_located(stmt));
if checker.patch(&CheckCode::RET501) {
check.amend(Fix::replacement(
"return".to_string(),
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.add_check(check);
}
}
/// RET502
fn implicit_return_value(checker: &mut Checker, stack: &Stack) {
for (stmt, expr) in &stack.returns {
if expr.is_none() {
let mut check = Check::new(CheckKind::ImplicitReturnValue, Range::from_located(stmt));
if checker.patch(&CheckCode::RET502) {
check.amend(Fix::replacement(
"return None".to_string(),
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.add_check(check);
if expr.is_some() {
continue;
}
let mut check = Check::new(CheckKind::ImplicitReturnValue, Range::from_located(stmt));
if checker.patch(&CheckCode::RET502) {
check.amend(Fix::replacement(
"return None".to_string(),
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.add_check(check);
}
}
@@ -210,44 +212,45 @@ fn unnecessary_assign(checker: &mut Checker, stack: &Stack, expr: &Expr) {
/// RET505, RET506, RET507, RET508
fn superfluous_else_node(checker: &mut Checker, stmt: &Stmt, branch: Branch) -> bool {
if let StmtKind::If { body, .. } = &stmt.node {
for child in body {
if matches!(child.node, StmtKind::Return { .. }) {
if checker.settings.enabled.contains(&CheckCode::RET505) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseReturn(branch),
Range::from_located(stmt),
));
}
return true;
let StmtKind::If { body, .. } = &stmt.node else {
return false;
};
for child in body {
if matches!(child.node, StmtKind::Return { .. }) {
if checker.settings.enabled.contains(&CheckCode::RET505) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseReturn(branch),
Range::from_located(stmt),
));
}
if matches!(child.node, StmtKind::Break) {
if checker.settings.enabled.contains(&CheckCode::RET508) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseBreak(branch),
Range::from_located(stmt),
));
}
return true;
return true;
}
if matches!(child.node, StmtKind::Break) {
if checker.settings.enabled.contains(&CheckCode::RET508) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseBreak(branch),
Range::from_located(stmt),
));
}
if matches!(child.node, StmtKind::Raise { .. }) {
if checker.settings.enabled.contains(&CheckCode::RET506) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseRaise(branch),
Range::from_located(stmt),
));
}
return true;
return true;
}
if matches!(child.node, StmtKind::Raise { .. }) {
if checker.settings.enabled.contains(&CheckCode::RET506) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseRaise(branch),
Range::from_located(stmt),
));
}
if matches!(child.node, StmtKind::Continue) {
if checker.settings.enabled.contains(&CheckCode::RET507) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseContinue(branch),
Range::from_located(stmt),
));
}
return true;
return true;
}
if matches!(child.node, StmtKind::Continue) {
if checker.settings.enabled.contains(&CheckCode::RET507) {
checker.add_check(Check::new(
CheckKind::SuperfluousElseContinue(branch),
Range::from_located(stmt),
));
}
return true;
}
}
false
@@ -266,12 +269,14 @@ fn superfluous_elif(checker: &mut Checker, stack: &Stack) -> bool {
/// RET505, RET506, RET507, RET508
fn superfluous_else(checker: &mut Checker, stack: &Stack) -> bool {
for stmt in &stack.ifs {
if let StmtKind::If { orelse, .. } = &stmt.node {
if !orelse.is_empty() {
if superfluous_else_node(checker, stmt, Branch::Else) {
return true;
}
}
let StmtKind::If { orelse, .. } = &stmt.node else {
continue;
};
if orelse.is_empty() {
continue;
}
if superfluous_else_node(checker, stmt, Branch::Else) {
return true;
}
}
false

View File

@@ -9,18 +9,16 @@ pub fn banned_relative_import(
level: Option<&usize>,
strictness: &Strictness,
) -> Option<Check> {
if let Some(level) = level {
if level
> &match strictness {
Strictness::All => 0,
Strictness::Parents => 1,
}
{
return Some(Check::new(
CheckKind::BannedRelativeImport(strictness.clone()),
Range::from_located(stmt),
));
}
let strictness_level = match strictness {
Strictness::All => 0,
Strictness::Parents => 1,
};
if level? > &strictness_level {
Some(Check::new(
CheckKind::BannedRelativeImport(strictness.clone()),
Range::from_located(stmt),
))
} else {
None
}
None
}

View File

@@ -42,6 +42,15 @@ pub fn format_import_from(
force_wrap_aliases: bool,
is_first: bool,
) -> String {
if aliases.len() == 1
&& aliases
.iter()
.all(|(alias, _)| alias.name == "*" && alias.asname.is_none())
{
let (single_line, ..) = format_single_line(import_from, comments, aliases, is_first);
return single_line;
}
// We can only inline if: (1) none of the aliases have atop comments, and (3)
// only the last alias (if any) has inline comments.
if aliases
@@ -58,7 +67,7 @@ pub fn format_import_from(
{
let (single_line, import_length) =
format_single_line(import_from, comments, aliases, is_first);
if import_length <= line_length {
if import_length <= line_length || aliases.iter().any(|(alias, _)| alias.name == "*") {
return single_line;
}
}

View File

@@ -10,6 +10,7 @@ use rustpython_ast::{Stmt, StmtKind};
use crate::isort::categorize::{categorize, ImportType};
use crate::isort::comments::Comment;
use crate::isort::sorting::{member_key, module_key};
use crate::isort::track::{Block, Trailer};
use crate::isort::types::{
AliasData, CommentSet, ImportBlock, ImportFromData, Importable, OrderedImportBlock,
};
@@ -191,7 +192,18 @@ fn normalize_imports(imports: Vec<AnnotatedImport>, combine_as_imports: bool) ->
} => {
// Associate the comments with the first alias (best effort).
if let Some(alias) = names.first() {
if alias.asname.is_none() || combine_as_imports {
if alias.name == "*" {
let entry = block
.import_from_star
.entry(ImportFromData { module, level })
.or_default();
for comment in atop {
entry.atop.push(comment.value);
}
for comment in inline {
entry.inline.push(comment.value);
}
} else if alias.asname.is_none() || combine_as_imports {
let entry = &mut block
.import_from
.entry(ImportFromData { module, level })
@@ -225,7 +237,18 @@ fn normalize_imports(imports: Vec<AnnotatedImport>, combine_as_imports: bool) ->
// Create an entry for every alias.
for alias in names {
if alias.asname.is_none() || combine_as_imports {
if alias.name == "*" {
let entry = block
.import_from_star
.entry(ImportFromData { module, level })
.or_default();
for comment in alias.atop {
entry.atop.push(comment.value);
}
for comment in alias.inline {
entry.inline.push(comment.value);
}
} else if alias.asname.is_none() || combine_as_imports {
let entry = block
.import_from
.entry(ImportFromData { module, level })
@@ -323,6 +346,22 @@ fn categorize_imports<'a>(
.import_from_as
.insert((import_from, alias), comments);
}
// Categorize `StmtKind::ImportFrom` (with star).
for (import_from, comments) in block.import_from_star {
let classification = categorize(
&import_from.module_base(),
import_from.level,
src,
known_first_party,
known_third_party,
extra_standard_library,
);
block_by_type
.entry(classification)
.or_default()
.import_from_star
.insert(import_from, comments);
}
block_by_type
}
@@ -367,6 +406,33 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
)
}),
)
.chain(
// Include all star imports.
block
.import_from_star
.into_iter()
.map(|(import_from, comments)| {
(
import_from,
(
CommentSet {
atop: comments.atop,
inline: vec![],
},
FxHashMap::from_iter([(
AliasData {
name: "*",
asname: None,
},
CommentSet {
atop: vec![],
inline: comments.inline,
},
)]),
),
)
}),
)
.map(|(import_from, (comments, aliases))| {
// Within each `StmtKind::ImportFrom`, sort the members.
(
@@ -399,7 +465,7 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
#[allow(clippy::too_many_arguments)]
pub fn format_imports(
block: &[&Stmt],
block: &Block,
comments: Vec<Comment>,
line_length: usize,
src: &[PathBuf],
@@ -409,7 +475,8 @@ pub fn format_imports(
combine_as_imports: bool,
force_wrap_aliases: bool,
) -> String {
let block = annotate_imports(block, comments);
let trailer = &block.trailer;
let block = annotate_imports(&block.imports, comments);
// Normalize imports (i.e., deduplicate, aggregate `from` imports).
let block = normalize_imports(block, combine_as_imports);
@@ -458,6 +525,16 @@ pub fn format_imports(
is_first_statement = false;
}
}
match trailer {
None => {}
Some(Trailer::Sibling) => {
output.append("\n");
}
Some(Trailer::FunctionDef | Trailer::ClassDef) => {
output.append("\n");
output.append("\n");
}
}
output.finish().to_string()
}
@@ -481,11 +558,14 @@ mod tests {
#[test_case(Path::new("fit_line_length_comment.py"))]
#[test_case(Path::new("force_wrap_aliases.py"))]
#[test_case(Path::new("import_from_after_import.py"))]
#[test_case(Path::new("insert_empty_lines.py"))]
#[test_case(Path::new("leading_prefix.py"))]
#[test_case(Path::new("no_reorder_within_section.py"))]
#[test_case(Path::new("no_wrap_star.py"))]
#[test_case(Path::new("order_by_type.py"))]
#[test_case(Path::new("order_relative_imports_by_level.py"))]
#[test_case(Path::new("preserve_comment_order.py"))]
#[test_case(Path::new("preserve_import_star.py"))]
#[test_case(Path::new("preserve_indentation.py"))]
#[test_case(Path::new("reorder_within_section.py"))]
#[test_case(Path::new("separate_first_party_imports.py"))]
@@ -493,7 +573,9 @@ mod tests {
#[test_case(Path::new("separate_local_folder_imports.py"))]
#[test_case(Path::new("separate_third_party_imports.py"))]
#[test_case(Path::new("skip.py"))]
#[test_case(Path::new("skip_file.py"))]
#[test_case(Path::new("sort_similar_imports.py"))]
#[test_case(Path::new("split.py"))]
#[test_case(Path::new("trailing_suffix.py"))]
#[test_case(Path::new("type_comments.py"))]
fn default(path: &Path) -> Result<()> {

View File

@@ -1,11 +1,12 @@
use rustpython_ast::{Location, Stmt};
use textwrap::{dedent, indent};
use crate::ast::helpers::{match_leading_content, match_trailing_content};
use crate::ast::helpers::{count_trailing_lines, match_leading_content, match_trailing_content};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::autofix::Fix;
use crate::checks::CheckKind;
use crate::isort::track::Block;
use crate::isort::{comments, format_imports};
use crate::{Check, Settings, SourceCodeLocator};
@@ -30,13 +31,13 @@ fn extract_indentation(body: &[&Stmt], locator: &SourceCodeLocator) -> String {
/// I001
pub fn check_imports(
body: &[&Stmt],
block: &Block,
locator: &SourceCodeLocator,
settings: &Settings,
autofix: bool,
) -> Option<Check> {
let range = extract_range(body);
let indentation = extract_indentation(body, locator);
let range = extract_range(&block.imports);
let indentation = extract_indentation(&block.imports, locator);
// Extract comments. Take care to grab any inline comments from the last line.
let comments = comments::collect_comments(
@@ -48,12 +49,17 @@ pub fn check_imports(
);
// Special-cases: there's leading or trailing content in the import block.
let has_leading_content = match_leading_content(body.first().unwrap(), locator);
let has_trailing_content = match_trailing_content(body.last().unwrap(), locator);
let has_leading_content = match_leading_content(block.imports.first().unwrap(), locator);
let has_trailing_content = match_trailing_content(block.imports.last().unwrap(), locator);
let num_trailing_lines = if block.trailer.is_none() {
0
} else {
count_trailing_lines(block.imports.last().unwrap(), locator)
};
// Generate the sorted import block.
let expected = format_imports(
body,
block,
comments,
settings.line_length - indentation.len(),
&settings.src,
@@ -81,7 +87,7 @@ pub fn check_imports(
Location::new(range.location.row(), 0)
},
// TODO(charlie): Preserve trailing suffixes. Right now, we strip them.
Location::new(range.end_location.row() + 1, 0),
Location::new(range.end_location.row() + 1 + num_trailing_lines, 0),
));
}
Some(check)
@@ -89,7 +95,7 @@ pub fn check_imports(
// Expand the span the entire range, including leading and trailing space.
let range = Range {
location: Location::new(range.location.row(), 0),
end_location: Location::new(range.end_location.row() + 1, 0),
end_location: Location::new(range.end_location.row() + 1 + num_trailing_lines, 0),
};
let actual = dedent(&locator.slice_source_code_range(&range));
if actual == expected {

View File

@@ -0,0 +1,50 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 1
column: 0
end_location:
row: 3
column: 0
fix:
content: "import a\nimport b\n\n"
location:
row: 1
column: 0
end_location:
row: 3
column: 0
- kind: UnsortedImports
location:
row: 4
column: 0
end_location:
row: 6
column: 0
fix:
content: "import os\nimport sys\n\n\n"
location:
row: 4
column: 0
end_location:
row: 6
column: 0
- kind: UnsortedImports
location:
row: 14
column: 0
end_location:
row: 16
column: 0
fix:
content: "import os\nimport sys\n\n"
location:
row: 14
column: 0
end_location:
row: 16
column: 0

View File

@@ -10,12 +10,12 @@ expression: checks
row: 2
column: 9
fix:
content: "\nimport os\nimport sys\n"
content: "\nimport os\nimport sys\n\n"
location:
row: 1
column: 7
end_location:
row: 3
row: 4
column: 0
- kind: UnsortedImports
location:

View File

@@ -0,0 +1,20 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 1
column: 0
end_location:
row: 2
column: 0
fix:
content: "from .subscription import * # type: ignore # some very long comment explaining why this needs a type ignore\n"
location:
row: 1
column: 0
end_location:
row: 2
column: 0

View File

@@ -0,0 +1,20 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 1
column: 0
end_location:
row: 7
column: 0
fix:
content: "# Above\nfrom some_module import * # Aside\n\n# Above\nfrom some_module import some_class # Aside\nfrom some_other_module import *\nfrom some_other_module import some_class\n"
location:
row: 1
column: 0
end_location:
row: 7
column: 0

View File

@@ -2,6 +2,21 @@
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 7
column: 0
end_location:
row: 8
column: 0
fix:
content: "import sys\n\n"
location:
row: 7
column: 0
end_location:
row: 8
column: 0
- kind: UnsortedImports
location:
row: 9

View File

@@ -0,0 +1,6 @@
---
source: src/isort/mod.rs
expression: checks
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/isort/mod.rs
expression: checks
---
[]

View File

@@ -10,12 +10,12 @@ expression: checks
row: 2
column: 9
fix:
content: "import os\nimport sys\n"
content: "import os\nimport sys\n\n"
location:
row: 1
column: 0
end_location:
row: 3
row: 4
column: 0
- kind: UnsortedImports
location:
@@ -25,7 +25,7 @@ expression: checks
row: 6
column: 13
fix:
content: " import os\n import sys\n"
content: " import os\n import sys\n\n"
location:
row: 5
column: 0

View File

@@ -1,4 +1,3 @@
use nohash_hasher::IntSet;
use rustpython_ast::{
Alias, Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, Excepthandler,
ExcepthandlerKind, Expr, ExprContext, Keyword, MatchCase, Operator, Pattern, Stmt, StmtKind,
@@ -6,34 +5,49 @@ use rustpython_ast::{
};
use crate::ast::visitor::Visitor;
use crate::directives::IsortDirectives;
pub enum Trailer {
Sibling,
ClassDef,
FunctionDef,
}
#[derive(Default)]
pub struct Block<'a> {
pub imports: Vec<&'a Stmt>,
pub trailer: Option<Trailer>,
}
#[derive(Debug)]
pub struct ImportTracker<'a> {
exclusions: &'a IntSet<usize>,
blocks: Vec<Vec<&'a Stmt>>,
blocks: Vec<Block<'a>>,
directives: &'a IsortDirectives,
split_index: usize,
}
impl<'a> ImportTracker<'a> {
pub fn new(exclusions: &'a IntSet<usize>) -> Self {
pub fn new(directives: &'a IsortDirectives) -> Self {
Self {
exclusions,
blocks: vec![vec![]],
directives,
blocks: vec![Block::default()],
split_index: 0,
}
}
fn track_import(&mut self, stmt: &'a Stmt) {
let index = self.blocks.len() - 1;
self.blocks[index].push(stmt);
self.blocks[index].imports.push(stmt);
}
fn finalize(&mut self) {
fn finalize(&mut self, trailer: Option<Trailer>) {
let index = self.blocks.len() - 1;
if !self.blocks[index].is_empty() {
self.blocks.push(vec![]);
if !self.blocks[index].imports.is_empty() {
self.blocks[index].trailer = trailer;
self.blocks.push(Block::default());
}
}
pub fn into_iter(self) -> impl IntoIterator<Item = Vec<&'a Stmt>> {
pub fn into_iter(self) -> impl IntoIterator<Item = Block<'a>> {
self.blocks.into_iter()
}
}
@@ -43,15 +57,37 @@ where
'b: 'a,
{
fn visit_stmt(&mut self, stmt: &'b Stmt) {
// Track manual splits.
while self.split_index < self.directives.splits.len() {
if stmt.location.row() >= self.directives.splits[self.split_index] {
self.finalize(Some(match &stmt.node {
StmtKind::FunctionDef { .. } | StmtKind::AsyncFunctionDef { .. } => {
Trailer::FunctionDef
}
StmtKind::ClassDef { .. } => Trailer::ClassDef,
_ => Trailer::Sibling,
}));
self.split_index += 1;
} else {
break;
}
}
// Track imports.
if matches!(
stmt.node,
StmtKind::Import { .. } | StmtKind::ImportFrom { .. }
) && !self.exclusions.contains(&stmt.location.row())
) && !self.directives.exclusions.contains(&stmt.location.row())
{
self.track_import(stmt);
} else {
self.finalize();
self.finalize(Some(match &stmt.node {
StmtKind::FunctionDef { .. } | StmtKind::AsyncFunctionDef { .. } => {
Trailer::FunctionDef
}
StmtKind::ClassDef { .. } => Trailer::ClassDef,
_ => Trailer::Sibling,
}));
}
// Track scope.
@@ -60,75 +96,75 @@ where
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::AsyncFunctionDef { body, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::ClassDef { body, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::For { body, orelse, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::AsyncFor { body, orelse, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::While { body, orelse, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::If { body, orelse, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::With { body, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::AsyncWith { body, .. } => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
StmtKind::Match { cases, .. } => {
for match_case in cases {
@@ -148,17 +184,17 @@ where
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
for stmt in finalbody {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
_ => {}
}
@@ -187,7 +223,7 @@ where
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
fn visit_arguments(&mut self, _: &'b Arguments) {}
@@ -204,7 +240,7 @@ where
for stmt in &match_case.body {
self.visit_stmt(stmt);
}
self.finalize();
self.finalize(None);
}
fn visit_pattern(&mut self, _: &'b Pattern) {}

View File

@@ -59,6 +59,9 @@ pub struct ImportBlock<'a> {
// Set of (module, level, name, asname), used to track re-exported 'from' imports.
// Ex) `from module import member as member`
pub import_from_as: FxHashMap<(ImportFromData<'a>, AliasData<'a>), CommentSet<'a>>,
// Map from (module, level) to `AliasData`, used to track star imports.
// Ex) `from module import *`
pub import_from_star: FxHashMap<ImportFromData<'a>, CommentSet<'a>>,
}
type AliasDataWithComments<'a> = (AliasData<'a>, CommentSet<'a>);

View File

@@ -89,7 +89,7 @@ pub(crate) fn check_path(
checks.extend(check_imports(
&python_ast,
locator,
&directives.isort_exclusions,
&directives.isort,
settings,
autofix,
));
@@ -151,79 +151,17 @@ pub fn lint_path(
}
// Read the file from disk.
let mut contents = fs::read_file(path)?;
let contents = fs::read_file(path)?;
// Track the number of fixed errors across iterations.
let mut fixed = 0;
// As an escape hatch, bail after 100 iterations.
let mut iterations = 0;
// Continuously autofix until the source code stabilizes.
let messages = loop {
// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(&contents);
// Determine the noqa and isort exclusions.
let directives = directives::extract_directives(
&tokens,
&locator,
directives::Flags::from_settings(settings),
);
// Generate checks.
let checks = check_path(
path,
&contents,
tokens,
&locator,
&directives,
settings,
autofix.into(),
false,
)?;
// Apply autofix.
if matches!(autofix, fixer::Mode::Apply) && iterations < MAX_ITERATIONS {
if let Some((fixed_contents, applied)) = fix_file(&checks, &locator) {
// Count the number of fixed errors.
fixed += applied;
// Store the fixed contents.
contents = fixed_contents.to_string();
// Increment the iteration count.
iterations += 1;
// Re-run the linter pass (by avoiding the break).
continue;
}
}
// Convert to messages.
let filename = path.to_string_lossy().to_string();
break checks
.into_iter()
.map(|check| {
let source = if settings.show_source {
Some(Source::from_check(&check, &locator))
} else {
None
};
Message::from_check(check, filename.clone(), source)
})
.collect::<Vec<_>>();
};
// Lint the file.
let (contents, fixed, messages) = lint(contents, path, settings, autofix)?;
// Re-populate the cache.
cache::set(path, &metadata, settings, autofix, &messages, mode);
// If we applied any fixes, write the contents back to disk.
if fixed > 0 {
write(path, &contents)?;
write(path, contents)?;
}
Ok(Diagnostics { messages, fixed })
@@ -255,7 +193,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
&locator,
&Directives {
noqa_line_for: IntMap::default(),
isort_exclusions: directives.isort_exclusions,
isort: directives.isort,
},
settings,
false,
@@ -297,14 +235,32 @@ pub fn lint_stdin(
autofix: &fixer::Mode,
) -> Result<Diagnostics> {
// Read the file from disk.
let mut contents = stdin.to_string();
let contents = stdin.to_string();
// Lint the file.
let (contents, fixed, messages) = lint(contents, path, settings, autofix)?;
// Write the fixed contents to stdout.
if matches!(autofix, fixer::Mode::Apply) {
io::stdout().write_all(contents.as_bytes())?;
}
Ok(Diagnostics { messages, fixed })
}
fn lint(
mut contents: String,
path: &Path,
settings: &Settings,
autofix: &fixer::Mode,
) -> Result<(String, usize, Vec<Message>)> {
// Track the number of fixed errors across iterations.
let mut fixed = 0;
// As an escape hatch, bail after 100 iterations.
let mut iterations = 0;
// Continuously autofix until the source code stabilizes.
let messages = loop {
// Tokenize once.
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
@@ -363,12 +319,7 @@ pub fn lint_stdin(
.collect();
};
// Write the fixed contents to stdout.
if matches!(autofix, fixer::Mode::Apply) {
io::stdout().write_all(contents.as_bytes())?;
}
Ok(Diagnostics { messages, fixed })
Ok((contents, fixed, messages))
}
#[cfg(test)]

View File

@@ -34,7 +34,7 @@ use anyhow::Result;
use clap::{CommandFactory, Parser};
use colored::Colorize;
use log::{debug, error};
use notify::{raw_watcher, RecursiveMode, Watcher};
use notify::{recommended_watcher, RecursiveMode, Watcher};
#[cfg(not(target_family = "wasm"))]
use rayon::prelude::*;
use ruff::autofix::fixer;
@@ -338,7 +338,7 @@ fn inner_main() -> Result<ExitCode> {
// Configure the file watcher.
let (tx, rx) = channel();
let mut watcher = raw_watcher(tx)?;
let mut watcher = recommended_watcher(tx)?;
for file in &cli.files {
watcher.watch(file, RecursiveMode::Recursive)?;
}
@@ -346,15 +346,19 @@ fn inner_main() -> Result<ExitCode> {
loop {
match rx.recv() {
Ok(e) => {
if let Some(path) = e.path {
if path.to_string_lossy().ends_with(".py") {
printer.clear_screen()?;
printer.write_to_user("File change detected...\n");
let paths = e?.paths;
let py_changed = paths.iter().any(|p| {
p.extension()
.map(|ext| ext.eq_ignore_ascii_case("py"))
.unwrap_or_default()
});
if py_changed {
printer.clear_screen()?;
printer.write_to_user("File change detected...\n");
let messages =
run_once(&cli.files, &settings, cache_enabled, &fixer::Mode::None);
printer.write_continuously(&messages)?;
}
let messages =
run_once(&cli.files, &settings, cache_enabled, &fixer::Mode::None);
printer.write_continuously(&messages)?;
}
}
Err(e) => return Err(e.into()),

View File

@@ -57,7 +57,7 @@ pub fn invalid_first_argument_name_for_class_method(
import_aliases: &FxHashMap<&str, &str>,
settings: &Settings,
) -> Option<Check> {
if matches!(
if !matches!(
helpers::function_type(
scope,
name,
@@ -68,20 +68,21 @@ pub fn invalid_first_argument_name_for_class_method(
),
FunctionType::ClassMethod
) {
if let Some(arg) = args.posonlyargs.first() {
if arg.node.arg != "cls" {
return Some(Check::new(
CheckKind::InvalidFirstArgumentNameForClassMethod,
Range::from_located(arg),
));
}
} else if let Some(arg) = args.args.first() {
if arg.node.arg != "cls" {
return Some(Check::new(
CheckKind::InvalidFirstArgumentNameForClassMethod,
Range::from_located(arg),
));
}
return None;
}
if let Some(arg) = args.posonlyargs.first() {
if arg.node.arg != "cls" {
return Some(Check::new(
CheckKind::InvalidFirstArgumentNameForClassMethod,
Range::from_located(arg),
));
}
} else if let Some(arg) = args.args.first() {
if arg.node.arg != "cls" {
return Some(Check::new(
CheckKind::InvalidFirstArgumentNameForClassMethod,
Range::from_located(arg),
));
}
}
None
@@ -97,7 +98,7 @@ pub fn invalid_first_argument_name_for_method(
import_aliases: &FxHashMap<&str, &str>,
settings: &Settings,
) -> Option<Check> {
if matches!(
if !matches!(
helpers::function_type(
scope,
name,
@@ -108,16 +109,16 @@ pub fn invalid_first_argument_name_for_method(
),
FunctionType::Method
) {
if let Some(arg) = args.args.first() {
if arg.node.arg != "self" {
return Some(Check::new(
CheckKind::InvalidFirstArgumentNameForMethod,
Range::from_located(arg),
));
}
}
return None;
}
None
let arg = args.args.first()?;
if arg.node.arg == "self" {
return None;
}
Some(Check::new(
CheckKind::InvalidFirstArgumentNameForMethod,
Range::from_located(arg),
))
}
/// N807
@@ -125,18 +126,18 @@ pub fn dunder_function_name(scope: &Scope, stmt: &Stmt, name: &str) -> Option<Ch
if matches!(scope.kind, ScopeKind::Class(_)) {
return None;
}
if name.starts_with("__") && name.ends_with("__") {
// Allowed under PEP 562 (https://peps.python.org/pep-0562/).
if matches!(scope.kind, ScopeKind::Module) && (name == "__getattr__" || name == "__dir__") {
return None;
}
return Some(Check::new(
CheckKind::DunderFunctionName,
Range::from_located(stmt),
));
if !(name.starts_with("__") && name.ends_with("__")) {
return None;
}
None
// Allowed under PEP 562 (https://peps.python.org/pep-0562/).
if matches!(scope.kind, ScopeKind::Module) && (name == "__getattr__" || name == "__dir__") {
return None;
}
Some(Check::new(
CheckKind::DunderFunctionName,
Range::from_located(stmt),
))
}
/// N811
@@ -228,19 +229,21 @@ pub fn error_suffix_on_exception_name(
bases: &[Expr],
name: &str,
) -> Option<Check> {
if bases.iter().any(|base| {
if !bases.iter().any(|base| {
if let ExprKind::Name { id, .. } = &base.node {
id == "Exception" || id.ends_with("Error")
} else {
false
}
}) {
if !name.ends_with("Error") {
return Some(Check::new(
CheckKind::ErrorSuffixOnExceptionName(name.to_string()),
Range::from_located(class_def),
));
}
return None;
}
None
if name.ends_with("Error") {
return None;
}
Some(Check::new(
CheckKind::ErrorSuffixOnExceptionName(name.to_string()),
Range::from_located(class_def),
))
}

View File

@@ -28,42 +28,41 @@ pub fn function_type(
import_aliases: &FxHashMap<&str, &str>,
settings: &Settings,
) -> FunctionType {
if let ScopeKind::Class(scope) = &scope.kind {
// Special-case class method, like `__new__`.
if CLASS_METHODS.contains(&name)
|| scope.bases.iter().any(|expr| {
// The class itself extends a known metaclass, so all methods are class methods.
let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
METACLASS_BASES.iter().any(|(module, member)| {
match_call_path(&call_path, module, member, from_imports)
})
})
|| decorator_list.iter().any(|expr| {
// The method is decorated with a class method decorator (like `@classmethod`).
let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
settings.classmethod_decorators.iter().any(|decorator| {
let (module, member) = to_module_and_member(decorator);
match_call_path(&call_path, module, member, from_imports)
})
})
{
FunctionType::ClassMethod
} else if decorator_list.iter().any(|expr| {
// The method is decorated with a static method decorator (like
// `@staticmethod`).
let ScopeKind::Class(scope) = &scope.kind else {
return FunctionType::Function;
};
// Special-case class method, like `__new__`.
if CLASS_METHODS.contains(&name)
|| scope.bases.iter().any(|expr| {
// The class itself extends a known metaclass, so all methods are class methods.
let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
settings.staticmethod_decorators.iter().any(|decorator| {
METACLASS_BASES
.iter()
.any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
})
|| decorator_list.iter().any(|expr| {
// The method is decorated with a class method decorator (like `@classmethod`).
let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
settings.classmethod_decorators.iter().any(|decorator| {
let (module, member) = to_module_and_member(decorator);
match_call_path(&call_path, module, member, from_imports)
})
}) {
FunctionType::StaticMethod
} else {
// It's an instance method.
FunctionType::Method
}
})
{
FunctionType::ClassMethod
} else if decorator_list.iter().any(|expr| {
// The method is decorated with a static method decorator (like
// `@staticmethod`).
let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
settings.staticmethod_decorators.iter().any(|decorator| {
let (module, member) = to_module_and_member(decorator);
match_call_path(&call_path, module, member, from_imports)
})
}) {
FunctionType::StaticMethod
} else {
FunctionType::Function
// It's an instance method.
FunctionType::Method
}
}
@@ -89,16 +88,15 @@ pub fn is_namedtuple_assignment(
stmt: &Stmt,
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
) -> bool {
if let StmtKind::Assign { value, .. } = &stmt.node {
match_call_path(
&collect_call_paths(value),
"collections",
"namedtuple",
from_imports,
)
} else {
false
}
let StmtKind::Assign { value, .. } = &stmt.node else {
return false;
};
match_call_path(
&collect_call_paths(value),
"collections",
"namedtuple",
from_imports,
)
}
#[cfg(test)]

View File

@@ -46,14 +46,13 @@ impl<'a> Printer<'a> {
fn pre_text(&self, diagnostics: &Diagnostics) {
if self.log_level >= &LogLevel::Default {
if diagnostics.fixed > 0 {
println!(
"Found {} error(s) ({} fixed).",
diagnostics.messages.len(),
diagnostics.fixed,
);
} else if !diagnostics.messages.is_empty() {
println!("Found {} error(s).", diagnostics.messages.len());
let fixed = diagnostics.fixed;
let remaining = diagnostics.messages.len();
let total = fixed + remaining;
if fixed > 0 {
println!("Found {total} error(s) ({fixed} fixed, {remaining} remaining).");
} else if remaining > 0 {
println!("Found {remaining} error(s).");
}
}
}

View File

@@ -51,31 +51,32 @@ pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) ->
let mut checks: Vec<Check> = vec![];
for (op, right) in izip!(ops, comparators) {
if matches!(op, Cmpop::Is | Cmpop::IsNot | Cmpop::Eq | Cmpop::NotEq) {
match &right.node {
ExprKind::Call { func, args, .. } => {
if let ExprKind::Name { id, .. } = &func.node {
// Ex) type(False)
if id == "type" {
if let Some(arg) = args.first() {
// Allow comparison for types which are not obvious.
if !matches!(arg.node, ExprKind::Name { .. }) {
checks.push(Check::new(CheckKind::TypeComparison, location));
}
if !matches!(op, Cmpop::Is | Cmpop::IsNot | Cmpop::Eq | Cmpop::NotEq) {
continue;
}
match &right.node {
ExprKind::Call { func, args, .. } => {
if let ExprKind::Name { id, .. } = &func.node {
// Ex) type(False)
if id == "type" {
if let Some(arg) = args.first() {
// Allow comparison for types which are not obvious.
if !matches!(arg.node, ExprKind::Name { .. }) {
checks.push(Check::new(CheckKind::TypeComparison, location));
}
}
}
}
ExprKind::Attribute { value, .. } => {
if let ExprKind::Name { id, .. } = &value.node {
// Ex) types.IntType
if id == "types" {
checks.push(Check::new(CheckKind::TypeComparison, location));
}
}
ExprKind::Attribute { value, .. } => {
if let ExprKind::Name { id, .. } = &value.node {
// Ex) types.IntType
if id == "types" {
checks.push(Check::new(CheckKind::TypeComparison, location));
}
}
_ => {}
}
_ => {}
}
}
@@ -123,42 +124,42 @@ pub fn invalid_escape_sequence(
for (row_offset, line) in body.lines().enumerate() {
let chars: Vec<char> = line.chars().collect();
for col_offset in 0..chars.len() {
if chars[col_offset] == '\\' {
// If the previous character was also a backslash, skip.
if col_offset > 0 && chars[col_offset - 1] == '\\' {
continue;
}
// If we're at the end of the line, skip.
if col_offset == chars.len() - 1 {
continue;
}
// If the next character is a valid escape sequence, skip.
let next_char = chars[col_offset + 1];
if VALID_ESCAPE_SEQUENCES.contains(&next_char) {
continue;
}
// Compute the location of the escape sequence by offsetting the location of the
// string token by the characters we've seen thus far.
let location = if row_offset == 0 {
Location::new(
start.row() + row_offset,
start.column() + prefix.len() + quote.len() + col_offset,
)
} else {
Location::new(start.row() + row_offset, col_offset)
};
let end_location = Location::new(location.row(), location.column() + 2);
checks.push(Check::new(
CheckKind::InvalidEscapeSequence(next_char),
Range {
location,
end_location,
},
));
if chars[col_offset] != '\\' {
continue;
}
// If the previous character was also a backslash, skip.
if col_offset > 0 && chars[col_offset - 1] == '\\' {
continue;
}
// If we're at the end of the line, skip.
if col_offset == chars.len() - 1 {
continue;
}
// If the next character is a valid escape sequence, skip.
let next_char = chars[col_offset + 1];
if VALID_ESCAPE_SEQUENCES.contains(&next_char) {
continue;
}
// Compute the location of the escape sequence by offsetting the location of the
// string token by the characters we've seen thus far.
let col = if row_offset == 0 {
start.column() + prefix.len() + quote.len() + col_offset
} else {
col_offset
};
let location = Location::new(start.row() + row_offset, col);
let end_location = Location::new(location.row(), location.column() + 2);
checks.push(Check::new(
CheckKind::InvalidEscapeSequence(next_char),
Range {
location,
end_location,
},
));
}
}
}

View File

@@ -25,10 +25,7 @@ fn compare(left: &Expr, ops: &[Cmpop], comparators: &[Expr]) -> Option<String> {
);
let mut generator = SourceGenerator::new();
generator.unparse_expr(&cmp, 0);
if let Ok(content) = generator.generate() {
return Some(content);
}
None
generator.generate().ok()
}
/// E711, E712
@@ -322,7 +319,7 @@ pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr,
stmt.end_location.unwrap(),
));
}
Err(e) => error!("Failed to generate fix: {}", e),
Err(e) => error!("Failed to generate fix: {e}"),
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -9,14 +9,7 @@ expression: checks
end_location:
row: 203
column: 7
fix:
content: "\n"
location:
row: 201
column: 0
end_location:
row: 201
column: 0
fix: ~
- kind: BlankLineAfterSummary
location:
row: 210

View File

@@ -1,14 +1,11 @@
use anyhow::{bail, Result};
use libcst_native::{
Codegen, CodegenState, CompOp, Comparison, ComparisonTarget, Expr, Expression, ImportNames,
SmallStatement, Statement,
};
use libcst_native::{Codegen, CodegenState, ImportNames, SmallStatement, Statement};
use rustpython_ast::Stmt;
use crate::ast::types::Range;
use crate::autofix::{helpers, Fix};
use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::{match_expr, match_module};
use crate::cst::matchers::match_module;
use crate::source_code_locator::SourceCodeLocator;
/// Generate a Fix to remove any unused imports from an `import` statement.
@@ -76,61 +73,3 @@ pub fn remove_unused_imports(
))
}
}
fn match_comparison<'a, 'b>(expr: &'a mut Expr<'b>) -> Result<&'a mut Comparison<'b>> {
if let Expression::Comparison(comparison) = &mut expr.value {
Ok(comparison)
} else {
bail!("Expected Expression::Comparison")
}
}
/// Generate a Fix to replace invalid is/is not comparisons with equal/not equal
pub fn fix_invalid_literal_comparison(locator: &SourceCodeLocator, location: Range) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&location);
let mut tree = match_module(&module_text)?;
let mut expr = match_expr(&mut tree)?;
let cmp = match_comparison(expr)?;
let target = cmp
.comparisons
.get(0)
.ok_or_else(|| anyhow::anyhow!("Expected one ComparisonTarget"))?;
let new_operator = match &target.operator {
CompOp::Is {
whitespace_before: b,
whitespace_after: a,
} => CompOp::Equal {
whitespace_before: b.clone(),
whitespace_after: a.clone(),
},
CompOp::IsNot {
whitespace_before: b,
whitespace_after: a,
whitespace_between: _,
} => CompOp::NotEqual {
whitespace_before: b.clone(),
whitespace_after: a.clone(),
},
op => bail!("Unexpected operator: {op:?} (expected CompOp::Is or CompOp::IsNot)"),
};
expr.value = Expression::Comparison(Box::new(Comparison {
left: cmp.left.clone(),
comparisons: vec![ComparisonTarget {
operator: new_operator,
comparator: target.comparator.clone(),
}],
lpar: cmp.lpar.clone(),
rpar: cmp.rpar.clone(),
}));
let mut state = CodegenState::default();
tree.codegen(&mut state);
Ok(Fix::replacement(
state.to_string(),
location.location,
location.end_location,
))
}

View File

@@ -1,11 +1,13 @@
use itertools::izip;
use log::error;
use once_cell::unsync::Lazy;
use rustpython_ast::{Cmpop, Constant, Expr, ExprKind};
use crate::ast::helpers;
use crate::ast::operations::locate_cmpops;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::pyflakes::fixes::fix_invalid_literal_comparison;
fn is_singleton(expr: &Expr) -> bool {
matches!(
@@ -37,22 +39,35 @@ pub fn invalid_literal_comparison(
comparators: &[Expr],
location: Range,
) {
let located = Lazy::new(|| locate_cmpops(&checker.locator.slice_source_code_range(&location)));
let mut left = left;
for (op, right) in izip!(ops, comparators) {
for (index, (op, right)) in izip!(ops, comparators).enumerate() {
if matches!(op, Cmpop::Is | Cmpop::IsNot)
&& (is_constant_non_singleton(left) || is_constant_non_singleton(right))
{
let mut check = Check::new(CheckKind::IsLiteral, location);
if checker.patch(check.kind.code()) {
match fix_invalid_literal_comparison(
checker.locator,
Range {
location: left.location,
end_location: right.end_location.unwrap(),
},
) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to fix invalid comparison: {}", e),
if let Some(located_op) = &located.get(index) {
assert_eq!(&located_op.node, op);
if let Some(content) = match &located_op.node {
Cmpop::Is => Some("==".to_string()),
Cmpop::IsNot => Some("!=".to_string()),
node => {
eprintln!("Failed to fix invalid comparison: {node:?}");
None
}
} {
check.amend(Fix::replacement(
content,
helpers::to_absolute(located_op.location, location.location),
helpers::to_absolute(
located_op.end_location.unwrap(),
location.location,
),
));
}
} else {
eprintln!("Failed to fix invalid comparison due to missing op");
}
}
checker.add_check(check);

View File

@@ -10,13 +10,13 @@ expression: checks
row: 1
column: 13
fix:
content: "x == \"abc\""
content: "=="
location:
row: 1
column: 3
column: 5
end_location:
row: 1
column: 13
column: 7
- kind: IsLiteral
location:
row: 4
@@ -25,26 +25,86 @@ expression: checks
row: 4
column: 15
fix:
content: 123 != y
content: "!="
location:
row: 4
column: 3
column: 7
end_location:
row: 4
column: 15
column: 13
- kind: IsLiteral
location:
row: 7
column: 3
end_location:
row: 7
column: 17
row: 8
column: 13
fix:
content: "\"123\" == x"
content: "!="
location:
row: 7
column: 3
column: 7
end_location:
row: 7
column: 13
row: 8
column: 11
- kind: IsLiteral
location:
row: 11
column: 3
end_location:
row: 11
column: 17
fix:
content: "=="
location:
row: 11
column: 9
end_location:
row: 11
column: 11
- kind: IsLiteral
location:
row: 14
column: 3
end_location:
row: 14
column: 18
fix:
content: "=="
location:
row: 14
column: 14
end_location:
row: 14
column: 16
- kind: IsLiteral
location:
row: 17
column: 3
end_location:
row: 17
column: 20
fix:
content: "=="
location:
row: 17
column: 16
end_location:
row: 17
column: 18
- kind: IsLiteral
location:
row: 20
column: 13
end_location:
row: 20
column: 19
fix:
content: "=="
location:
row: 20
column: 15
end_location:
row: 20
column: 17

View File

@@ -18,29 +18,31 @@ pub fn consider_merging_isinstance(
return;
}
let mut obj_to_types: FxHashMap<String, FxHashSet<String>> = FxHashMap::default();
let mut obj_to_types: FxHashMap<String, (usize, FxHashSet<String>)> = FxHashMap::default();
for value in values {
if let ExprKind::Call { func, args, .. } = &value.node {
if matches!(&func.node, ExprKind::Name { id, .. } if id == "isinstance") {
if let [obj, types] = &args[..] {
obj_to_types
let (num_calls, matches) = obj_to_types
.entry(obj.to_string())
.or_insert_with(FxHashSet::default)
.extend(match &types.node {
ExprKind::Tuple { elts, .. } => {
elts.iter().map(std::string::ToString::to_string).collect()
}
_ => {
vec![types.to_string()]
}
});
.or_insert_with(|| (0, FxHashSet::default()));
*num_calls += 1;
matches.extend(match &types.node {
ExprKind::Tuple { elts, .. } => {
elts.iter().map(std::string::ToString::to_string).collect()
}
_ => {
vec![types.to_string()]
}
});
}
}
}
}
for (obj, types) in obj_to_types {
if types.len() > 1 {
for (obj, (num_calls, types)) in obj_to_types {
if num_calls > 1 && types.len() > 1 {
checker.add_check(Check::new(
CheckKind::ConsiderMergingIsinstance(obj, types.into_iter().sorted().collect()),
Range::from_located(expr),

View File

@@ -30,45 +30,51 @@ pub fn super_args(
// For a `super` invocation to be unnecessary, the first argument needs to match
// the enclosing class, and the second argument needs to match the first
// argument to the enclosing function.
if let [first_arg, second_arg] = args {
// Find the enclosing function definition (if any).
if let Some(StmtKind::FunctionDef {
args: parent_args, ..
}) = parents
.find(|stmt| matches!(stmt.node, StmtKind::FunctionDef { .. }))
.map(|stmt| &stmt.node)
{
// Extract the name of the first argument to the enclosing function.
if let Some(ArgData {
arg: parent_arg, ..
}) = parent_args.args.first().map(|expr| &expr.node)
{
// Find the enclosing class definition (if any).
if let Some(StmtKind::ClassDef {
name: parent_name, ..
}) = parents
.find(|stmt| matches!(stmt.node, StmtKind::ClassDef { .. }))
.map(|stmt| &stmt.node)
{
if let (
ExprKind::Name {
id: first_arg_id, ..
},
ExprKind::Name {
id: second_arg_id, ..
},
) = (&first_arg.node, &second_arg.node)
{
if first_arg_id == parent_name && second_arg_id == parent_arg {
return Some(Check::new(
CheckKind::SuperCallWithParameters,
Range::from_located(expr),
));
}
}
}
}
}
let [first_arg, second_arg] = args else {
return None;
};
// Find the enclosing function definition (if any).
let Some(StmtKind::FunctionDef {
args: parent_args, ..
}) = parents
.find(|stmt| matches!(stmt.node, StmtKind::FunctionDef { .. }))
.map(|stmt| &stmt.node) else {
return None;
};
// Extract the name of the first argument to the enclosing function.
let Some(ArgData {
arg: parent_arg, ..
}) = parent_args.args.first().map(|expr| &expr.node) else {
return None;
};
// Find the enclosing class definition (if any).
let Some(StmtKind::ClassDef {
name: parent_name, ..
}) = parents
.find(|stmt| matches!(stmt.node, StmtKind::ClassDef { .. }))
.map(|stmt| &stmt.node) else {
return None;
};
let (
ExprKind::Name {
id: first_arg_id, ..
},
ExprKind::Name {
id: second_arg_id, ..
},
) = (&first_arg.node, &second_arg.node) else {
return None;
};
if first_arg_id == parent_name && second_arg_id == parent_arg {
return Some(Check::new(
CheckKind::SuperCallWithParameters,
Range::from_located(expr),
));
}
None
@@ -76,40 +82,46 @@ pub fn super_args(
/// UP001
pub fn useless_metaclass_type(targets: &[Expr], value: &Expr, location: Range) -> Option<Check> {
if targets.len() == 1 {
if let ExprKind::Name { id, .. } = targets.first().map(|expr| &expr.node).unwrap() {
if id == "__metaclass__" {
if let ExprKind::Name { id, .. } = &value.node {
if id == "type" {
return Some(Check::new(CheckKind::UselessMetaclassType, location));
}
}
}
}
if targets.len() != 1 {
return None;
}
None
let ExprKind::Name { id, .. } = targets.first().map(|expr| &expr.node).unwrap() else {
return None;
};
if id != "__metaclass__" {
return None;
}
let ExprKind::Name { id, .. } = &value.node else {
return None;
};
if id != "type" {
return None;
}
Some(Check::new(CheckKind::UselessMetaclassType, location))
}
/// UP004
pub fn useless_object_inheritance(name: &str, bases: &[Expr], scope: &Scope) -> Option<Check> {
for expr in bases {
if let ExprKind::Name { id, .. } = &expr.node {
if id == "object" {
match scope.values.get(&id.as_str()) {
None
| Some(Binding {
kind: BindingKind::Builtin,
..
}) => {
return Some(Check::new(
CheckKind::UselessObjectInheritance(name.to_string()),
Range::from_located(expr),
));
}
_ => {}
}
}
let ExprKind::Name { id, .. } = &expr.node else {
continue;
};
if id != "object" {
continue;
}
if !matches!(
scope.values.get(&id.as_str()),
None | Some(Binding {
kind: BindingKind::Builtin,
..
})
) {
continue;
}
return Some(Check::new(
CheckKind::UselessObjectInheritance(name.to_string()),
Range::from_located(expr),
));
}
None
@@ -118,25 +130,23 @@ pub fn useless_object_inheritance(name: &str, bases: &[Expr], scope: &Scope) ->
/// UP003
pub fn type_of_primitive(func: &Expr, args: &[Expr], location: Range) -> Option<Check> {
// Validate the arguments.
if args.len() == 1 {
match &func.node {
ExprKind::Attribute { attr: id, .. } | ExprKind::Name { id, .. } => {
if id == "type" {
if let ExprKind::Constant { value, .. } = &args[0].node {
if let Some(primitive) = Primitive::from_constant(value) {
return Some(Check::new(
CheckKind::TypeOfPrimitive(primitive),
location,
));
}
}
}
}
_ => {}
}
if args.len() != 1 {
return None;
}
None
let (ExprKind::Attribute { attr: id, .. } | ExprKind::Name { id, .. }) = &func.node else {
return None;
};
if id != "type" {
return None;
}
let ExprKind::Constant { value, .. } = &args[0].node else {
return None;
};
let primitive = Primitive::from_constant(value)?;
Some(Check::new(CheckKind::TypeOfPrimitive(primitive), location))
}
/// UP011
@@ -147,46 +157,53 @@ pub fn unnecessary_lru_cache_params(
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
for expr in decorator_list.iter() {
if let ExprKind::Call {
let ExprKind::Call {
func,
args,
keywords,
} = &expr.node
else {
continue;
};
if !(args.is_empty()
&& helpers::match_module_member(
func,
"functools",
"lru_cache",
from_imports,
import_aliases,
))
{
if args.is_empty()
&& helpers::match_module_member(
func,
"functools",
"lru_cache",
from_imports,
import_aliases,
)
{
let range = Range {
location: func.end_location.unwrap(),
end_location: expr.end_location.unwrap(),
};
// Ex) `functools.lru_cache()`
if keywords.is_empty() {
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
// Ex) `functools.lru_cache(maxsize=None)`
if target_version >= PythonVersion::Py39 && keywords.len() == 1 {
let KeywordData { arg, value } = &keywords[0].node;
if arg.as_ref().map(|arg| arg == "maxsize").unwrap_or_default()
&& matches!(
value.node,
ExprKind::Constant {
value: Constant::None,
kind: None,
}
)
{
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
}
}
continue;
}
let range = Range {
location: func.end_location.unwrap(),
end_location: expr.end_location.unwrap(),
};
// Ex) `functools.lru_cache()`
if keywords.is_empty() {
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
// Ex) `functools.lru_cache(maxsize=None)`
if !(target_version >= PythonVersion::Py39 && keywords.len() == 1) {
continue;
}
let KeywordData { arg, value } = &keywords[0].node;
if !(arg.as_ref().map(|arg| arg == "maxsize").unwrap_or_default()
&& matches!(
value.node,
ExprKind::Constant {
value: Constant::None,
kind: None,
}
))
{
continue;
}
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
None
}

View File

@@ -21,7 +21,7 @@ pub fn remove_class_def_base(
bases: &[Expr],
keywords: &[Keyword],
) -> Option<Fix> {
let contents = locator.slice_source_code_at(stmt_at);
let contents = locator.slice_source_code_at(&stmt_at);
// Case 1: `object` is the only base.
if bases.len() == 1 && keywords.is_empty() {

View File

@@ -201,13 +201,13 @@ pub fn convert_named_tuple_functional_to_class(
if checker.patch(check.kind.code()) {
match convert_to_class(stmt, typename, properties, base_class) {
Ok(fix) => check.amend(fix),
Err(err) => error!("Failed to convert `NamedTuple`: {}", err),
Err(err) => error!("Failed to convert `NamedTuple`: {err}"),
}
}
checker.add_check(check);
}
}
Err(err) => error!("Failed to parse defaults: {}", err),
Err(err) => error!("Failed to parse defaults: {err}"),
}
}
}

View File

@@ -229,7 +229,7 @@ pub fn convert_typed_dict_functional_to_class(
match_typed_dict_assign(checker, targets, value)
{
match get_properties_and_total(args, keywords) {
Err(err) => error!("Failed to parse TypedDict: {}", err),
Err(err) => error!("Failed to parse TypedDict: {err}"),
Ok((body, total_keyword)) => {
let mut check = Check::new(
CheckKind::ConvertTypedDictFunctionalToClass(class_name.to_string()),
@@ -238,7 +238,7 @@ pub fn convert_typed_dict_functional_to_class(
if checker.patch(check.kind.code()) {
match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
Ok(fix) => check.amend(fix),
Err(err) => error!("Failed to convert TypedDict: {}", err),
Err(err) => error!("Failed to convert TypedDict: {err}"),
};
}
checker.add_check(check);

View File

@@ -90,7 +90,7 @@ fn create_check(
} else {
match create_remove_param_fix(locator, expr, mode_param) {
Ok(fix) => check.amend(fix),
Err(e) => error!("Failed to remove parameter: {}", e),
Err(e) => error!("Failed to remove parameter: {e}"),
}
}
}

View File

@@ -76,7 +76,7 @@ pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Lo
}
check.amend(fix);
}
Err(e) => error!("Failed to remove __future__ import: {}", e),
Err(e) => error!("Failed to remove __future__ import: {e}"),
}
}
checker.add_check(check);

View File

@@ -30,7 +30,7 @@ pub fn useless_metaclass_type(checker: &mut Checker, stmt: &Stmt, value: &Expr,
}
check.amend(fix);
}
Err(e) => error!("Failed to fix remove metaclass type: {}", e),
Err(e) => error!("Failed to fix remove metaclass type: {e}"),
}
}
checker.add_check(check);

View File

@@ -1623,40 +1623,45 @@ pub fn ambiguous_unicode_character(
for current_char in text.chars() {
// Search for confusing characters.
if let Some(representant) = CONFUSABLES.get(&(current_char as u32)) {
if let Some(representant) = char::from_u32(*representant) {
let location = if row_offset == 0 {
Location::new(start.row() + row_offset, start.column() + col_offset)
} else {
Location::new(start.row() + row_offset, col_offset)
};
let end_location = Location::new(location.row(), location.column() + 1);
let mut check = Check::new(
match context {
Context::String => {
CheckKind::AmbiguousUnicodeCharacterString(current_char, representant)
}
Context::Docstring => CheckKind::AmbiguousUnicodeCharacterDocstring(
current_char,
representant,
),
Context::Comment => {
CheckKind::AmbiguousUnicodeCharacterComment(current_char, representant)
}
},
Range {
location,
end_location,
},
);
if settings.enabled.contains(check.kind.code()) {
if autofix && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::replacement(
representant.to_string(),
if !settings.allowed_confusables.contains(&current_char) {
if let Some(representant) = char::from_u32(*representant) {
let col = if row_offset == 0 {
start.column() + col_offset
} else {
col_offset
};
let location = Location::new(start.row() + row_offset, col);
let end_location = Location::new(location.row(), location.column() + 1);
let mut check = Check::new(
match context {
Context::String => CheckKind::AmbiguousUnicodeCharacterString(
current_char,
representant,
),
Context::Docstring => CheckKind::AmbiguousUnicodeCharacterDocstring(
current_char,
representant,
),
Context::Comment => CheckKind::AmbiguousUnicodeCharacterComment(
current_char,
representant,
),
},
Range {
location,
end_location,
));
},
);
if settings.enabled.contains(check.kind.code()) {
if autofix && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::replacement(
representant.to_string(),
location,
end_location,
));
}
checks.push(check);
}
checks.push(check);
}
}
}

View File

@@ -4,30 +4,31 @@ pub mod checks;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
use test_case::test_case;
use rustc_hash::FxHashSet;
use crate::checks::CheckCode;
use crate::linter::test_path;
use crate::settings;
#[test_case(CheckCode::RUF001, Path::new("RUF001.py"); "RUF001")]
#[test_case(CheckCode::RUF002, Path::new("RUF002.py"); "RUF002")]
#[test_case(CheckCode::RUF003, Path::new("RUF003.py"); "RUF003")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
#[test]
fn confusables() -> Result<()> {
let mut checks = test_path(
Path::new("./resources/test/fixtures/ruff")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
Path::new("./resources/test/fixtures/ruff/confusables.py"),
&settings::Settings {
allowed_confusables: FxHashSet::from_iter(['', 'ρ', '']),
..settings::Settings::for_rules(vec![
CheckCode::RUF001,
CheckCode::RUF002,
CheckCode::RUF003,
])
},
true,
)?;
checks.sort_by_key(|check| check.location);
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(checks);
Ok(())
}

View File

@@ -1,23 +0,0 @@
---
source: src/rules/mod.rs
expression: checks
---
- kind:
AmbiguousUnicodeCharacterString:
- 𝐁
- B
location:
row: 1
column: 5
end_location:
row: 1
column: 6
fix:
content: B
location:
row: 1
column: 5
end_location:
row: 1
column: 6

View File

@@ -1,23 +0,0 @@
---
source: src/rules/mod.rs
expression: checks
---
- kind:
AmbiguousUnicodeCharacterDocstring:
-
- )
location:
row: 5
column: 55
end_location:
row: 5
column: 56
fix:
content: )
location:
row: 5
column: 55
end_location:
row: 5
column: 56

View File

@@ -1,23 +0,0 @@
---
source: src/rules/mod.rs
expression: checks
---
- kind:
AmbiguousUnicodeCharacterComment:
-
- /
location:
row: 6
column: 61
end_location:
row: 6
column: 62
fix:
content: /
location:
row: 6
column: 61
end_location:
row: 6
column: 62

View File

@@ -0,0 +1,59 @@
---
source: src/rules/mod.rs
expression: checks
---
- kind:
AmbiguousUnicodeCharacterString:
- 𝐁
- B
location:
row: 1
column: 5
end_location:
row: 1
column: 6
fix:
content: B
location:
row: 1
column: 5
end_location:
row: 1
column: 6
- kind:
AmbiguousUnicodeCharacterDocstring:
-
- )
location:
row: 6
column: 55
end_location:
row: 6
column: 56
fix:
content: )
location:
row: 6
column: 55
end_location:
row: 6
column: 56
- kind:
AmbiguousUnicodeCharacterComment:
-
- /
location:
row: 7
column: 61
end_location:
row: 7
column: 62
fix:
content: /
location:
row: 7
column: 61
end_location:
row: 7
column: 62

View File

@@ -8,6 +8,7 @@ use anyhow::{anyhow, Result};
use once_cell::sync::Lazy;
use path_absolutize::path_dedot;
use regex::Regex;
use rustc_hash::FxHashSet;
use crate::checks_gen::{CheckCodePrefix, CATEGORIES};
use crate::settings::pyproject::load_options;
@@ -19,6 +20,7 @@ use crate::{
#[derive(Debug)]
pub struct Configuration {
pub allowed_confusables: FxHashSet<char>,
pub dummy_variable_rgx: Regex,
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
@@ -82,9 +84,12 @@ impl Configuration {
) -> Result<Self> {
let options = load_options(pyproject)?;
Ok(Configuration {
allowed_confusables: FxHashSet::from_iter(
options.allowed_confusables.unwrap_or_default(),
),
dummy_variable_rgx: match options.dummy_variable_rgx {
Some(pattern) => Regex::new(&pattern)
.map_err(|e| anyhow!("Invalid dummy-variable-rgx value: {e}"))?,
.map_err(|e| anyhow!("Invalid `dummy-variable-rgx` value: {e}"))?,
None => DEFAULT_DUMMY_VARIABLE_RGX.clone(),
},
src: options.src.map_or_else(

View File

@@ -28,6 +28,7 @@ pub mod types;
#[derive(Debug)]
pub struct Settings {
pub allowed_confusables: FxHashSet<char>,
pub dummy_variable_rgx: Regex,
pub enabled: FxHashSet<CheckCode>,
pub exclude: GlobSet,
@@ -58,6 +59,7 @@ impl Settings {
project_root: Option<&PathBuf>,
) -> Result<Self> {
Ok(Self {
allowed_confusables: config.allowed_confusables,
dummy_variable_rgx: config.dummy_variable_rgx,
enabled: resolve_codes(
&config
@@ -95,6 +97,7 @@ impl Settings {
pub fn for_rule(check_code: CheckCode) -> Self {
Self {
allowed_confusables: FxHashSet::from_iter([]),
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter([check_code.clone()]),
exclude: GlobSet::empty(),
@@ -121,6 +124,7 @@ impl Settings {
pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
Self {
allowed_confusables: FxHashSet::from_iter([]),
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter(check_codes.clone()),
exclude: GlobSet::empty(),
@@ -149,6 +153,9 @@ impl Settings {
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
// Add base properties in alphabetical order.
for confusable in &self.allowed_confusables {
confusable.hash(state);
}
self.dummy_variable_rgx.as_str().hash(state);
for value in &self.enabled {
value.hash(state);

View File

@@ -13,6 +13,7 @@ use crate::{
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
pub allowed_confusables: Option<Vec<char>>,
pub dummy_variable_rgx: Option<String>,
pub exclude: Option<Vec<String>>,
pub extend_exclude: Option<Vec<String>>,

View File

@@ -2,7 +2,7 @@
use std::path::{Path, PathBuf};
use anyhow::Result;
use anyhow::{anyhow, Result};
use common_path::common_path_all;
use log::debug;
use path_absolutize::Absolutize;
@@ -82,7 +82,8 @@ pub fn find_project_root(sources: &[PathBuf]) -> Option<PathBuf> {
pub fn load_options(pyproject: Option<&PathBuf>) -> Result<Options> {
if let Some(pyproject) = pyproject {
Ok(parse_pyproject_toml(pyproject)?
Ok(parse_pyproject_toml(pyproject)
.map_err(|err| anyhow!("Failed to parse `{}`: {}", pyproject.to_string_lossy(), err))?
.tool
.and_then(|tool| tool.ruff)
.unwrap_or_default())
@@ -133,6 +134,7 @@ mod tests {
pyproject.tool,
Some(Tools {
ruff: Some(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -174,6 +176,7 @@ line-length = 79
pyproject.tool,
Some(Tools {
ruff: Some(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -215,6 +218,7 @@ exclude = ["foo.py"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
allowed_confusables: None,
line_length: None,
fix: None,
exclude: Some(vec!["foo.py".to_string()]),
@@ -256,6 +260,7 @@ select = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -298,6 +303,7 @@ ignore = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
allowed_confusables: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
@@ -374,6 +380,7 @@ other-attribute = 1
assert_eq!(
config,
Options {
allowed_confusables: Some(vec!['', 'ρ', '']),
line_length: Some(88),
fix: None,
exclude: None,

View File

@@ -25,7 +25,7 @@ impl<'a> SourceCodeLocator<'a> {
self.rope.get_or_init(|| Rope::from_str(self.contents))
}
pub fn slice_source_code_at(&self, location: Location) -> Cow<'_, str> {
pub fn slice_source_code_at(&self, location: &Location) -> Cow<'_, str> {
let rope = self.get_or_init_rope();
let offset = rope.line_to_char(location.row() - 1) + location.column();
Cow::from(rope.slice(offset..))

View File

@@ -406,8 +406,7 @@ mod tests {
let result = fmt.parse::<CFormatString>();
assert_eq!(
result, expected,
"left = {:#?} \n\n\n right = {:#?}",
result, expected
"left = {result:#?} \n\n\n right = {expected:#?}"
);
}
}