Compare commits

...

20 Commits

Author SHA1 Message Date
Charlie Marsh
695b06ba60 Bump version to 0.0.115 2022-11-12 16:46:26 -05:00
Charlie Marsh
3a2e6926d4 Include flake8-bugbear settings in flake8-to-ruff (#712) 2022-11-12 16:46:12 -05:00
Charlie Marsh
d16c3a1186 Use an FNVHashSet for settings.enabled (#711) 2022-11-12 16:36:56 -05:00
Charlie Marsh
53a2187f02 Run cargo fmt 2022-11-12 16:33:12 -05:00
Charlie Marsh
00b5d1059c Validate that mutable and immutable defaults are imported (#710) 2022-11-12 16:32:21 -05:00
Charlie Marsh
b7acf76aaf Track all import-from members (#709) 2022-11-12 16:10:43 -05:00
Charlie Marsh
8cfc0e5cf5 Use FnvHasher for unordered maps and sets (#708) 2022-11-12 16:09:34 -05:00
Edgar R. M
aa7681f9ad Add extend-immutable-calls setting for B008 (#706) 2022-11-12 15:48:34 -05:00
Charlie Marsh
2493d48725 Add flake8-bandit to flake8-to-ruff (#701) 2022-11-12 12:08:15 -05:00
Edgar R. M
1b422a7f12 Add flake8-bandit (#697) 2022-11-12 12:04:49 -05:00
Charlie Marsh
da051624e4 Add backticks around functools.lru_cache 2022-11-12 11:56:23 -05:00
Charlie Marsh
da9ae6a42a Bump version to 0.0.114 2022-11-12 11:55:18 -05:00
Martin Lehoux
afa59d78bb feat: no unnecessary encode utf8 (#686) 2022-11-12 11:54:36 -05:00
Charlie Marsh
bbc38fea73 Avoid generating empty statement bodies (#700) 2022-11-12 11:39:09 -05:00
Chammika Mannakkara
6bcc11a90f add fixes for __future__ import removal (#682) 2022-11-12 11:28:05 -05:00
Harutaka Kawamura
6f36e5dd25 Implement B019 (#695) 2022-11-12 11:14:03 -05:00
Anders Kaseorg
1d13752eb1 Remove static isort classifications for __main__, disutils (#694) 2022-11-12 09:13:38 -05:00
Anders Kaseorg
394af0dcff Disable default features of chrono (#696) 2022-11-12 09:02:02 -05:00
Charlie Marsh
51cee471a0 Add test case for import-from wrapping 2022-11-11 23:46:19 -05:00
Charlie Marsh
8df3a5437a Take indentation into account for import-from wrapping (#693) 2022-11-11 23:45:04 -05:00
79 changed files with 2440 additions and 288 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.113
rev: v0.0.115
hooks:
- id: ruff

27
Cargo.lock generated
View File

@@ -427,11 +427,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1"
dependencies = [
"iana-time-zone",
"js-sys",
"num-integer",
"num-traits",
"time",
"wasm-bindgen",
"winapi 0.3.9",
]
@@ -933,7 +930,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.113-dev.0"
version = "0.0.115-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -2240,7 +2237,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.113"
version = "0.0.115"
dependencies = [
"anyhow",
"assert_cmd",
@@ -2256,6 +2253,7 @@ dependencies = [
"dirs 4.0.0",
"fern",
"filetime",
"fnv",
"getrandom 0.2.8",
"glob",
"insta",
@@ -2287,7 +2285,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.113"
version = "0.0.115"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -2781,17 +2779,6 @@ dependencies = [
"syn",
]
[[package]]
name = "time"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"libc",
"wasi 0.10.0+wasi-snapshot-preview1",
"winapi 0.3.9",
]
[[package]]
name = "tiny-keccak"
version = "2.0.2"
@@ -3078,12 +3065,6 @@ version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.113"
version = "0.0.115"
edition = "2021"
[lib]
@@ -16,13 +16,14 @@ name = "ruff"
anyhow = { version = "1.0.66" }
bincode = { version = "1.3.3" }
bitflags = { version = "1.3.2" }
chrono = { version = "0.4.21" }
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
clap = { version = "4.0.1", features = ["derive"] }
colored = { version = "2.0.0" }
common-path = { version = "1.0.0" }
dirs = { version = "4.0.0" }
fern = { version = "0.6.1" }
filetime = { version = "0.2.17" }
fnv = { version = "1.0.7" }
glob = { version = "0.3.0" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "a13ec97dd4eb925bde4d426c6e422582793b260c" }

View File

@@ -99,7 +99,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.113
rev: v0.0.115
hooks:
- id: ruff
```
@@ -441,8 +441,9 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| U007 | UsePEP604Annotation | Use `X \| Y` for type annotations | 🛠 |
| U008 | SuperCallWithParameters | Use `super()` instead of `super(__class__, self)` | 🛠 |
| U009 | PEP3120UnnecessaryCodingComment | utf-8 encoding declaration is unnecessary | 🛠 |
| U010 | UnnecessaryFutureImport | Unnessary __future__ import `...` for target Python version | |
| U011 | UnnecessaryLRUCacheParams | Unnessary parameters to functools.lru_cache | 🛠 |
| U010 | UnnecessaryFutureImport | Unnecessary `__future__` import `...` for target Python version | 🛠 |
| U011 | UnnecessaryLRUCacheParams | Unnecessary parameters to `functools.lru_cache` | 🛠 |
| U012 | UnnecessaryEncodeUTF8 | Unnecessary call to `encode` as UTF-8 | 🛠 |
### pep8-naming
@@ -466,6 +467,19 @@ For more, see [pep8-naming](https://pypi.org/project/pep8-naming/0.13.2/) on PyP
| N817 | CamelcaseImportedAsAcronym | Camelcase `...` imported as acronym `...` | |
| N818 | ErrorSuffixOnExceptionName | Exception name `...` should be named with an Error suffix | |
### flake8-bandit
For more, see [flake8-bandit](https://pypi.org/project/flake8-bandit/4.1.1/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| S101 | AssertUsed | Use of `assert` detected | |
| S102 | ExecUsed | Use of `exec` detected | |
| S104 | HardcodedBindAllInterfaces | Possible binding to all interfaces | |
| S105 | HardcodedPasswordString | Possible hardcoded password: `'...'` | |
| S106 | HardcodedPasswordFuncArg | Possible hardcoded password: `'...'` | |
| S107 | HardcodedPasswordDefault | Possible hardcoded password: `'...'` | |
### flake8-comprehensions
For more, see [flake8-comprehensions](https://pypi.org/project/flake8-comprehensions/3.10.1/) on PyPI.
@@ -511,6 +525,7 @@ For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/
| B016 | CannotRaiseLiteral | Cannot raise a literal. Did you intend to return it or raise an Exception? | |
| B017 | NoAssertRaisesException | `assertRaises(Exception):` should be considered evil. | |
| B018 | UselessExpression | Found useless expression. Either assign it to a variable or remove it. | |
| B019 | CachedInstanceMethod | Use of `functools.lru_cache` or `functools.cache` on methods can lead to memory leaks. | |
| B025 | DuplicateTryBlockException | try-except block with duplicate exception `Exception` | |
| B026 | StarArgUnpackingAfterKeywordArg | Star-arg unpacking after a keyword argument is strongly discouraged. | |
@@ -684,9 +699,10 @@ including:
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (20/32)
- [`flake8-bandit`](https://pypi.org/project/flake8-bandit/) (6/40)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (21/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/34)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/34)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis Flake8:
@@ -707,12 +723,13 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-bandit`](https://pypi.org/project/flake8-bandit/) (6/40)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (20/32)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (21/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/34).
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/34).
If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.113"
version = "0.0.115"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.113"
version = "0.0.115"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.113-dev.0"
version = "0.0.115-dev.0"
edition = "2021"
[lib]

View File

@@ -5,7 +5,7 @@ use ruff::checks_gen::CheckCodePrefix;
use ruff::flake8_quotes::settings::Quote;
use ruff::settings::options::Options;
use ruff::settings::pyproject::Pyproject;
use ruff::{flake8_annotations, flake8_quotes, pep8_naming};
use ruff::{flake8_annotations, flake8_bugbear, flake8_quotes, pep8_naming};
use crate::plugin::Plugin;
use crate::{parser, plugin};
@@ -69,6 +69,7 @@ pub fn convert(
// Parse each supported option.
let mut options: Options = Default::default();
let mut flake8_annotations: flake8_annotations::settings::Options = Default::default();
let mut flake8_bugbear: flake8_bugbear::settings::Options = Default::default();
let mut flake8_quotes: flake8_quotes::settings::Options = Default::default();
let mut pep8_naming: pep8_naming::settings::Options = Default::default();
for (key, value) in flake8 {
@@ -109,6 +110,11 @@ pub fn convert(
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
}
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
flake8_bugbear.extend_immutable_calls =
Some(parser::parse_strings(value.as_ref()));
}
// flake8-annotations
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
@@ -179,6 +185,12 @@ pub fn convert(
// Deduplicate and sort.
options.select = Some(Vec::from_iter(select));
options.ignore = Some(Vec::from_iter(ignore));
if flake8_annotations != Default::default() {
options.flake8_annotations = Some(flake8_annotations);
}
if flake8_bugbear != Default::default() {
options.flake8_bugbear = Some(flake8_bugbear);
}
if flake8_quotes != Default::default() {
options.flake8_quotes = Some(flake8_quotes);
}
@@ -224,6 +236,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -257,6 +270,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -290,6 +304,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -323,6 +338,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -356,6 +372,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,
@@ -432,6 +449,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -466,6 +484,7 @@ mod tests {
dummy_variable_rgx: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(flake8_quotes::settings::Quote::Single),
multiline_quotes: None,

View File

@@ -6,6 +6,7 @@ use ruff::checks_gen::CheckCodePrefix;
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
Flake8Bandit,
Flake8Bugbear,
Flake8Builtins,
Flake8Comprehensions,
@@ -22,6 +23,7 @@ impl FromStr for Plugin {
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"flake8-bandit" => Ok(Plugin::Flake8Bandit),
"flake8-bugbear" => Ok(Plugin::Flake8Bugbear),
"flake8-builtins" => Ok(Plugin::Flake8Builtins),
"flake8-comprehensions" => Ok(Plugin::Flake8Comprehensions),
@@ -39,6 +41,7 @@ impl FromStr for Plugin {
impl Plugin {
pub fn default(&self) -> CheckCodePrefix {
match self {
Plugin::Flake8Bandit => CheckCodePrefix::S,
Plugin::Flake8Bugbear => CheckCodePrefix::B,
Plugin::Flake8Builtins => CheckCodePrefix::A,
Plugin::Flake8Comprehensions => CheckCodePrefix::C,
@@ -53,6 +56,7 @@ impl Plugin {
pub fn select(&self, flake8: &HashMap<String, Option<String>>) -> Vec<CheckCodePrefix> {
match self {
Plugin::Flake8Bandit => vec![CheckCodePrefix::S],
Plugin::Flake8Bugbear => vec![CheckCodePrefix::B],
Plugin::Flake8Builtins => vec![CheckCodePrefix::A],
Plugin::Flake8Comprehensions => vec![CheckCodePrefix::C],
@@ -265,6 +269,10 @@ pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> V
"docstring-convention" | "docstring_convention" => {
plugins.insert(Plugin::Flake8Docstrings);
}
// flake8-bugbear
"extend-immutable-calls" | "extend_immutable_calls" => {
plugins.insert(Plugin::Flake8Bugbear);
}
// flake8-builtins
"builtins-ignorelist" | "builtins_ignorelist" => {
plugins.insert(Plugin::Flake8Builtins);
@@ -329,6 +337,7 @@ pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> V
/// `flake8-annotations` is active.
pub fn infer_plugins_from_codes(codes: &BTreeSet<CheckCodePrefix>) -> Vec<Plugin> {
[
Plugin::Flake8Bandit,
Plugin::Flake8Bugbear,
Plugin::Flake8Builtins,
Plugin::Flake8Comprehensions,

View File

@@ -0,0 +1,20 @@
from typing import List
import fastapi
from fastapi import Query
def okay(db=fastapi.Depends(get_db)):
...
def okay(data: List[str] = fastapi.Query(None)):
...
def okay(data: List[str] = Query(None)):
...
def error_due_to_missing_import(data: List[str] = Depends(None)):
...

108
resources/test/fixtures/B019.py vendored Normal file
View File

@@ -0,0 +1,108 @@
"""
Should emit:
B019 - on lines 73, 77, 81, 85, 89, 93, 97, 101
"""
import functools
from functools import cache, cached_property, lru_cache
def some_other_cache():
...
@functools.cache
def compute_func(self, y):
...
class Foo:
def __init__(self, x):
self.x = x
def compute_method(self, y):
...
@some_other_cache
def user_cached_instance_method(self, y):
...
@classmethod
@functools.cache
def cached_classmethod(cls, y):
...
@classmethod
@cache
def other_cached_classmethod(cls, y):
...
@classmethod
@functools.lru_cache
def lru_cached_classmethod(cls, y):
...
@classmethod
@lru_cache
def other_lru_cached_classmethod(cls, y):
...
@staticmethod
@functools.cache
def cached_staticmethod(y):
...
@staticmethod
@cache
def other_cached_staticmethod(y):
...
@staticmethod
@functools.lru_cache
def lru_cached_staticmethod(y):
...
@staticmethod
@lru_cache
def other_lru_cached_staticmethod(y):
...
@functools.cached_property
def some_cached_property(self):
...
@cached_property
def some_other_cached_property(self):
...
# Remaining methods should emit B019
@functools.cache
def cached_instance_method(self, y):
...
@cache
def another_cached_instance_method(self, y):
...
@functools.cache()
def called_cached_instance_method(self, y):
...
@cache()
def another_called_cached_instance_method(self, y):
...
@functools.lru_cache
def lru_cached_instance_method(self, y):
...
@lru_cache
def another_lru_cached_instance_method(self, y):
...
@functools.lru_cache()
def called_lru_cached_instance_method(self, y):
...
@lru_cache()
def another_called_lru_cached_instance_method(self, y):
...

View File

@@ -28,7 +28,6 @@ from blah import ClassA, ClassB, ClassC
if TYPE_CHECKING:
from models import Fruit, Nut, Vegetable
if TYPE_CHECKING:
import shelve
import importlib

11
resources/test/fixtures/S101.py vendored Normal file
View File

@@ -0,0 +1,11 @@
# Error
assert True
def fn():
x = 1
# Error
assert x == 1
# Error
assert x == 2

5
resources/test/fixtures/S102.py vendored Normal file
View File

@@ -0,0 +1,5 @@
def fn():
# Error
exec('x = 2')
exec('y = 3')

19
resources/test/fixtures/S104.py vendored Normal file
View File

@@ -0,0 +1,19 @@
def func(address):
print(address)
# OK
"OK"
# Error
"0.0.0.0"
'0.0.0.0'
# Error
func("0.0.0.0")
def my_func():
x = "0.0.0.0"
print(x)

53
resources/test/fixtures/S105.py vendored Normal file
View File

@@ -0,0 +1,53 @@
d = {}
# OK
safe = "s3cr3t"
password = True
password = safe
password is True
password == 1
d["safe"] = "s3cr3t"
# Errors
password = "s3cr3t"
_pass = "s3cr3t"
passwd = "s3cr3t"
pwd = "s3cr3t"
secret = "s3cr3t"
token = "s3cr3t"
secrete = "s3cr3t"
safe = password = "s3cr3t"
password = safe = "s3cr3t"
d["password"] = "s3cr3t"
d["pass"] = "s3cr3t"
d["passwd"] = "s3cr3t"
d["pwd"] = "s3cr3t"
d["secret"] = "s3cr3t"
d["token"] = "s3cr3t"
d["secrete"] = "s3cr3t"
safe = d["password"] = "s3cr3t"
d["password"] = safe = "s3cr3t"
class MyClass:
password = "s3cr3t"
safe = password
MyClass.password = "s3cr3t"
MyClass._pass = "s3cr3t"
MyClass.passwd = "s3cr3t"
MyClass.pwd = "s3cr3t"
MyClass.secret = "s3cr3t"
MyClass.token = "s3cr3t"
MyClass.secrete = "s3cr3t"
password == "s3cr3t"
_pass == "s3cr3t"
passwd == "s3cr3t"
pwd == "s3cr3t"
secret == "s3cr3t"
token == "s3cr3t"
secrete == "s3cr3t"
password == safe == "s3cr3t"

13
resources/test/fixtures/S106.py vendored Normal file
View File

@@ -0,0 +1,13 @@
def func(pos, password):
pass
string = "Hello World"
# OK
func("s3cr3t")
func(1, password=string)
func(pos="s3cr3t", password=string)
# Error
func(1, password="s3cr3t")

30
resources/test/fixtures/S107.py vendored Normal file
View File

@@ -0,0 +1,30 @@
def ok(first, default="default"):
pass
def default(first, password="default"):
pass
def ok_posonly(first, /, pos, default="posonly"):
pass
def default_posonly(first, /, pos, password="posonly"):
pass
def ok_kwonly(first, *, default="kwonly"):
pass
def default_kwonly(first, *, password="kwonly"):
pass
def ok_all(first, /, pos, default="posonly", *, kwonly="kwonly"):
pass
def default_all(first, /, pos, secret="posonly", *, password="kwonly"):
pass

View File

@@ -1,5 +1,14 @@
from __future__ import annotations, nested_scopes, generators
from __future__ import nested_scopes, generators
from __future__ import with_statement, unicode_literals
from __future__ import absolute_import, division
from __future__ import generator_stop
from __future__ import print_function, generator_stop
from __future__ import invalid_module, generators
if True:
from __future__ import generator_stop
from __future__ import generators
if True:
from __future__ import generator_stop
from __future__ import invalid_module, generators

52
resources/test/fixtures/U012.py vendored Normal file
View File

@@ -0,0 +1,52 @@
# ASCII literals should be replaced by a bytes literal
"foo".encode("utf-8") # b"foo"
"foo".encode("u8") # b"foo"
"foo".encode() # b"foo"
"foo".encode("UTF8") # b"foo"
U"foo".encode("utf-8") # b"foo"
"foo".encode(encoding="utf-8") # b"foo"
"""
Lorem
Ipsum
""".encode(
"utf-8"
)
# b"""
# Lorem
#
# Ipsum
# """
# `encode` on variables should not be processed.
string = "hello there"
string.encode("utf-8")
bar = "bar"
f"foo{bar}".encode("utf-8") # f"foo{bar}".encode()
encoding = "latin"
"foo".encode(encoding)
f"foo{bar}".encode(encoding)
# `encode` with custom args and kwargs should not be processed.
"foo".encode("utf-8", errors="replace")
"foo".encode("utf-8", "replace")
"foo".encode(errors="replace")
"foo".encode(encoding="utf-8", errors="replace")
# `encode` with custom args and kwargs on unicode should not be processed.
"unicode text©".encode("utf-8", errors="replace")
"unicode text©".encode("utf-8", "replace")
"unicode text©".encode(errors="replace")
"unicode text©".encode(encoding="utf-8", errors="replace")
# Unicode literals should only be stripped of default encoding.
"unicode text©".encode("utf-8") # "unicode text©".encode()
"unicode text©".encode()
"unicode text©".encode(encoding="UTF8") # "unicode text©".encode()
r"fo\o".encode("utf-8") # br"fo\o"
u"foo".encode("utf-8") # b"foo"
R"fo\o".encode("utf-8") # br"fo\o"
U"foo".encode("utf-8") # b"foo"
print("foo".encode()) # print(b"foo")

View File

@@ -1 +1,14 @@
from collections import Collection
from line_with_88 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_89 import (
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
)
if indented:
from line_with_88 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_89 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_90 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_91 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_92 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
from line_with_93 import (
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
)

View File

@@ -7,6 +7,9 @@ extend-exclude = [
]
per-file-ignores = { "__init__.py" = ["F401"] }
[tool.ruff.flake8-bugbear]
extend-immutable-calls = ["fastapi.Depends", "fastapi.Query"]
[tool.ruff.flake8-quotes]
inline-quotes = "single"
multiline-quotes = "double"

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.113"
version = "0.0.115"
edition = "2021"
[dependencies]

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeSet;
use fnv::FnvHashSet;
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, StmtKind};
@@ -47,7 +46,7 @@ pub fn match_name_or_attr_from_module(
expr: &Expr,
target: &str,
module: &str,
imports: Option<&BTreeSet<&str>>,
imports: Option<&FnvHashSet<&str>>,
) -> bool {
match &expr.node {
ExprKind::Attribute { value, attr, .. } => match &value.node {

View File

@@ -1,9 +1,10 @@
//! Lint rules based on AST traversal.
use std::collections::{BTreeMap, BTreeSet};
use std::collections::BTreeMap;
use std::ops::Deref;
use std::path::Path;
use fnv::{FnvHashMap, FnvHashSet};
use log::error;
use rustpython_parser::ast::{
Arg, Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKind,
@@ -32,25 +33,11 @@ use crate::settings::Settings;
use crate::source_code_locator::SourceCodeLocator;
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
use crate::{
docstrings, flake8_2020, flake8_annotations, flake8_bugbear, flake8_builtins,
docstrings, flake8_2020, flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins,
flake8_comprehensions, flake8_print, pep8_naming, pycodestyle, pydocstyle, pyflakes, pyupgrade,
};
const GLOBAL_SCOPE_INDEX: usize = 0;
const TRACK_FROM_IMPORTS: [&str; 12] = [
"collections",
"collections.abc",
"contextlib",
"functools",
"re",
"six",
"sys",
"typing",
"typing.io",
"typing.re",
"typing_extensions",
"weakref",
];
pub struct Checker<'a> {
// Input data.
@@ -64,9 +51,9 @@ pub struct Checker<'a> {
definitions: Vec<(Definition<'a>, Visibility)>,
// Edit tracking.
// TODO(charlie): Instead of exposing deletions, wrap in a public API.
pub(crate) deletions: BTreeSet<usize>,
pub(crate) deletions: FnvHashSet<usize>,
// Import tracking.
pub(crate) from_imports: BTreeMap<&'a str, BTreeSet<&'a str>>,
pub(crate) from_imports: FnvHashMap<&'a str, FnvHashSet<&'a str>>,
// Retain all scopes and parent nodes, along with a stack of indexes to track which are active
// at various points in time.
pub(crate) parents: Vec<&'a Stmt>,
@@ -346,6 +333,15 @@ where
if self.settings.enabled.contains(&CheckCode::B018) {
flake8_bugbear::plugins::useless_expression(self, body);
}
if self.settings.enabled.contains(&CheckCode::B019) {
flake8_bugbear::plugins::cached_instance_method(self, decorator_list);
}
if self.settings.enabled.contains(&CheckCode::S107) {
self.add_checks(
flake8_bandit::plugins::hardcoded_password_default(args).into_iter(),
);
}
self.check_builtin_shadowing(name, Range::from_located(stmt), true);
@@ -400,14 +396,14 @@ where
StmtKind::Return { .. } => {
if self.settings.enabled.contains(&CheckCode::F706) {
if let Some(index) = self.scope_stack.last().cloned() {
match self.scopes[index].kind {
ScopeKind::Class(_) | ScopeKind::Module => {
self.add_check(Check::new(
CheckKind::ReturnOutsideFunction,
Range::from_located(stmt),
));
}
_ => {}
if matches!(
self.scopes[index].kind,
ScopeKind::Class(_) | ScopeKind::Module
) {
self.add_check(Check::new(
CheckKind::ReturnOutsideFunction,
Range::from_located(stmt),
));
}
}
}
@@ -580,17 +576,15 @@ where
// references like `from typing import Union`.
if level.map(|level| level == 0).unwrap_or(true) {
if let Some(module) = module {
if TRACK_FROM_IMPORTS.contains(&module.as_str()) {
self.from_imports
.entry(module)
.or_insert_with(BTreeSet::new)
.extend(
names
.iter()
.filter(|alias| alias.node.asname.is_none())
.map(|alias| alias.node.name.as_str()),
)
}
self.from_imports
.entry(module)
.or_insert_with(FnvHashSet::default)
.extend(
names
.iter()
.filter(|alias| alias.node.asname.is_none())
.map(|alias| alias.node.name.as_str()),
)
}
}
@@ -603,6 +597,12 @@ where
}
}
if let Some("__future__") = module.as_deref() {
if self.settings.enabled.contains(&CheckCode::U010) {
pyupgrade::plugins::unnecessary_future_import(self, stmt, names);
}
}
for alias in names {
if let Some("__future__") = module.as_deref() {
let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
@@ -635,14 +635,6 @@ where
}
}
if self.settings.enabled.contains(&CheckCode::U010) {
pyupgrade::plugins::unnecessary_future_import(
self,
stmt,
&alias.node.name,
);
}
if self.settings.enabled.contains(&CheckCode::F404) && !self.futures_allowed
{
self.add_check(Check::new(
@@ -802,6 +794,9 @@ where
if self.settings.enabled.contains(&CheckCode::B011) {
flake8_bugbear::plugins::assert_false(self, stmt, test, msg);
}
if self.settings.enabled.contains(&CheckCode::S101) {
self.add_check(flake8_bandit::plugins::assert_used(stmt));
}
}
StmtKind::With { items, .. } | StmtKind::AsyncWith { items, .. } => {
if self.settings.enabled.contains(&CheckCode::B017) {
@@ -842,6 +837,13 @@ where
if self.settings.enabled.contains(&CheckCode::B003) {
flake8_bugbear::plugins::assignment_to_os_environ(self, targets);
}
if self.settings.enabled.contains(&CheckCode::S105) {
if let Some(check) =
flake8_bandit::plugins::assign_hardcoded_password_string(value, targets)
{
self.add_check(check);
}
}
}
StmtKind::AnnAssign { value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
@@ -1072,6 +1074,10 @@ where
pyupgrade::plugins::super_call_with_parameters(self, expr, func, args);
}
if self.settings.enabled.contains(&CheckCode::U012) {
pyupgrade::plugins::unnecessary_encode_utf8(self, expr, func, args, keywords);
}
// flake8-print
if self.settings.enabled.contains(&CheckCode::T201)
|| self.settings.enabled.contains(&CheckCode::T203)
@@ -1103,6 +1109,16 @@ where
self, args, keywords,
);
}
if self.settings.enabled.contains(&CheckCode::S102) {
if let Some(check) = flake8_bandit::plugins::exec_used(expr, func) {
self.add_check(check);
}
}
if self.settings.enabled.contains(&CheckCode::S106) {
self.add_checks(
flake8_bandit::plugins::hardcoded_password_func_arg(keywords).into_iter(),
);
}
// flake8-comprehensions
if self.settings.enabled.contains(&CheckCode::C400) {
@@ -1451,6 +1467,16 @@ where
{
flake8_2020::plugins::compare(self, left, ops, comparators);
}
if self.settings.enabled.contains(&CheckCode::S105) {
self.add_checks(
flake8_bandit::plugins::compare_to_hardcoded_password_string(
left,
comparators,
)
.into_iter(),
);
}
}
ExprKind::Constant {
value: Constant::Str(value),
@@ -1460,6 +1486,14 @@ where
self.deferred_string_annotations
.push((Range::from_located(expr), value));
}
if self.settings.enabled.contains(&CheckCode::S104) {
if let Some(check) = flake8_bandit::plugins::hardcoded_bind_all_interfaces(
value,
&Range::from_located(expr),
) {
self.add_check(check);
}
}
}
ExprKind::Lambda { args, .. } => {
// Visit the arguments, but avoid the body, which will be deferred.
@@ -2410,16 +2444,20 @@ impl<'a> Checker<'a> {
.iter()
.map(|index| self.parents[*index])
.collect();
let removal_fn = match kind {
match match kind {
ImportKind::Import => pyflakes::fixes::remove_unused_imports,
ImportKind::ImportFrom => pyflakes::fixes::remove_unused_import_froms,
};
match removal_fn(self.locator, &full_names, child, parent, &deleted) {
Ok(fix) => Some(fix),
}(
self.locator, &full_names, child, parent, &deleted
) {
Ok(fix) => {
if fix.patch.content.is_empty() || fix.patch.content == "pass" {
self.deletions.insert(defined_by);
}
Some(fix)
}
Err(e) => {
error!("Failed to fix unused imports: {}", e);
error!("Failed to remove unused imports: {}", e);
None
}
}

View File

@@ -93,6 +93,7 @@ pub enum CheckCode {
B016,
B017,
B018,
B019,
B025,
B026,
// flake8-comprehensions
@@ -155,6 +156,7 @@ pub enum CheckCode {
U009,
U010,
U011,
U012,
// pydocstyle
D100,
D101,
@@ -218,6 +220,13 @@ pub enum CheckCode {
N818,
// isort
I001,
// flake8-bandit
S101,
S102,
S104,
S105,
S106,
S107,
// Ruff
RUF001,
RUF002,
@@ -234,6 +243,7 @@ pub enum CheckCategory {
Pydocstyle,
Pyupgrade,
PEP8Naming,
Flake8Bandit,
Flake8Comprehensions,
Flake8Bugbear,
Flake8Builtins,
@@ -251,6 +261,7 @@ impl CheckCategory {
CheckCategory::Pycodestyle => "pycodestyle",
CheckCategory::Pyflakes => "Pyflakes",
CheckCategory::Isort => "isort",
CheckCategory::Flake8Bandit => "flake8-bandit",
CheckCategory::Flake8Builtins => "flake8-builtins",
CheckCategory::Flake8Bugbear => "flake8-bugbear",
CheckCategory::Flake8Comprehensions => "flake8-comprehensions",
@@ -289,6 +300,7 @@ impl CheckCategory {
CheckCategory::Pyupgrade => Some("https://pypi.org/project/pyupgrade/3.2.0/"),
CheckCategory::Pydocstyle => Some("https://pypi.org/project/pydocstyle/6.1.1/"),
CheckCategory::PEP8Naming => Some("https://pypi.org/project/pep8-naming/0.13.2/"),
CheckCategory::Flake8Bandit => Some("https://pypi.org/project/flake8-bandit/4.1.1/"),
CheckCategory::Ruff => None,
CheckCategory::Meta => None,
}
@@ -380,6 +392,7 @@ pub enum CheckKind {
CannotRaiseLiteral,
NoAssertRaisesException,
UselessExpression,
CachedInstanceMethod,
DuplicateTryBlockException(String),
StarArgUnpackingAfterKeywordArg,
// flake8-comprehensions
@@ -440,8 +453,9 @@ pub enum CheckKind {
UsePEP604Annotation,
SuperCallWithParameters,
PEP3120UnnecessaryCodingComment,
UnnecessaryFutureImport(String),
UnnecessaryFutureImport(Vec<String>),
UnnecessaryLRUCacheParams,
UnnecessaryEncodeUTF8,
// pydocstyle
BlankLineAfterLastSection(String),
BlankLineAfterSection(String),
@@ -505,6 +519,13 @@ pub enum CheckKind {
ErrorSuffixOnExceptionName(String),
// isort
UnsortedImports,
// flake8-bandit
AssertUsed,
ExecUsed,
HardcodedBindAllInterfaces,
HardcodedPasswordString(String),
HardcodedPasswordFuncArg(String),
HardcodedPasswordDefault(String),
// Ruff
AmbiguousUnicodeCharacterString(char, char),
AmbiguousUnicodeCharacterDocstring(char, char),
@@ -610,6 +631,7 @@ impl CheckCode {
CheckCode::B016 => CheckKind::CannotRaiseLiteral,
CheckCode::B017 => CheckKind::NoAssertRaisesException,
CheckCode::B018 => CheckKind::UselessExpression,
CheckCode::B019 => CheckKind::CachedInstanceMethod,
CheckCode::B025 => CheckKind::DuplicateTryBlockException("Exception".to_string()),
CheckCode::B026 => CheckKind::StarArgUnpackingAfterKeywordArg,
// flake8-comprehensions
@@ -686,8 +708,9 @@ impl CheckCode {
CheckCode::U007 => CheckKind::UsePEP604Annotation,
CheckCode::U008 => CheckKind::SuperCallWithParameters,
CheckCode::U009 => CheckKind::PEP3120UnnecessaryCodingComment,
CheckCode::U010 => CheckKind::UnnecessaryFutureImport("...".to_string()),
CheckCode::U010 => CheckKind::UnnecessaryFutureImport(vec!["...".to_string()]),
CheckCode::U011 => CheckKind::UnnecessaryLRUCacheParams,
CheckCode::U012 => CheckKind::UnnecessaryEncodeUTF8,
// pydocstyle
CheckCode::D100 => CheckKind::PublicModule,
CheckCode::D101 => CheckKind::PublicClass,
@@ -767,6 +790,13 @@ impl CheckCode {
CheckCode::N818 => CheckKind::ErrorSuffixOnExceptionName("...".to_string()),
// isort
CheckCode::I001 => CheckKind::UnsortedImports,
// flake8-bandit
CheckCode::S101 => CheckKind::AssertUsed,
CheckCode::S102 => CheckKind::ExecUsed,
CheckCode::S104 => CheckKind::HardcodedBindAllInterfaces,
CheckCode::S105 => CheckKind::HardcodedPasswordString("...".to_string()),
CheckCode::S106 => CheckKind::HardcodedPasswordFuncArg("...".to_string()),
CheckCode::S107 => CheckKind::HardcodedPasswordDefault("...".to_string()),
// Ruff
CheckCode::RUF001 => CheckKind::AmbiguousUnicodeCharacterString('𝐁', 'B'),
CheckCode::RUF002 => CheckKind::AmbiguousUnicodeCharacterDocstring('𝐁', 'B'),
@@ -841,6 +871,7 @@ impl CheckCode {
CheckCode::B016 => CheckCategory::Flake8Bugbear,
CheckCode::B017 => CheckCategory::Flake8Bugbear,
CheckCode::B018 => CheckCategory::Flake8Bugbear,
CheckCode::B019 => CheckCategory::Flake8Bugbear,
CheckCode::B025 => CheckCategory::Flake8Bugbear,
CheckCode::B026 => CheckCategory::Flake8Bugbear,
CheckCode::C400 => CheckCategory::Flake8Comprehensions,
@@ -897,6 +928,7 @@ impl CheckCode {
CheckCode::U009 => CheckCategory::Pyupgrade,
CheckCode::U010 => CheckCategory::Pyupgrade,
CheckCode::U011 => CheckCategory::Pyupgrade,
CheckCode::U012 => CheckCategory::Pyupgrade,
CheckCode::D100 => CheckCategory::Pydocstyle,
CheckCode::D101 => CheckCategory::Pydocstyle,
CheckCode::D102 => CheckCategory::Pydocstyle,
@@ -957,6 +989,12 @@ impl CheckCode {
CheckCode::N817 => CheckCategory::PEP8Naming,
CheckCode::N818 => CheckCategory::PEP8Naming,
CheckCode::I001 => CheckCategory::Isort,
CheckCode::S101 => CheckCategory::Flake8Bandit,
CheckCode::S102 => CheckCategory::Flake8Bandit,
CheckCode::S104 => CheckCategory::Flake8Bandit,
CheckCode::S105 => CheckCategory::Flake8Bandit,
CheckCode::S106 => CheckCategory::Flake8Bandit,
CheckCode::S107 => CheckCategory::Flake8Bandit,
CheckCode::RUF001 => CheckCategory::Ruff,
CheckCode::RUF002 => CheckCategory::Ruff,
CheckCode::RUF003 => CheckCategory::Ruff,
@@ -1036,6 +1074,7 @@ impl CheckKind {
CheckKind::CannotRaiseLiteral => &CheckCode::B016,
CheckKind::NoAssertRaisesException => &CheckCode::B017,
CheckKind::UselessExpression => &CheckCode::B018,
CheckKind::CachedInstanceMethod => &CheckCode::B019,
CheckKind::DuplicateTryBlockException(_) => &CheckCode::B025,
CheckKind::StarArgUnpackingAfterKeywordArg => &CheckCode::B026,
// flake8-comprehensions
@@ -1098,6 +1137,7 @@ impl CheckKind {
CheckKind::PEP3120UnnecessaryCodingComment => &CheckCode::U009,
CheckKind::UnnecessaryFutureImport(_) => &CheckCode::U010,
CheckKind::UnnecessaryLRUCacheParams => &CheckCode::U011,
CheckKind::UnnecessaryEncodeUTF8 => &CheckCode::U012,
// pydocstyle
CheckKind::BlankLineAfterLastSection(_) => &CheckCode::D413,
CheckKind::BlankLineAfterSection(_) => &CheckCode::D410,
@@ -1161,6 +1201,13 @@ impl CheckKind {
CheckKind::ErrorSuffixOnExceptionName(..) => &CheckCode::N818,
// isort
CheckKind::UnsortedImports => &CheckCode::I001,
// flake8-bandit
CheckKind::AssertUsed => &CheckCode::S101,
CheckKind::ExecUsed => &CheckCode::S102,
CheckKind::HardcodedBindAllInterfaces => &CheckCode::S104,
CheckKind::HardcodedPasswordString(..) => &CheckCode::S105,
CheckKind::HardcodedPasswordFuncArg(..) => &CheckCode::S106,
CheckKind::HardcodedPasswordDefault(..) => &CheckCode::S107,
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(..) => &CheckCode::RUF001,
CheckKind::AmbiguousUnicodeCharacterDocstring(..) => &CheckCode::RUF002,
@@ -1388,6 +1435,9 @@ impl CheckKind {
CheckKind::UselessExpression => {
"Found useless expression. Either assign it to a variable or remove it.".to_string()
}
CheckKind::CachedInstanceMethod => "Use of `functools.lru_cache` or `functools.cache` \
on methods can lead to memory leaks."
.to_string(),
CheckKind::DuplicateTryBlockException(name) => {
format!("try-except block with duplicate exception `{name}`")
}
@@ -1587,12 +1637,19 @@ impl CheckKind {
CheckKind::SuperCallWithParameters => {
"Use `super()` instead of `super(__class__, self)`".to_string()
}
CheckKind::UnnecessaryFutureImport(name) => {
format!("Unnessary __future__ import `{name}` for target Python version")
CheckKind::UnnecessaryFutureImport(names) => {
if names.len() == 1 {
let import = &names[0];
format!("Unnecessary `__future__` import `{import}` for target Python version")
} else {
let imports = names.iter().map(|name| format!("`{name}`")).join(", ");
format!("Unnecessary `__future__` imports {imports} for target Python version")
}
}
CheckKind::UnnecessaryLRUCacheParams => {
"Unnessary parameters to functools.lru_cache".to_string()
"Unnecessary parameters to `functools.lru_cache`".to_string()
}
CheckKind::UnnecessaryEncodeUTF8 => "Unnecessary call to `encode` as UTF-8".to_string(),
// pydocstyle
CheckKind::FitsOnOneLine => "One-line docstring should fit on one line".to_string(),
CheckKind::BlankLineAfterSummary => {
@@ -1758,6 +1815,21 @@ impl CheckKind {
}
// isort
CheckKind::UnsortedImports => "Import block is un-sorted or un-formatted".to_string(),
// flake8-bandit
CheckKind::AssertUsed => "Use of `assert` detected".to_string(),
CheckKind::ExecUsed => "Use of `exec` detected".to_string(),
CheckKind::HardcodedBindAllInterfaces => {
"Possible binding to all interfaces".to_string()
}
CheckKind::HardcodedPasswordString(string) => {
format!("Possible hardcoded password: `'{string}'`")
}
CheckKind::HardcodedPasswordFuncArg(string) => {
format!("Possible hardcoded password: `'{string}'`")
}
CheckKind::HardcodedPasswordDefault(string) => {
format!("Possible hardcoded password: `'{string}'`")
}
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(confusable, representant) => {
format!(
@@ -1859,6 +1931,8 @@ impl CheckKind {
| CheckKind::UnnecessaryAbspath
| CheckKind::UnnecessaryCollectionCall(_)
| CheckKind::UnnecessaryComprehension(_)
| CheckKind::UnnecessaryEncodeUTF8
| CheckKind::UnnecessaryFutureImport(_)
| CheckKind::UnnecessaryGeneratorDict
| CheckKind::UnnecessaryGeneratorList
| CheckKind::UnnecessaryGeneratorSet

View File

@@ -53,6 +53,7 @@ pub enum CheckCodePrefix {
B016,
B017,
B018,
B019,
B02,
B025,
B026,
@@ -244,6 +245,15 @@ pub enum CheckCodePrefix {
RUF001,
RUF002,
RUF003,
S,
S1,
S10,
S101,
S102,
S104,
S105,
S106,
S107,
T,
T2,
T20,
@@ -264,6 +274,7 @@ pub enum CheckCodePrefix {
U01,
U010,
U011,
U012,
W,
W2,
W29,
@@ -368,6 +379,7 @@ impl CheckCodePrefix {
CheckCode::B016,
CheckCode::B017,
CheckCode::B018,
CheckCode::B019,
CheckCode::B025,
CheckCode::B026,
],
@@ -388,6 +400,7 @@ impl CheckCodePrefix {
CheckCode::B016,
CheckCode::B017,
CheckCode::B018,
CheckCode::B019,
CheckCode::B025,
CheckCode::B026,
],
@@ -418,6 +431,7 @@ impl CheckCodePrefix {
CheckCode::B016,
CheckCode::B017,
CheckCode::B018,
CheckCode::B019,
],
CheckCodePrefix::B010 => vec![CheckCode::B010],
CheckCodePrefix::B011 => vec![CheckCode::B011],
@@ -427,6 +441,7 @@ impl CheckCodePrefix {
CheckCodePrefix::B016 => vec![CheckCode::B016],
CheckCodePrefix::B017 => vec![CheckCode::B017],
CheckCodePrefix::B018 => vec![CheckCode::B018],
CheckCodePrefix::B019 => vec![CheckCode::B019],
CheckCodePrefix::B02 => vec![CheckCode::B025, CheckCode::B026],
CheckCodePrefix::B025 => vec![CheckCode::B025],
CheckCodePrefix::B026 => vec![CheckCode::B026],
@@ -982,6 +997,36 @@ impl CheckCodePrefix {
CheckCodePrefix::RUF001 => vec![CheckCode::RUF001],
CheckCodePrefix::RUF002 => vec![CheckCode::RUF002],
CheckCodePrefix::RUF003 => vec![CheckCode::RUF003],
CheckCodePrefix::S => vec![
CheckCode::S101,
CheckCode::S102,
CheckCode::S104,
CheckCode::S105,
CheckCode::S106,
CheckCode::S107,
],
CheckCodePrefix::S1 => vec![
CheckCode::S101,
CheckCode::S102,
CheckCode::S104,
CheckCode::S105,
CheckCode::S106,
CheckCode::S107,
],
CheckCodePrefix::S10 => vec![
CheckCode::S101,
CheckCode::S102,
CheckCode::S104,
CheckCode::S105,
CheckCode::S106,
CheckCode::S107,
],
CheckCodePrefix::S101 => vec![CheckCode::S101],
CheckCodePrefix::S102 => vec![CheckCode::S102],
CheckCodePrefix::S104 => vec![CheckCode::S104],
CheckCodePrefix::S105 => vec![CheckCode::S105],
CheckCodePrefix::S106 => vec![CheckCode::S106],
CheckCodePrefix::S107 => vec![CheckCode::S107],
CheckCodePrefix::T => vec![CheckCode::T201, CheckCode::T203],
CheckCodePrefix::T2 => vec![CheckCode::T201, CheckCode::T203],
CheckCodePrefix::T20 => vec![CheckCode::T201, CheckCode::T203],
@@ -999,6 +1044,7 @@ impl CheckCodePrefix {
CheckCode::U009,
CheckCode::U010,
CheckCode::U011,
CheckCode::U012,
],
CheckCodePrefix::U0 => vec![
CheckCode::U001,
@@ -1012,6 +1058,7 @@ impl CheckCodePrefix {
CheckCode::U009,
CheckCode::U010,
CheckCode::U011,
CheckCode::U012,
],
CheckCodePrefix::U00 => vec![
CheckCode::U001,
@@ -1033,9 +1080,10 @@ impl CheckCodePrefix {
CheckCodePrefix::U007 => vec![CheckCode::U007],
CheckCodePrefix::U008 => vec![CheckCode::U008],
CheckCodePrefix::U009 => vec![CheckCode::U009],
CheckCodePrefix::U01 => vec![CheckCode::U010, CheckCode::U011],
CheckCodePrefix::U01 => vec![CheckCode::U010, CheckCode::U011, CheckCode::U012],
CheckCodePrefix::U010 => vec![CheckCode::U010],
CheckCodePrefix::U011 => vec![CheckCode::U011],
CheckCodePrefix::U012 => vec![CheckCode::U012],
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
CheckCodePrefix::W2 => vec![CheckCode::W292],
CheckCodePrefix::W29 => vec![CheckCode::W292],
@@ -1134,6 +1182,7 @@ impl CheckCodePrefix {
CheckCodePrefix::B016 => PrefixSpecificity::Explicit,
CheckCodePrefix::B017 => PrefixSpecificity::Explicit,
CheckCodePrefix::B018 => PrefixSpecificity::Explicit,
CheckCodePrefix::B019 => PrefixSpecificity::Explicit,
CheckCodePrefix::B02 => PrefixSpecificity::Tens,
CheckCodePrefix::B025 => PrefixSpecificity::Explicit,
CheckCodePrefix::B026 => PrefixSpecificity::Explicit,
@@ -1289,6 +1338,15 @@ impl CheckCodePrefix {
CheckCodePrefix::I0 => PrefixSpecificity::Hundreds,
CheckCodePrefix::I00 => PrefixSpecificity::Tens,
CheckCodePrefix::I001 => PrefixSpecificity::Explicit,
CheckCodePrefix::S => PrefixSpecificity::Category,
CheckCodePrefix::S1 => PrefixSpecificity::Hundreds,
CheckCodePrefix::S10 => PrefixSpecificity::Tens,
CheckCodePrefix::S101 => PrefixSpecificity::Explicit,
CheckCodePrefix::S102 => PrefixSpecificity::Explicit,
CheckCodePrefix::S104 => PrefixSpecificity::Explicit,
CheckCodePrefix::S105 => PrefixSpecificity::Explicit,
CheckCodePrefix::S106 => PrefixSpecificity::Explicit,
CheckCodePrefix::S107 => PrefixSpecificity::Explicit,
CheckCodePrefix::M => PrefixSpecificity::Category,
CheckCodePrefix::M0 => PrefixSpecificity::Hundreds,
CheckCodePrefix::M00 => PrefixSpecificity::Tens,
@@ -1345,6 +1403,7 @@ impl CheckCodePrefix {
CheckCodePrefix::U01 => PrefixSpecificity::Tens,
CheckCodePrefix::U010 => PrefixSpecificity::Explicit,
CheckCodePrefix::U011 => PrefixSpecificity::Explicit,
CheckCodePrefix::U012 => PrefixSpecificity::Explicit,
CheckCodePrefix::W => PrefixSpecificity::Category,
CheckCodePrefix::W2 => PrefixSpecificity::Hundreds,
CheckCodePrefix::W29 => PrefixSpecificity::Tens,

View File

@@ -0,0 +1,22 @@
use rustpython_ast::{Constant, Expr, ExprKind};
// Candidate substrings (compared case-insensitively) that mark an identifier
// as password-like.
// NOTE(review): "secrete" looks like a typo-variant of "secret"; presumably
// kept deliberately to catch misspellings — confirm against flake8-bandit.
const PASSWORD_NAMES: [&str; 7] = [
    "password", "pass", "passwd", "pwd", "secret", "token", "secrete",
];
/// Return the contents of `expr` if it is a string-literal constant,
/// `None` for every other expression kind.
pub fn string_literal(expr: &Expr) -> Option<&str> {
    if let ExprKind::Constant {
        value: Constant::Str(string),
        ..
    } = &expr.node
    {
        Some(string)
    } else {
        None
    }
}
// Maybe use regex for this?
/// Return `true` if `string` contains any password-like name from
/// `PASSWORD_NAMES` (case-insensitive substring match).
pub fn matches_password_name(string: &str) -> bool {
    // Lowercase once up front, rather than once per candidate name.
    let lowered = string.to_lowercase();
    PASSWORD_NAMES.iter().any(|name| lowered.contains(name))
}

2
src/flake8_bandit/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
//! Checks ported from the `flake8-bandit` plugin (S1xx codes).

mod helpers;
pub mod plugins;

View File

@@ -0,0 +1,9 @@
use rustpython_ast::{Located, StmtKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
/// S101: use of `assert` detected.
///
/// Unconditionally builds a check spanning `stmt`; presumably only invoked on
/// `assert` statements — confirm at the call site.
pub fn assert_used(stmt: &Located<StmtKind>) -> Check {
    Check::new(CheckKind::AssertUsed, Range::from_located(stmt))
}

View File

@@ -0,0 +1,14 @@
use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
/// S102: use of `exec` detected.
///
/// Returns a check spanning `expr` when `func` is a bare name `exec`.
pub fn exec_used(expr: &Expr, func: &Expr) -> Option<Check> {
    match &func.node {
        ExprKind::Name { id, .. } if id == "exec" => {
            Some(Check::new(CheckKind::ExecUsed, Range::from_located(expr)))
        }
        _ => None,
    }
}

View File

@@ -0,0 +1,11 @@
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
/// S104: possible binding to all interfaces.
///
/// Flags the literal address `"0.0.0.0"`, which binds a socket to every
/// available network interface.
pub fn hardcoded_bind_all_interfaces(value: &str, range: &Range) -> Option<Check> {
    (value == "0.0.0.0").then(|| Check::new(CheckKind::HardcodedBindAllInterfaces, *range))
}

View File

@@ -0,0 +1,51 @@
use rustpython_ast::{ArgData, Arguments, Expr, Located};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
/// Build an S107 check when `arg` has a password-like name and `default` is a
/// string literal; otherwise return `None`.
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Check> {
    // Only string-literal defaults are candidates.
    let string = string_literal(default)?;
    if !matches_password_name(&arg.node.arg) {
        return None;
    }
    Some(Check::new(
        CheckKind::HardcodedPasswordDefault(string.to_string()),
        Range::from_located(default),
    ))
}
/// S107
pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Check> {
let mut checks: Vec<Check> = Vec::new();
let defaults_start =
arguments.posonlyargs.len() + arguments.args.len() - arguments.defaults.len();
for (i, arg) in arguments
.posonlyargs
.iter()
.chain(&arguments.args)
.enumerate()
{
if let Some(i) = i.checked_sub(defaults_start) {
let default = &arguments.defaults[i];
if let Some(check) = check_password_kwarg(arg, default) {
checks.push(check);
}
}
}
let defaults_start = arguments.kwonlyargs.len() - arguments.kw_defaults.len();
for (i, kwarg) in arguments.kwonlyargs.iter().enumerate() {
if let Some(i) = i.checked_sub(defaults_start) {
let default = &arguments.kw_defaults[i];
if let Some(check) = check_password_kwarg(kwarg, default) {
checks.push(check);
}
}
}
checks
}

View File

@@ -0,0 +1,25 @@
use rustpython_ast::Keyword;
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
/// S106: possible hardcoded password passed as a function argument.
///
/// Flags `f(password="...")`-style calls: a string literal bound to a keyword
/// argument whose name looks like a credential.
pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Check> {
    let mut checks: Vec<Check> = Vec::new();
    for keyword in keywords {
        // `arg` is `None` for `**kwargs`-style splats, which carry no name to
        // inspect.
        if let (Some(string), Some(arg)) =
            (string_literal(&keyword.node.value), &keyword.node.arg)
        {
            if matches_password_name(arg) {
                checks.push(Check::new(
                    CheckKind::HardcodedPasswordFuncArg(string.to_string()),
                    Range::from_located(keyword),
                ));
            }
        }
    }
    checks
}

View File

@@ -0,0 +1,58 @@
use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
/// Return `true` if the assignment/comparison target names something
/// password-like: a variable (`password = ...`), an attribute
/// (`obj.password = ...`), or a string subscript (`d["password"] = ...`).
fn is_password_target(target: &Expr) -> bool {
    let name: &str = match &target.node {
        // variable = "s3cr3t"
        ExprKind::Name { id, .. } => id,
        // obj.password = "s3cr3t"
        ExprKind::Attribute { attr, .. } => attr,
        // d["password"] = "s3cr3t"
        ExprKind::Subscript { slice, .. } => {
            if let ExprKind::Constant {
                value: Constant::Str(string),
                ..
            } = &slice.node
            {
                string
            } else {
                return false;
            }
        }
        _ => return false,
    };
    matches_password_name(name)
}
/// S105: possible hardcoded password in a comparison.
///
/// Flags comparisons of a password-like target against string literals
/// (`password == "s3cr3t"`), emitting one check per literal comparator.
pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -> Vec<Check> {
    // The comparison target is fixed across all comparators; check it once
    // instead of once per comparator.
    if !is_password_target(left) {
        return vec![];
    }
    comparators
        .iter()
        .filter_map(|comp| {
            string_literal(comp).map(|string| {
                Check::new(
                    CheckKind::HardcodedPasswordString(string.to_string()),
                    Range::from_located(comp),
                )
            })
        })
        .collect()
}
/// S105
pub fn assign_hardcoded_password_string(value: &Expr, targets: &Vec<Expr>) -> Option<Check> {
if let Some(string) = string_literal(value) {
for target in targets {
if is_password_target(target) {
return Some(Check::new(
CheckKind::HardcodedPasswordString(string.to_string()),
Range::from_located(value),
));
}
}
}
None
}

View File

@@ -0,0 +1,15 @@
//! Re-exports of the individual flake8-bandit check implementations (S101,
//! S102, S104, S105, S106, S107).

pub use assert_used::assert_used;
pub use exec_used::exec_used;
pub use hardcoded_bind_all_interfaces::hardcoded_bind_all_interfaces;
pub use hardcoded_password_default::hardcoded_password_default;
pub use hardcoded_password_func_arg::hardcoded_password_func_arg;
pub use hardcoded_password_string::{
    assign_hardcoded_password_string, compare_to_hardcoded_password_string,
};

mod assert_used;
mod exec_used;
mod hardcoded_bind_all_interfaces;
mod hardcoded_password_default;
mod hardcoded_password_func_arg;
mod hardcoded_password_string;

View File

@@ -1,2 +1,36 @@
mod constants;
pub mod plugins;
pub mod settings;
#[cfg(test)]
mod tests {
    use std::path::Path;

    use anyhow::Result;

    use crate::autofix::fixer;
    use crate::checks::CheckCode;
    use crate::linter::test_path;
    use crate::{flake8_bugbear, Settings};

    /// B008 with `extend-immutable-calls`: callables the user marks immutable
    /// (here `fastapi.Depends` / `fastapi.Query`) should not be flagged as
    /// function-call argument defaults. Results are snapshot-tested.
    #[test]
    fn extend_immutable_calls() -> Result<()> {
        let snapshot = "extend_immutable_calls".to_string();
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/B008_extended.py"),
            &Settings {
                flake8_bugbear: flake8_bugbear::settings::Settings {
                    extend_immutable_calls: vec![
                        "fastapi.Depends".to_string(),
                        "fastapi.Query".to_string(),
                    ],
                },
                ..Settings::for_rules(vec![CheckCode::B008])
            },
            &fixer::Mode::Generate,
        )?;
        // Sort for a stable snapshot ordering.
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(snapshot, checks);
        Ok(())
    }
}

View File

@@ -0,0 +1,49 @@
use rustpython_ast::{Expr, ExprKind};
use crate::ast::helpers::{compose_call_path, match_name_or_attr_from_module};
use crate::ast::types::{Range, ScopeKind};
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// Return `true` if `expr` refers to `functools.lru_cache` or
/// `functools.cache`, either fully qualified or as a bare name imported via
/// `from functools import ...`.
fn is_cache_func(checker: &Checker, expr: &Expr) -> bool {
    let functools_members = checker.from_imports.get("functools");
    ["lru_cache", "cache"].iter().copied().any(|member| {
        match_name_or_attr_from_module(expr, member, "functools", functools_members)
    })
}
/// B019: `functools.lru_cache`/`functools.cache` on a method.
///
/// Flags cache decorators on functions defined inside a class body; per the
/// check message, caching methods this way "can lead to memory leaks".
pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
    // Only methods are affected, so only act inside a class scope.
    if matches!(checker.current_scope().kind, ScopeKind::Class(_)) {
        for decorator in decorator_list {
            // TODO(charlie): This should take into account `classmethod-decorators` and
            // `staticmethod-decorators`.
            if let Some(decorator_path) = compose_call_path(decorator) {
                if decorator_path == "classmethod" || decorator_path == "staticmethod" {
                    // A class/static method is exempt: skip the entire
                    // decorator list for this function.
                    return;
                }
            }
        }
        for decorator in decorator_list {
            // Handle both the bare form (`@lru_cache`) and the called form
            // (`@lru_cache(maxsize=...)`) by unwrapping a `Call` to its callee.
            if is_cache_func(
                checker,
                match &decorator.node {
                    ExprKind::Call { func, .. } => func,
                    _ => decorator,
                },
            ) {
                checker.add_check(Check::new(
                    CheckKind::CachedInstanceMethod,
                    Range::from_located(decorator),
                ));
            }
        }
    }
}

View File

@@ -1,3 +1,4 @@
use fnv::{FnvHashMap, FnvHashSet};
use rustpython_ast::{Arguments, Constant, Expr, ExprKind};
use crate::ast::helpers::compose_call_path;
@@ -8,38 +9,69 @@ use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::flake8_bugbear::plugins::mutable_argument_default::is_mutable_func;
// TODO(charlie): Verify imports for each of the imported members.
const IMMUTABLE_FUNCS: [&str; 11] = [
const IMMUTABLE_FUNCS: [&str; 7] = [
"tuple",
"frozenset",
"operator.attrgetter",
"operator.itemgetter",
"operator.methodcaller",
"attrgetter",
"itemgetter",
"methodcaller",
"types.MappingProxyType",
"MappingProxyType",
"re.compile",
];
fn is_immutable_func(expr: &Expr) -> bool {
compose_call_path(expr).map_or_else(|| false, |func| IMMUTABLE_FUNCS.contains(&func.as_str()))
/// Return `true` if `expr` calls a function known to produce an immutable
/// value (and therefore safe as an argument default), including any
/// user-configured `extend_immutable_calls` entries.
///
/// Matches either the full call path (`operator.methodcaller`) or a bare
/// member name that `from_imports` shows was imported from the expected
/// module (`methodcaller` after `from operator import methodcaller`).
fn is_immutable_func(
    expr: &Expr,
    extend_immutable_calls: &[&str],
    from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
) -> bool {
    compose_call_path(expr).map_or_else(
        || false,
        |call_path| {
            // It matches the call path exactly (`operator.methodcaller`).
            for target in IMMUTABLE_FUNCS.iter().chain(extend_immutable_calls) {
                if &call_path == target {
                    return true;
                }
            }
            // It matches the member name, and was imported from that module (`methodcaller`
            // following `from operator import methodcaller`).
            if !call_path.contains('.') {
                for target in IMMUTABLE_FUNCS.iter().chain(extend_immutable_calls) {
                    // Split `module.member`; bare targets (no dot) yield no
                    // module and are skipped here (handled by the exact match
                    // above).
                    let mut splitter = target.rsplit('.');
                    if let (Some(member), Some(module)) = (splitter.next(), splitter.next()) {
                        if call_path == member
                            && from_imports
                                .get(module)
                                .map(|module| module.contains(member))
                                .unwrap_or(false)
                        {
                            return true;
                        }
                    }
                }
            }
            false
        },
    )
}
struct ArgumentDefaultVisitor {
struct ArgumentDefaultVisitor<'a> {
checks: Vec<(CheckKind, Range)>,
extend_immutable_calls: &'a [&'a str],
from_imports: &'a FnvHashMap<&'a str, FnvHashSet<&'a str>>,
}
impl<'a, 'b> Visitor<'b> for ArgumentDefaultVisitor
impl<'a, 'b> Visitor<'b> for ArgumentDefaultVisitor<'b>
where
'b: 'a,
{
fn visit_expr(&mut self, expr: &'a Expr) {
fn visit_expr(&mut self, expr: &'b Expr) {
match &expr.node {
ExprKind::Call { func, args, .. } => {
if !is_mutable_func(func)
&& !is_immutable_func(func)
if !is_mutable_func(func, self.from_imports)
&& !is_immutable_func(func, self.extend_immutable_calls, self.from_imports)
&& !is_nan_or_infinity(func, args)
{
self.checks.push((
@@ -83,7 +115,18 @@ fn is_nan_or_infinity(expr: &Expr, args: &[Expr]) -> bool {
/// B008
pub fn function_call_argument_default(checker: &mut Checker, arguments: &Arguments) {
let mut visitor = ArgumentDefaultVisitor { checks: vec![] };
let extend_immutable_cells: Vec<&str> = checker
.settings
.flake8_bugbear
.extend_immutable_calls
.iter()
.map(|s| s.as_str())
.collect();
let mut visitor = ArgumentDefaultVisitor {
checks: vec![],
extend_immutable_calls: &extend_immutable_cells,
from_imports: &checker.from_imports,
};
for expr in arguments
.defaults
.iter()

View File

@@ -1,6 +1,7 @@
pub use assert_false::assert_false;
pub use assert_raises_exception::assert_raises_exception;
pub use assignment_to_os_environ::assignment_to_os_environ;
pub use cached_instance_method::cached_instance_method;
pub use cannot_raise_literal::cannot_raise_literal;
pub use duplicate_exceptions::{duplicate_exceptions, duplicate_handler_exceptions};
pub use function_call_argument_default::function_call_argument_default;
@@ -19,6 +20,7 @@ pub use useless_expression::useless_expression;
mod assert_false;
mod assert_raises_exception;
mod assignment_to_os_environ;
mod cached_instance_method;
mod cannot_raise_literal;
mod duplicate_exceptions;
mod function_call_argument_default;

View File

@@ -1,33 +1,53 @@
use fnv::{FnvHashMap, FnvHashSet};
use rustpython_ast::{Arguments, Expr, ExprKind};
use crate::ast::helpers::compose_call_path;
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
// TODO(charlie): Verify imports for each of the imported members.
pub fn is_mutable_func(expr: &Expr) -> bool {
match &expr.node {
ExprKind::Name { id, .. }
if id == "dict"
|| id == "list"
|| id == "set"
|| id == "Counter"
|| id == "OrderedDict"
|| id == "defaultdict"
|| id == "deque" =>
{
true
}
ExprKind::Attribute { value, attr, .. }
if (attr == "Counter"
|| attr == "OrderedDict"
|| attr == "defaultdict"
|| attr == "deque") =>
{
matches!(&value.node, ExprKind::Name { id, .. } if id == "collections")
}
_ => false,
}
// Call paths that construct a mutable value, which is unsafe as a function
// argument default (B006).
const MUTABLE_FUNCS: [&str; 7] = [
    "dict",
    "list",
    "set",
    "collections.Counter",
    "collections.OrderedDict",
    "collections.defaultdict",
    "collections.deque",
];

/// Return `true` if `expr` calls a known mutable-constructing function.
///
/// Matches either the full call path (`collections.Counter`) or a bare member
/// name that `from_imports` shows was imported from the expected module
/// (`Counter` after `from collections import Counter`).
pub fn is_mutable_func(expr: &Expr, from_imports: &FnvHashMap<&str, FnvHashSet<&str>>) -> bool {
    compose_call_path(expr).map_or_else(
        || false,
        |call_path| {
            // It matches the call path exactly (`collections.Counter`).
            for target in MUTABLE_FUNCS {
                if call_path == target {
                    return true;
                }
            }
            // It matches the member name, and was imported from that module (`Counter`
            // following `from collections import Counter`).
            if !call_path.contains('.') {
                for target in MUTABLE_FUNCS {
                    // Split `module.member`; builtins like `dict` have no
                    // module part and are skipped here (caught by the exact
                    // match above).
                    let mut splitter = target.rsplit('.');
                    if let (Some(member), Some(module)) = (splitter.next(), splitter.next()) {
                        if call_path == member
                            && from_imports
                                .get(module)
                                .map(|module| module.contains(member))
                                .unwrap_or(false)
                        {
                            return true;
                        }
                    }
                }
            }
            false
        },
    )
}
/// B006
@@ -50,7 +70,7 @@ pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
));
}
ExprKind::Call { func, .. } => {
if is_mutable_func(func) {
if is_mutable_func(func, &checker.from_imports) {
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(expr),

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeMap;
use fnv::FnvHashMap;
use rustpython_ast::{Expr, ExprKind, Stmt};
use crate::ast::types::Range;
@@ -12,7 +11,7 @@ use crate::checks::{Check, CheckKind};
/// Identify all `ExprKind::Name` nodes in an AST.
struct NameFinder<'a> {
/// A map from identifier to defining expression.
names: BTreeMap<&'a str, &'a Expr>,
names: FnvHashMap<&'a str, &'a Expr>,
}
impl NameFinder<'_> {

View File

@@ -0,0 +1,22 @@
//! Settings for the `flake8-bugbear` plugin.
//!
//! (The header previously said `pep8-naming`; `extend_immutable_calls` is the
//! flake8-bugbear `B008` option — see the `extend_immutable_calls` test, which
//! exercises it via `flake8_bugbear::settings::Settings`.)

use serde::{Deserialize, Serialize};

/// User-facing options as deserialized from configuration (all optional).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    // Additional callable paths (e.g. `fastapi.Depends`) to treat as immutable
    // when checking function-call argument defaults (B008).
    pub extend_immutable_calls: Option<Vec<String>>,
}

/// Resolved settings with defaults applied.
#[derive(Debug, Hash, Default)]
pub struct Settings {
    // Extra call paths considered immutable for B008; empty by default.
    pub extend_immutable_calls: Vec<String>,
}

impl Settings {
    /// Convert user `Options` into concrete `Settings`, substituting an empty
    /// list when the option is unset.
    pub fn from_options(options: Options) -> Self {
        Self {
            extend_immutable_calls: options.extend_immutable_calls.unwrap_or_default(),
        }
    }
}

View File

@@ -0,0 +1,13 @@
---
source: src/flake8_bugbear/mod.rs
expression: checks
---
- kind: FunctionCallArgumentDefault
location:
row: 19
column: 50
end_location:
row: 19
column: 63
fix: ~

View File

@@ -7,6 +7,7 @@ use crate::check_ast::Checker;
use crate::checks::CheckCode;
use crate::flake8_print::checks;
/// T201, T203
pub fn print_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
if let Some(mut check) = checks::print_call(
expr,
@@ -26,7 +27,6 @@ pub fn print_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
.iter()
.map(|index| checker.parents[*index])
.collect();
match helpers::remove_stmt(
checker.parents[context.defined_by],
context.defined_in.map(|index| checker.parents[index]),
@@ -38,7 +38,7 @@ pub fn print_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
}
check.amend(fix)
}
Err(e) => error!("Failed to fix unused imports: {}", e),
Err(e) => error!("Failed to remove print call: {}", e),
}
}
}

View File

@@ -45,9 +45,6 @@ pub fn categorize(
static STATIC_CLASSIFICATIONS: Lazy<BTreeMap<&'static str, ImportType>> = Lazy::new(|| {
BTreeMap::from([
("__future__", ImportType::Future),
("__main__", ImportType::FirstParty),
// Force `disutils` to be considered third-party.
("disutils", ImportType::ThirdParty),
// Relative imports (e.g., `from . import module`).
("", ImportType::FirstParty),
])

View File

@@ -64,7 +64,7 @@ pub fn check_imports(
// Generate the sorted import block.
let expected = format_imports(
body,
&settings.line_length,
&(settings.line_length - indentation.len()),
&settings.src,
&settings.isort.known_first_party,
&settings.isort.known_third_party,

View File

@@ -2,5 +2,21 @@
source: src/isort/mod.rs
expression: checks
---
[]
- kind: UnsortedImports
location:
row: 7
column: 0
end_location:
row: 15
column: 0
fix:
patch:
content: " from line_with_88 import aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n from line_with_89 import (\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,\n )\n from line_with_90 import (\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,\n )\n from line_with_91 import (\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,\n )\n from line_with_92 import (\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,\n )\n from line_with_93 import (\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,\n )\n"
location:
row: 7
column: 0
end_location:
row: 15
column: 0
applied: false

View File

@@ -29,7 +29,8 @@ mod directives;
mod docstrings;
mod flake8_2020;
pub mod flake8_annotations;
mod flake8_bugbear;
pub mod flake8_bandit;
pub mod flake8_bugbear;
mod flake8_builtins;
mod flake8_comprehensions;
mod flake8_print;

View File

@@ -342,6 +342,7 @@ mod tests {
#[test_case(CheckCode::B016, Path::new("B016.py"); "B016")]
#[test_case(CheckCode::B017, Path::new("B017.py"); "B017")]
#[test_case(CheckCode::B018, Path::new("B018.py"); "B018")]
#[test_case(CheckCode::B019, Path::new("B019.py"); "B019")]
#[test_case(CheckCode::B025, Path::new("B025.py"); "B025")]
#[test_case(CheckCode::B026, Path::new("B026.py"); "B026")]
#[test_case(CheckCode::C400, Path::new("C400.py"); "C400")]
@@ -468,6 +469,12 @@ mod tests {
#[test_case(CheckCode::N816, Path::new("N816.py"); "N816")]
#[test_case(CheckCode::N817, Path::new("N817.py"); "N817")]
#[test_case(CheckCode::N818, Path::new("N818.py"); "N818")]
#[test_case(CheckCode::S101, Path::new("S101.py"); "S101")]
#[test_case(CheckCode::S102, Path::new("S102.py"); "S102")]
#[test_case(CheckCode::S104, Path::new("S104.py"); "S104")]
#[test_case(CheckCode::S105, Path::new("S105.py"); "S105")]
#[test_case(CheckCode::S106, Path::new("S106.py"); "S106")]
#[test_case(CheckCode::S107, Path::new("S107.py"); "S107")]
#[test_case(CheckCode::T201, Path::new("T201.py"); "T201")]
#[test_case(CheckCode::T203, Path::new("T203.py"); "T203")]
#[test_case(CheckCode::U001, Path::new("U001.py"); "U001")]
@@ -485,6 +492,7 @@ mod tests {
#[test_case(CheckCode::U010, Path::new("U010.py"); "U010")]
#[test_case(CheckCode::U011, Path::new("U011_0.py"); "U011_0")]
#[test_case(CheckCode::U011, Path::new("U011_1.py"); "U011_1")]
#[test_case(CheckCode::U012, Path::new("U012.py"); "U012")]
#[test_case(CheckCode::W292, Path::new("W292_0.py"); "W292_0")]
#[test_case(CheckCode::W292, Path::new("W292_1.py"); "W292_1")]
#[test_case(CheckCode::W292, Path::new("W292_2.py"); "W292_2")]

View File

@@ -1,11 +1,10 @@
use std::collections::{BTreeMap, BTreeSet};
use fnv::{FnvHashMap, FnvHashSet};
use once_cell::sync::Lazy;
use rustpython_ast::{Expr, ExprKind};
// See: https://pypi.org/project/typing-extensions/
static TYPING_EXTENSIONS: Lazy<BTreeSet<&'static str>> = Lazy::new(|| {
BTreeSet::from([
static TYPING_EXTENSIONS: Lazy<FnvHashSet<&'static str>> = Lazy::new(|| {
FnvHashSet::from_iter([
"Annotated",
"Any",
"AsyncContextManager",
@@ -64,9 +63,9 @@ pub fn in_extensions(name: &str) -> bool {
}
// See: https://docs.python.org/3/library/typing.html
static IMPORTED_SUBSCRIPTS: Lazy<BTreeMap<&'static str, BTreeSet<&'static str>>> =
static IMPORTED_SUBSCRIPTS: Lazy<FnvHashMap<&'static str, FnvHashSet<&'static str>>> =
Lazy::new(|| {
let mut import_map = BTreeMap::new();
let mut import_map = FnvHashMap::default();
for (name, module) in [
// `collections`
("ChainMap", "collections"),
@@ -183,7 +182,7 @@ static IMPORTED_SUBSCRIPTS: Lazy<BTreeMap<&'static str, BTreeSet<&'static str>>>
] {
import_map
.entry(name)
.or_insert_with(BTreeSet::new)
.or_insert_with(FnvHashSet::default)
.insert(module);
}
import_map
@@ -191,13 +190,13 @@ static IMPORTED_SUBSCRIPTS: Lazy<BTreeMap<&'static str, BTreeSet<&'static str>>>
// These are all assumed to come from the `typing` module.
// See: https://peps.python.org/pep-0585/
static PEP_585_BUILTINS_ELIGIBLE: Lazy<BTreeSet<&'static str>> =
Lazy::new(|| BTreeSet::from(["Dict", "FrozenSet", "List", "Set", "Tuple", "Type"]));
static PEP_585_BUILTINS_ELIGIBLE: Lazy<FnvHashSet<&'static str>> =
Lazy::new(|| FnvHashSet::from_iter(["Dict", "FrozenSet", "List", "Set", "Tuple", "Type"]));
// These are all assumed to come from the `typing` module.
// See: https://peps.python.org/pep-0585/
static PEP_585_BUILTINS: Lazy<BTreeSet<&'static str>> =
Lazy::new(|| BTreeSet::from(["dict", "frozenset", "list", "set", "tuple", "type"]));
static PEP_585_BUILTINS: Lazy<FnvHashSet<&'static str>> =
Lazy::new(|| FnvHashSet::from_iter(["dict", "frozenset", "list", "set", "tuple", "type"]));
fn is_pep593_annotated_subscript(name: &str) -> bool {
name == "Annotated"
@@ -210,7 +209,7 @@ pub enum SubscriptKind {
pub fn match_annotated_subscript(
expr: &Expr,
imports: &BTreeMap<&str, BTreeSet<&str>>,
imports: &FnvHashMap<&str, FnvHashSet<&str>>,
) -> Option<SubscriptKind> {
match &expr.node {
ExprKind::Attribute { attr, value, .. } => {
@@ -261,7 +260,7 @@ pub fn match_annotated_subscript(
/// Returns `true` if `Expr` represents a reference to a typing object with a
/// PEP 585 built-in. Note that none of the PEP 585 built-ins are in
/// `typing_extensions`.
pub fn is_pep585_builtin(expr: &Expr, typing_imports: Option<&BTreeSet<&str>>) -> bool {
pub fn is_pep585_builtin(expr: &Expr, typing_imports: Option<&FnvHashSet<&str>>) -> bool {
match &expr.node {
ExprKind::Attribute { attr, value, .. } => {
if let ExprKind::Name { id, .. } = &value.node {

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeSet;
use fnv::FnvHashSet;
use rustpython_ast::{Constant, KeywordData};
use rustpython_parser::ast::{ArgData, Expr, ExprKind, Stmt, StmtKind};
@@ -9,29 +8,6 @@ use crate::checks::{Check, CheckKind};
use crate::pyupgrade::types::Primitive;
use crate::settings::types::PythonVersion;
pub const PY33_PLUS_REMOVE_FUTURES: &[&str] = &[
"nested_scopes",
"generators",
"with_statement",
"division",
"absolute_import",
"with_statement",
"print_function",
"unicode_literals",
];
pub const PY37_PLUS_REMOVE_FUTURES: &[&str] = &[
"nested_scopes",
"generators",
"with_statement",
"division",
"absolute_import",
"with_statement",
"print_function",
"unicode_literals",
"generator_stop",
];
/// U008
pub fn super_args(
scope: &Scope,
@@ -183,28 +159,11 @@ pub fn type_of_primitive(func: &Expr, args: &[Expr], location: Range) -> Option<
None
}
/// U010
pub fn unnecessary_future_import(
version: PythonVersion,
name: &str,
location: Range,
) -> Option<Check> {
if (version >= PythonVersion::Py33 && PY33_PLUS_REMOVE_FUTURES.contains(&name))
|| (version >= PythonVersion::Py37 && PY37_PLUS_REMOVE_FUTURES.contains(&name))
{
return Some(Check::new(
CheckKind::UnnecessaryFutureImport(name.to_string()),
location,
));
}
None
}
/// U011
pub fn unnecessary_lru_cache_params(
decorator_list: &[Expr],
target_version: PythonVersion,
imports: Option<&BTreeSet<&str>>,
imports: Option<&FnvHashSet<&str>>,
) -> Option<Check> {
for expr in decorator_list.iter() {
if let ExprKind::Call {

View File

@@ -1,11 +1,13 @@
use libcst_native::{Codegen, Expression, SmallStatement, Statement};
use rustpython_ast::{Expr, Keyword, Location};
use anyhow::Result;
use libcst_native::{Codegen, Expression, ImportNames, SmallStatement, Statement};
use rustpython_ast::{Expr, Keyword, Location, Stmt};
use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;
use crate::ast::helpers;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::autofix::{self, Fix};
use crate::cst::matchers::match_module;
use crate::source_code_locator::SourceCodeLocator;
/// Generate a fix to remove a base from a ClassDef statement.
@@ -41,7 +43,7 @@ pub fn remove_class_def_base(
}
return match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix::replacement("".to_string(), start, end)),
(Some(start), Some(end)) => Some(Fix::deletion(start, end)),
_ => None,
};
}
@@ -133,6 +135,63 @@ pub fn remove_super_arguments(locator: &SourceCodeLocator, expr: &Expr) -> Optio
None
}
/// U010
pub fn remove_unnecessary_future_import(
locator: &SourceCodeLocator,
removable: &[usize],
stmt: &Stmt,
parent: Option<&Stmt>,
deleted: &[&Stmt],
) -> Result<Fix> {
// TODO(charlie): DRY up with pyflakes::fixes::remove_unused_import_froms.
let module_text = locator.slice_source_code_range(&Range::from_located(stmt));
let mut tree = match_module(&module_text)?;
let body = if let Some(Statement::Simple(body)) = tree.body.first_mut() {
body
} else {
return Err(anyhow::anyhow!("Expected node to be: Statement::Simple"));
};
let body = if let Some(SmallStatement::ImportFrom(body)) = body.body.first_mut() {
body
} else {
return Err(anyhow::anyhow!(
"Expected node to be: SmallStatement::ImportFrom"
));
};
let aliases = if let ImportNames::Aliases(aliases) = &mut body.names {
aliases
} else {
return Err(anyhow::anyhow!("Expected node to be: Aliases"));
};
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
// TODO(charlie): This is quadratic.
for index in removable.iter().rev() {
aliases.remove(*index);
}
if let Some(alias) = aliases.last_mut() {
alias.comma = trailing_comma;
}
if aliases.is_empty() {
autofix::helpers::remove_stmt(stmt, parent, deleted)
} else {
let mut state = Default::default();
tree.codegen(&mut state);
Ok(Fix::replacement(
state.to_string(),
stmt.location,
stmt.end_location.unwrap(),
))
}
}
/// U011
pub fn remove_unnecessary_lru_cache_params(
locator: &SourceCodeLocator,

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeMap;
use fnv::FnvHashMap;
use once_cell::sync::Lazy;
use rustpython_ast::{Expr, ExprKind};
@@ -8,8 +7,8 @@ use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
static DEPRECATED_ALIASES: Lazy<BTreeMap<&'static str, &'static str>> = Lazy::new(|| {
BTreeMap::from([
static DEPRECATED_ALIASES: Lazy<FnvHashMap<&'static str, &'static str>> = Lazy::new(|| {
FnvHashMap::from_iter([
("failUnlessEqual", "assertEqual"),
("assertEquals", "assertEqual"),
("failIfEqual", "assertNotEqual"),
@@ -28,6 +27,7 @@ static DEPRECATED_ALIASES: Lazy<BTreeMap<&'static str, &'static str>> = Lazy::ne
])
});
/// U005
pub fn deprecated_unittest_alias(checker: &mut Checker, expr: &Expr) {
if let ExprKind::Attribute { value, attr, .. } = &expr.node {
if let Some(target) = DEPRECATED_ALIASES.get(attr.as_str()) {

View File

@@ -2,6 +2,7 @@ pub use deprecated_unittest_alias::deprecated_unittest_alias;
pub use super_call_with_parameters::super_call_with_parameters;
pub use type_of_primitive::type_of_primitive;
pub use unnecessary_abspath::unnecessary_abspath;
pub use unnecessary_encode_utf8::unnecessary_encode_utf8;
pub use unnecessary_future_import::unnecessary_future_import;
pub use unnecessary_lru_cache_params::unnecessary_lru_cache_params;
pub use use_pep585_annotation::use_pep585_annotation;
@@ -13,6 +14,7 @@ mod deprecated_unittest_alias;
mod super_call_with_parameters;
mod type_of_primitive;
mod unnecessary_abspath;
mod unnecessary_encode_utf8;
mod unnecessary_future_import;
mod unnecessary_lru_cache_params;
mod use_pep585_annotation;

View File

@@ -5,6 +5,7 @@ use crate::check_ast::Checker;
use crate::pyupgrade;
use crate::pyupgrade::checks;
/// U008
pub fn super_call_with_parameters(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
// Only bother going through the super check at all if we're in a `super` call.
// (We check this in `check_super_args` too, so this is just an optimization.)

View File

@@ -6,6 +6,7 @@ use crate::check_ast::Checker;
use crate::checks::CheckKind;
use crate::pyupgrade::checks;
/// U003
pub fn type_of_primitive(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let Some(mut check) = checks::type_of_primitive(func, args, Range::from_located(expr)) {
if checker.patch() {

View File

@@ -5,6 +5,7 @@ use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::pyupgrade::checks;
/// U002
pub fn unnecessary_abspath(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let Some(mut check) = checks::unnecessary_abspath(func, args, Range::from_located(expr)) {
if checker.patch() {

View File

@@ -0,0 +1,152 @@
use rustpython_ast::{Constant, Expr, ExprKind, Keyword};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::source_code_locator::SourceCodeLocator;
const UTF8_LITERALS: &[&str] = &["utf-8", "utf8", "utf_8", "u8", "utf", "cp65001"];
fn match_encoded_variable(func: &Expr) -> Option<&Expr> {
if let ExprKind::Attribute {
value: variable,
attr,
..
} = &func.node
{
if attr == "encode" {
return Some(variable);
}
}
None
}
fn is_utf8_encoding_arg(arg: &Expr) -> bool {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node
{
UTF8_LITERALS.contains(&value.to_lowercase().as_str())
} else {
false
}
}
fn is_default_encode(args: &Vec<Expr>, kwargs: &Vec<Keyword>) -> bool {
match (args.len(), kwargs.len()) {
// .encode()
(0, 0) => true,
// .encode(encoding)
(1, 0) => is_utf8_encoding_arg(&args[0]),
// .encode(kwarg=kwarg)
(0, 1) => {
kwargs[0].node.arg == Some("encoding".to_string())
&& is_utf8_encoding_arg(&kwargs[0].node.value)
}
// .encode(*args, **kwargs)
_ => false,
}
}
// Return a Fix for a default `encode` call removing the encoding argument,
// keyword, or positional.
fn delete_default_encode_arg_or_kwarg(
expr: &Expr,
args: &[Expr],
kwargs: &[Keyword],
patch: bool,
) -> Option<Check> {
if let Some(arg) = args.get(0) {
let mut check = Check::new(CheckKind::UnnecessaryEncodeUTF8, Range::from_located(expr));
if patch {
check.amend(Fix::deletion(arg.location, arg.end_location.unwrap()));
}
Some(check)
} else if let Some(kwarg) = kwargs.get(0) {
let mut check = Check::new(CheckKind::UnnecessaryEncodeUTF8, Range::from_located(expr));
if patch {
check.amend(Fix::deletion(kwarg.location, kwarg.end_location.unwrap()));
}
Some(check)
} else {
None
}
}
// Return a Fix replacing the call to encode by a `"b"` prefix on the string.
fn replace_with_bytes_literal(
expr: &Expr,
constant: &Expr,
locator: &SourceCodeLocator,
patch: bool,
) -> Check {
let mut check = Check::new(CheckKind::UnnecessaryEncodeUTF8, Range::from_located(expr));
if patch {
let content = locator.slice_source_code_range(&Range {
location: constant.location,
end_location: constant.end_location.unwrap(),
});
let content = format!(
"b{}",
content.trim_start_matches('u').trim_start_matches('U')
);
check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
))
}
check
}
/// U012
pub fn unnecessary_encode_utf8(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &Vec<Expr>,
kwargs: &Vec<Keyword>,
) {
if let Some(variable) = match_encoded_variable(func) {
match &variable.node {
ExprKind::Constant {
value: Constant::Str(literal),
..
} => {
// "str".encode()
// "str".encode("utf-8")
if is_default_encode(args, kwargs) {
if literal.is_ascii() {
// "foo".encode()
checker.add_check(replace_with_bytes_literal(
expr,
variable,
checker.locator,
checker.patch(),
));
} else {
// "unicode text©".encode("utf-8")
if let Some(check) =
delete_default_encode_arg_or_kwarg(expr, args, kwargs, checker.patch())
{
checker.add_check(check);
}
}
}
}
// f"foo{bar}".encode(*args, **kwargs)
ExprKind::JoinedStr { .. } => {
if is_default_encode(args, kwargs) {
if let Some(check) =
delete_default_encode_arg_or_kwarg(expr, args, kwargs, checker.patch())
{
checker.add_check(check);
}
}
}
_ => {}
}
}
}

View File

@@ -1,15 +1,84 @@
use std::collections::BTreeSet;
use log::error;
use rustpython_ast::{AliasData, Located};
use rustpython_parser::ast::Stmt;
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::pyupgrade::checks;
use crate::checks::{Check, CheckKind};
use crate::pyupgrade::fixes;
use crate::settings::types::PythonVersion;
pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, name: &str) {
if let Some(check) = checks::unnecessary_future_import(
checker.settings.target_version,
name,
Range::from_located(stmt),
) {
const PY33_PLUS_REMOVE_FUTURES: &[&str] = &[
"nested_scopes",
"generators",
"with_statement",
"division",
"absolute_import",
"with_statement",
"print_function",
"unicode_literals",
];
const PY37_PLUS_REMOVE_FUTURES: &[&str] = &[
"nested_scopes",
"generators",
"with_statement",
"division",
"absolute_import",
"with_statement",
"print_function",
"unicode_literals",
"generator_stop",
];
/// U010
pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Located<AliasData>]) {
let target_version = checker.settings.target_version;
let mut removable_index: Vec<usize> = vec![];
let mut removable_names: BTreeSet<&str> = BTreeSet::new();
for (index, alias) in names.iter().enumerate() {
let name = alias.node.name.as_str();
if (target_version >= PythonVersion::Py33 && PY33_PLUS_REMOVE_FUTURES.contains(&name))
|| (target_version >= PythonVersion::Py37 && PY37_PLUS_REMOVE_FUTURES.contains(&name))
{
removable_index.push(index);
removable_names.insert(name);
}
}
if !removable_index.is_empty() {
let mut check = Check::new(
CheckKind::UnnecessaryFutureImport(
removable_names.into_iter().map(String::from).collect(),
),
Range::from_located(stmt),
);
if checker.patch() {
let context = checker.binding_context();
let deleted: Vec<&Stmt> = checker
.deletions
.iter()
.map(|index| checker.parents[*index])
.collect();
match fixes::remove_unnecessary_future_import(
checker.locator,
&removable_index,
checker.parents[context.defined_by],
context.defined_in.map(|index| checker.parents[index]),
&deleted,
) {
Ok(fix) => {
if fix.patch.content.is_empty() || fix.patch.content == "pass" {
checker.deletions.insert(context.defined_by);
}
check.amend(fix);
}
Err(e) => error!("Failed to remove __future__ import: {}", e),
}
}
checker.add_check(check);
}
}

View File

@@ -3,6 +3,7 @@ use rustpython_parser::ast::Expr;
use crate::check_ast::Checker;
use crate::pyupgrade::{checks, fixes};
/// U011
pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Expr]) {
if let Some(mut check) = checks::unnecessary_lru_cache_params(
decorator_list,

View File

@@ -5,6 +5,7 @@ use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// U006
pub fn use_pep585_annotation(checker: &mut Checker, expr: &Expr, id: &str) {
let mut check = Check::new(
CheckKind::UsePEP585Annotation(id.to_string()),

View File

@@ -41,6 +41,7 @@ fn union(elts: &[Expr]) -> Expr {
}
}
/// U007
pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, slice: &Expr) {
if checker.match_typing_module(value, "Optional") {
let mut check = Check::new(CheckKind::UsePEP604Annotation, Range::from_located(expr));

View File

@@ -6,6 +6,7 @@ use crate::autofix::helpers;
use crate::check_ast::Checker;
use crate::pyupgrade::checks;
/// U001
pub fn useless_metaclass_type(checker: &mut Checker, stmt: &Stmt, value: &Expr, targets: &[Expr]) {
if let Some(mut check) =
checks::useless_metaclass_type(targets, value, Range::from_located(stmt))
@@ -29,7 +30,7 @@ pub fn useless_metaclass_type(checker: &mut Checker, stmt: &Stmt, value: &Expr,
}
check.amend(fix)
}
Err(e) => error!("Failed to fix unused imports: {}", e),
Err(e) => error!("Failed to fix remove metaclass type: {}", e),
}
}
checker.add_check(check);

View File

@@ -4,6 +4,7 @@ use crate::check_ast::Checker;
use crate::pyupgrade;
use crate::pyupgrade::checks;
/// U004
pub fn useless_object_inheritance(
checker: &mut Checker,
stmt: &Stmt,

View File

@@ -1,5 +1,4 @@
use std::collections::BTreeMap;
use fnv::FnvHashMap;
use once_cell::sync::Lazy;
use rustpython_ast::Location;
@@ -10,8 +9,8 @@ use crate::source_code_locator::SourceCodeLocator;
use crate::Check;
/// See: https://github.com/microsoft/vscode/blob/095ddabc52b82498ee7f718a34f9dd11d59099a8/src/vs/base/common/strings.ts#L1094
static CONFUSABLES: Lazy<BTreeMap<u32, u32>> = Lazy::new(|| {
BTreeMap::from([
static CONFUSABLES: Lazy<FnvHashMap<u32, u32>> = Lazy::new(|| {
FnvHashMap::from_iter([
(8232, 32),
(8233, 32),
(5760, 32),

View File

@@ -12,7 +12,7 @@ use regex::Regex;
use crate::checks_gen::CheckCodePrefix;
use crate::settings::pyproject::load_options;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{flake8_annotations, flake8_quotes, fs, isort, pep8_naming};
use crate::{flake8_annotations, flake8_bugbear, flake8_quotes, fs, isort, pep8_naming};
#[derive(Debug)]
pub struct Configuration {
@@ -30,6 +30,7 @@ pub struct Configuration {
pub target_version: PythonVersion,
// Plugins
pub flake8_annotations: flake8_annotations::settings::Settings,
pub flake8_bugbear: flake8_bugbear::settings::Settings,
pub flake8_quotes: flake8_quotes::settings::Settings,
pub isort: isort::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
@@ -133,6 +134,10 @@ impl Configuration {
.flake8_annotations
.map(flake8_annotations::settings::Settings::from_options)
.unwrap_or_default(),
flake8_bugbear: options
.flake8_bugbear
.map(flake8_bugbear::settings::Settings::from_options)
.unwrap_or_default(),
flake8_quotes: options
.flake8_quotes
.map(flake8_quotes::settings::Settings::from_options)

View File

@@ -2,10 +2,10 @@
//! command-line options. Structure is optimized for internal usage, as opposed
//! to external visibility or parsing.
use std::collections::BTreeSet;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use fnv::FnvHashSet;
use path_absolutize::path_dedot;
use regex::Regex;
@@ -13,7 +13,7 @@ use crate::checks::CheckCode;
use crate::checks_gen::{CheckCodePrefix, PrefixSpecificity};
use crate::settings::configuration::Configuration;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{flake8_annotations, flake8_quotes, isort, pep8_naming};
use crate::{flake8_annotations, flake8_bugbear, flake8_quotes, isort, pep8_naming};
pub mod configuration;
pub mod options;
@@ -24,7 +24,7 @@ pub mod user;
#[derive(Debug)]
pub struct Settings {
pub dummy_variable_rgx: Regex,
pub enabled: BTreeSet<CheckCode>,
pub enabled: FnvHashSet<CheckCode>,
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
pub line_length: usize,
@@ -33,6 +33,7 @@ pub struct Settings {
pub target_version: PythonVersion,
// Plugins
pub flake8_annotations: flake8_annotations::settings::Settings,
pub flake8_bugbear: flake8_bugbear::settings::Settings,
pub flake8_quotes: flake8_quotes::settings::Settings,
pub isort: isort::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
@@ -51,6 +52,7 @@ impl Settings {
exclude: config.exclude,
extend_exclude: config.extend_exclude,
flake8_annotations: config.flake8_annotations,
flake8_bugbear: config.flake8_bugbear,
flake8_quotes: config.flake8_quotes,
isort: config.isort,
line_length: config.line_length,
@@ -64,7 +66,7 @@ impl Settings {
pub fn for_rule(check_code: CheckCode) -> Self {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: BTreeSet::from([check_code]),
enabled: FnvHashSet::from_iter([check_code]),
exclude: Default::default(),
extend_exclude: Default::default(),
line_length: 88,
@@ -72,6 +74,7 @@ impl Settings {
src: vec![path_dedot::CWD.clone()],
target_version: PythonVersion::Py310,
flake8_annotations: Default::default(),
flake8_bugbear: Default::default(),
flake8_quotes: Default::default(),
isort: Default::default(),
pep8_naming: Default::default(),
@@ -81,7 +84,7 @@ impl Settings {
pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: BTreeSet::from_iter(check_codes),
enabled: FnvHashSet::from_iter(check_codes),
exclude: Default::default(),
extend_exclude: Default::default(),
line_length: 88,
@@ -89,6 +92,7 @@ impl Settings {
src: vec![path_dedot::CWD.clone()],
target_version: PythonVersion::Py310,
flake8_annotations: Default::default(),
flake8_bugbear: Default::default(),
flake8_quotes: Default::default(),
isort: Default::default(),
pep8_naming: Default::default(),
@@ -123,8 +127,8 @@ fn resolve_codes(
extend_select: &[CheckCodePrefix],
ignore: &[CheckCodePrefix],
extend_ignore: &[CheckCodePrefix],
) -> BTreeSet<CheckCode> {
let mut codes: BTreeSet<CheckCode> = BTreeSet::new();
) -> FnvHashSet<CheckCode> {
let mut codes: FnvHashSet<CheckCode> = FnvHashSet::default();
for specificity in [
PrefixSpecificity::Category,
PrefixSpecificity::Hundreds,
@@ -161,7 +165,7 @@ fn resolve_codes(
#[cfg(test)]
mod tests {
use std::collections::BTreeSet;
use fnv::FnvHashSet;
use crate::checks::CheckCode;
use crate::checks_gen::CheckCodePrefix;
@@ -170,19 +174,19 @@ mod tests {
#[test]
fn resolver() {
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[], &[]);
let expected = BTreeSet::from_iter([CheckCode::W292, CheckCode::W605]);
let expected = FnvHashSet::from_iter([CheckCode::W292, CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W6], &[], &[], &[]);
let expected = BTreeSet::from_iter([CheckCode::W605]);
let expected = FnvHashSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[CheckCodePrefix::W292], &[]);
let expected = BTreeSet::from_iter([CheckCode::W605]);
let expected = FnvHashSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W605], &[], &[CheckCodePrefix::W605], &[]);
let expected = BTreeSet::from_iter([]);
let expected = FnvHashSet::from_iter([]);
assert_eq!(actual, expected);
}
}

View File

@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::PythonVersion;
use crate::{flake8_annotations, flake8_quotes, isort, pep8_naming};
use crate::{flake8_annotations, flake8_bugbear, flake8_quotes, isort, pep8_naming};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
@@ -19,13 +19,15 @@ pub struct Options {
pub fix: Option<bool>,
pub ignore: Option<Vec<CheckCodePrefix>>,
pub line_length: Option<usize>,
pub per_file_ignores: Option<BTreeMap<String, Vec<CheckCodePrefix>>>,
pub select: Option<Vec<CheckCodePrefix>>,
pub src: Option<Vec<String>>,
pub target_version: Option<PythonVersion>,
// Plugins
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
pub flake8_bugbear: Option<flake8_bugbear::settings::Options>,
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
pub isort: Option<isort::settings::Options>,
pub pep8_naming: Option<pep8_naming::settings::Options>,
// Tables are required to go last.
pub per_file_ignores: Option<BTreeMap<String, Vec<CheckCodePrefix>>>,
}

View File

@@ -109,7 +109,7 @@ mod tests {
find_project_root, find_pyproject_toml, parse_pyproject_toml, Options, Pyproject, Tools,
};
use crate::settings::types::PatternPrefixPair;
use crate::{flake8_quotes, pep8_naming};
use crate::{flake8_bugbear, flake8_quotes, pep8_naming};
#[test]
fn deserialize() -> Result<()> {
@@ -146,6 +146,7 @@ mod tests {
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -177,6 +178,7 @@ line-length = 79
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -208,6 +210,7 @@ exclude = ["foo.py"]
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -239,6 +242,7 @@ select = ["E501"]
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -271,6 +275,7 @@ ignore = ["E501"]
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
isort: None,
pep8_naming: None,
@@ -349,6 +354,12 @@ other-attribute = 1
src: None,
target_version: None,
flake8_annotations: None,
flake8_bugbear: Some(flake8_bugbear::settings::Options {
extend_immutable_calls: Some(vec![
"fastapi.Depends".to_string(),
"fastapi.Query".to_string(),
]),
}),
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(Quote::Single),
multiline_quotes: Some(Quote::Double),

View File

@@ -0,0 +1,69 @@
---
source: src/linter.rs
expression: checks
---
- kind: CachedInstanceMethod
location:
row: 78
column: 5
end_location:
row: 78
column: 20
fix: ~
- kind: CachedInstanceMethod
location:
row: 82
column: 5
end_location:
row: 82
column: 10
fix: ~
- kind: CachedInstanceMethod
location:
row: 86
column: 5
end_location:
row: 86
column: 22
fix: ~
- kind: CachedInstanceMethod
location:
row: 90
column: 5
end_location:
row: 90
column: 12
fix: ~
- kind: CachedInstanceMethod
location:
row: 94
column: 5
end_location:
row: 94
column: 24
fix: ~
- kind: CachedInstanceMethod
location:
row: 98
column: 5
end_location:
row: 98
column: 14
fix: ~
- kind: CachedInstanceMethod
location:
row: 102
column: 5
end_location:
row: 102
column: 26
fix: ~
- kind: CachedInstanceMethod
location:
row: 106
column: 5
end_location:
row: 106
column: 16
fix: ~

View File

@@ -67,19 +67,19 @@ expression: checks
- - shelve
- false
location:
row: 33
row: 32
column: 4
end_location:
row: 33
row: 32
column: 17
fix:
patch:
content: ""
location:
row: 33
row: 32
column: 0
end_location:
row: 34
row: 33
column: 0
applied: false
- kind:
@@ -87,39 +87,39 @@ expression: checks
- - importlib
- false
location:
row: 34
row: 33
column: 4
end_location:
row: 34
row: 33
column: 20
fix:
patch:
content: ""
content: pass
location:
row: 34
column: 0
row: 33
column: 4
end_location:
row: 35
column: 0
row: 33
column: 20
applied: false
- kind:
UnusedImport:
- - pathlib
- false
location:
row: 38
row: 37
column: 4
end_location:
row: 38
row: 37
column: 18
fix:
patch:
content: ""
location:
row: 38
row: 37
column: 0
end_location:
row: 39
row: 38
column: 0
applied: false
- kind:
@@ -127,19 +127,19 @@ expression: checks
- - pickle
- false
location:
row: 53
row: 52
column: 8
end_location:
row: 53
row: 52
column: 21
fix:
patch:
content: pass
location:
row: 53
row: 52
column: 8
end_location:
row: 53
row: 52
column: 21
applied: false

View File

@@ -0,0 +1,29 @@
---
source: src/linter.rs
expression: checks
---
- kind: AssertUsed
location:
row: 2
column: 0
end_location:
row: 2
column: 11
fix: ~
- kind: AssertUsed
location:
row: 8
column: 4
end_location:
row: 8
column: 17
fix: ~
- kind: AssertUsed
location:
row: 11
column: 4
end_location:
row: 11
column: 17
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: ExecUsed
location:
row: 3
column: 4
end_location:
row: 3
column: 17
fix: ~
- kind: ExecUsed
location:
row: 5
column: 0
end_location:
row: 5
column: 13
fix: ~

View File

@@ -0,0 +1,37 @@
---
source: src/linter.rs
expression: checks
---
- kind: HardcodedBindAllInterfaces
location:
row: 9
column: 0
end_location:
row: 9
column: 9
fix: ~
- kind: HardcodedBindAllInterfaces
location:
row: 10
column: 0
end_location:
row: 10
column: 9
fix: ~
- kind: HardcodedBindAllInterfaces
location:
row: 14
column: 5
end_location:
row: 14
column: 14
fix: ~
- kind: HardcodedBindAllInterfaces
location:
row: 18
column: 8
end_location:
row: 18
column: 17
fix: ~

View File

@@ -0,0 +1,311 @@
---
source: src/linter.rs
expression: checks
---
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 12
column: 11
end_location:
row: 12
column: 19
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 13
column: 8
end_location:
row: 13
column: 16
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 14
column: 9
end_location:
row: 14
column: 17
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 15
column: 6
end_location:
row: 15
column: 14
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 16
column: 9
end_location:
row: 16
column: 17
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 17
column: 8
end_location:
row: 17
column: 16
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 18
column: 10
end_location:
row: 18
column: 18
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 19
column: 18
end_location:
row: 19
column: 26
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 20
column: 18
end_location:
row: 20
column: 26
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 22
column: 16
end_location:
row: 22
column: 24
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 23
column: 12
end_location:
row: 23
column: 20
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 24
column: 14
end_location:
row: 24
column: 22
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 25
column: 11
end_location:
row: 25
column: 19
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 26
column: 14
end_location:
row: 26
column: 22
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 27
column: 13
end_location:
row: 27
column: 21
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 28
column: 15
end_location:
row: 28
column: 23
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 29
column: 23
end_location:
row: 29
column: 31
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 30
column: 23
end_location:
row: 30
column: 31
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 34
column: 15
end_location:
row: 34
column: 23
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 38
column: 19
end_location:
row: 38
column: 27
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 39
column: 16
end_location:
row: 39
column: 24
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 40
column: 17
end_location:
row: 40
column: 25
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 41
column: 14
end_location:
row: 41
column: 22
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 42
column: 17
end_location:
row: 42
column: 25
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 43
column: 16
end_location:
row: 43
column: 24
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 44
column: 18
end_location:
row: 44
column: 26
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 46
column: 12
end_location:
row: 46
column: 20
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 47
column: 9
end_location:
row: 47
column: 17
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 48
column: 10
end_location:
row: 48
column: 18
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 49
column: 7
end_location:
row: 49
column: 15
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 50
column: 10
end_location:
row: 50
column: 18
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 51
column: 9
end_location:
row: 51
column: 17
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 52
column: 11
end_location:
row: 52
column: 19
fix: ~
- kind:
HardcodedPasswordString: s3cr3t
location:
row: 53
column: 20
end_location:
row: 53
column: 28
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/linter.rs
expression: checks
---
- kind:
HardcodedPasswordFuncArg: s3cr3t
location:
row: 13
column: 8
end_location:
row: 13
column: 25
fix: ~

View File

@@ -0,0 +1,50 @@
---
source: src/linter.rs
expression: checks
---
- kind:
HardcodedPasswordDefault: default
location:
row: 5
column: 28
end_location:
row: 5
column: 37
fix: ~
- kind:
HardcodedPasswordDefault: posonly
location:
row: 13
column: 44
end_location:
row: 13
column: 53
fix: ~
- kind:
HardcodedPasswordDefault: kwonly
location:
row: 21
column: 38
end_location:
row: 21
column: 46
fix: ~
- kind:
HardcodedPasswordDefault: posonly
location:
row: 29
column: 38
end_location:
row: 29
column: 47
fix: ~
- kind:
HardcodedPasswordDefault: kwonly
location:
row: 29
column: 61
end_location:
row: 29
column: 69
fix: ~

View File

@@ -3,48 +3,197 @@ source: src/linter.rs
expression: checks
---
- kind:
UnnecessaryFutureImport: nested_scopes
UnnecessaryFutureImport:
- generators
- nested_scopes
location:
row: 1
column: 0
end_location:
row: 1
column: 61
fix: ~
column: 48
fix:
patch:
content: ""
location:
row: 1
column: 0
end_location:
row: 2
column: 0
applied: false
- kind:
UnnecessaryFutureImport: generators
UnnecessaryFutureImport:
- unicode_literals
- with_statement
location:
row: 1
row: 2
column: 0
end_location:
row: 1
column: 61
fix: ~
row: 2
column: 55
fix:
patch:
content: ""
location:
row: 2
column: 0
end_location:
row: 3
column: 0
applied: false
- kind:
UnnecessaryFutureImport: absolute_import
UnnecessaryFutureImport:
- absolute_import
- division
location:
row: 3
column: 0
end_location:
row: 3
column: 48
fix: ~
fix:
patch:
content: ""
location:
row: 3
column: 0
end_location:
row: 4
column: 0
applied: false
- kind:
UnnecessaryFutureImport: division
UnnecessaryFutureImport:
- generator_stop
location:
row: 3
row: 4
column: 0
end_location:
row: 3
column: 48
fix: ~
- kind:
UnnecessaryFutureImport: generator_stop
location:
row: 5
column: 0
end_location:
row: 5
row: 4
column: 37
fix: ~
fix:
patch:
content: ""
location:
row: 4
column: 0
end_location:
row: 5
column: 0
applied: false
- kind:
UnnecessaryFutureImport:
- generator_stop
- print_function
location:
row: 5
column: 0
end_location:
row: 5
column: 53
fix:
patch:
content: ""
location:
row: 5
column: 0
end_location:
row: 6
column: 0
applied: false
- kind:
UnnecessaryFutureImport:
- generators
location:
row: 6
column: 0
end_location:
row: 6
column: 49
fix:
patch:
content: from __future__ import invalid_module
location:
row: 6
column: 0
end_location:
row: 6
column: 49
applied: false
- kind:
UnnecessaryFutureImport:
- generator_stop
location:
row: 9
column: 4
end_location:
row: 9
column: 41
fix:
patch:
content: ""
location:
row: 9
column: 0
end_location:
row: 10
column: 0
applied: false
- kind:
UnnecessaryFutureImport:
- generators
location:
row: 10
column: 4
end_location:
row: 10
column: 37
fix:
patch:
content: pass
location:
row: 10
column: 4
end_location:
row: 10
column: 37
applied: false
- kind:
UnnecessaryFutureImport:
- generator_stop
location:
row: 13
column: 4
end_location:
row: 13
column: 41
fix:
patch:
content: ""
location:
row: 13
column: 0
end_location:
row: 14
column: 0
applied: false
- kind:
UnnecessaryFutureImport:
- generators
location:
row: 14
column: 4
end_location:
row: 14
column: 53
fix:
patch:
content: from __future__ import invalid_module
location:
row: 14
column: 4
end_location:
row: 14
column: 53
applied: false

View File

@@ -0,0 +1,260 @@
---
source: src/linter.rs
expression: checks
---
- kind: UnnecessaryEncodeUTF8
location:
row: 2
column: 0
end_location:
row: 2
column: 21
fix:
patch:
content: "b\"foo\""
location:
row: 2
column: 0
end_location:
row: 2
column: 21
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 3
column: 0
end_location:
row: 3
column: 18
fix:
patch:
content: "b\"foo\""
location:
row: 3
column: 0
end_location:
row: 3
column: 18
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 4
column: 0
end_location:
row: 4
column: 14
fix:
patch:
content: "b\"foo\""
location:
row: 4
column: 0
end_location:
row: 4
column: 14
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 5
column: 0
end_location:
row: 5
column: 20
fix:
patch:
content: "b\"foo\""
location:
row: 5
column: 0
end_location:
row: 5
column: 20
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 6
column: 0
end_location:
row: 6
column: 22
fix:
patch:
content: "b\"foo\""
location:
row: 6
column: 0
end_location:
row: 6
column: 22
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 7
column: 0
end_location:
row: 7
column: 30
fix:
patch:
content: "b\"foo\""
location:
row: 7
column: 0
end_location:
row: 7
column: 30
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 8
column: 0
end_location:
row: 14
column: 1
fix:
patch:
content: "b\"\"\"\nLorem\n\nIpsum\n\"\"\""
location:
row: 8
column: 0
end_location:
row: 14
column: 1
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 26
column: 0
end_location:
row: 26
column: 27
fix:
patch:
content: ""
location:
row: 26
column: 19
end_location:
row: 26
column: 26
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 44
column: 0
end_location:
row: 44
column: 31
fix:
patch:
content: ""
location:
row: 44
column: 23
end_location:
row: 44
column: 30
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 46
column: 0
end_location:
row: 46
column: 39
fix:
patch:
content: ""
location:
row: 46
column: 23
end_location:
row: 46
column: 38
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 48
column: 0
end_location:
row: 48
column: 23
fix:
patch:
content: "br\"fo\\o\""
location:
row: 48
column: 0
end_location:
row: 48
column: 23
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 49
column: 0
end_location:
row: 49
column: 22
fix:
patch:
content: "b\"foo\""
location:
row: 49
column: 0
end_location:
row: 49
column: 22
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 50
column: 0
end_location:
row: 50
column: 23
fix:
patch:
content: "bR\"fo\\o\""
location:
row: 50
column: 0
end_location:
row: 50
column: 23
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 51
column: 0
end_location:
row: 51
column: 22
fix:
patch:
content: "b\"foo\""
location:
row: 51
column: 0
end_location:
row: 51
column: 22
applied: false
- kind: UnnecessaryEncodeUTF8
location:
row: 52
column: 6
end_location:
row: 52
column: 20
fix:
patch:
content: "b\"foo\""
location:
row: 52
column: 6
end_location:
row: 52
column: 20
applied: false