Compare commits

...

17 Commits

Author SHA1 Message Date
Charlie Marsh
d8162ce79d Bump version to 0.0.219 2023-01-11 23:46:01 -05:00
Charlie Marsh
e11ef54bda Improve globset documentation and help message (#1808)
Closes #1545.
2023-01-11 23:41:56 -05:00
messense
9a07b0623e Move top level ruff into python folder (#1806)
https://maturin.rs/project_layout.html#mixed-rustpython-project

Resolves #1805
2023-01-11 23:12:55 -05:00
Charlie Marsh
f450e2e79d Implement doc line length enforcement (#1804)
This PR implements `W505` (`DocLineTooLong`), which is similar to `E501`
(`LineTooLong`) but confined to doc lines.

I based the "doc line" definition on pycodestyle, which defines a doc
line as a standalone comment or string statement. Our definition is a
bit more liberal, since we consider any string statement a doc line
(even if it's part of a multi-line statement) -- but that seems fine to
me.

Note that, unusually, this rule requires custom extraction from both the
token stream (to find standalone comments) and the AST (to find string
statements).

Closes #1784.
2023-01-11 22:32:14 -05:00
Colin Delahunty
329946f162 Avoid erroneous Q002 error message for single-quote docstrings (#1777)
Fixes #1775. Before implementing your solution I thought of a slightly
simpler one. However, it will let this function pass:
```
def double_inside_single(a):
    'Double inside "single "'
```
If we want the function to pass, my implementation works. But if we do not,
then I can go with how you suggested I implement this (I left how I
would begin to handle it commented out). The bottom of the flake8-quotes
documentation seems to suggest that this should pass:
https://pypi.org/project/flake8-quotes/

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
2023-01-11 20:01:54 -05:00
Charlie Marsh
588399e415 Fix Clippy error 2023-01-11 19:59:00 -05:00
Chammika Mannakkara
4523885268 flake8_simplify : SIM401 (#1778)
Ref #998 

- Implements SIM401 with fix
- Added tests

Notes: 
- only recognize simple ExprKind::Name variables in expr patterns for
now
- bug-fix from the reference implementation: check all 3 conditions (dict-key,
target-variable, dict-name) for equality; `flake8_simplify` only tests the
first two (and only the first in the second pattern)
2023-01-11 19:51:37 -05:00
Maksudul Haque
de81b0cd38 [flake8-simplify] Add Rule for SIM115 (Use context handler for opening files) (#1782)
ref: https://github.com/charliermarsh/ruff/issues/998

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
2023-01-11 19:28:05 -05:00
Charlie Marsh
4fce296e3f Skip SIM108 violations for complex if-statements (#1802)
We now skip SIM108 violations if: the resulting statement would exceed
the user-specified line length, or the `if` statement contains comments.

Closes #1719.

Closes #1766.
2023-01-11 19:21:30 -05:00
Charlie Marsh
9d48d7bbd1 Skip unused argument checks for magic methods (#1801)
We still check `__init__`, `__call__`, and `__new__`.

Closes #1796.
2023-01-11 19:02:20 -05:00
Charlie Marsh
c56f263618 Avoid flagging builtins for OSError rewrites (#1800)
Related to (but does not fix) #1790.
2023-01-11 18:49:25 -05:00
Grzegorz Bokota
fb2382fbc3 Update readme to reflect #1763 (#1780)
When checking the changes in the 0.0.218 release, I noticed that auto-fixing
PT004 and PT005 was disabled, but this change was not reflected in the
README. So I created this small PR to fix that.

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
2023-01-11 18:37:41 -05:00
Charlie Marsh
c92a5a8704 Avoid rewriting flake8-comprehensions expressions for builtin overrides (#1799)
Closes #1788.
2023-01-11 18:33:55 -05:00
Charlie Marsh
d7cf3147b7 Refactor flake8-comprehensions rules to take fewer arguments (#1797) 2023-01-11 18:21:18 -05:00
Charlie Marsh
bf4d35c705 Convert flake8-comprehensions checks to Checker style (#1795) 2023-01-11 18:11:20 -05:00
Charlie Marsh
4e97e9c7cf Improve PIE794 autofix behavior (#1794)
We now: (1) trigger PIE794 for objects without bases (not sure why this
was omitted before); and (2) remove the entire line, rather than leaving
behind trailing whitespace.

Resolves #1787.
2023-01-11 18:01:29 -05:00
Charlie Marsh
a3fcc3b28d Disable update check by default (#1786)
This has received enough criticism that I'm comfortable making it
opt-in.
2023-01-11 13:47:40 -05:00
68 changed files with 1484 additions and 565 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.218
rev: v0.0.219
hooks:
- id: ruff

8
Cargo.lock generated
View File

@@ -735,7 +735,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.218-dev.0"
version = "0.0.219-dev.0"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1874,7 +1874,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.218"
version = "0.0.219"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1942,7 +1942,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.218"
version = "0.0.219"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1962,7 +1962,7 @@ dependencies = [
[[package]]
name = "ruff_macros"
version = "0.0.218"
version = "0.0.219"
dependencies = [
"once_cell",
"proc-macro2",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.218"
version = "0.0.219"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"
@@ -52,7 +52,7 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
quick-junit = { version = "0.3.2" }
regex = { version = "1.6.0" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
ruff_macros = { version = "0.0.218", path = "ruff_macros" }
ruff_macros = { version = "0.0.219", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }

View File

@@ -180,7 +180,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.218'
rev: 'v0.0.219'
hooks:
- id: ruff
# Respect `exclude` and `extend-exclude` settings.
@@ -343,21 +343,21 @@ Options:
Disable cache reads
--isolated
Ignore all configuration files
--select <SELECT>
--select <RULE_CODE>
Comma-separated list of rule codes to enable (or ALL, to enable all rules)
--extend-select <EXTEND_SELECT>
--extend-select <RULE_CODE>
Like --select, but adds additional rule codes on top of the selected ones
--ignore <IGNORE>
--ignore <RULE_CODE>
Comma-separated list of rule codes to disable
--extend-ignore <EXTEND_IGNORE>
--extend-ignore <RULE_CODE>
Like --ignore, but adds additional rule codes on top of the ignored ones
--exclude <EXCLUDE>
--exclude <FILE_PATTERN>
List of paths, used to omit files and/or directories from analysis
--extend-exclude <EXTEND_EXCLUDE>
--extend-exclude <FILE_PATTERN>
Like --exclude, but adds additional files and directories on top of those already excluded
--fixable <FIXABLE>
--fixable <RULE_CODE>
List of rule codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
--unfixable <UNFIXABLE>
--unfixable <RULE_CODE>
List of rule codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
--per-file-ignores <PER_FILE_IGNORES>
List of mappings from file pattern to code to exclude
@@ -597,6 +597,7 @@ For more, see [pycodestyle](https://pypi.org/project/pycodestyle/2.9.1/) on PyPI
| E902 | IOError | IOError: `...` | |
| E999 | SyntaxError | SyntaxError: `...` | |
| W292 | NoNewLineAtEndOfFile | No newline at end of file | 🛠 |
| W505 | DocLineTooLong | Doc line too long (89 > 88 characters) | |
| W605 | InvalidEscapeSequence | Invalid escape sequence: '\c' | 🛠 |
### mccabe (C90)
@@ -920,8 +921,8 @@ For more, see [flake8-pytest-style](https://pypi.org/project/flake8-pytest-style
| PT001 | IncorrectFixtureParenthesesStyle | Use `@pytest.fixture()` over `@pytest.fixture` | 🛠 |
| PT002 | FixturePositionalArgs | Configuration for fixture `...` specified via positional args, use kwargs | |
| PT003 | ExtraneousScopeFunction | `scope='function'` is implied in `@pytest.fixture()` | |
| PT004 | MissingFixtureNameUnderscore | Fixture `...` does not return anything, add leading underscore | 🛠 |
| PT005 | IncorrectFixtureNameUnderscore | Fixture `...` returns a value, remove leading underscore | 🛠 |
| PT004 | MissingFixtureNameUnderscore | Fixture `...` does not return anything, add leading underscore | |
| PT005 | IncorrectFixtureNameUnderscore | Fixture `...` returns a value, remove leading underscore | |
| PT006 | ParametrizeNamesWrongType | Wrong name(s) type in `@pytest.mark.parametrize`, expected `tuple` | 🛠 |
| PT007 | ParametrizeValuesWrongType | Wrong values type in `@pytest.mark.parametrize` expected `list` of `tuple` | |
| PT008 | PatchWithLambda | Use `return_value=` instead of patching with `lambda` | |
@@ -975,6 +976,7 @@ For more, see [flake8-simplify](https://pypi.org/project/flake8-simplify/0.19.3/
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| SIM115 | OpenFileWithContextHandler | Use context handler for opening files | |
| SIM101 | DuplicateIsinstanceCall | Multiple `isinstance` calls for `...`, merge into a single call | 🛠 |
| SIM102 | NestedIfStatements | Use a single `if` statement instead of nested `if` statements | |
| SIM103 | ReturnBoolConditionDirectly | Return the condition `...` directly | 🛠 |
@@ -998,6 +1000,7 @@ For more, see [flake8-simplify](https://pypi.org/project/flake8-simplify/0.19.3/
| SIM222 | OrTrue | Use `True` instead of `... or True` | 🛠 |
| SIM223 | AndFalse | Use `False` instead of `... and False` | 🛠 |
| SIM300 | YodaConditions | Yoda conditions are discouraged, use `left == right` instead | 🛠 |
| SIM401 | DictGetWithDefault | Use `var = dict.get(key, "default")` instead of an `if` block | 🛠 |
### flake8-tidy-imports (TID)
@@ -1822,6 +1825,8 @@ Exclusions are based on globs, and can be either:
`directory`). Note that these paths are relative to the project root
(e.g., the directory containing your `pyproject.toml`).
For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
Note that you'll typically want to use
[`extend-exclude`](#extend-exclude) to modify the excluded paths.
@@ -1869,6 +1874,18 @@ line-length = 100
A list of file patterns to omit from linting, in addition to those
specified by `exclude`.
Exclusions are based on globs, and can be either:
- Single-path patterns, like `.mypy_cache` (to exclude any directory
named `.mypy_cache` in the tree), `foo.py` (to exclude any file named
`foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).
- Relative patterns, like `directory/foo.py` (to exclude that specific
file) or `directory/*.py` (to exclude any Python files in
`directory`). Note that these paths are relative to the project root
(e.g., the directory containing your `pyproject.toml`).
For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
**Default value**: `[]`
**Type**: `Vec<FilePattern>`
@@ -2340,7 +2357,7 @@ unfixable = ["F401"]
Enable or disable automatic update checks (overridden by the
`--update-check` and `--no-update-check` command-line flags).
**Default value**: `true`
**Default value**: `false`
**Type**: `bool`
@@ -2348,7 +2365,7 @@ Enable or disable automatic update checks (overridden by the
```toml
[tool.ruff]
update-check = false
update-check = true
```
---
@@ -3159,6 +3176,24 @@ ignore-overlong-task-comments = true
---
#### [`max-doc-length`](#max-doc-length)
The maximum line length to allow for line-length violations within
documentation (`W505`), including standalone comments.
**Default value**: `None`
**Type**: `usize`
**Example usage**:
```toml
[tool.ruff.pycodestyle]
max-doc-length = 88
```
---
### `pydocstyle`
#### [`convention`](#convention)

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.218"
version = "0.0.219"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.218"
version = "0.0.219"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.218-dev.0"
version = "0.0.219-dev.0"
edition = "2021"
[lib]

View File

@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "ruff"
version = "0.0.218"
version = "0.0.219"
description = "An extremely fast Python linter, written in Rust."
authors = [
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },
@@ -35,6 +35,7 @@ urls = { repository = "https://github.com/charliermarsh/ruff" }
[tool.maturin]
bindings = "bin"
python-source = "python"
strip = true
[tool.setuptools]

View File

@@ -2,3 +2,10 @@ x = list(x for x in range(3))
x = list(
x for x in range(3)
)
def list(*args, **kwargs):
return None
list(x for x in range(3))

View File

@@ -2,3 +2,10 @@ x = set(x for x in range(3))
x = set(
x for x in range(3)
)
def set(*args, **kwargs):
return None
set(x for x in range(3))

View File

@@ -3,3 +3,10 @@ l = list()
d1 = dict()
d2 = dict(a=1)
d3 = dict(**d2)
def list():
return [1, 2, 3]
a = list()

View File

@@ -4,3 +4,10 @@ list(sorted(x))
reversed(sorted(x))
reversed(sorted(x, key=lambda e: e))
reversed(sorted(x, reverse=True))
def reversed(*args, **kwargs):
return None
reversed(sorted(x, reverse=True))

View File

@@ -31,3 +31,10 @@ class User(BaseModel):
@buzz.setter
def buzz(self, value: str | int) -> None:
...
class User:
bar: str = StringField()
foo: bool = BooleanField()
# ...
bar = StringField() # PIE794

View File

@@ -17,6 +17,15 @@ def fun_with_params_no_docstring(a, b="""
""" """docstring"""):
pass
def fun_with_params_no_docstring2(a, b=c[foo():], c=\
""" not a docstring """):
pass
def function_with_single_docstring(a):
"Single line docstring"
def double_inside_single(a):
'Double inside "single "'

View File

@@ -13,11 +13,19 @@ def foo2():
def fun_with_params_no_docstring(a, b='''
not a
not a
''' '''docstring'''):
pass
def fun_with_params_no_docstring2(a, b=c[foo():], c=\
''' not a docstring '''):
pass
def function_with_single_docstring(a):
'Single line docstring'
def double_inside_single(a):
"Double inside 'single '"

View File

@@ -1,13 +1,13 @@
# Bad
# SIM108
if a:
b = c
else:
b = d
# Good
# OK
b = c if a else d
# https://github.com/MartinThoma/flake8-simplify/issues/115
# OK
if a:
b = c
elif c:
@@ -15,6 +15,7 @@ elif c:
else:
b = d
# OK
if True:
pass
elif a:
@@ -22,6 +23,7 @@ elif a:
else:
b = 2
# OK (false negative)
if True:
pass
else:
@@ -30,19 +32,62 @@ else:
else:
b = 2
import sys
# OK
if sys.version_info >= (3, 9):
randbytes = random.randbytes
else:
randbytes = _get_random_bytes
# OK
if sys.platform == "darwin":
randbytes = random.randbytes
else:
randbytes = _get_random_bytes
# OK
if sys.platform.startswith("linux"):
randbytes = random.randbytes
else:
randbytes = _get_random_bytes
# OK (includes comments)
if x > 0:
# test test
abc = x
else:
# test test test
abc = -x
# OK (too long)
if parser.errno == BAD_FIRST_LINE:
req = wrappers.Request(sock, server=self._server)
else:
req = wrappers.Request(
sock,
parser.get_method(),
parser.get_scheme() or _scheme,
parser.get_path(),
parser.get_version(),
parser.get_query_string(),
server=self._server,
)
# SIM108
if a:
b = cccccccccccccccccccccccccccccccccccc
else:
b = ddddddddddddddddddddddddddddddddddddd
# OK (too long)
if True:
if a:
b = cccccccccccccccccccccccccccccccccccc
else:
b = ddddddddddddddddddddddddddddddddddddd

View File

@@ -0,0 +1,6 @@
f = open('foo.txt') # SIM115
data = f.read()
f.close()
with open('foo.txt') as f: # OK
data = f.read()

View File

@@ -0,0 +1,81 @@
###
# Positive cases
###
# SIM401 (pattern-1)
if key in a_dict:
var = a_dict[key]
else:
var = "default1"
# SIM401 (pattern-2)
if key not in a_dict:
var = "default2"
else:
var = a_dict[key]
# SIM401 (default with a complex expression)
if key in a_dict:
var = a_dict[key]
else:
var = val1 + val2
# SIM401 (complex expression in key)
if keys[idx] in a_dict:
var = a_dict[keys[idx]]
else:
var = "default"
# SIM401 (complex expression in dict)
if key in dicts[idx]:
var = dicts[idx][key]
else:
var = "default"
# SIM401 (complex expression in var)
if key in a_dict:
vars[idx] = a_dict[key]
else:
vars[idx] = "default"
###
# Negative cases
###
# OK (false negative)
if not key in a_dict:
var = "default"
else:
var = a_dict[key]
# OK (different dict)
if key in a_dict:
var = other_dict[key]
else:
var = "default"
# OK (different key)
if key in a_dict:
var = a_dict[other_key]
else:
var = "default"
# OK (different var)
if key in a_dict:
var = a_dict[key]
else:
other_var = "default"
# OK (extra vars in body)
if key in a_dict:
var = a_dict[key]
var2 = value2
else:
var = "default"
# OK (extra vars in orelse)
if key in a_dict:
var = a_dict[key]
else:
var2 = value2
var = "default"

View File

@@ -181,3 +181,17 @@ def f(a: int, b: int) -> str:
def f(a, b):
return f"{a}{b}"
###
# Unused arguments on magic methods.
###
class C:
def __init__(self, x) -> None:
print("Hello, world!")
def __str__(self) -> str:
return "Hello, world!"
def __exit__(self, exc_type, exc_value, traceback) -> None:
print("Hello, world!")

View File

@@ -0,0 +1,15 @@
#!/usr/bin/env python3
"""Here's a top-level docstring that's over the limit."""
def f():
"""Here's a docstring that's also over the limit."""
x = 1 # Here's a comment that's over the limit, but it's not standalone.
# Here's a standalone comment that's over the limit.
print("Here's a string that's over the limit, but it's not a docstring.")
"This is also considered a docstring, and is over the limit."

View File

@@ -0,0 +1,8 @@
class SocketError(Exception):
pass
try:
raise SocketError()
except SocketError:
pass

View File

@@ -40,7 +40,7 @@
]
},
"exclude": {
"description": "A list of file patterns to exclude from linting.\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nNote that you'll typically want to use [`extend-exclude`](#extend-exclude) to modify the excluded paths.",
"description": "A list of file patterns to exclude from linting.\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).\n\nNote that you'll typically want to use [`extend-exclude`](#extend-exclude) to modify the excluded paths.",
"type": [
"array",
"null"
@@ -57,7 +57,7 @@
]
},
"extend-exclude": {
"description": "A list of file patterns to omit from linting, in addition to those specified by `exclude`.",
"description": "A list of file patterns to omit from linting, in addition to those specified by `exclude`.\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).",
"type": [
"array",
"null"
@@ -945,6 +945,15 @@
"boolean",
"null"
]
},
"max-doc-length": {
"description": "The maximum line length to allow for line-length violations within documentation (`W505`), including standalone comments.",
"type": [
"integer",
"null"
],
"format": "uint",
"minimum": 0.0
}
},
"additionalProperties": false
@@ -1520,6 +1529,7 @@
"SIM110",
"SIM111",
"SIM112",
"SIM115",
"SIM117",
"SIM118",
"SIM2",
@@ -1539,6 +1549,9 @@
"SIM3",
"SIM30",
"SIM300",
"SIM4",
"SIM40",
"SIM401",
"T",
"T1",
"T10",
@@ -1612,6 +1625,9 @@
"W2",
"W29",
"W292",
"W5",
"W50",
"W505",
"W6",
"W60",
"W605",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.218"
version = "0.0.219"
edition = "2021"
[lib]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_macros"
version = "0.0.218"
version = "0.0.219"
edition = "2021"
[lib]

View File

@@ -1,5 +1,12 @@
use rustpython_ast::{Expr, Stmt, StmtKind};
pub fn name(stmt: &Stmt) -> &str {
match &stmt.node {
StmtKind::FunctionDef { name, .. } | StmtKind::AsyncFunctionDef { name, .. } => name,
_ => panic!("Expected StmtKind::FunctionDef | StmtKind::AsyncFunctionDef"),
}
}
pub fn decorator_list(stmt: &Stmt) -> &Vec<Expr> {
match &stmt.node {
StmtKind::FunctionDef { decorator_list, .. }

View File

@@ -388,6 +388,12 @@ impl<'a> From<&'a Box<Expr>> for Box<ComparableExpr<'a>> {
}
}
impl<'a> From<&'a Box<Expr>> for ComparableExpr<'a> {
fn from(expr: &'a Box<Expr>) -> Self {
(&**expr).into()
}
}
impl<'a> From<&'a Expr> for ComparableExpr<'a> {
fn from(expr: &'a Expr) -> Self {
match &expr.node {

View File

@@ -430,6 +430,13 @@ pub fn collect_arg_names<'a>(arguments: &'a Arguments) -> FxHashSet<&'a str> {
arg_names
}
/// Returns `true` if a statement or expression includes at least one comment.
pub fn has_comments<T>(located: &Located<T>, locator: &SourceCodeLocator) -> bool {
lexer::make_tokenizer(&locator.slice_source_code_range(&Range::from_located(located)))
.flatten()
.any(|(_, tok, _)| matches!(tok, Tok::Comment(..)))
}
/// Returns `true` if a call is an argumented `super` invocation.
pub fn is_super_call_with_arguments(func: &Expr, args: &[Expr]) -> bool {
if let ExprKind::Name { id, .. } = &func.node {

View File

@@ -658,7 +658,7 @@ where
}
if self.settings.enabled.contains(&RuleCode::PIE794) {
flake8_pie::rules::dupe_class_field_definitions(self, bases, body);
flake8_pie::rules::dupe_class_field_definitions(self, stmt, body);
}
self.check_builtin_shadowing(name, stmt, false);
@@ -1207,14 +1207,14 @@ where
}
if self.settings.enabled.contains(&RuleCode::UP024) {
if let Some(item) = exc {
pyupgrade::rules::os_error_alias(self, item);
pyupgrade::rules::os_error_alias(self, &item);
}
}
}
StmtKind::AugAssign { target, .. } => {
self.handle_node_load(target);
}
StmtKind::If { test, .. } => {
StmtKind::If { test, body, orelse } => {
if self.settings.enabled.contains(&RuleCode::F634) {
pyflakes::rules::if_tuple(self, stmt, test);
}
@@ -1231,6 +1231,11 @@ where
self.current_stmt_parent().map(|parent| parent.0),
);
}
if self.settings.enabled.contains(&RuleCode::SIM401) {
flake8_simplify::rules::use_dict_get_with_default(
self, stmt, test, body, orelse,
);
}
}
StmtKind::Assert { test, msg } => {
if self.settings.enabled.contains(&RuleCode::F631) {
@@ -1333,7 +1338,7 @@ where
flake8_bugbear::rules::redundant_tuple_in_exception_handler(self, handlers);
}
if self.settings.enabled.contains(&RuleCode::UP024) {
pyupgrade::rules::os_error_alias(self, handlers);
pyupgrade::rules::os_error_alias(self, &handlers);
}
if self.settings.enabled.contains(&RuleCode::PT017) {
self.diagnostics.extend(
@@ -1921,7 +1926,7 @@ where
pyupgrade::rules::replace_stdout_stderr(self, expr, keywords);
}
if self.settings.enabled.contains(&RuleCode::UP024) {
pyupgrade::rules::os_error_alias(self, expr);
pyupgrade::rules::os_error_alias(self, &expr);
}
// flake8-print
@@ -2048,205 +2053,75 @@ where
// flake8-comprehensions
if self.settings.enabled.contains(&RuleCode::C400) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_generator_list(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C400),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_generator_list(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C401) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_generator_set(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C401),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_generator_set(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C402) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_generator_dict(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C402),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_generator_dict(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C403) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_list_comprehension_set(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C403),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_list_comprehension_set(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C404) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_list_comprehension_dict(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C404),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_list_comprehension_dict(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C405) {
if let Some(diagnostic) = flake8_comprehensions::rules::unnecessary_literal_set(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C405),
Range::from_located(expr),
) {
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_literal_set(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C406) {
if let Some(diagnostic) = flake8_comprehensions::rules::unnecessary_literal_dict(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C406),
Range::from_located(expr),
) {
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_literal_dict(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C408) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_collection_call(
expr,
func,
args,
keywords,
self.locator,
self.patch(&RuleCode::C408),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_collection_call(
self, expr, func, args, keywords,
);
}
if self.settings.enabled.contains(&RuleCode::C409) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_literal_within_tuple_call(
expr,
func,
args,
self.locator,
self.patch(&RuleCode::C409),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_literal_within_tuple_call(
self, expr, func, args,
);
}
if self.settings.enabled.contains(&RuleCode::C410) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_literal_within_list_call(
expr,
func,
args,
self.locator,
self.patch(&RuleCode::C410),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_literal_within_list_call(
self, expr, func, args,
);
}
if self.settings.enabled.contains(&RuleCode::C411) {
if let Some(diagnostic) = flake8_comprehensions::rules::unnecessary_list_call(
expr,
func,
args,
self.locator,
self.patch(&RuleCode::C411),
Range::from_located(expr),
) {
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_list_call(self, expr, func, args);
}
if self.settings.enabled.contains(&RuleCode::C413) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_call_around_sorted(
expr,
func,
args,
self.locator,
self.patch(&RuleCode::C413),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_call_around_sorted(
self, expr, func, args,
);
}
if self.settings.enabled.contains(&RuleCode::C414) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_double_cast_or_process(
func,
args,
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_double_cast_or_process(
self, expr, func, args,
);
}
if self.settings.enabled.contains(&RuleCode::C415) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_subscript_reversal(
func,
args,
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_subscript_reversal(
self, expr, func, args,
);
}
if self.settings.enabled.contains(&RuleCode::C417) {
if let Some(diagnostic) = flake8_comprehensions::rules::unnecessary_map(
func,
args,
Range::from_located(expr),
) {
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_map(self, expr, func, args);
}
// flake8-boolean-trap
@@ -2451,6 +2326,11 @@ where
args, keywords,
));
}
// flake8-simplify
if self.settings.enabled.contains(&RuleCode::SIM115) {
flake8_simplify::rules::open_file_with_context_handler(self, func);
}
}
ExprKind::Dict { keys, values } => {
if self.settings.enabled.contains(&RuleCode::F601)
@@ -2790,18 +2670,9 @@ where
}
ExprKind::ListComp { elt, generators } | ExprKind::SetComp { elt, generators } => {
if self.settings.enabled.contains(&RuleCode::C416) {
if let Some(diagnostic) =
flake8_comprehensions::rules::unnecessary_comprehension(
expr,
elt,
generators,
self.locator,
self.patch(&RuleCode::C416),
Range::from_located(expr),
)
{
self.diagnostics.push(diagnostic);
};
flake8_comprehensions::rules::unnecessary_comprehension(
self, expr, elt, generators,
);
}
if self.settings.enabled.contains(&RuleCode::B023) {
flake8_bugbear::rules::function_uses_loop_variable(self, &Node::Expr(expr));

View File

@@ -1,6 +1,6 @@
//! Lint rules based on checking raw physical lines.
use crate::pycodestyle::rules::{line_too_long, no_newline_at_end_of_file};
use crate::pycodestyle::rules::{doc_line_too_long, line_too_long, no_newline_at_end_of_file};
use crate::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
use crate::pyupgrade::rules::unnecessary_coding_comment;
use crate::registry::{Diagnostic, RuleCode};
@@ -9,18 +9,21 @@ use crate::settings::{flags, Settings};
pub fn check_lines(
contents: &str,
commented_lines: &[usize],
doc_lines: &[usize],
settings: &Settings,
autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let enforce_unnecessary_coding_comment = settings.enabled.contains(&RuleCode::UP009);
let enforce_blanket_noqa = settings.enabled.contains(&RuleCode::PGH004);
let enforce_blanket_type_ignore = settings.enabled.contains(&RuleCode::PGH003);
let enforce_doc_line_too_long = settings.enabled.contains(&RuleCode::W505);
let enforce_line_too_long = settings.enabled.contains(&RuleCode::E501);
let enforce_no_newline_at_end_of_file = settings.enabled.contains(&RuleCode::W292);
let enforce_blanket_type_ignore = settings.enabled.contains(&RuleCode::PGH003);
let enforce_blanket_noqa = settings.enabled.contains(&RuleCode::PGH004);
let enforce_unnecessary_coding_comment = settings.enabled.contains(&RuleCode::UP009);
let mut commented_lines_iter = commented_lines.iter().peekable();
let mut doc_lines_iter = doc_lines.iter().peekable();
for (index, line) in contents.lines().enumerate() {
while commented_lines_iter
.next_if(|lineno| &(index + 1) == *lineno)
@@ -40,18 +43,25 @@ pub fn check_lines(
}
if enforce_blanket_type_ignore {
if commented_lines.contains(&(index + 1)) {
if let Some(diagnostic) = blanket_type_ignore(index, line) {
diagnostics.push(diagnostic);
}
if let Some(diagnostic) = blanket_type_ignore(index, line) {
diagnostics.push(diagnostic);
}
}
if enforce_blanket_noqa {
if commented_lines.contains(&(index + 1)) {
if let Some(diagnostic) = blanket_noqa(index, line) {
diagnostics.push(diagnostic);
}
if let Some(diagnostic) = blanket_noqa(index, line) {
diagnostics.push(diagnostic);
}
}
}
while doc_lines_iter
.next_if(|lineno| &(index + 1) == *lineno)
.is_some()
{
if enforce_doc_line_too_long {
if let Some(diagnostic) = doc_line_too_long(index, line, settings) {
diagnostics.push(diagnostic);
}
}
}
@@ -90,6 +100,7 @@ mod tests {
check_lines(
line,
&[],
&[],
&Settings {
line_length,
..Settings::for_rule(RuleCode::E501)

View File

@@ -61,33 +61,33 @@ pub struct Cli {
pub isolated: bool,
/// Comma-separated list of rule codes to enable (or ALL, to enable all
/// rules).
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub select: Option<Vec<RuleCodePrefix>>,
/// Like --select, but adds additional rule codes on top of the selected
/// ones.
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub extend_select: Option<Vec<RuleCodePrefix>>,
/// Comma-separated list of rule codes to disable.
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub ignore: Option<Vec<RuleCodePrefix>>,
/// Like --ignore, but adds additional rule codes on top of the ignored
/// ones.
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub extend_ignore: Option<Vec<RuleCodePrefix>>,
/// List of paths, used to omit files and/or directories from analysis.
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "FILE_PATTERN")]
pub exclude: Option<Vec<FilePattern>>,
/// Like --exclude, but adds additional files and directories on top of
/// those already excluded.
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "FILE_PATTERN")]
pub extend_exclude: Option<Vec<FilePattern>>,
/// List of rule codes to treat as eligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub fixable: Option<Vec<RuleCodePrefix>>,
/// List of rule codes to treat as ineligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
#[arg(long, value_delimiter = ',', value_name = "RULE_CODE")]
pub unfixable: Option<Vec<RuleCodePrefix>>,
/// List of mappings from file pattern to code to exclude
#[arg(long, value_delimiter = ',')]

58
src/doc_lines.rs Normal file
View File

@@ -0,0 +1,58 @@
//! Doc line extraction. In this context, a doc line is a line consisting of a
//! standalone comment or a constant string statement.
use rustpython_ast::{Constant, ExprKind, Stmt, StmtKind, Suite};
use rustpython_parser::lexer::{LexResult, Tok};
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
/// Extract doc lines (standalone comments) from a token sequence.
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> Vec<usize> {
let mut doc_lines: Vec<usize> = Vec::default();
let mut prev: Option<usize> = None;
for (start, tok, end) in lxr.iter().flatten() {
if matches!(tok, Tok::Indent | Tok::Dedent | Tok::Newline) {
continue;
}
if matches!(tok, Tok::Comment(..)) {
if let Some(prev) = prev {
if start.row() > prev {
doc_lines.push(start.row());
}
} else {
doc_lines.push(start.row());
}
}
prev = Some(end.row());
}
doc_lines
}
#[derive(Default)]
struct StringLinesVisitor {
string_lines: Vec<usize>,
}
impl Visitor<'_> for StringLinesVisitor {
fn visit_stmt(&mut self, stmt: &Stmt) {
if let StmtKind::Expr { value } = &stmt.node {
if let ExprKind::Constant {
value: Constant::Str(..),
..
} = &value.node
{
self.string_lines
.extend(value.location.row()..=value.end_location.unwrap().row());
}
}
visitor::walk_stmt(self, stmt);
}
}
/// Extract doc lines (standalone strings) from an AST.
pub fn doc_lines_from_ast(python_ast: &Suite) -> Vec<usize> {
let mut visitor = StringLinesVisitor::default();
visitor.visit_body(python_ast);
visitor.string_lines
}

View File

@@ -319,7 +319,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
helpers::identifier_range(stmt, checker.locator),
));
}
} else if visibility::is_init(stmt) {
} else if visibility::is_init(cast::name(stmt)) {
// Allow omission of return annotation in `__init__` functions, as long as at
// least one argument is typed.
if checker.settings.enabled.contains(&RuleCode::ANN204) {
@@ -341,7 +341,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
checker.diagnostics.push(diagnostic);
}
}
} else if visibility::is_magic(stmt) {
} else if visibility::is_magic(cast::name(stmt)) {
if checker.settings.enabled.contains(&RuleCode::ANN204) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypeSpecialMethod(name.to_string()),

View File

@@ -1,13 +1,11 @@
use log::error;
use num_bigint::BigInt;
use rustpython_ast::{
Comprehension, Constant, Expr, ExprKind, Keyword, KeywordData, Located, Unaryop,
};
use rustpython_ast::{Comprehension, Constant, Expr, ExprKind, Keyword, Unaryop};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::flake8_comprehensions::fixes;
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;
fn function_name(func: &Expr) -> Option<&str> {
@@ -41,237 +39,266 @@ fn first_argument_with_matching_function<'a>(
func: &Expr,
args: &'a [Expr],
) -> Option<&'a ExprKind> {
if function_name(func)? != name {
return None;
if function_name(func)? == name {
Some(&args.first()?.node)
} else {
None
}
Some(&args.first()?.node)
}
/// C400 (`list(generator)`)
pub fn unnecessary_generator_list(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("list", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("list", func, args, keywords) else {
return;
};
if !checker.is_builtin("list") {
return;
}
if let ExprKind::GeneratorExp { .. } = argument {
let mut diagnostic = Diagnostic::new(violations::UnnecessaryGeneratorList, location);
if fix {
match fixes::fix_unnecessary_generator_list(locator, expr) {
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryGeneratorList,
Range::from_located(expr),
);
if checker.patch(&RuleCode::C400) {
match fixes::fix_unnecessary_generator_list(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(diagnostic);
checker.diagnostics.push(diagnostic);
}
None
}
/// C401 (`set(generator)`)
pub fn unnecessary_generator_set(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("set", func, args, keywords) else {
return;
};
if !checker.is_builtin("set") {
return;
}
if let ExprKind::GeneratorExp { .. } = argument {
let mut diagnostic = Diagnostic::new(violations::UnnecessaryGeneratorSet, location);
if fix {
match fixes::fix_unnecessary_generator_set(locator, expr) {
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryGeneratorSet,
Range::from_located(expr),
);
if checker.patch(&RuleCode::C401) {
match fixes::fix_unnecessary_generator_set(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(diagnostic);
checker.diagnostics.push(diagnostic);
}
None
}
/// C402 (`dict((x, y) for x, y in iterable)`)
pub fn unnecessary_generator_dict(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("dict", func, args, keywords) else {
return;
};
if let ExprKind::GeneratorExp { elt, .. } = argument {
match &elt.node {
ExprKind::Tuple { elts, .. } if elts.len() == 2 => {
let mut diagnostic =
Diagnostic::new(violations::UnnecessaryGeneratorDict, location);
if fix {
match fixes::fix_unnecessary_generator_dict(locator, expr) {
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryGeneratorDict,
Range::from_located(expr),
);
if checker.patch(&RuleCode::C402) {
match fixes::fix_unnecessary_generator_dict(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(diagnostic);
checker.diagnostics.push(diagnostic);
}
_ => {}
}
}
None
}
/// C403 (`set([...])`)
pub fn unnecessary_list_comprehension_set(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("set", func, args, keywords) else {
return;
};
if !checker.is_builtin("set") {
return;
}
if let ExprKind::ListComp { .. } = &argument {
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListComprehensionSet, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_set(locator, expr) {
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryListComprehensionSet,
Range::from_located(expr),
);
if checker.patch(&RuleCode::C403) {
match fixes::fix_unnecessary_list_comprehension_set(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(diagnostic);
checker.diagnostics.push(diagnostic);
}
None
}
/// C404 (`dict([...])`)
pub fn unnecessary_list_comprehension_dict(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("dict", func, args, keywords) else {
return;
};
if !checker.is_builtin("dict") {
return;
}
let ExprKind::ListComp { elt, .. } = &argument else {
return None;
return;
};
let ExprKind::Tuple { elts, .. } = &elt.node else {
return None;
return;
};
if elts.len() != 2 {
return None;
return;
}
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListComprehensionDict, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_dict(locator, expr) {
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryListComprehensionDict,
Range::from_located(expr),
);
if checker.patch(&RuleCode::C404) {
match fixes::fix_unnecessary_list_comprehension_dict(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C405 (`set([1, 2])`)
pub fn unnecessary_literal_set(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("set", func, args, keywords) else {
return;
};
if !checker.is_builtin("set") {
return;
}
let kind = match argument {
ExprKind::List { .. } => "list",
ExprKind::Tuple { .. } => "tuple",
_ => return None,
_ => return,
};
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralSet(kind.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_literal_set(locator, expr) {
if checker.patch(&RuleCode::C405) {
match fixes::fix_unnecessary_literal_set(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C406 (`dict([(1, 2)])`)
pub fn unnecessary_literal_dict(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
) {
let Some(argument) = exactly_one_argument_with_matching_function("dict", func, args, keywords) else {
return;
};
if !checker.is_builtin("dict") {
return;
}
let (kind, elts) = match argument {
ExprKind::Tuple { elts, .. } => ("tuple", elts),
ExprKind::List { elts, .. } => ("list", elts),
_ => return None,
_ => return,
};
// Accept `dict((1, 2), ...))` `dict([(1, 2), ...])`.
if !elts
.iter()
.all(|elt| matches!(&elt.node, ExprKind::Tuple { elts, .. } if elts.len() == 2))
{
return None;
return;
}
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralDict(kind.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_literal_dict(locator, expr) {
if checker.patch(&RuleCode::C406) {
match fixes::fix_unnecessary_literal_dict(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C408
pub fn unnecessary_collection_call(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
keywords: &[Located<KeywordData>],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
keywords: &[Keyword],
) {
if !args.is_empty() {
return None;
return;
}
let id = function_name(func)?;
let Some(id) = function_name(func) else {
return;
};
match id {
"dict" if keywords.is_empty() || keywords.iter().all(|kw| kw.node.arg.is_some()) => {
// `dict()` or `dict(a=1)` (as opposed to `dict(**a)`)
@@ -279,296 +306,377 @@ pub fn unnecessary_collection_call(
"list" | "tuple" => {
// `list()` or `tuple()`
}
_ => return None,
_ => return,
};
if !checker.is_builtin(id) {
return;
}
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryCollectionCall(id.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_collection_call(locator, expr) {
if checker.patch(&RuleCode::C408) {
match fixes::fix_unnecessary_collection_call(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C409
pub fn unnecessary_literal_within_tuple_call(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("tuple", func, args)?;
) {
let Some(argument) = first_argument_with_matching_function("tuple", func, args) else {
return;
};
if !checker.is_builtin("tuple") {
return;
}
let argument_kind = match argument {
ExprKind::Tuple { .. } => "tuple",
ExprKind::List { .. } => "list",
_ => return None,
_ => return,
};
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralWithinTupleCall(argument_kind.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_literal_within_tuple_call(locator, expr) {
if checker.patch(&RuleCode::C409) {
match fixes::fix_unnecessary_literal_within_tuple_call(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C410
pub fn unnecessary_literal_within_list_call(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("list", func, args)?;
) {
let Some(argument) = first_argument_with_matching_function("list", func, args) else {
return;
};
if !checker.is_builtin("list") {
return;
}
let argument_kind = match argument {
ExprKind::Tuple { .. } => "tuple",
ExprKind::List { .. } => "list",
_ => return None,
_ => return,
};
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralWithinListCall(argument_kind.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_literal_within_list_call(locator, expr) {
if checker.patch(&RuleCode::C410) {
match fixes::fix_unnecessary_literal_within_list_call(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C411
pub fn unnecessary_list_call(
expr: &Expr,
func: &Expr,
args: &[Expr],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("list", func, args)?;
if !matches!(argument, ExprKind::ListComp { .. }) {
return None;
pub fn unnecessary_list_call(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
let Some(argument) = first_argument_with_matching_function("list", func, args) else {
return;
};
if !checker.is_builtin("list") {
return;
}
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListCall, location);
if fix {
match fixes::fix_unnecessary_list_call(locator, expr) {
if !matches!(argument, ExprKind::ListComp { .. }) {
return;
}
let mut diagnostic =
Diagnostic::new(violations::UnnecessaryListCall, Range::from_located(expr));
if checker.patch(&RuleCode::C411) {
match fixes::fix_unnecessary_list_call(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C413
pub fn unnecessary_call_around_sorted(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
let outer = function_name(func)?;
if !(outer == "list" || outer == "reversed") {
return None;
}
let ExprKind::Call { func, .. } = &args.first()?.node else {
return None;
) {
let Some(outer) = function_name(func) else {
return;
};
if function_name(func)? != "sorted" {
return None;
if !(outer == "list" || outer == "reversed") {
return;
}
let Some(arg) = args.first() else {
return;
};
let ExprKind::Call { func, .. } = &arg.node else {
return;
};
let Some(inner) = function_name(func) else {
return;
};
if inner != "sorted" {
return;
}
if !checker.is_builtin(inner) || !checker.is_builtin(outer) {
return;
}
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryCallAroundSorted(outer.to_string()),
location,
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_call_around_sorted(locator, expr) {
if checker.patch(&RuleCode::C413) {
match fixes::fix_unnecessary_call_around_sorted(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C414
pub fn unnecessary_double_cast_or_process(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
location: Range,
) -> Option<Diagnostic> {
fn new_check(inner: &str, outer: &str, location: Range) -> Diagnostic {
) {
fn diagnostic(inner: &str, outer: &str, location: Range) -> Diagnostic {
Diagnostic::new(
violations::UnnecessaryDoubleCastOrProcess(inner.to_string(), outer.to_string()),
location,
)
}
let outer = function_name(func)?;
if !["list", "tuple", "set", "reversed", "sorted"].contains(&outer) {
return None;
let Some(outer) = function_name(func) else {
return;
};
if !(outer == "list"
|| outer == "tuple"
|| outer == "set"
|| outer == "reversed"
|| outer == "sorted")
{
return;
}
let Some(arg) = args.first() else {
return;
};
let ExprKind::Call { func, .. } = &arg.node else {
return;
};
let Some(inner) = function_name(func) else {
return;
};
if !checker.is_builtin(inner) || !checker.is_builtin(outer) {
return;
}
let ExprKind::Call { func, .. } = &args.first()?.node else {
return None;
};
let inner = function_name(func)?;
// Ex) set(tuple(...))
if (outer == "set" || outer == "sorted")
&& (inner == "list" || inner == "tuple" || inner == "reversed" || inner == "sorted")
{
return Some(new_check(inner, outer, location));
checker
.diagnostics
.push(diagnostic(inner, outer, Range::from_located(expr)));
return;
}
// Ex) list(tuple(...))
if (outer == "list" || outer == "tuple") && (inner == "list" || inner == "tuple") {
return Some(new_check(inner, outer, location));
checker
.diagnostics
.push(diagnostic(inner, outer, Range::from_located(expr)));
return;
}
// Ex) set(set(...))
if outer == "set" && inner == "set" {
return Some(new_check(inner, outer, location));
checker
.diagnostics
.push(diagnostic(inner, outer, Range::from_located(expr)));
}
None
}
/// C415
pub fn unnecessary_subscript_reversal(
checker: &mut Checker,
expr: &Expr,
func: &Expr,
args: &[Expr],
location: Range,
) -> Option<Diagnostic> {
let first_arg = args.first()?;
let id = function_name(func)?;
if !["set", "sorted", "reversed"].contains(&id) {
return None;
) {
let Some(first_arg) = args.first() else {
return;
};
let Some(id) = function_name(func) else {
return;
};
if !(id == "set" || id == "sorted" || id == "reversed") {
return;
}
if !checker.is_builtin(id) {
return;
}
let ExprKind::Subscript { slice, .. } = &first_arg.node else {
return None;
return;
};
let ExprKind::Slice { lower, upper, step } = &slice.node else {
return None;
return;
};
if lower.is_some() || upper.is_some() {
return None;
return;
}
let Some(step) = step.as_ref() else {
return;
};
let ExprKind::UnaryOp {
op: Unaryop::USub,
operand,
} = &step.as_ref()?.node else {
return None;
} = &step.node else {
return;
};
let ExprKind::Constant {
value: Constant::Int(val),
..
} = &operand.node else {
return None;
return;
};
if *val != BigInt::from(1) {
return None;
return;
};
Some(Diagnostic::new(
checker.diagnostics.push(Diagnostic::new(
violations::UnnecessarySubscriptReversal(id.to_string()),
location,
))
Range::from_located(expr),
));
}
/// C416
pub fn unnecessary_comprehension(
checker: &mut Checker,
expr: &Expr,
elt: &Expr,
generators: &[Comprehension],
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Diagnostic> {
) {
if generators.len() != 1 {
return None;
return;
}
let generator = &generators[0];
if !(generator.ifs.is_empty() && generator.is_async == 0) {
return None;
return;
}
let elt_id = function_name(elt)?;
let target_id = function_name(&generator.target)?;
let Some(elt_id) = function_name(elt) else {
return;
};
let Some(target_id) = function_name(&generator.target) else {
return;
};
if elt_id != target_id {
return None;
return;
}
let expr_kind = match &expr.node {
let id = match &expr.node {
ExprKind::ListComp { .. } => "list",
ExprKind::SetComp { .. } => "set",
_ => return None,
_ => return,
};
if !checker.is_builtin(id) {
return;
}
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryComprehension(expr_kind.to_string()),
location,
violations::UnnecessaryComprehension(id.to_string()),
Range::from_located(expr),
);
if fix {
match fixes::fix_unnecessary_comprehension(locator, expr) {
if checker.patch(&RuleCode::C416) {
match fixes::fix_unnecessary_comprehension(checker.locator, expr) {
Ok(fix) => {
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(diagnostic)
checker.diagnostics.push(diagnostic);
}
/// C417
pub fn unnecessary_map(func: &Expr, args: &[Expr], location: Range) -> Option<Diagnostic> {
fn new_check(kind: &str, location: Range) -> Diagnostic {
pub fn unnecessary_map(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
fn diagnostic(kind: &str, location: Range) -> Diagnostic {
Diagnostic::new(violations::UnnecessaryMap(kind.to_string()), location)
}
let id = function_name(func)?;
let Some(id) = function_name(func) else {
return;
};
match id {
"map" => {
if !checker.is_builtin(id) {
return;
}
if args.len() == 2 && matches!(&args[0].node, ExprKind::Lambda { .. }) {
return Some(new_check("generator", location));
checker
.diagnostics
.push(diagnostic("generator", Range::from_located(expr)));
}
}
"list" | "set" => {
if let ExprKind::Call { func, args, .. } = &args.first()?.node {
let argument = first_argument_with_matching_function("map", func, args)?;
if let ExprKind::Lambda { .. } = argument {
return Some(new_check(id, location));
if !checker.is_builtin(id) {
return;
}
if let Some(arg) = args.first() {
if let ExprKind::Call { func, args, .. } = &arg.node {
let Some(argument) = first_argument_with_matching_function("map", func, args) else {
return;
};
if let ExprKind::Lambda { .. } = argument {
checker
.diagnostics
.push(diagnostic(id, Range::from_located(expr)));
}
}
}
}
"dict" => {
if !checker.is_builtin(id) {
return;
}
if args.len() == 1 {
if let ExprKind::Call { func, args, .. } = &args[0].node {
let argument = first_argument_with_matching_function("map", func, args)?;
let Some(argument) = first_argument_with_matching_function("map", func, args) else {
return;
};
if let ExprKind::Lambda { body, .. } = &argument {
if matches!(&body.node, ExprKind::Tuple { elts, .. } | ExprKind::List { elts, .. } if elts.len() == 2)
{
return Some(new_check(id, location));
checker
.diagnostics
.push(diagnostic(id, Range::from_located(expr)));
}
}
}
@@ -576,5 +684,4 @@ pub fn unnecessary_map(func: &Expr, args: &[Expr], location: Range) -> Option<Di
}
_ => (),
}
None
}

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_comprehensions/mod.rs
expression: checks
expression: diagnostics
---
- kind:
UnnecessaryGeneratorList: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_comprehensions/mod.rs
expression: checks
expression: diagnostics
---
- kind:
UnnecessaryGeneratorSet: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_comprehensions/mod.rs
expression: checks
expression: diagnostics
---
- kind:
UnnecessaryGeneratorDict: ~

View File

@@ -2,7 +2,7 @@ use log::error;
use rustc_hash::FxHashSet;
use rustpython_ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::ast::types::{Range, RefEquality};
use crate::autofix::helpers::delete_stmt;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
@@ -48,12 +48,14 @@ pub fn no_unnecessary_pass(checker: &mut Checker, body: &[Stmt]) {
}
/// PIE794
pub fn dupe_class_field_definitions(checker: &mut Checker, bases: &[Expr], body: &[Stmt]) {
if bases.is_empty() {
return;
}
let mut seen_targets = FxHashSet::default();
pub fn dupe_class_field_definitions<'a, 'b>(
checker: &mut Checker<'a>,
parent: &'b Stmt,
body: &'b [Stmt],
) where
'b: 'a,
{
let mut seen_targets: FxHashSet<&str> = FxHashSet::default();
for stmt in body {
// Extract the property name from the assignment statement.
let target = match &stmt.node {
@@ -77,17 +79,29 @@ pub fn dupe_class_field_definitions(checker: &mut Checker, bases: &[Expr], body:
_ => continue,
};
if seen_targets.contains(target) {
if !seen_targets.insert(target) {
let mut diagnostic = Diagnostic::new(
violations::DupeClassFieldDefinitions(target.to_string()),
Range::from_located(stmt),
);
if checker.patch(&RuleCode::PIE794) {
diagnostic.amend(Fix::deletion(stmt.location, stmt.end_location.unwrap()));
let deleted: Vec<&Stmt> = checker
.deletions
.iter()
.map(std::convert::Into::into)
.collect();
let locator = checker.locator;
match delete_stmt(stmt, Some(parent), &deleted, locator) {
Ok(fix) => {
checker.deletions.insert(RefEquality(stmt));
diagnostic.amend(fix);
}
Err(err) => {
error!("Failed to remove duplicate class definition: {}", err);
}
}
}
checker.diagnostics.push(diagnostic);
} else {
seen_targets.insert(target);
}
}
}

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_pie/mod.rs
expression: checks
expression: diagnostics
---
- kind:
DupeClassFieldDefinitions: name
@@ -14,10 +14,10 @@ expression: checks
content: ""
location:
row: 4
column: 4
column: 0
end_location:
row: 4
column: 24
row: 5
column: 0
parent: ~
- kind:
DupeClassFieldDefinitions: name
@@ -31,10 +31,10 @@ expression: checks
content: ""
location:
row: 13
column: 4
column: 0
end_location:
row: 13
column: 24
row: 14
column: 0
parent: ~
- kind:
DupeClassFieldDefinitions: bar
@@ -48,9 +48,26 @@ expression: checks
content: ""
location:
row: 23
column: 4
column: 0
end_location:
row: 23
column: 23
row: 24
column: 0
parent: ~
- kind:
DupeClassFieldDefinitions: bar
location:
row: 40
column: 4
end_location:
row: 40
column: 23
fix:
content: ""
location:
row: 40
column: 0
end_location:
row: 41
column: 0
parent: ~

View File

@@ -36,8 +36,8 @@ fn good_multiline_ending(quote: &Quote) -> &str {
fn good_docstring(quote: &Quote) -> &str {
match quote {
Quote::Single => "'''",
Quote::Double => "\"\"\"",
Quote::Single => "'",
Quote::Double => "\"",
}
}

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -45,10 +45,10 @@ expression: checks
- kind:
BadQuotesMultilineString: single
location:
row: 21
row: 22
column: 4
end_location:
row: 21
row: 22
column: 27
fix: ~
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: double
@@ -22,4 +22,14 @@ expression: checks
column: 7
fix: ~
parent: ~
- kind:
BadQuotesDocstring: double
location:
row: 27
column: 4
end_location:
row: 27
column: 27
fix: ~
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: single
@@ -22,4 +22,14 @@ expression: checks
column: 7
fix: ~
parent: ~
- kind:
BadQuotesDocstring: single
location:
row: 27
column: 4
end_location:
row: 27
column: 27
fix: ~
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -45,10 +45,10 @@ expression: checks
- kind:
BadQuotesMultilineString: double
location:
row: 21
row: 22
column: 4
end_location:
row: 21
row: 22
column: 27
fix: ~
parent: ~

View File

@@ -22,19 +22,21 @@ mod tests {
#[test_case(RuleCode::SIM110, Path::new("SIM110.py"); "SIM110")]
#[test_case(RuleCode::SIM111, Path::new("SIM111.py"); "SIM111")]
#[test_case(RuleCode::SIM112, Path::new("SIM112.py"); "SIM112")]
#[test_case(RuleCode::SIM115, Path::new("SIM115.py"); "SIM115")]
#[test_case(RuleCode::SIM117, Path::new("SIM117.py"); "SIM117")]
#[test_case(RuleCode::SIM118, Path::new("SIM118.py"); "SIM118")]
#[test_case(RuleCode::SIM201, Path::new("SIM201.py"); "SIM201")]
#[test_case(RuleCode::SIM202, Path::new("SIM202.py"); "SIM202")]
#[test_case(RuleCode::SIM208, Path::new("SIM208.py"); "SIM208")]
#[test_case(RuleCode::SIM210, Path::new("SIM210.py"); "SIM210")]
#[test_case(RuleCode::SIM211, Path::new("SIM211.py"); "SIM211")]
#[test_case(RuleCode::SIM212, Path::new("SIM212.py"); "SIM212")]
#[test_case(RuleCode::SIM118, Path::new("SIM118.py"); "SIM118")]
#[test_case(RuleCode::SIM220, Path::new("SIM220.py"); "SIM220")]
#[test_case(RuleCode::SIM221, Path::new("SIM221.py"); "SIM221")]
#[test_case(RuleCode::SIM222, Path::new("SIM222.py"); "SIM222")]
#[test_case(RuleCode::SIM223, Path::new("SIM223.py"); "SIM223")]
#[test_case(RuleCode::SIM300, Path::new("SIM300.py"); "SIM300")]
#[test_case(RuleCode::SIM401, Path::new("SIM401.py"); "SIM401")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -1,7 +1,8 @@
use rustpython_ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
use rustpython_ast::{Cmpop, Constant, Expr, ExprContext, ExprKind, Stmt, StmtKind};
use crate::ast::comparable::ComparableExpr;
use crate::ast::helpers::{
contains_call_path, create_expr, create_stmt, unparse_expr, unparse_stmt,
contains_call_path, create_expr, create_stmt, has_comments, unparse_expr, unparse_stmt,
};
use crate::ast::types::Range;
use crate::autofix::Fix;
@@ -201,14 +202,127 @@ pub fn use_ternary_operator(checker: &mut Checker, stmt: &Stmt, parent: Option<&
let target_var = &body_targets[0];
let ternary = ternary(target_var, body_value, test, orelse_value);
let content = unparse_stmt(&ternary, checker.style);
let contents = unparse_stmt(&ternary, checker.style);
// Don't flag for simplified ternaries if the resulting expression would exceed
// the maximum line length.
if stmt.location.column() + contents.len() > checker.settings.line_length {
return;
}
// Don't flag for simplified ternaries if the if-expression contains any
// comments.
if has_comments(stmt, checker.locator) {
return;
}
let mut diagnostic = Diagnostic::new(
violations::UseTernaryOperator(content.clone()),
violations::UseTernaryOperator(contents.clone()),
Range::from_located(stmt),
);
if checker.patch(&RuleCode::SIM108) {
diagnostic.amend(Fix::replacement(
content,
contents,
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.diagnostics.push(diagnostic);
}
fn compare_expr(expr1: &ComparableExpr, expr2: &ComparableExpr) -> bool {
expr1.eq(expr2)
}
/// SIM401
pub fn use_dict_get_with_default(
checker: &mut Checker,
stmt: &Stmt,
test: &Expr,
body: &Vec<Stmt>,
orelse: &Vec<Stmt>,
) {
if body.len() != 1 || orelse.len() != 1 {
return;
}
let StmtKind::Assign { targets: body_var, value: body_val, ..} = &body[0].node else {
return;
};
if body_var.len() != 1 {
return;
};
let StmtKind::Assign { targets: orelse_var, value: orelse_val, .. } = &orelse[0].node else {
return;
};
if orelse_var.len() != 1 {
return;
};
let ExprKind::Compare { left: test_key, ops , comparators: test_dict } = &test.node else {
return;
};
if test_dict.len() != 1 {
return;
}
let (expected_var, expected_val, default_var, default_val) = match ops[..] {
[Cmpop::In] => (&body_var[0], body_val, &orelse_var[0], orelse_val),
[Cmpop::NotIn] => (&orelse_var[0], orelse_val, &body_var[0], body_val),
_ => {
return;
}
};
let test_dict = &test_dict[0];
let ExprKind::Subscript { value: expected_subscript, slice: expected_slice, .. } = &expected_val.node else {
return;
};
// Check that the dictionary key, target variables, and dictionary name are all
// equivalent.
if !compare_expr(&expected_slice.into(), &test_key.into())
|| !compare_expr(&expected_var.into(), &default_var.into())
|| !compare_expr(&test_dict.into(), &expected_subscript.into())
{
return;
}
let contents = unparse_stmt(
&create_stmt(StmtKind::Assign {
targets: vec![create_expr(expected_var.node.clone())],
value: Box::new(create_expr(ExprKind::Call {
func: Box::new(create_expr(ExprKind::Attribute {
value: expected_subscript.clone(),
attr: "get".to_string(),
ctx: ExprContext::Load,
})),
args: vec![
create_expr(test_key.node.clone()),
create_expr(default_val.node.clone()),
],
keywords: vec![],
})),
type_comment: None,
}),
checker.style,
);
// Don't flag for simplified `dict.get` if the resulting expression would exceed
// the maximum line length.
if stmt.location.column() + contents.len() > checker.settings.line_length {
return;
}
// Don't flag for simplified `dict.get` if the if-expression contains any
// comments.
if has_comments(stmt, checker.locator) {
return;
}
let mut diagnostic = Diagnostic::new(
violations::DictGetWithDefault(contents.clone()),
Range::from_located(stmt),
);
if checker.patch(&RuleCode::SIM401) {
diagnostic.amend(Fix::replacement(
contents,
stmt.location,
stmt.end_location.unwrap(),
));

View File

@@ -3,13 +3,17 @@ pub use ast_bool_op::{
};
pub use ast_expr::use_capital_environment_variables;
pub use ast_for::convert_loop_to_any_all;
pub use ast_if::{nested_if_statements, return_bool_condition_directly, use_ternary_operator};
pub use ast_if::{
nested_if_statements, return_bool_condition_directly, use_dict_get_with_default,
use_ternary_operator,
};
pub use ast_ifexp::{
explicit_false_true_in_ifexpr, explicit_true_false_in_ifexpr, twisted_arms_in_ifexpr,
};
pub use ast_unary_op::{double_negation, negation_with_equal_op, negation_with_not_equal_op};
pub use ast_with::multiple_with_statements;
pub use key_in_dict::{key_in_dict_compare, key_in_dict_for};
pub use open_file_with_context_handler::open_file_with_context_handler;
pub use return_in_try_except_finally::return_in_try_except_finally;
pub use use_contextlib_suppress::use_contextlib_suppress;
pub use yoda_conditions::yoda_conditions;
@@ -22,6 +26,7 @@ mod ast_ifexp;
mod ast_unary_op;
mod ast_with;
mod key_in_dict;
mod open_file_with_context_handler;
mod return_in_try_except_finally;
mod use_contextlib_suppress;
mod yoda_conditions;

View File

@@ -0,0 +1,30 @@
use rustpython_ast::Expr;
use rustpython_parser::ast::StmtKind;
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Diagnostic;
use crate::violations;
/// SIM115
pub fn open_file_with_context_handler(checker: &mut Checker, func: &Expr) {
if match_call_path(
&dealias_call_path(collect_call_paths(func), &checker.import_aliases),
"",
"open",
&checker.from_imports,
) {
if checker.is_builtin("open") {
match checker.current_stmt().node {
StmtKind::With { .. } => (),
_ => {
checker.diagnostics.push(Diagnostic::new(
violations::OpenFileWithContextHandler,
Range::from_located(func),
));
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_simplify/mod.rs
expression: checks
expression: diagnostics
---
- kind:
UseTernaryOperator: b = c if a else d
@@ -19,4 +19,21 @@ expression: checks
row: 5
column: 9
parent: ~
- kind:
UseTernaryOperator: b = cccccccccccccccccccccccccccccccccccc if a else ddddddddddddddddddddddddddddddddddddd
location:
row: 82
column: 0
end_location:
row: 85
column: 45
fix:
content: b = cccccccccccccccccccccccccccccccccccc if a else ddddddddddddddddddddddddddddddddddddd
location:
row: 82
column: 0
end_location:
row: 85
column: 45
parent: ~

View File

@@ -0,0 +1,15 @@
---
source: src/flake8_simplify/mod.rs
expression: diagnostics
---
- kind:
OpenFileWithContextHandler: ~
location:
row: 1
column: 4
end_location:
row: 1
column: 8
fix: ~
parent: ~

View File

@@ -0,0 +1,107 @@
---
source: src/flake8_simplify/mod.rs
expression: diagnostics
---
- kind:
DictGetWithDefault: "var = a_dict.get(key, \"default1\")"
location:
row: 6
column: 0
end_location:
row: 9
column: 20
fix:
content: "var = a_dict.get(key, \"default1\")"
location:
row: 6
column: 0
end_location:
row: 9
column: 20
parent: ~
- kind:
DictGetWithDefault: "var = a_dict.get(key, \"default2\")"
location:
row: 12
column: 0
end_location:
row: 15
column: 21
fix:
content: "var = a_dict.get(key, \"default2\")"
location:
row: 12
column: 0
end_location:
row: 15
column: 21
parent: ~
- kind:
DictGetWithDefault: "var = a_dict.get(key, val1 + val2)"
location:
row: 18
column: 0
end_location:
row: 21
column: 21
fix:
content: "var = a_dict.get(key, val1 + val2)"
location:
row: 18
column: 0
end_location:
row: 21
column: 21
parent: ~
- kind:
DictGetWithDefault: "var = a_dict.get(keys[idx], \"default\")"
location:
row: 24
column: 0
end_location:
row: 27
column: 19
fix:
content: "var = a_dict.get(keys[idx], \"default\")"
location:
row: 24
column: 0
end_location:
row: 27
column: 19
parent: ~
- kind:
DictGetWithDefault: "var = dicts[idx].get(key, \"default\")"
location:
row: 30
column: 0
end_location:
row: 33
column: 19
fix:
content: "var = dicts[idx].get(key, \"default\")"
location:
row: 30
column: 0
end_location:
row: 33
column: 19
parent: ~
- kind:
DictGetWithDefault: "vars[idx] = a_dict.get(key, \"default\")"
location:
row: 36
column: 0
end_location:
row: 39
column: 25
fix:
content: "vars[idx] = a_dict.get(key, \"default\")"
location:
row: 36
column: 0
end_location:
row: 39
column: 25
parent: ~

View File

@@ -153,6 +153,10 @@ pub fn unused_arguments(
.enabled
.contains(Argumentable::Method.rule_code())
&& !helpers::is_empty(body)
&& (!visibility::is_magic(name)
|| visibility::is_init(name)
|| visibility::is_new(name)
|| visibility::is_call(name))
&& !visibility::is_abstract(checker, decorator_list)
&& !visibility::is_override(checker, decorator_list)
&& !visibility::is_overload(checker, decorator_list)
@@ -178,6 +182,10 @@ pub fn unused_arguments(
.enabled
.contains(Argumentable::ClassMethod.rule_code())
&& !helpers::is_empty(body)
&& (!visibility::is_magic(name)
|| visibility::is_init(name)
|| visibility::is_new(name)
|| visibility::is_call(name))
&& !visibility::is_abstract(checker, decorator_list)
&& !visibility::is_override(checker, decorator_list)
&& !visibility::is_overload(checker, decorator_list)
@@ -203,6 +211,10 @@ pub fn unused_arguments(
.enabled
.contains(Argumentable::StaticMethod.rule_code())
&& !helpers::is_empty(body)
&& (!visibility::is_magic(name)
|| visibility::is_init(name)
|| visibility::is_new(name)
|| visibility::is_call(name))
&& !visibility::is_abstract(checker, decorator_list)
&& !visibility::is_override(checker, decorator_list)
&& !visibility::is_overload(checker, decorator_list)

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_unused_arguments/mod.rs
expression: checks
expression: diagnostics
---
- kind:
UnusedMethodArgument: x
@@ -32,4 +32,14 @@ expression: checks
column: 16
fix: ~
parent: ~
- kind:
UnusedMethodArgument: x
location:
row: 190
column: 23
end_location:
row: 190
column: 24
fix: ~
parent: ~

View File

@@ -93,4 +93,5 @@ cfg_if! {
pub use lib_wasm::check;
}
}
pub mod doc_lines;
pub mod flake8_pie;

View File

@@ -19,6 +19,7 @@ use crate::checkers::lines::check_lines;
use crate::checkers::noqa::check_noqa;
use crate::checkers::tokens::check_tokens;
use crate::directives::Directives;
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::message::{Message, Source};
use crate::noqa::add_noqa;
use crate::registry::{Diagnostic, LintSource, RuleCode};
@@ -70,6 +71,14 @@ pub(crate) fn check_path(
// Aggregate all diagnostics.
let mut diagnostics: Vec<Diagnostic> = vec![];
// Collect doc lines. This requires a rare mix of tokens (for comments) and AST
// (for docstrings), which demands special-casing at this level.
let use_doc_lines = settings.enabled.contains(&RuleCode::W505);
let mut doc_lines = vec![];
if use_doc_lines {
doc_lines.extend(doc_lines_from_tokens(&tokens));
}
// Run the token-based rules.
if settings
.enabled
@@ -89,7 +98,7 @@ pub(crate) fn check_path(
.enabled
.iter()
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
if use_ast || use_imports {
if use_ast || use_imports || use_doc_lines {
match rustpython_helpers::parse_program_tokens(tokens, "<filename>") {
Ok(python_ast) => {
if use_ast {
@@ -116,6 +125,9 @@ pub(crate) fn check_path(
package,
));
}
if use_doc_lines {
doc_lines.extend(doc_lines_from_ast(&python_ast));
}
}
Err(parse_error) => {
if settings.enabled.contains(&RuleCode::E999) {
@@ -128,6 +140,12 @@ pub(crate) fn check_path(
}
}
// Deduplicate and reorder any doc lines.
if use_doc_lines {
doc_lines.sort_unstable();
doc_lines.dedup();
}
// Run the lines-based rules.
if settings
.enabled
@@ -137,6 +155,7 @@ pub(crate) fn check_path(
diagnostics.extend(check_lines(
contents,
&directives.commented_lines,
&doc_lines,
settings,
autofix,
));

View File

@@ -67,6 +67,7 @@ mod tests {
&settings::Settings {
pycodestyle: Settings {
ignore_overlong_task_comments,
..Settings::default()
},
..settings::Settings::for_rule(RuleCode::E501)
},
@@ -74,4 +75,20 @@ mod tests {
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
#[test]
fn max_doc_length() -> Result<()> {
let diagnostics = test_path(
Path::new("./resources/test/fixtures/pycodestyle/W505.py"),
&settings::Settings {
pycodestyle: Settings {
max_doc_length: Some(50),
..Settings::default()
},
..settings::Settings::for_rule(RuleCode::W505)
},
)?;
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
}

View File

@@ -22,42 +22,88 @@ use crate::violations;
static URL_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^https?://\S+$").unwrap());
/// E501
pub fn line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
let line_length = line.chars().count();
if line_length <= settings.line_length {
return None;
fn is_overlong(
line: &str,
line_length: usize,
limit: usize,
ignore_overlong_task_comments: bool,
task_tags: &[String],
) -> bool {
if line_length <= limit {
return false;
}
let mut chunks = line.split_whitespace();
let (Some(first), Some(second)) = (chunks.next(), chunks.next()) else {
// Single word / no printable chars - no way to make the line shorter
return None;
return false;
};
if first == "#" {
if settings.pycodestyle.ignore_overlong_task_comments {
if ignore_overlong_task_comments {
let second = second.trim_end_matches(':');
if settings.task_tags.iter().any(|tag| tag == second) {
return None;
if task_tags.iter().any(|tag| tag == second) {
return false;
}
}
// Do not enforce the line length for commented lines that end with a URL
// or contain only a single word.
if chunks.last().map_or(true, |c| URL_REGEX.is_match(c)) {
return None;
return false;
}
}
Some(Diagnostic::new(
violations::LineTooLong(line_length, settings.line_length),
Range::new(
Location::new(lineno + 1, settings.line_length),
Location::new(lineno + 1, line_length),
),
))
true
}
/// E501
pub fn line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
let line_length = line.chars().count();
let limit = settings.line_length;
if is_overlong(
line,
line_length,
limit,
settings.pycodestyle.ignore_overlong_task_comments,
&settings.task_tags,
) {
Some(Diagnostic::new(
violations::LineTooLong(line_length, limit),
Range::new(
Location::new(lineno + 1, limit),
Location::new(lineno + 1, line_length),
),
))
} else {
None
}
}
/// W505
pub fn doc_line_too_long(lineno: usize, line: &str, settings: &Settings) -> Option<Diagnostic> {
let Some(limit) = settings.pycodestyle.max_doc_length else {
return None;
};
let line_length = line.chars().count();
if is_overlong(
line,
line_length,
limit,
settings.pycodestyle.ignore_overlong_task_comments,
&settings.task_tags,
) {
Some(Diagnostic::new(
violations::DocLineTooLong(line_length, limit),
Range::new(
Location::new(lineno + 1, limit),
Location::new(lineno + 1, line_length),
),
))
} else {
None
}
}
fn compare(

View File

@@ -9,6 +9,16 @@ use serde::{Deserialize, Serialize};
)]
#[serde(deny_unknown_fields, rename_all = "kebab-case", rename = "Pycodestyle")]
pub struct Options {
#[option(
default = "None",
value_type = "usize",
example = r#"
max-doc-length = 88
"#
)]
/// The maximum line length to allow for line-length violations within
/// documentation (`W505`), including standalone comments.
pub max_doc_length: Option<usize>,
#[option(
default = "false",
value_type = "bool",
@@ -24,12 +34,14 @@ pub struct Options {
#[derive(Debug, Default, Hash)]
pub struct Settings {
pub max_doc_length: Option<usize>,
pub ignore_overlong_task_comments: bool,
}
impl From<Options> for Settings {
fn from(options: Options) -> Self {
Self {
max_doc_length: options.max_doc_length,
ignore_overlong_task_comments: options
.ignore_overlong_task_comments
.unwrap_or_default(),
@@ -40,6 +52,7 @@ impl From<Options> for Settings {
impl From<Settings> for Options {
fn from(settings: Settings) -> Self {
Self {
max_doc_length: settings.max_doc_length,
ignore_overlong_task_comments: Some(settings.ignore_overlong_task_comments),
}
}

View File

@@ -0,0 +1,53 @@
---
source: src/pycodestyle/mod.rs
expression: diagnostics
---
- kind:
DocLineTooLong:
- 57
- 50
location:
row: 2
column: 50
end_location:
row: 2
column: 57
fix: ~
parent: ~
- kind:
DocLineTooLong:
- 56
- 50
location:
row: 6
column: 50
end_location:
row: 6
column: 56
fix: ~
parent: ~
- kind:
DocLineTooLong:
- 56
- 50
location:
row: 10
column: 50
end_location:
row: 10
column: 56
fix: ~
parent: ~
- kind:
DocLineTooLong:
- 61
- 50
location:
row: 15
column: 50
end_location:
row: 15
column: 61
fix: ~
parent: ~

View File

@@ -18,7 +18,9 @@ use crate::pydocstyle::helpers::{leading_quote, logical_line};
use crate::pydocstyle::settings::Convention;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;
use crate::visibility::{is_init, is_magic, is_overload, is_override, is_staticmethod, Visibility};
use crate::visibility::{
is_call, is_init, is_magic, is_new, is_overload, is_override, is_staticmethod, Visibility,
};
/// D100, D101, D102, D103, D104, D105, D106, D107
pub fn not_missing(
@@ -85,18 +87,26 @@ pub fn not_missing(
|| is_override(checker, cast::decorator_list(stmt))
{
true
} else if is_magic(stmt) {
if checker.settings.enabled.contains(&RuleCode::D105) {
} else if is_init(cast::name(stmt)) {
if checker.settings.enabled.contains(&RuleCode::D107) {
checker.diagnostics.push(Diagnostic::new(
violations::MagicMethod,
violations::PublicInit,
identifier_range(stmt, checker.locator),
));
}
true
} else if is_init(stmt) {
if checker.settings.enabled.contains(&RuleCode::D107) {
} else if is_new(cast::name(stmt)) || is_call(cast::name(stmt)) {
if checker.settings.enabled.contains(&RuleCode::D102) {
checker.diagnostics.push(Diagnostic::new(
violations::PublicInit,
violations::PublicMethod,
identifier_range(stmt, checker.locator),
));
}
true
} else if is_magic(cast::name(stmt)) {
if checker.settings.enabled.contains(&RuleCode::D105) {
checker.diagnostics.push(Diagnostic::new(
violations::MagicMethod,
identifier_range(stmt, checker.locator),
));
}

View File

@@ -44,6 +44,7 @@ mod tests {
#[test_case(RuleCode::UP024, Path::new("UP024_0.py"); "UP024_0")]
#[test_case(RuleCode::UP024, Path::new("UP024_1.py"); "UP024_1")]
#[test_case(RuleCode::UP024, Path::new("UP024_2.py"); "UP024_2")]
#[test_case(RuleCode::UP024, Path::new("UP024_3.py"); "UP024_3")]
#[test_case(RuleCode::UP025, Path::new("UP025.py"); "UP025")]
#[test_case(RuleCode::UP026, Path::new("UP026.py"); "UP026")]
#[test_case(RuleCode::UP027, Path::new("UP027.py"); "UP027")]

View File

@@ -1,5 +1,3 @@
#![allow(clippy::len_zero, clippy::needless_pass_by_value)]
use itertools::Itertools;
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Located};
@@ -13,8 +11,11 @@ use crate::violations;
const ERROR_NAMES: &[&str] = &["EnvironmentError", "IOError", "WindowsError"];
const ERROR_MODULES: &[&str] = &["mmap", "select", "socket"];
fn get_correct_name(original: &str) -> String {
if ERROR_NAMES.contains(&original) {
fn corrected_name(checker: &Checker, original: &str) -> String {
if ERROR_NAMES.contains(&original)
&& checker.is_builtin(original)
&& checker.is_builtin("OSError")
{
"OSError".to_string()
} else {
original.to_string()
@@ -64,7 +65,7 @@ fn handle_name_or_attribute(
replacements.extend(temp_replacements);
before_replace.extend(temp_before_replace);
if replacements.is_empty() {
let new_name = get_correct_name(id);
let new_name = corrected_name(checker, id);
replacements.push(new_name);
before_replace.push(id.to_string());
}
@@ -102,7 +103,7 @@ fn handle_except_block(checker: &mut Checker, handler: &Located<ExcepthandlerKin
for elt in elts {
match &elt.node {
ExprKind::Name { id, .. } => {
let new_name = get_correct_name(id);
let new_name = corrected_name(checker, id);
replacements.push(new_name);
}
ExprKind::Attribute { .. } => {
@@ -138,7 +139,7 @@ fn handle_making_changes(
before_replace: &[String],
replacements: &[String],
) {
if before_replace != replacements && replacements.len() > 0 {
if before_replace != replacements && !replacements.is_empty() {
let range = Range::new(target.location, target.end_location.unwrap());
let contents = checker.locator.slice_source_code_range(&range);
// Pyyupgrade does not want imports changed if a module only is
@@ -171,8 +172,6 @@ fn handle_making_changes(
}
}
// This is a hacky way to handle the different variable types we get since
// raise and try are very different. Would love input on a cleaner way
pub trait OSErrorAliasChecker {
fn check_error(&self, checker: &mut Checker)
where
@@ -181,7 +180,6 @@ pub trait OSErrorAliasChecker {
impl OSErrorAliasChecker for &Vec<Excepthandler> {
fn check_error(&self, checker: &mut Checker) {
// Each separate except block is a separate error and fix
for handler in self.iter() {
handle_except_block(checker, handler);
}
@@ -233,6 +231,6 @@ impl OSErrorAliasChecker for &Expr {
}
/// UP024
pub fn os_error_alias<U: OSErrorAliasChecker>(checker: &mut Checker, handlers: U) {
pub fn os_error_alias<U: OSErrorAliasChecker>(checker: &mut Checker, handlers: &U) {
handlers.check_error(checker);
}

View File

@@ -0,0 +1,6 @@
---
source: src/pyupgrade/mod.rs
expression: diagnostics
---
[]

View File

@@ -132,6 +132,7 @@ define_rule_mapping!(
E999 => violations::SyntaxError,
// pycodestyle warnings
W292 => violations::NoNewLineAtEndOfFile,
W505 => violations::DocLineTooLong,
W605 => violations::InvalidEscapeSequence,
// pyflakes
F401 => violations::UnusedImport,
@@ -294,6 +295,7 @@ define_rule_mapping!(
YTT302 => violations::SysVersionCmpStr10,
YTT303 => violations::SysVersionSlice1Referenced,
// flake8-simplify
SIM115 => violations::OpenFileWithContextHandler,
SIM101 => violations::DuplicateIsinstanceCall,
SIM102 => violations::NestedIfStatements,
SIM103 => violations::ReturnBoolConditionDirectly,
@@ -317,6 +319,7 @@ define_rule_mapping!(
SIM222 => violations::OrTrue,
SIM223 => violations::AndFalse,
SIM300 => violations::YodaConditions,
SIM401 => violations::DictGetWithDefault,
// pyupgrade
UP001 => violations::UselessMetaclassType,
UP003 => violations::TypeOfPrimitive,
@@ -784,6 +787,7 @@ impl RuleCode {
RuleCode::RUF100 => &LintSource::NoQA,
RuleCode::E501
| RuleCode::W292
| RuleCode::W505
| RuleCode::UP009
| RuleCode::PGH003
| RuleCode::PGH004 => &LintSource::Lines,

View File

@@ -182,7 +182,7 @@ impl Settings {
vec!["TODO".to_string(), "FIXME".to_string(), "XXX".to_string()]
}),
typing_modules: config.typing_modules.unwrap_or_default(),
update_check: config.update_check.unwrap_or(true),
update_check: config.update_check.unwrap_or_default(),
// Plugins
flake8_annotations: config
.flake8_annotations
@@ -458,7 +458,7 @@ mod tests {
}]
.into_iter(),
);
let expected = FxHashSet::from_iter([RuleCode::W292, RuleCode::W605]);
let expected = FxHashSet::from_iter([RuleCode::W292, RuleCode::W505, RuleCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(
@@ -478,7 +478,7 @@ mod tests {
}]
.into_iter(),
);
let expected = FxHashSet::from_iter([RuleCode::W605]);
let expected = FxHashSet::from_iter([RuleCode::W505, RuleCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(
@@ -504,7 +504,7 @@ mod tests {
]
.into_iter(),
);
let expected = FxHashSet::from_iter([RuleCode::W292, RuleCode::W605]);
let expected = FxHashSet::from_iter([RuleCode::W292, RuleCode::W505, RuleCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(

View File

@@ -87,6 +87,8 @@ pub struct Options {
/// `directory`). Note that these paths are relative to the project root
/// (e.g., the directory containing your `pyproject.toml`).
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
///
/// Note that you'll typically want to use
/// [`extend-exclude`](#extend-exclude) to modify the excluded paths.
pub exclude: Option<Vec<String>>,
@@ -118,6 +120,18 @@ pub struct Options {
)]
/// A list of file patterns to omit from linting, in addition to those
/// specified by `exclude`.
///
/// Exclusions are based on globs, and can be either:
///
/// - Single-path patterns, like `.mypy_cache` (to exclude any directory
/// named `.mypy_cache` in the tree), `foo.py` (to exclude any file named
/// `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ).
/// - Relative patterns, like `directory/foo.py` (to exclude that specific
/// file) or `directory/*.py` (to exclude any Python files in
/// `directory`). Note that these paths are relative to the project root
/// (e.g., the directory containing your `pyproject.toml`).
///
/// For more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).
pub extend_exclude: Option<Vec<String>>,
#[option(
default = "[]",
@@ -375,9 +389,9 @@ pub struct Options {
/// A list of rule codes or prefixes to consider non-autofix-able.
pub unfixable: Option<Vec<RuleCodePrefix>>,
#[option(
default = "true",
default = "false",
value_type = "bool",
example = "update-check = false"
example = "update-check = true"
)]
/// Enable or disable automatic update checks (overridden by the
/// `--update-check` and `--no-update-check` command-line flags).

View File

@@ -315,6 +315,20 @@ impl AlwaysAutofixableViolation for InvalidEscapeSequence {
}
}
define_violation!(
pub struct DocLineTooLong(pub usize, pub usize);
);
impl Violation for DocLineTooLong {
fn message(&self) -> String {
let DocLineTooLong(length, limit) = self;
format!("Doc line too long ({length} > {limit} characters)")
}
fn placeholder() -> Self {
DocLineTooLong(89, 88)
}
}
// pyflakes
define_violation!(
@@ -2677,6 +2691,20 @@ impl Violation for SysVersionSlice1Referenced {
}
// flake8-simplify
define_violation!(
pub struct OpenFileWithContextHandler;
);
impl Violation for OpenFileWithContextHandler {
fn message(&self) -> String {
"Use context handler for opening files".to_string()
}
fn placeholder() -> Self {
OpenFileWithContextHandler
}
}
define_violation!(
pub struct UseCapitalEnvironmentVariables(pub String, pub String);
);
@@ -2779,13 +2807,13 @@ define_violation!(
);
impl AlwaysAutofixableViolation for UseTernaryOperator {
fn message(&self) -> String {
let UseTernaryOperator(new_code) = self;
format!("Use ternary operator `{new_code}` instead of if-else-block")
let UseTernaryOperator(contents) = self;
format!("Use ternary operator `{contents}` instead of if-else-block")
}
fn autofix_title(&self) -> String {
let UseTernaryOperator(new_code) = self;
format!("Replace if-else-block with `{new_code}`")
let UseTernaryOperator(contents) = self;
format!("Replace if-else-block with `{contents}`")
}
fn placeholder() -> Self {
@@ -3093,6 +3121,24 @@ impl AlwaysAutofixableViolation for IfExprWithTwistedArms {
}
}
define_violation!(
pub struct DictGetWithDefault(pub String);
);
impl AlwaysAutofixableViolation for DictGetWithDefault {
fn message(&self) -> String {
let DictGetWithDefault(contents) = self;
format!("Use `{contents}` instead of an `if` block")
}
fn autofix_title(&self) -> String {
let DictGetWithDefault(contents) = self;
format!("Replace with `{contents}`")
}
fn placeholder() -> Self {
DictGetWithDefault("var = dict.get(key, \"default\")".to_string())
}
}
// pyupgrade
define_violation!(
@@ -5439,16 +5485,12 @@ impl Violation for ExtraneousScopeFunction {
define_violation!(
pub struct MissingFixtureNameUnderscore(pub String);
);
impl AlwaysAutofixableViolation for MissingFixtureNameUnderscore {
impl Violation for MissingFixtureNameUnderscore {
fn message(&self) -> String {
let MissingFixtureNameUnderscore(function) = self;
format!("Fixture `{function}` does not return anything, add leading underscore")
}
fn autofix_title(&self) -> String {
"Add leading underscore".to_string()
}
fn placeholder() -> Self {
MissingFixtureNameUnderscore("...".to_string())
}
@@ -5457,16 +5499,12 @@ impl AlwaysAutofixableViolation for MissingFixtureNameUnderscore {
define_violation!(
pub struct IncorrectFixtureNameUnderscore(pub String);
);
impl AlwaysAutofixableViolation for IncorrectFixtureNameUnderscore {
impl Violation for IncorrectFixtureNameUnderscore {
fn message(&self) -> String {
let IncorrectFixtureNameUnderscore(function) = self;
format!("Fixture `{function}` returns a value, remove leading underscore")
}
fn autofix_title(&self) -> String {
"Remove leading underscore".to_string()
}
fn placeholder() -> Self {
IncorrectFixtureNameUnderscore("...".to_string())
}

View File

@@ -82,27 +82,23 @@ pub fn is_abstract(checker: &Checker, decorator_list: &[Expr]) -> bool {
}
/// Returns `true` if a function is a "magic method".
pub fn is_magic(stmt: &Stmt) -> bool {
match &stmt.node {
StmtKind::FunctionDef { name, .. } | StmtKind::AsyncFunctionDef { name, .. } => {
name.starts_with("__")
&& name.ends_with("__")
&& name != "__init__"
&& name != "__call__"
&& name != "__new__"
}
_ => panic!("Found non-FunctionDef in is_magic"),
}
pub fn is_magic(name: &str) -> bool {
name.starts_with("__") && name.ends_with("__")
}
/// Returns `true` if a function is an `__init__`.
pub fn is_init(stmt: &Stmt) -> bool {
match &stmt.node {
StmtKind::FunctionDef { name, .. } | StmtKind::AsyncFunctionDef { name, .. } => {
name == "__init__"
}
_ => panic!("Found non-FunctionDef in is_init"),
}
pub fn is_init(name: &str) -> bool {
name == "__init__"
}
/// Returns `true` if a function is an `__new__`.
pub fn is_new(name: &str) -> bool {
name == "__new__"
}
/// Returns `true` if a function is an `__call__`.
pub fn is_call(name: &str) -> bool {
name == "__call__"
}
/// Returns `true` if a module name indicates public visibility.