Compare commits

...

22 Commits

Author SHA1 Message Date
Charlie Marsh
4be09b45ea Bump version to 0.0.129 2022-11-19 19:52:40 -05:00
Harutaka Kawamura
13e8ed0a0a Implement autofix for E731 (#814) 2022-11-19 19:51:41 -05:00
Anders Kaseorg
4161d4ae32 Exempt parameters with immutable annotations from B006 (#821) 2022-11-19 19:46:08 -05:00
Charlie Marsh
99f7854d8c Mark nonlocal variables as used in parent scopes (#822) 2022-11-19 19:21:02 -05:00
Harutaka Kawamura
a580d1a858 Adjust UnusedNOQA start location (#817) 2022-11-19 09:30:02 -05:00
Martin Lehoux
86806a9e39 U013: Also convert typing.TypedDict (#810) 2022-11-19 09:29:05 -05:00
Charlie Marsh
89afc9db74 Bump version to 0.0.128 2022-11-18 18:50:03 -05:00
Charlie Marsh
0f34cdb7a3 Enable customization of autofixable error codes (#811) 2022-11-18 18:49:13 -05:00
Charlie Marsh
437b6f23b9 Remove warn_on checks (#812) 2022-11-18 18:48:24 -05:00
Charlie Marsh
0fe2b15676 Change NotInTest to NotIsTest 2022-11-18 18:23:40 -05:00
Harutaka Kawamura
e81efa5a3d Implement a --show-source setting (#698) 2022-11-18 14:02:29 -05:00
Charlie Marsh
49559da54e Bump version to 0.0.127 2022-11-18 13:31:22 -05:00
Jonathan Plasse
b74fd1fe13 Change error code of flake8-blind-except (#808) 2022-11-18 13:30:36 -05:00
Charlie Marsh
9c4d24a452 Add flake8-boolean-trap to README 2022-11-18 12:36:13 -05:00
pwoolvett
7a4449eacb Add flake8-boolean-trap (#790) 2022-11-18 12:30:07 -05:00
Charlie Marsh
ee31fa6109 Reduce newlines in code gen (#807) 2022-11-18 12:27:56 -05:00
Harutaka Kawamura
6ffe767252 Implement autofix for E713 and E714 (#804) 2022-11-18 12:16:11 -05:00
Jonathan Plasse
2f894e3951 Add flake8-blind-except (#805) 2022-11-18 12:15:10 -05:00
Charlie Marsh
589d923c99 Misc. follow-ups to #716 (#806) 2022-11-18 12:14:41 -05:00
Martin Lehoux
c5722d8a4d Implement U013: Unnecessary TypedDict syntactic form (#716) 2022-11-18 12:10:47 -05:00
Jonathan Plasse
c2d6307e9b Add missing plugins in some sections of README.md (#802) 2022-11-18 09:28:33 -05:00
Edgar R. M
f44fada446 Implement C901 (mccabe) (#765) 2022-11-17 17:40:50 -05:00
95 changed files with 2906 additions and 678 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.126
rev: v0.0.129
hooks:
- id: ruff

View File

@@ -98,6 +98,13 @@ _and_ a `pyproject.toml` parameter to `src/pyproject.rs`. If you want to pattern
existing example, grep for `dummy_variable_rgx`, which defines a regular expression to match against
acceptable unused variables (e.g., `_`).
If the new plugin's configuration should be cached between runs, you'll need to add it to the
`Hash` implementation for `Settings` in `src/settings/mod.rs`.
You may also want to add the new configuration option to the `flake8-to-ruff` tool, which is
responsible for converting `flake8` configuration files to Ruff's TOML format. This logic
lives in `flake8_to_ruff/src/converter.rs`.
## Release process
As of now, Ruff has an ad hoc release process: releases are cut with high frequency via GitHub

28
Cargo.lock generated
View File

@@ -49,6 +49,16 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7021ce4924a3f25f802b2cccd1af585e39ea1a363a1aa2e72afe54b67a3a7a7"
[[package]]
name = "annotate-snippets"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36"
dependencies = [
"unicode-width",
"yansi-term",
]
[[package]]
name = "anyhow"
version = "1.0.66"
@@ -417,7 +427,7 @@ version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5b5db619f3556839cb2223ae86ff3f9a09da2c5013be42bc9af08c9589bf70c"
dependencies = [
"annotate-snippets",
"annotate-snippets 0.6.1",
]
[[package]]
@@ -930,7 +940,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.126-dev.0"
version = "0.0.129-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -2238,8 +2248,9 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.126"
version = "0.0.129"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
"assert_cmd",
"atty",
@@ -2287,7 +2298,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.126"
version = "0.0.129"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -3312,3 +3323,12 @@ checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "yansi-term"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1"
dependencies = [
"winapi 0.3.9",
]

View File

@@ -6,13 +6,14 @@ members = [
[package]
name = "ruff"
version = "0.0.126"
version = "0.0.129"
edition = "2021"
[lib]
name = "ruff"
[dependencies]
annotate-snippets = { version = "0.9.1", features = ["color"] }
anyhow = { version = "1.0.66" }
atty = { version = "0.2.14" }
bincode = { version = "1.3.3" }

24
LICENSE
View File

@@ -168,6 +168,30 @@ are:
THE SOFTWARE.
"""
- flake8-blind-except, licensed as follows:
"""
The MIT License (MIT)
Copyright (c) 2014 Elijah Andrews
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
- flake8-bugbear, licensed as follows:
"""
The MIT License (MIT)

View File

@@ -61,6 +61,9 @@ Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-mu
1. [flake8-quotes (Q)](#flake8-quotes)
1. [flake8-annotations (ANN)](#flake8-annotations)
1. [flake8-2020 (YTT)](#flake8-2020)
1. [flake8-blind-except (BLE)](#flake8-blind-except)
1. [flake8-boolean-trap (FBT)](#flake8-boolean-trap)
1. [mccabe (C90)](#mccabe)
1. [Ruff-specific rules (RUF)](#ruff-specific-rules)
1. [Meta rules (M)](#meta-rules)
1. [Editor Integrations](#editor-integrations)
@@ -119,7 +122,7 @@ default configuration is equivalent to:
[tool.ruff]
line-length = 88
# Enable Flake's "E" and "F" codes by default.
# Enable Pyflakes `E` and `F` codes by default.
select = ["E", "F"]
ignore = []
@@ -154,18 +157,24 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
target-version = "py310"
```
As an example, the following would configure Ruff to (1) avoid checking for line-length
violations (`E501`) and (2) ignore unused import rules in `__init__.py` files:
As an example, the following would configure Ruff to: (1) avoid checking for line-length
violations (`E501`); (2) always autofix, but never remove unused imports (`F401`); and (3) ignore
import-at-top-of-file errors (`E402`) in `__init__.py` files:
```toml
[tool.ruff]
# Enable Pyflakes and pycodestyle rules.
select = ["E", "F"]
# Never enforce `E501`.
# Never enforce `E501` (line length violations).
ignore = ["E501"]
# Ignore `F401` violations in any `__init__.py` file, and in `path/to/file.py`.
per-file-ignores = {"__init__.py" = ["F401"], "path/to/file.py" = ["F401"]}
# Always autofix, but never try to fix `F401` (unused imports).
fix = true
unfixable = ["F401"]
# Ignore `E402` (import violations) in any `__init__.py` file, and in `path/to/file.py`.
per-file-ignores = {"__init__.py" = ["E402"], "path/to/file.py" = ["E402"]}
```
Plugin configurations should be expressed as subsections, e.g.:
@@ -188,7 +197,7 @@ ruff path/to/code/ --select F401 --select F403
See `ruff --help` for more:
```shell
ruff: An extremely fast Python linter.
Ruff: An extremely fast Python linter.
Usage: ruff [OPTIONS] <FILES>...
@@ -224,14 +233,20 @@ Options:
List of paths, used to exclude files and/or directories from checks
--extend-exclude <EXTEND_EXCLUDE>
Like --exclude, but adds additional files and directories on top of the excluded ones
--fixable <FIXABLE>
List of error codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
--unfixable <UNFIXABLE>
List of error codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
--per-file-ignores <PER_FILE_IGNORES>
List of mappings from file pattern to code to exclude
--format <FORMAT>
Output serialization format for error messages [default: text] [possible values: text, json]
--show-source
Show violations with source code
--show-files
See the files ruff will be run against with the current settings
See the files Ruff will be run against with the current settings
--show-settings
See ruff's settings
See Ruff's settings
--add-noqa
Enable automatic additions of noqa directives to failing lines
--dummy-variable-rgx <DUMMY_VARIABLE_RGX>
@@ -240,6 +255,8 @@ Options:
The minimum Python version that should be supported
--line-length <LINE_LENGTH>
Set the line-length for length-associated checks and automatic formatting
--max-complexity <MAX_COMPLEXITY>
Max McCabe complexity allowed for a function
--stdin-filename <STDIN_FILENAME>
The name of the file when passing it through stdin
-h, --help
@@ -359,11 +376,11 @@ For more, see [pycodestyle](https://pypi.org/project/pycodestyle/2.9.1/) on PyPI
| E501 | LineTooLong | Line too long (89 > 88 characters) | |
| E711 | NoneComparison | Comparison to `None` should be `cond is None` | 🛠 |
| E712 | TrueFalseComparison | Comparison to `True` should be `cond is True` | 🛠 |
| E713 | NotInTest | Test for membership should be `not in` | |
| E714 | NotIsTest | Test for object identity should be `is not` | |
| E713 | NotInTest | Test for membership should be `not in` | 🛠 |
| E714 | NotIsTest | Test for object identity should be `is not` | 🛠 |
| E721 | TypeComparison | Do not compare types, use `isinstance()` | |
| E722 | DoNotUseBareExcept | Do not use bare `except` | |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | 🛠 |
| E741 | AmbiguousVariableName | Ambiguous variable name: `...` | |
| E742 | AmbiguousClassName | Ambiguous class name: `...` | |
| E743 | AmbiguousFunctionName | Ambiguous function name: `...` | |
@@ -448,6 +465,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| U010 | UnnecessaryFutureImport | Unnecessary `__future__` import `...` for target Python version | 🛠 |
| U011 | UnnecessaryLRUCacheParams | Unnecessary parameters to `functools.lru_cache` | 🛠 |
| U012 | UnnecessaryEncodeUTF8 | Unnecessary call to `encode` as UTF-8 | 🛠 |
| U013 | ConvertTypedDictFunctionalToClass | Convert `TypedDict` functional syntax to class syntax | 🛠 |
### pep8-naming
@@ -507,6 +525,16 @@ For more, see [flake8-comprehensions](https://pypi.org/project/flake8-comprehens
| C416 | UnnecessaryComprehension | Unnecessary `(list\|set)` comprehension (rewrite using `(list\|set)()`) | 🛠 |
| C417 | UnnecessaryMap | Unnecessary `map` usage (rewrite using a `(list\|set\|dict)` comprehension) | |
### flake8-boolean-trap
For more, see [flake8-boolean-trap](https://pypi.org/project/flake8-boolean-trap/0.1.0/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| FBT001 | BooleanPositionalArgInFunctionDefinition | Boolean positional arg in function definition | |
| FBT002 | BooleanDefaultValueInFunctionDefinition | Boolean default value in function definition | |
| FBT003 | BooleanPositionalValueInFunctionCall | Boolean positional value in function call | |
### flake8-bugbear
For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/) on PyPI.
@@ -612,6 +640,22 @@ For more, see [flake8-2020](https://pypi.org/project/flake8-2020/1.7.0/) on PyPI
| YTT302 | SysVersionCmpStr10 | `sys.version` compared to string (python10), use `sys.version_info` | |
| YTT303 | SysVersionSlice1Referenced | `sys.version[:1]` referenced (python10), use `sys.version_info` | |
### flake8-blind-except
For more, see [flake8-blind-except](https://pypi.org/project/flake8-blind-except/0.2.1/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| BLE001 | BlindExcept | Blind except Exception: statement | |
### mccabe
For more, see [mccabe](https://pypi.org/project/mccabe/0.7.0/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| C901 | FunctionIsTooComplex | `...` is too complex (10) | |
### Ruff-specific rules
| Code | Name | Message | Fix |
@@ -665,6 +709,7 @@ tools:
format-command: 'ruff --stdin-filename ${INPUT} --config ~/myconfigs/linters/ruff.toml --fix --exit-zero --quiet -'
format-stdin: true
```
</details>
<details>
@@ -701,6 +746,7 @@ null_ls.setup({
}
})
```
</details>
### Language Server Protocol (Unofficial)
@@ -770,6 +816,10 @@ including:
- [`flake8-bandit`](https://pypi.org/project/flake8-bandit/) (6/40)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (25/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
- [`flake8-blind-except`](https://pypi.org/project/flake8-blind-except/)
- [`flake8-boolean-trap`](https://pypi.org/project/flake8-boolean-trap/)
- [`mccabe`](https://pypi.org/project/mccabe/)
- [`isort`](https://pypi.org/project/isort/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/33)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
@@ -784,6 +834,7 @@ Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis F
Today, Ruff can be used to replace Flake8 when used with any of the following plugins:
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
@@ -796,6 +847,9 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (26/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
- [`flake8-blind-except`](https://pypi.org/project/flake8-blind-except/)
- [`flake8-boolean-trap`](https://pypi.org/project/flake8-boolean-trap/)
- [`mccabe`](https://pypi.org/project/mccabe/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/33).

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.126"
version = "0.0.129"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.126"
version = "0.0.129"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.126-dev.0"
version = "0.0.129-dev.0"
edition = "2021"
[lib]

View File

@@ -6,7 +6,9 @@ use ruff::flake8_quotes::settings::Quote;
use ruff::flake8_tidy_imports::settings::Strictness;
use ruff::settings::options::Options;
use ruff::settings::pyproject::Pyproject;
use ruff::{flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, pep8_naming};
use ruff::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, mccabe, pep8_naming,
};
use crate::plugin::Plugin;
use crate::{parser, plugin};
@@ -73,6 +75,7 @@ pub fn convert(
let mut flake8_bugbear: flake8_bugbear::settings::Options = Default::default();
let mut flake8_quotes: flake8_quotes::settings::Options = Default::default();
let mut flake8_tidy_imports: flake8_tidy_imports::settings::Options = Default::default();
let mut mccabe: mccabe::settings::Options = Default::default();
let mut pep8_naming: pep8_naming::settings::Options = Default::default();
for (key, value) in flake8 {
if let Some(value) = value {
@@ -186,6 +189,11 @@ pub fn convert(
"docstring-convention" => {
// No-op (handled above).
}
// mccabe
"max-complexity" | "max_complexity" => match value.clone().parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
},
// Unknown
_ => eprintln!("Skipping unsupported property: {key}"),
}
@@ -207,6 +215,9 @@ pub fn convert(
if flake8_tidy_imports != Default::default() {
options.flake8_tidy_imports = Some(flake8_tidy_imports);
}
if mccabe != Default::default() {
options.mccabe = Some(mccabe);
}
if pep8_naming != Default::default() {
options.pep8_naming = Some(pep8_naming);
}
@@ -232,27 +243,31 @@ mod tests {
fn it_converts_empty() -> Result<()> {
let actual = convert(&HashMap::from([]), None)?;
let expected = Pyproject::new(Options {
line_length: None,
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -267,27 +282,31 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
line_length: Some(100),
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: Some(100),
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -302,27 +321,31 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
line_length: Some(100),
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: Some(100),
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -337,27 +360,31 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
line_length: None,
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -372,22 +399,25 @@ mod tests {
Some(vec![]),
)?;
let expected = Pyproject::new(Options {
line_length: None,
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
@@ -398,6 +428,7 @@ mod tests {
}),
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -415,11 +446,16 @@ mod tests {
Some(vec![Plugin::Flake8Docstrings]),
)?;
let expected = Pyproject::new(Options {
line_length: None,
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::D100,
CheckCodePrefix::D101,
@@ -460,17 +496,16 @@ mod tests {
CheckCodePrefix::F,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);
@@ -485,23 +520,26 @@ mod tests {
None,
)?;
let expected = Pyproject::new(Options {
line_length: None,
src: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::Q,
CheckCodePrefix::W,
]),
extend_select: None,
ignore: Some(vec![]),
extend_ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
@@ -512,6 +550,7 @@ mod tests {
}),
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
});
assert_eq!(actual, expected);

View File

@@ -15,6 +15,8 @@ pub enum Plugin {
Flake8Print,
Flake8Quotes,
Flake8Annotations,
McCabe,
Flake8BlindExcept,
PEP8Naming,
Pyupgrade,
}
@@ -33,6 +35,8 @@ impl FromStr for Plugin {
"flake8-print" => Ok(Plugin::Flake8Print),
"flake8-quotes" => Ok(Plugin::Flake8Quotes),
"flake8-annotations" => Ok(Plugin::Flake8Annotations),
"flake8-blind-except" => Ok(Plugin::Flake8BlindExcept),
"mccabe" => Ok(Plugin::McCabe),
"pep8-naming" => Ok(Plugin::PEP8Naming),
"pyupgrade" => Ok(Plugin::Pyupgrade),
_ => Err(anyhow!("Unknown plugin: {}", string)),
@@ -46,12 +50,14 @@ impl Plugin {
Plugin::Flake8Bandit => CheckCodePrefix::S,
Plugin::Flake8Bugbear => CheckCodePrefix::B,
Plugin::Flake8Builtins => CheckCodePrefix::A,
Plugin::Flake8Comprehensions => CheckCodePrefix::C,
Plugin::Flake8Comprehensions => CheckCodePrefix::C4,
Plugin::Flake8Docstrings => CheckCodePrefix::D,
Plugin::Flake8TidyImports => CheckCodePrefix::I25,
Plugin::Flake8Print => CheckCodePrefix::T,
Plugin::Flake8Quotes => CheckCodePrefix::Q,
Plugin::Flake8Annotations => CheckCodePrefix::ANN,
Plugin::Flake8BlindExcept => CheckCodePrefix::BLE,
Plugin::McCabe => CheckCodePrefix::C9,
Plugin::PEP8Naming => CheckCodePrefix::N,
Plugin::Pyupgrade => CheckCodePrefix::U,
}
@@ -62,7 +68,7 @@ impl Plugin {
Plugin::Flake8Bandit => vec![CheckCodePrefix::S],
Plugin::Flake8Bugbear => vec![CheckCodePrefix::B],
Plugin::Flake8Builtins => vec![CheckCodePrefix::A],
Plugin::Flake8Comprehensions => vec![CheckCodePrefix::C],
Plugin::Flake8Comprehensions => vec![CheckCodePrefix::C4],
Plugin::Flake8Docstrings => {
// Use the user-provided docstring.
for key in ["docstring-convention", "docstring_convention"] {
@@ -83,6 +89,8 @@ impl Plugin {
Plugin::Flake8Print => vec![CheckCodePrefix::T],
Plugin::Flake8Quotes => vec![CheckCodePrefix::Q],
Plugin::Flake8Annotations => vec![CheckCodePrefix::ANN],
Plugin::Flake8BlindExcept => vec![CheckCodePrefix::BLE],
Plugin::McCabe => vec![CheckCodePrefix::C9],
Plugin::PEP8Naming => vec![CheckCodePrefix::N],
Plugin::Pyupgrade => vec![CheckCodePrefix::U],
}
@@ -326,6 +334,10 @@ pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> V
"banned-modules" | "banned_modules" => {
plugins.insert(Plugin::Flake8TidyImports);
}
// mccabe
"max-complexity" | "max_complexity" => {
plugins.insert(Plugin::McCabe);
}
// pep8-naming
"ignore-names" | "ignore_names" => {
plugins.insert(Plugin::PEP8Naming);
@@ -357,6 +369,7 @@ pub fn infer_plugins_from_codes(codes: &BTreeSet<CheckCodePrefix>) -> Vec<Plugin
Plugin::Flake8Print,
Plugin::Flake8Quotes,
Plugin::Flake8Annotations,
Plugin::Flake8BlindExcept,
Plugin::PEP8Naming,
Plugin::Pyupgrade,
]

View File

@@ -185,3 +185,23 @@ def nested_b008(a=random.randint(0, dt.datetime.now().year)):
# Ignore lambda contents since they are evaluated at call time.
def foo(f=lambda x: print(x)):
f(1)
from collections import abc
from typing import Annotated, Dict, Optional, Sequence, Union, Set
def immutable_annotations(
a: Sequence[int] | None = [],
b: Optional[abc.Mapping[int, int]] = {},
c: Annotated[Union[abc.Set[str], abc.Sized], "annotation"] = set(),
):
pass
def mutable_annotations(
a: list[int] | None = [],
b: Optional[Dict[int, int]] = {},
c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
):
pass

55
resources/test/fixtures/BLE.py vendored Normal file
View File

@@ -0,0 +1,55 @@
try:
pass
except ValueError:
pass
except Exception as e:
raise e
finally:
pass
try:
pass
except BaseException as e:
raise e
except TypeError:
pass
else:
pass
try:
pass
except Exception as e:
raise e
except BaseException:
pass
try:
pass
except Exception:
pass
finally:
try:
pass
except BaseException as e:
raise e
try:
pass
except Exception as e:
try:
raise e
except BaseException:
pass
try:
try:
pass
except BaseException as e:
raise e
except Exception:
pass

108
resources/test/fixtures/C901.py vendored Normal file
View File

@@ -0,0 +1,108 @@
# Complexity = 1
def trivial():
pass
# Complexity = 1
def expr_as_statement():
0xF00D
# Complexity = 1
def sequential(n):
k = n + 4
s = k + n
return s
# Complexity = 3
def if_elif_else_dead_path(n):
if n > 3:
return "bigger than three"
elif n > 4:
return "is never executed"
else:
return "smaller than or equal to three"
# Complexity = 3
def nested_ifs():
if n > 3:
if n > 4:
return "bigger than four"
else:
return "bigger than three"
else:
return "smaller than or equal to three"
# Complexity = 2
def for_loop():
for i in range(10):
print(i)
# Complexity = 2
def for_else(mylist):
for i in mylist:
print(i)
else:
print(None)
# Complexity = 2
def recursive(n):
if n > 4:
return f(n - 1)
else:
return n
# Complexity = 3
def nested_functions():
def a():
def b():
pass
b()
a()
# Complexity = 4
def try_else():
try:
print(1)
except TypeA:
print(2)
except TypeB:
print(3)
else:
print(4)
# Complexity = 3
def nested_try_finally():
try:
try:
print(1)
finally:
print(2)
finally:
print(3)
# Complexity = 3
async def foobar(a, b, c):
await whatever(a, b, c)
if await b:
pass
async with c:
pass
async for x in a:
pass
# Complexity = 1
def annotated_assign():
x: Any = None

View File

@@ -35,3 +35,20 @@ def f4():
_ = 1
__ = 1
_discarded = 1
a = 1
def f5():
global a
# Used in `f7` via `nonlocal`.
b = 1
def f6():
# F841
b = 1
def f7():
nonlocal b

42
resources/test/fixtures/FBT.py vendored Normal file
View File

@@ -0,0 +1,42 @@
def function(
posonly_nohint,
posonly_nonboolhint: int,
posonly_boolhint: bool,
posonly_boolstrhint: "bool",
/,
offset,
posorkw_nonvalued_nohint,
posorkw_nonvalued_nonboolhint: int,
posorkw_nonvalued_boolhint: bool,
posorkw_nonvalued_boolstrhint: "bool",
posorkw_boolvalued_nohint=True,
posorkw_boolvalued_nonboolhint: int = True,
posorkw_boolvalued_boolhint: bool = True,
posorkw_boolvalued_boolstrhint: "bool" = True,
posorkw_nonboolvalued_nohint=1,
posorkw_nonboolvalued_nonboolhint: int = 2,
posorkw_nonboolvalued_boolhint: bool = 3,
posorkw_nonboolvalued_boolstrhint: "bool" = 4,
*,
kwonly_nonvalued_nohint,
kwonly_nonvalued_nonboolhint: int,
kwonly_nonvalued_boolhint: bool,
kwonly_nonvalued_boolstrhint: "bool",
kwonly_boolvalued_nohint=True,
kwonly_boolvalued_nonboolhint: int = False,
kwonly_boolvalued_boolhint: bool = True,
kwonly_boolvalued_boolstrhint: "bool" = True,
kwonly_nonboolvalued_nohint=5,
kwonly_nonboolvalued_nonboolhint: int = 1,
kwonly_nonboolvalued_boolhint: bool = 1,
kwonly_nonboolvalued_boolstrhint: "bool" = 1,
**kw,
):
...
def used(do):
return do
used("a", True)
used(do=True)

View File

@@ -18,6 +18,14 @@ def f() -> None:
# Invalid (and unimplemented)
d = 1 # noqa: F841, W191
# fmt: off
# Invalid - no space before #
d = 1# noqa: E501
# Invalid - many spaces before #
d = 1 # noqa: E501
# fmt: on
# Valid
_ = """Lorem ipsum dolor sit amet.

33
resources/test/fixtures/U013.py vendored Normal file
View File

@@ -0,0 +1,33 @@
from typing import TypedDict, NotRequired, Literal
import typing
# dict literal
MyType1 = TypedDict("MyType1", {"a": int, "b": str})
# dict call
MyType2 = TypedDict("MyType2", dict(a=int, b=str))
# kwargs
MyType3 = TypedDict("MyType3", a=int, b=str)
# Empty TypedDict
MyType4 = TypedDict("MyType4")
# Literal values
MyType5 = TypedDict("MyType5", {"a": "hello"})
MyType6 = TypedDict("MyType6", a="hello")
# NotRequired
MyType7 = TypedDict("MyType7", {"a": NotRequired[dict]})
# total
MyType8 = TypedDict("MyType8", {"x": int, "y": int}, total=False)
# invalid identifiers
MyType9 = TypedDict("MyType9", {"in": int, "x-y": int})
# using Literal type
MyType10 = TypedDict("MyType10", {"key": Literal["value"]})
# using namespace TypedDict
MyType11 = typing.TypedDict("MyType11", {"key": int})

View File

@@ -16,6 +16,9 @@ multiline-quotes = "double"
docstring-quotes = "double"
avoid-escape = true
[tool.ruff.mccabe]
max-complexity = 10
[tool.ruff.pep8-naming]
ignore-names = [
"setUp",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.126"
version = "0.0.129"
edition = "2021"
[dependencies]

View File

@@ -1,7 +1,10 @@
use fnv::{FnvHashMap, FnvHashSet};
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, StmtKind};
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::SourceCodeLocator;
#[inline(always)]
fn collect_call_path_inner<'a>(expr: &'a Expr, parts: &mut Vec<&'a str>) {
@@ -261,6 +264,34 @@ pub fn to_absolute(relative: &Location, base: &Location) -> Location {
}
}
/// Return `true` if a `Stmt` has leading content.
pub fn match_leading_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
let range = Range {
location: Location::new(stmt.location.row(), 0),
end_location: stmt.location,
};
let prefix = locator.slice_source_code_range(&range);
prefix.chars().any(|char| !char.is_whitespace())
}
/// Return `true` if a `Stmt` has trailing content.
///
/// "Trailing content" means any non-whitespace, non-comment text between the
/// end of the statement and the start of the next line. A `#` terminates the
/// scan: everything after it is a comment, which does not count as content.
pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
    // Slice from the end of the statement to the start of the following line.
    let trailing = Range {
        location: stmt.end_location.unwrap(),
        end_location: Location::new(stmt.end_location.unwrap().row() + 1, 0),
    };
    let text = locator.slice_source_code_range(&trailing);
    // Find the first character that decides the answer: `#` begins a comment
    // (no content), while any other non-whitespace character is content.
    match text.chars().find(|&ch| ch == '#' || !ch.is_whitespace()) {
        Some('#') => false,
        Some(_) => true,
        None => false,
    }
}
#[cfg(test)]
mod tests {
use anyhow::Result;

View File

@@ -3,3 +3,4 @@ pub mod operations;
pub mod relocate;
pub mod types;
pub mod visitor;
pub mod whitespace;

View File

@@ -83,6 +83,7 @@ pub enum BindingKind {
Binding,
LoopVar,
Global,
Nonlocal,
Builtin,
ClassDefinition,
Definition,

View File

@@ -3,12 +3,6 @@ use rustpython_ast::{Located, Location};
use crate::ast::types::Range;
use crate::check_ast::Checker;
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
"ur\"\"\"", "ur'''", "u\"\"\"", "u'''", "r\"\"\"", "r'''", "\"\"\"", "'''",
];
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &["ur\"", "ur'", "u\"", "u'", "r\"", "r'", "\"", "'"];
/// Extract the leading words from a line of text.
pub fn leading_words(line: &str) -> String {
line.trim()

View File

@@ -35,9 +35,9 @@ use crate::settings::Settings;
use crate::source_code_locator::SourceCodeLocator;
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
use crate::{
docstrings, flake8_2020, flake8_annotations, flake8_bandit, flake8_bugbear, flake8_builtins,
flake8_comprehensions, flake8_print, flake8_tidy_imports, pep8_naming, pycodestyle, pydocstyle,
pyflakes, pyupgrade,
docstrings, flake8_2020, flake8_annotations, flake8_bandit, flake8_blind_except,
flake8_boolean_trap, flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_print,
flake8_tidy_imports, mccabe, pep8_naming, pycodestyle, pydocstyle, pyflakes, pyupgrade,
};
const GLOBAL_SCOPE_INDEX: usize = 0;
@@ -152,10 +152,10 @@ impl<'a> Checker<'a> {
/// Return `true` if a patch should be generated under the given autofix
/// `Mode`.
pub fn patch(&self) -> bool {
pub fn patch(&self, code: &CheckCode) -> bool {
// TODO(charlie): We can't fix errors in f-strings until RustPython adds
// location data.
self.autofix.patch() && self.in_f_string.is_none()
self.autofix.patch() && self.in_f_string.is_none() && self.settings.fixable.contains(code)
}
/// Return `true` if the `Expr` is a reference to `typing.${target}`.
@@ -213,21 +213,74 @@ where
// Pre-visit.
match &stmt.node {
StmtKind::Global { names } | StmtKind::Nonlocal { names } => {
let global_scope_id = self.scopes[GLOBAL_SCOPE_INDEX].id;
let scope =
&mut self.scopes[*(self.scope_stack.last().expect("No current scope found."))];
if scope.id != global_scope_id {
StmtKind::Global { names } => {
let scope_index = *self.scope_stack.last().expect("No current scope found.");
if scope_index != GLOBAL_SCOPE_INDEX {
let scope = &mut self.scopes[scope_index];
let usage = Some((scope.id, Range::from_located(stmt)));
for name in names {
// Add a binding to the current scope.
scope.values.insert(
name,
Binding {
kind: BindingKind::Global,
used: Some((global_scope_id, Range::from_located(stmt))),
used: usage,
range: Range::from_located(stmt),
},
);
}
// Mark the binding in the global scope as used.
for name in names {
if let Some(mut existing) = self.scopes[GLOBAL_SCOPE_INDEX]
.values
.get_mut(&name.as_str())
{
existing.used = usage;
}
}
}
if self.settings.enabled.contains(&CheckCode::E741) {
let location = Range::from_located(stmt);
self.add_checks(
names
.iter()
.filter_map(|name| {
pycodestyle::checks::ambiguous_variable_name(name, location)
})
.into_iter(),
);
}
}
StmtKind::Nonlocal { names } => {
let scope_index = *self.scope_stack.last().expect("No current scope found.");
if scope_index != GLOBAL_SCOPE_INDEX {
let scope = &mut self.scopes[scope_index];
let usage = Some((scope.id, Range::from_located(stmt)));
for name in names {
// Add a binding to the current scope.
scope.values.insert(
name,
Binding {
kind: BindingKind::Global,
used: usage,
range: Range::from_located(stmt),
},
);
}
// Mark the binding in the defining scopes as used too. (Skip the global scope
// and the current scope.)
for name in names {
for index in self.scope_stack.iter().skip(1).rev().skip(1) {
if let Some(mut existing) =
self.scopes[*index].values.get_mut(&name.as_str())
{
existing.used = usage;
}
}
}
}
if self.settings.enabled.contains(&CheckCode::E741) {
@@ -349,6 +402,16 @@ where
if self.settings.enabled.contains(&CheckCode::B019) {
flake8_bugbear::plugins::cached_instance_method(self, decorator_list);
}
if self.settings.enabled.contains(&CheckCode::C901) {
if let Some(check) = mccabe::checks::function_is_too_complex(
stmt,
name,
body,
self.settings.mccabe.max_complexity,
) {
self.add_check(check);
}
}
if self.settings.enabled.contains(&CheckCode::S107) {
self.add_checks(
@@ -911,15 +974,14 @@ where
if self.settings.enabled.contains(&CheckCode::B013) {
flake8_bugbear::plugins::redundant_tuple_in_exception_handler(self, handlers);
}
if self.settings.enabled.contains(&CheckCode::BLE001) {
flake8_blind_except::plugins::blind_except(self, handlers);
}
}
StmtKind::Assign { targets, value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
if let [target] = &targets[..] {
if let Some(check) =
pycodestyle::checks::do_not_assign_lambda(target, value, stmt)
{
self.add_check(check);
}
pycodestyle::plugins::do_not_assign_lambda(self, target, value, stmt)
}
}
if self.settings.enabled.contains(&CheckCode::U001) {
@@ -935,15 +997,16 @@ where
self.add_check(check);
}
}
if self.settings.enabled.contains(&CheckCode::U013) {
pyupgrade::plugins::convert_typed_dict_functional_to_class(
self, stmt, targets, value,
);
}
}
StmtKind::AnnAssign { target, value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
if let Some(value) = value {
if let Some(check) =
pycodestyle::checks::do_not_assign_lambda(target, value, stmt)
{
self.add_check(check);
}
pycodestyle::plugins::do_not_assign_lambda(self, target, value, stmt);
}
}
}
@@ -1238,7 +1301,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C400),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1252,7 +1315,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C401),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1266,7 +1329,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C402),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1281,7 +1344,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C403),
Range::from_located(expr),
)
{
@@ -1297,7 +1360,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C404),
Range::from_located(expr),
)
{
@@ -1312,7 +1375,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C405),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1326,7 +1389,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C406),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1340,7 +1403,7 @@ where
args,
keywords,
self.locator,
self.patch(),
self.patch(&CheckCode::C408),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1354,7 +1417,7 @@ where
func,
args,
self.locator,
self.patch(),
self.patch(&CheckCode::C409),
Range::from_located(expr),
)
{
@@ -1369,7 +1432,7 @@ where
func,
args,
self.locator,
self.patch(),
self.patch(&CheckCode::C410),
Range::from_located(expr),
)
{
@@ -1383,7 +1446,7 @@ where
func,
args,
self.locator,
self.patch(),
self.patch(&CheckCode::C411),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1397,7 +1460,7 @@ where
func,
args,
self.locator,
self.patch(),
self.patch(&CheckCode::C413),
Range::from_located(expr),
)
{
@@ -1444,6 +1507,12 @@ where
pyupgrade::plugins::type_of_primitive(self, expr, func, args);
}
// flake8-boolean-trap
if self.settings.enabled.contains(&CheckCode::FBT003) {
flake8_boolean_trap::plugins::check_boolean_positional_value_in_function_call(
self, args,
);
}
if let ExprKind::Name { id, ctx } = &func.node {
if id == "locals" && matches!(ctx, ExprContext::Load) {
let scope = &mut self.scopes
@@ -1507,9 +1576,13 @@ where
let check_not_in = self.settings.enabled.contains(&CheckCode::E713);
let check_not_is = self.settings.enabled.contains(&CheckCode::E714);
if check_not_in || check_not_is {
self.add_checks(
pycodestyle::checks::not_tests(op, operand, check_not_in, check_not_is)
.into_iter(),
pycodestyle::plugins::not_tests(
self,
expr,
op,
operand,
check_not_in,
check_not_is,
);
}
@@ -1639,7 +1712,7 @@ where
elt,
generators,
self.locator,
self.patch(),
self.patch(&CheckCode::C416),
Range::from_located(expr),
) {
self.add_check(check);
@@ -1933,6 +2006,16 @@ where
flake8_bugbear::plugins::function_call_argument_default(self, arguments)
}
// flake8-boolean-trap
if self.settings.enabled.contains(&CheckCode::FBT001) {
flake8_boolean_trap::plugins::check_positional_boolean_in_def(self, arguments);
}
if self.settings.enabled.contains(&CheckCode::FBT002) {
flake8_boolean_trap::plugins::check_boolean_default_value_in_function_definition(
self, arguments,
);
}
// Bind, but intentionally avoid walking default expressions, as we handle them
// upstream.
for arg in &arguments.posonlyargs {
@@ -2590,7 +2673,7 @@ impl<'a> Checker<'a> {
let child = self.parents[defined_by];
let parent = defined_in.map(|defined_in| self.parents[defined_in]);
let fix = if self.patch() {
let fix = if self.patch(&CheckCode::F401) {
let deleted: Vec<&Stmt> = self
.deletions
.iter()

View File

@@ -73,7 +73,7 @@ pub fn check_lines(
end_location: Location::new(lineno + 1, line_length + 1),
},
);
if autofix.patch() {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::deletion(
Location::new(lineno + 1, 0),
Location::new(lineno + 1, line_length + 1),
@@ -103,7 +103,7 @@ pub fn check_lines(
matches.push(check.kind.code().as_ref());
ignored.push(index)
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
ignored.push(index);
@@ -133,7 +133,7 @@ pub fn check_lines(
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
} else {
@@ -170,7 +170,7 @@ pub fn check_lines(
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
} else {
@@ -186,7 +186,7 @@ pub fn check_lines(
if enforce_noqa {
for (row, (directive, matches)) in noqa_directives {
match directive {
Directive::All(start, end) => {
Directive::All(spaces, start, end) => {
if matches.is_empty() {
let mut check = Check::new(
CheckKind::UnusedNOQA(None),
@@ -195,16 +195,16 @@ pub fn check_lines(
end_location: Location::new(row + 1, end),
},
);
if autofix.patch() {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
}
line_checks.push(check);
}
}
Directive::Codes(start, end, codes) => {
Directive::Codes(spaces, start, end, codes) => {
let mut invalid_codes = vec![];
let mut valid_codes = vec![];
for code in codes {
@@ -223,15 +223,15 @@ pub fn check_lines(
end_location: Location::new(row + 1, end),
},
);
if autofix.patch() {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
if valid_codes.is_empty() {
check.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
} else {
check.amend(Fix::replacement(
format!(" # noqa: {}", valid_codes.join(", ")),
format!("# noqa: {}", valid_codes.join(", ")),
Location::new(row + 1, start),
Location::new(row + 1, lines[row].chars().count()),
));

View File

@@ -36,7 +36,7 @@ pub fn check_tokens(
// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
if matches!(tok, Tok::String { .. } | Tok::Comment) {
for check in rules::checks::ambiguous_unicode_character(
checks.extend(rules::checks::ambiguous_unicode_character(
locator,
start,
end,
@@ -49,12 +49,9 @@ pub fn check_tokens(
} else {
Context::Comment
},
autofix.patch(),
) {
if settings.enabled.contains(check.kind.code()) {
checks.push(check);
}
}
settings,
autofix,
));
}
}

View File

@@ -103,6 +103,8 @@ pub enum CheckCode {
B025,
B026,
B027,
// flake8-blind-except
BLE001,
// flake8-comprehensions
C400,
C401,
@@ -120,6 +122,8 @@ pub enum CheckCode {
C415,
C416,
C417,
// mccabe
C901,
// flake8-tidy-imports
I252,
// flake8-print
@@ -165,6 +169,7 @@ pub enum CheckCode {
U010,
U011,
U012,
U013,
// pydocstyle
D100,
D101,
@@ -235,6 +240,10 @@ pub enum CheckCode {
S105,
S106,
S107,
// flake8-boolean-trap
FBT001,
FBT002,
FBT003,
// Ruff
RUF001,
RUF002,
@@ -253,6 +262,7 @@ pub enum CheckCategory {
PEP8Naming,
Flake8Bandit,
Flake8Comprehensions,
Flake8BooleanTrap,
Flake8Bugbear,
Flake8Builtins,
Flake8TidyImports,
@@ -260,6 +270,8 @@ pub enum CheckCategory {
Flake8Quotes,
Flake8Annotations,
Flake82020,
Flake8BlindExcept,
McCabe,
Ruff,
Meta,
}
@@ -271,6 +283,7 @@ impl CheckCategory {
CheckCategory::Pyflakes => "Pyflakes",
CheckCategory::Isort => "isort",
CheckCategory::Flake8Bandit => "flake8-bandit",
CheckCategory::Flake8BooleanTrap => "flake8-boolean-trap",
CheckCategory::Flake8Builtins => "flake8-builtins",
CheckCategory::Flake8Bugbear => "flake8-bugbear",
CheckCategory::Flake8Comprehensions => "flake8-comprehensions",
@@ -279,9 +292,11 @@ impl CheckCategory {
CheckCategory::Flake8Quotes => "flake8-quotes",
CheckCategory::Flake8Annotations => "flake8-annotations",
CheckCategory::Flake82020 => "flake8-2020",
CheckCategory::Flake8BlindExcept => "flake8-blind-except",
CheckCategory::Pyupgrade => "pyupgrade",
CheckCategory::Pydocstyle => "pydocstyle",
CheckCategory::PEP8Naming => "pep8-naming",
CheckCategory::McCabe => "mccabe",
CheckCategory::Ruff => "Ruff-specific rules",
CheckCategory::Meta => "Meta rules",
}
@@ -314,6 +329,13 @@ impl CheckCategory {
CheckCategory::Pydocstyle => Some("https://pypi.org/project/pydocstyle/6.1.1/"),
CheckCategory::PEP8Naming => Some("https://pypi.org/project/pep8-naming/0.13.2/"),
CheckCategory::Flake8Bandit => Some("https://pypi.org/project/flake8-bandit/4.1.1/"),
CheckCategory::Flake8BlindExcept => {
Some("https://pypi.org/project/flake8-blind-except/0.2.1/")
}
CheckCategory::McCabe => Some("https://pypi.org/project/mccabe/0.7.0/"),
CheckCategory::Flake8BooleanTrap => {
Some("https://pypi.org/project/flake8-boolean-trap/0.1.0/")
}
CheckCategory::Ruff => None,
CheckCategory::Meta => None,
}
@@ -388,6 +410,8 @@ pub enum CheckKind {
BuiltinVariableShadowing(String),
BuiltinArgumentShadowing(String),
BuiltinAttributeShadowing(String),
// flake8-blind-except
BlindExcept,
// flake8-bugbear
UnaryPrefixIncrement,
AssignmentToOsEnviron,
@@ -476,6 +500,7 @@ pub enum CheckKind {
UnnecessaryFutureImport(Vec<String>),
UnnecessaryLRUCacheParams,
UnnecessaryEncodeUTF8,
ConvertTypedDictFunctionalToClass,
// pydocstyle
BlankLineAfterLastSection(String),
BlankLineAfterSection(String),
@@ -546,6 +571,12 @@ pub enum CheckKind {
HardcodedPasswordString(String),
HardcodedPasswordFuncArg(String),
HardcodedPasswordDefault(String),
// mccabe
FunctionIsTooComplex(String, usize),
// flake8-boolean-trap
BooleanPositionalArgInFunctionDefinition,
BooleanDefaultValueInFunctionDefinition,
BooleanPositionalValueInFunctionCall,
// Ruff
AmbiguousUnicodeCharacterString(char, char),
AmbiguousUnicodeCharacterDocstring(char, char),
@@ -725,6 +756,8 @@ impl CheckCode {
CheckCode::YTT301 => CheckKind::SysVersion0Referenced,
CheckCode::YTT302 => CheckKind::SysVersionCmpStr10,
CheckCode::YTT303 => CheckKind::SysVersionSlice1Referenced,
// flake8-blind-except
CheckCode::BLE001 => CheckKind::BlindExcept,
// pyupgrade
CheckCode::U001 => CheckKind::UselessMetaclassType,
CheckCode::U003 => CheckKind::TypeOfPrimitive(Primitive::Str),
@@ -740,6 +773,7 @@ impl CheckCode {
CheckCode::U010 => CheckKind::UnnecessaryFutureImport(vec!["...".to_string()]),
CheckCode::U011 => CheckKind::UnnecessaryLRUCacheParams,
CheckCode::U012 => CheckKind::UnnecessaryEncodeUTF8,
CheckCode::U013 => CheckKind::ConvertTypedDictFunctionalToClass,
// pydocstyle
CheckCode::D100 => CheckKind::PublicModule,
CheckCode::D101 => CheckKind::PublicClass,
@@ -826,6 +860,11 @@ impl CheckCode {
CheckCode::S105 => CheckKind::HardcodedPasswordString("...".to_string()),
CheckCode::S106 => CheckKind::HardcodedPasswordFuncArg("...".to_string()),
CheckCode::S107 => CheckKind::HardcodedPasswordDefault("...".to_string()),
CheckCode::C901 => CheckKind::FunctionIsTooComplex("...".to_string(), 10),
// flake8-boolean-trap
CheckCode::FBT001 => CheckKind::BooleanPositionalArgInFunctionDefinition,
CheckCode::FBT002 => CheckKind::BooleanDefaultValueInFunctionDefinition,
CheckCode::FBT003 => CheckKind::BooleanPositionalValueInFunctionCall,
// Ruff
CheckCode::RUF001 => CheckKind::AmbiguousUnicodeCharacterString('𝐁', 'B'),
CheckCode::RUF002 => CheckKind::AmbiguousUnicodeCharacterDocstring('𝐁', 'B'),
@@ -909,6 +948,7 @@ impl CheckCode {
CheckCode::B025 => CheckCategory::Flake8Bugbear,
CheckCode::B026 => CheckCategory::Flake8Bugbear,
CheckCode::B027 => CheckCategory::Flake8Bugbear,
CheckCode::BLE001 => CheckCategory::Flake8BlindExcept,
CheckCode::C400 => CheckCategory::Flake8Comprehensions,
CheckCode::C401 => CheckCategory::Flake8Comprehensions,
CheckCode::C402 => CheckCategory::Flake8Comprehensions,
@@ -964,6 +1004,7 @@ impl CheckCode {
CheckCode::U010 => CheckCategory::Pyupgrade,
CheckCode::U011 => CheckCategory::Pyupgrade,
CheckCode::U012 => CheckCategory::Pyupgrade,
CheckCode::U013 => CheckCategory::Pyupgrade,
CheckCode::D100 => CheckCategory::Pydocstyle,
CheckCode::D101 => CheckCategory::Pydocstyle,
CheckCode::D102 => CheckCategory::Pydocstyle,
@@ -1030,6 +1071,10 @@ impl CheckCode {
CheckCode::S105 => CheckCategory::Flake8Bandit,
CheckCode::S106 => CheckCategory::Flake8Bandit,
CheckCode::S107 => CheckCategory::Flake8Bandit,
CheckCode::C901 => CheckCategory::McCabe,
CheckCode::FBT001 => CheckCategory::Flake8BooleanTrap,
CheckCode::FBT002 => CheckCategory::Flake8BooleanTrap,
CheckCode::FBT003 => CheckCategory::Flake8BooleanTrap,
CheckCode::RUF001 => CheckCategory::Ruff,
CheckCode::RUF002 => CheckCategory::Ruff,
CheckCode::RUF003 => CheckCategory::Ruff,
@@ -1118,6 +1163,8 @@ impl CheckKind {
CheckKind::DuplicateTryBlockException(_) => &CheckCode::B025,
CheckKind::StarArgUnpackingAfterKeywordArg => &CheckCode::B026,
CheckKind::EmptyMethodWithoutAbstractDecorator(_) => &CheckCode::B027,
// flake8-blind-except
CheckKind::BlindExcept => &CheckCode::BLE001,
// flake8-comprehensions
CheckKind::UnnecessaryGeneratorList => &CheckCode::C400,
CheckKind::UnnecessaryGeneratorSet => &CheckCode::C401,
@@ -1180,6 +1227,7 @@ impl CheckKind {
CheckKind::UnnecessaryFutureImport(_) => &CheckCode::U010,
CheckKind::UnnecessaryLRUCacheParams => &CheckCode::U011,
CheckKind::UnnecessaryEncodeUTF8 => &CheckCode::U012,
CheckKind::ConvertTypedDictFunctionalToClass => &CheckCode::U013,
// pydocstyle
CheckKind::BlankLineAfterLastSection(_) => &CheckCode::D413,
CheckKind::BlankLineAfterSection(_) => &CheckCode::D410,
@@ -1250,6 +1298,12 @@ impl CheckKind {
CheckKind::HardcodedPasswordString(..) => &CheckCode::S105,
CheckKind::HardcodedPasswordFuncArg(..) => &CheckCode::S106,
CheckKind::HardcodedPasswordDefault(..) => &CheckCode::S107,
// McCabe
CheckKind::FunctionIsTooComplex(..) => &CheckCode::C901,
// flake8-boolean-trap
CheckKind::BooleanPositionalArgInFunctionDefinition => &CheckCode::FBT001,
CheckKind::BooleanDefaultValueInFunctionDefinition => &CheckCode::FBT002,
CheckKind::BooleanPositionalValueInFunctionCall => &CheckCode::FBT003,
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(..) => &CheckCode::RUF001,
CheckKind::AmbiguousUnicodeCharacterDocstring(..) => &CheckCode::RUF002,
@@ -1722,6 +1776,9 @@ impl CheckKind {
"Unnecessary parameters to `functools.lru_cache`".to_string()
}
CheckKind::UnnecessaryEncodeUTF8 => "Unnecessary call to `encode` as UTF-8".to_string(),
CheckKind::ConvertTypedDictFunctionalToClass => {
"Convert `TypedDict` functional syntax to class syntax".to_string()
}
// pydocstyle
CheckKind::FitsOnOneLine => "One-line docstring should fit on one line".to_string(),
CheckKind::BlankLineAfterSummary => {
@@ -1902,6 +1959,22 @@ impl CheckKind {
CheckKind::HardcodedPasswordDefault(string) => {
format!("Possible hardcoded password: `\"{string}\"`")
}
// flake8-blind-except
CheckKind::BlindExcept => "Blind except Exception: statement".to_string(),
// McCabe
CheckKind::FunctionIsTooComplex(name, complexity) => {
format!("`{name}` is too complex ({complexity})")
}
// flake8-boolean-trap
CheckKind::BooleanPositionalArgInFunctionDefinition => {
"Boolean positional arg in function definition".to_string()
}
CheckKind::BooleanDefaultValueInFunctionDefinition => {
"Boolean default value in function definition".to_string()
}
CheckKind::BooleanPositionalValueInFunctionCall => {
"Boolean positional value in function call".to_string()
}
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(confusable, representant) => {
format!(
@@ -1965,45 +2038,51 @@ impl CheckKind {
pub fn fixable(&self) -> bool {
matches!(
self,
CheckKind::AmbiguousUnicodeCharacterString(_, _)
| CheckKind::AmbiguousUnicodeCharacterDocstring(_, _)
| CheckKind::BlankLineAfterLastSection(_)
| CheckKind::BlankLineAfterSection(_)
CheckKind::AmbiguousUnicodeCharacterString(..)
| CheckKind::AmbiguousUnicodeCharacterDocstring(..)
| CheckKind::BlankLineAfterLastSection(..)
| CheckKind::BlankLineAfterSection(..)
| CheckKind::BlankLineAfterSummary
| CheckKind::BlankLineBeforeSection(_)
| CheckKind::CapitalizeSectionName(_)
| CheckKind::DashedUnderlineAfterSection(_)
| CheckKind::DeprecatedUnittestAlias(_, _)
| CheckKind::BlankLineBeforeSection(..)
| CheckKind::CapitalizeSectionName(..)
| CheckKind::ConvertTypedDictFunctionalToClass
| CheckKind::DashedUnderlineAfterSection(..)
| CheckKind::DeprecatedUnittestAlias(..)
| CheckKind::DoNotAssertFalse
| CheckKind::DuplicateHandlerException(_)
| CheckKind::DoNotAssignLambda
| CheckKind::DuplicateHandlerException(..)
| CheckKind::GetAttrWithConstant
| CheckKind::IsLiteral
| CheckKind::NewLineAfterLastParagraph
| CheckKind::NewLineAfterSectionName(_)
| CheckKind::NoBlankLineAfterFunction(_)
| CheckKind::NoBlankLineBeforeClass(_)
| CheckKind::NoBlankLineBeforeFunction(_)
| CheckKind::NoBlankLinesBetweenHeaderAndContent(_)
| CheckKind::NewLineAfterSectionName(..)
| CheckKind::NoBlankLineAfterFunction(..)
| CheckKind::NoBlankLineBeforeClass(..)
| CheckKind::NoBlankLineBeforeFunction(..)
| CheckKind::NoBlankLinesBetweenHeaderAndContent(..)
| CheckKind::NoOverIndentation
| CheckKind::NoSurroundingWhitespace
| CheckKind::NoUnderIndentation
| CheckKind::OneBlankLineAfterClass(_)
| CheckKind::OneBlankLineBeforeClass(_)
| CheckKind::NoneComparison(..)
| CheckKind::NotInTest
| CheckKind::NotIsTest
| CheckKind::OneBlankLineAfterClass(..)
| CheckKind::OneBlankLineBeforeClass(..)
| CheckKind::PEP3120UnnecessaryCodingComment
| CheckKind::PPrintFound
| CheckKind::PrintFound
| CheckKind::RaiseNotImplemented
| CheckKind::SectionNameEndsInColon(_)
| CheckKind::SectionNotOverIndented(_)
| CheckKind::SectionUnderlineAfterName(_)
| CheckKind::SectionUnderlineMatchesSectionLength(_)
| CheckKind::SectionUnderlineNotOverIndented(_)
| CheckKind::SectionNameEndsInColon(..)
| CheckKind::SectionNotOverIndented(..)
| CheckKind::SectionUnderlineAfterName(..)
| CheckKind::SectionUnderlineMatchesSectionLength(..)
| CheckKind::SectionUnderlineNotOverIndented(..)
| CheckKind::SuperCallWithParameters
| CheckKind::TypeOfPrimitive(_)
| CheckKind::UnnecessaryCollectionCall(_)
| CheckKind::UnnecessaryComprehension(_)
| CheckKind::TrueFalseComparison(..)
| CheckKind::TypeOfPrimitive(..)
| CheckKind::UnnecessaryCollectionCall(..)
| CheckKind::UnnecessaryComprehension(..)
| CheckKind::UnnecessaryEncodeUTF8
| CheckKind::UnnecessaryFutureImport(_)
| CheckKind::UnnecessaryFutureImport(..)
| CheckKind::UnnecessaryGeneratorDict
| CheckKind::UnnecessaryGeneratorList
| CheckKind::UnnecessaryGeneratorSet
@@ -2011,18 +2090,18 @@ impl CheckKind {
| CheckKind::UnnecessaryListCall
| CheckKind::UnnecessaryListComprehensionDict
| CheckKind::UnnecessaryListComprehensionSet
| CheckKind::UnnecessaryLiteralDict(_)
| CheckKind::UnnecessaryLiteralSet(_)
| CheckKind::UnnecessaryLiteralWithinListCall(_)
| CheckKind::UnnecessaryLiteralWithinTupleCall(_)
| CheckKind::UnnecessaryLiteralDict(..)
| CheckKind::UnnecessaryLiteralSet(..)
| CheckKind::UnnecessaryLiteralWithinListCall(..)
| CheckKind::UnnecessaryLiteralWithinTupleCall(..)
| CheckKind::UnsortedImports
| CheckKind::UnusedImport(_, false)
| CheckKind::UnusedLoopControlVariable(_)
| CheckKind::UnusedNOQA(_)
| CheckKind::UsePEP585Annotation(_)
| CheckKind::UnusedLoopControlVariable(..)
| CheckKind::UnusedNOQA(..)
| CheckKind::UsePEP585Annotation(..)
| CheckKind::UsePEP604Annotation
| CheckKind::UselessMetaclassType
| CheckKind::UselessObjectInheritance(_)
| CheckKind::UselessObjectInheritance(..)
)
}
}

View File

@@ -63,6 +63,10 @@ pub enum CheckCodePrefix {
B025,
B026,
B027,
BLE,
BLE0,
BLE00,
BLE001,
C,
C4,
C40,
@@ -83,6 +87,9 @@ pub enum CheckCodePrefix {
C415,
C416,
C417,
C9,
C90,
C901,
D,
D1,
D10,
@@ -211,6 +218,12 @@ pub enum CheckCodePrefix {
F9,
F90,
F901,
FBT,
FBT0,
FBT00,
FBT001,
FBT002,
FBT003,
I,
I0,
I00,
@@ -283,6 +296,7 @@ pub enum CheckCodePrefix {
U010,
U011,
U012,
U013,
W,
W2,
W29,
@@ -480,6 +494,10 @@ impl CheckCodePrefix {
CheckCodePrefix::B025 => vec![CheckCode::B025],
CheckCodePrefix::B026 => vec![CheckCode::B026],
CheckCodePrefix::B027 => vec![CheckCode::B027],
CheckCodePrefix::BLE => vec![CheckCode::BLE001],
CheckCodePrefix::BLE0 => vec![CheckCode::BLE001],
CheckCodePrefix::BLE00 => vec![CheckCode::BLE001],
CheckCodePrefix::BLE001 => vec![CheckCode::BLE001],
CheckCodePrefix::C => vec![
CheckCode::C400,
CheckCode::C401,
@@ -497,6 +515,7 @@ impl CheckCodePrefix {
CheckCode::C415,
CheckCode::C416,
CheckCode::C417,
CheckCode::C901,
],
CheckCodePrefix::C4 => vec![
CheckCode::C400,
@@ -552,6 +571,9 @@ impl CheckCodePrefix {
CheckCodePrefix::C415 => vec![CheckCode::C415],
CheckCodePrefix::C416 => vec![CheckCode::C416],
CheckCodePrefix::C417 => vec![CheckCode::C417],
CheckCodePrefix::C9 => vec![CheckCode::C901],
CheckCodePrefix::C90 => vec![CheckCode::C901],
CheckCodePrefix::C901 => vec![CheckCode::C901],
CheckCodePrefix::D => vec![
CheckCode::D100,
CheckCode::D101,
@@ -928,6 +950,12 @@ impl CheckCodePrefix {
CheckCodePrefix::F9 => vec![CheckCode::F901],
CheckCodePrefix::F90 => vec![CheckCode::F901],
CheckCodePrefix::F901 => vec![CheckCode::F901],
CheckCodePrefix::FBT => vec![CheckCode::FBT001, CheckCode::FBT002, CheckCode::FBT003],
CheckCodePrefix::FBT0 => vec![CheckCode::FBT001, CheckCode::FBT002, CheckCode::FBT003],
CheckCodePrefix::FBT00 => vec![CheckCode::FBT001, CheckCode::FBT002, CheckCode::FBT003],
CheckCodePrefix::FBT001 => vec![CheckCode::FBT001],
CheckCodePrefix::FBT002 => vec![CheckCode::FBT002],
CheckCodePrefix::FBT003 => vec![CheckCode::FBT003],
CheckCodePrefix::I => vec![CheckCode::I252, CheckCode::I001],
CheckCodePrefix::I0 => vec![CheckCode::I001],
CheckCodePrefix::I00 => vec![CheckCode::I001],
@@ -1082,6 +1110,7 @@ impl CheckCodePrefix {
CheckCode::U010,
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
],
CheckCodePrefix::U0 => vec![
CheckCode::U001,
@@ -1095,6 +1124,7 @@ impl CheckCodePrefix {
CheckCode::U010,
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
],
CheckCodePrefix::U00 => vec![
CheckCode::U001,
@@ -1114,10 +1144,16 @@ impl CheckCodePrefix {
CheckCodePrefix::U007 => vec![CheckCode::U007],
CheckCodePrefix::U008 => vec![CheckCode::U008],
CheckCodePrefix::U009 => vec![CheckCode::U009],
CheckCodePrefix::U01 => vec![CheckCode::U010, CheckCode::U011, CheckCode::U012],
CheckCodePrefix::U01 => vec![
CheckCode::U010,
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
],
CheckCodePrefix::U010 => vec![CheckCode::U010],
CheckCodePrefix::U011 => vec![CheckCode::U011],
CheckCodePrefix::U012 => vec![CheckCode::U012],
CheckCodePrefix::U013 => vec![CheckCode::U013],
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
CheckCodePrefix::W2 => vec![CheckCode::W292],
CheckCodePrefix::W29 => vec![CheckCode::W292],
@@ -1226,6 +1262,10 @@ impl CheckCodePrefix {
CheckCodePrefix::B025 => PrefixSpecificity::Explicit,
CheckCodePrefix::B026 => PrefixSpecificity::Explicit,
CheckCodePrefix::B027 => PrefixSpecificity::Explicit,
CheckCodePrefix::BLE => PrefixSpecificity::Category,
CheckCodePrefix::BLE0 => PrefixSpecificity::Hundreds,
CheckCodePrefix::BLE00 => PrefixSpecificity::Tens,
CheckCodePrefix::BLE001 => PrefixSpecificity::Explicit,
CheckCodePrefix::C => PrefixSpecificity::Category,
CheckCodePrefix::C4 => PrefixSpecificity::Hundreds,
CheckCodePrefix::C40 => PrefixSpecificity::Tens,
@@ -1246,6 +1286,9 @@ impl CheckCodePrefix {
CheckCodePrefix::C415 => PrefixSpecificity::Explicit,
CheckCodePrefix::C416 => PrefixSpecificity::Explicit,
CheckCodePrefix::C417 => PrefixSpecificity::Explicit,
CheckCodePrefix::C9 => PrefixSpecificity::Hundreds,
CheckCodePrefix::C90 => PrefixSpecificity::Tens,
CheckCodePrefix::C901 => PrefixSpecificity::Explicit,
CheckCodePrefix::D => PrefixSpecificity::Category,
CheckCodePrefix::D1 => PrefixSpecificity::Hundreds,
CheckCodePrefix::D10 => PrefixSpecificity::Tens,
@@ -1374,6 +1417,12 @@ impl CheckCodePrefix {
CheckCodePrefix::F9 => PrefixSpecificity::Hundreds,
CheckCodePrefix::F90 => PrefixSpecificity::Tens,
CheckCodePrefix::F901 => PrefixSpecificity::Explicit,
CheckCodePrefix::FBT => PrefixSpecificity::Category,
CheckCodePrefix::FBT0 => PrefixSpecificity::Hundreds,
CheckCodePrefix::FBT00 => PrefixSpecificity::Tens,
CheckCodePrefix::FBT001 => PrefixSpecificity::Explicit,
CheckCodePrefix::FBT002 => PrefixSpecificity::Explicit,
CheckCodePrefix::FBT003 => PrefixSpecificity::Explicit,
CheckCodePrefix::I => PrefixSpecificity::Category,
CheckCodePrefix::I0 => PrefixSpecificity::Hundreds,
CheckCodePrefix::I00 => PrefixSpecificity::Tens,
@@ -1446,6 +1495,7 @@ impl CheckCodePrefix {
CheckCodePrefix::U010 => PrefixSpecificity::Explicit,
CheckCodePrefix::U011 => PrefixSpecificity::Explicit,
CheckCodePrefix::U012 => PrefixSpecificity::Explicit,
CheckCodePrefix::U013 => PrefixSpecificity::Explicit,
CheckCodePrefix::W => PrefixSpecificity::Category,
CheckCodePrefix::W2 => PrefixSpecificity::Hundreds,
CheckCodePrefix::W29 => PrefixSpecificity::Tens,

View File

@@ -1,19 +1,16 @@
use std::fmt;
use std::path::PathBuf;
use clap::{command, Parser};
use fnv::FnvHashMap;
use log::warn;
use regex::Regex;
use crate::checks_gen::CheckCodePrefix;
use crate::logging::LogLevel;
use crate::printer::SerializationFormat;
use crate::settings::configuration::Configuration;
use crate::settings::types::{PatternPrefixPair, PerFileIgnore, PythonVersion};
#[derive(Debug, Parser)]
#[command(author, about = "ruff: An extremely fast Python linter.")]
#[command(author, about = "Ruff: An extremely fast Python linter.")]
#[command(version)]
pub struct Cli {
#[arg(required = true)]
@@ -66,16 +63,27 @@ pub struct Cli {
/// excluded ones.
#[arg(long, value_delimiter = ',')]
pub extend_exclude: Vec<String>,
/// List of error codes to treat as eligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
pub fixable: Vec<CheckCodePrefix>,
/// List of error codes to treat as ineligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
pub unfixable: Vec<CheckCodePrefix>,
/// List of mappings from file pattern to code to exclude
#[arg(long, value_delimiter = ',')]
pub per_file_ignores: Vec<PatternPrefixPair>,
/// Output serialization format for error messages.
#[arg(long, value_enum, default_value_t=SerializationFormat::Text)]
pub format: SerializationFormat,
/// See the files ruff will be run against with the current settings.
/// Show violations with source code.
#[arg(long)]
pub show_source: bool,
/// See the files Ruff will be run against with the current settings.
#[arg(long)]
pub show_files: bool,
/// See ruff's settings.
/// See Ruff's settings.
#[arg(long)]
pub show_settings: bool,
/// Enable automatic additions of noqa directives to failing lines.
@@ -91,6 +99,9 @@ pub struct Cli {
/// formatting.
#[arg(long)]
pub line_length: Option<usize>,
/// Max McCabe complexity allowed for a function.
#[arg(long)]
pub max_complexity: Option<usize>,
/// Round-trip auto-formatting.
// TODO(charlie): This should be a sub-command.
#[arg(long, hide = true)]
@@ -131,64 +142,6 @@ pub fn extract_log_level(cli: &Cli) -> LogLevel {
}
}
pub enum Warnable {
Select,
ExtendSelect,
}
impl fmt::Display for Warnable {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
Warnable::Select => fmt.write_str("--select"),
Warnable::ExtendSelect => fmt.write_str("--extend-select"),
}
}
}
/// Warn the user if they attempt to enable a code that won't be respected.
pub fn warn_on(
flag: Warnable,
codes: &[CheckCodePrefix],
cli_ignore: &[CheckCodePrefix],
cli_extend_ignore: &[CheckCodePrefix],
pyproject_configuration: &Configuration,
pyproject_path: Option<&PathBuf>,
) {
for code in codes {
if !cli_ignore.is_empty() {
if cli_ignore.contains(code) {
warn!("{code:?} was passed to {flag}, but ignored via --ignore")
}
} else if pyproject_configuration.ignore.contains(code) {
if let Some(path) = pyproject_path {
warn!(
"{code:?} was passed to {flag}, but ignored by the `ignore` field in {}",
path.to_string_lossy()
)
} else {
warn!("{code:?} was passed to {flag}, but ignored by the default `ignore` field",)
}
}
if !cli_extend_ignore.is_empty() {
if cli_extend_ignore.contains(code) {
warn!("{code:?} was passed to {flag}, but ignored via --extend-ignore")
}
} else if pyproject_configuration.extend_ignore.contains(code) {
if let Some(path) = pyproject_path {
warn!(
"{code:?} was passed to {flag}, but ignored by the `extend_ignore` field in {}",
path.to_string_lossy()
)
} else {
warn!(
"{code:?} was passed to {flag}, but ignored by the default `extend_ignore` \
field"
)
}
}
}
}
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,

View File

@@ -55,18 +55,14 @@ impl SourceGenerator {
}
fn newline(&mut self) -> fmt::Result {
if self.initial {
self.initial = false;
} else {
if !self.initial {
self.new_lines = std::cmp::max(self.new_lines, 1);
}
Ok(())
}
fn newlines(&mut self, extra: usize) -> fmt::Result {
if self.initial {
self.initial = false;
} else {
if !self.initial {
self.new_lines = std::cmp::max(self.new_lines, 1 + extra);
}
Ok(())
@@ -121,6 +117,7 @@ impl SourceGenerator {
self.newline()?;
self.p(&" ".repeat(self.indentation))?;
$body
self.initial = false;
}};
}
@@ -145,12 +142,11 @@ impl SourceGenerator {
self.unparse_expr(returns, precedence::EXPR)?;
}
self.p(":")?;
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
})
});
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
}
StmtKind::AsyncFunctionDef {
name,
@@ -172,11 +168,11 @@ impl SourceGenerator {
self.unparse_expr(returns, precedence::EXPR)?;
}
self.p(":")?;
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
})
});
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
}
StmtKind::ClassDef {
name,
@@ -209,11 +205,11 @@ impl SourceGenerator {
}
self.p_if(!first, ")")?;
self.p(":")?;
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
})
});
self.body(body)?;
if self.indentation == 0 {
self.newlines(2)?;
}
}
StmtKind::Return { value } => {
statement!({
@@ -299,14 +295,14 @@ impl SourceGenerator {
self.p(" in ")?;
self.unparse_expr(iter, precedence::TEST)?;
self.p(":")?;
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
self.body(orelse)?;
});
}
})
});
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
});
self.body(orelse)?;
}
}
StmtKind::AsyncFor {
target,
@@ -321,59 +317,59 @@ impl SourceGenerator {
self.p(" in ")?;
self.unparse_expr(iter, precedence::TEST)?;
self.p(":")?;
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
self.body(orelse)?;
});
}
})
});
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
});
self.body(orelse)?;
}
}
StmtKind::While { test, body, orelse } => {
statement!({
self.p("while ")?;
self.unparse_expr(test, precedence::TEST)?;
self.p(":")?;
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
self.body(orelse)?;
});
}
})
});
self.body(body)?;
if !orelse.is_empty() {
statement!({
self.p("else:")?;
});
self.body(orelse)?;
}
}
StmtKind::If { test, body, orelse } => {
statement!({
self.p("if ")?;
self.unparse_expr(test, precedence::TEST)?;
self.p(":")?;
self.body(body)?;
let mut orelse_: &Vec<Stmt<U>> = orelse;
loop {
if orelse_.len() == 1 && matches!(orelse_[0].node, StmtKind::If { .. }) {
if let StmtKind::If { body, test, orelse } = &orelse_[0].node {
statement!({
self.p("elif ")?;
self.unparse_expr(test, precedence::TEST)?;
self.p(":")?;
self.body(body)?;
});
orelse_ = orelse;
}
} else {
if !orelse_.is_empty() {
statement!({
self.p("else:")?;
self.body(orelse_)?;
});
}
break;
}
}
});
self.body(body)?;
let mut orelse_: &Vec<Stmt<U>> = orelse;
loop {
if orelse_.len() == 1 && matches!(orelse_[0].node, StmtKind::If { .. }) {
if let StmtKind::If { body, test, orelse } = &orelse_[0].node {
statement!({
self.p("elif ")?;
self.unparse_expr(test, precedence::TEST)?;
self.p(":")?;
});
self.body(body)?;
orelse_ = orelse;
}
} else {
if !orelse_.is_empty() {
statement!({
self.p("else:")?;
});
self.body(orelse_)?;
}
break;
}
}
}
StmtKind::With { items, body, .. } => {
statement!({
@@ -384,8 +380,8 @@ impl SourceGenerator {
self.unparse_withitem(item)?;
}
self.p(":")?;
self.body(body)?;
})
});
self.body(body)?;
}
StmtKind::AsyncWith { items, body, .. } => {
statement!({
@@ -396,8 +392,8 @@ impl SourceGenerator {
self.unparse_withitem(item)?;
}
self.p(":")?;
self.body(body)?;
})
});
self.body(body)?;
}
StmtKind::Match { .. } => {}
StmtKind::Raise { exc, cause } => {
@@ -421,27 +417,27 @@ impl SourceGenerator {
} => {
statement!({
self.p("try:")?;
self.body(body)?;
});
self.body(body)?;
for handler in handlers {
statement!({
self.unparse_excepthandler(handler)?;
});
}
for handler in handlers {
statement!({
self.unparse_excepthandler(handler)?;
});
}
if !orelse.is_empty() {
statement!({
self.p("else:")?;
self.body(orelse)?;
});
}
if !finalbody.is_empty() {
statement!({
self.p("finally:")?;
self.body(finalbody)?;
});
}
})
if !orelse.is_empty() {
statement!({
self.p("else:")?;
});
self.body(orelse)?;
}
if !finalbody.is_empty() {
statement!({
self.p("finally:")?;
});
self.body(finalbody)?;
}
}
StmtKind::Assert { test, msg } => {
statement!({

View File

@@ -0,0 +1,5 @@
/// Docstring openers formed from a triple quote plus an optional string prefix
/// (`u`, `r`, `ur`). Prefixed forms are listed before bare ones — presumably so
/// callers matching against this list try the longest candidate first; confirm
/// at the call sites.
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
    "ur\"\"\"", "ur'''", "u\"\"\"", "u'''", "r\"\"\"", "r'''", "\"\"\"", "'''",
];

/// Single-quote equivalents of [`TRIPLE_QUOTE_PREFIXES`], in the same order.
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &["ur\"", "ur'", "u\"", "u'", "r\"", "r'", "\"", "'"];

View File

@@ -1,7 +1,7 @@
pub mod constants;
pub mod definition;
pub mod extraction;
pub mod google;
pub mod helpers;
pub mod numpy;
pub mod sections;
pub mod styles;

View File

@@ -1,4 +1,4 @@
use crate::docstrings::helpers;
use crate::ast::whitespace;
use crate::docstrings::styles::SectionStyle;
#[derive(Debug)]
@@ -14,7 +14,7 @@ pub(crate) struct SectionContext<'a> {
fn suspected_as_section(line: &str, style: &SectionStyle) -> bool {
style
.lowercase_section_names()
.contains(&helpers::leading_words(line).to_lowercase().as_str())
.contains(&whitespace::leading_words(line).to_lowercase().as_str())
}
/// Check if the suspected context is really a section header.
@@ -64,7 +64,7 @@ pub(crate) fn section_contexts<'a>(
let mut contexts = vec![];
for lineno in suspected_section_indices {
let context = SectionContext {
section_name: helpers::leading_words(lines[lineno]),
section_name: whitespace::leading_words(lines[lineno]),
previous_line: lines[lineno - 1],
line: lines[lineno],
following_lines: &lines[lineno + 1..],

View File

@@ -0,0 +1 @@
pub mod plugins;

View File

@@ -0,0 +1,23 @@
use rustpython_ast::{Excepthandler, ExcepthandlerKind, ExprKind};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// BLE001: flag `except` clauses that catch a blind `Exception` or
/// `BaseException` by name.
pub fn blind_except(checker: &mut Checker, handlers: &[Excepthandler]) {
    for handler in handlers {
        let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
        // A bare `except:` has no `type_`; nothing to report for it here.
        if let Some(exception) = type_ {
            if let ExprKind::Name { id, .. } = &exception.node {
                // The name can match at most one of the two blind bases.
                if id == "BaseException" || id == "Exception" {
                    checker.add_check(Check::new(
                        CheckKind::BlindExcept,
                        Range::from_located(exception),
                    ));
                }
            }
        }
    }
}

View File

@@ -0,0 +1 @@
pub mod plugins;

View File

@@ -0,0 +1,71 @@
use rustpython_ast::{Arguments, ExprKind};
use rustpython_parser::ast::{Constant, Expr};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// Return `true` if `arg` is a literal boolean constant (`True` / `False`).
fn is_boolean_arg(arg: &Expr) -> bool {
    match &arg.node {
        ExprKind::Constant { value, .. } => matches!(value, Constant::Bool(_)),
        _ => false,
    }
}
/// Attach a check of the given `kind` to `arg`, but only when `arg` is a
/// boolean literal.
fn add_if_boolean(checker: &mut Checker, arg: &Expr, kind: CheckKind) {
    if !is_boolean_arg(arg) {
        return;
    }
    checker.add_check(Check::new(kind, Range::from_located(arg)));
}
/// FBT001: flag positional arguments (positional-only and regular) whose
/// annotation is `bool`, either as the builtin name or as the string
/// annotation `"bool"`.
pub fn check_positional_boolean_in_def(checker: &mut Checker, arguments: &Arguments) {
    for arg in arguments.posonlyargs.iter().chain(arguments.args.iter()) {
        // Unannotated arguments are skipped: without a type hint we can't
        // tell that the parameter is a boolean trap.
        if let Some(expr) = &arg.node.annotation {
            // Check for both `bool` (the Python class) and `'bool'` (a string
            // annotation, e.g. under `from __future__ import annotations`).
            let hint = match &expr.node {
                ExprKind::Name { id, .. } => id == "bool",
                ExprKind::Constant {
                    value: Constant::Str(value),
                    ..
                } => value == "bool",
                _ => false,
            };
            if hint {
                checker.add_check(Check::new(
                    CheckKind::BooleanPositionalArgInFunctionDefinition,
                    Range::from_located(arg),
                ));
            }
        }
    }
}
/// FBT002: flag boolean literals used as default values in a function
/// definition.
pub fn check_boolean_default_value_in_function_definition(
    checker: &mut Checker,
    arguments: &Arguments,
) {
    for default in &arguments.defaults {
        add_if_boolean(
            checker,
            default,
            CheckKind::BooleanDefaultValueInFunctionDefinition,
        );
    }
}
/// FBT003: flag boolean literals passed as positional arguments at a call
/// site.
pub fn check_boolean_positional_value_in_function_call(checker: &mut Checker, args: &[Expr]) {
    args.iter().for_each(|value| {
        add_if_boolean(checker, value, CheckKind::BooleanPositionalValueInFunctionCall);
    });
}

View File

@@ -1,4 +1,3 @@
mod constants;
pub mod plugins;
pub mod settings;

View File

@@ -36,6 +36,7 @@ fn assertion_error(msg: Option<&Expr>) -> Stmt {
)
}
/// B011
pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option<&Expr>) {
if let ExprKind::Constant {
value: Constant::Bool(false),
@@ -43,7 +44,7 @@ pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option
} = &test.node
{
let mut check = Check::new(CheckKind::DoNotAssertFalse, Range::from_located(test));
if checker.patch() {
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
if let Ok(()) = generator.unparse_stmt(&assertion_error(msg)) {
if let Ok(content) = generator.generate() {

View File

@@ -54,7 +54,7 @@ fn duplicate_handler_exceptions<'a>(
),
Range::from_located(expr),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// TODO(charlie): If we have a single element, remove the tuple.
let mut generator = SourceGenerator::new();
if let Ok(()) = generator.unparse_expr(&type_pattern(unique_elts), 0) {

View File

@@ -5,7 +5,7 @@ use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::code_gen::SourceGenerator;
use crate::flake8_bugbear::constants::IDENTIFIER_REGEX;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
fn attribute(value: &Expr, attr: &str) -> Expr {
@@ -20,6 +20,7 @@ fn attribute(value: &Expr, attr: &str) -> Expr {
)
}
/// B009
pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "getattr" {
@@ -32,7 +33,7 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
if IDENTIFIER_REGEX.is_match(value) && !KWLIST.contains(&value.as_str()) {
let mut check =
Check::new(CheckKind::GetAttrWithConstant, Range::from_located(expr));
if checker.patch() {
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
if let Ok(()) = generator.unparse_expr(&attribute(obj, value), 0) {
if let Ok(content) = generator.generate() {

View File

@@ -1,12 +1,12 @@
use fnv::{FnvHashMap, FnvHashSet};
use rustpython_ast::{Arguments, Expr, ExprKind};
use rustpython_ast::{Arguments, Constant, Expr, ExprKind, Operator};
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
const MUTABLE_FUNCS: [(&str, &str); 7] = [
const MUTABLE_FUNCS: &[(&str, &str)] = &[
("", "dict"),
("", "list"),
("", "set"),
@@ -16,6 +16,47 @@ const MUTABLE_FUNCS: [(&str, &str); 7] = [
("collections", "deque"),
];
const IMMUTABLE_TYPES: &[(&str, &str)] = &[
("", "bool"),
("", "bytes"),
("", "complex"),
("", "float"),
("", "frozenset"),
("", "int"),
("", "object"),
("", "range"),
("", "str"),
("collections.abc", "Sized"),
("typing", "LiteralString"),
("typing", "Sized"),
];
const IMMUTABLE_GENERIC_TYPES: &[(&str, &str)] = &[
("", "tuple"),
("collections.abc", "ByteString"),
("collections.abc", "Collection"),
("collections.abc", "Container"),
("collections.abc", "Iterable"),
("collections.abc", "Mapping"),
("collections.abc", "Reversible"),
("collections.abc", "Sequence"),
("collections.abc", "Set"),
("typing", "AbstractSet"),
("typing", "ByteString"),
("typing", "Callable"),
("typing", "Collection"),
("typing", "Container"),
("typing", "FrozenSet"),
("typing", "Iterable"),
("typing", "Literal"),
("typing", "Mapping"),
("typing", "Never"),
("typing", "NoReturn"),
("typing", "Reversible"),
("typing", "Sequence"),
("typing", "Tuple"),
];
pub fn is_mutable_func(
expr: &Expr,
from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
@@ -27,34 +68,106 @@ pub fn is_mutable_func(
.any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
}
/// Return `true` if evaluating `expr` produces a mutable value: a literal
/// list/dict/set, a comprehension, or a call to a known mutable constructor.
fn is_mutable_expr(
    expr: &Expr,
    from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
    import_aliases: &FnvHashMap<&str, &str>,
) -> bool {
    // Literals and comprehensions always build a fresh mutable container.
    if matches!(
        &expr.node,
        ExprKind::List { .. }
            | ExprKind::Dict { .. }
            | ExprKind::Set { .. }
            | ExprKind::ListComp { .. }
            | ExprKind::DictComp { .. }
            | ExprKind::SetComp { .. }
    ) {
        return true;
    }
    // Calls are mutable only when the callee is a known mutable constructor.
    if let ExprKind::Call { func, .. } = &expr.node {
        return is_mutable_func(func, from_imports, import_aliases);
    }
    false
}
/// Return `true` if `expr` is a type annotation that names an immutable type.
///
/// Handles plain names and attributes (looked up in the immutable-type
/// tables), subscripted generics (including `typing.Union`, `typing.Optional`,
/// and `typing.Annotated`), PEP 604 unions (`X | Y`), and the literal `None`.
fn is_immutable_annotation(
    expr: &Expr,
    from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
    import_aliases: &FnvHashMap<&str, &str>,
) -> bool {
    match &expr.node {
        ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
            // Resolve import aliases, then check both immutable tables.
            let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
            IMMUTABLE_TYPES
                .iter()
                .chain(IMMUTABLE_GENERIC_TYPES)
                .any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
        }
        ExprKind::Subscript { value, slice, .. } => {
            // For `X[...]`, classify based on the subscripted name `X`.
            let call_path = dealias_call_path(collect_call_paths(value), import_aliases);
            if IMMUTABLE_GENERIC_TYPES
                .iter()
                .any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
            {
                true
            } else if match_call_path(&call_path, "typing", "Union", from_imports) {
                // `Union[...]` is immutable only if every member is immutable.
                if let ExprKind::Tuple { elts, .. } = &slice.node {
                    elts.iter()
                        .all(|elt| is_immutable_annotation(elt, from_imports, import_aliases))
                } else {
                    false
                }
            } else if match_call_path(&call_path, "typing", "Optional", from_imports) {
                // `Optional[T]` is immutable if `T` is.
                is_immutable_annotation(slice, from_imports, import_aliases)
            } else if match_call_path(&call_path, "typing", "Annotated", from_imports) {
                // `Annotated[T, ...]`: only the first element (the type) counts.
                if let ExprKind::Tuple { elts, .. } = &slice.node {
                    elts.first().map_or(false, |elt| {
                        is_immutable_annotation(elt, from_imports, import_aliases)
                    })
                } else {
                    false
                }
            } else {
                false
            }
        }
        ExprKind::BinOp {
            left,
            op: Operator::BitOr,
            right,
        } => {
            // PEP 604 union (`X | Y`): immutable only if both sides are.
            is_immutable_annotation(left, from_imports, import_aliases)
                && is_immutable_annotation(right, from_imports, import_aliases)
        }
        ExprKind::Constant {
            value: Constant::None,
            ..
        } => true,
        _ => false,
    }
}
/// B006
pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
for expr in arguments
.defaults
// Scan in reverse order to right-align zip()
for (arg, default) in arguments
.kwonlyargs
.iter()
.chain(arguments.kw_defaults.iter())
.rev()
.zip(arguments.kw_defaults.iter().rev())
.chain(
arguments
.args
.iter()
.rev()
.chain(arguments.posonlyargs.iter().rev())
.zip(arguments.defaults.iter().rev()),
)
{
match &expr.node {
ExprKind::List { .. }
| ExprKind::Dict { .. }
| ExprKind::Set { .. }
| ExprKind::ListComp { .. }
| ExprKind::DictComp { .. }
| ExprKind::SetComp { .. } => {
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(expr),
));
}
ExprKind::Call { func, .. } => {
if is_mutable_func(func, &checker.from_imports, &checker.import_aliases) {
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(expr),
));
}
}
_ => {}
if is_mutable_expr(default, &checker.from_imports, &checker.import_aliases)
&& arg.node.annotation.as_ref().map_or(true, |expr| {
!is_immutable_annotation(expr, &checker.from_imports, &checker.import_aliases)
})
{
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(default),
));
}
}
}

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::flake8_bugbear::constants::IDENTIFIER_REGEX;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// B010

View File

@@ -65,7 +65,7 @@ pub fn unused_loop_control_variable(checker: &mut Checker, target: &Expr, body:
CheckKind::UnusedLoopControlVariable(name.to_string()),
Range::from_located(expr),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Prefix the variable name with an underscore.
check.amend(Fix::replacement(
format!("_{name}"),

View File

@@ -16,7 +16,7 @@ pub fn print_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
checker.settings.enabled.contains(&CheckCode::T203),
Range::from_located(expr),
) {
if checker.patch() {
if checker.patch(check.kind.code()) {
let context = checker.binding_context();
if matches!(
checker.parents[context.defined_by].node,

View File

@@ -1,10 +1,11 @@
use rustpython_ast::{Location, Stmt};
use textwrap::{dedent, indent};
use crate::ast::helpers::{match_leading_content, match_trailing_content};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::autofix::{fixer, Fix};
use crate::checks::CheckKind;
use crate::docstrings::helpers::leading_space;
use crate::isort::{comments, format_imports};
use crate::{Check, Settings, SourceCodeLocator};
@@ -27,34 +28,6 @@ fn extract_indentation(body: &[&Stmt], locator: &SourceCodeLocator) -> String {
leading_space(&existing)
}
fn match_leading_content(body: &[&Stmt], locator: &SourceCodeLocator) -> bool {
let location = body.first().unwrap().location;
let range = Range {
location: Location::new(location.row(), 0),
end_location: location,
};
let prefix = locator.slice_source_code_range(&range);
prefix.chars().any(|char| !char.is_whitespace())
}
fn match_trailing_content(body: &[&Stmt], locator: &SourceCodeLocator) -> bool {
let end_location = body.last().unwrap().end_location.unwrap();
let range = Range {
location: end_location,
end_location: Location::new(end_location.row() + 1, 0),
};
let suffix = locator.slice_source_code_range(&range);
for char in suffix.chars() {
if char == '#' {
return false;
}
if !char.is_whitespace() {
return true;
}
}
false
}
/// I001
pub fn check_imports(
body: Vec<&Stmt>,
@@ -75,8 +48,8 @@ pub fn check_imports(
);
// Special-cases: there's leading or trailing content in the import block.
let has_leading_content = match_leading_content(&body, locator);
let has_trailing_content = match_trailing_content(&body, locator);
let has_leading_content = match_leading_content(body.first().unwrap(), locator);
let has_trailing_content = match_trailing_content(body.last().unwrap(), locator);
// Generate the sorted import block.
let expected = format_imports(
@@ -91,7 +64,7 @@ pub fn check_imports(
if has_leading_content || has_trailing_content {
let mut check = Check::new(CheckKind::UnsortedImports, range);
if autofix.patch() {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
let mut content = String::new();
if has_leading_content {
content.push('\n');
@@ -119,7 +92,7 @@ pub fn check_imports(
let actual = dedent(&locator.slice_source_code_range(&range));
if actual != expected {
let mut check = Check::new(CheckKind::UnsortedImports, range);
if autofix.patch() {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::replacement(
indent(&expected, &indentation),
range.location,

View File

@@ -30,6 +30,8 @@ mod docstrings;
mod flake8_2020;
pub mod flake8_annotations;
pub mod flake8_bandit;
mod flake8_blind_except;
pub mod flake8_boolean_trap;
pub mod flake8_bugbear;
mod flake8_builtins;
mod flake8_comprehensions;
@@ -41,6 +43,7 @@ mod isort;
mod lex;
pub mod linter;
pub mod logging;
pub mod mccabe;
pub mod message;
mod noqa;
pub mod pep8_naming;

View File

@@ -22,7 +22,7 @@ use crate::check_tokens::check_tokens;
use crate::checks::{Check, CheckCode, CheckKind, LintSource};
use crate::code_gen::SourceGenerator;
use crate::directives::Directives;
use crate::message::Message;
use crate::message::{Message, Source};
use crate::noqa::add_noqa;
use crate::settings::Settings;
use crate::source_code_locator::SourceCodeLocator;
@@ -176,7 +176,15 @@ pub fn lint_stdin(
// Convert to messages.
Ok(checks
.into_iter()
.map(|check| Message::from_check(path.to_string_lossy().to_string(), check))
.map(|check| {
let filename = path.to_string_lossy().to_string();
let source = if settings.show_source {
Some(Source::from_check(&check, &locator))
} else {
None
};
Message::from_check(check, filename, source)
})
.collect())
}
@@ -233,7 +241,15 @@ pub fn lint_path(
// Convert to messages.
let messages: Vec<Message> = checks
.into_iter()
.map(|check| Message::from_check(path.to_string_lossy().to_string(), check))
.map(|check| {
let filename = path.to_string_lossy().to_string();
let source = if settings.show_source {
Some(Source::from_check(&check, &locator))
} else {
None
};
Message::from_check(check, filename, source)
})
.collect();
#[cfg(not(target_family = "wasm"))]
cache::set(path, &metadata, settings, autofix, &messages, mode);
@@ -351,6 +367,7 @@ mod tests {
#[test_case(CheckCode::B025, Path::new("B025.py"); "B025")]
#[test_case(CheckCode::B026, Path::new("B026.py"); "B026")]
#[test_case(CheckCode::B027, Path::new("B027.py"); "B027")]
#[test_case(CheckCode::BLE001, Path::new("BLE.py"); "BLE001")]
#[test_case(CheckCode::C400, Path::new("C400.py"); "C400")]
#[test_case(CheckCode::C401, Path::new("C401.py"); "C401")]
#[test_case(CheckCode::C402, Path::new("C402.py"); "C402")]
@@ -501,6 +518,7 @@ mod tests {
#[test_case(CheckCode::U011, Path::new("U011_0.py"); "U011_0")]
#[test_case(CheckCode::U011, Path::new("U011_1.py"); "U011_1")]
#[test_case(CheckCode::U012, Path::new("U012.py"); "U012")]
#[test_case(CheckCode::U013, Path::new("U013.py"); "U013")]
#[test_case(CheckCode::W292, Path::new("W292_0.py"); "W292_0")]
#[test_case(CheckCode::W292, Path::new("W292_1.py"); "W292_1")]
#[test_case(CheckCode::W292, Path::new("W292_2.py"); "W292_2")]
@@ -519,6 +537,9 @@ mod tests {
#[test_case(CheckCode::YTT301, Path::new("YTT301.py"); "YTT301")]
#[test_case(CheckCode::YTT302, Path::new("YTT302.py"); "YTT302")]
#[test_case(CheckCode::YTT303, Path::new("YTT303.py"); "YTT303")]
#[test_case(CheckCode::FBT001, Path::new("FBT.py"); "FBT001")]
#[test_case(CheckCode::FBT002, Path::new("FBT.py"); "FBT002")]
#[test_case(CheckCode::FBT003, Path::new("FBT.py"); "FBT003")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let mut checks = test_path(

View File

@@ -7,7 +7,7 @@ use std::time::Instant;
#[cfg(not(target_family = "wasm"))]
use ::ruff::cache;
use ::ruff::checks::{CheckCode, CheckKind};
use ::ruff::cli::{collect_per_file_ignores, extract_log_level, warn_on, Cli, Warnable};
use ::ruff::cli::{collect_per_file_ignores, extract_log_level, Cli};
use ::ruff::fs::iter_python_files;
use ::ruff::linter::{add_noqa_to_path, autoformat_path, lint_path, lint_stdin};
use ::ruff::logging::{set_up_logging, LogLevel};
@@ -117,6 +117,7 @@ fn run_once(
location: Default::default(),
end_location: Default::default(),
filename: path.to_string_lossy().to_string(),
source: None,
}]
} else {
error!("Failed to check {}: {message}", path.to_string_lossy());
@@ -239,25 +240,9 @@ fn inner_main() -> Result<ExitCode> {
collect_per_file_ignores(cli.per_file_ignores, project_root.as_ref());
}
if !cli.select.is_empty() {
warn_on(
Warnable::Select,
&cli.select,
&cli.ignore,
&cli.extend_ignore,
&configuration,
pyproject.as_ref(),
);
configuration.select = cli.select;
}
if !cli.extend_select.is_empty() {
warn_on(
Warnable::ExtendSelect,
&cli.extend_select,
&cli.ignore,
&cli.extend_ignore,
&configuration,
pyproject.as_ref(),
);
configuration.extend_select = cli.extend_select;
}
if !cli.ignore.is_empty() {
@@ -266,9 +251,18 @@ fn inner_main() -> Result<ExitCode> {
if !cli.extend_ignore.is_empty() {
configuration.extend_ignore = cli.extend_ignore;
}
if !cli.fixable.is_empty() {
configuration.fixable = cli.fixable;
}
if !cli.unfixable.is_empty() {
configuration.unfixable = cli.unfixable;
}
if let Some(line_length) = cli.line_length {
configuration.line_length = line_length;
}
if let Some(max_complexity) = cli.max_complexity {
configuration.mccabe.max_complexity = max_complexity;
}
if let Some(target_version) = cli.target_version {
configuration.target_version = target_version;
}
@@ -278,6 +272,9 @@ fn inner_main() -> Result<ExitCode> {
if let Some(fix) = fix {
configuration.fix = fix;
}
if cli.show_source {
configuration.show_source = true;
}
if cli.show_settings && cli.show_files {
eprintln!("Error: specify --show-settings or show-files (not both).");

73
src/mccabe/checks.rs Normal file
View File

@@ -0,0 +1,73 @@
use rustpython_ast::{ExcepthandlerKind, ExprKind, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
/// Count the decision points in `stmts`, recursing into every nested body.
fn get_complexity_number(stmts: &[Stmt]) -> usize {
    let mut complexity = 0;
    for stmt in stmts {
        match &stmt.node {
            // `if`, `for`, and `async for` each contribute one decision point
            // plus whatever their bodies and `else` clauses contain.
            StmtKind::If { body, orelse, .. }
            | StmtKind::For { body, orelse, .. }
            | StmtKind::AsyncFor { body, orelse, .. } => {
                complexity += 1 + get_complexity_number(body) + get_complexity_number(orelse);
            }
            StmtKind::While { test, body, orelse } => {
                complexity += 1 + get_complexity_number(body) + get_complexity_number(orelse);
                // A compound condition (`and`/`or`) adds an extra branch.
                if matches!(&test.node, ExprKind::BoolOp { .. }) {
                    complexity += 1;
                }
            }
            StmtKind::Try {
                body,
                handlers,
                orelse,
                finalbody,
            } => {
                complexity += 1
                    + get_complexity_number(body)
                    + get_complexity_number(orelse)
                    + get_complexity_number(finalbody);
                // Every `except` clause is its own decision point.
                for handler in handlers {
                    let ExcepthandlerKind::ExceptHandler { body, .. } = &handler.node;
                    complexity += 1 + get_complexity_number(body);
                }
            }
            // Nested definitions count once, plus their contents.
            StmtKind::FunctionDef { body, .. }
            | StmtKind::AsyncFunctionDef { body, .. }
            | StmtKind::ClassDef { body, .. } => {
                complexity += 1 + get_complexity_number(body);
            }
            _ => {}
        }
    }
    complexity
}
/// C901: return a check when the function's McCabe complexity exceeds
/// `max_complexity`, and `None` otherwise.
pub fn function_is_too_complex(
    stmt: &Stmt,
    name: &str,
    body: &[Stmt],
    max_complexity: usize,
) -> Option<Check> {
    // The function itself counts as one path on top of its branches.
    let complexity = get_complexity_number(body) + 1;
    (complexity > max_complexity).then(|| {
        Check::new(
            CheckKind::FunctionIsTooComplex(name.to_string(), complexity),
            Range::from_located(stmt),
        )
    })
}

33
src/mccabe/mod.rs Normal file
View File

@@ -0,0 +1,33 @@
pub mod checks;
pub mod settings;
#[cfg(test)]
mod tests {
    use std::path::Path;

    use anyhow::Result;
    use test_case::test_case;

    use crate::autofix::fixer;
    use crate::checks::CheckCode;
    use crate::linter::test_path;
    use crate::{mccabe, Settings};

    /// Run C901 over the fixture at several thresholds; each case is
    /// snapshotted under the explicit name `max_complexity_{threshold}`.
    // Renamed from `max_complexity_zero`: the test is parameterized over
    // 0, 3, and 10, so the old name was misleading. Snapshot names come from
    // the `snapshot` string below, not the function name, so no snapshots move.
    #[test_case(0)]
    #[test_case(3)]
    #[test_case(10)]
    fn max_complexity(max_complexity: usize) -> Result<()> {
        let snapshot = format!("max_complexity_{}", max_complexity);
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/C901.py"),
            &Settings {
                mccabe: mccabe::settings::Settings { max_complexity },
                ..Settings::for_rules(vec![CheckCode::C901])
            },
            &fixer::Mode::Generate,
        )?;
        // Sort for a deterministic snapshot order.
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(snapshot, checks);
        Ok(())
    }
}

28
src/mccabe/settings.rs Normal file
View File

@@ -0,0 +1,28 @@
//! Settings for the `mccabe` plugin.
use serde::{Deserialize, Serialize};
/// User-facing configuration for the `mccabe` plugin as parsed from
/// `pyproject.toml` (kebab-case keys; unknown fields are rejected).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    // Maximum allowed McCabe complexity; `None` means "not configured".
    pub max_complexity: Option<usize>,
}

/// Resolved settings for the `mccabe` plugin.
#[derive(Debug, Hash)]
pub struct Settings {
    // Functions whose measured complexity exceeds this threshold are flagged.
    pub max_complexity: usize,
}
impl Settings {
pub fn from_options(options: Options) -> Self {
Self {
max_complexity: options.max_complexity.unwrap_or_default(),
}
}
}
impl Default for Settings {
    fn default() -> Self {
        // Default threshold of 10 — appears intended to match the common
        // mccabe/flake8 `--max-complexity` convention; confirm upstream.
        Self { max_complexity: 10 }
    }
}

View File

@@ -0,0 +1,170 @@
---
source: src/mccabe/mod.rs
expression: checks
---
- kind:
FunctionIsTooComplex:
- trivial
- 1
location:
row: 2
column: 0
end_location:
row: 7
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- expr_as_statement
- 1
location:
row: 7
column: 0
end_location:
row: 12
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- sequential
- 1
location:
row: 12
column: 0
end_location:
row: 19
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- if_elif_else_dead_path
- 3
location:
row: 19
column: 0
end_location:
row: 29
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- nested_ifs
- 3
location:
row: 29
column: 0
end_location:
row: 40
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- for_loop
- 2
location:
row: 40
column: 0
end_location:
row: 46
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- for_else
- 2
location:
row: 46
column: 0
end_location:
row: 54
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- recursive
- 2
location:
row: 54
column: 0
end_location:
row: 62
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- nested_functions
- 3
location:
row: 62
column: 0
end_location:
row: 73
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- a
- 2
location:
row: 63
column: 4
end_location:
row: 69
column: 4
fix: ~
- kind:
FunctionIsTooComplex:
- b
- 1
location:
row: 64
column: 8
end_location:
row: 67
column: 8
fix: ~
- kind:
FunctionIsTooComplex:
- try_else
- 4
location:
row: 73
column: 0
end_location:
row: 85
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- nested_try_finally
- 3
location:
row: 85
column: 0
end_location:
row: 96
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- foobar
- 3
location:
row: 96
column: 0
end_location:
row: 107
column: 0
fix: ~
- kind:
FunctionIsTooComplex:
- annotated_assign
- 1
location:
row: 107
column: 0
end_location:
row: 109
column: 0
fix: ~

View File

@@ -0,0 +1,6 @@
---
source: src/mccabe/mod.rs
expression: checks
---
[]

View File

@@ -0,0 +1,16 @@
---
source: src/mccabe/mod.rs
expression: checks
---
- kind:
FunctionIsTooComplex:
- try_else
- 4
location:
row: 73
column: 0
end_location:
row: 85
column: 0
fix: ~

View File

@@ -2,12 +2,16 @@ use std::cmp::Ordering;
use std::fmt;
use std::path::Path;
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
use colored::Colorize;
use rustpython_parser::ast::Location;
use serde::{Deserialize, Serialize};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::fs::relativize_path;
use crate::source_code_locator::SourceCodeLocator;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Message {
@@ -16,16 +20,18 @@ pub struct Message {
pub location: Location,
pub end_location: Location,
pub filename: String,
pub source: Option<Source>,
}
impl Message {
pub fn from_check(filename: String, check: Check) -> Self {
pub fn from_check(check: Check, filename: String, source: Option<Source>) -> Self {
Self {
kind: check.kind,
fixed: check.fix.map(|fix| fix.applied).unwrap_or_default(),
location: Location::new(check.location.row(), check.location.column() + 1),
end_location: Location::new(check.end_location.row(), check.end_location.column() + 1),
filename,
source,
}
}
}
@@ -48,8 +54,7 @@ impl PartialOrd for Message {
impl fmt::Display for Message {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
let label = format!(
"{}{}{}{}{}{} {} {}",
relativize_path(Path::new(&self.filename)).white().bold(),
":".cyan(),
@@ -58,7 +63,71 @@ impl fmt::Display for Message {
self.location.column(),
":".cyan(),
self.kind.code().as_ref().red().bold(),
self.kind.body()
)
self.kind.body(),
);
match &self.source {
None => write!(f, "{}", label),
Some(source) => {
let snippet = Snippet {
title: Some(Annotation {
label: Some(&label),
annotation_type: AnnotationType::Error,
// The ID (error number) is already encoded in the `label`.
id: None,
}),
footer: vec![],
slices: vec![Slice {
source: &source.contents,
line_start: self.location.row(),
annotations: vec![SourceAnnotation {
label: self.kind.code().as_ref(),
annotation_type: AnnotationType::Error,
range: source.range,
}],
// The origin (file name, line number, and column number) is already encoded
// in the `label`.
origin: None,
fold: false,
}],
opt: FormatOptions {
color: true,
..Default::default()
},
};
// `split_once(' ')` strips "error: " from `message`.
let message = DisplayList::from(snippet).to_string();
let (_, message) = message.split_once(' ').unwrap();
write!(f, "{}", message)
}
}
}
}
/// A snippet of source code attached to a diagnostic message, used to render
/// an annotated code frame beneath the one-line summary.
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
    /// The full text of the line(s) covered by the check.
    pub contents: String,
    /// (start, end) character offsets of the annotated span within `contents`.
    pub range: (usize, usize),
}
impl Source {
    /// Capture the source lines spanned by `check`, along with the character
    /// range of the check within those lines, for annotated display.
    pub fn from_check(check: &Check, locator: &SourceCodeLocator) -> Self {
        // Slice whole lines: from column 0 of the check's first row through
        // the start of the line after its last row.
        let source = locator.slice_source_code_range(&Range {
            location: Location::new(check.location.row(), 0),
            end_location: Location::new(check.end_location.row() + 1, 0),
        });
        // Measure the check's span in characters (not bytes) so the
        // annotation range stays valid for multi-byte source text.
        let num_chars_in_range = locator
            .slice_source_code_range(&Range {
                location: check.location,
                end_location: check.end_location,
            })
            .chars()
            .count();
        Source {
            contents: source.to_string(),
            // NOTE(review): the end offset adds the full span length to the
            // start column — for multi-line checks this presumably overshoots
            // the first line; confirm against the snippet renderer.
            range: (
                check.location.column(),
                check.location.column() + num_chars_in_range,
            ),
        }
    }
}

View File

@@ -10,32 +10,40 @@ use regex::Regex;
use crate::checks::{Check, CheckCode};
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?P<noqa>\s*(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
.expect("Invalid regex")
Regex::new(
r"(?P<spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)",
)
.expect("Invalid regex")
});
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));
#[derive(Debug)]
pub enum Directive<'a> {
None,
All(usize, usize),
Codes(usize, usize, Vec<&'a str>),
All(usize, usize, usize),
Codes(usize, usize, usize, Vec<&'a str>),
}
pub fn extract_noqa_directive(line: &str) -> Directive {
match NO_QA_REGEX.captures(line) {
Some(caps) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => Directive::Codes(
noqa.start(),
noqa.end(),
SPLIT_COMMA_REGEX
.split(codes.as_str())
.map(|code| code.trim())
.filter(|code| !code.is_empty())
.collect(),
),
None => Directive::All(noqa.start(), noqa.end()),
Some(caps) => match caps.name("spaces") {
Some(spaces) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => Directive::Codes(
spaces.as_str().chars().count(),
noqa.start(),
noqa.end(),
SPLIT_COMMA_REGEX
.split(codes.as_str())
.map(|code| code.trim())
.filter(|code| !code.is_empty())
.collect(),
),
None => {
Directive::All(spaces.as_str().chars().count(), noqa.start(), noqa.end())
}
},
None => Directive::None,
},
None => Directive::None,
},
@@ -92,12 +100,14 @@ fn add_noqa_inner(
match extract_noqa_directive(line) {
Directive::None => {
output.push_str(line);
output.push_str(" # noqa: ");
}
Directive::All(_, start, _) | Directive::Codes(_, start, ..) => {
output.push_str(&line[..start]);
output.push_str("# noqa: ");
}
Directive::All(start, _) => output.push_str(&line[..start]),
Directive::Codes(start, ..) => output.push_str(&line[..start]),
};
let codes: Vec<&str> = codes.iter().map(|code| code.as_ref()).collect();
output.push_str(" # noqa: ");
output.push_str(&codes.join(", "));
output.push('\n');
count += 1;

View File

@@ -1,6 +1,6 @@
use itertools::izip;
use rustpython_ast::Location;
use rustpython_parser::ast::{Cmpop, Expr, ExprKind, Stmt, Unaryop};
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
@@ -46,57 +46,6 @@ pub fn ambiguous_function_name(name: &str, location: Range) -> Option<Check> {
}
}
/// E731
pub fn do_not_assign_lambda(target: &Expr, value: &Expr, stmt: &Stmt) -> Option<Check> {
if let ExprKind::Name { .. } = &target.node {
if let ExprKind::Lambda { .. } = &value.node {
return Some(Check::new(
CheckKind::DoNotAssignLambda,
Range::from_located(stmt),
));
}
}
None
}
/// E713, E714
pub fn not_tests(
op: &Unaryop,
operand: &Expr,
check_not_in: bool,
check_not_is: bool,
) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
if matches!(op, Unaryop::Not) {
if let ExprKind::Compare { ops, .. } = &operand.node {
for op in ops {
match op {
Cmpop::In => {
if check_not_in {
checks.push(Check::new(
CheckKind::NotInTest,
Range::from_located(operand),
));
}
}
Cmpop::Is => {
if check_not_is {
checks.push(Check::new(
CheckKind::NotIsTest,
Range::from_located(operand),
));
}
}
_ => {}
}
}
}
}
checks
}
/// E721
pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];

View File

@@ -1,8 +1,13 @@
use anyhow::Result;
use fnv::FnvHashMap;
use itertools::izip;
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind};
use log::error;
use rustpython_ast::{Arguments, Location, StmtKind};
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Stmt, Unaryop};
use crate::ast::helpers::{match_leading_content, match_trailing_content};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind, RejectedCmpop};
@@ -62,7 +67,7 @@ pub fn literal_comparisons(
CheckKind::NoneComparison(RejectedCmpop::Eq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Dummy replacement
check.amend(Fix::dummy(expr.location));
bad_ops.insert(0, Cmpop::Is);
@@ -74,7 +79,7 @@ pub fn literal_comparisons(
CheckKind::NoneComparison(RejectedCmpop::NotEq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(0, Cmpop::IsNot);
}
@@ -93,7 +98,7 @@ pub fn literal_comparisons(
CheckKind::TrueFalseComparison(value, RejectedCmpop::Eq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(0, Cmpop::Is);
}
@@ -104,7 +109,7 @@ pub fn literal_comparisons(
CheckKind::TrueFalseComparison(value, RejectedCmpop::NotEq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(0, Cmpop::IsNot);
}
@@ -129,7 +134,7 @@ pub fn literal_comparisons(
CheckKind::NoneComparison(RejectedCmpop::Eq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(idx, Cmpop::Is);
}
@@ -140,7 +145,7 @@ pub fn literal_comparisons(
CheckKind::NoneComparison(RejectedCmpop::NotEq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(idx, Cmpop::IsNot);
}
@@ -159,7 +164,7 @@ pub fn literal_comparisons(
CheckKind::TrueFalseComparison(value, RejectedCmpop::Eq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(idx, Cmpop::Is);
}
@@ -170,7 +175,7 @@ pub fn literal_comparisons(
CheckKind::TrueFalseComparison(value, RejectedCmpop::NotEq),
Range::from_located(comparator),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::dummy(expr.location));
bad_ops.insert(idx, Cmpop::IsNot);
}
@@ -201,3 +206,128 @@ pub fn literal_comparisons(
checker.add_checks(checks.into_iter());
}
/// E713, E714: flag `not x in y` and `not x is y`, which should be written as
/// `x not in y` and `x is not y` respectively.
pub fn not_tests(
    checker: &mut Checker,
    expr: &Expr,
    op: &Unaryop,
    operand: &Expr,
    check_not_in: bool,
    check_not_is: bool,
) {
    if matches!(op, Unaryop::Not) {
        if let ExprKind::Compare {
            left,
            ops,
            comparators,
            ..
        } = &operand.node
        {
            // Only offer an autofix for single-operator comparisons; rewriting
            // a chained comparison (e.g. `not a in b in c`) is not safe here.
            let should_fix = ops.len() == 1;
            for op in ops.iter() {
                match op {
                    Cmpop::In => {
                        if check_not_in {
                            let mut check =
                                Check::new(CheckKind::NotInTest, Range::from_located(operand));
                            if checker.patch(check.kind.code()) && should_fix {
                                // Re-generate the comparison with `not in`,
                                // replacing the entire `not ...` expression.
                                if let Some(content) = compare(left, &[Cmpop::NotIn], comparators) {
                                    check.amend(Fix::replacement(
                                        content,
                                        expr.location,
                                        expr.end_location.unwrap(),
                                    ));
                                }
                            }
                            checker.add_check(check);
                        }
                    }
                    Cmpop::Is => {
                        if check_not_is {
                            let mut check =
                                Check::new(CheckKind::NotIsTest, Range::from_located(operand));
                            if checker.patch(check.kind.code()) && should_fix {
                                // Re-generate the comparison with `is not`.
                                if let Some(content) = compare(left, &[Cmpop::IsNot], comparators) {
                                    check.amend(Fix::replacement(
                                        content,
                                        expr.location,
                                        expr.end_location.unwrap(),
                                    ));
                                }
                            }
                            checker.add_check(check);
                        }
                    }
                    // Other comparison operators are irrelevant to E713/E714.
                    _ => {}
                }
            }
        }
    }
}
/// Render a `def` statement equivalent to assigning the given lambda to
/// `name`: the lambda body becomes the function's single `return` statement.
fn function(name: &str, args: &Arguments, body: &Expr) -> Result<String> {
    // Wrap the lambda body in an explicit `return`.
    let return_stmt = Stmt::new(
        Default::default(),
        Default::default(),
        StmtKind::Return {
            value: Some(Box::new(body.clone())),
        },
    );
    // Assemble the full `def name(args): return <body>` statement.
    let def_kind = StmtKind::FunctionDef {
        name: name.to_string(),
        args: Box::new(args.clone()),
        body: vec![return_stmt],
        decorator_list: vec![],
        returns: None,
        type_comment: None,
    };
    let def_stmt = Stmt::new(Default::default(), Default::default(), def_kind);
    // Unparse the synthetic AST back into Python source text.
    let mut generator = SourceGenerator::new();
    generator.unparse_stmt(&def_stmt)?;
    generator.generate().map_err(|e| e.into())
}
/// E731: do not assign a `lambda` expression — use a `def` instead.
pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr, stmt: &Stmt) {
    if let ExprKind::Name { id, .. } = &target.node {
        if let ExprKind::Lambda { args, body } = &value.node {
            let mut check = Check::new(CheckKind::DoNotAssignLambda, Range::from_located(stmt));
            if checker.patch(check.kind.code()) {
                // Only fix when the statement owns its line(s): any leading or
                // trailing content would be clobbered by the replacement.
                if !match_leading_content(stmt, checker.locator)
                    && !match_trailing_content(stmt, checker.locator)
                {
                    match function(id, args, body) {
                        Ok(content) => {
                            // Indentation of the assignment's first line, used
                            // to indent every subsequent line of the generated
                            // `def` block.
                            let indentation =
                                &leading_space(&checker.locator.slice_source_code_range(&Range {
                                    location: Location::new(stmt.location.row(), 0),
                                    end_location: Location::new(stmt.location.row() + 1, 0),
                                }));
                            let mut indented = String::new();
                            for (idx, line) in content.lines().enumerate() {
                                if idx == 0 {
                                    // The first line replaces text in place, so
                                    // it inherits the original indent.
                                    indented.push_str(line);
                                } else {
                                    indented.push('\n');
                                    indented.push_str(indentation);
                                    indented.push_str(line);
                                }
                            }
                            check.amend(Fix::replacement(
                                indented,
                                stmt.location,
                                stmt.end_location.unwrap(),
                            ));
                        }
                        // Fix generation is best-effort: on failure, the check
                        // is still reported below, just without an autofix.
                        Err(e) => error!("Failed to generate fix: {}", e),
                    }
                }
            }
            checker.add_check(check);
        }
    }
}

View File

@@ -7,11 +7,12 @@ use regex::Regex;
use rustpython_ast::{Arg, Constant, ExprKind, Location, StmtKind};
use crate::ast::types::Range;
use crate::ast::whitespace;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::docstrings::constants;
use crate::docstrings::definition::{Definition, DefinitionKind};
use crate::docstrings::helpers;
use crate::docstrings::sections::{section_contexts, SectionContext};
use crate::docstrings::styles::SectionStyle;
use crate::visibility::{is_init, is_magic, is_overload, is_staticmethod, Visibility};
@@ -179,7 +180,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
CheckKind::NoBlankLineBeforeFunction(blank_lines_before),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete the blank line before the docstring.
check.amend(Fix::deletion(
Location::new(docstring.location.row() - blank_lines_before, 0),
@@ -220,7 +221,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
CheckKind::NoBlankLineAfterFunction(blank_lines_after),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete the blank line after the docstring.
check.amend(Fix::deletion(
Location::new(docstring.end_location.unwrap().row() + 1, 0),
@@ -269,7 +270,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
CheckKind::NoBlankLineBeforeClass(blank_lines_before),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete the blank line before the class.
check.amend(Fix::deletion(
Location::new(docstring.location.row() - blank_lines_before, 0),
@@ -285,7 +286,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
CheckKind::OneBlankLineBeforeClass(blank_lines_before),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Insert one blank line before the class.
check.amend(Fix::replacement(
"\n".to_string(),
@@ -322,7 +323,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
CheckKind::OneBlankLineAfterClass(blank_lines_after),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Insert a blank line before the class (replacing any existing lines).
check.amend(Fix::replacement(
"\n".to_string(),
@@ -364,7 +365,7 @@ pub fn blank_after_summary(checker: &mut Checker, definition: &Definition) {
CheckKind::BlankLineAfterSummary,
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Insert one blank line after the summary (replacing any existing lines).
check.amend(Fix::replacement(
"\n".to_string(),
@@ -391,7 +392,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
return;
}
let docstring_indent = helpers::indentation(checker, docstring);
let docstring_indent = whitespace::indentation(checker, docstring);
let mut has_seen_tab = docstring_indent.contains('\t');
let mut is_over_indented = true;
let mut over_indented_lines = vec![];
@@ -408,7 +409,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
continue;
}
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
// We only report tab indentation once, so only check if we haven't seen a tab
// yet.
@@ -425,9 +426,9 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
end_location: Location::new(docstring.location.row() + i, 0),
},
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -464,7 +465,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
// If every line (except the last) is over-indented...
if is_over_indented {
for i in over_indented_lines {
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
if line_indent.len() > docstring_indent.len() {
// We report over-indentation on every line. This isn't great, but
// enables autofix.
@@ -475,9 +476,9 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
end_location: Location::new(docstring.location.row() + i, 0),
},
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -490,7 +491,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
// If the last line is over-indented...
if !lines.is_empty() {
let i = lines.len() - 1;
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
if line_indent.len() > docstring_indent.len() {
let mut check = Check::new(
CheckKind::NoOverIndentation,
@@ -499,9 +500,9 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
end_location: Location::new(docstring.location.row() + i, 0),
},
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -537,11 +538,11 @@ pub fn newline_after_last_paragraph(checker: &mut Checker, definition: &Definiti
CheckKind::NewLineAfterLastParagraph,
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Insert a newline just before the end-quote(s).
let content = format!(
"\n{}",
helpers::clean(&helpers::indentation(checker, docstring))
whitespace::clean(&whitespace::indentation(checker, docstring))
);
check.amend(Fix::insertion(
content,
@@ -580,7 +581,7 @@ pub fn no_surrounding_whitespace(checker: &mut Checker, definition: &Definition)
CheckKind::NoSurroundingWhitespace,
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
if let Some(first_line) = checker
.locator
.slice_source_code_range(&Range::from_located(docstring))
@@ -588,9 +589,9 @@ pub fn no_surrounding_whitespace(checker: &mut Checker, definition: &Definition)
.next()
.map(|line| line.to_lowercase())
{
for pattern in helpers::TRIPLE_QUOTE_PREFIXES
for pattern in constants::TRIPLE_QUOTE_PREFIXES
.iter()
.chain(helpers::SINGLE_QUOTE_PREFIXES)
.chain(constants::SINGLE_QUOTE_PREFIXES)
{
if first_line.starts_with(pattern) {
check.amend(Fix::replacement(
@@ -634,7 +635,7 @@ pub fn multi_line_summary_start(checker: &mut Checker, definition: &Definition)
.next()
.map(|line| line.to_lowercase())
{
if helpers::TRIPLE_QUOTE_PREFIXES.contains(&first_line.as_str()) {
if constants::TRIPLE_QUOTE_PREFIXES.contains(&first_line.as_str()) {
if checker.settings.enabled.contains(&CheckCode::D212) {
checker.add_check(Check::new(
CheckKind::MultiLineSummaryFirstLine,
@@ -916,11 +917,11 @@ fn blanks_and_section_underline(
CheckKind::DashedUnderlineAfterSection(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Add a dashed line (of the appropriate length) under the section header.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::insertion(
@@ -950,11 +951,11 @@ fn blanks_and_section_underline(
CheckKind::DashedUnderlineAfterSection(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Add a dashed line (of the appropriate length) under the section header.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::insertion(
@@ -972,7 +973,7 @@ fn blanks_and_section_underline(
),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete any blank lines between the header and content.
check.amend(Fix::deletion(
Location::new(docstring.location.row() + context.original_index + 1, 0),
@@ -995,7 +996,7 @@ fn blanks_and_section_underline(
CheckKind::SectionUnderlineAfterName(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete any blank lines between the header and the underline.
check.amend(Fix::deletion(
Location::new(docstring.location.row() + context.original_index + 1, 0),
@@ -1026,11 +1027,11 @@ fn blanks_and_section_underline(
),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Replace the existing underline with a line of the appropriate length.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::replacement(
@@ -1057,17 +1058,17 @@ fn blanks_and_section_underline(
}
if checker.settings.enabled.contains(&CheckCode::D215) {
let leading_space = helpers::leading_space(non_empty_line);
let indentation = helpers::indentation(checker, docstring);
let leading_space = whitespace::leading_space(non_empty_line);
let indentation = whitespace::indentation(checker, docstring);
if leading_space.len() > indentation.len() {
let mut check = Check::new(
CheckKind::SectionUnderlineNotOverIndented(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Replace the existing indentation with whitespace of the appropriate length.
check.amend(Fix::replacement(
helpers::clean(&indentation),
whitespace::clean(&indentation),
Location::new(
docstring.location.row()
+ context.original_index
@@ -1113,7 +1114,7 @@ fn blanks_and_section_underline(
),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete any blank lines between the header and content.
check.amend(Fix::deletion(
Location::new(
@@ -1172,7 +1173,7 @@ fn common_section(
CheckKind::CapitalizeSectionName(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Replace the section title with the capitalized variant. This requires
// locating the start and end of the section name.
if let Some(index) = context.line.find(&context.section_name) {
@@ -1198,17 +1199,17 @@ fn common_section(
}
if checker.settings.enabled.contains(&CheckCode::D214) {
let leading_space = helpers::leading_space(context.line);
let indentation = helpers::indentation(checker, docstring);
let leading_space = whitespace::leading_space(context.line);
let indentation = whitespace::indentation(checker, docstring);
if leading_space.len() > indentation.len() {
let mut check = Check::new(
CheckKind::SectionNotOverIndented(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Replace the existing indentation with whitespace of the appropriate length.
check.amend(Fix::replacement(
helpers::clean(&indentation),
whitespace::clean(&indentation),
Location::new(docstring.location.row() + context.original_index, 0),
Location::new(
docstring.location.row() + context.original_index,
@@ -1232,7 +1233,7 @@ fn common_section(
CheckKind::BlankLineAfterLastSection(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Add a newline after the section.
check.amend(Fix::insertion(
"\n".to_string(),
@@ -1253,7 +1254,7 @@ fn common_section(
CheckKind::BlankLineAfterSection(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Add a newline after the section.
check.amend(Fix::insertion(
"\n".to_string(),
@@ -1277,7 +1278,7 @@ fn common_section(
CheckKind::BlankLineBeforeSection(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Add a blank line before the section.
check.amend(Fix::insertion(
"\n".to_string(),
@@ -1400,13 +1401,13 @@ fn args_section(checker: &mut Checker, definition: &Definition, context: &Sectio
fn parameters_section(checker: &mut Checker, definition: &Definition, context: &SectionContext) {
// Collect the list of arguments documented in the docstring.
let mut docstring_args: FnvHashSet<&str> = FnvHashSet::default();
let section_level_indent = helpers::leading_space(context.line);
let section_level_indent = whitespace::leading_space(context.line);
for i in 1..context.following_lines.len() {
let current_line = context.following_lines[i - 1];
let current_leading_space = helpers::leading_space(current_line);
let current_leading_space = whitespace::leading_space(current_line);
let next_line = context.following_lines[i];
if current_leading_space == section_level_indent
&& (helpers::leading_space(next_line).len() > current_leading_space.len())
&& (whitespace::leading_space(next_line).len() > current_leading_space.len())
&& !next_line.trim().is_empty()
{
let parameters = if let Some(semi_index) = current_line.find(':') {
@@ -1444,7 +1445,7 @@ fn numpy_section(checker: &mut Checker, definition: &Definition, context: &Secti
CheckKind::NewLineAfterSectionName(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Delete the suffix. This requires locating the end of the section name.
if let Some(index) = context.line.find(&context.section_name) {
// Map from bytes to characters.
@@ -1493,7 +1494,7 @@ fn google_section(checker: &mut Checker, definition: &Definition, context: &Sect
CheckKind::SectionNameEndsInColon(context.section_name.to_string()),
Range::from_located(docstring),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
// Replace the suffix. This requires locating the end of the section name.
if let Some(index) = context.line.find(&context.section_name) {
// Map from bytes to characters.

View File

@@ -43,7 +43,7 @@ pub fn invalid_literal_comparison(
&& (is_constant_non_singleton(left) || is_constant_non_singleton(right))
{
let mut check = Check::new(CheckKind::IsLiteral, location);
if checker.patch() {
if checker.patch(check.kind.code()) {
match fix_invalid_literal_comparison(
checker.locator,
Range {

View File

@@ -28,7 +28,7 @@ fn match_not_implemented(expr: &Expr) -> Option<&Expr> {
pub fn raise_not_implemented(checker: &mut Checker, expr: &Expr) {
if let Some(expr) = match_not_implemented(expr) {
let mut check = Check::new(CheckKind::RaiseNotImplemented, Range::from_located(expr));
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
"NotImplementedError".to_string(),
expr.location,

View File

@@ -1,5 +1,6 @@
pub mod builtins;
pub mod future;
pub mod identifiers;
pub mod keyword;
pub mod string;
pub mod sys;

View File

@@ -0,0 +1,246 @@
use anyhow::{bail, Result};
use log::error;
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Keyword, KeywordData, Stmt, StmtKind};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::code_gen::SourceGenerator;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// Return the class name, arguments, keywords and base class for a `TypedDict`
/// assignment, or `None` if the statement is not a functional `TypedDict`
/// assignment.
fn match_typed_dict_assign<'a>(
    checker: &Checker,
    targets: &'a [Expr],
    value: &'a Expr,
) -> Option<(&'a str, &'a [Expr], &'a [Keyword], &'a ExprKind)> {
    // The assignment must have a (first) simple-name target.
    let target = targets.get(0)?;
    let class_name = match &target.node {
        ExprKind::Name { id, .. } => id,
        _ => return None,
    };
    // The assigned value must be a call expression.
    let (func, args, keywords) = match &value.node {
        ExprKind::Call {
            func,
            args,
            keywords,
        } => (func, args, keywords),
        _ => return None,
    };
    // The callee must resolve to `typing.TypedDict` (accounting for aliases
    // and `from`-imports).
    if match_module_member(
        func,
        "typing",
        "TypedDict",
        &checker.from_imports,
        &checker.import_aliases,
    ) {
        Some((class_name, args, keywords, &func.node))
    } else {
        None
    }
}
/// Generate a `StmtKind::AnnAssign` representing the provided property
/// definition, i.e. a `name: annotation` line in the generated class body.
fn create_property_assignment_stmt(property: &str, annotation: &ExprKind) -> Stmt {
    Stmt::new(
        Default::default(),
        Default::default(),
        StmtKind::AnnAssign {
            target: Box::new(Expr::new(
                Default::default(),
                Default::default(),
                ExprKind::Name {
                    id: property.to_string(),
                    ctx: ExprContext::Load,
                },
            )),
            annotation: Box::new(Expr::new(
                Default::default(),
                Default::default(),
                annotation.clone(),
            )),
            value: None,
            // `simple: 1` marks the target as a bare name (no parentheses),
            // matching the CPython AST convention for `AnnAssign`.
            simple: 1,
        },
    )
}
/// Generate a `StmtKind::Pass` statement, used as the class body when the
/// `TypedDict` declares no fields.
fn create_pass_stmt() -> Stmt {
    Stmt::new(Default::default(), Default::default(), StmtKind::Pass)
}
/// Generate a `StmtKind::ClassDef` statement based on the provided body,
/// keywords and base class.
fn create_class_def_stmt(
    class_name: &str,
    body: Vec<Stmt>,
    total_keyword: Option<KeywordData>,
    base_class: &ExprKind,
) -> Stmt {
    // Carry a `total=...` keyword over to the class definition, if present.
    let keywords = total_keyword
        .map(|keyword| {
            vec![Keyword::new(
                Default::default(),
                Default::default(),
                keyword,
            )]
        })
        .unwrap_or_default();
    // The generated class inherits from the original `TypedDict` callee.
    let bases = vec![Expr::new(
        Default::default(),
        Default::default(),
        base_class.clone(),
    )];
    Stmt::new(
        Default::default(),
        Default::default(),
        StmtKind::ClassDef {
            name: class_name.to_string(),
            bases,
            keywords,
            body,
            decorator_list: vec![],
        },
    )
}
/// Convert the `{'key': type, ...}` dict-literal argument of a functional
/// `TypedDict` into a list of annotated-assignment statements.
///
/// Fails if any key is not a string constant, or is not a valid, non-keyword
/// Python identifier — such keys cannot be written as class attributes.
fn get_properties_from_dict_literal(keys: &[Expr], values: &[Expr]) -> Result<Vec<Stmt>> {
    keys.iter()
        .zip(values.iter())
        .map(|(key, value)| match &key.node {
            ExprKind::Constant {
                value: Constant::Str(property),
                ..
            } => {
                // Reject names like `1x` or `class` that are legal dict keys
                // but illegal attribute names.
                if IDENTIFIER_REGEX.is_match(property) && !KWLIST.contains(&property.as_str()) {
                    Ok(create_property_assignment_stmt(property, &value.node))
                } else {
                    bail!("Invalid property name: {}", property)
                }
            }
            _ => bail!("Expected `key` to be `Constant::Str`"),
        })
        .collect()
}
/// Convert a `dict(...)` call argument of a functional `TypedDict` (e.g.
/// `TypedDict('T', dict(a=int))`) into annotated-assignment statements.
fn get_properties_from_dict_call(func: &Expr, keywords: &[Keyword]) -> Result<Vec<Stmt>> {
    match &func.node {
        // Only a literal `dict(...)` call can be expanded; the keyword
        // arguments carry the property definitions.
        ExprKind::Name { id, .. } => {
            if id == "dict" {
                get_properties_from_keywords(keywords)
            } else {
                bail!("Expected `id` to be `\"dict\"`")
            }
        }
        _ => bail!("Expected `func` to be `ExprKind::Name`"),
    }
}
// Deprecated in Python 3.11, removed in Python 3.13.
/// Convert the keyword arguments of a functional `TypedDict` (e.g.
/// `TypedDict('T', a=int)`) into annotated-assignment statements.
fn get_properties_from_keywords(keywords: &[Keyword]) -> Result<Vec<Stmt>> {
    keywords
        .iter()
        .map(|keyword| {
            if let Some(property) = &keyword.node.arg {
                Ok(create_property_assignment_stmt(
                    property,
                    &keyword.node.value.node,
                ))
            } else {
                // NOTE(review): `arg == None` presumably corresponds to a
                // `**`-unpacking, which has no property name — confirm.
                bail!("Expected `arg` to be `Some`")
            }
        })
        .collect()
}
// The only way to have the `total` keyword is to use the args version, like:
// (`TypedDict('name', {'a': int}, total=True)`)
/// Return the `total=...` keyword data if it is the sole (first) keyword.
fn get_total_from_only_keyword(keywords: &[Keyword]) -> Option<&KeywordData> {
    let keyword = keywords.get(0)?;
    let arg = keyword.node.arg.as_ref()?;
    if arg.as_str() == "total" {
        Some(&keyword.node)
    } else {
        None
    }
}
/// Extract the class-body statements and the optional `total=` keyword from a
/// functional `TypedDict` call's arguments.
fn get_properties_and_total(
    args: &[Expr],
    keywords: &[Keyword],
) -> Result<(Vec<Stmt>, Option<KeywordData>)> {
    // We don't have to manage the hybrid case because it's not possible to have a
    // dict and keywords. For example, the following is illegal:
    // MyType = TypedDict('MyType', {'a': int, 'b': str}, a=int, b=str)
    if let Some(dict) = args.get(1) {
        // With a positional fields argument, `total` can only appear as the
        // lone keyword.
        let total = get_total_from_only_keyword(keywords).cloned();
        match &dict.node {
            ExprKind::Dict { keys, values } => {
                Ok((get_properties_from_dict_literal(keys, values)?, total))
            }
            ExprKind::Call { func, keywords, .. } => {
                Ok((get_properties_from_dict_call(func, keywords)?, total))
            }
            // Any other expression (e.g. a variable) can't be expanded into
            // attributes: fall back to an empty `pass` body.
            _ => Ok((vec![create_pass_stmt()], total)),
        }
    } else if !keywords.is_empty() {
        // Keyword-only form: `TypedDict('T', a=int, ...)`; no `total` here.
        Ok((get_properties_from_keywords(keywords)?, None))
    } else {
        // No fields at all: the class body is a bare `pass`.
        Ok((vec![create_pass_stmt()], None))
    }
}
/// Generate a `Fix` to convert a `TypedDict` from functional to class syntax,
/// replacing the entire assignment statement with the generated `class` block.
///
/// # Errors
/// Propagates any failure from unparsing the synthetic AST back into source.
fn convert_to_class(
    stmt: &Stmt,
    class_name: &str,
    body: Vec<Stmt>,
    total_keyword: Option<KeywordData>,
    base_class: &ExprKind,
) -> Result<Fix> {
    let mut generator = SourceGenerator::new();
    generator.unparse_stmt(&create_class_def_stmt(
        class_name,
        body,
        total_keyword,
        base_class,
    ))?;
    let content = generator.generate()?;
    Ok(Fix::replacement(
        content,
        stmt.location,
        stmt.end_location.unwrap(),
    ))
}
/// U013
pub fn convert_typed_dict_functional_to_class(
checker: &mut Checker,
stmt: &Stmt,
targets: &[Expr],
value: &Expr,
) {
if let Some((class_name, args, keywords, base_class)) =
match_typed_dict_assign(checker, targets, value)
{
match get_properties_and_total(args, keywords) {
Err(err) => error!("Failed to parse TypedDict: {}", err),
Ok((body, total_keyword)) => {
let mut check = Check::new(
CheckKind::ConvertTypedDictFunctionalToClass,
Range::from_located(stmt),
);
if checker.patch(check.kind.code()) {
match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
Ok(fix) => check.amend(fix),
Err(err) => error!("Failed to convert TypedDict: {}", err),
};
}
checker.add_check(check);
}
}
}
}

View File

@@ -37,7 +37,7 @@ pub fn deprecated_unittest_alias(checker: &mut Checker, expr: &Expr) {
CheckKind::DeprecatedUnittestAlias(attr.to_string(), target.to_string()),
Range::from_located(expr),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
format!("self.{}", target),
expr.location,

View File

@@ -1,3 +1,4 @@
pub use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
pub use deprecated_unittest_alias::deprecated_unittest_alias;
pub use super_call_with_parameters::super_call_with_parameters;
pub use type_of_primitive::type_of_primitive;
@@ -9,6 +10,7 @@ pub use use_pep604_annotation::use_pep604_annotation;
pub use useless_metaclass_type::useless_metaclass_type;
pub use useless_object_inheritance::useless_object_inheritance;
mod convert_typed_dict_functional_to_class;
mod deprecated_unittest_alias;
mod super_call_with_parameters;
mod type_of_primitive;

View File

@@ -17,7 +17,7 @@ pub fn super_call_with_parameters(checker: &mut Checker, expr: &Expr, func: &Exp
.map(|index| checker.parents[*index])
.collect();
if let Some(mut check) = checks::super_args(scope, &parents, expr, func, args) {
if checker.patch() {
if checker.patch(check.kind.code()) {
if let Some(fix) = pyupgrade::fixes::remove_super_arguments(checker.locator, expr) {
check.amend(fix);
}

View File

@@ -9,7 +9,7 @@ use crate::pyupgrade::checks;
/// U003
pub fn type_of_primitive(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let Some(mut check) = checks::type_of_primitive(func, args, Range::from_located(expr)) {
if checker.patch() {
if checker.patch(check.kind.code()) {
if let CheckKind::TypeOfPrimitive(primitive) = &check.kind {
check.amend(Fix::replacement(
primitive.builtin(),

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Constant, Expr, ExprKind, Keyword};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::checks::{Check, CheckCode, CheckKind};
use crate::source_code_locator::SourceCodeLocator;
const UTF8_LITERALS: &[&str] = &["utf-8", "utf8", "utf_8", "u8", "utf", "cp65001"];
@@ -124,13 +124,16 @@ pub fn unnecessary_encode_utf8(
expr,
variable,
checker.locator,
checker.patch(),
checker.patch(&CheckCode::U012),
));
} else {
// "unicode text©".encode("utf-8")
if let Some(check) =
delete_default_encode_arg_or_kwarg(expr, args, kwargs, checker.patch())
{
if let Some(check) = delete_default_encode_arg_or_kwarg(
expr,
args,
kwargs,
checker.patch(&CheckCode::U012),
) {
checker.add_check(check);
}
}
@@ -139,9 +142,12 @@ pub fn unnecessary_encode_utf8(
// f"foo{bar}".encode(*args, **kwargs)
ExprKind::JoinedStr { .. } => {
if is_default_encode(args, kwargs) {
if let Some(check) =
delete_default_encode_arg_or_kwarg(expr, args, kwargs, checker.patch())
{
if let Some(check) = delete_default_encode_arg_or_kwarg(
expr,
args,
kwargs,
checker.patch(&CheckCode::U012),
) {
checker.add_check(check);
}
}

View File

@@ -56,7 +56,7 @@ pub fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, names: &[Lo
),
Range::from_located(stmt),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
let context = checker.binding_context();
let deleted: Vec<&Stmt> = checker
.deletions

View File

@@ -11,7 +11,7 @@ pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Exp
&checker.from_imports,
&checker.import_aliases,
) {
if checker.patch() {
if checker.patch(check.kind.code()) {
if let Some(fix) =
fixes::remove_unnecessary_lru_cache_params(checker.locator, &check.location)
{

View File

@@ -12,7 +12,7 @@ pub fn use_pep585_annotation(checker: &mut Checker, expr: &Expr, id: &str) {
CheckKind::UsePEP585Annotation(replacement.to_string()),
Range::from_located(expr),
);
if checker.patch() {
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
replacement.to_lowercase(),
expr.location,

View File

@@ -47,7 +47,7 @@ pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, s
let call_path = dealias_call_path(collect_call_paths(value), &checker.import_aliases);
if checker.match_typing_call_path(&call_path, "Optional") {
let mut check = Check::new(CheckKind::UsePEP604Annotation, Range::from_located(expr));
if checker.patch() {
if checker.patch(check.kind.code()) {
let mut generator = SourceGenerator::new();
if let Ok(()) = generator.unparse_expr(&optional(slice), 0) {
if let Ok(content) = generator.generate() {
@@ -62,7 +62,7 @@ pub fn use_pep604_annotation(checker: &mut Checker, expr: &Expr, value: &Expr, s
checker.add_check(check);
} else if checker.match_typing_call_path(&call_path, "Union") {
let mut check = Check::new(CheckKind::UsePEP604Annotation, Range::from_located(expr));
if checker.patch() {
if checker.patch(check.kind.code()) {
match &slice.node {
ExprKind::Slice { .. } => {
// Invalid type annotation.

View File

@@ -11,7 +11,7 @@ pub fn useless_metaclass_type(checker: &mut Checker, stmt: &Stmt, value: &Expr,
if let Some(mut check) =
checks::useless_metaclass_type(targets, value, Range::from_located(stmt))
{
if checker.patch() {
if checker.patch(check.kind.code()) {
let context = checker.binding_context();
let deleted: Vec<&Stmt> = checker
.deletions

View File

@@ -14,7 +14,7 @@ pub fn useless_object_inheritance(
) {
let scope = checker.current_scope();
if let Some(mut check) = checks::useless_object_inheritance(name, bases, scope) {
if checker.patch() {
if checker.patch(check.kind.code()) {
if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
checker.locator,
&stmt.location,

View File

@@ -3,10 +3,10 @@ use once_cell::sync::Lazy;
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::autofix::{fixer, Fix};
use crate::checks::CheckKind;
use crate::source_code_locator::SourceCodeLocator;
use crate::Check;
use crate::{Check, Settings};
/// See: https://github.com/microsoft/vscode/blob/095ddabc52b82498ee7f718a34f9dd11d59099a8/src/vs/base/common/strings.ts#L1094
static CONFUSABLES: Lazy<FnvHashMap<u32, u32>> = Lazy::new(|| {
@@ -1606,7 +1606,8 @@ pub fn ambiguous_unicode_character(
start: &Location,
end: &Location,
context: Context,
fix: bool,
settings: &Settings,
autofix: &fixer::Mode,
) -> Vec<Check> {
let mut checks = vec![];
@@ -1645,14 +1646,16 @@ pub fn ambiguous_unicode_character(
end_location,
},
);
if fix {
check.amend(Fix::replacement(
representant.to_string(),
location,
end_location,
));
if settings.enabled.contains(check.kind.code()) {
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::replacement(
representant.to_string(),
location,
end_location,
));
}
checks.push(check);
}
checks.push(check);
}
}

View File

@@ -13,7 +13,8 @@ use crate::checks_gen::CheckCodePrefix;
use crate::settings::pyproject::load_options;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, fs, isort, pep8_naming,
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, fs, isort, mccabe,
pep8_naming,
};
#[derive(Debug)]
@@ -24,18 +25,22 @@ pub struct Configuration {
pub extend_ignore: Vec<CheckCodePrefix>,
pub extend_select: Vec<CheckCodePrefix>,
pub fix: bool,
pub fixable: Vec<CheckCodePrefix>,
pub ignore: Vec<CheckCodePrefix>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
pub select: Vec<CheckCodePrefix>,
pub show_source: bool,
pub src: Vec<PathBuf>,
pub target_version: PythonVersion,
pub unfixable: Vec<CheckCodePrefix>,
// Plugins
pub flake8_annotations: flake8_annotations::settings::Settings,
pub flake8_bugbear: flake8_bugbear::settings::Settings,
pub flake8_quotes: flake8_quotes::settings::Settings,
pub flake8_tidy_imports: flake8_tidy_imports::settings::Settings,
pub isort: isort::settings::Settings,
pub mccabe: mccabe::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
}
@@ -119,6 +124,28 @@ impl Configuration {
.unwrap_or_else(|| vec![CheckCodePrefix::E, CheckCodePrefix::F]),
extend_select: options.extend_select.unwrap_or_default(),
fix: options.fix.unwrap_or_default(),
fixable: options.fixable.unwrap_or_else(|| {
// TODO(charlie): Autogenerate this list.
vec![
CheckCodePrefix::A,
CheckCodePrefix::B,
CheckCodePrefix::BLE,
CheckCodePrefix::C,
CheckCodePrefix::D,
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::I,
CheckCodePrefix::M,
CheckCodePrefix::N,
CheckCodePrefix::Q,
CheckCodePrefix::S,
CheckCodePrefix::T,
CheckCodePrefix::U,
CheckCodePrefix::W,
CheckCodePrefix::YTT,
]
}),
unfixable: options.unfixable.unwrap_or_default(),
ignore: options.ignore.unwrap_or_default(),
line_length: options.line_length.unwrap_or(88),
per_file_ignores: options
@@ -132,6 +159,7 @@ impl Configuration {
.collect()
})
.unwrap_or_default(),
show_source: options.show_source.unwrap_or_default(),
// Plugins
flake8_annotations: options
.flake8_annotations
@@ -153,6 +181,10 @@ impl Configuration {
.isort
.map(isort::settings::Settings::from_options)
.unwrap_or_default(),
mccabe: options
.mccabe
.map(mccabe::settings::Settings::from_options)
.unwrap_or_default(),
pep8_naming: options
.pep8_naming
.map(pep8_naming::settings::Settings::from_options)

View File

@@ -14,7 +14,8 @@ use crate::checks_gen::{CheckCodePrefix, PrefixSpecificity};
use crate::settings::configuration::Configuration;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, isort, pep8_naming,
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, isort, mccabe,
pep8_naming,
};
pub mod configuration;
@@ -29,8 +30,10 @@ pub struct Settings {
pub enabled: FnvHashSet<CheckCode>,
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
pub fixable: FnvHashSet<CheckCode>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
pub show_source: bool,
pub src: Vec<PathBuf>,
pub target_version: PythonVersion,
// Plugins
@@ -39,6 +42,7 @@ pub struct Settings {
pub flake8_quotes: flake8_quotes::settings::Settings,
pub flake8_tidy_imports: flake8_tidy_imports::settings::Settings,
pub isort: isort::settings::Settings,
pub mccabe: mccabe::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
}
@@ -47,30 +51,40 @@ impl Settings {
Self {
dummy_variable_rgx: config.dummy_variable_rgx,
enabled: resolve_codes(
&config.select,
&config.extend_select,
&config.ignore,
&config.extend_ignore,
&config
.select
.into_iter()
.chain(config.extend_select.into_iter())
.collect::<Vec<_>>(),
&config
.ignore
.into_iter()
.chain(config.extend_ignore.into_iter())
.collect::<Vec<_>>(),
),
exclude: config.exclude,
extend_exclude: config.extend_exclude,
fixable: resolve_codes(&config.fixable, &config.unfixable),
flake8_annotations: config.flake8_annotations,
flake8_bugbear: config.flake8_bugbear,
flake8_quotes: config.flake8_quotes,
flake8_tidy_imports: config.flake8_tidy_imports,
isort: config.isort,
mccabe: config.mccabe,
line_length: config.line_length,
pep8_naming: config.pep8_naming,
per_file_ignores: config.per_file_ignores,
src: config.src,
target_version: config.target_version,
show_source: config.show_source,
}
}
pub fn for_rule(check_code: CheckCode) -> Self {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FnvHashSet::from_iter([check_code]),
enabled: FnvHashSet::from_iter([check_code.clone()]),
fixable: FnvHashSet::from_iter([check_code]),
exclude: Default::default(),
extend_exclude: Default::default(),
line_length: 88,
@@ -82,14 +96,17 @@ impl Settings {
flake8_quotes: Default::default(),
flake8_tidy_imports: Default::default(),
isort: Default::default(),
mccabe: Default::default(),
pep8_naming: Default::default(),
show_source: Default::default(),
}
}
pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FnvHashSet::from_iter(check_codes),
enabled: FnvHashSet::from_iter(check_codes.clone()),
fixable: FnvHashSet::from_iter(check_codes),
exclude: Default::default(),
extend_exclude: Default::default(),
line_length: 88,
@@ -101,7 +118,9 @@ impl Settings {
flake8_quotes: Default::default(),
flake8_tidy_imports: Default::default(),
isort: Default::default(),
mccabe: Default::default(),
pep8_naming: Default::default(),
show_source: Default::default(),
}
}
}
@@ -113,10 +132,14 @@ impl Hash for Settings {
for value in self.enabled.iter() {
value.hash(state);
}
for value in self.fixable.iter() {
value.hash(state);
}
self.line_length.hash(state);
for value in self.per_file_ignores.iter() {
value.hash(state);
}
self.show_source.hash(state);
self.target_version.hash(state);
// Add plugin properties in alphabetical order.
self.flake8_annotations.hash(state);
@@ -124,18 +147,14 @@ impl Hash for Settings {
self.flake8_quotes.hash(state);
self.flake8_tidy_imports.hash(state);
self.isort.hash(state);
self.mccabe.hash(state);
self.pep8_naming.hash(state);
}
}
/// Given a set of selected and ignored prefixes, resolve the set of enabled
/// error codes.
fn resolve_codes(
select: &[CheckCodePrefix],
extend_select: &[CheckCodePrefix],
ignore: &[CheckCodePrefix],
extend_ignore: &[CheckCodePrefix],
) -> FnvHashSet<CheckCode> {
fn resolve_codes(select: &[CheckCodePrefix], ignore: &[CheckCodePrefix]) -> FnvHashSet<CheckCode> {
let mut codes: FnvHashSet<CheckCode> = FnvHashSet::default();
for specificity in [
PrefixSpecificity::Category,
@@ -148,11 +167,6 @@ fn resolve_codes(
codes.extend(prefix.codes());
}
}
for prefix in extend_select {
if prefix.specificity() == specificity {
codes.extend(prefix.codes());
}
}
for prefix in ignore {
if prefix.specificity() == specificity {
for code in prefix.codes() {
@@ -160,13 +174,6 @@ fn resolve_codes(
}
}
}
for prefix in extend_ignore {
if prefix.specificity() == specificity {
for code in prefix.codes() {
codes.remove(&code);
}
}
}
}
codes
}
@@ -181,19 +188,19 @@ mod tests {
#[test]
fn resolver() {
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[], &[]);
let actual = resolve_codes(&[CheckCodePrefix::W], &[]);
let expected = FnvHashSet::from_iter([CheckCode::W292, CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W6], &[], &[], &[]);
let actual = resolve_codes(&[CheckCodePrefix::W6], &[]);
let expected = FnvHashSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[CheckCodePrefix::W292], &[]);
let actual = resolve_codes(&[CheckCodePrefix::W], &[CheckCodePrefix::W292]);
let expected = FnvHashSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W605], &[], &[CheckCodePrefix::W605], &[]);
let actual = resolve_codes(&[CheckCodePrefix::W605], &[CheckCodePrefix::W605]);
let expected = FnvHashSet::from_iter([]);
assert_eq!(actual, expected);
}

View File

@@ -6,7 +6,8 @@ use serde::{Deserialize, Serialize};
use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::PythonVersion;
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, isort, pep8_naming,
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, isort, mccabe,
pep8_naming,
};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
@@ -18,17 +19,21 @@ pub struct Options {
pub extend_ignore: Option<Vec<CheckCodePrefix>>,
pub extend_select: Option<Vec<CheckCodePrefix>>,
pub fix: Option<bool>,
pub fixable: Option<Vec<CheckCodePrefix>>,
pub ignore: Option<Vec<CheckCodePrefix>>,
pub line_length: Option<usize>,
pub select: Option<Vec<CheckCodePrefix>>,
pub show_source: Option<bool>,
pub src: Option<Vec<String>>,
pub target_version: Option<PythonVersion>,
pub unfixable: Option<Vec<CheckCodePrefix>>,
// Plugins
pub flake8_annotations: Option<flake8_annotations::settings::Options>,
pub flake8_bugbear: Option<flake8_bugbear::settings::Options>,
pub flake8_quotes: Option<flake8_quotes::settings::Options>,
pub flake8_tidy_imports: Option<flake8_tidy_imports::settings::Options>,
pub isort: Option<isort::settings::Options>,
pub mccabe: Option<mccabe::settings::Options>,
pub pep8_naming: Option<pep8_naming::settings::Options>,
// Tables are required to go last.
pub per_file_ignores: Option<FnvHashMap<String, Vec<CheckCodePrefix>>>,

View File

@@ -110,7 +110,7 @@ mod tests {
find_project_root, find_pyproject_toml, parse_pyproject_toml, Options, Pyproject, Tools,
};
use crate::settings::types::PatternPrefixPair;
use crate::{flake8_bugbear, flake8_quotes, flake8_tidy_imports, pep8_naming};
use crate::{flake8_bugbear, flake8_quotes, flake8_tidy_imports, mccabe, pep8_naming};
#[test]
fn deserialize() -> Result<()> {
@@ -134,23 +134,27 @@ mod tests {
pyproject.tool,
Some(Tools {
ruff: Some(Options {
line_length: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
select: None,
extend_select: None,
ignore: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: None,
line_length: None,
per_file_ignores: None,
dummy_variable_rgx: None,
select: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
})
})
@@ -167,23 +171,27 @@ line-length = 79
pyproject.tool,
Some(Tools {
ruff: Some(Options {
line_length: Some(79),
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
select: None,
extend_select: None,
ignore: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: None,
line_length: Some(79),
per_file_ignores: None,
dummy_variable_rgx: None,
select: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
})
})
@@ -208,15 +216,19 @@ exclude = ["foo.py"]
extend_select: None,
ignore: None,
extend_ignore: None,
fixable: None,
unfixable: None,
per_file_ignores: None,
dummy_variable_rgx: None,
src: None,
target_version: None,
show_source: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
})
})
@@ -233,23 +245,27 @@ select = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
line_length: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
select: Some(vec![CheckCodePrefix::E501]),
extend_select: None,
ignore: None,
extend_ignore: None,
extend_select: None,
fix: None,
fixable: None,
ignore: None,
line_length: None,
per_file_ignores: None,
dummy_variable_rgx: None,
select: Some(vec![CheckCodePrefix::E501]),
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
})
})
@@ -267,23 +283,27 @@ ignore = ["E501"]
pyproject.tool,
Some(Tools {
ruff: Some(Options {
line_length: None,
fix: None,
dummy_variable_rgx: None,
exclude: None,
extend_exclude: None,
select: None,
extend_select: Some(vec![CheckCodePrefix::M001]),
ignore: Some(vec![CheckCodePrefix::E501]),
extend_ignore: None,
extend_select: Some(vec![CheckCodePrefix::M001]),
fix: None,
fixable: None,
ignore: Some(vec![CheckCodePrefix::E501]),
line_length: None,
per_file_ignores: None,
dummy_variable_rgx: None,
select: None,
show_source: None,
src: None,
target_version: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
flake8_quotes: None,
flake8_tidy_imports: None,
isort: None,
mccabe: None,
pep8_naming: None,
})
})
@@ -352,6 +372,8 @@ other-attribute = 1
extend_select: None,
ignore: None,
extend_ignore: None,
fixable: None,
unfixable: None,
per_file_ignores: Some(FnvHashMap::from_iter([(
"__init__.py".to_string(),
vec![CheckCodePrefix::F401]
@@ -359,6 +381,7 @@ other-attribute = 1
dummy_variable_rgx: None,
src: None,
target_version: None,
show_source: None,
flake8_annotations: None,
flake8_bugbear: Some(flake8_bugbear::settings::Options {
extend_immutable_calls: Some(vec![
@@ -376,6 +399,9 @@ other-attribute = 1
ban_relative_imports: Some(Strictness::Parents)
}),
isort: None,
mccabe: Some(mccabe::settings::Options {
max_complexity: Some(10),
}),
pep8_naming: Some(pep8_naming::settings::Options {
ignore_names: Some(vec![
"setUp".to_string(),

View File

@@ -43,12 +43,15 @@ pub struct UserConfiguration {
pub extend_ignore: Vec<CheckCodePrefix>,
pub extend_select: Vec<CheckCodePrefix>,
pub fix: bool,
pub fixable: Vec<CheckCodePrefix>,
pub ignore: Vec<CheckCodePrefix>,
pub line_length: usize,
pub per_file_ignores: Vec<(Exclusion, Vec<CheckCode>)>,
pub select: Vec<CheckCodePrefix>,
pub show_source: bool,
pub src: Vec<PathBuf>,
pub target_version: PythonVersion,
pub unfixable: Vec<CheckCodePrefix>,
// Plugins
pub flake8_annotations: flake8_annotations::settings::Settings,
pub flake8_quotes: flake8_quotes::settings::Settings,
@@ -81,6 +84,8 @@ impl UserConfiguration {
extend_ignore: configuration.extend_ignore,
extend_select: configuration.extend_select,
fix: configuration.fix,
fixable: configuration.fixable,
unfixable: configuration.unfixable,
ignore: configuration.ignore,
line_length: configuration.line_length,
per_file_ignores: configuration
@@ -96,6 +101,7 @@ impl UserConfiguration {
select: configuration.select,
src: configuration.src,
target_version: configuration.target_version,
show_source: configuration.show_source,
flake8_annotations: configuration.flake8_annotations,
flake8_quotes: configuration.flake8_quotes,
flake8_tidy_imports: configuration.flake8_tidy_imports,

View File

@@ -90,4 +90,28 @@ expression: checks
row: 170
column: 48
fix: ~
- kind: MutableArgumentDefault
location:
row: 203
column: 26
end_location:
row: 203
column: 28
fix: ~
- kind: MutableArgumentDefault
location:
row: 204
column: 34
end_location:
row: 204
column: 36
fix: ~
- kind: MutableArgumentDefault
location:
row: 205
column: 61
end_location:
row: 205
column: 66
fix: ~

View File

@@ -0,0 +1,85 @@
---
source: src/linter.rs
expression: checks
---
- kind: BlindExcept
location:
row: 5
column: 7
end_location:
row: 5
column: 16
fix: ~
- kind: BlindExcept
location:
row: 13
column: 7
end_location:
row: 13
column: 20
fix: ~
- kind: BlindExcept
location:
row: 23
column: 7
end_location:
row: 23
column: 16
fix: ~
- kind: BlindExcept
location:
row: 25
column: 7
end_location:
row: 25
column: 20
fix: ~
- kind: BlindExcept
location:
row: 31
column: 7
end_location:
row: 31
column: 16
fix: ~
- kind: BlindExcept
location:
row: 36
column: 11
end_location:
row: 36
column: 24
fix: ~
- kind: BlindExcept
location:
row: 42
column: 7
end_location:
row: 42
column: 16
fix: ~
- kind: BlindExcept
location:
row: 45
column: 11
end_location:
row: 45
column: 24
fix: ~
- kind: BlindExcept
location:
row: 52
column: 11
end_location:
row: 52
column: 24
fix: ~
- kind: BlindExcept
location:
row: 54
column: 7
end_location:
row: 54
column: 16
fix: ~

View File

@@ -9,7 +9,16 @@ expression: checks
end_location:
row: 2
column: 13
fix: ~
fix:
patch:
content: X not in Y
location:
row: 2
column: 3
end_location:
row: 2
column: 13
applied: false
- kind: NotInTest
location:
row: 5
@@ -17,7 +26,16 @@ expression: checks
end_location:
row: 5
column: 15
fix: ~
fix:
patch:
content: X.B not in Y
location:
row: 5
column: 3
end_location:
row: 5
column: 15
applied: false
- kind: NotInTest
location:
row: 8
@@ -25,7 +43,16 @@ expression: checks
end_location:
row: 8
column: 13
fix: ~
fix:
patch:
content: X not in Y
location:
row: 8
column: 3
end_location:
row: 8
column: 13
applied: false
- kind: NotInTest
location:
row: 11
@@ -33,7 +60,16 @@ expression: checks
end_location:
row: 11
column: 28
fix: ~
fix:
patch:
content: Y not in Z
location:
row: 11
column: 18
end_location:
row: 11
column: 28
applied: false
- kind: NotInTest
location:
row: 14
@@ -41,5 +77,14 @@ expression: checks
end_location:
row: 14
column: 14
fix: ~
fix:
patch:
content: X not in Y
location:
row: 14
column: 3
end_location:
row: 14
column: 15
applied: false

View File

@@ -9,7 +9,16 @@ expression: checks
end_location:
row: 2
column: 13
fix: ~
fix:
patch:
content: X is not Y
location:
row: 2
column: 3
end_location:
row: 2
column: 13
applied: false
- kind: NotIsTest
location:
row: 5
@@ -17,7 +26,16 @@ expression: checks
end_location:
row: 5
column: 15
fix: ~
fix:
patch:
content: X.B is not Y
location:
row: 5
column: 3
end_location:
row: 5
column: 15
applied: false
- kind: NotIsTest
location:
row: 8

View File

@@ -9,7 +9,16 @@ expression: checks
end_location:
row: 2
column: 19
fix: ~
fix:
patch:
content: "def f(x):\n return (2 * x)"
location:
row: 2
column: 0
end_location:
row: 2
column: 19
applied: false
- kind: DoNotAssignLambda
location:
row: 4
@@ -17,7 +26,16 @@ expression: checks
end_location:
row: 4
column: 19
fix: ~
fix:
patch:
content: "def f(x):\n return (2 * x)"
location:
row: 4
column: 0
end_location:
row: 4
column: 19
applied: false
- kind: DoNotAssignLambda
location:
row: 7
@@ -25,5 +43,14 @@ expression: checks
end_location:
row: 7
column: 29
fix: ~
fix:
patch:
content: "def this(y, z):\n return (2 * x)"
location:
row: 7
column: 4
end_location:
row: 7
column: 29
applied: false

View File

@@ -47,4 +47,13 @@ expression: checks
row: 21
column: 9
fix: ~
- kind:
UnusedVariable: b
location:
row: 51
column: 8
end_location:
row: 51
column: 9
fix: ~

View File

@@ -0,0 +1,69 @@
---
source: src/linter.rs
expression: checks
---
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 4
column: 4
end_location:
row: 4
column: 26
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 5
column: 4
end_location:
row: 5
column: 31
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 10
column: 4
end_location:
row: 10
column: 36
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 11
column: 4
end_location:
row: 11
column: 41
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 14
column: 4
end_location:
row: 14
column: 37
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 15
column: 4
end_location:
row: 15
column: 42
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 18
column: 4
end_location:
row: 18
column: 40
fix: ~
- kind: BooleanPositionalArgInFunctionDefinition
location:
row: 19
column: 4
end_location:
row: 19
column: 45
fix: ~

View File

@@ -0,0 +1,37 @@
---
source: src/linter.rs
expression: checks
---
- kind: BooleanDefaultValueInFunctionDefinition
location:
row: 12
column: 30
end_location:
row: 12
column: 34
fix: ~
- kind: BooleanDefaultValueInFunctionDefinition
location:
row: 13
column: 42
end_location:
row: 13
column: 46
fix: ~
- kind: BooleanDefaultValueInFunctionDefinition
location:
row: 14
column: 40
end_location:
row: 14
column: 44
fix: ~
- kind: BooleanDefaultValueInFunctionDefinition
location:
row: 15
column: 45
end_location:
row: 15
column: 49
fix: ~

View File

@@ -0,0 +1,13 @@
---
source: src/linter.rs
expression: checks
---
- kind: BooleanPositionalValueInFunctionCall
location:
row: 41
column: 10
end_location:
row: 41
column: 14
fix: ~

View File

@@ -0,0 +1,175 @@
---
source: src/linter.rs
expression: checks
---
- kind: ConvertTypedDictFunctionalToClass
location:
row: 5
column: 0
end_location:
row: 5
column: 52
fix:
patch:
content: "class MyType1(TypedDict):\n a: int\n b: str"
location:
row: 5
column: 0
end_location:
row: 5
column: 52
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 8
column: 0
end_location:
row: 8
column: 50
fix:
patch:
content: "class MyType2(TypedDict):\n a: int\n b: str"
location:
row: 8
column: 0
end_location:
row: 8
column: 50
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 11
column: 0
end_location:
row: 11
column: 44
fix:
patch:
content: "class MyType3(TypedDict):\n a: int\n b: str"
location:
row: 11
column: 0
end_location:
row: 11
column: 44
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 14
column: 0
end_location:
row: 14
column: 30
fix:
patch:
content: "class MyType4(TypedDict):\n pass"
location:
row: 14
column: 0
end_location:
row: 14
column: 30
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 17
column: 0
end_location:
row: 17
column: 46
fix:
patch:
content: "class MyType5(TypedDict):\n a: 'hello'"
location:
row: 17
column: 0
end_location:
row: 17
column: 46
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 18
column: 0
end_location:
row: 18
column: 41
fix:
patch:
content: "class MyType6(TypedDict):\n a: 'hello'"
location:
row: 18
column: 0
end_location:
row: 18
column: 41
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 21
column: 0
end_location:
row: 21
column: 56
fix:
patch:
content: "class MyType7(TypedDict):\n a: NotRequired[dict]"
location:
row: 21
column: 0
end_location:
row: 21
column: 56
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 24
column: 0
end_location:
row: 24
column: 65
fix:
patch:
content: "class MyType8(TypedDict, total=False):\n x: int\n y: int"
location:
row: 24
column: 0
end_location:
row: 24
column: 65
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 30
column: 0
end_location:
row: 30
column: 59
fix:
patch:
content: "class MyType10(TypedDict):\n key: Literal['value']"
location:
row: 30
column: 0
end_location:
row: 30
column: 59
applied: false
- kind: ConvertTypedDictFunctionalToClass
location:
row: 33
column: 0
end_location:
row: 33
column: 53
fix:
patch:
content: "class MyType11(typing.TypedDict):\n key: int"
location:
row: 33
column: 0
end_location:
row: 33
column: 53
applied: false

View File

@@ -65,4 +65,13 @@ expression: checks
row: 37
column: 14
fix: ~
- kind:
UnusedVariable: b
location:
row: 51
column: 8
end_location:
row: 51
column: 9
fix: ~

View File

@@ -6,7 +6,7 @@ expression: checks
UnusedNOQA: ~
location:
row: 9
column: 9
column: 11
end_location:
row: 9
column: 17
@@ -25,7 +25,7 @@ expression: checks
- E501
location:
row: 13
column: 9
column: 11
end_location:
row: 13
column: 23
@@ -45,7 +45,7 @@ expression: checks
- E501
location:
row: 16
column: 9
column: 11
end_location:
row: 16
column: 29
@@ -61,16 +61,17 @@ expression: checks
applied: false
- kind:
UnusedNOQA:
- F841
- W191
location:
row: 19
column: 9
column: 11
end_location:
row: 19
column: 29
fix:
patch:
content: " # noqa: F841"
content: ""
location:
row: 19
column: 9
@@ -78,60 +79,107 @@ expression: checks
row: 19
column: 29
applied: false
- kind:
UnusedNOQA:
- E501
location:
row: 23
column: 9
end_location:
row: 23
column: 21
fix:
patch:
content: ""
location:
row: 23
column: 9
end_location:
row: 23
column: 21
applied: false
- kind:
UnusedVariable: d
location:
row: 26
column: 4
end_location:
row: 26
column: 5
fix: ~
- kind:
UnusedNOQA:
- E501
location:
row: 26
column: 32
end_location:
row: 26
column: 44
fix:
patch:
content: ""
location:
row: 26
column: 9
end_location:
row: 26
column: 44
applied: false
- kind:
UnusedNOQA:
- F841
location:
row: 44
column: 3
row: 52
column: 5
end_location:
row: 44
row: 52
column: 23
fix:
patch:
content: " # noqa: E501"
content: "# noqa: E501"
location:
row: 44
column: 3
row: 52
column: 5
end_location:
row: 44
row: 52
column: 23
applied: false
- kind:
UnusedNOQA:
- E501
location:
row: 52
column: 3
row: 60
column: 5
end_location:
row: 52
row: 60
column: 17
fix:
patch:
content: ""
location:
row: 52
row: 60
column: 3
end_location:
row: 52
row: 60
column: 17
applied: false
- kind:
UnusedNOQA: ~
location:
row: 60
column: 3
row: 68
column: 5
end_location:
row: 60
row: 68
column: 11
fix:
patch:
content: ""
location:
row: 60
row: 68
column: 3
end_location:
row: 60
row: 68
column: 11
applied: false

View File

@@ -78,3 +78,15 @@ fn test_stdin_autofix_when_no_issues_should_still_print_contents() -> Result<()>
);
Ok(())
}
#[test]
fn test_show_source() -> Result<()> {
let mut cmd = Command::cargo_bin(crate_name!())?;
let output = cmd
.args(["-", "--show-source"])
.write_stdin("l = 1")
.assert()
.failure();
assert!(str::from_utf8(&output.get_output().stdout)?.contains("l = 1"));
Ok(())
}