Compare commits

...

25 Commits

Author SHA1 Message Date
Charlie Marsh
8a26c8b4e0 Fix wasm builds 2023-01-09 12:58:07 -05:00
Charlie Marsh
2cb59b0f45 Use dedicated warnings for flake8-to-ruff (#1748) 2023-01-09 12:48:06 -05:00
Charlie Marsh
2729f3d207 Add support for defining extra builtins (#1747)
Resolves #1745.
2023-01-09 12:24:28 -05:00
Charlie Marsh
59155ce9f6 Rename checks and plugins to rules (#1739) 2023-01-09 01:39:51 -05:00
Charlie Marsh
caf6c65de7 Bump version to 0.0.216 2023-01-09 01:14:28 -05:00
Matt Oberle
147d594b38 Add isort.force-sort-within-sections setting (#1635)
This commit is a first attempt at addressing issue #1003.

The default `isort` behavior is `force-sort-within-sections = false`,
which places `from X import Y` statements after `import X` statements.

When `force-sort-within-sections = true` all imports are sorted by
module name.

When module names are equivalent, the `import` statement comes before
the `from` statement.
2023-01-09 01:06:48 -05:00
Charlie Marsh
f18078a1eb Allow unused arguments for empty methods with docstrings (#1742)
Resolves #1741.
2023-01-09 00:34:07 -05:00
Harutaka Kawamura
fe4eb13601 Autofix PT004, PT005, PT024, and PT025 (#1740) 2023-01-08 22:41:00 -05:00
Charlie Marsh
161ab05533 Rename more local usages of check to diagnostic (#1738) 2023-01-08 18:10:08 -05:00
Charlie Marsh
2c537e24cc Move violation structs out of registry.rs (#1728) 2023-01-08 17:54:20 -05:00
Charlie Marsh
0fe349b5f8 Rename CheckCategory to RuleOrigin (#1726) 2023-01-08 17:50:18 -05:00
Charlie Marsh
09dc3c7225 Rename Check to Diagnostic (#1725)
Along with:

- `CheckKind` -> `DiagnosticKind`
- `CheckCode` -> `RuleCode`
- `CheckCodePrefix` -> `RuleCodePrefix`
2023-01-08 17:46:20 -05:00
Harutaka Kawamura
498134b7ee Audit unittest assert methods (#1736)
I ran the following code in Python 3.10 to automatically generate a list
of enums.

```python
import unittest

print(
    ",\n".join(
        sorted(
            m.removeprefix("assert") if m != "assert_" else "Underscore"
            for m in dir(unittest.TestCase)
            if m.startswith("assert")
        )
    )
)
```
2023-01-08 16:21:34 -05:00
Charlie Marsh
0152814a00 Bump version to 0.0.215 2023-01-07 22:17:29 -05:00
Harutaka Kawamura
0b3fab256b Remove assertNotContains (#1729)
`unittest.TestCase` doesn't have a method named `assertNotContains`.
2023-01-07 22:15:48 -05:00
Chammika Mannakkara
212ce4d331 bug-fix: flake8_simplify SIM212 (#1732)
bug-fix in #1717

Use the correct `IfExprWithTwistedArms` struct.
2023-01-07 22:03:48 -05:00
Charlie Marsh
491b1e4968 Move RUFF_CACHE_DIR to Clap's env support (#1733) 2023-01-07 22:01:27 -05:00
Charlie Marsh
8b01b53d89 Move RUFF_CACHE_DIR to Clap's env support (#1733) 2023-01-07 22:01:20 -05:00
messense
f9a5867d3e Add RUFF_FORMAT environment variable support (#1731)
Resolves #1716
2023-01-07 21:54:19 -05:00
Harutaka Kawamura
4149627f19 Add more unittest assert methods to PT009 (#1730) 2023-01-07 21:52:48 -05:00
Charlie Marsh
7d24146df7 Implement --isolated CLI flag (#1727)
Closes #1724.
2023-01-07 18:43:58 -05:00
Charlie Marsh
1c6ef3666c Treat failures to fix TypedDict conversions as debug logs (#1723)
This also allows us to flag the error, even if we can't fix it.

Closes #1212.
2023-01-07 17:51:45 -05:00
Charlie Marsh
16d933fcf5 Respect isort:skip action comment (#1722)
Resolves: #1718.
2023-01-07 17:30:18 -05:00
Charlie Marsh
a9cc56b2ac Add ComparableExpr hierarchy for comparing expressions (#1721) 2023-01-07 17:29:21 -05:00
Charlie Marsh
4de6c26ff9 Automatically remove duplicate dictionary keys (#1710)
For now, to be safe, we're only removing keys with duplicate _values_.

See: #1647.
2023-01-07 16:16:42 -05:00
264 changed files with 8155 additions and 6479 deletions

View File

@@ -4,7 +4,7 @@ Thank you for taking the time to report an issue! We're glad to have you involve
If you're filing a bug report, please consider including the following information:
- A minimal code snippet that reproduces the bug.
- The command you invoked (e.g., `ruff /path/to/file.py --fix`).
- The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
- The current Ruff settings (any relevant sections from your `pyproject.toml`).
- The current Ruff version (`ruff --version`).
-->

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.214
rev: v0.0.216
hooks:
- id: ruff

9
Cargo.lock generated
View File

@@ -735,10 +735,11 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.214-dev.0"
version = "0.0.216-dev.0"
dependencies = [
"anyhow",
"clap 4.0.32",
"colored",
"configparser",
"once_cell",
"regex",
@@ -1873,7 +1874,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.214"
version = "0.0.216"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1941,7 +1942,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.214"
version = "0.0.216"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1961,7 +1962,7 @@ dependencies = [
[[package]]
name = "ruff_macros"
version = "0.0.214"
version = "0.0.216"
dependencies = [
"once_cell",
"proc-macro2",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.214"
version = "0.0.216"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"
@@ -29,7 +29,7 @@ bitflags = { version = "1.3.2" }
cachedir = { version = "0.3.0" }
cfg-if = { version = "1.0.0" }
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
clap = { version = "4.0.1", features = ["derive"] }
clap = { version = "4.0.1", features = ["derive", "env"] }
clap_complete_command = { version = "0.4.0" }
colored = { version = "2.0.0" }
dirs = { version = "4.0.0" }
@@ -51,7 +51,7 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
quick-junit = { version = "0.3.2" }
regex = { version = "1.6.0" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
ruff_macros = { version = "0.0.214", path = "ruff_macros" }
ruff_macros = { version = "0.0.216", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }

View File

@@ -180,7 +180,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.214'
rev: 'v0.0.216'
hooks:
- id: ruff
# Respect `exclude` and `extend-exclude` settings.
@@ -341,8 +341,10 @@ Options:
Avoid writing any fixed files back; instead, output a diff for each changed file to stdout
-n, --no-cache
Disable cache reads
--isolated
Ignore all configuration files
--select <SELECT>
Comma-separated list of error codes to enable (or ALL, to enable all checks)
Comma-separated list of rule codes to enable (or ALL, to enable all rules)
--extend-select <EXTEND_SELECT>
Like --select, but adds additional error codes on top of the selected ones
--ignore <IGNORE>
@@ -360,11 +362,11 @@ Options:
--per-file-ignores <PER_FILE_IGNORES>
List of mappings from file pattern to code to exclude
--format <FORMAT>
Output serialization format for error messages [possible values: text, json, junit, grouped, github, gitlab]
Output serialization format for error messages [env: RUFF_FORMAT=] [possible values: text, json, junit, grouped, github, gitlab]
--stdin-filename <STDIN_FILENAME>
The name of the file when passing it through stdin
--cache-dir <CACHE_DIR>
Path to the cache directory
Path to the cache directory [env: RUFF_CACHE_DIR=]
--show-source
Show violations with source code
--respect-gitignore
@@ -551,8 +553,8 @@ For more, see [Pyflakes](https://pypi.org/project/pyflakes/2.5.0/) on PyPI.
| F524 | StringDotFormatMissingArguments | '...'.format(...) is missing argument(s) for placeholder(s): ... | |
| F525 | StringDotFormatMixingAutomatic | '...'.format(...) mixes automatic and manual numbering | |
| F541 | FStringMissingPlaceholders | f-string without any placeholders | 🛠 |
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal repeated | |
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated | |
| F601 | MultiValueRepeatedKeyLiteral | Dictionary key literal `...` repeated | 🛠 |
| F602 | MultiValueRepeatedKeyVariable | Dictionary key `...` repeated | 🛠 |
| F621 | ExpressionsInStarAssignment | Too many expressions in star-unpacking assignment | |
| F622 | TwoStarredExpressions | Two starred expressions in assignment | |
| F631 | AssertTuple | Assert test is a non-empty tuple, which is always `True` | |
@@ -914,8 +916,8 @@ For more, see [flake8-pytest-style](https://pypi.org/project/flake8-pytest-style
| PT001 | IncorrectFixtureParenthesesStyle | Use `@pytest.fixture()` over `@pytest.fixture` | 🛠 |
| PT002 | FixturePositionalArgs | Configuration for fixture `...` specified via positional args, use kwargs | |
| PT003 | ExtraneousScopeFunction | `scope='function'` is implied in `@pytest.fixture()` | |
| PT004 | MissingFixtureNameUnderscore | Fixture `...` does not return anything, add leading underscore | |
| PT005 | IncorrectFixtureNameUnderscore | Fixture `...` returns a value, remove leading underscore | |
| PT004 | MissingFixtureNameUnderscore | Fixture `...` does not return anything, add leading underscore | 🛠 |
| PT005 | IncorrectFixtureNameUnderscore | Fixture `...` returns a value, remove leading underscore | 🛠 |
| PT006 | ParametrizeNamesWrongType | Wrong name(s) type in `@pytest.mark.parametrize`, expected `tuple` | 🛠 |
| PT007 | ParametrizeValuesWrongType | Wrong values type in `@pytest.mark.parametrize` expected `list` of `tuple` | |
| PT008 | PatchWithLambda | Use `return_value=` instead of patching with `lambda` | |
@@ -933,8 +935,8 @@ For more, see [flake8-pytest-style](https://pypi.org/project/flake8-pytest-style
| PT021 | FixtureFinalizerCallback | Use `yield` instead of `request.addfinalizer` | |
| PT022 | UselessYieldFixture | No teardown in fixture `...`, use `return` instead of `yield` | 🛠 |
| PT023 | IncorrectMarkParenthesesStyle | Use `@pytest.mark....` over `@pytest.mark....()` | 🛠 |
| PT024 | UnnecessaryAsyncioMarkOnFixture | `pytest.mark.asyncio` is unnecessary for fixtures | |
| PT025 | ErroneousUseFixturesOnFixture | `pytest.mark.usefixtures` has no effect on fixtures | |
| PT024 | UnnecessaryAsyncioMarkOnFixture | `pytest.mark.asyncio` is unnecessary for fixtures | 🛠 |
| PT025 | ErroneousUseFixturesOnFixture | `pytest.mark.usefixtures` has no effect on fixtures | 🛠 |
| PT026 | UseFixturesWithoutParameters | Useless `pytest.mark.usefixtures` without parameters | 🛠 |
### flake8-quotes (Q)
@@ -1737,6 +1739,24 @@ allowed-confusables = ["", "ρ", ""]
---
#### [`builtins`](#builtins)
A list of builtins to treat as defined references, in addition to the
system builtins.
**Default value**: `[]`
**Type**: `Vec<String>`
**Example usage**:
```toml
[tool.ruff]
builtins = ["_"]
```
---
#### [`cache-dir`](#cache-dir)
A path to the cache directory.
@@ -1865,7 +1885,7 @@ by `ignore`.
**Default value**: `[]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -1884,7 +1904,7 @@ by `select`.
**Default value**: `[]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -1901,7 +1921,7 @@ extend-select = ["B", "Q"]
A list of check codes that are unsupported by Ruff, but should be
preserved when (e.g.) validating `# noqa` directives. Useful for
retaining `# noqa` directives that cover plugins not yet implemented
in Ruff.
by Ruff.
**Default value**: `[]`
@@ -1959,7 +1979,7 @@ A list of check code prefixes to consider autofix-able.
**Default value**: `["A", "ANN", "ARG", "B", "BLE", "C", "D", "E", "ERA", "F", "FBT", "I", "ICN", "N", "PGH", "PLC", "PLE", "PLR", "PLW", "Q", "RET", "RUF", "S", "T", "TID", "UP", "W", "YTT"]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -2031,7 +2051,7 @@ specific prefixes.
**Default value**: `[]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -2090,7 +2110,7 @@ when considering any matching files.
**Default value**: `{}`
**Type**: `HashMap<String, Vec<CheckCodePrefix>>`
**Type**: `HashMap<String, Vec<RuleCodePrefix>>`
**Example usage**:
@@ -2154,7 +2174,7 @@ specific prefixes.
**Default value**: `["E", "F"]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -2274,7 +2294,7 @@ A list of check code prefixes to consider un-autofix-able.
**Default value**: `[]`
**Type**: `Vec<CheckCodePrefix>`
**Type**: `Vec<RuleCodePrefix>`
**Example usage**:
@@ -2848,6 +2868,25 @@ force-single-line = true
---
#### [`force-sort-within-sections`](#force-sort-within-sections)
Don't sort straight-style imports (like `import sys`) before from-style
imports (like `from itertools import groupby`). Instead, sort the
imports by module, independent of import style.
**Default value**: `false`
**Type**: `bool`
**Example usage**:
```toml
[tool.ruff.isort]
force-sort-within-sections = true
```
---
#### [`force-wrap-aliases`](#force-wrap-aliases)
Force `import from` statements with multiple members and at least one

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.214"
version = "0.0.216"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.214"
version = "0.0.216"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.214-dev.0"
version = "0.0.216-dev.0"
edition = "2021"
[lib]
@@ -9,6 +9,7 @@ name = "flake8_to_ruff"
[dependencies]
anyhow = { version = "1.0.66" }
clap = { version = "4.0.1", features = ["derive"] }
colored = { version = "2.0.0" }
configparser = { version = "3.0.2" }
once_cell = { version = "1.16.0" }
regex = { version = "1.6.0" }

View File

@@ -0,0 +1,19 @@
[flake8]
# Ignore style and complexity
# E: style errors
# W: style warnings
# C: complexity
# D: docstring warnings (unused pydocstyle extension)
# F841: local variable assigned but never used
ignore = E, C, W, D, F841
builtins = c, get_config
exclude =
.cache,
.github,
docs,
jupyterhub/alembic*,
onbuild,
scripts,
share,
tools,
setup.py

View File

@@ -1,18 +1,19 @@
use std::collections::{BTreeSet, HashMap};
use anyhow::Result;
use colored::Colorize;
use ruff::flake8_pytest_style::types::{
ParametrizeNameType, ParametrizeValuesRowType, ParametrizeValuesType,
};
use ruff::flake8_quotes::settings::Quote;
use ruff::flake8_tidy_imports::settings::Strictness;
use ruff::pydocstyle::settings::Convention;
use ruff::registry::CheckCodePrefix;
use ruff::registry::RuleCodePrefix;
use ruff::settings::options::Options;
use ruff::settings::pyproject::Pyproject;
use ruff::{
flake8_annotations, flake8_bugbear, flake8_errmsg, flake8_pytest_style, flake8_quotes,
flake8_tidy_imports, mccabe, pep8_naming, pydocstyle,
flake8_tidy_imports, mccabe, pep8_naming, pydocstyle, warn_user,
};
use crate::black::Black;
@@ -30,7 +31,7 @@ pub fn convert(
.expect("Unable to find flake8 section in INI file");
// Extract all referenced check code prefixes, to power plugin inference.
let mut referenced_codes: BTreeSet<CheckCodePrefix> = BTreeSet::default();
let mut referenced_codes: BTreeSet<RuleCodePrefix> = BTreeSet::default();
for (key, value) in flake8 {
if let Some(value) = value {
match key.as_str() {
@@ -60,7 +61,7 @@ pub fn convert(
}
let from_codes = plugin::infer_plugins_from_codes(&referenced_codes);
if !from_codes.is_empty() {
eprintln!("Inferred plugins from referenced check codes: {from_codes:#?}");
eprintln!("Inferred plugins from referenced codes: {from_codes:#?}");
}
from_options.into_iter().chain(from_codes).collect()
});
@@ -99,9 +100,14 @@ pub fn convert(
if let Some(value) = value {
match key.as_str() {
// flake8
"builtins" => {
options.builtins = Some(parser::parse_strings(value.as_ref()));
}
"max-line-length" | "max_line_length" => match value.clone().parse::<usize>() {
Ok(line_length) => options.line_length = Some(line_length),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
"select" => {
// No-op (handled above).
@@ -130,7 +136,9 @@ pub fn convert(
options.per_file_ignores =
Some(parser::collect_per_file_ignores(per_file_ignores));
}
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-bugbear
@@ -142,46 +150,62 @@ pub fn convert(
"suppress-none-returning" | "suppress_none_returning" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_none_returning = Some(bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"suppress-dummy-args" | "suppress_dummy_args" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.suppress_dummy_args = Some(bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"mypy-init-return" | "mypy_init_return" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.mypy_init_return = Some(bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"allow-star-arg-any" | "allow_star_arg_any" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_annotations.allow_star_arg_any = Some(bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-quotes
"quotes" | "inline-quotes" | "inline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.inline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.inline_quotes = Some(Quote::Double),
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"multiline-quotes" | "multiline_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.multiline_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.multiline_quotes = Some(Quote::Double),
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"docstring-quotes" | "docstring_quotes" => match value.trim() {
"'" | "single" => flake8_quotes.docstring_quotes = Some(Quote::Single),
"\"" | "double" => flake8_quotes.docstring_quotes = Some(Quote::Double),
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
"avoid-escape" | "avoid_escape" => match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_quotes.avoid_escape = Some(bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// pep8-naming
"ignore-names" | "ignore_names" => {
@@ -201,7 +225,9 @@ pub fn convert(
"parents" => {
flake8_tidy_imports.ban_relative_imports = Some(Strictness::Parents);
}
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// flake8-docstrings
"docstring-convention" => match value.trim() {
@@ -209,12 +235,16 @@ pub fn convert(
"numpy" => pydocstyle.convention = Some(Convention::Numpy),
"pep257" => pydocstyle.convention = Some(Convention::Pep257),
"all" => pydocstyle.convention = None,
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
},
// mccabe
"max-complexity" | "max_complexity" => match value.clone().parse::<usize>() {
Ok(max_complexity) => mccabe.max_complexity = Some(max_complexity),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
},
// flake8-errmsg
"errmsg-max-string-length" | "errmsg_max_string_length" => {
@@ -222,14 +252,18 @@ pub fn convert(
Ok(max_string_length) => {
flake8_errmsg.max_string_length = Some(max_string_length);
}
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// flake8-pytest-style
"pytest-fixture-no-parentheses" | "pytest_fixture_no_parentheses " => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.fixture_parentheses = Some(!bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
"pytest-parametrize-names-type" | "pytest_parametrize_names_type" => {
@@ -246,7 +280,9 @@ pub fn convert(
flake8_pytest_style.parametrize_names_type =
Some(ParametrizeNameType::List);
}
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-type" | "pytest_parametrize_values_type" => {
@@ -259,7 +295,9 @@ pub fn convert(
flake8_pytest_style.parametrize_values_type =
Some(ParametrizeValuesType::List);
}
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-parametrize-values-row-type" | "pytest_parametrize_values_row_type" => {
@@ -272,7 +310,9 @@ pub fn convert(
flake8_pytest_style.parametrize_values_row_type =
Some(ParametrizeValuesRowType::List);
}
_ => eprintln!("Unexpected '{key}' value: {value}"),
_ => {
warn_user!("Unexpected '{key}' value: {value}");
}
}
}
"pytest-raises-require-match-for" | "pytest_raises_require_match_for" => {
@@ -282,11 +322,15 @@ pub fn convert(
"pytest-mark-no-parentheses" | "pytest_mark_no_parentheses" => {
match parser::parse_bool(value.as_ref()) {
Ok(bool) => flake8_pytest_style.mark_parentheses = Some(!bool),
Err(e) => eprintln!("Unable to parse '{key}' property: {e}"),
Err(e) => {
warn_user!("Unable to parse '{key}' property: {e}");
}
}
}
// Unknown
_ => eprintln!("Skipping unsupported property: {key}"),
_ => {
warn_user!("Skipping unsupported property: {}", key);
}
}
}
}
@@ -345,7 +389,7 @@ mod tests {
use anyhow::Result;
use ruff::pydocstyle::settings::Convention;
use ruff::registry::CheckCodePrefix;
use ruff::registry::RuleCodePrefix;
use ruff::settings::options::Options;
use ruff::settings::pyproject::Pyproject;
use ruff::{flake8_quotes, pydocstyle};
@@ -362,6 +406,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -382,9 +427,9 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -425,6 +470,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -445,9 +491,9 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -488,6 +534,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -508,9 +555,9 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -551,6 +598,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -571,9 +619,9 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -614,6 +662,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -634,9 +683,9 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -685,6 +734,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -705,10 +755,10 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::D,
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::W,
RuleCodePrefix::D,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::W,
]),
show_source: None,
src: None,
@@ -751,6 +801,7 @@ mod tests {
)?;
let expected = Pyproject::new(Options {
allowed_confusables: None,
builtins: None,
cache_dir: None,
dummy_variable_rgx: None,
exclude: None,
@@ -771,10 +822,10 @@ mod tests {
required_version: None,
respect_gitignore: None,
select: Some(vec![
CheckCodePrefix::E,
CheckCodePrefix::F,
CheckCodePrefix::Q,
CheckCodePrefix::W,
RuleCodePrefix::E,
RuleCodePrefix::F,
RuleCodePrefix::Q,
RuleCodePrefix::W,
]),
show_source: None,
src: None,

View File

@@ -1,18 +1,20 @@
use std::str::FromStr;
use anyhow::{bail, Result};
use colored::Colorize;
use once_cell::sync::Lazy;
use regex::Regex;
use ruff::registry::{CheckCodePrefix, PREFIX_REDIRECTS};
use ruff::registry::{RuleCodePrefix, PREFIX_REDIRECTS};
use ruff::settings::types::PatternPrefixPair;
use ruff::warn_user;
use rustc_hash::FxHashMap;
static COMMA_SEPARATED_LIST_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Parse a comma-separated list of `CheckCodePrefix` values (e.g.,
/// Parse a comma-separated list of `RuleCodePrefix` values (e.g.,
/// "F401,E501").
pub fn parse_prefix_codes(value: &str) -> Vec<CheckCodePrefix> {
let mut codes: Vec<CheckCodePrefix> = vec![];
pub fn parse_prefix_codes(value: &str) -> Vec<RuleCodePrefix> {
let mut codes: Vec<RuleCodePrefix> = vec![];
for code in COMMA_SEPARATED_LIST_RE.split(value) {
let code = code.trim();
if code.is_empty() {
@@ -20,10 +22,10 @@ pub fn parse_prefix_codes(value: &str) -> Vec<CheckCodePrefix> {
}
if let Some(code) = PREFIX_REDIRECTS.get(code) {
codes.push(code.clone());
} else if let Ok(code) = CheckCodePrefix::from_str(code) {
} else if let Ok(code) = RuleCodePrefix::from_str(code) {
codes.push(code);
} else {
eprintln!("Unsupported prefix code: {code}");
warn_user!("Unsupported prefix code: {code}");
}
}
codes
@@ -81,7 +83,8 @@ impl State {
}
}
/// Generate the list of `StrCheckCodePair` pairs for the current state.
/// Generate the list of `StrRuleCodePair` pairs for the current
/// state.
fn parse(&self) -> Vec<PatternPrefixPair> {
let mut codes: Vec<PatternPrefixPair> = vec![];
for code in &self.codes {
@@ -92,7 +95,7 @@ impl State {
prefix: code.clone(),
});
}
} else if let Ok(code) = CheckCodePrefix::from_str(code) {
} else if let Ok(code) = RuleCodePrefix::from_str(code) {
for filename in &self.filenames {
codes.push(PatternPrefixPair {
pattern: filename.clone(),
@@ -100,7 +103,7 @@ impl State {
});
}
} else {
eprintln!("Unsupported prefix code: {code}");
warn_user!("Unsupported prefix code: {code}");
}
}
codes
@@ -186,8 +189,8 @@ pub fn parse_files_to_codes_mapping(value: &str) -> Result<Vec<PatternPrefixPair
/// Collect a list of `PatternPrefixPair` structs as a `BTreeMap`.
pub fn collect_per_file_ignores(
pairs: Vec<PatternPrefixPair>,
) -> FxHashMap<String, Vec<CheckCodePrefix>> {
let mut per_file_ignores: FxHashMap<String, Vec<CheckCodePrefix>> = FxHashMap::default();
) -> FxHashMap<String, Vec<RuleCodePrefix>> {
let mut per_file_ignores: FxHashMap<String, Vec<RuleCodePrefix>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)
@@ -200,7 +203,7 @@ pub fn collect_per_file_ignores(
#[cfg(test)]
mod tests {
use anyhow::Result;
use ruff::registry::CheckCodePrefix;
use ruff::registry::RuleCodePrefix;
use ruff::settings::types::PatternPrefixPair;
use crate::parser::{parse_files_to_codes_mapping, parse_prefix_codes, parse_strings};
@@ -208,27 +211,27 @@ mod tests {
#[test]
fn it_parses_prefix_codes() {
let actual = parse_prefix_codes("");
let expected: Vec<CheckCodePrefix> = vec![];
let expected: Vec<RuleCodePrefix> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes(" ");
let expected: Vec<CheckCodePrefix> = vec![];
let expected: Vec<RuleCodePrefix> = vec![];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401");
let expected = vec![CheckCodePrefix::F401];
let expected = vec![RuleCodePrefix::F401];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,");
let expected = vec![CheckCodePrefix::F401];
let expected = vec![RuleCodePrefix::F401];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401,E501");
let expected = vec![CheckCodePrefix::F401, CheckCodePrefix::E501];
let expected = vec![RuleCodePrefix::F401, RuleCodePrefix::E501];
assert_eq!(actual, expected);
let actual = parse_prefix_codes("F401, E501");
let expected = vec![CheckCodePrefix::F401, CheckCodePrefix::E501];
let expected = vec![RuleCodePrefix::F401, RuleCodePrefix::E501];
assert_eq!(actual, expected);
}
@@ -281,11 +284,11 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "locust/test/*".to_string(),
prefix: CheckCodePrefix::F841,
prefix: RuleCodePrefix::F841,
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: CheckCodePrefix::F841,
prefix: RuleCodePrefix::F841,
},
];
assert_eq!(actual, expected);
@@ -301,23 +304,23 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "t/*".to_string(),
prefix: CheckCodePrefix::D,
prefix: RuleCodePrefix::D,
},
PatternPrefixPair {
pattern: "setup.py".to_string(),
prefix: CheckCodePrefix::D,
prefix: RuleCodePrefix::D,
},
PatternPrefixPair {
pattern: "examples/*".to_string(),
prefix: CheckCodePrefix::D,
prefix: RuleCodePrefix::D,
},
PatternPrefixPair {
pattern: "docs/*".to_string(),
prefix: CheckCodePrefix::D,
prefix: RuleCodePrefix::D,
},
PatternPrefixPair {
pattern: "extra/*".to_string(),
prefix: CheckCodePrefix::D,
prefix: RuleCodePrefix::D,
},
];
assert_eq!(actual, expected);
@@ -339,47 +342,47 @@ mod tests {
let expected: Vec<PatternPrefixPair> = vec![
PatternPrefixPair {
pattern: "scrapy/__init__.py".to_string(),
prefix: CheckCodePrefix::E402,
prefix: RuleCodePrefix::E402,
},
PatternPrefixPair {
pattern: "scrapy/core/downloader/handlers/http.py".to_string(),
prefix: CheckCodePrefix::F401,
prefix: RuleCodePrefix::F401,
},
PatternPrefixPair {
pattern: "scrapy/http/__init__.py".to_string(),
prefix: CheckCodePrefix::F401,
prefix: RuleCodePrefix::F401,
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: CheckCodePrefix::E402,
prefix: RuleCodePrefix::E402,
},
PatternPrefixPair {
pattern: "scrapy/linkextractors/__init__.py".to_string(),
prefix: CheckCodePrefix::F401,
prefix: RuleCodePrefix::F401,
},
PatternPrefixPair {
pattern: "scrapy/selector/__init__.py".to_string(),
prefix: CheckCodePrefix::F401,
prefix: RuleCodePrefix::F401,
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: CheckCodePrefix::E402,
prefix: RuleCodePrefix::E402,
},
PatternPrefixPair {
pattern: "scrapy/spiders/__init__.py".to_string(),
prefix: CheckCodePrefix::F401,
prefix: RuleCodePrefix::F401,
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: CheckCodePrefix::F403,
prefix: RuleCodePrefix::F403,
},
PatternPrefixPair {
pattern: "scrapy/utils/url.py".to_string(),
prefix: CheckCodePrefix::F405,
prefix: RuleCodePrefix::F405,
},
PatternPrefixPair {
pattern: "tests/test_loader.py".to_string(),
prefix: CheckCodePrefix::E741,
prefix: RuleCodePrefix::E741,
},
];
assert_eq!(actual, expected);

View File

@@ -3,7 +3,7 @@ use std::fmt;
use std::str::FromStr;
use anyhow::anyhow;
use ruff::registry::CheckCodePrefix;
use ruff::registry::RuleCodePrefix;
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Plugin {
@@ -97,53 +97,32 @@ impl fmt::Debug for Plugin {
}
impl Plugin {
pub fn prefix(&self) -> CheckCodePrefix {
pub fn prefix(&self) -> RuleCodePrefix {
match self {
Plugin::Flake8Annotations => CheckCodePrefix::ANN,
Plugin::Flake8Bandit => CheckCodePrefix::S,
Plugin::Flake8Annotations => RuleCodePrefix::ANN,
Plugin::Flake8Bandit => RuleCodePrefix::S,
// TODO(charlie): Handle rename of `B` to `BLE`.
Plugin::Flake8BlindExcept => CheckCodePrefix::BLE,
Plugin::Flake8Bugbear => CheckCodePrefix::B,
Plugin::Flake8Builtins => CheckCodePrefix::A,
Plugin::Flake8Comprehensions => CheckCodePrefix::C4,
Plugin::Flake8Datetimez => CheckCodePrefix::DTZ,
Plugin::Flake8Debugger => CheckCodePrefix::T1,
Plugin::Flake8Docstrings => CheckCodePrefix::D,
Plugin::Flake8BlindExcept => RuleCodePrefix::BLE,
Plugin::Flake8Bugbear => RuleCodePrefix::B,
Plugin::Flake8Builtins => RuleCodePrefix::A,
Plugin::Flake8Comprehensions => RuleCodePrefix::C4,
Plugin::Flake8Datetimez => RuleCodePrefix::DTZ,
Plugin::Flake8Debugger => RuleCodePrefix::T1,
Plugin::Flake8Docstrings => RuleCodePrefix::D,
// TODO(charlie): Handle rename of `E` to `ERA`.
Plugin::Flake8Eradicate => CheckCodePrefix::ERA,
Plugin::Flake8ErrMsg => CheckCodePrefix::EM,
Plugin::Flake8ImplicitStrConcat => CheckCodePrefix::ISC,
Plugin::Flake8Print => CheckCodePrefix::T2,
Plugin::Flake8PytestStyle => CheckCodePrefix::PT,
Plugin::Flake8Quotes => CheckCodePrefix::Q,
Plugin::Flake8Return => CheckCodePrefix::RET,
Plugin::Flake8Simplify => CheckCodePrefix::SIM,
Plugin::Flake8TidyImports => CheckCodePrefix::TID25,
Plugin::McCabe => CheckCodePrefix::C9,
Plugin::PandasVet => CheckCodePrefix::PD,
Plugin::PEP8Naming => CheckCodePrefix::N,
Plugin::Pyupgrade => CheckCodePrefix::UP,
}
}
}
pub enum DocstringConvention {
All,
Pep257,
Numpy,
Google,
}
impl FromStr for DocstringConvention {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"all" => Ok(DocstringConvention::All),
"pep257" => Ok(DocstringConvention::Pep257),
"numpy" => Ok(DocstringConvention::Numpy),
"google" => Ok(DocstringConvention::Google),
_ => Err(anyhow!("Unknown docstring convention: {string}")),
Plugin::Flake8Eradicate => RuleCodePrefix::ERA,
Plugin::Flake8ErrMsg => RuleCodePrefix::EM,
Plugin::Flake8ImplicitStrConcat => RuleCodePrefix::ISC,
Plugin::Flake8Print => RuleCodePrefix::T2,
Plugin::Flake8PytestStyle => RuleCodePrefix::PT,
Plugin::Flake8Quotes => RuleCodePrefix::Q,
Plugin::Flake8Return => RuleCodePrefix::RET,
Plugin::Flake8Simplify => RuleCodePrefix::SIM,
Plugin::Flake8TidyImports => RuleCodePrefix::TID25,
Plugin::McCabe => RuleCodePrefix::C9,
Plugin::PandasVet => RuleCodePrefix::PD,
Plugin::PEP8Naming => RuleCodePrefix::N,
Plugin::Pyupgrade => RuleCodePrefix::UP,
}
}
}
@@ -269,7 +248,7 @@ pub fn infer_plugins_from_options(flake8: &HashMap<String, Option<String>>) -> V
///
/// For example, if the user ignores `ANN101`, we should infer that
/// `flake8-annotations` is active.
pub fn infer_plugins_from_codes(codes: &BTreeSet<CheckCodePrefix>) -> Vec<Plugin> {
pub fn infer_plugins_from_codes(codes: &BTreeSet<RuleCodePrefix>) -> Vec<Plugin> {
[
Plugin::Flake8Annotations,
Plugin::Flake8Bandit,
@@ -307,9 +286,10 @@ pub fn infer_plugins_from_codes(codes: &BTreeSet<CheckCodePrefix>) -> Vec<Plugin
.collect()
}
/// Resolve the set of enabled `CheckCodePrefix` values for the given plugins.
pub fn resolve_select(plugins: &[Plugin]) -> BTreeSet<CheckCodePrefix> {
let mut select = BTreeSet::from([CheckCodePrefix::F, CheckCodePrefix::E, CheckCodePrefix::W]);
/// Resolve the set of enabled `RuleCodePrefix` values for the given
/// plugins.
pub fn resolve_select(plugins: &[Plugin]) -> BTreeSet<RuleCodePrefix> {
let mut select = BTreeSet::from([RuleCodePrefix::F, RuleCodePrefix::E, RuleCodePrefix::W]);
select.extend(plugins.iter().map(Plugin::prefix));
select
}

View File

@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "ruff"
version = "0.0.214"
version = "0.0.216"
description = "An extremely fast Python linter, written in Rust."
authors = [
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },

View File

@@ -77,6 +77,34 @@ class C:
def f(x):
...
def f(self, x):
"""Docstring."""
def f(self, x):
"""Docstring."""
...
def f(self, x):
pass
def f(self, x):
raise NotImplementedError
def f(self, x):
raise NotImplementedError()
def f(self, x):
raise NotImplementedError("...")
def f(self, x):
raise NotImplemented
def f(self, x):
raise NotImplemented()
def f(self, x):
raise NotImplemented("...")
###
# Unused functions attached to abstract methods (OK).
###

View File

@@ -0,0 +1,11 @@
from a import a1 # import_from
from c import * # import_from_star
import a # import
import c.d
import b as b1 # import_as
from ..parent import *
from .my import fn
from . import my
from .my.nested import fn2
from ...grandparent import fn3

View File

@@ -1,10 +1,20 @@
# isort: off
import sys
import os
import collections
# isort: on
def f():
# isort: off
import sys
import os
import collections
# isort: on
import sys
import os # isort: skip
import collections
import abc
def f():
import sys
import os # isort: skip
import collections
import abc
def f():
import sys
import os # isort:skip
import collections
import abc

View File

@@ -10,3 +10,41 @@ x = {
b"123": 1,
b"123": 4,
}
x = {
"a": 1,
"a": 2,
"a": 3,
"a": 3,
}
x = {
"a": 1,
"a": 2,
"a": 3,
"a": 3,
"a": 4,
}
x = {
"a": 1,
"a": 1,
"a": 2,
"a": 3,
"a": 4,
}
x = {
a: 1,
"a": 1,
a: 1,
"a": 2,
a: 2,
"a": 3,
a: 3,
"a": 3,
a: 4,
}
x = {"a": 1, "a": 1}
x = {"a": 1, "b": 2, "a": 1}

View File

@@ -5,3 +5,41 @@ x = {
a: 2,
b: 3,
}
x = {
a: 1,
a: 2,
a: 3,
a: 3,
}
x = {
a: 1,
a: 2,
a: 3,
a: 3,
a: 4,
}
x = {
a: 1,
a: 1,
a: 2,
a: 3,
a: 4,
}
x = {
a: 1,
"a": 1,
a: 1,
"a": 2,
a: 2,
"a": 3,
a: 3,
"a": 3,
a: 4,
}
x = {a: 1, a: 1}
x = {a: 1, b: 2, a: 1}

View File

@@ -0,0 +1 @@
_("Translations")

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.214"
version = "0.0.216"
edition = "2021"
[dependencies]

View File

@@ -3,7 +3,7 @@
use anyhow::Result;
use clap::Args;
use itertools::Itertools;
use ruff::registry::{CheckCategory, CheckCode};
use ruff::registry::{RuleCode, RuleOrigin};
use strum::IntoEnumIterator;
use crate::utils::replace_readme_section;
@@ -25,24 +25,24 @@ pub fn main(cli: &Cli) -> Result<()> {
// Generate the table string.
let mut table_out = String::new();
let mut toc_out = String::new();
for check_category in CheckCategory::iter() {
let codes_csv: String = check_category.codes().iter().map(AsRef::as_ref).join(", ");
table_out.push_str(&format!("### {} ({codes_csv})", check_category.title()));
for origin in RuleOrigin::iter() {
let codes_csv: String = origin.codes().iter().map(AsRef::as_ref).join(", ");
table_out.push_str(&format!("### {} ({codes_csv})", origin.title()));
table_out.push('\n');
table_out.push('\n');
toc_out.push_str(&format!(
" 1. [{} ({})](#{}-{})\n",
check_category.title(),
origin.title(),
codes_csv,
check_category.title().to_lowercase().replace(' ', "-"),
origin.title().to_lowercase().replace(' ', "-"),
codes_csv.to_lowercase().replace(',', "-").replace(' ', "")
));
if let Some((url, platform)) = check_category.url() {
if let Some((url, platform)) = origin.url() {
table_out.push_str(&format!(
"For more, see [{}]({}) on {}.",
check_category.title(),
origin.title(),
url,
platform
));
@@ -55,15 +55,15 @@ pub fn main(cli: &Cli) -> Result<()> {
table_out.push_str("| ---- | ---- | ------- | --- |");
table_out.push('\n');
for check_code in CheckCode::iter() {
if check_code.category() == check_category {
let check_kind = check_code.kind();
let fix_token = if check_kind.fixable() { "🛠" } else { "" };
for rule_code in RuleCode::iter() {
if rule_code.origin() == origin {
let kind = rule_code.kind();
let fix_token = if kind.fixable() { "🛠" } else { "" };
table_out.push_str(&format!(
"| {} | {} | {} | {} |",
check_kind.code().as_ref(),
check_kind.as_ref(),
check_kind.summary().replace('|', r"\|"),
kind.code().as_ref(),
kind.as_ref(),
kind.summary().replace('|', r"\|"),
fix_token
));
table_out.push('\n');

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_macros"
version = "0.0.214"
version = "0.0.216"
edition = "2021"
[lib]

View File

@@ -14,8 +14,8 @@
use syn::{parse_macro_input, DeriveInput};
mod check_code_prefix;
mod config;
mod rule_code_prefix;
#[proc_macro_derive(ConfigurationOptions, attributes(option, doc, option_group))]
pub fn derive_config(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
@@ -26,11 +26,11 @@ pub fn derive_config(input: proc_macro::TokenStream) -> proc_macro::TokenStream
.into()
}
#[proc_macro_derive(CheckCodePrefix)]
pub fn derive_check_code_prefix(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
#[proc_macro_derive(RuleCodePrefix)]
pub fn derive_rule_code_prefix(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as DeriveInput);
check_code_prefix::derive_impl(input)
rule_code_prefix::derive_impl(input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}

View File

@@ -9,7 +9,8 @@ use syn::{DataEnum, DeriveInput, Ident, Variant};
const ALL: &str = "ALL";
/// A hash map from deprecated `CheckCodePrefix` to latest `CheckCodePrefix`.
/// A hash map from deprecated `RuleCodePrefix` to latest
/// `RuleCodePrefix`.
pub static PREFIX_REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
HashMap::from_iter([
// TODO(charlie): Remove by 2023-01-01.
@@ -90,7 +91,7 @@ pub fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream>
let syn::Data::Enum(DataEnum { variants, .. }) = data else {
return Err(syn::Error::new(
ident.span(),
"Can only derive `CheckCodePrefix` from enums.",
"Can only derive `RuleCodePrefix` from enums.",
));
};
@@ -117,7 +118,7 @@ fn expand(
prefix_ident: &Ident,
variants: &Punctuated<Variant, Comma>,
) -> proc_macro2::TokenStream {
// Build up a map from prefix to matching CheckCodes.
// Build up a map from prefix to matching RuleCodes.
let mut prefix_to_codes: BTreeMap<Ident, BTreeSet<String>> = BTreeMap::default();
for variant in variants {
let span = variant.ident.span();
@@ -141,12 +142,12 @@ fn expand(
}
// Add any prefix aliases (e.g., "U" to "UP").
for (alias, check_code) in PREFIX_REDIRECTS.iter() {
for (alias, rule_code) in PREFIX_REDIRECTS.iter() {
prefix_to_codes.insert(
Ident::new(alias, Span::call_site()),
prefix_to_codes
.get(&Ident::new(check_code, Span::call_site()))
.unwrap_or_else(|| panic!("Unknown CheckCode: {alias:?}"))
.get(&Ident::new(rule_code, Span::call_site()))
.unwrap_or_else(|| panic!("Unknown RuleCode: {alias:?}"))
.clone(),
);
}
@@ -159,8 +160,8 @@ fn expand(
let prefix_impl = generate_impls(ident, prefix_ident, &prefix_to_codes);
let prefix_redirects = PREFIX_REDIRECTS.iter().map(|(alias, check_code)| {
let code = Ident::new(check_code, Span::call_site());
let prefix_redirects = PREFIX_REDIRECTS.iter().map(|(alias, rule_code)| {
let code = Ident::new(rule_code, Span::call_site());
quote! {
(#alias, #prefix_ident::#code)
}
@@ -186,7 +187,7 @@ fn expand(
#prefix_impl
/// A hash map from deprecated `CheckCodePrefix` to latest `CheckCodePrefix`.
/// A hash map from deprecated `RuleCodePrefix` to latest `RuleCodePrefix`.
pub static PREFIX_REDIRECTS: ::once_cell::sync::Lazy<::rustc_hash::FxHashMap<&'static str, #prefix_ident>> = ::once_cell::sync::Lazy::new(|| {
::rustc_hash::FxHashMap::from_iter([
#(#prefix_redirects),*
@@ -211,11 +212,8 @@ fn generate_impls(
if let Some(target) = PREFIX_REDIRECTS.get(prefix_str.as_str()) {
quote! {
#prefix_ident::#prefix => {
crate::one_time_warning!(
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
format!("`{}` has been remapped to `{}`", #prefix_str, #target).bold()
crate::warn_user_once!(
"`{}` has been remapped to `{}`", #prefix_str, #target
);
vec![#(#codes),*]
}

View File

@@ -43,9 +43,9 @@ def main(*, name: str, code: str, plugin: str) -> None:
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/mod.rs"), "w") as fp:
for line in content.splitlines():
if line.strip() == "fn checks(check_code: CheckCode, path: &Path) -> Result<()> {":
indent = line.split("fn checks(check_code: CheckCode, path: &Path) -> Result<()> {")[0]
fp.write(f'{indent}#[test_case(CheckCode::{code}, Path::new("{code}.py"); "{code}")]')
if line.strip() == "fn rules(check_code: RuleCode, path: &Path) -> Result<()> {":
indent = line.split("fn rules(check_code: RuleCode, path: &Path) -> Result<()> {")[0]
fp.write(f'{indent}#[test_case(RuleCode::{code}, Path::new("{code}.py"); "{code}")]')
fp.write("\n")
fp.write(line)
@@ -73,39 +73,39 @@ pub fn {snake_case(name)}(checker: &mut Checker) {{}}
if line.strip() == f"// {plugin}":
if index == 0:
# `CheckCode` definition
# `RuleCode` definition
indent = line.split(f"// {plugin}")[0]
fp.write(f"{indent}{code},")
fp.write("\n")
elif index == 1:
# `CheckKind` definition
# `DiagnosticKind` definition
indent = line.split(f"// {plugin}")[0]
fp.write(f"{indent}{name},")
fp.write("\n")
elif index == 2:
# `CheckCode#kind()`
# `RuleCode#kind()`
indent = line.split(f"// {plugin}")[0]
fp.write(f"{indent}CheckCode::{code} => CheckKind::{name},")
fp.write(f"{indent}RuleCode::{code} => DiagnosticKind::{name},")
fp.write("\n")
elif index == 3:
# `CheckCode#category()`
# `RuleCode#category()`
indent = line.split(f"// {plugin}")[0]
fp.write(f"{indent}CheckCode::{code} => CheckCategory::{pascal_case(plugin)},")
fp.write(f"{indent}RuleCode::{code} => CheckCategory::{pascal_case(plugin)},")
fp.write("\n")
elif index == 4:
# `CheckKind#code()`
# `DiagnosticKind#code()`
indent = line.split(f"// {plugin}")[0]
fp.write(f"{indent}CheckKind::{name} => &CheckCode::{code},")
fp.write(f"{indent}DiagnosticKind::{name} => &RuleCode::{code},")
fp.write("\n")
elif index == 5:
# `CheckCode#body`
# `RuleCode#body`
indent = line.split(f"// {plugin}")[0]
fp.write(f'{indent}CheckKind::{name} => todo!("Write message body for {code}"),')
fp.write(f'{indent}DiagnosticKind::{name} => todo!("Write message body for {code}"),')
fp.write("\n")
index += 1

View File

@@ -31,10 +31,10 @@ def main(*, plugin: str, url: str) -> None:
# Create the Rust module.
os.makedirs(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}"), exist_ok=True)
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/plugins.rs"), "a"):
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/rules"), "a"):
pass
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/mod.rs"), "w+") as fp:
fp.write("pub mod plugins;\n")
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/rules"), "w+") as fp:
fp.write("pub mod rules;\n")
fp.write("\n")
fp.write(
"""#[cfg(test)]
@@ -45,19 +45,19 @@ mod tests {
use anyhow::Result;
use test_case::test_case;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::linter::test_path;
use crate::settings;
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
fn rules(check_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
let diagnostics =test_path(
Path::new("./resources/test/fixtures/%s")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}
@@ -90,8 +90,8 @@ mod tests {
fp.write(f'{indent}CheckCategory::{pascal_case(plugin)} => "{plugin}",')
fp.write("\n")
elif line.strip() == "CheckCategory::Ruff => vec![CheckCodePrefix::RUF],":
indent = line.split("CheckCategory::Ruff => vec![CheckCodePrefix::RUF],")[0]
elif line.strip() == "CheckCategory::Ruff => vec![RuleCodePrefix::RUF],":
indent = line.split("CheckCategory::Ruff => vec![RuleCodePrefix::RUF],")[0]
fp.write(
f"{indent}CheckCategory::{pascal_case(plugin)} => vec![\n"
f'{indent} todo!("Fill-in prefix after generating codes")\n'

524
src/ast/comparable.rs Normal file
View File

@@ -0,0 +1,524 @@
//! An equivalent object hierarchy to the `Expr` hierarchy, but with the ability
//! to compare expressions for equality (via `Eq` and `Hash`).
use num_bigint::BigInt;
use rustpython_ast::{
Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, Expr, ExprContext, ExprKind, Keyword,
Operator, Unaryop,
};
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableExprContext {
Load,
Store,
Del,
}
impl From<&ExprContext> for ComparableExprContext {
fn from(ctx: &ExprContext) -> Self {
match ctx {
ExprContext::Load => Self::Load,
ExprContext::Store => Self::Store,
ExprContext::Del => Self::Del,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableBoolop {
And,
Or,
}
impl From<&Boolop> for ComparableBoolop {
fn from(op: &Boolop) -> Self {
match op {
Boolop::And => Self::And,
Boolop::Or => Self::Or,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableOperator {
Add,
Sub,
Mult,
MatMult,
Div,
Mod,
Pow,
LShift,
RShift,
BitOr,
BitXor,
BitAnd,
FloorDiv,
}
impl From<&Operator> for ComparableOperator {
fn from(op: &Operator) -> Self {
match op {
Operator::Add => Self::Add,
Operator::Sub => Self::Sub,
Operator::Mult => Self::Mult,
Operator::MatMult => Self::MatMult,
Operator::Div => Self::Div,
Operator::Mod => Self::Mod,
Operator::Pow => Self::Pow,
Operator::LShift => Self::LShift,
Operator::RShift => Self::RShift,
Operator::BitOr => Self::BitOr,
Operator::BitXor => Self::BitXor,
Operator::BitAnd => Self::BitAnd,
Operator::FloorDiv => Self::FloorDiv,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableUnaryop {
Invert,
Not,
UAdd,
USub,
}
impl From<&Unaryop> for ComparableUnaryop {
fn from(op: &Unaryop) -> Self {
match op {
Unaryop::Invert => Self::Invert,
Unaryop::Not => Self::Not,
Unaryop::UAdd => Self::UAdd,
Unaryop::USub => Self::USub,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableCmpop {
Eq,
NotEq,
Lt,
LtE,
Gt,
GtE,
Is,
IsNot,
In,
NotIn,
}
impl From<&Cmpop> for ComparableCmpop {
fn from(op: &Cmpop) -> Self {
match op {
Cmpop::Eq => Self::Eq,
Cmpop::NotEq => Self::NotEq,
Cmpop::Lt => Self::Lt,
Cmpop::LtE => Self::LtE,
Cmpop::Gt => Self::Gt,
Cmpop::GtE => Self::GtE,
Cmpop::Is => Self::Is,
Cmpop::IsNot => Self::IsNot,
Cmpop::In => Self::In,
Cmpop::NotIn => Self::NotIn,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableConstant<'a> {
None,
Bool(&'a bool),
Str(&'a str),
Bytes(&'a [u8]),
Int(&'a BigInt),
Tuple(Vec<ComparableConstant<'a>>),
Float(u64),
Complex { real: u64, imag: u64 },
Ellipsis,
}
impl<'a> From<&'a Constant> for ComparableConstant<'a> {
fn from(constant: &'a Constant) -> Self {
match constant {
Constant::None => Self::None,
Constant::Bool(value) => Self::Bool(value),
Constant::Str(value) => Self::Str(value),
Constant::Bytes(value) => Self::Bytes(value),
Constant::Int(value) => Self::Int(value),
Constant::Tuple(value) => {
Self::Tuple(value.iter().map(std::convert::Into::into).collect())
}
Constant::Float(value) => Self::Float(value.to_bits()),
Constant::Complex { real, imag } => Self::Complex {
real: real.to_bits(),
imag: imag.to_bits(),
},
Constant::Ellipsis => Self::Ellipsis,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ComparableArguments<'a> {
pub posonlyargs: Vec<ComparableArg<'a>>,
pub args: Vec<ComparableArg<'a>>,
pub vararg: Option<ComparableArg<'a>>,
pub kwonlyargs: Vec<ComparableArg<'a>>,
pub kw_defaults: Vec<ComparableExpr<'a>>,
pub kwarg: Option<ComparableArg<'a>>,
pub defaults: Vec<ComparableExpr<'a>>,
}
impl<'a> From<&'a Arguments> for ComparableArguments<'a> {
fn from(arguments: &'a Arguments) -> Self {
Self {
posonlyargs: arguments
.posonlyargs
.iter()
.map(std::convert::Into::into)
.collect(),
args: arguments
.args
.iter()
.map(std::convert::Into::into)
.collect(),
vararg: arguments.vararg.as_ref().map(std::convert::Into::into),
kwonlyargs: arguments
.kwonlyargs
.iter()
.map(std::convert::Into::into)
.collect(),
kw_defaults: arguments
.kw_defaults
.iter()
.map(std::convert::Into::into)
.collect(),
kwarg: arguments.vararg.as_ref().map(std::convert::Into::into),
defaults: arguments
.defaults
.iter()
.map(std::convert::Into::into)
.collect(),
}
}
}
impl<'a> From<&'a Box<Arg>> for ComparableArg<'a> {
fn from(arg: &'a Box<Arg>) -> Self {
(&**arg).into()
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ComparableArg<'a> {
pub arg: &'a str,
pub annotation: Option<Box<ComparableExpr<'a>>>,
pub type_comment: Option<&'a str>,
}
impl<'a> From<&'a Arg> for ComparableArg<'a> {
fn from(arg: &'a Arg) -> Self {
Self {
arg: &arg.node.arg,
annotation: arg.node.annotation.as_ref().map(std::convert::Into::into),
type_comment: arg.node.type_comment.as_deref(),
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ComparableKeyword<'a> {
pub arg: Option<&'a str>,
pub value: ComparableExpr<'a>,
}
impl<'a> From<&'a Keyword> for ComparableKeyword<'a> {
fn from(keyword: &'a Keyword) -> Self {
Self {
arg: keyword.node.arg.as_deref(),
value: (&keyword.node.value).into(),
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ComparableComprehension<'a> {
pub target: ComparableExpr<'a>,
pub iter: ComparableExpr<'a>,
pub ifs: Vec<ComparableExpr<'a>>,
pub is_async: &'a usize,
}
impl<'a> From<&'a Comprehension> for ComparableComprehension<'a> {
fn from(comprehension: &'a Comprehension) -> Self {
Self {
target: (&comprehension.target).into(),
iter: (&comprehension.iter).into(),
ifs: comprehension
.ifs
.iter()
.map(std::convert::Into::into)
.collect(),
is_async: &comprehension.is_async,
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum ComparableExpr<'a> {
BoolOp {
op: ComparableBoolop,
values: Vec<ComparableExpr<'a>>,
},
NamedExpr {
target: Box<ComparableExpr<'a>>,
value: Box<ComparableExpr<'a>>,
},
BinOp {
left: Box<ComparableExpr<'a>>,
op: ComparableOperator,
right: Box<ComparableExpr<'a>>,
},
UnaryOp {
op: ComparableUnaryop,
operand: Box<ComparableExpr<'a>>,
},
Lambda {
args: ComparableArguments<'a>,
body: Box<ComparableExpr<'a>>,
},
IfExp {
test: Box<ComparableExpr<'a>>,
body: Box<ComparableExpr<'a>>,
orelse: Box<ComparableExpr<'a>>,
},
Dict {
keys: Vec<ComparableExpr<'a>>,
values: Vec<ComparableExpr<'a>>,
},
Set {
elts: Vec<ComparableExpr<'a>>,
},
ListComp {
elt: Box<ComparableExpr<'a>>,
generators: Vec<ComparableComprehension<'a>>,
},
SetComp {
elt: Box<ComparableExpr<'a>>,
generators: Vec<ComparableComprehension<'a>>,
},
DictComp {
key: Box<ComparableExpr<'a>>,
value: Box<ComparableExpr<'a>>,
generators: Vec<ComparableComprehension<'a>>,
},
GeneratorExp {
elt: Box<ComparableExpr<'a>>,
generators: Vec<ComparableComprehension<'a>>,
},
Await {
value: Box<ComparableExpr<'a>>,
},
Yield {
value: Option<Box<ComparableExpr<'a>>>,
},
YieldFrom {
value: Box<ComparableExpr<'a>>,
},
Compare {
left: Box<ComparableExpr<'a>>,
ops: Vec<ComparableCmpop>,
comparators: Vec<ComparableExpr<'a>>,
},
Call {
func: Box<ComparableExpr<'a>>,
args: Vec<ComparableExpr<'a>>,
keywords: Vec<ComparableKeyword<'a>>,
},
FormattedValue {
value: Box<ComparableExpr<'a>>,
conversion: &'a usize,
format_spec: Option<Box<ComparableExpr<'a>>>,
},
JoinedStr {
values: Vec<ComparableExpr<'a>>,
},
Constant {
value: ComparableConstant<'a>,
kind: Option<&'a str>,
},
Attribute {
value: Box<ComparableExpr<'a>>,
attr: &'a str,
ctx: ComparableExprContext,
},
Subscript {
value: Box<ComparableExpr<'a>>,
slice: Box<ComparableExpr<'a>>,
ctx: ComparableExprContext,
},
Starred {
value: Box<ComparableExpr<'a>>,
ctx: ComparableExprContext,
},
Name {
id: &'a str,
ctx: ComparableExprContext,
},
List {
elts: Vec<ComparableExpr<'a>>,
ctx: ComparableExprContext,
},
Tuple {
elts: Vec<ComparableExpr<'a>>,
ctx: ComparableExprContext,
},
Slice {
lower: Option<Box<ComparableExpr<'a>>>,
upper: Option<Box<ComparableExpr<'a>>>,
step: Option<Box<ComparableExpr<'a>>>,
},
}
impl<'a> From<&'a Box<Expr>> for Box<ComparableExpr<'a>> {
fn from(expr: &'a Box<Expr>) -> Self {
Box::new((&**expr).into())
}
}
impl<'a> From<&'a Expr> for ComparableExpr<'a> {
fn from(expr: &'a Expr) -> Self {
match &expr.node {
ExprKind::BoolOp { op, values } => Self::BoolOp {
op: op.into(),
values: values.iter().map(std::convert::Into::into).collect(),
},
ExprKind::NamedExpr { target, value } => Self::NamedExpr {
target: target.into(),
value: value.into(),
},
ExprKind::BinOp { left, op, right } => Self::BinOp {
left: left.into(),
op: op.into(),
right: right.into(),
},
ExprKind::UnaryOp { op, operand } => Self::UnaryOp {
op: op.into(),
operand: operand.into(),
},
ExprKind::Lambda { args, body } => Self::Lambda {
args: (&**args).into(),
body: body.into(),
},
ExprKind::IfExp { test, body, orelse } => Self::IfExp {
test: test.into(),
body: body.into(),
orelse: orelse.into(),
},
ExprKind::Dict { keys, values } => Self::Dict {
keys: keys.iter().map(std::convert::Into::into).collect(),
values: values.iter().map(std::convert::Into::into).collect(),
},
ExprKind::Set { elts } => Self::Set {
elts: elts.iter().map(std::convert::Into::into).collect(),
},
ExprKind::ListComp { elt, generators } => Self::ListComp {
elt: elt.into(),
generators: generators.iter().map(std::convert::Into::into).collect(),
},
ExprKind::SetComp { elt, generators } => Self::SetComp {
elt: elt.into(),
generators: generators.iter().map(std::convert::Into::into).collect(),
},
ExprKind::DictComp {
key,
value,
generators,
} => Self::DictComp {
key: key.into(),
value: value.into(),
generators: generators.iter().map(std::convert::Into::into).collect(),
},
ExprKind::GeneratorExp { elt, generators } => Self::GeneratorExp {
elt: elt.into(),
generators: generators.iter().map(std::convert::Into::into).collect(),
},
ExprKind::Await { value } => Self::Await {
value: value.into(),
},
ExprKind::Yield { value } => Self::Yield {
value: value.as_ref().map(std::convert::Into::into),
},
ExprKind::YieldFrom { value } => Self::YieldFrom {
value: value.into(),
},
ExprKind::Compare {
left,
ops,
comparators,
} => Self::Compare {
left: left.into(),
ops: ops.iter().map(std::convert::Into::into).collect(),
comparators: comparators.iter().map(std::convert::Into::into).collect(),
},
ExprKind::Call {
func,
args,
keywords,
} => Self::Call {
func: func.into(),
args: args.iter().map(std::convert::Into::into).collect(),
keywords: keywords.iter().map(std::convert::Into::into).collect(),
},
ExprKind::FormattedValue {
value,
conversion,
format_spec,
} => Self::FormattedValue {
value: value.into(),
conversion,
format_spec: format_spec.as_ref().map(std::convert::Into::into),
},
ExprKind::JoinedStr { values } => Self::JoinedStr {
values: values.iter().map(std::convert::Into::into).collect(),
},
ExprKind::Constant { value, kind } => Self::Constant {
value: value.into(),
kind: kind.as_ref().map(String::as_str),
},
ExprKind::Attribute { value, attr, ctx } => Self::Attribute {
value: value.into(),
attr,
ctx: ctx.into(),
},
ExprKind::Subscript { value, slice, ctx } => Self::Subscript {
value: value.into(),
slice: slice.into(),
ctx: ctx.into(),
},
ExprKind::Starred { value, ctx } => Self::Starred {
value: value.into(),
ctx: ctx.into(),
},
ExprKind::Name { id, ctx } => Self::Name {
id,
ctx: ctx.into(),
},
ExprKind::List { elts, ctx } => Self::List {
elts: elts.iter().map(std::convert::Into::into).collect(),
ctx: ctx.into(),
},
ExprKind::Tuple { elts, ctx } => Self::Tuple {
elts: elts.iter().map(std::convert::Into::into).collect(),
ctx: ctx.into(),
},
ExprKind::Slice { lower, upper, step } => Self::Slice {
lower: lower.as_ref().map(std::convert::Into::into),
upper: upper.as_ref().map(std::convert::Into::into),
step: step.as_ref().map(std::convert::Into::into),
},
}
}
}

View File

@@ -1,5 +1,6 @@
pub mod branch_detection;
pub mod cast;
pub mod comparable;
pub mod function_type;
pub mod helpers;
pub mod operations;

View File

@@ -7,7 +7,7 @@ use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
#[derive(Debug, Copy, Clone, Hash)]
@@ -30,15 +30,15 @@ impl From<bool> for Mode {
/// Auto-fix errors in a file, and write the fixed source code to disk.
pub fn fix_file<'a>(
checks: &'a [Check],
diagnostics: &'a [Diagnostic],
locator: &'a SourceCodeLocator<'a>,
) -> Option<(Cow<'a, str>, usize)> {
if checks.iter().all(|check| check.fix.is_none()) {
if diagnostics.iter().all(|check| check.fix.is_none()) {
return None;
}
Some(apply_fixes(
checks.iter().filter_map(|check| check.fix.as_ref()),
diagnostics.iter().filter_map(|check| check.fix.as_ref()),
locator,
))
}

View File

@@ -1,6 +1,5 @@
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::fs::{create_dir_all, File, Metadata};
use std::hash::{Hash, Hasher};
use std::io::Write;
use std::path::{Path, PathBuf};
@@ -8,16 +7,15 @@ use std::path::{Path, PathBuf};
use anyhow::Result;
use filetime::FileTime;
use log::error;
use once_cell::sync::Lazy;
use path_absolutize::Absolutize;
use serde::{Deserialize, Serialize};
use crate::message::Message;
use crate::settings::{flags, Settings};
pub const CACHE_DIR_NAME: &str = ".ruff_cache";
const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
static CACHE_DIR: Lazy<Option<String>> = Lazy::new(|| std::env::var("RUFF_CACHE_DIR").ok());
pub const DEFAULT_CACHE_DIR_NAME: &str = ".ruff_cache";
#[derive(Serialize, Deserialize)]
struct CacheMetadata {
@@ -39,9 +37,7 @@ struct CheckResult {
/// Return the cache directory for a given project root. Defers to the
/// `RUFF_CACHE_DIR` environment variable, if set.
pub fn cache_dir(project_root: &Path) -> PathBuf {
CACHE_DIR
.as_ref()
.map_or_else(|| project_root.join(DEFAULT_CACHE_DIR_NAME), PathBuf::from)
project_root.join(CACHE_DIR_NAME)
}
fn content_dir() -> &'static Path {
@@ -60,7 +56,7 @@ fn cache_key<P: AsRef<Path>>(path: P, settings: &Settings, autofix: flags::Autof
/// Initialize the cache at the specified `Path`.
pub fn init(path: &Path) -> Result<()> {
// Create the cache directories.
create_dir_all(path.join(content_dir()))?;
fs::create_dir_all(path.join(content_dir()))?;
// Add the CACHEDIR.TAG.
if !cachedir::is_tagged(path)? {
@@ -70,7 +66,7 @@ pub fn init(path: &Path) -> Result<()> {
// Add the .gitignore.
let gitignore_path = path.join(".gitignore");
if !gitignore_path.exists() {
let mut file = File::create(gitignore_path)?;
let mut file = fs::File::create(gitignore_path)?;
file.write_all(b"*")?;
}
@@ -91,7 +87,7 @@ fn read_sync(cache_dir: &Path, key: u64) -> Result<Vec<u8>, std::io::Error> {
/// Get a value from the cache.
pub fn get<P: AsRef<Path>>(
path: P,
metadata: &Metadata,
metadata: &fs::Metadata,
settings: &Settings,
autofix: flags::Autofix,
) -> Option<Vec<Message>> {
@@ -115,7 +111,7 @@ pub fn get<P: AsRef<Path>>(
/// Set a value in the cache.
pub fn set<P: AsRef<Path>>(
path: P,
metadata: &Metadata,
metadata: &fs::Metadata,
settings: &Settings,
autofix: flags::Autofix,
messages: &[Message],

File diff suppressed because it is too large Load Diff

View File

@@ -8,7 +8,7 @@ use crate::ast::visitor::Visitor;
use crate::directives::IsortDirectives;
use crate::isort;
use crate::isort::track::ImportTracker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::settings::{flags, Settings};
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code_style::SourceCodeStyleDetector;
@@ -20,18 +20,18 @@ fn check_import_blocks(
stylist: &SourceCodeStyleDetector,
autofix: flags::Autofix,
package: Option<&Path>,
) -> Vec<Check> {
let mut checks = vec![];
) -> Vec<Diagnostic> {
let mut diagnostics = vec![];
for block in tracker.into_iter() {
if !block.imports.is_empty() {
if let Some(check) =
isort::plugins::check_imports(&block, locator, settings, stylist, autofix, package)
if let Some(diagnostic) =
isort::rules::check_imports(&block, locator, settings, stylist, autofix, package)
{
checks.push(check);
diagnostics.push(diagnostic);
}
}
}
checks
diagnostics
}
#[allow(clippy::too_many_arguments)]
@@ -44,7 +44,7 @@ pub fn check_imports(
autofix: flags::Autofix,
path: &Path,
package: Option<&Path>,
) -> Vec<Check> {
) -> Vec<Diagnostic> {
let mut tracker = ImportTracker::new(locator, directives, path);
for stmt in python_ast {
tracker.visit_stmt(stmt);

View File

@@ -1,9 +1,9 @@
//! Lint rules based on checking raw physical lines.
use crate::pycodestyle::checks::{line_too_long, no_newline_at_end_of_file};
use crate::pygrep_hooks::plugins::{blanket_noqa, blanket_type_ignore};
use crate::pyupgrade::checks::unnecessary_coding_comment;
use crate::registry::{Check, CheckCode};
use crate::pycodestyle::rules::{line_too_long, no_newline_at_end_of_file};
use crate::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
use crate::pyupgrade::rules::unnecessary_coding_comment;
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::{flags, Settings};
pub fn check_lines(
@@ -11,14 +11,14 @@ pub fn check_lines(
commented_lines: &[usize],
settings: &Settings,
autofix: flags::Autofix,
) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let enforce_unnecessary_coding_comment = settings.enabled.contains(&CheckCode::UP009);
let enforce_line_too_long = settings.enabled.contains(&CheckCode::E501);
let enforce_no_newline_at_end_of_file = settings.enabled.contains(&CheckCode::W292);
let enforce_blanket_type_ignore = settings.enabled.contains(&CheckCode::PGH003);
let enforce_blanket_noqa = settings.enabled.contains(&CheckCode::PGH004);
let enforce_unnecessary_coding_comment = settings.enabled.contains(&RuleCode::UP009);
let enforce_line_too_long = settings.enabled.contains(&RuleCode::E501);
let enforce_no_newline_at_end_of_file = settings.enabled.contains(&RuleCode::W292);
let enforce_blanket_type_ignore = settings.enabled.contains(&RuleCode::PGH003);
let enforce_blanket_noqa = settings.enabled.contains(&RuleCode::PGH004);
let mut commented_lines_iter = commented_lines.iter().peekable();
for (index, line) in contents.lines().enumerate() {
@@ -28,59 +28,59 @@ pub fn check_lines(
{
if enforce_unnecessary_coding_comment {
if index < 2 {
if let Some(check) = unnecessary_coding_comment(
if let Some(diagnostic) = unnecessary_coding_comment(
index,
line,
matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&CheckCode::UP009),
&& settings.fixable.contains(&RuleCode::UP009),
) {
checks.push(check);
diagnostics.push(diagnostic);
}
}
}
if enforce_blanket_type_ignore {
if commented_lines.contains(&(index + 1)) {
if let Some(check) = blanket_type_ignore(index, line) {
checks.push(check);
if let Some(diagnostic) = blanket_type_ignore(index, line) {
diagnostics.push(diagnostic);
}
}
}
if enforce_blanket_noqa {
if commented_lines.contains(&(index + 1)) {
if let Some(check) = blanket_noqa(index, line) {
checks.push(check);
if let Some(diagnostic) = blanket_noqa(index, line) {
diagnostics.push(diagnostic);
}
}
}
}
if enforce_line_too_long {
if let Some(check) = line_too_long(index, line, settings) {
checks.push(check);
if let Some(diagnostic) = line_too_long(index, line, settings) {
diagnostics.push(diagnostic);
}
}
}
if enforce_no_newline_at_end_of_file {
if let Some(check) = no_newline_at_end_of_file(
if let Some(diagnostic) = no_newline_at_end_of_file(
contents,
matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&CheckCode::W292),
&& settings.fixable.contains(&RuleCode::W292),
) {
checks.push(check);
diagnostics.push(diagnostic);
}
}
checks
diagnostics
}
#[cfg(test)]
mod tests {
use super::check_lines;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings::{flags, Settings};
#[test]
@@ -92,7 +92,7 @@ mod tests {
&[],
&Settings {
line_length,
..Settings::for_rule(CheckCode::E501)
..Settings::for_rule(RuleCode::E501)
},
flags::Autofix::Enabled,
)

View File

@@ -8,12 +8,13 @@ use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::noqa::{is_file_exempt, Directive};
use crate::registry::{Check, CheckCode, CheckKind, UnusedCodes, CODE_REDIRECTS};
use crate::registry::{Diagnostic, DiagnosticKind, RuleCode, CODE_REDIRECTS};
use crate::settings::{flags, Settings};
use crate::violations::UnusedCodes;
use crate::{noqa, violations};
pub fn check_noqa(
checks: &mut Vec<Check>,
diagnostics: &mut Vec<Diagnostic>,
contents: &str,
commented_lines: &[usize],
noqa_line_for: &IntMap<usize, usize>,
@@ -23,13 +24,13 @@ pub fn check_noqa(
let mut noqa_directives: IntMap<usize, (Directive, Vec<&str>)> = IntMap::default();
let mut ignored = vec![];
let enforce_noqa = settings.enabled.contains(&CheckCode::RUF100);
let enforce_noqa = settings.enabled.contains(&RuleCode::RUF100);
let lines: Vec<&str> = contents.lines().collect();
for lineno in commented_lines {
// If we hit an exemption for the entire file, bail.
if is_file_exempt(lines[lineno - 1]) {
checks.drain(..);
diagnostics.drain(..);
return;
}
@@ -40,14 +41,14 @@ pub fn check_noqa(
}
}
// Remove any ignored checks.
for (index, check) in checks.iter().enumerate() {
if matches!(check.kind, CheckKind::BlanketNOQA(..)) {
// Remove any ignored diagnostics.
for (index, diagnostic) in diagnostics.iter().enumerate() {
if matches!(diagnostic.kind, DiagnosticKind::BlanketNOQA(..)) {
continue;
}
// Is the check ignored by a `noqa` directive on the parent line?
if let Some(parent_lineno) = check.parent.map(|location| location.row()) {
if let Some(parent_lineno) = diagnostic.parent.map(|location| location.row()) {
let noqa_lineno = noqa_line_for.get(&parent_lineno).unwrap_or(&parent_lineno);
if commented_lines.contains(noqa_lineno) {
let noqa = noqa_directives.entry(noqa_lineno - 1).or_insert_with(|| {
@@ -55,13 +56,13 @@ pub fn check_noqa(
});
match noqa {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
matches.push(diagnostic.kind.code().as_ref());
ignored.push(index);
continue;
}
(Directive::Codes(.., codes), matches) => {
if noqa::includes(check.kind.code(), codes) {
matches.push(check.kind.code().as_ref());
if noqa::includes(diagnostic.kind.code(), codes) {
matches.push(diagnostic.kind.code().as_ref());
ignored.push(index);
continue;
}
@@ -71,21 +72,23 @@ pub fn check_noqa(
}
}
// Is the check ignored by a `noqa` directive on the same line?
let check_lineno = check.location.row();
let noqa_lineno = noqa_line_for.get(&check_lineno).unwrap_or(&check_lineno);
// Is the diagnostic ignored by a `noqa` directive on the same line?
let diagnostic_lineno = diagnostic.location.row();
let noqa_lineno = noqa_line_for
.get(&diagnostic_lineno)
.unwrap_or(&diagnostic_lineno);
if commented_lines.contains(noqa_lineno) {
let noqa = noqa_directives
.entry(noqa_lineno - 1)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno - 1]), vec![]));
match noqa {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
matches.push(diagnostic.kind.code().as_ref());
ignored.push(index);
}
(Directive::Codes(.., codes), matches) => {
if noqa::includes(check.kind.code(), codes) {
matches.push(check.kind.code().as_ref());
if noqa::includes(diagnostic.kind.code(), codes) {
matches.push(diagnostic.kind.code().as_ref());
ignored.push(index);
}
}
@@ -100,19 +103,19 @@ pub fn check_noqa(
match directive {
Directive::All(spaces, start, end) => {
if matches.is_empty() {
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnusedNOQA(None),
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
);
if matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(check.kind.code())
&& settings.fixable.contains(diagnostic.kind.code())
{
check.amend(Fix::deletion(
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
}
checks.push(check);
diagnostics.push(diagnostic);
}
}
Directive::Codes(spaces, start, end, codes) => {
@@ -123,7 +126,7 @@ pub fn check_noqa(
let mut self_ignore = false;
for code in codes {
let code = CODE_REDIRECTS.get(code).map_or(code, AsRef::as_ref);
if code == CheckCode::RUF100.as_ref() {
if code == RuleCode::RUF100.as_ref() {
self_ignore = true;
break;
}
@@ -131,8 +134,8 @@ pub fn check_noqa(
if matches.contains(&code) || settings.external.contains(code) {
valid_codes.push(code);
} else {
if let Ok(check_code) = CheckCode::from_str(code) {
if settings.enabled.contains(&check_code) {
if let Ok(rule_code) = RuleCode::from_str(code) {
if settings.enabled.contains(&rule_code) {
unmatched_codes.push(code);
} else {
disabled_codes.push(code);
@@ -151,7 +154,7 @@ pub fn check_noqa(
&& unknown_codes.is_empty()
&& unmatched_codes.is_empty())
{
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnusedNOQA(Some(UnusedCodes {
disabled: disabled_codes
.iter()
@@ -169,22 +172,22 @@ pub fn check_noqa(
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
);
if matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(check.kind.code())
&& settings.fixable.contains(diagnostic.kind.code())
{
if valid_codes.is_empty() {
check.amend(Fix::deletion(
diagnostic.amend(Fix::deletion(
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
} else {
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
format!("# noqa: {}", valid_codes.join(", ")),
Location::new(row + 1, start),
Location::new(row + 1, lines[row].chars().count()),
));
}
}
checks.push(check);
diagnostics.push(diagnostic);
}
}
Directive::None => {}
@@ -194,6 +197,6 @@ pub fn check_noqa(
ignored.sort_unstable();
for index in ignored.iter().rev() {
checks.swap_remove(*index);
diagnostics.swap_remove(*index);
}
}

View File

@@ -3,8 +3,8 @@
use rustpython_parser::lexer::{LexResult, Tok};
use crate::lex::docstring_detection::StateMachine;
use crate::registry::{Check, CheckCode};
use crate::ruff::checks::Context;
use crate::registry::{Diagnostic, RuleCode};
use crate::ruff::rules::Context;
use crate::settings::flags;
use crate::source_code_locator::SourceCodeLocator;
use crate::{eradicate, flake8_implicit_str_concat, flake8_quotes, pycodestyle, ruff, Settings};
@@ -14,20 +14,20 @@ pub fn check_tokens(
tokens: &[LexResult],
settings: &Settings,
autofix: flags::Autofix,
) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
let enforce_ambiguous_unicode_character = settings.enabled.contains(&CheckCode::RUF001)
|| settings.enabled.contains(&CheckCode::RUF002)
|| settings.enabled.contains(&CheckCode::RUF003);
let enforce_quotes = settings.enabled.contains(&CheckCode::Q000)
|| settings.enabled.contains(&CheckCode::Q001)
|| settings.enabled.contains(&CheckCode::Q002)
|| settings.enabled.contains(&CheckCode::Q003);
let enforce_commented_out_code = settings.enabled.contains(&CheckCode::ERA001);
let enforce_invalid_escape_sequence = settings.enabled.contains(&CheckCode::W605);
let enforce_implicit_string_concatenation = settings.enabled.contains(&CheckCode::ISC001)
|| settings.enabled.contains(&CheckCode::ISC002);
let enforce_ambiguous_unicode_character = settings.enabled.contains(&RuleCode::RUF001)
|| settings.enabled.contains(&RuleCode::RUF002)
|| settings.enabled.contains(&RuleCode::RUF003);
let enforce_quotes = settings.enabled.contains(&RuleCode::Q000)
|| settings.enabled.contains(&RuleCode::Q001)
|| settings.enabled.contains(&RuleCode::Q002)
|| settings.enabled.contains(&RuleCode::Q003);
let enforce_commented_out_code = settings.enabled.contains(&RuleCode::ERA001);
let enforce_invalid_escape_sequence = settings.enabled.contains(&RuleCode::W605);
let enforce_implicit_string_concatenation = settings.enabled.contains(&RuleCode::ISC001)
|| settings.enabled.contains(&RuleCode::ISC002);
let mut state_machine = StateMachine::default();
for &(start, ref tok, end) in tokens.iter().flatten() {
@@ -40,7 +40,7 @@ pub fn check_tokens(
// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
if matches!(tok, Tok::String { .. } | Tok::Comment(_)) {
checks.extend(ruff::checks::ambiguous_unicode_character(
diagnostics.extend(ruff::rules::ambiguous_unicode_character(
locator,
start,
end,
@@ -62,15 +62,15 @@ pub fn check_tokens(
// flake8-quotes
if enforce_quotes {
if matches!(tok, Tok::String { .. }) {
if let Some(check) = flake8_quotes::checks::quotes(
if let Some(diagnostic) = flake8_quotes::rules::quotes(
locator,
start,
end,
is_docstring,
&settings.flake8_quotes,
) {
if settings.enabled.contains(check.kind.code()) {
checks.push(check);
if settings.enabled.contains(diagnostic.kind.code()) {
diagnostics.push(diagnostic);
}
}
}
@@ -79,10 +79,10 @@ pub fn check_tokens(
// eradicate
if enforce_commented_out_code {
if matches!(tok, Tok::Comment(_)) {
if let Some(check) =
eradicate::checks::commented_out_code(locator, start, end, settings, autofix)
if let Some(diagnostic) =
eradicate::rules::commented_out_code(locator, start, end, settings, autofix)
{
checks.push(check);
diagnostics.push(diagnostic);
}
}
}
@@ -90,12 +90,12 @@ pub fn check_tokens(
// W605
if enforce_invalid_escape_sequence {
if matches!(tok, Tok::String { .. }) {
checks.extend(pycodestyle::checks::invalid_escape_sequence(
diagnostics.extend(pycodestyle::rules::invalid_escape_sequence(
locator,
start,
end,
matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&CheckCode::W605),
&& settings.fixable.contains(&RuleCode::W605),
));
}
}
@@ -103,12 +103,12 @@ pub fn check_tokens(
// ISC001, ISC002
if enforce_implicit_string_concatenation {
checks.extend(
flake8_implicit_str_concat::checks::implicit(tokens, locator)
diagnostics.extend(
flake8_implicit_str_concat::rules::implicit(tokens, locator)
.into_iter()
.filter(|check| settings.enabled.contains(check.kind.code())),
.filter(|diagnostic| settings.enabled.contains(diagnostic.kind.code())),
);
}
checks
diagnostics
}

View File

@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;
use crate::fs;
use crate::logging::LogLevel;
use crate::registry::{CheckCode, CheckCodePrefix};
use crate::registry::{RuleCode, RuleCodePrefix};
use crate::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
@@ -20,7 +20,7 @@ pub struct Cli {
pub files: Vec<PathBuf>,
/// Path to the `pyproject.toml` or `ruff.toml` file to use for
/// configuration.
#[arg(long)]
#[arg(long, conflicts_with = "isolated")]
pub config: Option<PathBuf>,
/// Enable verbose logging.
#[arg(short, long, group = "verbosity")]
@@ -56,21 +56,24 @@ pub struct Cli {
/// Disable cache reads.
#[arg(short, long)]
pub no_cache: bool,
/// Comma-separated list of error codes to enable (or ALL, to enable all
/// checks).
/// Ignore all configuration files.
#[arg(long, conflicts_with = "config")]
pub isolated: bool,
/// Comma-separated list of rule codes to enable (or ALL, to enable all
/// rules).
#[arg(long, value_delimiter = ',')]
pub select: Option<Vec<CheckCodePrefix>>,
pub select: Option<Vec<RuleCodePrefix>>,
/// Like --select, but adds additional error codes on top of the selected
/// ones.
#[arg(long, value_delimiter = ',')]
pub extend_select: Option<Vec<CheckCodePrefix>>,
pub extend_select: Option<Vec<RuleCodePrefix>>,
/// Comma-separated list of error codes to disable.
#[arg(long, value_delimiter = ',')]
pub ignore: Option<Vec<CheckCodePrefix>>,
pub ignore: Option<Vec<RuleCodePrefix>>,
/// Like --ignore, but adds additional error codes on top of the ignored
/// ones.
#[arg(long, value_delimiter = ',')]
pub extend_ignore: Option<Vec<CheckCodePrefix>>,
pub extend_ignore: Option<Vec<RuleCodePrefix>>,
/// List of paths, used to exclude files and/or directories from checks.
#[arg(long, value_delimiter = ',')]
pub exclude: Option<Vec<FilePattern>>,
@@ -81,22 +84,22 @@ pub struct Cli {
/// List of error codes to treat as eligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
pub fixable: Option<Vec<CheckCodePrefix>>,
pub fixable: Option<Vec<RuleCodePrefix>>,
/// List of error codes to treat as ineligible for autofix. Only applicable
/// when autofix itself is enabled (e.g., via `--fix`).
#[arg(long, value_delimiter = ',')]
pub unfixable: Option<Vec<CheckCodePrefix>>,
pub unfixable: Option<Vec<RuleCodePrefix>>,
/// List of mappings from file pattern to code to exclude
#[arg(long, value_delimiter = ',')]
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
/// Output serialization format for error messages.
#[arg(long, value_enum)]
#[arg(long, value_enum, env = "RUFF_FORMAT")]
pub format: Option<SerializationFormat>,
/// The name of the file when passing it through stdin.
#[arg(long)]
pub stdin_filename: Option<PathBuf>,
/// Path to the cache directory.
#[arg(long)]
#[arg(long, env = "RUFF_CACHE_DIR")]
pub cache_dir: Option<PathBuf>,
/// Show violations with source code.
#[arg(long, overrides_with("no_show_source"))]
@@ -176,7 +179,7 @@ pub struct Cli {
conflicts_with = "stdin_filename",
conflicts_with = "watch",
)]
pub explain: Option<CheckCode>,
pub explain: Option<RuleCode>,
/// Generate shell completion
#[arg(
long,
@@ -240,6 +243,7 @@ impl Cli {
explain: self.explain,
files: self.files,
generate_shell_completion: self.generate_shell_completion,
isolated: self.isolated,
no_cache: self.no_cache,
quiet: self.quiet,
show_files: self.show_files,
@@ -298,9 +302,10 @@ pub struct Arguments {
pub config: Option<PathBuf>,
pub diff: bool,
pub exit_zero: bool,
pub explain: Option<CheckCode>,
pub explain: Option<RuleCode>,
pub files: Vec<PathBuf>,
pub generate_shell_completion: Option<clap_complete_command::Shell>,
pub isolated: bool,
pub no_cache: bool,
pub quiet: bool,
pub show_files: bool,
@@ -318,18 +323,18 @@ pub struct Overrides {
pub dummy_variable_rgx: Option<Regex>,
pub exclude: Option<Vec<FilePattern>>,
pub extend_exclude: Option<Vec<FilePattern>>,
pub extend_ignore: Option<Vec<CheckCodePrefix>>,
pub extend_select: Option<Vec<CheckCodePrefix>>,
pub fixable: Option<Vec<CheckCodePrefix>>,
pub ignore: Option<Vec<CheckCodePrefix>>,
pub extend_ignore: Option<Vec<RuleCodePrefix>>,
pub extend_select: Option<Vec<RuleCodePrefix>>,
pub fixable: Option<Vec<RuleCodePrefix>>,
pub ignore: Option<Vec<RuleCodePrefix>>,
pub line_length: Option<usize>,
pub max_complexity: Option<usize>,
pub per_file_ignores: Option<Vec<PatternPrefixPair>>,
pub respect_gitignore: Option<bool>,
pub select: Option<Vec<CheckCodePrefix>>,
pub select: Option<Vec<RuleCodePrefix>>,
pub show_source: Option<bool>,
pub target_version: Option<PythonVersion>,
pub unfixable: Option<Vec<CheckCodePrefix>>,
pub unfixable: Option<Vec<RuleCodePrefix>>,
// TODO(charlie): Captured in pyproject.toml as a default, but not part of `Settings`.
pub cache_dir: Option<PathBuf>,
pub fix: Option<bool>,
@@ -354,7 +359,7 @@ pub fn extract_log_level(cli: &Arguments) -> LogLevel {
/// Convert a list of `PatternPrefixPair` structs to `PerFileIgnore`.
pub fn collect_per_file_ignores(pairs: Vec<PatternPrefixPair>) -> Vec<PerFileIgnore> {
let mut per_file_ignores: FxHashMap<String, Vec<CheckCodePrefix>> = FxHashMap::default();
let mut per_file_ignores: FxHashMap<String, Vec<RuleCodePrefix>> = FxHashMap::default();
for pair in pairs {
per_file_ignores
.entry(pair.pattern)

View File

@@ -16,17 +16,17 @@ use serde::Serialize;
use walkdir::WalkDir;
use crate::autofix::fixer;
use crate::cache::DEFAULT_CACHE_DIR_NAME;
use crate::cache::CACHE_DIR_NAME;
use crate::cli::Overrides;
use crate::iterators::par_iter;
use crate::linter::{add_noqa_to_path, lint_path, lint_stdin, Diagnostics};
use crate::logging::LogLevel;
use crate::message::Message;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::resolver::{FileDiscovery, PyprojectDiscovery};
use crate::settings::flags;
use crate::settings::types::SerializationFormat;
use crate::{cache, fs, one_time_warning, packages, resolver, violations};
use crate::{cache, fs, packages, resolver, violations, warn_user_once};
/// Run the linter over a collection of files.
pub fn run(
@@ -45,12 +45,7 @@ pub fn run(
debug!("Identified files to lint in: {:?}", duration);
if paths.is_empty() {
one_time_warning!(
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
"No Python files found under the given path(s)".bold()
);
warn_user_once!("No Python files found under the given path(s)");
return Ok(Diagnostics::default());
}
@@ -117,7 +112,7 @@ pub fn run(
.unwrap_or_else(|(path, message)| {
if let Some(path) = &path {
let settings = resolver.resolve(path, pyproject_strategy);
if settings.enabled.contains(&CheckCode::E902) {
if settings.enabled.contains(&RuleCode::E902) {
Diagnostics::new(vec![Message {
kind: violations::IOError(message).into(),
location: Location::default(),
@@ -196,12 +191,7 @@ pub fn add_noqa(
debug!("Identified files to lint in: {:?}", duration);
if paths.is_empty() {
one_time_warning!(
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
"No Python files found under the given path(s)".bold()
);
warn_user_once!("No Python files found under the given path(s)");
return Ok(0);
}
@@ -271,12 +261,7 @@ pub fn show_files(
resolver::python_files_in_path(files, pyproject_strategy, file_strategy, overrides)?;
if paths.is_empty() {
one_time_warning!(
"{}{} {}",
"warning".yellow().bold(),
":".bold(),
"No Python files found under the given path(s)".bold()
);
warn_user_once!("No Python files found under the given path(s)");
return Ok(());
}
@@ -298,18 +283,18 @@ pub fn show_files(
#[derive(Serialize)]
struct Explanation<'a> {
code: &'a str,
category: &'a str,
origin: &'a str,
summary: &'a str,
}
/// Explain a `CheckCode` to the user.
pub fn explain(code: &CheckCode, format: &SerializationFormat) -> Result<()> {
/// Explain a `RuleCode` to the user.
pub fn explain(code: &RuleCode, format: &SerializationFormat) -> Result<()> {
match format {
SerializationFormat::Text | SerializationFormat::Grouped => {
println!(
"{} ({}): {}",
code.as_ref(),
code.category().title(),
code.origin().title(),
code.kind().summary()
);
}
@@ -318,7 +303,7 @@ pub fn explain(code: &CheckCode, format: &SerializationFormat) -> Result<()> {
"{}",
serde_json::to_string_pretty(&Explanation {
code: code.as_ref(),
category: code.category().title(),
origin: code.origin().title(),
summary: &code.kind().summary(),
})?
);
@@ -340,10 +325,10 @@ pub fn explain(code: &CheckCode, format: &SerializationFormat) -> Result<()> {
pub fn clean(level: &LogLevel) -> Result<()> {
for entry in WalkDir::new(&*path_dedot::CWD)
.into_iter()
.filter_map(std::result::Result::ok)
.filter_map(Result::ok)
.filter(|entry| entry.file_type().is_dir())
{
let cache = entry.path().join(DEFAULT_CACHE_DIR_NAME);
let cache = entry.path().join(CACHE_DIR_NAME);
if cache.is_dir() {
if level >= &LogLevel::Default {
eprintln!("Removing cache at: {}", fs::relativize_path(&cache).bold());

View File

@@ -20,7 +20,7 @@ impl Flags {
if settings
.enabled
.iter()
.any(|check_code| matches!(check_code.lint_source(), LintSource::Imports))
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports))
{
Flags::NOQA | Flags::ISORT
} else {
@@ -104,10 +104,13 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
continue;
};
// `isort` allows for `# isort: skip` and `# isort: skip_file` to include or
// omit a space after the colon. The remaining action comments are
// required to include the space, and must appear on their own lines.
let comment_text = comment_text.trim_end();
if comment_text == "# isort: split" {
splits.push(start.row());
} else if comment_text == "# isort: skip_file" {
} else if comment_text == "# isort: skip_file" || comment_text == "# isort:skip_file" {
skip_file = true;
} else if off.is_some() {
if comment_text == "# isort: on" {
@@ -119,7 +122,7 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
off = None;
}
} else {
if comment_text.contains("isort: skip") {
if comment_text.contains("isort: skip") || comment_text.contains("isort:skip") {
exclusions.insert(start.row());
} else if comment_text == "# isort: off" {
off = Some(start);

View File

@@ -1,5 +1,5 @@
pub mod checks;
pub mod detection;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -10,19 +10,19 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::ERA001, Path::new("ERA001.py"); "ERA001")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::ERA001, Path::new("ERA001.py"); "ERA001")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/eradicate")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -3,9 +3,9 @@ use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::eradicate::detection::comment_contains_code;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings::flags;
use crate::{violations, Check, Settings, SourceCodeLocator};
use crate::{violations, Diagnostic, Settings, SourceCodeLocator};
fn is_standalone_comment(line: &str) -> bool {
for char in line.chars() {
@@ -25,20 +25,20 @@ pub fn commented_out_code(
end: Location,
settings: &Settings,
autofix: flags::Autofix,
) -> Option<Check> {
) -> Option<Diagnostic> {
let location = Location::new(start.row(), 0);
let end_location = Location::new(end.row() + 1, 0);
let line = locator.slice_source_code_range(&Range::new(location, end_location));
// Verify that the comment is on its own line, and that it contains code.
if is_standalone_comment(&line) && comment_contains_code(&line, &settings.task_tags[..]) {
let mut check = Check::new(violations::CommentedOutCode, Range::new(start, end));
let mut diagnostic = Diagnostic::new(violations::CommentedOutCode, Range::new(start, end));
if matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&CheckCode::ERA001)
&& settings.fixable.contains(&RuleCode::ERA001)
{
check.amend(Fix::deletion(location, end_location));
diagnostic.amend(Fix::deletion(location, end_location));
}
Some(check)
Some(diagnostic)
} else {
None
}

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -9,28 +9,28 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::YTT101, Path::new("YTT101.py"); "YTT101")]
#[test_case(CheckCode::YTT102, Path::new("YTT102.py"); "YTT102")]
#[test_case(CheckCode::YTT103, Path::new("YTT103.py"); "YTT103")]
#[test_case(CheckCode::YTT201, Path::new("YTT201.py"); "YTT201")]
#[test_case(CheckCode::YTT202, Path::new("YTT202.py"); "YTT202")]
#[test_case(CheckCode::YTT203, Path::new("YTT203.py"); "YTT203")]
#[test_case(CheckCode::YTT204, Path::new("YTT204.py"); "YTT204")]
#[test_case(CheckCode::YTT301, Path::new("YTT301.py"); "YTT301")]
#[test_case(CheckCode::YTT302, Path::new("YTT302.py"); "YTT302")]
#[test_case(CheckCode::YTT303, Path::new("YTT303.py"); "YTT303")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::YTT101, Path::new("YTT101.py"); "YTT101")]
#[test_case(RuleCode::YTT102, Path::new("YTT102.py"); "YTT102")]
#[test_case(RuleCode::YTT103, Path::new("YTT103.py"); "YTT103")]
#[test_case(RuleCode::YTT201, Path::new("YTT201.py"); "YTT201")]
#[test_case(RuleCode::YTT202, Path::new("YTT202.py"); "YTT202")]
#[test_case(RuleCode::YTT203, Path::new("YTT203.py"); "YTT203")]
#[test_case(RuleCode::YTT204, Path::new("YTT204.py"); "YTT204")]
#[test_case(RuleCode::YTT301, Path::new("YTT301.py"); "YTT301")]
#[test_case(RuleCode::YTT302, Path::new("YTT302.py"); "YTT302")]
#[test_case(RuleCode::YTT303, Path::new("YTT303.py"); "YTT303")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_2020")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -4,7 +4,7 @@ use rustpython_ast::{Cmpop, Constant, Expr, ExprKind, Located};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::{Check, CheckCode};
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;
fn is_sys(checker: &Checker, expr: &Expr, target: &str) -> bool {
@@ -32,17 +32,16 @@ pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
..
} = &upper.node
{
if *i == BigInt::from(1)
&& checker.settings.enabled.contains(&CheckCode::YTT303)
if *i == BigInt::from(1) && checker.settings.enabled.contains(&RuleCode::YTT303)
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionSlice1Referenced,
Range::from_located(value),
));
} else if *i == BigInt::from(3)
&& checker.settings.enabled.contains(&CheckCode::YTT101)
&& checker.settings.enabled.contains(&RuleCode::YTT101)
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionSlice3Referenced,
Range::from_located(value),
));
@@ -54,15 +53,15 @@ pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
value: Constant::Int(i),
..
} => {
if *i == BigInt::from(2) && checker.settings.enabled.contains(&CheckCode::YTT102) {
checker.checks.push(Check::new(
if *i == BigInt::from(2) && checker.settings.enabled.contains(&RuleCode::YTT102) {
checker.diagnostics.push(Diagnostic::new(
violations::SysVersion2Referenced,
Range::from_located(value),
));
} else if *i == BigInt::from(0)
&& checker.settings.enabled.contains(&CheckCode::YTT301)
&& checker.settings.enabled.contains(&RuleCode::YTT301)
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::SysVersion0Referenced,
Range::from_located(value),
));
@@ -97,9 +96,9 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
) = (ops, comparators)
{
if *n == BigInt::from(3)
&& checker.settings.enabled.contains(&CheckCode::YTT201)
&& checker.settings.enabled.contains(&RuleCode::YTT201)
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionInfo0Eq3Referenced,
Range::from_located(left),
));
@@ -118,8 +117,8 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
}],
) = (ops, comparators)
{
if checker.settings.enabled.contains(&CheckCode::YTT203) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::YTT203) {
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionInfo1CmpInt,
Range::from_located(left),
));
@@ -144,8 +143,8 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
}],
) = (ops, comparators)
{
if checker.settings.enabled.contains(&CheckCode::YTT204) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::YTT204) {
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionInfoMinorCmpInt,
Range::from_located(left),
));
@@ -170,14 +169,14 @@ pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &
) = (ops, comparators)
{
if s.len() == 1 {
if checker.settings.enabled.contains(&CheckCode::YTT302) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::YTT302) {
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionCmpStr10,
Range::from_located(left),
));
}
} else if checker.settings.enabled.contains(&CheckCode::YTT103) {
checker.checks.push(Check::new(
} else if checker.settings.enabled.contains(&RuleCode::YTT103) {
checker.diagnostics.push(Diagnostic::new(
violations::SysVersionCmpStr3,
Range::from_located(left),
));
@@ -195,7 +194,7 @@ pub fn name_or_attribute(checker: &mut Checker, expr: &Expr) {
&checker.from_imports,
&checker.import_aliases,
) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::SixPY3Referenced,
Range::from_located(expr),
));

View File

@@ -1,6 +1,6 @@
mod fixes;
pub mod helpers;
pub mod plugins;
pub mod rules;
pub mod settings;
#[cfg(test)]
@@ -10,36 +10,36 @@ mod tests {
use anyhow::Result;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::{flake8_annotations, Settings};
#[test]
fn defaults() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/annotation_presence.py"),
&Settings {
..Settings::for_rules(vec![
CheckCode::ANN001,
CheckCode::ANN002,
CheckCode::ANN003,
CheckCode::ANN101,
CheckCode::ANN102,
CheckCode::ANN201,
CheckCode::ANN202,
CheckCode::ANN204,
CheckCode::ANN205,
CheckCode::ANN206,
CheckCode::ANN401,
RuleCode::ANN001,
RuleCode::ANN002,
RuleCode::ANN003,
RuleCode::ANN101,
RuleCode::ANN102,
RuleCode::ANN201,
RuleCode::ANN202,
RuleCode::ANN204,
RuleCode::ANN205,
RuleCode::ANN206,
RuleCode::ANN401,
])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn suppress_dummy_args() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/suppress_dummy_args.py"),
&Settings {
flake8_annotations: flake8_annotations::settings::Settings {
@@ -49,21 +49,21 @@ mod tests {
allow_star_arg_any: false,
},
..Settings::for_rules(vec![
CheckCode::ANN001,
CheckCode::ANN002,
CheckCode::ANN003,
CheckCode::ANN101,
CheckCode::ANN102,
RuleCode::ANN001,
RuleCode::ANN002,
RuleCode::ANN003,
RuleCode::ANN101,
RuleCode::ANN102,
])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn mypy_init_return() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/mypy_init_return.py"),
&Settings {
flake8_annotations: flake8_annotations::settings::Settings {
@@ -73,21 +73,21 @@ mod tests {
allow_star_arg_any: false,
},
..Settings::for_rules(vec![
CheckCode::ANN201,
CheckCode::ANN202,
CheckCode::ANN204,
CheckCode::ANN205,
CheckCode::ANN206,
RuleCode::ANN201,
RuleCode::ANN202,
RuleCode::ANN204,
RuleCode::ANN205,
RuleCode::ANN206,
])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn suppress_none_returning() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/suppress_none_returning.py"),
&Settings {
flake8_annotations: flake8_annotations::settings::Settings {
@@ -97,21 +97,21 @@ mod tests {
allow_star_arg_any: false,
},
..Settings::for_rules(vec![
CheckCode::ANN201,
CheckCode::ANN202,
CheckCode::ANN204,
CheckCode::ANN205,
CheckCode::ANN206,
RuleCode::ANN201,
RuleCode::ANN202,
RuleCode::ANN204,
RuleCode::ANN205,
RuleCode::ANN206,
])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn allow_star_arg_any() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/allow_star_arg_any.py"),
&Settings {
flake8_annotations: flake8_annotations::settings::Settings {
@@ -120,28 +120,28 @@ mod tests {
suppress_none_returning: false,
allow_star_arg_any: true,
},
..Settings::for_rules(vec![CheckCode::ANN401])
..Settings::for_rules(vec![RuleCode::ANN401])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
#[test]
fn allow_overload() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_annotations/allow_overload.py"),
&Settings {
..Settings::for_rules(vec![
CheckCode::ANN201,
CheckCode::ANN202,
CheckCode::ANN204,
CheckCode::ANN205,
CheckCode::ANN206,
RuleCode::ANN201,
RuleCode::ANN202,
RuleCode::ANN204,
RuleCode::ANN205,
RuleCode::ANN206,
])
},
)?;
insta::assert_yaml_snapshot!(checks);
insta::assert_yaml_snapshot!(diagnostics);
Ok(())
}
}

View File

@@ -8,9 +8,9 @@ use crate::checkers::ast::Checker;
use crate::docstrings::definition::{Definition, DefinitionKind};
use crate::flake8_annotations::fixes;
use crate::flake8_annotations::helpers::match_function_def;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::visibility::Visibility;
use crate::{violations, visibility, Check};
use crate::{violations, visibility, Diagnostic};
#[derive(Default)]
struct ReturnStatementVisitor<'a> {
@@ -57,7 +57,7 @@ where
F: FnOnce() -> String,
{
if checker.match_typing_expr(annotation, "Any") {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::DynamicallyTypedExpression(func()),
Range::from_located(annotation),
));
@@ -85,15 +85,15 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
.chain(args.kwonlyargs.iter())
{
if let Some(expr) = &arg.node.annotation {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
check_dynamically_typed(checker, expr, || arg.node.arg.to_string());
};
} else {
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN001) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN001) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeFunctionArgument(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -106,7 +106,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if let Some(arg) = &args.vararg {
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
let name = arg.node.arg.to_string();
check_dynamically_typed(checker, expr, || format!("*{name}"));
}
@@ -115,8 +115,8 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN002) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN002) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeArgs(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -129,7 +129,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if let Some(arg) = &args.kwarg {
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
let name = arg.node.arg.to_string();
check_dynamically_typed(checker, expr, || format!("**{name}"));
}
@@ -138,8 +138,8 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN003) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN003) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeKwargs(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -150,7 +150,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
// ANN201, ANN202, ANN401
if let Some(expr) = &returns {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
check_dynamically_typed(checker, expr, || name.to_string());
};
} else {
@@ -164,16 +164,16 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
match visibility {
Visibility::Public => {
if checker.settings.enabled.contains(&CheckCode::ANN201) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN201) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePublicFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
}
}
Visibility::Private => {
if checker.settings.enabled.contains(&CheckCode::ANN202) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN202) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePrivateFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
@@ -203,15 +203,15 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
// ANN401 for dynamically typed arguments
if let Some(annotation) = &arg.node.annotation {
has_any_typed_arg = true;
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
check_dynamically_typed(checker, annotation, || arg.node.arg.to_string());
}
} else {
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN001) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN001) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeFunctionArgument(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -225,7 +225,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
has_any_typed_arg = true;
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
let name = arg.node.arg.to_string();
check_dynamically_typed(checker, expr, || format!("*{name}"));
}
@@ -234,8 +234,8 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN002) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN002) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeArgs(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -249,7 +249,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
has_any_typed_arg = true;
if let Some(expr) = &arg.node.annotation {
if !checker.settings.flake8_annotations.allow_star_arg_any {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
let name = arg.node.arg.to_string();
check_dynamically_typed(checker, expr, || format!("**{name}"));
}
@@ -258,8 +258,8 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if !(checker.settings.flake8_annotations.suppress_dummy_args
&& checker.settings.dummy_variable_rgx.is_match(&arg.node.arg))
{
if checker.settings.enabled.contains(&CheckCode::ANN003) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN003) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeKwargs(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -273,15 +273,15 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
if let Some(arg) = args.args.first() {
if arg.node.annotation.is_none() {
if visibility::is_classmethod(checker, cast::decorator_list(stmt)) {
if checker.settings.enabled.contains(&CheckCode::ANN102) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN102) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeCls(arg.node.arg.to_string()),
Range::from_located(arg),
));
}
} else {
if checker.settings.enabled.contains(&CheckCode::ANN101) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN101) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingTypeSelf(arg.node.arg.to_string()),
Range::from_located(arg),
));
@@ -293,7 +293,7 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
// ANN201, ANN202
if let Some(expr) = &returns {
if checker.settings.enabled.contains(&CheckCode::ANN401) {
if checker.settings.enabled.contains(&RuleCode::ANN401) {
check_dynamically_typed(checker, expr, || name.to_string());
}
} else {
@@ -306,15 +306,15 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
}
if visibility::is_classmethod(checker, cast::decorator_list(stmt)) {
if checker.settings.enabled.contains(&CheckCode::ANN206) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN206) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypeClassMethod(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
}
} else if visibility::is_staticmethod(checker, cast::decorator_list(stmt)) {
if checker.settings.enabled.contains(&CheckCode::ANN205) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN205) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypeStaticMethod(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
@@ -322,28 +322,28 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
} else if visibility::is_init(stmt) {
// Allow omission of return annotation in `__init__` functions, as long as at
// least one argument is typed.
if checker.settings.enabled.contains(&CheckCode::ANN204) {
if checker.settings.enabled.contains(&RuleCode::ANN204) {
if !(checker.settings.flake8_annotations.mypy_init_return
&& has_any_typed_arg)
{
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::MissingReturnTypeSpecialMethod(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
);
if checker.patch(check.kind.code()) {
if checker.patch(diagnostic.kind.code()) {
match fixes::add_return_none_annotation(checker.locator, stmt) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}
}
} else if visibility::is_magic(stmt) {
if checker.settings.enabled.contains(&CheckCode::ANN204) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN204) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypeSpecialMethod(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
@@ -351,16 +351,16 @@ pub fn definition(checker: &mut Checker, definition: &Definition, visibility: &V
} else {
match visibility {
Visibility::Public => {
if checker.settings.enabled.contains(&CheckCode::ANN201) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN201) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePublicFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));
}
}
Visibility::Private => {
if checker.settings.enabled.contains(&CheckCode::ANN202) {
checker.checks.push(Check::new(
if checker.settings.enabled.contains(&RuleCode::ANN202) {
checker.diagnostics.push(Diagnostic::new(
violations::MissingReturnTypePrivateFunction(name.to_string()),
helpers::identifier_range(stmt, checker.locator),
));

View File

@@ -1,10 +0,0 @@
use rustpython_ast::{Located, StmtKind};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::violations;
/// S101
pub fn assert_used(stmt: &Located<StmtKind>) -> Check {
Check::new(violations::AssertUsed, Range::from_located(stmt))
}

View File

@@ -1,5 +1,5 @@
pub mod checks;
mod helpers;
pub mod rules;
pub mod settings;
#[cfg(test)]
@@ -10,36 +10,36 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::{flake8_bandit, Settings};
#[test_case(CheckCode::S101, Path::new("S101.py"); "S101")]
#[test_case(CheckCode::S102, Path::new("S102.py"); "S102")]
#[test_case(CheckCode::S103, Path::new("S103.py"); "S103")]
#[test_case(CheckCode::S104, Path::new("S104.py"); "S104")]
#[test_case(CheckCode::S105, Path::new("S105.py"); "S105")]
#[test_case(CheckCode::S106, Path::new("S106.py"); "S106")]
#[test_case(CheckCode::S107, Path::new("S107.py"); "S107")]
#[test_case(CheckCode::S108, Path::new("S108.py"); "S108")]
#[test_case(CheckCode::S113, Path::new("S113.py"); "S113")]
#[test_case(CheckCode::S324, Path::new("S324.py"); "S324")]
#[test_case(CheckCode::S501, Path::new("S501.py"); "S501")]
#[test_case(CheckCode::S506, Path::new("S506.py"); "S506")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::S101, Path::new("S101.py"); "S101")]
#[test_case(RuleCode::S102, Path::new("S102.py"); "S102")]
#[test_case(RuleCode::S103, Path::new("S103.py"); "S103")]
#[test_case(RuleCode::S104, Path::new("S104.py"); "S104")]
#[test_case(RuleCode::S105, Path::new("S105.py"); "S105")]
#[test_case(RuleCode::S106, Path::new("S106.py"); "S106")]
#[test_case(RuleCode::S107, Path::new("S107.py"); "S107")]
#[test_case(RuleCode::S108, Path::new("S108.py"); "S108")]
#[test_case(RuleCode::S113, Path::new("S113.py"); "S113")]
#[test_case(RuleCode::S324, Path::new("S324.py"); "S324")]
#[test_case(RuleCode::S501, Path::new("S501.py"); "S501")]
#[test_case(RuleCode::S506, Path::new("S506.py"); "S506")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_bandit")
.join(path)
.as_path(),
&Settings::for_rule(check_code),
&Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
#[test]
fn check_hardcoded_tmp_additional_dirs() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_bandit/S108.py"),
&Settings {
flake8_bandit: flake8_bandit::settings::Settings {
@@ -50,10 +50,10 @@ mod tests {
"/foo".to_string(),
],
},
..Settings::for_rule(CheckCode::S108)
..Settings::for_rule(RuleCode::S108)
},
)?;
insta::assert_yaml_snapshot!("S108_extend", checks);
insta::assert_yaml_snapshot!("S108_extend", diagnostics);
Ok(())
}
}

View File

@@ -0,0 +1,10 @@
use rustpython_ast::{Located, StmtKind};
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::violations;
/// S101
pub fn assert_used(stmt: &Located<StmtKind>) -> Diagnostic {
Diagnostic::new(violations::AssertUsed, Range::from_located(stmt))
}

View File

@@ -5,7 +5,7 @@ use rustpython_ast::{Constant, Expr, ExprKind, Keyword, Operator};
use crate::ast::helpers::{compose_call_path, match_module_member, SimpleCallArgs};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const WRITE_WORLD: u16 = 0o2;
@@ -91,13 +91,13 @@ pub fn bad_file_permissions(
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
if match_module_member(func, "os", "chmod", from_imports, import_aliases) {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(mode_arg) = call_args.get_argument("mode", Some(1)) {
if let Some(int_value) = get_int_value(mode_arg) {
if (int_value & WRITE_WORLD > 0) || (int_value & EXECUTE_GROUP > 0) {
return Some(Check::new(
return Some(Diagnostic::new(
violations::BadFilePermissions(int_value),
Range::from_located(mode_arg),
));

View File

@@ -1,16 +1,19 @@
use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// S102
pub fn exec_used(expr: &Expr, func: &Expr) -> Option<Check> {
pub fn exec_used(expr: &Expr, func: &Expr) -> Option<Diagnostic> {
let ExprKind::Name { id, .. } = &func.node else {
return None;
};
if id != "exec" {
return None;
}
Some(Check::new(violations::ExecUsed, Range::from_located(expr)))
Some(Diagnostic::new(
violations::ExecUsed,
Range::from_located(expr),
))
}

View File

@@ -1,11 +1,14 @@
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// S104
pub fn hardcoded_bind_all_interfaces(value: &str, range: &Range) -> Option<Check> {
pub fn hardcoded_bind_all_interfaces(value: &str, range: &Range) -> Option<Diagnostic> {
if value == "0.0.0.0" {
Some(Check::new(violations::HardcodedBindAllInterfaces, *range))
Some(Diagnostic::new(
violations::HardcodedBindAllInterfaces,
*range,
))
} else {
None
}

View File

@@ -2,24 +2,24 @@ use rustpython_ast::{ArgData, Arguments, Expr, Located};
use crate::ast::types::Range;
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Check> {
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Diagnostic> {
let string = string_literal(default)?;
let kwarg_name = &arg.node.arg;
if !matches_password_name(kwarg_name) {
return None;
}
Some(Check::new(
Some(Diagnostic::new(
violations::HardcodedPasswordDefault(string.to_string()),
Range::from_located(default),
))
}
/// S107
pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Check> {
let mut checks: Vec<Check> = Vec::new();
pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = Vec::new();
let defaults_start =
arguments.posonlyargs.len() + arguments.args.len() - arguments.defaults.len();
@@ -31,8 +31,8 @@ pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Check> {
{
if let Some(i) = i.checked_sub(defaults_start) {
let default = &arguments.defaults[i];
if let Some(check) = check_password_kwarg(arg, default) {
checks.push(check);
if let Some(diagnostic) = check_password_kwarg(arg, default) {
diagnostics.push(diagnostic);
}
}
}
@@ -41,11 +41,11 @@ pub fn hardcoded_password_default(arguments: &Arguments) -> Vec<Check> {
for (i, kwarg) in arguments.kwonlyargs.iter().enumerate() {
if let Some(i) = i.checked_sub(defaults_start) {
let default = &arguments.kw_defaults[i];
if let Some(check) = check_password_kwarg(kwarg, default) {
checks.push(check);
if let Some(diagnostic) = check_password_kwarg(kwarg, default) {
diagnostics.push(diagnostic);
}
}
}
checks
diagnostics
}

View File

@@ -2,11 +2,11 @@ use rustpython_ast::Keyword;
use crate::ast::types::Range;
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// S106
pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Check> {
pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Diagnostic> {
keywords
.iter()
.filter_map(|keyword| {
@@ -15,7 +15,7 @@ pub fn hardcoded_password_func_arg(keywords: &[Keyword]) -> Vec<Check> {
if !matches_password_name(arg) {
return None;
}
Some(Check::new(
Some(Diagnostic::new(
violations::HardcodedPasswordFuncArg(string.to_string()),
Range::from_located(keyword),
))

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::flake8_bandit::helpers::{matches_password_name, string_literal};
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
fn is_password_target(target: &Expr) -> bool {
@@ -26,7 +26,7 @@ fn is_password_target(target: &Expr) -> bool {
}
/// S105
pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -> Vec<Check> {
pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -> Vec<Diagnostic> {
comparators
.iter()
.filter_map(|comp| {
@@ -34,7 +34,7 @@ pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -
if !is_password_target(left) {
return None;
}
Some(Check::new(
Some(Diagnostic::new(
violations::HardcodedPasswordString(string.to_string()),
Range::from_located(comp),
))
@@ -43,11 +43,11 @@ pub fn compare_to_hardcoded_password_string(left: &Expr, comparators: &[Expr]) -
}
/// S105
pub fn assign_hardcoded_password_string(value: &Expr, targets: &[Expr]) -> Option<Check> {
pub fn assign_hardcoded_password_string(value: &Expr, targets: &[Expr]) -> Option<Diagnostic> {
if let Some(string) = string_literal(value) {
for target in targets {
if is_password_target(target) {
return Some(Check::new(
return Some(Diagnostic::new(
violations::HardcodedPasswordString(string.to_string()),
Range::from_located(value),
));

View File

@@ -1,13 +1,17 @@
use rustpython_ast::Expr;
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// S108
pub fn hardcoded_tmp_directory(expr: &Expr, value: &str, prefixes: &[String]) -> Option<Check> {
pub fn hardcoded_tmp_directory(
expr: &Expr,
value: &str,
prefixes: &[String],
) -> Option<Diagnostic> {
if prefixes.iter().any(|prefix| value.starts_with(prefix)) {
Some(Check::new(
Some(Diagnostic::new(
violations::HardcodedTempFile(value.to_string()),
Range::from_located(expr),
))

View File

@@ -4,7 +4,7 @@ use rustpython_ast::{Constant, Expr, ExprKind, Keyword};
use crate::ast::helpers::{match_module_member, SimpleCallArgs};
use crate::ast::types::Range;
use crate::flake8_bandit::helpers::string_literal;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const WEAK_HASHES: [&str; 4] = ["md4", "md5", "sha", "sha1"];
@@ -29,7 +29,7 @@ pub fn hashlib_insecure_hash_functions(
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
if match_module_member(func, "hashlib", "new", from_imports, import_aliases) {
let call_args = SimpleCallArgs::new(args, keywords);
@@ -41,7 +41,7 @@ pub fn hashlib_insecure_hash_functions(
let hash_func_name = string_literal(name_arg)?;
if WEAK_HASHES.contains(&hash_func_name.to_lowercase().as_str()) {
return Some(Check::new(
return Some(Diagnostic::new(
violations::HashlibInsecureHashFunction(hash_func_name.to_string()),
Range::from_located(name_arg),
));
@@ -56,7 +56,7 @@ pub fn hashlib_insecure_hash_functions(
return None;
}
return Some(Check::new(
return Some(Diagnostic::new(
violations::HashlibInsecureHashFunction((*func_name).to_string()),
Range::from_located(func),
));

View File

@@ -4,7 +4,7 @@ use rustpython_parser::ast::Constant;
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path, SimpleCallArgs};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const REQUESTS_HTTP_VERBS: [&str; 7] = ["get", "options", "head", "post", "put", "patch", "delete"];
@@ -29,7 +29,7 @@ pub fn request_with_no_cert_validation(
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
let call_path = dealias_call_path(collect_call_paths(func), import_aliases);
let call_args = SimpleCallArgs::new(args, keywords);
@@ -41,7 +41,7 @@ pub fn request_with_no_cert_validation(
..
} = &verify_arg.node
{
return Some(Check::new(
return Some(Diagnostic::new(
violations::RequestWithNoCertValidation("requests".to_string()),
Range::from_located(verify_arg),
));
@@ -58,7 +58,7 @@ pub fn request_with_no_cert_validation(
..
} = &verify_arg.node
{
return Some(Check::new(
return Some(Diagnostic::new(
violations::RequestWithNoCertValidation("httpx".to_string()),
Range::from_located(verify_arg),
));

View File

@@ -4,7 +4,7 @@ use rustpython_parser::ast::Constant;
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path, SimpleCallArgs};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const HTTP_VERBS: [&str; 7] = ["get", "options", "head", "post", "put", "patch", "delete"];
@@ -16,7 +16,7 @@ pub fn request_without_timeout(
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
let call_path = dealias_call_path(collect_call_paths(func), import_aliases);
for func_name in &HTTP_VERBS {
if match_call_path(&call_path, "requests", func_name, from_imports) {
@@ -29,13 +29,13 @@ pub fn request_without_timeout(
} => Some(value.to_string()),
_ => None,
} {
return Some(Check::new(
return Some(Diagnostic::new(
violations::RequestWithoutTimeout(Some(timeout)),
Range::from_located(timeout_arg),
));
}
} else {
return Some(Check::new(
return Some(Diagnostic::new(
violations::RequestWithoutTimeout(None),
Range::from_located(func),
));

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Expr, ExprKind, Keyword};
use crate::ast::helpers::{match_module_member, SimpleCallArgs};
use crate::ast::types::Range;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// S506
@@ -13,7 +13,7 @@ pub fn unsafe_yaml_load(
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
if match_module_member(func, "yaml", "load", from_imports, import_aliases) {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(loader_arg) = call_args.get_argument("Loader", Some(1)) {
@@ -35,13 +35,13 @@ pub fn unsafe_yaml_load(
ExprKind::Name { id, .. } => Some(id.to_string()),
_ => None,
};
return Some(Check::new(
return Some(Diagnostic::new(
violations::UnsafeYAMLLoad(loader),
Range::from_located(loader_arg),
));
}
} else {
return Some(Check::new(
return Some(Diagnostic::new(
violations::UnsafeYAMLLoad(None),
Range::from_located(func),
));

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -9,19 +9,19 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::BLE001, Path::new("BLE.py"); "BLE001")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::BLE001, Path::new("BLE.py"); "BLE001")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_blind_except")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// BLE001
@@ -36,7 +36,7 @@ pub fn blind_except(
false
}
}) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::BlindExcept(id.to_string()),
Range::from_located(type_),
));

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -9,21 +9,21 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::FBT001, Path::new("FBT.py"); "FBT001")]
#[test_case(CheckCode::FBT002, Path::new("FBT.py"); "FBT002")]
#[test_case(CheckCode::FBT003, Path::new("FBT.py"); "FBT003")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::FBT001, Path::new("FBT.py"); "FBT001")]
#[test_case(RuleCode::FBT002, Path::new("FBT.py"); "FBT002")]
#[test_case(RuleCode::FBT003, Path::new("FBT.py"); "FBT003")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_boolean_trap")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -3,7 +3,7 @@ use rustpython_parser::ast::{Constant, Expr};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::{Check, CheckKind};
use crate::registry::{Diagnostic, DiagnosticKind};
use crate::violations;
const FUNC_NAME_ALLOWLIST: &[&str] = &[
@@ -47,11 +47,11 @@ fn is_boolean_arg(arg: &Expr) -> bool {
)
}
fn add_if_boolean(checker: &mut Checker, arg: &Expr, kind: CheckKind) {
fn add_if_boolean(checker: &mut Checker, arg: &Expr, kind: DiagnosticKind) {
if is_boolean_arg(arg) {
checker
.checks
.push(Check::new(kind, Range::from_located(arg)));
.diagnostics
.push(Diagnostic::new(kind, Range::from_located(arg)));
}
}
@@ -76,7 +76,7 @@ pub fn check_positional_boolean_in_def(checker: &mut Checker, arguments: &Argume
if !hint {
continue;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::BooleanPositionalArgInFunctionDefinition,
Range::from_located(arg),
));

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
pub mod settings;
#[cfg(test)]
@@ -9,53 +9,53 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::{flake8_bugbear, Settings};
#[test_case(CheckCode::B002, Path::new("B002.py"); "B002")]
#[test_case(CheckCode::B003, Path::new("B003.py"); "B003")]
#[test_case(CheckCode::B004, Path::new("B004.py"); "B004")]
#[test_case(CheckCode::B005, Path::new("B005.py"); "B005")]
#[test_case(CheckCode::B006, Path::new("B006_B008.py"); "B006")]
#[test_case(CheckCode::B007, Path::new("B007.py"); "B007")]
#[test_case(CheckCode::B008, Path::new("B006_B008.py"); "B008")]
#[test_case(CheckCode::B009, Path::new("B009_B010.py"); "B009")]
#[test_case(CheckCode::B010, Path::new("B009_B010.py"); "B010")]
#[test_case(CheckCode::B011, Path::new("B011.py"); "B011")]
#[test_case(CheckCode::B012, Path::new("B012.py"); "B012")]
#[test_case(CheckCode::B013, Path::new("B013.py"); "B013")]
#[test_case(CheckCode::B014, Path::new("B014.py"); "B014")]
#[test_case(CheckCode::B015, Path::new("B015.py"); "B015")]
#[test_case(CheckCode::B016, Path::new("B016.py"); "B016")]
#[test_case(CheckCode::B017, Path::new("B017.py"); "B017")]
#[test_case(CheckCode::B018, Path::new("B018.py"); "B018")]
#[test_case(CheckCode::B019, Path::new("B019.py"); "B019")]
#[test_case(CheckCode::B020, Path::new("B020.py"); "B020")]
#[test_case(CheckCode::B021, Path::new("B021.py"); "B021")]
#[test_case(CheckCode::B022, Path::new("B022.py"); "B022")]
#[test_case(CheckCode::B023, Path::new("B023.py"); "B023")]
#[test_case(CheckCode::B024, Path::new("B024.py"); "B024")]
#[test_case(CheckCode::B025, Path::new("B025.py"); "B025")]
#[test_case(CheckCode::B026, Path::new("B026.py"); "B026")]
#[test_case(CheckCode::B027, Path::new("B027.py"); "B027")]
#[test_case(CheckCode::B904, Path::new("B904.py"); "B904")]
#[test_case(CheckCode::B905, Path::new("B905.py"); "B905")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::B002, Path::new("B002.py"); "B002")]
#[test_case(RuleCode::B003, Path::new("B003.py"); "B003")]
#[test_case(RuleCode::B004, Path::new("B004.py"); "B004")]
#[test_case(RuleCode::B005, Path::new("B005.py"); "B005")]
#[test_case(RuleCode::B006, Path::new("B006_B008.py"); "B006")]
#[test_case(RuleCode::B007, Path::new("B007.py"); "B007")]
#[test_case(RuleCode::B008, Path::new("B006_B008.py"); "B008")]
#[test_case(RuleCode::B009, Path::new("B009_B010.py"); "B009")]
#[test_case(RuleCode::B010, Path::new("B009_B010.py"); "B010")]
#[test_case(RuleCode::B011, Path::new("B011.py"); "B011")]
#[test_case(RuleCode::B012, Path::new("B012.py"); "B012")]
#[test_case(RuleCode::B013, Path::new("B013.py"); "B013")]
#[test_case(RuleCode::B014, Path::new("B014.py"); "B014")]
#[test_case(RuleCode::B015, Path::new("B015.py"); "B015")]
#[test_case(RuleCode::B016, Path::new("B016.py"); "B016")]
#[test_case(RuleCode::B017, Path::new("B017.py"); "B017")]
#[test_case(RuleCode::B018, Path::new("B018.py"); "B018")]
#[test_case(RuleCode::B019, Path::new("B019.py"); "B019")]
#[test_case(RuleCode::B020, Path::new("B020.py"); "B020")]
#[test_case(RuleCode::B021, Path::new("B021.py"); "B021")]
#[test_case(RuleCode::B022, Path::new("B022.py"); "B022")]
#[test_case(RuleCode::B023, Path::new("B023.py"); "B023")]
#[test_case(RuleCode::B024, Path::new("B024.py"); "B024")]
#[test_case(RuleCode::B025, Path::new("B025.py"); "B025")]
#[test_case(RuleCode::B026, Path::new("B026.py"); "B026")]
#[test_case(RuleCode::B027, Path::new("B027.py"); "B027")]
#[test_case(RuleCode::B904, Path::new("B904.py"); "B904")]
#[test_case(RuleCode::B905, Path::new("B905.py"); "B905")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_bugbear")
.join(path)
.as_path(),
&Settings::for_rule(check_code),
&Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
#[test]
fn extend_immutable_calls() -> Result<()> {
let snapshot = "extend_immutable_calls".to_string();
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_bugbear/B008_extended.py"),
&Settings {
flake8_bugbear: flake8_bugbear::settings::Settings {
@@ -64,10 +64,10 @@ mod tests {
"fastapi.Query".to_string(),
],
},
..Settings::for_rules(vec![CheckCode::B008])
..Settings::for_rules(vec![RuleCode::B008])
},
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -4,7 +4,7 @@ use rustpython_ast::{Constant, Expr, ExprKind, Keyword, Stmt, StmtKind};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::{Check, CheckCode};
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;
fn is_abc_class(
@@ -108,7 +108,7 @@ pub fn abstract_base_class(
has_abstract_method |= has_abstract_decorator;
if !checker.settings.enabled.contains(&CheckCode::B027) {
if !checker.settings.enabled.contains(&RuleCode::B027) {
continue;
}
@@ -118,15 +118,15 @@ pub fn abstract_base_class(
.iter()
.any(|d| is_overload(d, &checker.from_imports, &checker.import_aliases))
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::EmptyMethodWithoutAbstractDecorator(name.to_string()),
Range::from_located(stmt),
));
}
}
if checker.settings.enabled.contains(&CheckCode::B024) {
if checker.settings.enabled.contains(&RuleCode::B024) {
if !has_abstract_method {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::AbstractBaseClassWithoutAbstractMethod(name.to_string()),
Range::from_located(stmt),
));

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location, Stmt, Stmt
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::violations;
@@ -46,15 +46,15 @@ pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option
return;
};
let mut check = Check::new(violations::DoNotAssertFalse, Range::from_located(test));
if checker.patch(check.kind.code()) {
let mut diagnostic = Diagnostic::new(violations::DoNotAssertFalse, Range::from_located(test));
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
generator.unparse_stmt(&assertion_error(msg));
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
generator.generate(),
stmt.location,
stmt.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{ExprKind, Stmt, Withitem};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B017
@@ -34,7 +34,7 @@ pub fn assert_raises_exception(checker: &mut Checker, stmt: &Stmt, items: &[With
return;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::NoAssertRaisesException,
Range::from_located(stmt),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B003
@@ -23,7 +23,7 @@ pub fn assignment_to_os_environ(checker: &mut Checker, targets: &[Expr]) {
if id != "os" {
return;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::AssignmentToOsEnviron,
Range::from_located(target),
));

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Expr, ExprKind};
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::{Range, ScopeKind};
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
fn is_cache_func(checker: &Checker, expr: &Expr) -> bool {
@@ -34,7 +34,7 @@ pub fn cached_instance_method(checker: &mut Checker, decorator_list: &[Expr]) {
_ => decorator,
},
) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CachedInstanceMethod,
Range::from_located(decorator),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B016
@@ -10,7 +10,7 @@ pub fn cannot_raise_literal(checker: &mut Checker, expr: &Expr) {
let ExprKind::Constant { .. } = &expr.node else {
return;
};
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CannotRaiseLiteral,
Range::from_located(expr),
));

View File

@@ -6,7 +6,7 @@ use crate::ast::helpers;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::registry::{Check, CheckCode};
use crate::registry::{Diagnostic, RuleCode};
use crate::source_code_generator::SourceCodeGenerator;
use crate::violations;
@@ -41,10 +41,10 @@ fn duplicate_handler_exceptions<'a>(
}
}
if checker.settings.enabled.contains(&CheckCode::B014) {
if checker.settings.enabled.contains(&RuleCode::B014) {
// TODO(charlie): Handle "BaseException" and redundant exception aliases.
if !duplicates.is_empty() {
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::DuplicateHandlerException(
duplicates
.into_iter()
@@ -54,20 +54,20 @@ fn duplicate_handler_exceptions<'a>(
),
Range::from_located(expr),
);
if checker.patch(check.kind.code()) {
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
if unique_elts.len() == 1 {
generator.unparse_expr(unique_elts[0], 0);
} else {
generator.unparse_expr(&type_pattern(unique_elts), 0);
}
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
generator.generate(),
expr.location,
expr.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}
}
@@ -105,10 +105,10 @@ pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
}
}
if checker.settings.enabled.contains(&CheckCode::B025) {
if checker.settings.enabled.contains(&RuleCode::B025) {
for (name, exprs) in duplicates {
for expr in exprs {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::DuplicateTryBlockException(name.join(".")),
Range::from_located(expr),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{ExprKind, Stmt, StmtKind};
use crate::ast::helpers;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B021
@@ -16,7 +16,7 @@ pub fn f_string_docstring(checker: &mut Checker, body: &[Stmt]) {
let ExprKind::JoinedStr { .. } = value.node else {
return;
};
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::FStringDocstring,
helpers::identifier_range(stmt, checker.locator),
));

View File

@@ -8,8 +8,8 @@ use crate::ast::types::Range;
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::checkers::ast::Checker;
use crate::flake8_bugbear::plugins::mutable_argument_default::is_mutable_func;
use crate::registry::{Check, CheckKind};
use crate::flake8_bugbear::rules::mutable_argument_default::is_mutable_func;
use crate::registry::{Diagnostic, DiagnosticKind};
use crate::violations;
const IMMUTABLE_FUNCS: [(&str, &str); 7] = [
@@ -36,7 +36,7 @@ fn is_immutable_func(
}
struct ArgumentDefaultVisitor<'a> {
checks: Vec<(CheckKind, Range)>,
diagnostics: Vec<(DiagnosticKind, Range)>,
extend_immutable_calls: &'a [(&'a str, &'a str)],
from_imports: &'a FxHashMap<&'a str, FxHashSet<&'a str>>,
import_aliases: &'a FxHashMap<&'a str, &'a str>,
@@ -58,7 +58,7 @@ where
)
&& !is_nan_or_infinity(func, args)
{
self.checks.push((
self.diagnostics.push((
violations::FunctionCallArgumentDefault(compose_call_path(expr)).into(),
Range::from_located(expr),
));
@@ -105,7 +105,7 @@ pub fn function_call_argument_default(checker: &mut Checker, arguments: &Argumen
.map(|target| to_module_and_member(target))
.collect();
let mut visitor = ArgumentDefaultVisitor {
checks: vec![],
diagnostics: vec![],
extend_immutable_calls: &extend_immutable_cells,
from_imports: &checker.from_imports,
import_aliases: &checker.import_aliases,
@@ -117,7 +117,7 @@ pub fn function_call_argument_default(checker: &mut Checker, arguments: &Argumen
{
visitor.visit_expr(expr);
}
for (check, range) in visitor.checks {
checker.checks.push(Check::new(check, range));
for (check, range) in visitor.diagnostics {
checker.diagnostics.push(Diagnostic::new(check, range));
}
}

View File

@@ -6,7 +6,7 @@ use crate::ast::types::{Node, Range};
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
#[derive(Default)]
@@ -212,7 +212,7 @@ where
if reassigned_in_loop.contains(name) {
if !checker.flake8_bugbear_seen.contains(&expr) {
checker.flake8_bugbear_seen.push(expr);
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::FunctionUsesLoopVariable(name.to_string()),
range,
));

View File

@@ -5,7 +5,7 @@ use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::violations;
@@ -45,15 +45,16 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
return;
}
let mut check = Check::new(violations::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
let mut diagnostic =
Diagnostic::new(violations::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
generator.unparse_expr(&attribute(obj, value), 0);
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
generator.generate(),
expr.location,
expr.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}

View File

@@ -2,13 +2,13 @@ use rustpython_ast::{Stmt, StmtKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
fn walk_stmt(checker: &mut Checker, body: &[Stmt], f: fn(&Stmt) -> bool) {
for stmt in body {
if f(stmt) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::JumpStatementInFinally(match &stmt.node {
StmtKind::Break { .. } => "break".to_string(),
StmtKind::Continue { .. } => "continue".to_string(),

View File

@@ -5,7 +5,7 @@ use crate::ast::types::Range;
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
#[derive(Default)]
@@ -56,7 +56,7 @@ pub fn loop_variable_overrides_iterator(checker: &mut Checker, target: &Expr, it
for (name, expr) in target_names {
if iter_names.contains_key(name) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::LoopVariableOverridesIterator(name.to_string()),
Range::from_located(expr),
));

View File

@@ -4,7 +4,7 @@ use rustpython_ast::{Arguments, Constant, Expr, ExprKind, Operator};
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const MUTABLE_FUNCS: &[(&str, &str)] = &[
@@ -165,7 +165,7 @@ pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
!is_immutable_annotation(expr, &checker.from_imports, &checker.import_aliases)
})
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::MutableArgumentDefault,
Range::from_located(default),
));

View File

@@ -4,11 +4,11 @@ use crate::ast::types::Range;
use crate::ast::visitor::Visitor;
use crate::checkers::ast::Checker;
use crate::python::string::is_lower;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
struct RaiseVisitor {
checks: Vec<Check>,
diagnostics: Vec<Diagnostic>,
}
impl<'a> Visitor<'a> for RaiseVisitor {
@@ -20,7 +20,7 @@ impl<'a> Visitor<'a> for RaiseVisitor {
} => match &exc.node {
ExprKind::Name { id, .. } if is_lower(id) => {}
_ => {
self.checks.push(Check::new(
self.diagnostics.push(Diagnostic::new(
violations::RaiseWithoutFromInsideExcept,
Range::from_located(stmt),
));
@@ -46,9 +46,11 @@ impl<'a> Visitor<'a> for RaiseVisitor {
}
pub fn raise_without_from_inside_except(checker: &mut Checker, body: &[Stmt]) {
let mut visitor = RaiseVisitor { checks: vec![] };
let mut visitor = RaiseVisitor {
diagnostics: vec![],
};
for stmt in body {
visitor.visit_stmt(stmt);
}
checker.checks.extend(visitor.checks);
checker.diagnostics.extend(visitor.diagnostics);
}

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Excepthandler, ExcepthandlerKind, ExprKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::violations;
@@ -19,19 +19,19 @@ pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[E
let [elt] = &elts[..] else {
continue;
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::RedundantTupleInExceptionHandler(elt.to_string()),
Range::from_located(type_),
);
if checker.patch(check.kind.code()) {
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
generator.unparse_expr(elt, 0);
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
generator.generate(),
type_.location,
type_.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}
}

View File

@@ -5,7 +5,7 @@ use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code_style::SourceCodeStyleDetector;
use crate::violations;
@@ -61,15 +61,16 @@ pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
// (i.e., it's directly within an `StmtKind::Expr`).
if let StmtKind::Expr { value: child } = &checker.current_stmt().node {
if expr == child.as_ref() {
let mut check = Check::new(violations::SetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
let mut diagnostic =
Diagnostic::new(violations::SetAttrWithConstant, Range::from_located(expr));
if checker.patch(diagnostic.kind.code()) {
diagnostic.amend(Fix::replacement(
assignment(obj, name, value, checker.style),
expr.location,
expr.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}
}
}

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind, Keyword};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B026
@@ -21,7 +21,7 @@ pub fn star_arg_unpacking_after_keyword_arg(
if arg.location <= keyword.location {
continue;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::StarArgUnpackingAfterKeywordArg,
Range::from_located(arg),
));

View File

@@ -3,7 +3,7 @@ use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B005
@@ -26,7 +26,7 @@ pub fn strip_with_multi_characters(checker: &mut Checker, expr: &Expr, func: &Ex
};
if value.len() > 1 && value.chars().unique().count() != value.len() {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::StripWithMultiCharacters,
Range::from_located(expr),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind, Unaryop};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B002
@@ -16,7 +16,7 @@ pub fn unary_prefix_increment(checker: &mut Checker, expr: &Expr, op: &Unaryop,
if !matches!(op, Unaryop::UAdd) {
return;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UnaryPrefixIncrement,
Range::from_located(expr),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B004
@@ -26,7 +26,7 @@ pub fn unreliable_callable_check(checker: &mut Checker, expr: &Expr, func: &Expr
if s != "__call__" {
return;
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UnreliableCallableCheck,
Range::from_located(expr),
));

View File

@@ -6,7 +6,7 @@ use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// Identify all `ExprKind::Name` nodes in an AST.
@@ -62,18 +62,18 @@ pub fn unused_loop_control_variable(checker: &mut Checker, target: &Expr, body:
continue;
}
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnusedLoopControlVariable(name.to_string()),
Range::from_located(expr),
);
if checker.patch(check.kind.code()) {
if checker.patch(diagnostic.kind.code()) {
// Prefix the variable name with an underscore.
check.amend(Fix::replacement(
diagnostic.amend(Fix::replacement(
format!("_{name}"),
expr.location,
expr.end_location.unwrap(),
));
}
checker.checks.push(check);
checker.diagnostics.push(diagnostic);
}
}

View File

@@ -2,12 +2,12 @@ use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
pub fn useless_comparison(checker: &mut Checker, expr: &Expr) {
if matches!(expr.node, ExprKind::Compare { .. }) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UselessComparison,
Range::from_located(expr),
));

View File

@@ -3,7 +3,7 @@ use rustpython_ast::Expr;
use crate::ast::helpers::{collect_call_paths, match_call_path};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B005
@@ -15,7 +15,7 @@ pub fn useless_contextlib_suppress(checker: &mut Checker, expr: &Expr, args: &[E
&checker.from_imports,
) && args.is_empty()
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UselessContextlibSuppress,
Range::from_located(expr),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Constant, ExprKind, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
// B018
@@ -11,7 +11,7 @@ pub fn useless_expression(checker: &mut Checker, body: &[Stmt]) {
if let StmtKind::Expr { value } = &stmt.node {
match &value.node {
ExprKind::List { .. } | ExprKind::Dict { .. } | ExprKind::Set { .. } => {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UselessExpression,
Range::from_located(value),
));
@@ -19,7 +19,7 @@ pub fn useless_expression(checker: &mut Checker, body: &[Stmt]) {
ExprKind::Constant { value: val, .. } => match &val {
Constant::Str { .. } | Constant::Ellipsis => {}
_ => {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::UselessExpression,
Range::from_located(value),
));

View File

@@ -2,7 +2,7 @@ use rustpython_ast::{Expr, ExprKind, Keyword};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
/// B905
@@ -23,7 +23,7 @@ pub fn zip_without_explicit_strict(
.map_or(false, |name| name == "strict")
})
{
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::ZipWithoutExplicitStrict,
Range::from_located(expr),
));

View File

@@ -1,4 +1,4 @@
pub mod checks;
pub mod rules;
pub mod types;
#[cfg(test)]
@@ -10,21 +10,21 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::A001, Path::new("A001.py"); "A001")]
#[test_case(CheckCode::A002, Path::new("A002.py"); "A002")]
#[test_case(CheckCode::A003, Path::new("A003.py"); "A003")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::A001, Path::new("A001.py"); "A001")]
#[test_case(RuleCode::A002, Path::new("A002.py"); "A002")]
#[test_case(RuleCode::A003, Path::new("A003.py"); "A003")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_builtins")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -3,7 +3,7 @@ use rustpython_ast::Located;
use crate::ast::types::Range;
use crate::flake8_builtins::types::ShadowingType;
use crate::python::builtins::BUILTINS;
use crate::registry::{Check, CheckKind};
use crate::registry::{Diagnostic, DiagnosticKind};
use crate::violations;
/// Check builtin name shadowing.
@@ -11,9 +11,9 @@ pub fn builtin_shadowing<T>(
name: &str,
located: &Located<T>,
node_type: ShadowingType,
) -> Option<Check> {
) -> Option<Diagnostic> {
if BUILTINS.contains(&name) {
Some(Check::new::<CheckKind>(
Some(Diagnostic::new::<DiagnosticKind>(
match node_type {
ShadowingType::Variable => {
violations::BuiltinVariableShadowing(name.to_string()).into()

View File

@@ -1,5 +1,5 @@
pub mod checks;
mod fixes;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -10,35 +10,35 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::C400, Path::new("C400.py"); "C400")]
#[test_case(CheckCode::C401, Path::new("C401.py"); "C401")]
#[test_case(CheckCode::C402, Path::new("C402.py"); "C402")]
#[test_case(CheckCode::C403, Path::new("C403.py"); "C403")]
#[test_case(CheckCode::C404, Path::new("C404.py"); "C404")]
#[test_case(CheckCode::C405, Path::new("C405.py"); "C405")]
#[test_case(CheckCode::C406, Path::new("C406.py"); "C406")]
#[test_case(CheckCode::C408, Path::new("C408.py"); "C408")]
#[test_case(CheckCode::C409, Path::new("C409.py"); "C409")]
#[test_case(CheckCode::C410, Path::new("C410.py"); "C410")]
#[test_case(CheckCode::C411, Path::new("C411.py"); "C411")]
#[test_case(CheckCode::C413, Path::new("C413.py"); "C413")]
#[test_case(CheckCode::C414, Path::new("C414.py"); "C414")]
#[test_case(CheckCode::C415, Path::new("C415.py"); "C415")]
#[test_case(CheckCode::C416, Path::new("C416.py"); "C416")]
#[test_case(CheckCode::C417, Path::new("C417.py"); "C417")]
#[test_case(RuleCode::C400, Path::new("C400.py"); "C400")]
#[test_case(RuleCode::C401, Path::new("C401.py"); "C401")]
#[test_case(RuleCode::C402, Path::new("C402.py"); "C402")]
#[test_case(RuleCode::C403, Path::new("C403.py"); "C403")]
#[test_case(RuleCode::C404, Path::new("C404.py"); "C404")]
#[test_case(RuleCode::C405, Path::new("C405.py"); "C405")]
#[test_case(RuleCode::C406, Path::new("C406.py"); "C406")]
#[test_case(RuleCode::C408, Path::new("C408.py"); "C408")]
#[test_case(RuleCode::C409, Path::new("C409.py"); "C409")]
#[test_case(RuleCode::C410, Path::new("C410.py"); "C410")]
#[test_case(RuleCode::C411, Path::new("C411.py"); "C411")]
#[test_case(RuleCode::C413, Path::new("C413.py"); "C413")]
#[test_case(RuleCode::C414, Path::new("C414.py"); "C414")]
#[test_case(RuleCode::C415, Path::new("C415.py"); "C415")]
#[test_case(RuleCode::C416, Path::new("C416.py"); "C416")]
#[test_case(RuleCode::C417, Path::new("C417.py"); "C417")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_comprehensions")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -6,7 +6,7 @@ use rustpython_ast::{
use crate::ast::types::Range;
use crate::flake8_comprehensions::fixes;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
use crate::violations;
@@ -56,19 +56,19 @@ pub fn unnecessary_generator_list(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("list", func, args, keywords)?;
if let ExprKind::GeneratorExp { .. } = argument {
let mut check = Check::new(violations::UnnecessaryGeneratorList, location);
let mut diagnostic = Diagnostic::new(violations::UnnecessaryGeneratorList, location);
if fix {
match fixes::fix_unnecessary_generator_list(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
return Some(diagnostic);
}
None
}
@@ -82,19 +82,19 @@ pub fn unnecessary_generator_set(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
if let ExprKind::GeneratorExp { .. } = argument {
let mut check = Check::new(violations::UnnecessaryGeneratorSet, location);
let mut diagnostic = Diagnostic::new(violations::UnnecessaryGeneratorSet, location);
if fix {
match fixes::fix_unnecessary_generator_set(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
return Some(diagnostic);
}
None
}
@@ -108,21 +108,22 @@ pub fn unnecessary_generator_dict(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
if let ExprKind::GeneratorExp { elt, .. } = argument {
match &elt.node {
ExprKind::Tuple { elts, .. } if elts.len() == 2 => {
let mut check = Check::new(violations::UnnecessaryGeneratorDict, location);
let mut diagnostic =
Diagnostic::new(violations::UnnecessaryGeneratorDict, location);
if fix {
match fixes::fix_unnecessary_generator_dict(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
return Some(diagnostic);
}
_ => {}
}
@@ -139,19 +140,19 @@ pub fn unnecessary_list_comprehension_set(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
if let ExprKind::ListComp { .. } = &argument {
let mut check = Check::new(violations::UnnecessaryListComprehensionSet, location);
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListComprehensionSet, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_set(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
return Some(check);
return Some(diagnostic);
}
None
}
@@ -165,7 +166,7 @@ pub fn unnecessary_list_comprehension_dict(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
let ExprKind::ListComp { elt, .. } = &argument else {
return None;
@@ -176,16 +177,16 @@ pub fn unnecessary_list_comprehension_dict(
if elts.len() != 2 {
return None;
}
let mut check = Check::new(violations::UnnecessaryListComprehensionDict, location);
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListComprehensionDict, location);
if fix {
match fixes::fix_unnecessary_list_comprehension_dict(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C405 (`set([1, 2])`)
@@ -197,26 +198,26 @@ pub fn unnecessary_literal_set(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("set", func, args, keywords)?;
let kind = match argument {
ExprKind::List { .. } => "list",
ExprKind::Tuple { .. } => "tuple",
_ => return None,
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralSet(kind.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_literal_set(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C406 (`dict([(1, 2)])`)
@@ -228,7 +229,7 @@ pub fn unnecessary_literal_dict(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = exactly_one_argument_with_matching_function("dict", func, args, keywords)?;
let (kind, elts) = match argument {
ExprKind::Tuple { elts, .. } => ("tuple", elts),
@@ -242,19 +243,19 @@ pub fn unnecessary_literal_dict(
{
return None;
}
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralDict(kind.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_literal_dict(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C408
@@ -266,7 +267,7 @@ pub fn unnecessary_collection_call(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
if !args.is_empty() {
return None;
}
@@ -280,19 +281,19 @@ pub fn unnecessary_collection_call(
}
_ => return None,
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryCollectionCall(id.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_collection_call(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C409
@@ -303,26 +304,26 @@ pub fn unnecessary_literal_within_tuple_call(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("tuple", func, args)?;
let argument_kind = match argument {
ExprKind::Tuple { .. } => "tuple",
ExprKind::List { .. } => "list",
_ => return None,
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralWithinTupleCall(argument_kind.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_literal_within_tuple_call(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C410
@@ -333,26 +334,26 @@ pub fn unnecessary_literal_within_list_call(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("list", func, args)?;
let argument_kind = match argument {
ExprKind::Tuple { .. } => "tuple",
ExprKind::List { .. } => "list",
_ => return None,
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryLiteralWithinListCall(argument_kind.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_literal_within_list_call(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C411
@@ -363,21 +364,21 @@ pub fn unnecessary_list_call(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let argument = first_argument_with_matching_function("list", func, args)?;
if !matches!(argument, ExprKind::ListComp { .. }) {
return None;
}
let mut check = Check::new(violations::UnnecessaryListCall, location);
let mut diagnostic = Diagnostic::new(violations::UnnecessaryListCall, location);
if fix {
match fixes::fix_unnecessary_list_call(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C413
@@ -388,7 +389,7 @@ pub fn unnecessary_call_around_sorted(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let outer = function_name(func)?;
if !(outer == "list" || outer == "reversed") {
return None;
@@ -400,19 +401,19 @@ pub fn unnecessary_call_around_sorted(
return None;
}
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryCallAroundSorted(outer.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_call_around_sorted(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C414
@@ -420,9 +421,9 @@ pub fn unnecessary_double_cast_or_process(
func: &Expr,
args: &[Expr],
location: Range,
) -> Option<Check> {
fn new_check(inner: &str, outer: &str, location: Range) -> Check {
Check::new(
) -> Option<Diagnostic> {
fn new_check(inner: &str, outer: &str, location: Range) -> Diagnostic {
Diagnostic::new(
violations::UnnecessaryDoubleCastOrProcess(inner.to_string(), outer.to_string()),
location,
)
@@ -463,7 +464,7 @@ pub fn unnecessary_subscript_reversal(
func: &Expr,
args: &[Expr],
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
let first_arg = args.first()?;
let id = function_name(func)?;
if !["set", "sorted", "reversed"].contains(&id) {
@@ -493,7 +494,7 @@ pub fn unnecessary_subscript_reversal(
if *val != BigInt::from(1) {
return None;
};
Some(Check::new(
Some(Diagnostic::new(
violations::UnnecessarySubscriptReversal(id.to_string()),
location,
))
@@ -507,7 +508,7 @@ pub fn unnecessary_comprehension(
locator: &SourceCodeLocator,
fix: bool,
location: Range,
) -> Option<Check> {
) -> Option<Diagnostic> {
if generators.len() != 1 {
return None;
}
@@ -525,25 +526,25 @@ pub fn unnecessary_comprehension(
ExprKind::SetComp { .. } => "set",
_ => return None,
};
let mut check = Check::new(
let mut diagnostic = Diagnostic::new(
violations::UnnecessaryComprehension(expr_kind.to_string()),
location,
);
if fix {
match fixes::fix_unnecessary_comprehension(locator, expr) {
Ok(fix) => {
check.amend(fix);
diagnostic.amend(fix);
}
Err(e) => error!("Failed to generate fix: {e}"),
}
}
Some(check)
Some(diagnostic)
}
/// C417
pub fn unnecessary_map(func: &Expr, args: &[Expr], location: Range) -> Option<Check> {
fn new_check(kind: &str, location: Range) -> Check {
Check::new(violations::UnnecessaryMap(kind.to_string()), location)
pub fn unnecessary_map(func: &Expr, args: &[Expr], location: Range) -> Option<Diagnostic> {
fn new_check(kind: &str, location: Range) -> Diagnostic {
Diagnostic::new(violations::UnnecessaryMap(kind.to_string()), location)
}
let id = function_name(func)?;
match id {

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
#[cfg(test)]
mod tests {
@@ -9,27 +9,27 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::DTZ001, Path::new("DTZ001.py"); "DTZ001")]
#[test_case(CheckCode::DTZ002, Path::new("DTZ002.py"); "DTZ002")]
#[test_case(CheckCode::DTZ003, Path::new("DTZ003.py"); "DTZ003")]
#[test_case(CheckCode::DTZ004, Path::new("DTZ004.py"); "DTZ004")]
#[test_case(CheckCode::DTZ005, Path::new("DTZ005.py"); "DTZ005")]
#[test_case(CheckCode::DTZ006, Path::new("DTZ006.py"); "DTZ006")]
#[test_case(CheckCode::DTZ007, Path::new("DTZ007.py"); "DTZ007")]
#[test_case(CheckCode::DTZ011, Path::new("DTZ011.py"); "DTZ011")]
#[test_case(CheckCode::DTZ012, Path::new("DTZ012.py"); "DTZ012")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::DTZ001, Path::new("DTZ001.py"); "DTZ001")]
#[test_case(RuleCode::DTZ002, Path::new("DTZ002.py"); "DTZ002")]
#[test_case(RuleCode::DTZ003, Path::new("DTZ003.py"); "DTZ003")]
#[test_case(RuleCode::DTZ004, Path::new("DTZ004.py"); "DTZ004")]
#[test_case(RuleCode::DTZ005, Path::new("DTZ005.py"); "DTZ005")]
#[test_case(RuleCode::DTZ006, Path::new("DTZ006.py"); "DTZ006")]
#[test_case(RuleCode::DTZ007, Path::new("DTZ007.py"); "DTZ007")]
#[test_case(RuleCode::DTZ011, Path::new("DTZ011.py"); "DTZ011")]
#[test_case(RuleCode::DTZ012, Path::new("DTZ012.py"); "DTZ012")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_datetimez")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -5,7 +5,7 @@ use crate::ast::helpers::{
};
use crate::ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
pub fn call_datetime_without_tzinfo(
@@ -22,17 +22,19 @@ pub fn call_datetime_without_tzinfo(
// No positional arg: keyword is missing or constant None.
if args.len() < 8 && !has_non_none_keyword(keywords, "tzinfo") {
checker
.checks
.push(Check::new(violations::CallDatetimeWithoutTzinfo, location));
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeWithoutTzinfo,
location,
));
return;
}
// Positional arg: is constant None.
if args.len() >= 8 && is_const_none(&args[7]) {
checker
.checks
.push(Check::new(violations::CallDatetimeWithoutTzinfo, location));
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeWithoutTzinfo,
location,
));
}
}
@@ -46,8 +48,8 @@ pub fn call_datetime_today(checker: &mut Checker, func: &Expr, location: Range)
&checker.from_imports,
) {
checker
.checks
.push(Check::new(violations::CallDatetimeToday, location));
.diagnostics
.push(Diagnostic::new(violations::CallDatetimeToday, location));
}
}
@@ -61,8 +63,8 @@ pub fn call_datetime_utcnow(checker: &mut Checker, func: &Expr, location: Range)
&checker.from_imports,
) {
checker
.checks
.push(Check::new(violations::CallDatetimeUtcnow, location));
.diagnostics
.push(Diagnostic::new(violations::CallDatetimeUtcnow, location));
}
}
@@ -75,7 +77,7 @@ pub fn call_datetime_utcfromtimestamp(checker: &mut Checker, func: &Expr, locati
"utcfromtimestamp",
&checker.from_imports,
) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeUtcfromtimestamp,
location,
));
@@ -102,7 +104,7 @@ pub fn call_datetime_now_without_tzinfo(
// no args / no args unqualified
if args.is_empty() && keywords.is_empty() {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeNowWithoutTzinfo,
location,
));
@@ -111,7 +113,7 @@ pub fn call_datetime_now_without_tzinfo(
// none args
if !args.is_empty() && is_const_none(&args[0]) {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeNowWithoutTzinfo,
location,
));
@@ -120,7 +122,7 @@ pub fn call_datetime_now_without_tzinfo(
// wrong keywords / none keyword
if !keywords.is_empty() && !has_non_none_keyword(keywords, "tz") {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeNowWithoutTzinfo,
location,
));
@@ -147,25 +149,28 @@ pub fn call_datetime_fromtimestamp(
// no args / no args unqualified
if args.len() < 2 && keywords.is_empty() {
checker
.checks
.push(Check::new(violations::CallDatetimeFromtimestamp, location));
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeFromtimestamp,
location,
));
return;
}
// none args
if args.len() > 1 && is_const_none(&args[1]) {
checker
.checks
.push(Check::new(violations::CallDatetimeFromtimestamp, location));
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeFromtimestamp,
location,
));
return;
}
// wrong keywords / none keyword
if !keywords.is_empty() && !has_non_none_keyword(keywords, "tz") {
checker
.checks
.push(Check::new(violations::CallDatetimeFromtimestamp, location));
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeFromtimestamp,
location,
));
}
}
@@ -198,7 +203,7 @@ pub fn call_datetime_strptime_without_zone(
};
let (Some(grandparent), Some(parent)) = (checker.current_expr_grandparent(), checker.current_expr_parent()) else {
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeStrptimeWithoutZone,
location,
));
@@ -221,7 +226,7 @@ pub fn call_datetime_strptime_without_zone(
}
}
checker.checks.push(Check::new(
checker.diagnostics.push(Diagnostic::new(
violations::CallDatetimeStrptimeWithoutZone,
location,
));
@@ -232,8 +237,8 @@ pub fn call_date_today(checker: &mut Checker, func: &Expr, location: Range) {
let call_path = dealias_call_path(collect_call_paths(func), &checker.import_aliases);
if match_call_path(&call_path, "datetime.date", "today", &checker.from_imports) {
checker
.checks
.push(Check::new(violations::CallDateToday, location));
.diagnostics
.push(Diagnostic::new(violations::CallDateToday, location));
}
}
@@ -247,7 +252,7 @@ pub fn call_date_fromtimestamp(checker: &mut Checker, func: &Expr, location: Ran
&checker.from_imports,
) {
checker
.checks
.push(Check::new(violations::CallDateFromtimestamp, location));
.diagnostics
.push(Diagnostic::new(violations::CallDateFromtimestamp, location));
}
}

View File

@@ -1,4 +1,4 @@
pub mod checks;
pub mod rules;
pub mod types;
#[cfg(test)]
@@ -10,19 +10,19 @@ mod tests {
use test_case::test_case;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::settings;
#[test_case(CheckCode::T100, Path::new("T100.py"); "T100")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let checks = test_path(
#[test_case(RuleCode::T100, Path::new("T100.py"); "T100")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_debugger")
.join(path)
.as_path(),
&settings::Settings::for_rule(check_code),
&settings::Settings::for_rule(rule_code),
)?;
insta::assert_yaml_snapshot!(snapshot, checks);
insta::assert_yaml_snapshot!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -4,7 +4,7 @@ use rustpython_ast::{Expr, Stmt};
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::flake8_debugger::types::DebuggerUsingType;
use crate::registry::Check;
use crate::registry::Diagnostic;
use crate::violations;
const DEBUGGERS: &[(&str, &str)] = &[
@@ -25,13 +25,13 @@ pub fn debugger_call(
func: &Expr,
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Check> {
) -> Option<Diagnostic> {
let call_path = dealias_call_path(collect_call_paths(func), import_aliases);
if DEBUGGERS
.iter()
.any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
{
Some(Check::new(
Some(Diagnostic::new(
violations::Debugger(DebuggerUsingType::Call(call_path.join("."))),
Range::from_located(expr),
))
@@ -41,7 +41,7 @@ pub fn debugger_call(
}
/// Checks for the presence of a debugger import.
pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<Check> {
pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<Diagnostic> {
// Special-case: allow `import builtins`, which is far more general than (e.g.)
// `import celery.contrib.rdb`).
if module.is_none() && name == "builtins" {
@@ -53,7 +53,7 @@ pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<
.iter()
.find(|(module_name, member)| module_name == &module && member == &name)
{
return Some(Check::new(
return Some(Diagnostic::new(
violations::Debugger(DebuggerUsingType::Import(format!("{module_name}.{member}"))),
Range::from_located(stmt),
));
@@ -62,7 +62,7 @@ pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<
.iter()
.any(|(module_name, ..)| module_name == &name)
{
return Some(Check::new(
return Some(Diagnostic::new(
violations::Debugger(DebuggerUsingType::Import(name.to_string())),
Range::from_located(stmt),
));

View File

@@ -1,4 +1,4 @@
pub mod plugins;
pub mod rules;
pub mod settings;
#[cfg(test)]
@@ -8,39 +8,35 @@ mod tests {
use anyhow::Result;
use crate::linter::test_path;
use crate::registry::CheckCode;
use crate::registry::RuleCode;
use crate::{flake8_errmsg, settings};
#[test]
fn defaults() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_errmsg/EM.py"),
&settings::Settings::for_rules(vec![
CheckCode::EM101,
CheckCode::EM102,
CheckCode::EM103,
]),
&settings::Settings::for_rules(vec![RuleCode::EM101, RuleCode::EM102, RuleCode::EM103]),
)?;
insta::assert_yaml_snapshot!("defaults", checks);
insta::assert_yaml_snapshot!("defaults", diagnostics);
Ok(())
}
#[test]
fn custom() -> Result<()> {
let checks = test_path(
let diagnostics = test_path(
Path::new("./resources/test/fixtures/flake8_errmsg/EM.py"),
&settings::Settings {
flake8_errmsg: flake8_errmsg::settings::Settings {
max_string_length: 20,
},
..settings::Settings::for_rules(vec![
CheckCode::EM101,
CheckCode::EM102,
CheckCode::EM103,
RuleCode::EM101,
RuleCode::EM102,
RuleCode::EM103,
])
},
)?;
insta::assert_yaml_snapshot!("custom", checks);
insta::assert_yaml_snapshot!("custom", diagnostics);
Ok(())
}
}

Some files were not shown because too many files have changed in this diff Show More