Compare commits

...

12 Commits

Author SHA1 Message Date
Charlie Marsh
2fe22a223b Bump version to 0.0.147 2022-11-29 20:17:58 -05:00
Charlie Marsh
e762dec677 Add one more note to README 2022-11-29 20:17:46 -05:00
Charlie Marsh
19baa50003 Remove extraneous key in pyproject.toml 2022-11-29 20:13:28 -05:00
Charlie Marsh
ab0df03a05 Fix pyproject tests to include pyupgrade 2022-11-29 20:11:16 -05:00
Charlie Marsh
808b348c5f Add W to pycodestyle list in README 2022-11-29 20:09:07 -05:00
Charlie Marsh
e55daa89e6 Uses dashes for README options (#966) 2022-11-29 20:08:03 -05:00
Charlie Marsh
b8e7d86696 Add pyupgrade's --keep-runtime-typing option (#965) 2022-11-29 20:05:32 -05:00
Charlie Marsh
ced7868559 Add format setting to pyproject.toml (#964) 2022-11-29 19:22:23 -05:00
Ramazan Elsunakev
7c344e8e4c feat: use more precise ranges for imports (#958) 2022-11-29 19:01:39 -05:00
Hayden
ca38c7ac48 Grouped format implementation (#954) 2022-11-29 18:45:16 -05:00
Guillaume Andreu Sabater
602291c0c2 README: fixed conf section typo (#959) 2022-11-29 09:27:02 -05:00
Charlie Marsh
d4cf376e9b Fix failing pyproject test 2022-11-29 00:00:43 -05:00
33 changed files with 526 additions and 386 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.146
rev: v0.0.147
hooks:
- id: ruff

6
Cargo.lock generated
View File

@@ -700,7 +700,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.146-dev.0"
version = "0.0.147-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -1805,7 +1805,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.146"
version = "0.0.147"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1856,7 +1856,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.146"
version = "0.0.147"
dependencies = [
"anyhow",
"clap 4.0.22",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.146"
version = "0.0.147"
edition = "2021"
rust-version = "1.65.0"

115
README.md
View File

@@ -29,7 +29,9 @@ functionality behind a single, common interface. Ruff can be used to replace Fla
of plugins), [`isort`](https://pypi.org/project/isort/), [`pydocstyle`](https://pypi.org/project/pydocstyle/),
[`yesqa`](https://github.com/asottile/yesqa), [`eradicate`](https://pypi.org/project/eradicate/),
and even a subset of [`pyupgrade`](https://pypi.org/project/pyupgrade/) and [`autoflake`](https://pypi.org/project/autoflake/)
all while executing tens or hundreds of times faster than any individual tool.
all while executing tens or hundreds of times faster than any individual tool. Ruff goes beyond the
responsibilities of a traditional linter, instead functioning as an advanced code transformation
tool capable of upgrading type annotations, rewriting class definitions, sorting imports, and more.
Ruff is extremely actively developed and used in major open-source projects like:
@@ -68,7 +70,7 @@ of [Conda](https://docs.conda.io/en/latest/):
1. [Configuration](#configuration)
1. [Supported Rules](#supported-rules)
1. [Pyflakes (F)](#pyflakes)
1. [pycodestyle (E)](#pycodestyle)
1. [pycodestyle (E, W)](#pycodestyle)
1. [isort (I)](#isort)
1. [pydocstyle (D)](#pydocstyle)
1. [pyupgrade (U)](#pyupgrade)
@@ -135,7 +137,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.146
rev: v0.0.147
hooks:
- id: ruff
```
@@ -236,10 +238,10 @@ See `ruff --help` for more:
```shell
Ruff: An extremely fast Python linter.
Usage: ruff [OPTIONS] <FILES>...
Usage: ruff [OPTIONS] [FILES]...
Arguments:
<FILES>...
[FILES]...
Options:
--config <CONFIG>
@@ -277,7 +279,7 @@ Options:
--per-file-ignores <PER_FILE_IGNORES>
List of mappings from file pattern to code to exclude
--format <FORMAT>
Output serialization format for error messages [default: text] [possible values: text, json]
Output serialization format for error messages [default: text] [possible values: text, json, grouped]
--show-source
Show violations with source code
--show-files
@@ -296,6 +298,8 @@ Options:
Max McCabe complexity allowed for a function
--stdin-filename <STDIN_FILENAME>
The name of the file when passing it through stdin
--explain <EXPLAIN>
Explain a rule
-h, --help
Print help information
-V, --version
@@ -1247,7 +1251,7 @@ exclude = [".venv"]
---
#### [`extend_exclude`](#extend_exclude)
#### [`extend-exclude`](#extend-exclude)
A list of file patterns to omit from linting, in addition to those specified by `exclude`.
@@ -1287,7 +1291,7 @@ ignore = ["F841"]
---
#### [`extend_ignore`](#extend_ignore)
#### [`extend-ignore`](#extend-ignore)
A list of check code prefixes to ignore, in addition to those specified by `ignore`.
@@ -1327,7 +1331,7 @@ select = ["E", "F", "B", "Q"]
---
#### [`extend_select`](#extend_select)
#### [`extend-select`](#extend-select)
A list of check code prefixes to enable, in addition to those specified by `select`.
@@ -1420,7 +1424,7 @@ unfixable = ["F401"]
---
#### [`line_length`](#line_length)
#### [`line-length`](#line-length)
The line length to use when enforcing long-lines violations (like E501).
@@ -1438,7 +1442,26 @@ line-length = 120
---
#### [`per_file_ignores`](#per_file_ignores)
#### [`format`](#format)
The style in which violation messages should be formatted: `"text"` (default), `"grouped"`
(group messages by file), or `"json"` (machine-readable).
**Default value**: `"text"`
**Type**: `SerializationFormat`
**Example usage**:
```toml
[tool.ruff]
# Group violations by containing file.
format = "grouped"
```
---
#### [`per-file-ignores`](#per-file-ignores)
A list of mappings from file pattern to check code prefixes to exclude, when considering any
matching files.
@@ -1459,7 +1482,7 @@ matching files.
---
#### [`show_source`](#show_source)
#### [`show-source`](#show-source)
Whether to show source code snippets when reporting lint error violations (overridden by the
`--show-source` command-line flag).
@@ -1473,7 +1496,7 @@ Whether to show source code snippets when reporting lint error violations (overr
```toml
[tool.ruff]
# By default, always show source code snippets.
show_source = true
show-source = true
```
---
@@ -1496,7 +1519,7 @@ src = ["src", "test"]
---
#### [`target_version`](#target_version)
#### [`target-version`](#target-version)
The Python version to target, e.g., when considering automatic code upgrades, like rewriting type
annotations. Note that the target version will _not_ be inferred from the _current_ Python version,
@@ -1516,7 +1539,7 @@ target-version = "py37"
### `flake8-annotations`
#### [`mypy_init_return`](#mypy_init_return)
#### [`mypy-init-return`](#mypy-init-return)
Whether to allow the omission of a return type hint for `__init__` if at least one argument is
annotated.
@@ -1529,12 +1552,12 @@ annotated.
```toml
[tool.ruff.flake8-annotations]
mypy_init_return = true
mypy-init-return = true
```
---
#### [`suppress_dummy_args`](#suppress_dummy_args)
#### [`suppress-dummy-args`](#suppress-dummy-args)
Whether to suppress `ANN000`-level errors for arguments matching the "dummy" variable regex (like
`_`).
@@ -1547,12 +1570,12 @@ Whether to suppress `ANN000`-level errors for arguments matching the "dummy" var
```toml
[tool.ruff.flake8-annotations]
suppress_dummy_args = true
suppress-dummy-args = true
```
---
#### [`suppress_none_returning`](#suppress_none_returning)
#### [`suppress-none-returning`](#suppress-none-returning)
Whether to suppress `ANN200`-level errors for functions that meet either of the following criteria:
@@ -1567,12 +1590,12 @@ Whether to suppress `ANN200`-level errors for functions that meet either of the
```toml
[tool.ruff.flake8-annotations]
suppress_none_returning = true
suppress-none-returning = true
```
---
#### [`allow_star_arg_any`](#allow_star_arg_any)
#### [`allow-star-arg-any`](#allow-star-arg-any)
Whether to suppress `ANN401` for dynamically typed `*args` and `**kwargs` arguments.
@@ -1584,12 +1607,12 @@ Whether to suppress `ANN401` for dynamically typed `*args` and `**kwargs` argume
```toml
[tool.ruff.flake8-annotations]
allow_star_arg_any = true
allow-star-arg-any = true
```
### `flake8-bugbear`
#### [`extend_immutable_calls`](#extend_immutable_calls)
#### [`extend-immutable-calls`](#extend-immutable-calls)
Additional callable functions to consider "immutable" when evaluating, e.g., no-mutable-default-argument
checks (`B006`).
@@ -1608,7 +1631,7 @@ extend-immutable-calls = ["fastapi.Depends", "fastapi.Query"]
### `flake8-quotes`
#### [`inline_quotes`](#inline_quotes)
#### [`inline-quotes`](#inline-quotes)
Quote style to prefer for inline strings (either "single" (`'`) or "double" (`"`)).
@@ -1625,7 +1648,7 @@ inline-quotes = "single"
---
#### [`multiline_quotes`](#multiline_quotes)
#### [`multiline-quotes`](#multiline-quotes)
Quote style to prefer for multiline strings (either "single" (`'`) or "double" (`"`)).
@@ -1642,7 +1665,7 @@ multiline-quotes = "single"
---
#### [`docstring_quotes`](#docstring_quotes)
#### [`docstring-quotes`](#docstring-quotes)
Quote style to prefer for docstrings (either "single" (`'`) or "double" (`"`)).
@@ -1659,7 +1682,7 @@ docstring-quotes = "single"
---
#### [`avoid_escape`](#avoid_escape)
#### [`avoid-escape`](#avoid-escape)
Whether to avoid using single quotes if a string contains single quotes, or vice-versa with
double quotes, as per [PEP8](https://peps.python.org/pep-0008/#string-quotes). This minimizes the
@@ -1679,7 +1702,7 @@ avoid-escape = false
### `flake8-tidy-imports`
#### [`ban_relative_imports`](#ban_relative_imports)
#### [`ban-relative-imports`](#ban-relative-imports)
Whether to ban all relative imports (`"all"`), or only those imports that extend into the parent
module and beyond (`"parents"`).
@@ -1698,7 +1721,7 @@ ban-relative-imports = "all"
### `isort`
#### [`known_first_party`](known_first_party)
#### [`known-first-party`](known-first-party)
A list of modules to consider first-party, regardless of whether they can be identified as such
via introspection of the local filesystem.
@@ -1716,7 +1739,7 @@ known-first-party = ["src"]
---
#### [`known_third_party`](known_third_party)
#### [`known-third-party`](known-third-party)
A list of modules to consider third-party, regardless of whether they can be identified as such
via introspection of the local filesystem.
@@ -1734,7 +1757,7 @@ known-third-party = ["fastapi"]
---
#### [`extra_standard_library`](extra_standard_library)
#### [`extra-standard-library`](extra-standard-library)
A list of modules to consider standard-library, in addition to those known to Ruff in advance.
@@ -1751,7 +1774,7 @@ extra-standard-library = ["path"]
### `mccabe`
#### [`max_complexity`](#max_complexity)
#### [`max-complexity`](#max-complexity)
The maximum McCabe complexity to allow before triggering `C901` errors.
@@ -1762,14 +1785,14 @@ The maximum McCabe complexity to allow before triggering `C901` errors.
**Example usage**:
```toml
[tool.ruff.flake8-tidy-imports]
[tool.ruff.mccabe]
# Flag errors (`C901`) whenever the complexity level exceeds 5.
max-complexity = 5
```
### `pep8-naming`
#### [`ignore_names`](#ignore_names)
#### [`ignore-names`](#ignore-names)
A list of names to ignore when considering `pep8-naming` violations.
@@ -1786,7 +1809,7 @@ ignore-names = ["callMethod"]
---
#### [`classmethod_decorators`](#classmethod_decorators)
#### [`classmethod-decorators`](#classmethod-decorators)
A list of decorators that, when applied to a method, indicate that the method should be treated as
a class method. For example, Ruff will expect that any method decorated by a decorator in this list
@@ -1806,7 +1829,7 @@ classmethod-decorators = ["classmethod", "pydantic.validator"]
---
#### [`staticmethod_decorators`](#staticmethod_decorators)
#### [`staticmethod-decorators`](#staticmethod-decorators)
A list of decorators that, when applied to a method, indicate that the method should be treated as
a static method. For example, Ruff will expect that any method decorated by a decorator in this list
@@ -1824,6 +1847,26 @@ has no `self` or `cls` argument.
staticmethod-decorators = ["staticmethod", "stcmthd"]
```
### `pyupgrade`
#### [`keep-runtime-typing`](#keep-runtime-typing)
Whether to avoid PEP 585 (`List[int]` -> `list[int]`) and PEP 604 (`Optional[str]` -> `str | None`)
rewrites even if a file imports `from __future__ import annotations`. Note that this setting is
only applicable when the target Python version is below 3.9 and 3.10 respectively.
**Default value**: `false`
**Type**: `bool`
**Example usage**:
```toml
[tool.ruff.pyupgrade]
# Preserve types, even if a file imports `from __future__ import annotations`.
keep-runtime-typing = true
```
## License
MIT

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.146"
version = "0.0.147"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.146"
version = "0.0.147"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.146-dev.0"
version = "0.0.147-dev.0"
edition = "2021"
[lib]

View File

@@ -251,6 +251,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
@@ -270,6 +271,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -291,6 +293,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: Some(100),
per_file_ignores: None,
@@ -310,6 +313,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -331,6 +335,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: Some(100),
per_file_ignores: None,
@@ -350,6 +355,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -371,6 +377,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
@@ -390,6 +397,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -411,6 +419,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
@@ -435,6 +444,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -459,6 +469,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
@@ -513,6 +524,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);
@@ -534,6 +546,7 @@ mod tests {
external: None,
fix: None,
fixable: None,
format: None,
ignore: Some(vec![]),
line_length: None,
per_file_ignores: None,
@@ -559,6 +572,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
});
assert_eq!(actual, expected);

View File

@@ -31,7 +31,3 @@ build-backend = "maturin"
[tool.maturin]
bindings = "bin"
strip = true
[tool.isort]
profile = "black"
known_third_party = ["fastapi", "pydantic", "starlette"]

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.146"
version = "0.0.147"
edition = "2021"
[dependencies]

View File

@@ -107,9 +107,3 @@ pub struct Binding {
/// the binding was last used.
pub used: Option<(usize, Range)>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum ImportKind {
Import,
ImportFrom,
}

View File

@@ -4,14 +4,14 @@ use serde::{Deserialize, Serialize};
pub mod fixer;
pub mod helpers;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Patch {
pub content: String,
pub location: Location,
pub end_location: Location,
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Fix {
pub patch: Patch,
}

View File

@@ -3,7 +3,6 @@
use std::collections::BTreeMap;
use std::path::Path;
use itertools::Itertools;
use log::error;
use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_ast::Withitem;
@@ -19,8 +18,7 @@ use crate::ast::helpers::{
use crate::ast::operations::extract_all_names;
use crate::ast::relocate::relocate_expr;
use crate::ast::types::{
Binding, BindingContext, BindingKind, ClassScope, FunctionScope, ImportKind, Node, Range,
Scope, ScopeKind,
Binding, BindingContext, BindingKind, ClassScope, FunctionScope, Node, Range, Scope, ScopeKind,
};
use crate::ast::visitor::{walk_excepthandler, walk_withitem, Visitor};
use crate::ast::{helpers, operations, visitor};
@@ -594,7 +592,7 @@ where
self.binding_context(),
),
used: None,
range: Range::from_located(stmt),
range: Range::from_located(alias),
},
);
} else {
@@ -629,12 +627,12 @@ where
.last()
.expect("No current scope found."))]
.id,
Range::from_located(stmt),
Range::from_located(alias),
))
} else {
None
},
range: Range::from_located(stmt),
range: Range::from_located(alias),
},
);
}
@@ -754,9 +752,9 @@ where
.last()
.expect("No current scope found."))]
.id,
Range::from_located(stmt),
Range::from_located(alias),
)),
range: Range::from_located(stmt),
range: Range::from_located(alias),
},
);
@@ -768,7 +766,7 @@ where
if !ALL_FEATURE_NAMES.contains(&&*alias.node.name) {
self.add_check(Check::new(
CheckKind::FutureFeatureNotDefined(alias.node.name.to_string()),
Range::from_located(stmt),
Range::from_located(alias),
));
}
}
@@ -830,6 +828,7 @@ where
None => alias.node.name.to_string(),
Some(parent) => format!("{parent}.{}", alias.node.name),
};
let range = Range::from_located(alias);
self.add_binding(
name,
Binding {
@@ -852,12 +851,12 @@ where
.last()
.expect("No current scope found."))]
.id,
Range::from_located(stmt),
range,
))
} else {
None
},
range: Range::from_located(stmt),
range,
},
);
}
@@ -1198,6 +1197,7 @@ where
&& self.settings.enabled.contains(&CheckCode::U007)
&& (self.settings.target_version >= PythonVersion::Py310
|| (self.settings.target_version >= PythonVersion::Py37
&& !self.settings.pyupgrade.keep_runtime_typing
&& self.annotations_future_enabled
&& self.in_deferred_annotation))
{
@@ -1240,6 +1240,7 @@ where
&& self.settings.enabled.contains(&CheckCode::U006)
&& (self.settings.target_version >= PythonVersion::Py39
|| (self.settings.target_version >= PythonVersion::Py37
&& !self.settings.pyupgrade.keep_runtime_typing
&& self.annotations_future_enabled
&& self.in_deferred_annotation))
&& typing::is_pep585_builtin(
@@ -2916,68 +2917,54 @@ impl<'a> Checker<'a> {
if self.settings.enabled.contains(&CheckCode::F401) {
// Collect all unused imports by location. (Multiple unused imports at the same
// location indicates an `import from`.)
let mut unused: BTreeMap<(ImportKind, usize, Option<usize>), Vec<&str>> =
type UnusedImport<'a> = (&'a String, &'a Range);
let mut unused: BTreeMap<(usize, Option<usize>), Vec<UnusedImport>> =
BTreeMap::new();
for (name, binding) in &scope.values {
if !matches!(
binding.kind,
BindingKind::Importation(..)
| BindingKind::SubmoduleImportation(..)
| BindingKind::FromImportation(..)
) {
continue;
}
let (full_name, context) = match &binding.kind {
BindingKind::Importation(_, full_name, context)
| BindingKind::SubmoduleImportation(_, full_name, context)
| BindingKind::FromImportation(_, full_name, context) => {
(full_name, context)
}
_ => continue,
};
let used = binding.used.is_some()
// Skip used exports from `__all__`
if binding.used.is_some()
|| all_names
.as_ref()
.map(|names| names.contains(name))
.unwrap_or_default();
if !used {
match &binding.kind {
BindingKind::FromImportation(_, full_name, context) => {
unused
.entry((
ImportKind::ImportFrom,
context.defined_by,
context.defined_in,
))
.or_default()
.push(full_name);
}
BindingKind::Importation(_, full_name, context)
| BindingKind::SubmoduleImportation(_, full_name, context) => {
unused
.entry((
ImportKind::Import,
context.defined_by,
context.defined_in,
))
.or_default()
.push(full_name);
}
_ => unreachable!("Already filtered on BindingKind."),
}
.unwrap_or_default()
{
continue;
}
unused
.entry((context.defined_by, context.defined_in))
.or_default()
.push((full_name, &binding.range));
}
for ((kind, defined_by, defined_in), full_names) in unused {
for ((defined_by, defined_in), unused_imports) in unused {
let child = self.parents[defined_by];
let parent = defined_in.map(|defined_in| self.parents[defined_in]);
let fix = if self.patch(&CheckCode::F401) {
let in_init_py = self.path.ends_with("__init__.py");
let fix = if !in_init_py && self.patch(&CheckCode::F401) {
let deleted: Vec<&Stmt> = self
.deletions
.iter()
.map(|index| self.parents[*index])
.collect();
match match kind {
ImportKind::Import => pyflakes::fixes::remove_unused_imports,
ImportKind::ImportFrom => pyflakes::fixes::remove_unused_import_froms,
}(
self.locator, &full_names, child, parent, &deleted
match pyflakes::fixes::remove_unused_imports(
self.locator,
&unused_imports,
child,
parent,
&deleted,
) {
Ok(fix) => {
if fix.patch.content.is_empty() || fix.patch.content == "pass" {
@@ -2994,24 +2981,13 @@ impl<'a> Checker<'a> {
None
};
if self.path.ends_with("__init__.py") {
checks.push(Check::new(
CheckKind::UnusedImport(
full_names.into_iter().sorted().map(String::from).collect(),
true,
),
Range::from_located(child),
));
} else {
for (full_name, range) in unused_imports {
let mut check = Check::new(
CheckKind::UnusedImport(
full_names.into_iter().sorted().map(String::from).collect(),
false,
),
Range::from_located(child),
CheckKind::UnusedImport(full_name.clone(), in_init_py),
*range,
);
if let Some(fix) = fix {
check.amend(fix);
if let Some(fix) = fix.as_ref() {
check.amend(fix.clone());
}
checks.push(check);
}

View File

@@ -449,7 +449,7 @@ pub enum CheckKind {
UndefinedExport(String),
UndefinedLocal(String),
UndefinedName(String),
UnusedImport(Vec<String>, bool),
UnusedImport(String, bool),
UnusedVariable(String),
YieldOutsideFunction,
// flake8-builtins
@@ -685,7 +685,7 @@ impl CheckCode {
CheckCode::W292 => CheckKind::NoNewLineAtEndOfFile,
CheckCode::W605 => CheckKind::InvalidEscapeSequence('c'),
// pyflakes
CheckCode::F401 => CheckKind::UnusedImport(vec!["...".to_string()], false),
CheckCode::F401 => CheckKind::UnusedImport("...".to_string(), false),
CheckCode::F402 => CheckKind::ImportShadowedByLoopVar("...".to_string(), 1),
CheckCode::F403 => CheckKind::ImportStarUsed("...".to_string()),
CheckCode::F404 => CheckKind::LateFutureImport,
@@ -1603,12 +1603,11 @@ impl CheckKind {
CheckKind::UndefinedName(name) => {
format!("Undefined name `{name}`")
}
CheckKind::UnusedImport(names, in_init_py) => {
let names = names.iter().map(|name| format!("`{name}`")).join(", ");
CheckKind::UnusedImport(name, in_init_py) => {
if *in_init_py {
format!("{names} imported but unused and missing from `__all__`")
format!("`{name}` imported but unused and missing from `__all__`")
} else {
format!("{names} imported but unused")
format!("`{name}` imported but unused")
}
}
CheckKind::UnusedVariable(name) => {

View File

@@ -7,8 +7,9 @@ use rustc_hash::FxHashMap;
use crate::checks::CheckCode;
use crate::checks_gen::CheckCodePrefix;
use crate::logging::LogLevel;
use crate::printer::SerializationFormat;
use crate::settings::types::{FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion};
use crate::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
#[derive(Debug, Parser)]
#[command(author, about = "Ruff: An extremely fast Python linter.")]
@@ -77,8 +78,8 @@ pub struct Cli {
#[arg(long, value_delimiter = ',')]
pub per_file_ignores: Vec<PatternPrefixPair>,
/// Output serialization format for error messages.
#[arg(long, value_enum, default_value_t = SerializationFormat::Text)]
pub format: SerializationFormat,
#[arg(long, value_enum)]
pub format: Option<SerializationFormat>,
/// Show violations with source code.
#[arg(long)]
pub show_source: bool,
@@ -143,8 +144,6 @@ pub fn extract_log_level(cli: &Cli) -> LogLevel {
LogLevel::Quiet
} else if cli.verbose {
LogLevel::Verbose
} else if matches!(cli.format, SerializationFormat::Json) {
LogLevel::Quiet
} else {
LogLevel::Default
}

View File

@@ -6,7 +6,7 @@ use walkdir::DirEntry;
use crate::checks::CheckCode;
use crate::fs::iter_python_files;
use crate::printer::SerializationFormat;
use crate::settings::types::SerializationFormat;
use crate::{Configuration, Settings};
/// Print the user-facing configuration settings.
@@ -43,7 +43,7 @@ struct Explanation<'a> {
/// Explain a `CheckCode` to the user.
pub fn explain(code: &CheckCode, format: SerializationFormat) -> Result<()> {
match format {
SerializationFormat::Text => {
SerializationFormat::Text | SerializationFormat::Grouped => {
println!(
"{} ({}): {}",
code.as_ref(),

View File

@@ -1,4 +1,4 @@
//! Settings for the `pep8-naming` plugin.
//! Settings for the `flake8-bugbear` plugin.
use serde::{Deserialize, Serialize};

View File

@@ -23,8 +23,9 @@ use ::ruff::fs::iter_python_files;
use ::ruff::linter::{add_noqa_to_path, autoformat_path, lint_path, lint_stdin, Diagnostics};
use ::ruff::logging::{set_up_logging, LogLevel};
use ::ruff::message::Message;
use ::ruff::printer::{Printer, SerializationFormat};
use ::ruff::printer::Printer;
use ::ruff::settings::configuration::Configuration;
use ::ruff::settings::types::SerializationFormat;
use ::ruff::settings::{pyproject, Settings};
#[cfg(feature = "update-informer")]
use ::ruff::updates;
@@ -189,14 +190,7 @@ fn inner_main() -> Result<ExitCode> {
// Extract command-line arguments.
let cli = Cli::parse();
let fix = cli.fix();
let log_level = extract_log_level(&cli);
set_up_logging(&log_level)?;
if let Some(code) = cli.explain {
commands::explain(&code, cli.format)?;
return Ok(ExitCode::SUCCESS);
}
if let Some(shell) = cli.generate_shell_completion {
shell.generate(&mut Cli::command(), &mut std::io::stdout());
@@ -205,17 +199,9 @@ fn inner_main() -> Result<ExitCode> {
// Find the project root and pyproject.toml.
let project_root = pyproject::find_project_root(&cli.files);
match &project_root {
Some(path) => debug!("Found project root at: {:?}", path),
None => debug!("Unable to identify project root; assuming current directory..."),
};
let pyproject = cli
.config
.or_else(|| pyproject::find_pyproject_toml(project_root.as_ref()));
match &pyproject {
Some(path) => debug!("Found pyproject.toml at: {:?}", path),
None => debug!("Unable to find pyproject.toml; using default settings..."),
};
// Reconcile configuration from pyproject.toml and command-line arguments.
let mut configuration =
@@ -247,6 +233,9 @@ fn inner_main() -> Result<ExitCode> {
if !cli.unfixable.is_empty() {
configuration.unfixable = cli.unfixable;
}
if let Some(format) = cli.format {
configuration.format = format;
}
if let Some(line_length) = cli.line_length {
configuration.line_length = line_length;
}
@@ -279,6 +268,30 @@ fn inner_main() -> Result<ExitCode> {
let fix_enabled: bool = configuration.fix;
let settings = Settings::from_configuration(configuration, project_root.as_ref())?;
// If we're using JSON, override the log level.
let log_level = if matches!(settings.format, SerializationFormat::Json) {
LogLevel::Quiet
} else {
log_level
};
set_up_logging(&log_level)?;
// Now that we've inferred the appropriate log level, add some debug
// information.
match &project_root {
Some(path) => debug!("Found project root at: {:?}", path),
None => debug!("Unable to identify project root; assuming current directory..."),
};
match &pyproject {
Some(path) => debug!("Found pyproject.toml at: {:?}", path),
None => debug!("Unable to find pyproject.toml; using default settings..."),
};
if let Some(code) = cli.explain {
commands::explain(&code, settings.format)?;
return Ok(ExitCode::SUCCESS);
}
if cli.show_files {
commands::show_files(&cli.files, &settings);
return Ok(ExitCode::SUCCESS);
@@ -291,24 +304,21 @@ fn inner_main() -> Result<ExitCode> {
cache_enabled = false;
}
let printer = Printer::new(&cli.format, &log_level);
let printer = Printer::new(&settings.format, &log_level);
if cli.watch {
if settings.format != SerializationFormat::Text {
eprintln!("Warning: --format 'text' is used in watch mode.");
}
if fix_enabled {
eprintln!("Warning: --fix is not enabled in watch mode.");
}
if cli.add_noqa {
eprintln!("Warning: --no-qa is not enabled in watch mode.");
}
if cli.autoformat {
eprintln!("Warning: --autoformat is not enabled in watch mode.");
}
if cli.format != SerializationFormat::Text {
eprintln!("Warning: --format 'text' is used in watch mode.");
}
// Perform an initial run instantly.
printer.clear_screen()?;
printer.write_to_user("Starting linter in watch mode...\n");

View File

@@ -1,16 +1,10 @@
use std::cmp::Ordering;
use std::fmt;
use std::path::Path;
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
use colored::Colorize;
use rustpython_parser::ast::Location;
use serde::{Deserialize, Serialize};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::fs::relativize_path;
use crate::source_code_locator::SourceCodeLocator;
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
@@ -50,57 +44,6 @@ impl PartialOrd for Message {
}
}
impl fmt::Display for Message {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let label = format!(
"{}{}{}{}{}{} {} {}",
relativize_path(Path::new(&self.filename)).bold(),
":".cyan(),
self.location.row(),
":".cyan(),
self.location.column(),
":".cyan(),
self.kind.code().as_ref().red().bold(),
self.kind.body(),
);
match &self.source {
None => write!(f, "{label}"),
Some(source) => {
let snippet = Snippet {
title: Some(Annotation {
label: Some(&label),
annotation_type: AnnotationType::Error,
// The ID (error number) is already encoded in the `label`.
id: None,
}),
footer: vec![],
slices: vec![Slice {
source: &source.contents,
line_start: self.location.row(),
annotations: vec![SourceAnnotation {
label: self.kind.code().as_ref(),
annotation_type: AnnotationType::Error,
range: source.range,
}],
// The origin (file name, line number, and column number) is already encoded
// in the `label`.
origin: None,
fold: false,
}],
opt: FormatOptions {
color: true,
..FormatOptions::default()
},
};
// `split_once(' ')` strips "error: " from `message`.
let message = DisplayList::from(snippet).to_string();
let (_, message) = message.split_once(' ').unwrap();
write!(f, "{message}")
}
}
}
}
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
pub contents: String,

View File

@@ -1,20 +1,22 @@
use std::collections::BTreeMap;
use std::path::Path;
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
use anyhow::Result;
use clap::ValueEnum;
use colored::Colorize;
use itertools::iterate;
use rustpython_parser::ast::Location;
use serde::Serialize;
use crate::checks::{CheckCode, CheckKind};
use crate::fs::relativize_path;
use crate::linter::Diagnostics;
use crate::logging::LogLevel;
use crate::message::Message;
use crate::settings::types::SerializationFormat;
use crate::tell_user;
#[derive(Clone, Copy, ValueEnum, PartialEq, Eq, Debug)]
pub enum SerializationFormat {
Text,
Json,
}
#[derive(Serialize)]
struct ExpandedMessage<'a> {
kind: &'a CheckKind,
@@ -41,6 +43,28 @@ impl<'a> Printer<'a> {
}
}
fn pre_text(&self, diagnostics: &Diagnostics) {
if self.log_level >= &LogLevel::Default {
if diagnostics.fixed > 0 {
println!(
"Found {} error(s) ({} fixed).",
diagnostics.messages.len(),
diagnostics.fixed,
);
} else if !diagnostics.messages.is_empty() {
println!("Found {} error(s).", diagnostics.messages.len());
}
}
}
/// Print the trailing hint about auto-fixable diagnostics, unless the log
/// level is below `Default` or there is nothing fixable.
fn post_text(&self, num_fixable: usize) {
    if !(self.log_level >= &LogLevel::Default) {
        return;
    }
    if num_fixable > 0 {
        println!("{num_fixable} potentially fixable with the --fix option.");
    }
}
pub fn write_once(&self, diagnostics: &Diagnostics) -> Result<()> {
if matches!(self.log_level, LogLevel::Silent) {
return Ok(());
@@ -73,27 +97,56 @@ impl<'a> Printer<'a> {
);
}
SerializationFormat::Text => {
if self.log_level >= &LogLevel::Default {
if diagnostics.fixed > 0 {
println!(
"Found {} error(s) ({} fixed).",
diagnostics.messages.len(),
diagnostics.fixed,
);
} else if !diagnostics.messages.is_empty() {
println!("Found {} error(s).", diagnostics.messages.len());
}
}
self.pre_text(diagnostics);
for message in &diagnostics.messages {
println!("{message}");
print_message(message);
}
if self.log_level >= &LogLevel::Default {
if num_fixable > 0 {
println!("{num_fixable} potentially fixable with the --fix option.");
}
self.post_text(num_fixable);
}
SerializationFormat::Grouped => {
self.pre_text(diagnostics);
println!();
// Group by filename.
let mut grouped_messages = BTreeMap::default();
for message in &diagnostics.messages {
grouped_messages
.entry(&message.filename)
.or_insert_with(Vec::new)
.push(message);
}
for (filename, messages) in grouped_messages {
// Compute the maximum number of digits in the row and column, for messages in
// this file.
let row_length = num_digits(
messages
.iter()
.map(|message| message.location.row())
.max()
.unwrap(),
);
let column_length = num_digits(
messages
.iter()
.map(|message| message.location.column())
.max()
.unwrap(),
);
// Print the filename.
println!("{}:", relativize_path(Path::new(&filename)).underline());
// Print each message.
for message in messages {
print_grouped_message(message, row_length, column_length);
}
println!();
}
self.post_text(num_fixable);
}
}
@@ -117,7 +170,7 @@ impl<'a> Printer<'a> {
println!();
}
for message in &diagnostics.messages {
println!("{message}");
print_message(message);
}
}
@@ -130,3 +183,107 @@ impl<'a> Printer<'a> {
Ok(())
}
}
/// Count the decimal digits of `n`; `0` counts as one digit.
///
/// Replaces the `itertools::iterate(..).take_while(..).count().max(1)` chain
/// with a stdlib-only equivalent: seed the sequence with `n` itself and keep
/// dividing by ten while at least one more digit remains, so the `.max(1)`
/// special case for zero disappears.
fn num_digits(n: usize) -> usize {
    std::iter::successors(Some(n), |&n| (n >= 10).then(|| n / 10)).count()
}
/// Print a single `Message` with full details: a one-line
/// `path:row:col: CODE body` header, followed by an annotated source snippet
/// when source context is available.
fn print_message(message: &Message) {
    // Header line; colors match the rest of the text output.
    println!(
        "{}{}{}{}{}{} {} {}",
        relativize_path(Path::new(&message.filename)).bold(),
        ":".cyan(),
        message.location.row(),
        ":".cyan(),
        message.location.column(),
        ":".cyan(),
        message.kind.code().as_ref().red().bold(),
        message.kind.body(),
    );
    let source = match &message.source {
        Some(source) => source,
        None => return,
    };
    let snippet = Snippet {
        title: Some(Annotation {
            label: None,
            annotation_type: AnnotationType::Error,
            // The ID (error number) is already encoded in the header line.
            id: None,
        }),
        footer: vec![],
        slices: vec![Slice {
            source: &source.contents,
            line_start: message.location.row(),
            annotations: vec![SourceAnnotation {
                label: message.kind.code().as_ref(),
                annotation_type: AnnotationType::Error,
                range: source.range,
            }],
            // The origin (file name, line number, and column number) is
            // already encoded in the header line.
            origin: None,
            fold: false,
        }],
        opt: FormatOptions {
            color: true,
            ..FormatOptions::default()
        },
    };
    // The renderer emits its own first line, but we format the header
    // ourselves, so drop everything up to (and including) the first newline.
    let rendered = DisplayList::from(snippet).to_string();
    let (_, rest) = rendered.split_once('\n').unwrap();
    println!("{rest}");
}
/// Print a grouped `Message`, assumed to be printed in a group with others
/// from the same file; `row_length`/`column_length` are the digit widths used
/// to pad row and column numbers so the check codes line up.
fn print_grouped_message(message: &Message, row_length: usize, column_length: usize) {
    let row = message.location.row();
    let column = message.location.column();
    // Right-align the row, left-align the column within the group.
    let row_pad = " ".repeat(row_length - num_digits(row));
    let column_pad = " ".repeat(column_length - num_digits(column));
    println!(
        " {}{}{}{}{} {} {}",
        row_pad,
        row,
        ":".cyan(),
        column,
        column_pad,
        message.kind.code().as_ref().red().bold(),
        message.kind.body(),
    );
    let source = match &message.source {
        Some(source) => source,
        None => return,
    };
    let snippet = Snippet {
        title: Some(Annotation {
            label: None,
            annotation_type: AnnotationType::Error,
            // The ID (error number) is already encoded in the header line.
            id: None,
        }),
        footer: vec![],
        slices: vec![Slice {
            source: &source.contents,
            line_start: message.location.row(),
            annotations: vec![SourceAnnotation {
                label: message.kind.code().as_ref(),
                annotation_type: AnnotationType::Error,
                range: source.range,
            }],
            // The origin (file name, line number, and column number) is
            // already encoded in the header line.
            origin: None,
            fold: false,
        }],
        opt: FormatOptions {
            color: true,
            ..FormatOptions::default()
        },
    };
    // Skip the renderer's first line (we format the header ourselves), then
    // indent the remainder so it sits under the grouped header.
    let rendered = DisplayList::from(snippet).to_string();
    let (_, rest) = rendered.split_once('\n').unwrap();
    println!("{}", textwrap::indent(rest, " "));
}

View File

@@ -1,7 +1,7 @@
use anyhow::Result;
use libcst_native::{
Codegen, CodegenState, CompOp, Comparison, ComparisonTarget, Expr, Expression, ImportNames,
NameOrAttribute, SmallStatement, Statement,
SmallStatement, Statement,
};
use rustpython_ast::Stmt;
@@ -14,7 +14,7 @@ use crate::source_code_locator::SourceCodeLocator;
/// Generate a Fix to remove any unused imports from an `import` statement.
pub fn remove_unused_imports(
locator: &SourceCodeLocator,
full_names: &[&str],
unused_imports: &Vec<(&String, &Range)>,
stmt: &Stmt,
parent: Option<&Stmt>,
deleted: &[&Stmt],
@@ -25,93 +25,40 @@ pub fn remove_unused_imports(
let Some(Statement::Simple(body)) = tree.body.first_mut() else {
return Err(anyhow::anyhow!("Expected node to be: Statement::Simple"));
};
let Some(SmallStatement::Import(body)) = body.body.first_mut() else {
return Err(anyhow::anyhow!(
"Expected node to be: SmallStatement::ImportFrom"
));
};
let aliases = &mut body.names;
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
// Identify unused imports from within the `import`.
let mut removable = vec![];
for (index, alias) in aliases.iter().enumerate() {
if full_names.contains(&compose_module_path(&alias.name).as_str()) {
removable.push(index);
}
}
// TODO(charlie): This is quadratic.
for index in removable.iter().rev() {
aliases.remove(*index);
}
if let Some(alias) = aliases.last_mut() {
alias.comma = trailing_comma;
}
if aliases.is_empty() {
helpers::remove_stmt(stmt, parent, deleted)
} else {
let mut state = CodegenState::default();
tree.codegen(&mut state);
Ok(Fix::replacement(
state.to_string(),
stmt.location,
stmt.end_location.unwrap(),
))
}
}
/// Generate a Fix to remove any unused imports from an `import from` statement.
pub fn remove_unused_import_froms(
locator: &SourceCodeLocator,
full_names: &[&str],
stmt: &Stmt,
parent: Option<&Stmt>,
deleted: &[&Stmt],
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(stmt));
let mut tree = match_module(&module_text)?;
let Some(Statement::Simple(body)) = tree.body.first_mut() else {
return Err(anyhow::anyhow!("Expected node to be: Statement::Simple"));
};
let Some(SmallStatement::ImportFrom(body)) = body.body.first_mut() else {
return Err(anyhow::anyhow!(
"Expected node to be: SmallStatement::ImportFrom"
));
};
let ImportNames::Aliases(aliases) = &mut body.names else {
return Err(anyhow::anyhow!("Expected node to be: Aliases"));
};
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
// Identify unused imports from within the `import from`.
let mut removable = vec![];
for (index, alias) in aliases.iter().enumerate() {
if let NameOrAttribute::N(name) = &alias.name {
let import_name = name.value.to_string();
let full_name = body
.module
.as_ref()
.map(compose_module_path)
.map(|module_name| format!("{module_name}.{import_name}"))
.unwrap_or(import_name);
if full_names.contains(&full_name.as_str()) {
removable.push(index);
let (aliases, import_module) = match body.body.first_mut() {
Some(SmallStatement::Import(import_body)) => Ok((&mut import_body.names, None)),
Some(SmallStatement::ImportFrom(import_body)) => {
if let ImportNames::Aliases(names) = &mut import_body.names {
Ok((names, import_body.module.as_ref()))
} else {
Err(anyhow::anyhow!("Expected node to be: Aliases"))
}
}
}
// TODO(charlie): This is quadratic.
for index in removable.iter().rev() {
aliases.remove(*index);
_ => Err(anyhow::anyhow!(
"Expected node to be: SmallStatement::ImportFrom or SmallStatement::Import"
)),
}?;
// Preserve the trailing comma (or not) from the last entry.
let trailing_comma = aliases.last().and_then(|alias| alias.comma.clone());
for (name_to_remove, _) in unused_imports {
let alias_index = aliases.iter().position(|alias| {
let full_name = match import_module {
Some(module_name) => format!(
"{}.{}",
compose_module_path(module_name),
compose_module_path(&alias.name)
),
None => compose_module_path(&alias.name),
};
&full_name.as_str() == name_to_remove
});
if let Some(index) = alias_index {
aliases.remove(index);
}
}
if let Some(alias) = aliases.last_mut() {

View File

@@ -1,4 +1,5 @@
mod checks;
pub mod fixes;
pub mod plugins;
pub mod settings;
pub mod types;

22
src/pyupgrade/settings.rs Normal file
View File

@@ -0,0 +1,22 @@
//! Settings for the `pyupgrade` plugin.
use serde::{Deserialize, Serialize};
/// User-facing configuration for the `pyupgrade` plugin, as deserialized from
/// `pyproject.toml` (kebab-case keys; unknown fields are rejected).
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    /// Whether to preserve runtime-evaluated typing constructs; `None` means
    /// unset, which `Settings::from_options` resolves to `false`.
    pub keep_runtime_typing: Option<bool>,
}
/// Resolved settings for the `pyupgrade` plugin, with every option
/// concretized (defaults applied).
#[derive(Debug, Hash, Default)]
pub struct Settings {
    /// Defaults to `false` (via `Default`); populated from
    /// `Options::keep_runtime_typing`.
    pub keep_runtime_typing: bool,
}
impl Settings {
    /// Build resolved settings from user-supplied `Options`, substituting the
    /// default (`false`) for any option left unset.
    pub fn from_options(options: Options) -> Self {
        let keep_runtime_typing = options.keep_runtime_typing.unwrap_or(false);
        Self { keep_runtime_typing }
    }
}

View File

@@ -11,10 +11,10 @@ use regex::Regex;
use crate::checks_gen::{CheckCodePrefix, CATEGORIES};
use crate::settings::pyproject::load_options;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion, SerializationFormat};
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, fs, isort, mccabe,
pep8_naming,
pep8_naming, pyupgrade,
};
#[derive(Debug)]
@@ -27,6 +27,7 @@ pub struct Configuration {
pub external: Vec<String>,
pub fix: bool,
pub fixable: Vec<CheckCodePrefix>,
pub format: SerializationFormat,
pub ignore: Vec<CheckCodePrefix>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
@@ -43,6 +44,7 @@ pub struct Configuration {
pub isort: isort::settings::Settings,
pub mccabe: mccabe::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
pub pyupgrade: pyupgrade::settings::Settings,
}
static DEFAULT_EXCLUDE: Lazy<Vec<FilePattern>> = Lazy::new(|| {
@@ -121,6 +123,7 @@ impl Configuration {
fix: options.fix.unwrap_or_default(),
fixable: options.fixable.unwrap_or_else(|| CATEGORIES.to_vec()),
unfixable: options.unfixable.unwrap_or_default(),
format: options.format.unwrap_or(SerializationFormat::Text),
ignore: options.ignore.unwrap_or_default(),
line_length: options.line_length.unwrap_or(88),
per_file_ignores: options
@@ -162,6 +165,10 @@ impl Configuration {
.pep8_naming
.map(pep8_naming::settings::Settings::from_options)
.unwrap_or_default(),
pyupgrade: options
.pyupgrade
.map(pyupgrade::settings::Settings::from_options)
.unwrap_or_default(),
})
}
}

View File

@@ -15,10 +15,10 @@ use rustc_hash::FxHashSet;
use crate::checks::CheckCode;
use crate::checks_gen::{CheckCodePrefix, PrefixSpecificity};
use crate::settings::configuration::Configuration;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion, SerializationFormat};
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, fs, isort, mccabe,
pep8_naming,
pep8_naming, pyupgrade,
};
pub mod configuration;
@@ -34,6 +34,7 @@ pub struct Settings {
pub extend_exclude: GlobSet,
pub external: BTreeSet<String>,
pub fixable: FxHashSet<CheckCode>,
pub format: SerializationFormat,
pub line_length: usize,
pub per_file_ignores: Vec<(GlobMatcher, GlobMatcher, BTreeSet<CheckCode>)>,
pub show_source: bool,
@@ -47,6 +48,7 @@ pub struct Settings {
pub isort: isort::settings::Settings,
pub mccabe: mccabe::settings::Settings,
pub pep8_naming: pep8_naming::settings::Settings,
pub pyupgrade: pyupgrade::settings::Settings,
}
impl Settings {
@@ -72,6 +74,7 @@ impl Settings {
extend_exclude: resolve_globset(config.extend_exclude, project_root)?,
external: BTreeSet::from_iter(config.external),
fixable: resolve_codes(&config.fixable, &config.unfixable),
format: config.format,
flake8_annotations: config.flake8_annotations,
flake8_bugbear: config.flake8_bugbear,
flake8_quotes: config.flake8_quotes,
@@ -80,6 +83,7 @@ impl Settings {
mccabe: config.mccabe,
line_length: config.line_length,
pep8_naming: config.pep8_naming,
pyupgrade: config.pyupgrade,
per_file_ignores: resolve_per_file_ignores(config.per_file_ignores, project_root)?,
src: config.src,
target_version: config.target_version,
@@ -91,12 +95,14 @@ impl Settings {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter([check_code.clone()]),
fixable: FxHashSet::from_iter([check_code]),
exclude: GlobSet::empty(),
extend_exclude: GlobSet::empty(),
external: BTreeSet::default(),
fixable: FxHashSet::from_iter([check_code]),
format: SerializationFormat::Text,
line_length: 88,
per_file_ignores: vec![],
show_source: false,
src: vec![path_dedot::CWD.clone()],
target_version: PythonVersion::Py310,
flake8_annotations: flake8_annotations::settings::Settings::default(),
@@ -106,7 +112,7 @@ impl Settings {
isort: isort::settings::Settings::default(),
mccabe: mccabe::settings::Settings::default(),
pep8_naming: pep8_naming::settings::Settings::default(),
show_source: false,
pyupgrade: pyupgrade::settings::Settings::default(),
}
}
@@ -114,12 +120,14 @@ impl Settings {
Self {
dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
enabled: FxHashSet::from_iter(check_codes.clone()),
fixable: FxHashSet::from_iter(check_codes),
exclude: GlobSet::empty(),
extend_exclude: GlobSet::empty(),
external: BTreeSet::default(),
fixable: FxHashSet::from_iter(check_codes),
format: SerializationFormat::Text,
line_length: 88,
per_file_ignores: vec![],
show_source: false,
src: vec![path_dedot::CWD.clone()],
target_version: PythonVersion::Py310,
flake8_annotations: flake8_annotations::settings::Settings::default(),
@@ -129,7 +137,7 @@ impl Settings {
isort: isort::settings::Settings::default(),
mccabe: mccabe::settings::Settings::default(),
pep8_naming: pep8_naming::settings::Settings::default(),
show_source: false,
pyupgrade: pyupgrade::settings::Settings::default(),
}
}
}
@@ -161,6 +169,7 @@ impl Hash for Settings {
self.isort.hash(state);
self.mccabe.hash(state);
self.pep8_naming.hash(state);
self.pyupgrade.hash(state);
}
}

View File

@@ -4,10 +4,10 @@ use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::PythonVersion;
use crate::settings::types::{PythonVersion, SerializationFormat};
use crate::{
flake8_annotations, flake8_bugbear, flake8_quotes, flake8_tidy_imports, isort, mccabe,
pep8_naming,
pep8_naming, pyupgrade,
};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
@@ -21,6 +21,7 @@ pub struct Options {
pub external: Option<Vec<String>>,
pub fix: Option<bool>,
pub fixable: Option<Vec<CheckCodePrefix>>,
pub format: Option<SerializationFormat>,
pub ignore: Option<Vec<CheckCodePrefix>>,
pub line_length: Option<usize>,
pub select: Option<Vec<CheckCodePrefix>>,
@@ -36,6 +37,7 @@ pub struct Options {
pub isort: Option<isort::settings::Options>,
pub mccabe: Option<mccabe::settings::Options>,
pub pep8_naming: Option<pep8_naming::settings::Options>,
pub pyupgrade: Option<pyupgrade::settings::Options>,
// Tables are required to go last.
pub per_file_ignores: Option<FxHashMap<String, Vec<CheckCodePrefix>>>,
}

View File

@@ -148,6 +148,7 @@ mod tests {
show_source: None,
src: None,
target_version: None,
format: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
@@ -156,6 +157,7 @@ mod tests {
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
})
})
);
@@ -186,6 +188,7 @@ line-length = 79
show_source: None,
src: None,
target_version: None,
format: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
@@ -194,6 +197,7 @@ line-length = 79
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
})
})
);
@@ -219,6 +223,7 @@ exclude = ["foo.py"]
ignore: None,
extend_ignore: None,
fixable: None,
format: None,
unfixable: None,
per_file_ignores: None,
dummy_variable_rgx: None,
@@ -232,6 +237,7 @@ exclude = ["foo.py"]
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
})
})
);
@@ -262,6 +268,7 @@ select = ["E501"]
show_source: None,
src: None,
target_version: None,
format: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
@@ -270,6 +277,7 @@ select = ["E501"]
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
})
})
);
@@ -301,6 +309,7 @@ ignore = ["E501"]
show_source: None,
src: None,
target_version: None,
format: None,
unfixable: None,
flake8_annotations: None,
flake8_bugbear: None,
@@ -309,6 +318,7 @@ ignore = ["E501"]
isort: None,
mccabe: None,
pep8_naming: None,
pyupgrade: None,
})
})
);
@@ -374,10 +384,11 @@ other-attribute = 1
]),
select: None,
extend_select: None,
external: None,
external: Some(vec!["V101".to_string()]),
ignore: None,
extend_ignore: None,
fixable: None,
format: None,
unfixable: None,
per_file_ignores: Some(FxHashMap::from_iter([(
"__init__.py".to_string(),
@@ -428,6 +439,7 @@ other-attribute = 1
]),
staticmethod_decorators: Some(vec!["staticmethod".to_string()]),
}),
pyupgrade: None,
}
);

View File

@@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::{anyhow, Result};
use clap::ValueEnum;
use globset::{Glob, GlobSetBuilder};
use serde::{de, Deserialize, Deserializer, Serialize};
@@ -141,3 +142,11 @@ impl FromStr for PatternPrefixPair {
Ok(Self { pattern, prefix })
}
}
/// Output format for reported diagnostics, selectable from the CLI
/// (`ValueEnum`) or `pyproject.toml` (serialized in kebab-case).
#[derive(Clone, Copy, ValueEnum, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum SerializationFormat {
    /// Plain text, one header line per message.
    Text,
    /// Machine-readable JSON.
    Json,
    /// Text output grouped by source file.
    Grouped,
}

View File

@@ -4,14 +4,14 @@ expression: checks
---
- kind:
UnusedImport:
- - functools
- functools
- false
location:
row: 2
column: 0
column: 7
end_location:
row: 2
column: 20
column: 16
fix:
patch:
content: import os
@@ -23,14 +23,14 @@ expression: checks
column: 20
- kind:
UnusedImport:
- - collections.OrderedDict
- collections.OrderedDict
- false
location:
row: 4
column: 0
row: 6
column: 4
end_location:
row: 8
column: 1
row: 6
column: 15
fix:
patch:
content: "from collections import (\n Counter,\n namedtuple,\n)"
@@ -42,11 +42,11 @@ expression: checks
column: 1
- kind:
UnusedImport:
- - logging.handlers
- logging.handlers
- false
location:
row: 12
column: 0
column: 7
end_location:
row: 12
column: 23
@@ -61,11 +61,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - shelve
- shelve
- false
location:
row: 32
column: 4
column: 11
end_location:
row: 32
column: 17
@@ -80,11 +80,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - importlib
- importlib
- false
location:
row: 33
column: 4
column: 11
end_location:
row: 33
column: 20
@@ -99,11 +99,11 @@ expression: checks
column: 20
- kind:
UnusedImport:
- - pathlib
- pathlib
- false
location:
row: 37
column: 4
column: 11
end_location:
row: 37
column: 18
@@ -118,11 +118,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - pickle
- pickle
- false
location:
row: 52
column: 8
column: 15
end_location:
row: 52
column: 21

View File

@@ -4,11 +4,11 @@ expression: checks
---
- kind:
UnusedImport:
- - a.b.c
- a.b.c
- false
location:
row: 2
column: 0
column: 16
end_location:
row: 2
column: 17
@@ -23,11 +23,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - d.e.f
- d.e.f
- false
location:
row: 3
column: 0
column: 16
end_location:
row: 3
column: 22
@@ -42,11 +42,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - h.i
- h.i
- false
location:
row: 4
column: 0
column: 7
end_location:
row: 4
column: 10
@@ -61,11 +61,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - j.k
- j.k
- false
location:
row: 5
column: 0
column: 7
end_location:
row: 5
column: 15

View File

@@ -4,11 +4,11 @@ expression: checks
---
- kind:
UnusedImport:
- - background.BackgroundTasks
- background.BackgroundTasks
- false
location:
row: 7
column: 0
column: 24
end_location:
row: 7
column: 39
@@ -23,11 +23,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - datastructures.UploadFile
- datastructures.UploadFile
- false
location:
row: 10
column: 0
column: 28
end_location:
row: 10
column: 52
@@ -42,11 +42,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - background
- background
- false
location:
row: 17
column: 0
column: 7
end_location:
row: 17
column: 17
@@ -61,11 +61,11 @@ expression: checks
column: 0
- kind:
UnusedImport:
- - datastructures
- datastructures
- false
location:
row: 20
column: 0
column: 7
end_location:
row: 20
column: 35

View File

@@ -6,7 +6,7 @@ expression: checks
FutureFeatureNotDefined: non_existent_feature
location:
row: 2
column: 0
column: 23
end_location:
row: 2
column: 43

View File

@@ -4,14 +4,14 @@ expression: checks
---
- kind:
UnusedImport:
- - models.Nut
- models.Nut
- false
location:
row: 6
column: 0
row: 8
column: 4
end_location:
row: 9
column: 1
row: 8
column: 7
fix:
patch:
content: "from models import (\n Fruit,\n)"

View File

@@ -18,7 +18,7 @@ fn test_stdin_error() -> Result<()> {
.write_stdin("import os\n")
.assert()
.failure();
assert!(str::from_utf8(&output.get_output().stdout)?.contains("-:1:1: F401"));
assert!(str::from_utf8(&output.get_output().stdout)?.contains("-:1:8: F401"));
Ok(())
}
@@ -30,7 +30,7 @@ fn test_stdin_filename() -> Result<()> {
.write_stdin("import os\n")
.assert()
.failure();
assert!(str::from_utf8(&output.get_output().stdout)?.contains("F401.py:1:1: F401"));
assert!(str::from_utf8(&output.get_output().stdout)?.contains("F401.py:1:8: F401"));
Ok(())
}