Compare commits

...

18 Commits

Author SHA1 Message Date
Charlie Marsh
949e4d4077 Bump version to 0.0.46 2022-09-24 13:10:10 -04:00
Charlie Marsh
c8cb2eead2 Remove README note about noqa patterns 2022-09-24 13:09:45 -04:00
Seamooo
02ae494a0e Enable per-file ignores (#261) 2022-09-24 13:02:34 -04:00
Harutaka Kawamura
dce86e065b Make unused variable pattern configurable (#265) 2022-09-24 10:43:39 -04:00
Harutaka Kawamura
d77979429c Print warning and error messages in stderr (#267) 2022-09-24 09:27:35 -04:00
Adrian Garcia Badaracco
a3a15d2eb2 error invalid pyproject.toml configs (#264) 2022-09-23 21:16:07 -04:00
Charlie Marsh
5af95428ff Tweak import 2022-09-23 18:53:57 -04:00
Harutaka Kawamura
6338cad4e6 Remove python 3.6 classifier (#260) 2022-09-22 20:38:09 -04:00
Harutaka Kawamura
485881877f Include error code and message in JSON output (#259) 2022-09-22 20:29:21 -04:00
Charlie Marsh
b8f517c70e Bump version to 0.0.45 2022-09-22 14:11:09 -04:00
Charlie Marsh
9f601c2abd Document noqa workflows 2022-09-22 14:10:02 -04:00
Charlie Marsh
c0ce0b0c48 Enable automatic noqa insertion (#256) 2022-09-22 13:59:06 -04:00
Charlie Marsh
e5b16973a9 Enable autofix for M001 (#255) 2022-09-22 13:21:03 -04:00
Charlie Marsh
de9ceb2fe1 Only enforce multi-line noqa directives for strings (#258) 2022-09-22 13:09:02 -04:00
Charlie Marsh
38b19b78b7 Enable noqa directives on logical lines (#257) 2022-09-22 12:56:15 -04:00
Charlie Marsh
7043e15b57 Move noqa to a separate module 2022-09-22 09:04:54 -04:00
Charlie Marsh
9594079235 Add --extend-select and --extend-ignore (#254) 2022-09-21 19:56:43 -04:00
Charlie Marsh
732f208e47 Add a lint rule to enforce noqa validity (#253) 2022-09-21 19:56:38 -04:00
21 changed files with 1151 additions and 131 deletions

8
Cargo.lock generated
View File

@@ -1801,7 +1801,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.44"
version = "0.0.46"
dependencies = [
"anyhow",
"bincode",
@@ -1846,7 +1846,7 @@ dependencies = [
[[package]]
name = "rustpython-ast"
version = "0.1.0"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=7d21c6923a506e79cc041708d83cef925efd33f4#7d21c6923a506e79cc041708d83cef925efd33f4"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=966a80597d626a9a47eaec78471164422d341453#966a80597d626a9a47eaec78471164422d341453"
dependencies = [
"num-bigint",
"rustpython-compiler-core",
@@ -1855,7 +1855,7 @@ dependencies = [
[[package]]
name = "rustpython-compiler-core"
version = "0.1.2"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=7d21c6923a506e79cc041708d83cef925efd33f4#7d21c6923a506e79cc041708d83cef925efd33f4"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=966a80597d626a9a47eaec78471164422d341453#966a80597d626a9a47eaec78471164422d341453"
dependencies = [
"bincode",
"bitflags",
@@ -1872,7 +1872,7 @@ dependencies = [
[[package]]
name = "rustpython-parser"
version = "0.1.2"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=7d21c6923a506e79cc041708d83cef925efd33f4#7d21c6923a506e79cc041708d83cef925efd33f4"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=966a80597d626a9a47eaec78471164422d341453#966a80597d626a9a47eaec78471164422d341453"
dependencies = [
"ahash",
"anyhow",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.0.44"
version = "0.0.46"
edition = "2021"
[lib]
@@ -26,7 +26,7 @@ once_cell = { version = "1.13.1" }
path-absolutize = { version = "3.0.13", features = ["once_cell_cache"] }
rayon = { version = "1.5.3" }
regex = { version = "1.6.0" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "7d21c6923a506e79cc041708d83cef925efd33f4" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "966a80597d626a9a47eaec78471164422d341453" }
serde = { version = "1.0.143", features = ["derive"] }
serde_json = { version = "1.0.83" }
toml = { version = "0.5.9" }

View File

@@ -86,7 +86,7 @@ ruff path/to/code/ --select F401 F403
See `ruff --help` for more:
```shell
ruff (v0.0.44) 0.0.44
ruff (v0.0.46)
An extremely fast Python linter.
USAGE:
@@ -96,12 +96,20 @@ ARGS:
<FILES>...
OPTIONS:
-e, --exit-zero
Exit with status code "0", even upon detecting errors
--select <SELECT>...
List of error codes to enable
--extend-select <EXTEND_SELECT>...
Like --select, but adds additional error codes on top of the selected ones
--ignore <IGNORE>...
List of error codes to ignore
--extend-ignore <EXTEND_IGNORE>...
Like --ignore, but adds additional error codes on top of the ignored ones
--exclude <EXCLUDE>...
List of paths, used to exclude files and/or directories from checks
--extend-exclude <EXTEND_EXCLUDE>...
Like --exclude, but adds additional files and directories on top of the excluded ones
-e, --exit-zero
Exit with status code "0", even upon detecting errors
-f, --fix
Attempt to automatically fix lint errors
--format <FORMAT>
@@ -109,16 +117,16 @@ OPTIONS:
json]
-h, --help
Print help information
--ignore <IGNORE>...
List of error codes to ignore
-n, --no-cache
Disable cache reads
--per-file-ignores <PER_FILE_IGNORES>...
List of mappings from file pattern to code to exclude
-q, --quiet
Disable all logging (but still exit with status code "1" upon detecting errors)
--select <SELECT>...
List of error codes to enable
--add-noqa
Enable automatic additions of noqa directives to failing lines
--show-files
See the files ruff will be run against with the current configuration options
See the files ruff will be run against with the current settings
--show-settings
See ruff's settings
-v, --verbose
@@ -129,6 +137,8 @@ OPTIONS:
Run in watch mode by re-running whenever files change
```
### Excluding files
Exclusions are based on globs, and can be either:
- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the
@@ -138,6 +148,50 @@ Exclusions are based on globs, and can be either:
(to exclude any Python files in `directory`). Note that these paths are relative to the
project root (e.g., the directory containing your `pyproject.toml`).
### Ignoring errors
To omit a lint check entirely, add it to the "ignore" list via `--ignore` or `--extend-ignore`,
either on the command-line or in your `pyproject.toml` file.
To ignore an error in-line, ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an individual error, add `# noqa: {code}` to the end of the line, like so:
```python
# Ignore F841.
x = 1 # noqa: F841
# Ignore E741 and F841.
i = 1 # noqa: E741, F841
# Ignore _all_ errors.
x = 1 # noqa
```
Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
will apply to the entire body, like so:
```python
"""Lorem ipsum dolor sit amet.
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
""" # noqa: E501
```
ruff supports several (experimental) workflows to aid in `noqa` management.
First, ruff provides a special error code, `M001`, to enforce that your `noqa` directives are
"valid", in that the errors they _say_ they ignore are actually being triggered on that line (and
thus suppressed). **You can run `ruff /path/to/file.py --extend-select M001` to flag unused `noqa`
directives.**
Second, ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
**You can run `ruff /path/to/file.py --extend-select M001 --fix` to automatically remove unused
`noqa` directives.**
Third, ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
migrating a new codebase to ruff. **You can run `ruff /path/to/file.py --add-noqa` to automatically
add `noqa` directives to all failing lines, with the appropriate error codes.**
### Compatibility with Black
ruff is compatible with [Black](https://github.com/psf/black) out-of-the-box, as long as
@@ -158,8 +212,7 @@ variables.)
Beyond rule-set parity, ruff suffers from the following limitations vis-à-vis Flake8:
1. Flake8 has a plugin architecture and supports writing custom lint rules.
2. Flake8 supports a wider range of `noqa` patterns, such as per-file ignores defined in `.flake8`.
3. ruff does not yet support parenthesized context managers.
2. ruff does not yet support parenthesized context managers.
## Rules
@@ -209,6 +262,7 @@ Beyond rule-set parity, ruff suffers from the following limitations vis-à-vis F
| F901 | RaiseNotImplemented | `raise NotImplemented` should be `raise NotImplementedError` |
| R001 | UselessObjectInheritance | Class `...` inherits from object |
| R002 | NoAssertEquals | `assertEquals` is deprecated, use `assertEqual` instead |
| M001 | UnusedNOQA | Unused `noqa` directive |
## Development

View File

@@ -8,7 +8,6 @@ classifiers = [
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",

View File

@@ -5,5 +5,12 @@
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
"""
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
""" # noqa: E501
_ = "---------------------------------------------------------------------------AAAAAAA"
_ = "---------------------------------------------------------------------------亜亜亜亜亜亜亜"

View File

@@ -10,13 +10,13 @@ except ValueError as e:
print(e)
def f():
def f1():
x = 1
y = 2
z = x + y
def g():
def f2():
foo = (1, 2)
(a, b) = (1, 2)
@@ -26,6 +26,12 @@ def g():
(x, y) = baz = bar
def h():
def f3():
locals()
x = 1
def f4():
_ = 1
__ = 1
_discarded = 1

57
resources/test/fixtures/M001.py vendored Normal file
View File

@@ -0,0 +1,57 @@
def f() -> None:
# Valid
a = 1 # noqa
# Valid
b = 2 # noqa: F841
# Invalid
c = 1 # noqa
print(c)
# Invalid
d = 1 # noqa: E501
# Invalid
d = 1 # noqa: F841, E501
# Valid
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
""" # noqa: E501
# Valid
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
""" # noqa
# Invalid
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
""" # noqa: E501, F841
# Invalid
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
""" # noqa: E501
# Invalid
_ = """Lorem ipsum dolor sit amet.
https://github.com/PyCQA/pycodestyle/pull/258/files#diff-841c622497a8033d10152bfdfb15b20b92437ecdea21a260944ea86b77b51533
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
""" # noqa

View File

@@ -1,6 +1,7 @@
use std::collections::BTreeSet;
use itertools::izip;
use regex::Regex;
use rustpython_parser::ast::{
Arg, Arguments, Cmpop, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword,
Location, Stmt, StmtKind, Unaryop,
@@ -71,7 +72,11 @@ pub fn check_not_tests(
}
/// Check UnusedVariable compliance.
pub fn check_unused_variables(scope: &Scope, locator: &dyn CheckLocator) -> Vec<Check> {
pub fn check_unused_variables(
scope: &Scope,
locator: &dyn CheckLocator,
dummy_variable_rgx: &Regex,
) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
if matches!(
@@ -82,13 +87,12 @@ pub fn check_unused_variables(scope: &Scope, locator: &dyn CheckLocator) -> Vec<
}
for (name, binding) in scope.values.iter() {
// TODO(charlie): Ignore if using `locals`.
if binding.used.is_none()
&& name != "_"
&& matches!(binding.kind, BindingKind::Assignment)
&& !dummy_variable_rgx.is_match(name)
&& name != "__tracebackhide__"
&& name != "__traceback_info__"
&& name != "__traceback_supplement__"
&& matches!(binding.kind, BindingKind::Assignment)
{
checks.push(Check::new(
CheckKind::UnusedVariable(name.to_string()),

View File

@@ -1415,8 +1415,11 @@ impl<'a> Checker<'a> {
fn check_deferred_assignments(&mut self) {
if self.settings.select.contains(&CheckCode::F841) {
while let Some(index) = self.deferred_assignments.pop() {
self.checks
.extend(checks::check_unused_variables(&self.scopes[index], self));
self.checks.extend(checks::check_unused_variables(
&self.scopes[index],
self,
&self.settings.dummy_variable_rgx,
));
}
}
}

View File

@@ -1,6 +1,11 @@
use std::collections::BTreeMap;
use rustpython_parser::ast::Location;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode, CheckKind, Fix};
use crate::noqa;
use crate::noqa::Directive;
use crate::settings::Settings;
/// Whether the given line is too long and should be reported.
@@ -19,17 +24,58 @@ fn should_enforce_line_length(line: &str, length: usize, limit: usize) -> bool {
}
}
pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings) {
pub fn check_lines(
checks: &mut Vec<Check>,
contents: &str,
noqa_line_for: &[usize],
settings: &Settings,
autofix: &fixer::Mode,
) {
let enforce_line_too_long = settings.select.contains(&CheckCode::E501);
let enforce_noqa = settings.select.contains(&CheckCode::M001);
let mut noqa_directives: BTreeMap<usize, (Directive, Vec<&str>)> = BTreeMap::new();
let mut line_checks = vec![];
let mut ignored = vec![];
for (row, line) in contents.lines().enumerate() {
let lines: Vec<&str> = contents.lines().collect();
for (lineno, line) in lines.iter().enumerate() {
// Grab the noqa (logical) line number for the current (physical) line.
// If there are newlines at the end of the file, they won't be represented in
// `noqa_line_for`, so fallback to the current line.
let noqa_lineno = noqa_line_for
.get(lineno)
.map(|lineno| lineno - 1)
.unwrap_or(lineno);
if enforce_noqa {
noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
}
// Remove any ignored checks.
// TODO(charlie): Only validate checks for the current line.
for (index, check) in checks.iter().enumerate() {
if check.location.row() == row + 1 && check.is_inline_ignored(line) {
ignored.push(index);
if check.location.row() == lineno + 1 {
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
match noqa {
(Directive::All(_), matches) => {
matches.push(check.kind.code().as_str());
ignored.push(index)
}
(Directive::Codes(_, codes), matches) => {
if codes.contains(&check.kind.code().as_str()) {
matches.push(check.kind.code().as_str());
ignored.push(index);
}
}
(Directive::None, _) => {}
}
}
}
@@ -37,16 +83,94 @@ pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings)
if enforce_line_too_long {
let line_length = line.chars().count();
if should_enforce_line_length(line, line_length, settings.line_length) {
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
let check = Check::new(
CheckKind::LineTooLong(line_length, settings.line_length),
Location::new(row + 1, settings.line_length + 1),
Location::new(lineno + 1, settings.line_length + 1),
);
if !check.is_inline_ignored(line) {
line_checks.push(check);
match noqa {
(Directive::All(_), matches) => {
matches.push(check.kind.code().as_str());
}
(Directive::Codes(_, codes), matches) => {
if codes.contains(&check.kind.code().as_str()) {
matches.push(check.kind.code().as_str());
} else {
line_checks.push(check);
}
}
(Directive::None, _) => line_checks.push(check),
}
}
}
}
// Enforce that the noqa directive was actually used.
if enforce_noqa {
for (row, (directive, matches)) in noqa_directives {
match directive {
Directive::All(column) => {
if matches.is_empty() {
let mut check = Check::new(
CheckKind::UnusedNOQA(None),
Location::new(row + 1, column + 1),
);
if matches!(autofix, fixer::Mode::Generate | fixer::Mode::Apply) {
check.amend(Fix {
content: "".to_string(),
start: Location::new(row + 1, column + 1),
end: Location::new(row + 1, lines[row].chars().count() + 1),
applied: false,
});
}
line_checks.push(check);
}
}
Directive::Codes(column, codes) => {
let mut invalid_codes = vec![];
let mut valid_codes = vec![];
for code in codes {
if !matches.contains(&code) {
invalid_codes.push(code);
} else {
valid_codes.push(code);
}
}
if !invalid_codes.is_empty() {
let mut check = Check::new(
CheckKind::UnusedNOQA(Some(invalid_codes.join(", "))),
Location::new(row + 1, column + 1),
);
if matches!(autofix, fixer::Mode::Generate | fixer::Mode::Apply) {
if valid_codes.is_empty() {
check.amend(Fix {
content: "".to_string(),
start: Location::new(row + 1, column + 1),
end: Location::new(row + 1, lines[row].chars().count() + 1),
applied: false,
});
} else {
check.amend(Fix {
content: format!(" # noqa: {}", valid_codes.join(", ")),
start: Location::new(row + 1, column + 1),
end: Location::new(row + 1, lines[row].chars().count() + 1),
applied: false,
});
}
}
line_checks.push(check);
}
}
Directive::None => {}
}
}
}
ignored.sort();
for index in ignored.iter().rev() {
checks.swap_remove(*index);
@@ -56,25 +180,28 @@ pub fn check_lines(checks: &mut Vec<Check>, contents: &str, settings: &Settings)
#[cfg(test)]
mod tests {
use std::collections::BTreeSet;
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::settings;
use super::check_lines;
use super::*;
#[test]
fn e501_non_ascii_char() {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
let noqa_line_for: Vec<usize> = vec![1];
let check_with_max_line_length = |line_length: usize| {
let mut checks: Vec<Check> = vec![];
let settings = Settings {
pyproject: None,
project_root: None,
line_length,
exclude: vec![],
extend_exclude: vec![],
select: BTreeSet::from_iter(vec![CheckCode::E501]),
};
check_lines(&mut checks, line, &settings);
check_lines(
&mut checks,
line,
&noqa_line_for,
&settings::Settings {
line_length,
..settings::Settings::for_rule(CheckCode::E501)
},
&fixer::Mode::Generate,
);
return checks;
};
assert!(!check_with_max_line_length(6).is_empty());

View File

@@ -1,12 +1,10 @@
use std::str::FromStr;
use anyhow::Result;
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_parser::ast::Location;
use serde::{Deserialize, Serialize};
pub const ALL_CHECK_CODES: [CheckCode; 44] = [
pub const DEFAULT_CHECK_CODES: [CheckCode; 42] = [
CheckCode::E402,
CheckCode::E501,
CheckCode::E711,
@@ -49,6 +47,52 @@ pub const ALL_CHECK_CODES: [CheckCode; 44] = [
CheckCode::F831,
CheckCode::F841,
CheckCode::F901,
];
pub const ALL_CHECK_CODES: [CheckCode; 45] = [
CheckCode::E402,
CheckCode::E501,
CheckCode::E711,
CheckCode::E712,
CheckCode::E713,
CheckCode::E714,
CheckCode::E721,
CheckCode::E722,
CheckCode::E731,
CheckCode::E741,
CheckCode::E742,
CheckCode::E743,
CheckCode::E902,
CheckCode::E999,
CheckCode::F401,
CheckCode::F402,
CheckCode::F403,
CheckCode::F404,
CheckCode::F405,
CheckCode::F406,
CheckCode::F407,
CheckCode::F541,
CheckCode::F601,
CheckCode::F602,
CheckCode::F621,
CheckCode::F622,
CheckCode::F631,
CheckCode::F632,
CheckCode::F633,
CheckCode::F634,
CheckCode::F701,
CheckCode::F702,
CheckCode::F704,
CheckCode::F706,
CheckCode::F707,
CheckCode::F722,
CheckCode::F821,
CheckCode::F822,
CheckCode::F823,
CheckCode::F831,
CheckCode::F841,
CheckCode::F901,
CheckCode::M001,
CheckCode::R001,
CheckCode::R002,
];
@@ -99,6 +143,7 @@ pub enum CheckCode {
F901,
R001,
R002,
M001,
}
impl FromStr for CheckCode {
@@ -150,6 +195,7 @@ impl FromStr for CheckCode {
"F901" => Ok(CheckCode::F901),
"R001" => Ok(CheckCode::R001),
"R002" => Ok(CheckCode::R002),
"M001" => Ok(CheckCode::M001),
_ => Err(anyhow::anyhow!("Unknown check code: {s}")),
}
}
@@ -202,13 +248,14 @@ impl CheckCode {
CheckCode::F901 => "F901",
CheckCode::R001 => "R001",
CheckCode::R002 => "R002",
CheckCode::M001 => "M001",
}
}
/// The source for the check (either the AST, the filesystem, or the physical lines).
pub fn lint_source(&self) -> &'static LintSource {
match self {
CheckCode::E501 => &LintSource::Lines,
CheckCode::E501 | CheckCode::M001 => &LintSource::Lines,
CheckCode::E902 | CheckCode::E999 => &LintSource::FileSystem,
_ => &LintSource::AST,
}
@@ -259,6 +306,7 @@ impl CheckCode {
CheckCode::F831 => CheckKind::DuplicateArgumentName,
CheckCode::F841 => CheckKind::UnusedVariable("...".to_string()),
CheckCode::F901 => CheckKind::RaiseNotImplemented,
CheckCode::M001 => CheckKind::UnusedNOQA(None),
CheckCode::R001 => CheckKind::UselessObjectInheritance("...".to_string()),
CheckCode::R002 => CheckKind::NoAssertEquals,
}
@@ -280,6 +328,7 @@ pub enum RejectedCmpop {
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum CheckKind {
UnusedNOQA(Option<String>),
AmbiguousClassName(String),
AmbiguousFunctionName(String),
AmbiguousVariableName(String),
@@ -376,6 +425,7 @@ impl CheckKind {
CheckKind::UnusedVariable(_) => "UnusedVariable",
CheckKind::UselessObjectInheritance(_) => "UselessObjectInheritance",
CheckKind::YieldOutsideFunction => "YieldOutsideFunction",
CheckKind::UnusedNOQA(_) => "UnusedNOQA",
}
}
@@ -399,8 +449,8 @@ impl CheckKind {
CheckKind::IfTuple => &CheckCode::F634,
CheckKind::ImportShadowedByLoopVar(_, _) => &CheckCode::F402,
CheckKind::ImportStarNotPermitted(_) => &CheckCode::F406,
CheckKind::ImportStarUsed(_) => &CheckCode::F403,
CheckKind::ImportStarUsage(_, _) => &CheckCode::F405,
CheckKind::ImportStarUsed(_) => &CheckCode::F403,
CheckKind::InvalidPrintSyntax => &CheckCode::F633,
CheckKind::IsLiteral => &CheckCode::F632,
CheckKind::LateFutureImport => &CheckCode::F404,
@@ -423,6 +473,7 @@ impl CheckKind {
CheckKind::UndefinedLocal(_) => &CheckCode::F823,
CheckKind::UndefinedName(_) => &CheckCode::F821,
CheckKind::UnusedImport(_) => &CheckCode::F401,
CheckKind::UnusedNOQA(_) => &CheckCode::M001,
CheckKind::UnusedVariable(_) => &CheckCode::F841,
CheckKind::UselessObjectInheritance(_) => &CheckCode::R001,
CheckKind::YieldOutsideFunction => &CheckCode::F704,
@@ -556,6 +607,10 @@ impl CheckKind {
CheckKind::YieldOutsideFunction => {
"a `yield` or `yield from` statement outside of a function/method".to_string()
}
CheckKind::UnusedNOQA(code) => match code {
None => "Unused `noqa` directive".to_string(),
Some(code) => format!("Unused `noqa` directive for: {code}"),
},
}
}
@@ -563,7 +618,9 @@ impl CheckKind {
pub fn fixable(&self) -> bool {
matches!(
self,
CheckKind::NoAssertEquals | CheckKind::UselessObjectInheritance(_)
CheckKind::NoAssertEquals
| CheckKind::UselessObjectInheritance(_)
| CheckKind::UnusedNOQA(_)
)
}
}
@@ -583,11 +640,6 @@ pub struct Check {
pub fix: Option<Fix>,
}
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?i)# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?").expect("Invalid regex")
});
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));
impl Check {
pub fn new(kind: CheckKind, location: Location) -> Self {
Self {
@@ -600,25 +652,4 @@ impl Check {
pub fn amend(&mut self, fix: Fix) {
self.fix = Some(fix);
}
pub fn is_inline_ignored(&self, line: &str) -> bool {
match NO_QA_REGEX.captures(line) {
Some(caps) => match caps.name("codes") {
Some(codes) => {
for code in SPLIT_COMMA_REGEX
.split(codes.as_str())
.map(|code| code.trim())
.filter(|code| !code.is_empty())
{
if code == self.kind.code().as_str() {
return true;
}
}
false
}
None => true,
},
None => false,
}
}
}

View File

@@ -1,4 +1,5 @@
use std::borrow::Cow;
use std::collections::BTreeSet;
use std::fs::File;
use std::io::{BufReader, Read};
use std::ops::Deref;
@@ -10,7 +11,8 @@ use path_absolutize::path_dedot;
use path_absolutize::Absolutize;
use walkdir::{DirEntry, WalkDir};
use crate::settings::FilePattern;
use crate::checks::CheckCode;
use crate::settings::{FilePattern, PerFileIgnore};
/// Extract the absolute path and basename (as strings) from a Path.
fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
@@ -25,9 +27,12 @@ fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
Ok((file_path, file_basename))
}
fn is_excluded(file_path: &str, file_basename: &str, exclude: &[FilePattern]) -> bool {
fn is_excluded<'a, T>(file_path: &str, file_basename: &str, exclude: T) -> bool
where
T: Iterator<Item = &'a FilePattern>,
{
for pattern in exclude {
match &pattern {
match pattern {
FilePattern::Simple(basename) => {
if *basename == file_basename {
return true;
@@ -45,7 +50,7 @@ fn is_excluded(file_path: &str, file_basename: &str, exclude: &[FilePattern]) ->
return true;
}
}
}
};
}
false
}
@@ -85,13 +90,13 @@ pub fn iter_python_files<'a>(
if has_exclude
&& (!exclude_simple || file_type.is_dir())
&& is_excluded(file_path, file_basename, exclude)
&& is_excluded(file_path, file_basename, exclude.iter())
{
debug!("Ignored path via `exclude`: {:?}", path);
false
} else if has_extend_exclude
&& (!extend_exclude_simple || file_type.is_dir())
&& is_excluded(file_path, file_basename, extend_exclude)
&& is_excluded(file_path, file_basename, extend_exclude.iter())
{
debug!("Ignored path via `extend-exclude`: {:?}", path);
false
@@ -112,6 +117,25 @@ pub fn iter_python_files<'a>(
})
}
/// Create tree set with codes matching the pattern/code pairs.
pub fn ignores_from_path<'a>(
path: &Path,
pattern_code_pairs: &'a [PerFileIgnore],
) -> Result<BTreeSet<&'a CheckCode>> {
let (file_path, file_basename) = extract_path_names(path)?;
Ok(pattern_code_pairs
.iter()
.filter(|pattern_code_pair| {
is_excluded(
file_path,
file_basename,
[&pattern_code_pair.pattern].into_iter(),
)
})
.map(|pattern_code_pair| &pattern_code_pair.code)
.collect())
}
/// Convert any path to an absolute path (based on the current working directory).
pub fn normalize_path(path: &Path) -> PathBuf {
if let Ok(path) = path.absolutize() {
@@ -180,7 +204,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar").absolutize_from(project_root).unwrap();
let exclude = vec![FilePattern::from_user(
@@ -188,7 +212,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar/baz.py")
.absolutize_from(project_root)
@@ -198,7 +222,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar").absolutize_from(project_root).unwrap();
let exclude = vec![FilePattern::from_user(
@@ -206,7 +230,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar/baz.py")
.absolutize_from(project_root)
@@ -216,7 +240,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar/baz.py")
.absolutize_from(project_root)
@@ -226,7 +250,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(is_excluded(file_path, file_basename, &exclude));
assert!(is_excluded(file_path, file_basename, exclude.iter()));
let path = Path::new("foo/bar/baz.py")
.absolutize_from(project_root)
@@ -236,7 +260,7 @@ mod tests {
&Some(project_root.to_path_buf()),
)];
let (file_path, file_basename) = extract_path_names(&path)?;
assert!(!is_excluded(file_path, file_basename, &exclude));
assert!(!is_excluded(file_path, file_basename, exclude.iter()));
Ok(())
}

View File

@@ -10,6 +10,7 @@ pub mod fs;
pub mod linter;
pub mod logging;
pub mod message;
mod noqa;
pub mod printer;
pub mod pyproject;
mod python;

View File

@@ -2,7 +2,8 @@ use std::path::Path;
use anyhow::Result;
use log::debug;
use rustpython_parser::parser;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, parser};
use crate::autofix::fixer;
use crate::autofix::fixer::fix_file;
@@ -10,25 +11,30 @@ use crate::check_ast::check_ast;
use crate::check_lines::check_lines;
use crate::checks::{Check, CheckCode, CheckKind, LintSource};
use crate::message::Message;
use crate::noqa::add_noqa;
use crate::settings::Settings;
use crate::{cache, fs};
use crate::{cache, fs, noqa};
fn check_path(
path: &Path,
contents: &str,
tokens: Vec<LexResult>,
settings: &Settings,
autofix: &fixer::Mode,
) -> Vec<Check> {
) -> Result<Vec<Check>> {
// Aggregate all checks.
let mut checks: Vec<Check> = vec![];
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Run the AST-based checks.
if settings
.select
.iter()
.any(|check_code| matches!(check_code.lint_source(), LintSource::AST))
{
match parser::parse_program(contents, "<filename>") {
match parser::parse_program_tokens(tokens, "<filename>") {
Ok(python_ast) => {
checks.extend(check_ast(&python_ast, contents, settings, autofix, path))
}
@@ -44,9 +50,20 @@ fn check_path(
}
// Run the lines-based checks.
check_lines(&mut checks, contents, settings);
check_lines(&mut checks, contents, &noqa_line_for, settings, autofix);
checks
// Create path ignores.
if !checks.is_empty() && !settings.per_file_ignores.is_empty() {
let ignores = fs::ignores_from_path(path, &settings.per_file_ignores)?;
if !ignores.is_empty() {
return Ok(checks
.into_iter()
.filter(|check| !ignores.contains(check.kind.code()))
.collect());
}
}
Ok(checks)
}
pub fn lint_path(
@@ -66,8 +83,11 @@ pub fn lint_path(
// Read the file from disk.
let contents = fs::read_file(path)?;
// Tokenize once.
let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
// Generate checks.
let mut checks = check_path(path, &contents, settings, autofix);
let mut checks = check_path(path, &contents, tokens, settings, autofix)?;
// Apply autofix.
if matches!(autofix, fixer::Mode::Apply) {
@@ -89,11 +109,30 @@ pub fn lint_path(
Ok(messages)
}
pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
// Read the file from disk.
let contents = fs::read_file(path)?;
// Tokenize once.
let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Generate checks.
let checks = check_path(path, &contents, tokens, settings, &fixer::Mode::None)?;
add_noqa(&checks, &contents, &noqa_line_for, path)
}
#[cfg(test)]
mod tests {
use std::path::Path;
use anyhow::Result;
use regex::Regex;
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
@@ -107,7 +146,8 @@ mod tests {
autofix: &fixer::Mode,
) -> Result<Vec<Check>> {
let contents = fs::read_file(path)?;
Ok(linter::check_path(path, &contents, settings, autofix))
let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
linter::check_path(path, &contents, tokens, settings, autofix)
}
#[test]
@@ -566,6 +606,21 @@ mod tests {
Ok(())
}
/// F841 honors a user-supplied `dummy-variable-rgx`: with the pattern set to
/// `^z$`, only a variable literally named `z` counts as an intentional dummy,
/// so `_`, `__`, and `_discarded` are flagged as unused (see the snapshot).
#[test]
fn f841_dummy_variable_rgx() -> Result<()> {
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/F841.py"),
        &settings::Settings {
            dummy_variable_rgx: Regex::new(r"^z$").unwrap(),
            // All other settings come from the single-rule default.
            ..settings::Settings::for_rule(CheckCode::F841)
        },
        &fixer::Mode::Generate,
    )?;
    // Sort by location for a deterministic snapshot.
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(checks);
    Ok(())
}
#[test]
fn f901() -> Result<()> {
let mut checks = check_path(
@@ -578,6 +633,18 @@ mod tests {
Ok(())
}
/// M001 (unused `noqa` directive) detection: run alongside E501 and F841 so
/// that directives suppressing those codes register as "used" and only the
/// genuinely unused ones are reported.
#[test]
fn m001() -> Result<()> {
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/M001.py"),
        &settings::Settings::for_rules(vec![CheckCode::M001, CheckCode::E501, CheckCode::F841]),
        &fixer::Mode::Generate,
    )?;
    // Sort by location for a deterministic snapshot.
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(checks);
    Ok(())
}
#[test]
fn r001() -> Result<()> {
let mut checks = check_path(

View File

@@ -1,5 +1,6 @@
extern crate core;
use regex::Regex;
use std::io;
use std::path::{Path, PathBuf};
use std::process::ExitCode;
@@ -18,12 +19,13 @@ use ::ruff::cache;
use ::ruff::checks::CheckCode;
use ::ruff::checks::CheckKind;
use ::ruff::fs::iter_python_files;
use ::ruff::linter::add_noqa_to_path;
use ::ruff::linter::lint_path;
use ::ruff::logging::set_up_logging;
use ::ruff::message::Message;
use ::ruff::printer::{Printer, SerializationFormat};
use ::ruff::pyproject;
use ::ruff::settings::{FilePattern, Settings};
use ::ruff::pyproject::{self, StrCheckCodePair};
use ::ruff::settings::{FilePattern, PerFileIgnore, Settings};
use ::ruff::tell_user;
const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
@@ -57,15 +59,24 @@ struct Cli {
/// List of error codes to enable.
#[clap(long, multiple = true)]
select: Vec<CheckCode>,
/// Like --select, but adds additional error codes on top of the selected ones.
#[clap(long, multiple = true)]
extend_select: Vec<CheckCode>,
/// List of error codes to ignore.
#[clap(long, multiple = true)]
ignore: Vec<CheckCode>,
/// Like --ignore, but adds additional error codes on top of the ignored ones.
#[clap(long, multiple = true)]
extend_ignore: Vec<CheckCode>,
/// List of paths, used to exclude files and/or directories from checks.
#[clap(long, multiple = true)]
exclude: Vec<String>,
/// Like --exclude, but adds additional files and directories on top of the excluded ones.
#[clap(long, multiple = true)]
extend_exclude: Vec<String>,
/// List of mappings from file pattern to code to exclude
#[clap(long, multiple = true)]
per_file_ignores: Vec<StrCheckCodePair>,
/// Output serialization format for error messages.
#[clap(long, arg_enum, default_value_t=SerializationFormat::Text)]
format: SerializationFormat,
@@ -75,6 +86,12 @@ struct Cli {
/// See ruff's settings.
#[clap(long, action)]
show_settings: bool,
/// Enable automatic additions of noqa directives to failing lines.
#[clap(long, action)]
add_noqa: bool,
/// Regular expression matching the name of dummy variables.
#[clap(long)]
dummy_variable_rgx: Option<Regex>,
}
#[cfg(feature = "update-informer")]
@@ -177,6 +194,35 @@ fn run_once(
Ok(messages)
}
fn add_noqa(files: &[PathBuf], settings: &Settings) -> Result<usize> {
// Collect all the files to check.
let start = Instant::now();
let paths: Vec<Result<DirEntry, walkdir::Error>> = files
.iter()
.flat_map(|path| iter_python_files(path, &settings.exclude, &settings.extend_exclude))
.collect();
let duration = start.elapsed();
debug!("Identified files to lint in: {:?}", duration);
let start = Instant::now();
let modifications: usize = paths
.par_iter()
.map(|entry| match entry {
Ok(entry) => {
let path = entry.path();
add_noqa_to_path(path, settings)
}
Err(_) => Ok(0),
})
.flatten()
.sum();
let duration = start.elapsed();
debug!("Added noqa to files in: {:?}", duration);
Ok(modifications)
}
fn inner_main() -> Result<ExitCode> {
let cli = Cli::parse();
@@ -205,23 +251,41 @@ fn inner_main() -> Result<ExitCode> {
.iter()
.map(|path| FilePattern::from_user(path, &project_root))
.collect();
let per_file_ignores: Vec<PerFileIgnore> = cli
.per_file_ignores
.into_iter()
.map(|pair| PerFileIgnore::new(pair, &project_root))
.collect();
let mut settings = Settings::from_pyproject(pyproject, project_root);
let mut settings = Settings::from_pyproject(pyproject, project_root)?;
if !exclude.is_empty() {
settings.exclude = exclude;
}
if !extend_exclude.is_empty() {
settings.extend_exclude = extend_exclude;
}
if !per_file_ignores.is_empty() {
settings.per_file_ignores = per_file_ignores;
}
if !cli.select.is_empty() {
settings.clear();
settings.select(cli.select);
}
if !cli.extend_select.is_empty() {
settings.select(cli.extend_select);
}
if !cli.ignore.is_empty() {
settings.ignore(&cli.ignore);
}
if !cli.extend_ignore.is_empty() {
settings.ignore(&cli.extend_ignore);
}
if let Some(dummy_variable_rgx) = cli.dummy_variable_rgx {
settings.dummy_variable_rgx = dummy_variable_rgx;
}
if cli.show_settings && cli.show_files {
println!("Error: specify --show-settings or show-files (not both).");
eprintln!("Error: specify --show-settings or show-files (not both).");
return Ok(ExitCode::FAILURE);
}
if cli.show_settings {
@@ -238,11 +302,15 @@ fn inner_main() -> Result<ExitCode> {
let mut printer = Printer::new(cli.format, cli.verbose);
if cli.watch {
if cli.fix {
println!("Warning: --fix is not enabled in watch mode.");
eprintln!("Warning: --fix is not enabled in watch mode.");
}
if cli.add_noqa {
eprintln!("Warning: --no-qa is not enabled in watch mode.");
}
if cli.format != SerializationFormat::Text {
println!("Warning: --format 'text' is used in watch mode.");
eprintln!("Warning: --format 'text' is used in watch mode.");
}
// Perform an initial run instantly.
@@ -279,6 +347,11 @@ fn inner_main() -> Result<ExitCode> {
Err(e) => return Err(e.into()),
}
}
} else if cli.add_noqa {
let modifications = add_noqa(&cli.files, &settings)?;
if modifications > 0 {
println!("Added {modifications} noqa directives.");
}
} else {
let messages = run_once(&cli.files, &settings, !cli.no_cache, cli.fix)?;
if !cli.quiet {
@@ -299,6 +372,9 @@ fn inner_main() -> Result<ExitCode> {
fn main() -> ExitCode {
match inner_main() {
Ok(code) => code,
Err(_) => ExitCode::FAILURE,
Err(err) => {
eprintln!("{} {:?}", "error".red().bold(), err);
ExitCode::FAILURE
}
}
}

280
src/noqa.rs Normal file
View File

@@ -0,0 +1,280 @@
use std::cmp::{max, min};
use crate::checks::{Check, CheckCode};
use anyhow::Result;
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_parser::lexer::{LexResult, Tok};
use std::collections::{BTreeMap, BTreeSet};
use std::fs;
use std::path::Path;
// Matches a `# noqa` comment, case-insensitively, with an optional
// colon-separated list of check codes (e.g. `# noqa: E501, F841`). The
// `noqa` capture spans the whole directive (including leading whitespace);
// the `codes` capture holds just the code list.
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
    Regex::new(r"(?i)(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
        .expect("Invalid regex")
});

// Splits a directive's code list on commas and/or whitespace.
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));

/// The `# noqa` directive (if any) found on a single line.
#[derive(Debug)]
pub enum Directive<'a> {
    // No directive on the line.
    None,
    // Bare `# noqa`: suppress all checks. Carries the directive's byte offset.
    All(usize),
    // `# noqa: <codes>`: suppress only the listed codes. Carries the
    // directive's byte offset and the listed code strings.
    Codes(usize, Vec<&'a str>),
}
/// Extract the `# noqa` directive, if any, from a single line of source.
///
/// Returns `Directive::All` for a bare `# noqa`, `Directive::Codes` for a
/// `# noqa: X101, Y202`-style directive (carrying the directive's start
/// offset and the listed codes), and `Directive::None` otherwise.
pub fn extract_noqa_directive(line: &str) -> Directive {
    let caps = match NO_QA_REGEX.captures(line) {
        Some(caps) => caps,
        None => return Directive::None,
    };
    let noqa = match caps.name("noqa") {
        Some(noqa) => noqa,
        None => return Directive::None,
    };
    match caps.name("codes") {
        // Code list present: split on commas/whitespace, dropping empties.
        Some(codes) => Directive::Codes(
            noqa.start(),
            SPLIT_COMMA_REGEX
                .split(codes.as_str())
                .map(str::trim)
                .filter(|code| !code.is_empty())
                .collect(),
        ),
        // Bare directive: suppress everything on the line.
        None => Directive::All(noqa.start()),
    }
}
/// Map every physical line of the tokenized source to the 1-indexed line
/// whose `# noqa` directive governs it: entry `i` is the governing line for
/// physical line `i + 1`.
///
/// Lines map to themselves, except lines inside a multi-line string, which
/// all map to the string's final line (where the `# noqa` comment lives).
/// Trailing lines with no tokens produce no entries; callers fall back to
/// the physical line itself (see `add_noqa_inner`).
pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
    let mut noqa_line_for: Vec<usize> = vec![];
    // Whether the previous token was a string: the only multi-line token
    // currently remapped.
    let mut last_is_string = false;
    // Row of the most recently seen token; used to detect skipped blank lines.
    let mut last_seen = usize::MIN;
    // Row extent of the logical line currently being accumulated.
    let mut min_line = usize::MAX;
    let mut max_line = usize::MIN;
    for (start, tok, end) in lxr.iter().flatten() {
        if matches!(tok, Tok::EndOfFile) {
            break;
        }
        if matches!(tok, Tok::Newline) {
            // A Newline terminates the logical line: flush the accumulated
            // row range into the mapping.
            min_line = min(min_line, start.row());
            max_line = max(max_line, start.row());
            // For now, we only care about preserving noqa directives across multi-line strings.
            if last_is_string {
                // Every physical row of the logical line maps to its last row.
                noqa_line_for.extend(vec![max_line; (max_line + 1) - min_line]);
            } else {
                // Otherwise, each physical row maps to itself.
                for i in (min_line - 1)..(max_line) {
                    noqa_line_for.push(i + 1);
                }
            }
            min_line = usize::MAX;
            max_line = usize::MIN;
        } else {
            // Handle empty lines.
            if start.row() > last_seen {
                // Blank rows between tokens map to themselves.
                for i in last_seen..(start.row() - 1) {
                    noqa_line_for.push(i + 1);
                }
            }
            min_line = min(min_line, start.row());
            max_line = max(max_line, end.row());
        }
        last_seen = start.row();
        last_is_string = matches!(tok, Tok::String { .. });
    }
    noqa_line_for
}
/// Build a copy of `contents` in which every line that triggered at least one
/// check carries a `# noqa` directive listing the triggered codes (replacing
/// any directive already on the line).
///
/// Returns the number of lines that received a directive, together with the
/// rewritten contents.
fn add_noqa_inner(
    checks: &[Check],
    contents: &str,
    noqa_line_for: &[usize],
) -> Result<(usize, String)> {
    let lines: Vec<&str> = contents.lines().collect();

    // Index the triggered codes by the (logical) line that should host the
    // directive. A single pass over the checks avoids the O(lines * checks)
    // scan of pairing every line with every check.
    let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
    for check in checks {
        let lineno = check.location.row() - 1;
        // Ignore checks reported past the end of the file.
        if lineno >= lines.len() {
            continue;
        }
        // Grab the noqa (logical) line number for the check's (physical) line.
        // If there are newlines at the end of the file, they won't be
        // represented in `noqa_line_for`, so fallback to the current line.
        let noqa_lineno = noqa_line_for
            .get(lineno)
            .map(|lineno| lineno - 1)
            .unwrap_or(lineno);
        matches_by_line
            .entry(noqa_lineno)
            .or_insert_with(BTreeSet::new)
            .insert(check.kind.code());
    }

    let mut count: usize = 0;
    let mut output = "".to_string();
    for (lineno, line) in lines.iter().enumerate() {
        match matches_by_line.get(&lineno) {
            None => {
                // No checks on this line: copy it through unchanged.
                output.push_str(line);
                output.push('\n');
            }
            Some(codes) => {
                // Strip any existing directive before appending the
                // regenerated one.
                match extract_noqa_directive(line) {
                    Directive::None => {
                        output.push_str(line);
                    }
                    Directive::All(start) => output.push_str(&line[..start]),
                    Directive::Codes(start, _) => output.push_str(&line[..start]),
                };
                let codes: Vec<&str> = codes.iter().map(|code| code.as_str()).collect();
                output.push_str(" # noqa: ");
                output.push_str(&codes.join(", "));
                output.push('\n');
                count += 1;
            }
        }
    }

    Ok((count, output))
}
/// Rewrite the file at `path` in place, adding `# noqa` directives for every
/// line on which one of `checks` fired.
///
/// `noqa_line_for` maps physical lines to the logical line that hosts the
/// directive (see `extract_noqa_line_for`). Returns the number of lines that
/// received a directive. Note the file is rewritten even when that count is
/// zero.
pub fn add_noqa(
    checks: &Vec<Check>,
    contents: &str,
    noqa_line_for: &[usize],
    path: &Path,
) -> Result<usize> {
    let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
    fs::write(path, output)?;
    Ok(count)
}
#[cfg(test)]
mod tests {
use crate::checks::{Check, CheckKind};
use anyhow::Result;
use rustpython_parser::ast::Location;
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use crate::noqa::{add_noqa_inner, extract_noqa_line_for};
#[test]
fn extraction() -> Result<()> {
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"
x = 1
y = 2
z = x + 1",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = '''abc
def
ghi
'''
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), vec![4, 4, 4, 4, 5, 6]);
Ok(())
}
#[test]
fn modification() -> Result<()> {
let checks = vec![];
let contents = "x = 1";
let noqa_line_for = vec![1];
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 0);
assert_eq!(output.trim(), contents.trim());
let checks = vec![Check::new(
CheckKind::UnusedVariable("x".to_string()),
Location::new(1, 1),
)];
let contents = "x = 1";
let noqa_line_for = vec![1];
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: F841".trim());
let checks = vec![
Check::new(
CheckKind::AmbiguousVariableName("x".to_string()),
Location::new(1, 1),
),
Check::new(
CheckKind::UnusedVariable("x".to_string()),
Location::new(1, 1),
),
];
let contents = "x = 1 # noqa: E741";
let noqa_line_for = vec![1];
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
let checks = vec![
Check::new(
CheckKind::AmbiguousVariableName("x".to_string()),
Location::new(1, 1),
),
Check::new(
CheckKind::UnusedVariable("x".to_string()),
Location::new(1, 1),
),
];
let contents = "x = 1 # noqa";
let noqa_line_for = vec![1];
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
Ok(())
}
}

View File

@@ -1,7 +1,10 @@
use anyhow::Result;
use clap::ValueEnum;
use colored::Colorize;
use rustpython_parser::ast::Location;
use serde::Serialize;
use crate::checks::{CheckCode, CheckKind};
use crate::message::Message;
use crate::tell_user;
@@ -11,6 +14,16 @@ pub enum SerializationFormat {
Json,
}
/// JSON-serializable view of a `Message`, exposing the check code and the
/// rendered message body alongside the raw kind for machine consumers.
#[derive(Serialize)]
struct ExpandedMessage<'a> {
    // The raw check kind (serialized with its payload).
    kind: &'a CheckKind,
    // The check's code (e.g. E501), derived from `kind`.
    code: &'a CheckCode,
    // Human-readable message body, derived from `kind`.
    message: String,
    // Whether an autofix was applied for this message.
    fixed: bool,
    // Source location (row/column) of the violation.
    location: Location,
    // Path of the file the violation was found in.
    filename: &'a String,
}
pub struct Printer {
format: SerializationFormat,
verbose: bool,
@@ -31,7 +44,22 @@ impl Printer {
match self.format {
SerializationFormat::Json => {
println!("{}", serde_json::to_string_pretty(&messages)?)
println!(
"{}",
serde_json::to_string_pretty(
&messages
.iter()
.map(|m| ExpandedMessage {
kind: &m.kind,
code: m.kind.code(),
message: m.kind.body(),
fixed: m.fixed,
location: m.location,
filename: &m.filename,
})
.collect::<Vec<_>>()
)?
)
}
SerializationFormat::Text => {
if !fixed.is_empty() {

View File

@@ -1,30 +1,25 @@
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::Result;
use anyhow::{anyhow, Result};
use common_path::common_path_all;
use path_absolutize::Absolutize;
use serde::Deserialize;
use serde::de;
use serde::{Deserialize, Deserializer};
use crate::checks::CheckCode;
use crate::fs;
pub fn load_config(pyproject: &Option<PathBuf>) -> Config {
pub fn load_config(pyproject: &Option<PathBuf>) -> Result<Config> {
match pyproject {
Some(pyproject) => match parse_pyproject_toml(pyproject) {
Ok(pyproject) => pyproject
.tool
.and_then(|tool| tool.ruff)
.unwrap_or_default(),
Err(e) => {
println!("Failed to load pyproject.toml: {:?}", e);
println!("Falling back to default configuration...");
Default::default()
}
},
Some(pyproject) => Ok(parse_pyproject_toml(pyproject)?
.tool
.and_then(|tool| tool.ruff)
.unwrap_or_default()),
None => {
println!("No pyproject.toml found.");
println!("Falling back to default configuration...");
Default::default()
eprintln!("No pyproject.toml found.");
eprintln!("Falling back to default configuration...");
Ok(Default::default())
}
}
}
@@ -37,6 +32,50 @@ pub struct Config {
pub extend_exclude: Option<Vec<String>>,
pub select: Option<Vec<CheckCode>>,
pub ignore: Option<Vec<CheckCode>>,
pub per_file_ignores: Option<Vec<StrCheckCodePair>>,
pub dummy_variable_rgx: Option<String>,
}
/// An unresolved `<pattern>:<code>` pair, as supplied by the user on the
/// command line or in `pyproject.toml` (e.g. `"tests/*.py:F401"`).
#[derive(Debug, PartialEq, Eq)]
pub struct StrCheckCodePair {
    // File pattern, kept as a raw string here; resolved into a `FilePattern`
    // later (see `PerFileIgnore::new`).
    pub pattern: String,
    // The check code to ignore for files matching `pattern`.
    pub code: CheckCode,
}

impl StrCheckCodePair {
    // Human-readable description of the expected input format, used in
    // parse and deserialization error messages.
    const EXPECTED_PATTERN: &'static str = "<FilePattern>:<CheckCode> pattern";
}
impl<'de> Deserialize<'de> for StrCheckCodePair {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let str_result = String::deserialize(deserializer)?;
Self::from_str(str_result.as_str()).map_err(|_| {
de::Error::invalid_value(
de::Unexpected::Str(str_result.as_str()),
&Self::EXPECTED_PATTERN,
)
})
}
}
impl FromStr for StrCheckCodePair {
    type Err = anyhow::Error;

    /// Parse a `"pattern:code"` string, rejecting inputs that do not contain
    /// exactly one `:` separator or whose code is not a known check code.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let mut parts = string.splitn(3, ':');
        // Exactly two fields: a pattern, a code, and no third segment.
        let (pattern_str, code_string) = match (parts.next(), parts.next(), parts.next()) {
            (Some(pattern), Some(code), None) => (pattern, code),
            _ => return Err(anyhow!("Expected {}", Self::EXPECTED_PATTERN)),
        };
        let code = CheckCode::from_str(code_string)?;
        Ok(Self {
            pattern: pattern_str.into(),
            code,
        })
    }
}
#[derive(Debug, PartialEq, Eq, Deserialize)]
@@ -102,9 +141,11 @@ pub fn find_project_root(sources: &[PathBuf]) -> Option<PathBuf> {
mod tests {
use std::env::current_dir;
use std::path::PathBuf;
use std::str::FromStr;
use anyhow::Result;
use super::StrCheckCodePair;
use crate::checks::CheckCode;
use crate::pyproject::{
find_project_root, find_pyproject_toml, parse_pyproject_toml, Config, PyProject, Tools,
@@ -137,6 +178,8 @@ mod tests {
extend_exclude: None,
select: None,
ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
})
})
);
@@ -157,6 +200,8 @@ line-length = 79
extend_exclude: None,
select: None,
ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
})
})
);
@@ -177,6 +222,8 @@ exclude = ["foo.py"]
extend_exclude: None,
select: None,
ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
})
})
);
@@ -197,6 +244,8 @@ select = ["E501"]
extend_exclude: None,
select: Some(vec![CheckCode::E501]),
ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
})
})
);
@@ -217,6 +266,8 @@ ignore = ["E501"]
extend_exclude: None,
select: None,
ignore: Some(vec![CheckCode::E501]),
per_file_ignores: None,
dummy_variable_rgx: None,
})
})
);
@@ -281,9 +332,29 @@ other-attribute = 1
]),
select: None,
ignore: None,
per_file_ignores: None,
dummy_variable_rgx: None,
}
);
Ok(())
}
#[test]
fn str_check_code_pair_strings() {
let result = StrCheckCodePair::from_str("foo:E501");
assert!(result.is_ok());
let result = StrCheckCodePair::from_str("E501:foo");
assert!(result.is_err());
let result = StrCheckCodePair::from_str("E501");
assert!(result.is_err());
let result = StrCheckCodePair::from_str("foo");
assert!(result.is_err());
let result = StrCheckCodePair::from_str("foo:E501:E402");
assert!(result.is_err());
let result = StrCheckCodePair::from_str("**/bar:E501");
assert!(result.is_ok());
let result = StrCheckCodePair::from_str("bar:E502");
assert!(result.is_err());
}
}

View File

@@ -2,14 +2,16 @@ use std::collections::BTreeSet;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
use glob::Pattern;
use once_cell::sync::Lazy;
use regex::Regex;
use crate::checks::{CheckCode, ALL_CHECK_CODES};
use crate::checks::{CheckCode, DEFAULT_CHECK_CODES};
use crate::fs;
use crate::pyproject::load_config;
use crate::pyproject::{load_config, StrCheckCodePair};
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Hash)]
pub enum FilePattern {
Simple(&'static str),
Complex(Pattern, Option<Pattern>),
@@ -34,6 +36,20 @@ impl FilePattern {
}
}
/// A resolved per-file ignore: files matching `pattern` skip the check `code`.
#[derive(Debug, Clone, Hash)]
pub struct PerFileIgnore {
    // Compiled file pattern, resolved relative to the project root.
    pub pattern: FilePattern,
    // The check code suppressed for matching files.
    pub code: CheckCode,
}

impl PerFileIgnore {
    /// Resolve a user-supplied `StrCheckCodePair` into a `PerFileIgnore`,
    /// expanding its raw pattern string relative to `project_root`.
    pub fn new(user_in: StrCheckCodePair, project_root: &Option<PathBuf>) -> Self {
        let pattern = FilePattern::from_user(user_in.pattern.as_str(), project_root);
        let code = user_in.code;
        Self { pattern, code }
    }
}
#[derive(Debug)]
pub struct Settings {
pub pyproject: Option<PathBuf>,
@@ -42,6 +58,8 @@ pub struct Settings {
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
pub select: BTreeSet<CheckCode>,
pub per_file_ignores: Vec<PerFileIgnore>,
pub dummy_variable_rgx: Regex,
}
impl Settings {
@@ -53,6 +71,8 @@ impl Settings {
exclude: vec![],
extend_exclude: vec![],
select: BTreeSet::from([check_code]),
per_file_ignores: vec![],
dummy_variable_rgx: DEFAULT_DUMMY_VARIABLE_RGX.clone(),
}
}
@@ -64,6 +84,8 @@ impl Settings {
exclude: vec![],
extend_exclude: vec![],
select: BTreeSet::from_iter(check_codes),
per_file_ignores: vec![],
dummy_variable_rgx: DEFAULT_DUMMY_VARIABLE_RGX.clone(),
}
}
}
@@ -71,9 +93,13 @@ impl Settings {
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
self.line_length.hash(state);
self.dummy_variable_rgx.as_str().hash(state);
for value in self.select.iter() {
value.hash(state);
}
for value in self.per_file_ignores.iter() {
value.hash(state);
}
}
}
@@ -101,9 +127,15 @@ static DEFAULT_EXCLUDE: Lazy<Vec<FilePattern>> = Lazy::new(|| {
]
});
static DEFAULT_DUMMY_VARIABLE_RGX: Lazy<Regex> =
Lazy::new(|| Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap());
impl Settings {
pub fn from_pyproject(pyproject: Option<PathBuf>, project_root: Option<PathBuf>) -> Self {
let config = load_config(&pyproject);
pub fn from_pyproject(
pyproject: Option<PathBuf>,
project_root: Option<PathBuf>,
) -> Result<Self> {
let config = load_config(&pyproject)?;
let mut settings = Settings {
line_length: config.line_length.unwrap_or(88),
exclude: config
@@ -127,7 +159,21 @@ impl Settings {
select: if let Some(select) = config.select {
BTreeSet::from_iter(select)
} else {
BTreeSet::from_iter(ALL_CHECK_CODES)
BTreeSet::from_iter(DEFAULT_CHECK_CODES)
},
per_file_ignores: config
.per_file_ignores
.map(|ignore_strings| {
ignore_strings
.into_iter()
.map(|pair| PerFileIgnore::new(pair, &project_root))
.collect()
})
.unwrap_or_default(),
dummy_variable_rgx: match config.dummy_variable_rgx {
Some(pattern) => Regex::new(&pattern)
.map_err(|e| anyhow!("Invalid dummy-variable-rgx value: {e}"))?,
None => DEFAULT_DUMMY_VARIABLE_RGX.clone(),
},
pyproject,
project_root,
@@ -135,11 +181,14 @@ impl Settings {
if let Some(ignore) = &config.ignore {
settings.ignore(ignore);
}
settings
Ok(settings)
}
pub fn clear(&mut self) {
self.select.clear();
}
pub fn select(&mut self, codes: Vec<CheckCode>) {
self.select.clear();
for code in codes {
self.select.insert(code);
}

View File

@@ -0,0 +1,47 @@
---
source: src/linter.rs
expression: checks
---
- kind:
UnusedVariable: e
location:
row: 3
column: 1
fix: ~
- kind:
UnusedVariable: foo
location:
row: 20
column: 5
fix: ~
- kind:
UnusedVariable: a
location:
row: 21
column: 6
fix: ~
- kind:
UnusedVariable: b
location:
row: 21
column: 9
fix: ~
- kind:
UnusedVariable: _
location:
row: 35
column: 5
fix: ~
- kind:
UnusedVariable: __
location:
row: 36
column: 5
fix: ~
- kind:
UnusedVariable: _discarded
location:
row: 37
column: 5
fix: ~

View File

@@ -0,0 +1,89 @@
---
source: src/linter.rs
expression: checks
---
- kind:
UnusedNOQA: ~
location:
row: 9
column: 10
fix:
content: ""
start:
row: 9
column: 10
end:
row: 9
column: 18
applied: false
- kind:
UnusedNOQA: E501
location:
row: 13
column: 10
fix:
content: ""
start:
row: 13
column: 10
end:
row: 13
column: 24
applied: false
- kind:
UnusedNOQA: E501
location:
row: 16
column: 10
fix:
content: " # noqa: F841"
start:
row: 16
column: 10
end:
row: 16
column: 30
applied: false
- kind:
UnusedNOQA: F841
location:
row: 41
column: 4
fix:
content: " # noqa: E501"
start:
row: 41
column: 4
end:
row: 41
column: 24
applied: false
- kind:
UnusedNOQA: E501
location:
row: 49
column: 4
fix:
content: ""
start:
row: 49
column: 4
end:
row: 49
column: 18
applied: false
- kind:
UnusedNOQA: ~
location:
row: 57
column: 4
fix:
content: ""
start:
row: 57
column: 4
end:
row: 57
column: 12
applied: false