Compare commits

...

14 Commits

Author SHA1 Message Date
Charlie Marsh
e66b786229 Bump version to 0.0.153 2022-12-03 17:05:39 -05:00
Harutaka Kawamura
e05e1cdf76 Implement consider-merging-isinstance (#1009) 2022-12-03 16:51:53 -05:00
Harutaka Kawamura
f92cc7a159 Fix clippy errors on main (#1010) 2022-12-03 10:02:36 -05:00
Harutaka Kawamura
ebd2181946 Implement unnecessary-direct-lambda-call (#1008) 2022-12-03 09:59:04 -05:00
Harutaka Kawamura
3efa1a03f2 Fix match_like_matches_macro in src/pylint/plugins.rs (#1007) 2022-12-03 00:21:32 -05:00
Harutaka Kawamura
115e85b47d Rename PLE0206 to PLR0206 (#1006) 2022-12-03 00:14:46 -05:00
Harutaka Kawamura
31a3314ebd Implement PLE0206 (#1005) 2022-12-03 00:04:43 -05:00
Charlie Marsh
bf33025ea9 Support whole-file noqa exclusions (#1001) 2022-12-02 23:56:56 -05:00
Charlie Marsh
1a33ee3fc4 Bump version to 0.0.152 2022-12-02 13:23:00 -05:00
Charlie Marsh
4722885910 Avoid recursing on nested deferred annotations (#1000)
Parse nested deferred annotations
2022-12-02 13:22:39 -05:00
Jonathan Plasse
117fcb6936 Add no-eval rule from pygrep-hooks (#994) 2022-12-02 12:59:06 -05:00
Charlie Marsh
1a24d78f67 Bump version to 0.0.151 2022-12-01 22:31:44 -05:00
Charlie Marsh
4a4082cf0e Track type definitions and annotations separately (#992) 2022-12-01 22:31:20 -05:00
Charlie Marsh
18b9fbd71e Improve docstring checks with empty trailing lines (#991) 2022-12-01 20:15:44 -05:00
42 changed files with 1043 additions and 195 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.150
rev: v0.0.153
hooks:
- id: ruff

6
Cargo.lock generated
View File

@@ -703,7 +703,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.150-dev.0"
version = "0.0.153-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -1837,7 +1837,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.150"
version = "0.0.153"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1889,7 +1889,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.150"
version = "0.0.153"
dependencies = [
"anyhow",
"clap 4.0.22",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.150"
version = "0.0.153"
edition = "2021"
rust-version = "1.65.0"

23
LICENSE
View File

@@ -471,6 +471,29 @@ are:
SOFTWARE.
"""
- pygrep-hooks, licensed as follows:
"""
Copyright (c) 2018 Anthony Sottile
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
- pyupgrade, licensed as follows:
"""
Copyright (c) 2017 Anthony Sottile

View File

@@ -77,18 +77,20 @@ of [Conda](https://docs.conda.io/en/latest/):
1. [pep8-naming (N)](#pep8-naming)
1. [eradicate (ERA)](#eradicate)
1. [flake8-bandit (S)](#flake8-bandit)
1. [flake8-comprehensions (C)](#flake8-comprehensions)
1. [flake8-comprehensions (C4)](#flake8-comprehensions)
1. [flake8-boolean-trap (FBT)](#flake8-boolean-trap)
1. [flake8-bugbear (B)](#flake8-bugbear)
1. [flake8-builtins (A)](#flake8-builtins)
1. [flake8-debugger (T)](#flake8-debugger)
1. [flake8-debugger (T10)](#flake8-debugger)
1. [flake8-tidy-imports (I25)](#flake8-tidy-imports)
1. [flake8-print (T)](#flake8-print)
1. [flake8-print (T20)](#flake8-print)
1. [flake8-quotes (Q)](#flake8-quotes)
1. [flake8-annotations (ANN)](#flake8-annotations)
1. [flake8-2020 (YTT)](#flake8-2020)
1. [flake8-blind-except (BLE)](#flake8-blind-except)
1. [mccabe (C90)](#mccabe)
1. [pygrep-hooks (PGH)](#pygrep-hooks)
1. [Pylint (PL)](#pylint)
1. [Ruff-specific rules (RUF)](#ruff-specific-rules)
1. [Meta rules (M)](#meta-rules)
1. [Editor Integrations](#editor-integrations)
@@ -143,7 +145,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.150
rev: v0.0.153
hooks:
- id: ruff
```
@@ -314,10 +316,10 @@ Options:
### Ignoring errors
To omit a lint check entirely, add it to the "ignore" list via `--ignore` or `--extend-ignore`,
either on the command-line or in your `pyproject.toml` file.
To omit a lint check entirely, add it to the "ignore" list via [`ignore`](#ignore) or
[`extend-ignore`](#extend-ignore), either on the command-line or in your `pyproject.toml` file.
To ignore an error in-line, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an error inline, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
To ignore an individual error, add `# noqa: {code}` to the end of the line, like so:
```python
@@ -332,7 +334,7 @@ x = 1 # noqa
```
Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
will apply to the entire body, like so:
will apply to the entire string, like so:
```python
"""Lorem ipsum dolor sit amet.
@@ -341,6 +343,15 @@ Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor i
""" # noqa: E501
```
To ignore all errors across an entire file, Ruff supports Flake8's `# flake8: noqa` directive (or,
equivalently, `# ruff: noqa`). Adding either of those directives to any part of a file will disable
error reporting for the entire file.
For targeted exclusions across entire files (e.g., "Ignore all F841 violations in
`/path/to/file.py`"), see the [`per-file-ignores`](#per-file-ignores) configuration setting.
### Automating `noqa` Directives
Ruff supports several workflows to aid in `noqa` management.
First, Ruff provides a special error code, `M001`, to enforce that your `noqa` directives are
@@ -726,12 +737,23 @@ For more, see [mccabe](https://pypi.org/project/mccabe/0.7.0/) on PyPI.
| ---- | ---- | ------- | --- |
| C901 | FunctionIsTooComplex | `...` is too complex (10) | |
### pygrep-hooks
For more, see [pygrep-hooks](https://github.com/pre-commit/pygrep-hooks) on GitHub.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| PGH001 | NoEval | No builtin `eval()` allowed | |
### Pylint
For more, see [Pylint](https://pypi.org/project/pylint/2.15.7/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| PLR1701 | ConsiderMergingIsinstance | Consider merging these isinstance calls: `isinstance(..., (...))` | |
| PLC3002 | UnnecessaryDirectLambdaCall | Lambda expression called directly. Execute the expression inline instead. | |
| PLR0206 | PropertyWithParameters | Cannot have defined parameters for properties | |
| PLE1142 | AwaitOutsideAsync | `await` should be used within an async function | |
### Ruff-specific rules
@@ -902,6 +924,7 @@ natively, including:
- [`yesqa`](https://github.com/asottile/yesqa)
- [`eradicate`](https://pypi.org/project/eradicate/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (16/33)
- [`pygrep-hooks`](https://github.com/pre-commit/pygrep-hooks) (1/10)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
Beyond the rule set, Ruff suffers from the following limitations vis-à-vis Flake8:
@@ -946,8 +969,10 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`flake8-tidy-imports`](https://pypi.org/project/flake8-tidy-imports/) (1/3)
- [`mccabe`](https://pypi.org/project/mccabe/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (16/33).
Ruff can also replace [`isort`](https://pypi.org/project/isort/),
[`yesqa`](https://github.com/asottile/yesqa), [`eradicate`](https://pypi.org/project/eradicate/),
[`pygrep-hooks`](https://github.com/pre-commit/pygrep-hooks) (1/10), and a subset of the rules
implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (16/33).
If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.
@@ -1261,7 +1286,7 @@ Exclusions are based on globs, and can be either:
(to exclude any Python files in `directory`). Note that these paths are relative to the
project root (e.g., the directory containing your `pyproject.toml`).
Note that you'll typically want to use [`extend_exclude`](#extend_exclude) to modify the excluded
Note that you'll typically want to use [`extend_exclude`](#extend-exclude) to modify the excluded
paths.
**Default value**: `[".bzr", ".direnv", ".eggs", ".git", ".hg", ".mypy_cache", ".nox", ".pants.d", ".ruff_cache", ".svn", ".tox", ".venv", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", "venv"]`

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.150"
version = "0.0.153"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.150"
version = "0.0.153"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.150-dev.0"
version = "0.0.153-dev.0"
edition = "2021"
[lib]

View File

@@ -1,7 +1,7 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Optional
from typing import Callable, List, Tuple, Optional, Sequence
from models import (
Fruit,
@@ -37,4 +37,12 @@ def f(x: int) -> List[int]:
return y
x: Optional[int] = None
x: Tuple[int, ...] = (1, 2)
def f(param: "Optional[Callable]" = None) -> "None":
pass
def f(param: Optional["Sequence"] = None) -> "None":
pass

View File

@@ -0,0 +1,9 @@
from ast import literal_eval
eval("3 + 4")
literal_eval({1: 2})
def fn() -> None:
eval("3 + 4")

View File

@@ -0,0 +1,11 @@
def eval(content: str) -> None:
pass
eval("3 + 4")
literal_eval({1: 2})
def fn() -> None:
eval("3 + 4")

View File

@@ -0,0 +1,37 @@
"""Checks use of consider-merging-isinstance"""
# pylint:disable=line-too-long, simplifiable-condition
def isinstances():
"Examples of isinstances"
var = range(10)
# merged
if isinstance(var[1], (int, float)):
pass
result = isinstance(var[2], (int, float))
# not merged
if isinstance(var[3], int) or isinstance(var[3], float) or isinstance(var[3], list) and True: # [consider-merging-isinstance]
pass
result = isinstance(var[4], int) or isinstance(var[4], float) or isinstance(var[5], list) and False # [consider-merging-isinstance]
result = isinstance(var[5], int) or True or isinstance(var[5], float) # [consider-merging-isinstance]
infered_isinstance = isinstance
result = infered_isinstance(var[6], int) or infered_isinstance(var[6], float) or infered_isinstance(var[6], list) and False # [consider-merging-isinstance]
result = isinstance(var[10], str) or isinstance(var[10], int) and var[8] * 14 or isinstance(var[10], float) and var[5] * 14.4 or isinstance(var[10], list) # [consider-merging-isinstance]
result = isinstance(var[11], int) or isinstance(var[11], int) or isinstance(var[11], float) # [consider-merging-isinstance]
result = isinstance(var[20])
result = isinstance()
# Combination merged and not merged
result = isinstance(var[12], (int, float)) or isinstance(var[12], list) # [consider-merging-isinstance]
# not merged but valid
result = isinstance(var[5], int) and var[5] * 14 or isinstance(var[5], float) and var[5] * 14.4
result = isinstance(var[7], int) or not isinstance(var[7], float)
result = isinstance(var[6], int) or isinstance(var[7], float)
result = isinstance(var[6], int) or isinstance(var[7], int)
return result

View File

@@ -0,0 +1,30 @@
# pylint: disable=missing-docstring, too-few-public-methods
from abc import ABCMeta, abstractmethod
class Cls:
@property
def attribute(self, param, param1): # [property-with-parameters]
return param + param1
@property
def attribute_keyword_only(self, *, param, param1): # [property-with-parameters]
return param + param1
@property
def attribute_positional_only(self, param, param1, /): # [property-with-parameters]
return param + param1
class MyClassBase(metaclass=ABCMeta):
"""MyClassBase."""
@property
@abstractmethod
def example(self):
"""Getter."""
@example.setter
@abstractmethod
def example(self, value):
"""Setter."""

View File

@@ -0,0 +1,5 @@
"""Test unnecessary direct calls to lambda expressions."""
# pylint: disable=undefined-variable, line-too-long
y = (lambda x: x**2 + 2*x + 1)(a) # [unnecessary-direct-lambda-call]
y = max((lambda x: x**2)(a), (lambda x: x+1)(a)) # [unnecessary-direct-lambda-call,unnecessary-direct-lambda-call]

View File

@@ -1,7 +1,7 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import List, Optional
from typing import List, Optional, TypeAlias, Union
from models import (
Fruit,
@@ -38,3 +38,5 @@ def f(x: int) -> List[int]:
x: Optional[int] = None
MyList: TypeAlias = Union[List[int], List[str]]

View File

@@ -0,0 +1,7 @@
# flake8: noqa
import os
def f():
x = 1

View File

@@ -0,0 +1,7 @@
# ruff: noqa
import os
def f():
x = 1

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.150"
version = "0.0.153"
edition = "2021"
[dependencies]

View File

@@ -28,11 +28,12 @@ pub fn main(cli: &Cli) -> Result<()> {
output.push('\n');
output.push('\n');
if let Some(url) = check_category.url() {
if let Some((url, platform)) = check_category.url() {
output.push_str(&format!(
"For more, see [{}]({}) on PyPI.",
"For more, see [{}]({}) on {}.",
check_category.title(),
url
url,
platform
));
output.push('\n');
output.push('\n');

View File

@@ -4,8 +4,6 @@ use rustpython_parser::ast::{
PatternKind, Stmt, StmtKind, Unaryop, Withitem,
};
use crate::ast::helpers::match_name_or_attr;
pub trait Visitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
walk_stmt(self, stmt);
@@ -150,11 +148,7 @@ pub fn walk_stmt<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, stmt: &'a Stmt) {
} => {
visitor.visit_annotation(annotation);
if let Some(expr) = value {
if match_name_or_attr(annotation, "TypeAlias") {
visitor.visit_annotation(expr);
} else {
visitor.visit_expr(expr);
}
visitor.visit_expr(expr);
}
visitor.visit_expr(target);
}

View File

@@ -1,3 +1,5 @@
use std::str::Lines;
use rustpython_ast::{Located, Location};
use crate::ast::types::Range;
@@ -37,3 +39,38 @@ pub fn clean(indentation: &str) -> String {
.map(|char| if char.is_whitespace() { char } else { ' ' })
.collect()
}
/// Like `str#lines`, but includes a trailing newline as an empty line.
pub struct LinesWithTrailingNewline<'a> {
trailing: Option<&'a str>,
underlying: Lines<'a>,
}
impl<'a> LinesWithTrailingNewline<'a> {
pub fn from(input: &'a str) -> LinesWithTrailingNewline<'a> {
LinesWithTrailingNewline {
underlying: input.lines(),
trailing: if input.ends_with('\n') {
Some("")
} else {
None
},
}
}
}
impl<'a> Iterator for LinesWithTrailingNewline<'a> {
type Item = &'a str;
#[inline]
fn next(&mut self) -> Option<&'a str> {
let mut next = self.underlying.next();
if next.is_none() {
if self.trailing.is_some() {
next = self.trailing;
self.trailing = None;
}
}
next
}
}

View File

@@ -37,11 +37,13 @@ use crate::{
docstrings, flake8_2020, flake8_annotations, flake8_bandit, flake8_blind_except,
flake8_boolean_trap, flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_debugger,
flake8_print, flake8_tidy_imports, mccabe, pep8_naming, pycodestyle, pydocstyle, pyflakes,
pylint, pyupgrade, rules,
pygrep_hooks, pylint, pyupgrade, rules,
};
const GLOBAL_SCOPE_INDEX: usize = 0;
type DeferralContext = (Vec<usize>, Vec<usize>);
#[allow(clippy::struct_excessive_bools)]
pub struct Checker<'a> {
// Input data.
@@ -66,17 +68,18 @@ pub struct Checker<'a> {
scopes: Vec<Scope<'a>>,
scope_stack: Vec<usize>,
dead_scopes: Vec<usize>,
deferred_string_annotations: Vec<(Range, &'a str, Vec<usize>, Vec<usize>)>,
deferred_annotations: Vec<(&'a Expr, Vec<usize>, Vec<usize>)>,
deferred_functions: Vec<(&'a Stmt, Vec<usize>, Vec<usize>, VisibleScope)>,
deferred_lambdas: Vec<(&'a Expr, Vec<usize>, Vec<usize>)>,
deferred_string_type_definitions: Vec<(Range, &'a str, bool, DeferralContext)>,
deferred_type_definitions: Vec<(&'a Expr, bool, DeferralContext)>,
deferred_functions: Vec<(&'a Stmt, DeferralContext, VisibleScope)>,
deferred_lambdas: Vec<(&'a Expr, DeferralContext)>,
deferred_assignments: Vec<usize>,
// Internal, derivative state.
visible_scope: VisibleScope,
in_f_string: Option<Range>,
in_annotation: bool,
in_deferred_string_annotation: bool,
in_deferred_annotation: bool,
in_type_definition: bool,
in_deferred_string_type_definition: bool,
in_deferred_type_definition: bool,
in_literal: bool,
in_subscript: bool,
in_withitem: bool,
@@ -110,8 +113,8 @@ impl<'a> Checker<'a> {
scopes: vec![],
scope_stack: vec![],
dead_scopes: vec![],
deferred_string_annotations: vec![],
deferred_annotations: vec![],
deferred_string_type_definitions: vec![],
deferred_type_definitions: vec![],
deferred_functions: vec![],
deferred_lambdas: vec![],
deferred_assignments: vec![],
@@ -122,8 +125,9 @@ impl<'a> Checker<'a> {
},
in_f_string: None,
in_annotation: false,
in_deferred_string_annotation: false,
in_deferred_annotation: false,
in_type_definition: false,
in_deferred_string_type_definition: false,
in_deferred_type_definition: false,
in_literal: false,
in_subscript: false,
in_withitem: false,
@@ -437,6 +441,10 @@ where
);
}
if self.settings.enabled.contains(&CheckCode::PLR0206) {
pylint::plugins::property_with_parameters(self, stmt, decorator_list, args);
}
self.check_builtin_shadowing(name, Range::from_located(stmt), true);
// Visit the decorators and arguments, but avoid the body, which will be
@@ -1087,8 +1095,7 @@ where
self.deferred_functions.push((
stmt,
self.scope_stack.clone(),
self.parent_stack.clone(),
(self.scope_stack.clone(), self.parent_stack.clone()),
self.visible_scope.clone(),
));
}
@@ -1135,6 +1142,24 @@ where
self.visit_stmt(stmt);
}
}
StmtKind::AnnAssign {
target,
annotation,
value,
..
} => {
self.visit_annotation(annotation);
if let Some(expr) = value {
if self.match_typing_expr(annotation, "TypeAlias") {
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = false;
} else {
self.visit_expr(expr);
}
}
self.visit_expr(target);
}
_ => visitor::walk_stmt(self, stmt),
};
self.visible_scope = prev_visible_scope;
@@ -1157,33 +1182,39 @@ where
fn visit_annotation(&mut self, expr: &'b Expr) {
let prev_in_annotation = self.in_annotation;
let prev_in_type_definition = self.in_type_definition;
self.in_annotation = true;
self.in_type_definition = true;
self.visit_expr(expr);
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
}
fn visit_expr(&mut self, expr: &'b Expr) {
let prev_in_f_string = self.in_f_string;
let prev_in_literal = self.in_literal;
let prev_in_annotation = self.in_annotation;
let prev_in_type_definition = self.in_type_definition;
if self.in_annotation && self.annotations_future_enabled {
if !(self.in_deferred_type_definition || self.in_deferred_string_type_definition)
&& self.in_type_definition
&& self.annotations_future_enabled
{
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &expr.node
{
self.deferred_string_annotations.push((
self.deferred_string_type_definitions.push((
Range::from_located(expr),
value,
self.scope_stack.clone(),
self.parent_stack.clone(),
self.in_annotation,
(self.scope_stack.clone(), self.parent_stack.clone()),
));
} else {
self.deferred_annotations.push((
self.deferred_type_definitions.push((
expr,
self.scope_stack.clone(),
self.parent_stack.clone(),
self.in_annotation,
(self.scope_stack.clone(), self.parent_stack.clone()),
));
}
return;
@@ -1193,13 +1224,13 @@ where
match &expr.node {
ExprKind::Subscript { value, slice, .. } => {
// Ex) Optional[...]
if !self.in_deferred_string_annotation
if !self.in_deferred_string_type_definition
&& self.settings.enabled.contains(&CheckCode::U007)
&& (self.settings.target_version >= PythonVersion::Py310
|| (self.settings.target_version >= PythonVersion::Py37
&& !self.settings.pyupgrade.keep_runtime_typing
&& self.annotations_future_enabled
&& self.in_deferred_annotation))
&& self.in_annotation))
{
pyupgrade::plugins::use_pep604_annotation(self, expr, value, slice);
}
@@ -1236,13 +1267,13 @@ where
match ctx {
ExprContext::Load => {
// Ex) List[...]
if !self.in_deferred_string_annotation
if !self.in_deferred_string_type_definition
&& self.settings.enabled.contains(&CheckCode::U006)
&& (self.settings.target_version >= PythonVersion::Py39
|| (self.settings.target_version >= PythonVersion::Py37
&& !self.settings.pyupgrade.keep_runtime_typing
&& self.annotations_future_enabled
&& self.in_deferred_annotation))
&& self.in_annotation))
&& typing::is_pep585_builtin(
expr,
&self.from_imports,
@@ -1277,12 +1308,12 @@ where
}
ExprKind::Attribute { attr, .. } => {
// Ex) typing.List[...]
if !self.in_deferred_string_annotation
if !self.in_deferred_string_type_definition
&& self.settings.enabled.contains(&CheckCode::U006)
&& (self.settings.target_version >= PythonVersion::Py39
|| (self.settings.target_version >= PythonVersion::Py37
&& self.annotations_future_enabled
&& self.in_deferred_annotation))
&& self.in_annotation))
&& typing::is_pep585_builtin(expr, &self.from_imports, &self.import_aliases)
{
pyupgrade::plugins::use_pep585_annotation(self, expr, attr);
@@ -1676,6 +1707,16 @@ where
}
}
// pygrep-hooks
if self.settings.enabled.contains(&CheckCode::PGH001) {
pygrep_hooks::checks::no_eval(self, func);
}
// pylint
if self.settings.enabled.contains(&CheckCode::PLC3002) {
pylint::plugins::unnecessary_direct_lambda_call(self, expr, func);
}
// Ruff
if self.settings.enabled.contains(&CheckCode::RUF101) {
rules::plugins::convert_exit_to_sys_exit(self, func);
@@ -1946,12 +1987,12 @@ where
value: Constant::Str(value),
..
} => {
if self.in_annotation && !self.in_literal {
self.deferred_string_annotations.push((
if self.in_type_definition && !self.in_literal {
self.deferred_string_type_definitions.push((
Range::from_located(expr),
value,
self.scope_stack.clone(),
self.parent_stack.clone(),
self.in_annotation,
(self.scope_stack.clone(), self.parent_stack.clone()),
));
}
if self.settings.enabled.contains(&CheckCode::S104) {
@@ -2022,17 +2063,19 @@ where
}
self.push_scope(Scope::new(ScopeKind::Generator));
}
ExprKind::BoolOp { op, values } => {
if self.settings.enabled.contains(&CheckCode::PLR1701) {
pylint::plugins::consider_merging_isinstance(self, expr, op, values);
}
}
_ => {}
};
// Recurse.
match &expr.node {
ExprKind::Lambda { .. } => {
self.deferred_lambdas.push((
expr,
self.scope_stack.clone(),
self.parent_stack.clone(),
));
self.deferred_lambdas
.push((expr, (self.scope_stack.clone(), self.parent_stack.clone())));
}
ExprKind::Call {
func,
@@ -2043,12 +2086,16 @@ where
if self.match_typing_call_path(&call_path, "ForwardRef") {
self.visit_expr(func);
for expr in args {
self.visit_annotation(expr);
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
}
} else if self.match_typing_call_path(&call_path, "cast") {
self.visit_expr(func);
if !args.is_empty() {
self.visit_annotation(&args[0]);
self.in_type_definition = true;
self.visit_expr(&args[0]);
self.in_type_definition = prev_in_type_definition;
}
for expr in args.iter().skip(1) {
self.visit_expr(expr);
@@ -2056,22 +2103,28 @@ where
} else if self.match_typing_call_path(&call_path, "NewType") {
self.visit_expr(func);
for expr in args.iter().skip(1) {
self.visit_annotation(expr);
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
}
} else if self.match_typing_call_path(&call_path, "TypeVar") {
self.visit_expr(func);
for expr in args.iter().skip(1) {
self.visit_annotation(expr);
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
}
for keyword in keywords {
let KeywordData { arg, value } = &keyword.node;
if let Some(id) = arg {
if id == "bound" {
self.visit_annotation(value);
} else {
self.in_annotation = false;
self.in_type_definition = true;
self.visit_expr(value);
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
} else {
self.in_type_definition = false;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
}
}
}
@@ -2087,11 +2140,13 @@ where
ExprKind::List { elts, .. }
| ExprKind::Tuple { elts, .. } => {
if elts.len() == 2 {
self.in_annotation = false;
self.in_type_definition = false;
self.visit_expr(&elts[0]);
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
self.visit_annotation(&elts[1]);
self.in_type_definition = true;
self.visit_expr(&elts[1]);
self.in_type_definition = prev_in_type_definition;
}
}
_ => {}
@@ -2105,7 +2160,9 @@ where
// Ex) NamedTuple("a", a=int)
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
self.visit_annotation(value);
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
}
} else if self.match_typing_call_path(&call_path, "TypedDict") {
self.visit_expr(func);
@@ -2114,12 +2171,14 @@ where
if args.len() > 1 {
if let ExprKind::Dict { keys, values } = &args[1].node {
for key in keys {
self.in_annotation = false;
self.in_type_definition = false;
self.visit_expr(key);
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
}
for value in values {
self.visit_annotation(value);
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
}
}
}
@@ -2127,7 +2186,9 @@ where
// Ex) TypedDict("a", a=int)
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
self.visit_annotation(value);
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
}
} else {
visitor::walk_expr(self, expr);
@@ -2157,7 +2218,9 @@ where
// Ex) Optional[int]
SubscriptKind::AnnotatedSubscript => {
self.visit_expr(value);
self.visit_annotation(slice);
self.in_type_definition = true;
self.visit_expr(slice);
self.in_type_definition = prev_in_type_definition;
self.visit_expr_context(ctx);
}
// Ex) Annotated[int, "Hello, world!"]
@@ -2169,11 +2232,11 @@ where
if let ExprKind::Tuple { elts, ctx } = &slice.node {
if let Some(expr) = elts.first() {
self.visit_expr(expr);
self.in_annotation = false;
self.in_type_definition = false;
for expr in elts.iter().skip(1) {
self.visit_expr(expr);
}
self.in_annotation = true;
self.in_type_definition = prev_in_type_definition;
self.visit_expr_context(ctx);
}
} else {
@@ -2205,7 +2268,7 @@ where
_ => {}
};
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
self.in_literal = prev_in_literal;
self.in_f_string = prev_in_f_string;
}
@@ -2777,28 +2840,33 @@ impl<'a> Checker<'a> {
docstring.is_some()
}
fn check_deferred_annotations(&mut self) {
while let Some((expr, scopes, parents)) = self.deferred_annotations.pop() {
fn check_deferred_type_definitions(&mut self) {
while let Some((expr, in_annotation, (scopes, parents))) =
self.deferred_type_definitions.pop()
{
self.scope_stack = scopes;
self.parent_stack = parents;
self.in_deferred_annotation = true;
self.in_annotation = in_annotation;
self.in_type_definition = true;
self.in_deferred_type_definition = true;
self.visit_expr(expr);
self.in_deferred_annotation = false;
self.in_deferred_type_definition = false;
self.in_type_definition = false;
}
}
fn check_deferred_string_annotations<'b>(&mut self, allocator: &'b mut Vec<Expr>)
fn check_deferred_string_type_definitions<'b>(&mut self, allocator: &'b mut Vec<Expr>)
where
'b: 'a,
{
let mut stacks = vec![];
while let Some((range, expression, scopes, parents)) =
self.deferred_string_annotations.pop()
while let Some((range, expression, in_annotation, context)) =
self.deferred_string_type_definitions.pop()
{
if let Ok(mut expr) = parser::parse_expression(expression, "<filename>") {
relocate_expr(&mut expr, range);
allocator.push(expr);
stacks.push((scopes, parents));
stacks.push((in_annotation, context));
} else {
if self.settings.enabled.contains(&CheckCode::F722) {
self.add_check(Check::new(
@@ -2808,19 +2876,22 @@ impl<'a> Checker<'a> {
}
}
}
for (expr, (scopes, parents)) in allocator.iter().zip(stacks) {
for (expr, (in_annotation, (scopes, parents))) in allocator.iter().zip(stacks) {
self.scope_stack = scopes;
self.parent_stack = parents;
self.in_deferred_string_annotation = true;
self.in_annotation = in_annotation;
self.in_type_definition = true;
self.in_deferred_string_type_definition = true;
self.visit_expr(expr);
self.in_deferred_string_annotation = false;
self.in_deferred_string_type_definition = false;
self.in_type_definition = false;
}
}
fn check_deferred_functions(&mut self) {
while let Some((stmt, scopes, parents, visibility)) = self.deferred_functions.pop() {
self.parent_stack = parents;
while let Some((stmt, (scopes, parents), visibility)) = self.deferred_functions.pop() {
self.scope_stack = scopes;
self.parent_stack = parents;
self.visible_scope = visibility;
self.push_scope(Scope::new(ScopeKind::Function(FunctionScope {
async_: matches!(stmt.node, StmtKind::AsyncFunctionDef { .. }),
@@ -2846,9 +2917,9 @@ impl<'a> Checker<'a> {
}
fn check_deferred_lambdas(&mut self) {
while let Some((expr, scopes, parents)) = self.deferred_lambdas.pop() {
self.parent_stack = parents;
while let Some((expr, (scopes, parents))) = self.deferred_lambdas.pop() {
self.scope_stack = scopes;
self.parent_stack = parents;
self.push_scope(Scope::new(ScopeKind::Lambda));
if let ExprKind::Lambda { args, body } = &expr.node {
@@ -3185,9 +3256,9 @@ pub fn check_ast(
checker.check_deferred_functions();
checker.check_deferred_lambdas();
checker.check_deferred_assignments();
checker.check_deferred_annotations();
checker.check_deferred_type_definitions();
let mut allocator = vec![];
checker.check_deferred_string_annotations(&mut allocator);
checker.check_deferred_string_type_definitions(&mut allocator);
// Reset the scope to module-level, and check all consumed scopes.
checker.scope_stack = vec![GLOBAL_SCOPE_INDEX];

View File

@@ -9,10 +9,10 @@ use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::noqa;
use crate::noqa::Directive;
use crate::noqa::{is_file_exempt, Directive};
use crate::settings::Settings;
// Regex from PEP263
// Regex from PEP263.
static CODING_COMMENT_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^[ \t\f]*#.*?coding[:=][ \t]*utf-?8").unwrap());
@@ -58,8 +58,11 @@ pub fn check_lines(
}
macro_rules! add_if {
($check:expr, $noqa:expr) => {{
match $noqa {
($check:expr, $noqa_lineno:expr, $line:expr) => {{
match noqa_directives
.entry($noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive($line), vec![]))
{
(Directive::All(..), matches) => {
matches.push($check.kind.code().as_ref());
if ignore_noqa {
@@ -83,6 +86,12 @@ pub fn check_lines(
let lines: Vec<&str> = contents.lines().collect();
for (lineno, line) in lines.iter().enumerate() {
// If we hit an exemption for the entire file, bail.
if is_file_exempt(line) {
checks.drain(..);
return;
}
// Grab the noqa (logical) line number for the current (physical) line.
// If there are newlines at the end of the file, they won't be represented in
// `noqa_line_for`, so fallback to the current line.
@@ -106,11 +115,7 @@ pub fn check_lines(
Location::new(lineno + 2, 0),
));
}
let noqa = noqa_directives.entry(noqa_lineno).or_insert_with(|| {
(noqa::extract_noqa_directive(lines[noqa_lineno]), vec![])
});
add_if!(check, noqa);
add_if!(check, noqa_lineno, lines[noqa_lineno]);
}
}
}
@@ -155,11 +160,7 @@ pub fn check_lines(
end_location: Location::new(lineno + 1, line_length),
},
);
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
add_if!(check, noqa);
add_if!(check, noqa_lineno, lines[noqa_lineno]);
}
}
}
@@ -179,10 +180,7 @@ pub fn check_lines(
let lineno = lines.len() - 1;
let noqa_lineno = noqa_line_for.get(&(lineno + 1)).unwrap_or(&(lineno + 1)) - 1;
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
add_if!(check, noqa);
add_if!(check, noqa_lineno, lines[noqa_lineno]);
}
}
@@ -269,13 +267,12 @@ mod tests {
#[test]
fn e501_non_ascii_char() {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
let noqa_line_for: IntMap<usize, usize> = IntMap::default();
let check_with_max_line_length = |line_length: usize| {
let mut checks: Vec<Check> = vec![];
check_lines(
&mut checks,
line,
&noqa_line_for,
&IntMap::default(),
&Settings {
line_length,
..Settings::for_rule(CheckCode::E501)

View File

@@ -90,7 +90,10 @@ pub enum CheckCode {
F831,
F841,
F901,
// pylint errors
// pylint
PLR1701,
PLC3002,
PLR0206,
PLE1142,
// flake8-builtins
A001,
@@ -278,6 +281,8 @@ pub enum CheckCode {
RUF101,
// Meta
M001,
// pygrep-hooks
PGH001,
}
#[derive(EnumIter, Debug, PartialEq, Eq)]
@@ -302,11 +307,26 @@ pub enum CheckCategory {
Flake82020,
Flake8BlindExcept,
McCabe,
PygrepHooks,
Pylint,
Ruff,
Meta,
}
pub enum Platform {
PyPI,
GitHub,
}
impl fmt::Display for Platform {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
Platform::PyPI => fmt.write_str("PyPI"),
Platform::GitHub => fmt.write_str("GitHub"),
}
}
}
impl CheckCategory {
pub fn title(&self) -> &'static str {
match self {
@@ -331,51 +351,97 @@ impl CheckCategory {
CheckCategory::Pydocstyle => "pydocstyle",
CheckCategory::Pyflakes => "Pyflakes",
CheckCategory::Pylint => "Pylint",
CheckCategory::PygrepHooks => "pygrep-hooks",
CheckCategory::Pyupgrade => "pyupgrade",
CheckCategory::Ruff => "Ruff-specific rules",
}
}
pub fn url(&self) -> Option<&'static str> {
pub fn url(&self) -> Option<(&'static str, &'static Platform)> {
match self {
CheckCategory::Eradicate => Some("https://pypi.org/project/eradicate/2.1.0/"),
CheckCategory::Flake82020 => Some("https://pypi.org/project/flake8-2020/1.7.0/"),
CheckCategory::Flake8Annotations => {
Some("https://pypi.org/project/flake8-annotations/2.9.1/")
CheckCategory::Eradicate => {
Some(("https://pypi.org/project/eradicate/2.1.0/", &Platform::PyPI))
}
CheckCategory::Flake8Bandit => Some("https://pypi.org/project/flake8-bandit/4.1.1/"),
CheckCategory::Flake8BlindExcept => {
Some("https://pypi.org/project/flake8-blind-except/0.2.1/")
CheckCategory::Flake82020 => Some((
"https://pypi.org/project/flake8-2020/1.7.0/",
&Platform::PyPI,
)),
CheckCategory::Flake8Annotations => Some((
"https://pypi.org/project/flake8-annotations/2.9.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8Bandit => Some((
"https://pypi.org/project/flake8-bandit/4.1.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8BlindExcept => Some((
"https://pypi.org/project/flake8-blind-except/0.2.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8BooleanTrap => Some((
"https://pypi.org/project/flake8-boolean-trap/0.1.0/",
&Platform::PyPI,
)),
CheckCategory::Flake8Bugbear => Some((
"https://pypi.org/project/flake8-bugbear/22.10.27/",
&Platform::PyPI,
)),
CheckCategory::Flake8Builtins => Some((
"https://pypi.org/project/flake8-builtins/2.0.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8Comprehensions => Some((
"https://pypi.org/project/flake8-comprehensions/3.10.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8Debugger => Some((
"https://pypi.org/project/flake8-debugger/4.1.2/",
&Platform::PyPI,
)),
CheckCategory::Flake8Print => Some((
"https://pypi.org/project/flake8-print/5.0.0/",
&Platform::PyPI,
)),
CheckCategory::Flake8Quotes => Some((
"https://pypi.org/project/flake8-quotes/3.3.1/",
&Platform::PyPI,
)),
CheckCategory::Flake8TidyImports => Some((
"https://pypi.org/project/flake8-tidy-imports/4.8.0/",
&Platform::PyPI,
)),
CheckCategory::Isort => {
Some(("https://pypi.org/project/isort/5.10.1/", &Platform::PyPI))
}
CheckCategory::Flake8BooleanTrap => {
Some("https://pypi.org/project/flake8-boolean-trap/0.1.0/")
CheckCategory::McCabe => {
Some(("https://pypi.org/project/mccabe/0.7.0/", &Platform::PyPI))
}
CheckCategory::Flake8Bugbear => {
Some("https://pypi.org/project/flake8-bugbear/22.10.27/")
}
CheckCategory::Flake8Builtins => {
Some("https://pypi.org/project/flake8-builtins/2.0.1/")
}
CheckCategory::Flake8Comprehensions => {
Some("https://pypi.org/project/flake8-comprehensions/3.10.1/")
}
CheckCategory::Flake8Debugger => {
Some("https://pypi.org/project/flake8-debugger/4.1.2/")
}
CheckCategory::Flake8Print => Some("https://pypi.org/project/flake8-print/5.0.0/"),
CheckCategory::Flake8Quotes => Some("https://pypi.org/project/flake8-quotes/3.3.1/"),
CheckCategory::Flake8TidyImports => {
Some("https://pypi.org/project/flake8-tidy-imports/4.8.0/")
}
CheckCategory::Isort => Some("https://pypi.org/project/isort/5.10.1/"),
CheckCategory::McCabe => Some("https://pypi.org/project/mccabe/0.7.0/"),
CheckCategory::Meta => None,
CheckCategory::PEP8Naming => Some("https://pypi.org/project/pep8-naming/0.13.2/"),
CheckCategory::Pycodestyle => Some("https://pypi.org/project/pycodestyle/2.9.1/"),
CheckCategory::Pydocstyle => Some("https://pypi.org/project/pydocstyle/6.1.1/"),
CheckCategory::Pyflakes => Some("https://pypi.org/project/pyflakes/2.5.0/"),
CheckCategory::Pylint => Some("https://pypi.org/project/pylint/2.15.7/"),
CheckCategory::Pyupgrade => Some("https://pypi.org/project/pyupgrade/3.2.0/"),
CheckCategory::PEP8Naming => Some((
"https://pypi.org/project/pep8-naming/0.13.2/",
&Platform::PyPI,
)),
CheckCategory::Pycodestyle => Some((
"https://pypi.org/project/pycodestyle/2.9.1/",
&Platform::PyPI,
)),
CheckCategory::Pydocstyle => Some((
"https://pypi.org/project/pydocstyle/6.1.1/",
&Platform::PyPI,
)),
CheckCategory::Pyflakes => {
Some(("https://pypi.org/project/pyflakes/2.5.0/", &Platform::PyPI))
}
CheckCategory::Pylint => {
Some(("https://pypi.org/project/pylint/2.15.7/", &Platform::PyPI))
}
CheckCategory::PygrepHooks => Some((
"https://github.com/pre-commit/pygrep-hooks",
&Platform::GitHub,
)),
CheckCategory::Pyupgrade => {
Some(("https://pypi.org/project/pyupgrade/3.2.0/", &Platform::PyPI))
}
CheckCategory::Ruff => None,
}
}
@@ -476,7 +542,10 @@ pub enum CheckKind {
UnusedImport(String, bool),
UnusedVariable(String),
YieldOutsideFunction(DeferralKeyword),
// pylint errors
// pylint
ConsiderMergingIsinstance(String, Vec<String>),
UnnecessaryDirectLambdaCall,
PropertyWithParameters,
AwaitOutsideAsync,
// flake8-builtins
BuiltinVariableShadowing(String),
@@ -657,6 +726,8 @@ pub enum CheckKind {
BooleanPositionalArgInFunctionDefinition,
BooleanDefaultValueInFunctionDefinition,
BooleanPositionalValueInFunctionCall,
// pygrep-hooks
NoEval,
// Ruff
AmbiguousUnicodeCharacterString(char, char),
AmbiguousUnicodeCharacterDocstring(char, char),
@@ -759,8 +830,13 @@ impl CheckCode {
CheckCode::F831 => CheckKind::DuplicateArgumentName,
CheckCode::F841 => CheckKind::UnusedVariable("...".to_string()),
CheckCode::F901 => CheckKind::RaiseNotImplemented,
// pylint errors
// pylint
CheckCode::PLC3002 => CheckKind::UnnecessaryDirectLambdaCall,
CheckCode::PLE1142 => CheckKind::AwaitOutsideAsync,
CheckCode::PLR0206 => CheckKind::PropertyWithParameters,
CheckCode::PLR1701 => {
CheckKind::ConsiderMergingIsinstance("...".to_string(), vec!["...".to_string()])
}
// flake8-builtins
CheckCode::A001 => CheckKind::BuiltinVariableShadowing("...".to_string()),
CheckCode::A002 => CheckKind::BuiltinArgumentShadowing("...".to_string()),
@@ -975,6 +1051,8 @@ impl CheckCode {
CheckCode::FBT001 => CheckKind::BooleanPositionalArgInFunctionDefinition,
CheckCode::FBT002 => CheckKind::BooleanDefaultValueInFunctionDefinition,
CheckCode::FBT003 => CheckKind::BooleanPositionalValueInFunctionCall,
// pygrep-hooks
CheckCode::PGH001 => CheckKind::NoEval,
// Ruff
CheckCode::RUF001 => CheckKind::AmbiguousUnicodeCharacterString('𝐁', 'B'),
CheckCode::RUF002 => CheckKind::AmbiguousUnicodeCharacterDocstring('𝐁', 'B'),
@@ -1169,7 +1247,11 @@ impl CheckCode {
CheckCode::N816 => CheckCategory::PEP8Naming,
CheckCode::N817 => CheckCategory::PEP8Naming,
CheckCode::N818 => CheckCategory::PEP8Naming,
CheckCode::PGH001 => CheckCategory::PygrepHooks,
CheckCode::PLC3002 => CheckCategory::Pylint,
CheckCode::PLE1142 => CheckCategory::Pylint,
CheckCode::PLR0206 => CheckCategory::Pylint,
CheckCode::PLR1701 => CheckCategory::Pylint,
CheckCode::Q000 => CheckCategory::Flake8Quotes,
CheckCode::Q001 => CheckCategory::Flake8Quotes,
CheckCode::Q002 => CheckCategory::Flake8Quotes,
@@ -1281,8 +1363,11 @@ impl CheckKind {
// pycodestyle warnings
CheckKind::NoNewLineAtEndOfFile => &CheckCode::W292,
CheckKind::InvalidEscapeSequence(_) => &CheckCode::W605,
// pylint errors
// pylint
CheckKind::AwaitOutsideAsync => &CheckCode::PLE1142,
CheckKind::ConsiderMergingIsinstance(..) => &CheckCode::PLR1701,
CheckKind::PropertyWithParameters => &CheckCode::PLR0206,
CheckKind::UnnecessaryDirectLambdaCall => &CheckCode::PLC3002,
// flake8-builtins
CheckKind::BuiltinVariableShadowing(_) => &CheckCode::A001,
CheckKind::BuiltinArgumentShadowing(_) => &CheckCode::A002,
@@ -1456,12 +1541,14 @@ impl CheckKind {
CheckKind::HardcodedPasswordString(..) => &CheckCode::S105,
CheckKind::HardcodedPasswordFuncArg(..) => &CheckCode::S106,
CheckKind::HardcodedPasswordDefault(..) => &CheckCode::S107,
// McCabe
// mccabe
CheckKind::FunctionIsTooComplex(..) => &CheckCode::C901,
// flake8-boolean-trap
CheckKind::BooleanPositionalArgInFunctionDefinition => &CheckCode::FBT001,
CheckKind::BooleanDefaultValueInFunctionDefinition => &CheckCode::FBT002,
CheckKind::BooleanPositionalValueInFunctionCall => &CheckCode::FBT003,
// pygrep-hooks
CheckKind::NoEval => &CheckCode::PGH001,
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(..) => &CheckCode::RUF001,
CheckKind::AmbiguousUnicodeCharacterDocstring(..) => &CheckCode::RUF002,
@@ -1652,7 +1739,17 @@ impl CheckKind {
CheckKind::InvalidEscapeSequence(char) => {
format!("Invalid escape sequence: '\\{char}'")
}
// pylint errors
// pylint
CheckKind::ConsiderMergingIsinstance(obj, types) => {
let types = types.join(", ");
format!("Consider merging these isinstance calls: `isinstance({obj}, ({types}))`")
}
CheckKind::UnnecessaryDirectLambdaCall => "Lambda expression called directly. Execute \
the expression inline instead."
.to_string(),
CheckKind::PropertyWithParameters => {
"Cannot have defined parameters for properties".to_string()
}
CheckKind::AwaitOutsideAsync => {
"`await` should be used within an async function".to_string()
}
@@ -2183,7 +2280,7 @@ impl CheckKind {
}
// flake8-blind-except
CheckKind::BlindExcept => "Blind except Exception: statement".to_string(),
// McCabe
// mccabe
CheckKind::FunctionIsTooComplex(name, complexity) => {
format!("`{name}` is too complex ({complexity})")
}
@@ -2197,6 +2294,8 @@ impl CheckKind {
CheckKind::BooleanPositionalValueInFunctionCall => {
"Boolean positional value in function call".to_string()
}
// pygrep-hooks
CheckKind::NoEval => "No builtin `eval()` allowed".to_string(),
// Ruff
CheckKind::AmbiguousUnicodeCharacterString(confusable, representant) => {
format!(

View File

@@ -278,11 +278,29 @@ pub enum CheckCodePrefix {
N816,
N817,
N818,
PGH,
PGH0,
PGH00,
PGH001,
PLC,
PLC3,
PLC30,
PLC300,
PLC3002,
PLE,
PLE1,
PLE11,
PLE114,
PLE1142,
PLR,
PLR0,
PLR02,
PLR020,
PLR0206,
PLR1,
PLR17,
PLR170,
PLR1701,
Q,
Q0,
Q00,
@@ -1148,11 +1166,29 @@ impl CheckCodePrefix {
CheckCodePrefix::N816 => vec![CheckCode::N816],
CheckCodePrefix::N817 => vec![CheckCode::N817],
CheckCodePrefix::N818 => vec![CheckCode::N818],
CheckCodePrefix::PGH => vec![CheckCode::PGH001],
CheckCodePrefix::PGH0 => vec![CheckCode::PGH001],
CheckCodePrefix::PGH00 => vec![CheckCode::PGH001],
CheckCodePrefix::PGH001 => vec![CheckCode::PGH001],
CheckCodePrefix::PLC => vec![CheckCode::PLC3002],
CheckCodePrefix::PLC3 => vec![CheckCode::PLC3002],
CheckCodePrefix::PLC30 => vec![CheckCode::PLC3002],
CheckCodePrefix::PLC300 => vec![CheckCode::PLC3002],
CheckCodePrefix::PLC3002 => vec![CheckCode::PLC3002],
CheckCodePrefix::PLE => vec![CheckCode::PLE1142],
CheckCodePrefix::PLE1 => vec![CheckCode::PLE1142],
CheckCodePrefix::PLE11 => vec![CheckCode::PLE1142],
CheckCodePrefix::PLE114 => vec![CheckCode::PLE1142],
CheckCodePrefix::PLE1142 => vec![CheckCode::PLE1142],
CheckCodePrefix::PLR => vec![CheckCode::PLR1701, CheckCode::PLR0206],
CheckCodePrefix::PLR0 => vec![CheckCode::PLR0206],
CheckCodePrefix::PLR02 => vec![CheckCode::PLR0206],
CheckCodePrefix::PLR020 => vec![CheckCode::PLR0206],
CheckCodePrefix::PLR0206 => vec![CheckCode::PLR0206],
CheckCodePrefix::PLR1 => vec![CheckCode::PLR1701],
CheckCodePrefix::PLR17 => vec![CheckCode::PLR1701],
CheckCodePrefix::PLR170 => vec![CheckCode::PLR1701],
CheckCodePrefix::PLR1701 => vec![CheckCode::PLR1701],
CheckCodePrefix::Q => vec![
CheckCode::Q000,
CheckCode::Q001,
@@ -1615,11 +1651,29 @@ impl CheckCodePrefix {
CheckCodePrefix::N816 => SuffixLength::Three,
CheckCodePrefix::N817 => SuffixLength::Three,
CheckCodePrefix::N818 => SuffixLength::Three,
CheckCodePrefix::PGH => SuffixLength::Zero,
CheckCodePrefix::PGH0 => SuffixLength::One,
CheckCodePrefix::PGH00 => SuffixLength::Two,
CheckCodePrefix::PGH001 => SuffixLength::Three,
CheckCodePrefix::PLC => SuffixLength::Zero,
CheckCodePrefix::PLC3 => SuffixLength::One,
CheckCodePrefix::PLC30 => SuffixLength::Two,
CheckCodePrefix::PLC300 => SuffixLength::Three,
CheckCodePrefix::PLC3002 => SuffixLength::Four,
CheckCodePrefix::PLE => SuffixLength::Zero,
CheckCodePrefix::PLE1 => SuffixLength::One,
CheckCodePrefix::PLE11 => SuffixLength::Two,
CheckCodePrefix::PLE114 => SuffixLength::Three,
CheckCodePrefix::PLE1142 => SuffixLength::Four,
CheckCodePrefix::PLR => SuffixLength::Zero,
CheckCodePrefix::PLR0 => SuffixLength::One,
CheckCodePrefix::PLR02 => SuffixLength::Two,
CheckCodePrefix::PLR020 => SuffixLength::Three,
CheckCodePrefix::PLR0206 => SuffixLength::Four,
CheckCodePrefix::PLR1 => SuffixLength::One,
CheckCodePrefix::PLR17 => SuffixLength::Two,
CheckCodePrefix::PLR170 => SuffixLength::Three,
CheckCodePrefix::PLR1701 => SuffixLength::Four,
CheckCodePrefix::Q => SuffixLength::Zero,
CheckCodePrefix::Q0 => SuffixLength::One,
CheckCodePrefix::Q00 => SuffixLength::Two,
@@ -1713,7 +1767,10 @@ pub const CATEGORIES: &[CheckCodePrefix] = &[
CheckCodePrefix::I,
CheckCodePrefix::M,
CheckCodePrefix::N,
CheckCodePrefix::PGH,
CheckCodePrefix::PLC,
CheckCodePrefix::PLE,
CheckCodePrefix::PLR,
CheckCodePrefix::Q,
CheckCodePrefix::RUF,
CheckCodePrefix::S,

View File

@@ -65,6 +65,7 @@ pub mod printer;
mod pycodestyle;
mod pydocstyle;
mod pyflakes;
mod pygrep_hooks;
mod pylint;
mod python;
mod pyupgrade;

View File

@@ -10,7 +10,7 @@ use regex::Regex;
use crate::checks::{Check, CheckCode};
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
static NO_QA_LINE_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?P<spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)",
)
@@ -18,6 +18,18 @@ static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
});
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").unwrap());
/// Return `true` if the given line exempts the entire file from checking
/// (a file-level `# flake8: noqa` or `# ruff: noqa` directive).
pub fn is_file_exempt(line: &str) -> bool {
    // The exact casings recognized for the file-level exemption directive.
    const EXEMPTION_PREFIXES: [&str; 6] = [
        "# flake8: noqa",
        "# flake8: NOQA",
        "# flake8: NoQA",
        "# ruff: noqa",
        "# ruff: NOQA",
        "# ruff: NoQA",
    ];
    let trimmed = line.trim_start();
    EXEMPTION_PREFIXES
        .iter()
        .any(|prefix| trimmed.starts_with(prefix))
}
#[derive(Debug)]
pub enum Directive<'a> {
None,
@@ -26,7 +38,7 @@ pub enum Directive<'a> {
}
pub fn extract_noqa_directive(line: &str) -> Directive {
match NO_QA_REGEX.captures(line) {
match NO_QA_LINE_REGEX.captures(line) {
Some(caps) => match caps.name("spaces") {
Some(spaces) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
@@ -70,9 +82,13 @@ fn add_noqa_inner(
noqa_line_for: &IntMap<usize, usize>,
external: &BTreeSet<String>,
) -> (usize, String) {
let lines: Vec<&str> = contents.lines().collect();
let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
for lineno in 0..lines.len() {
for (lineno, line) in contents.lines().enumerate() {
// If we hit an exemption for the entire file, bail.
if is_file_exempt(line) {
return (0, contents.to_string());
}
let mut codes: BTreeSet<&CheckCode> = BTreeSet::new();
for check in checks {
if check.location.row() == lineno + 1 {
@@ -93,7 +109,7 @@ fn add_noqa_inner(
let mut count: usize = 0;
let mut output = String::new();
for (lineno, line) in lines.iter().enumerate() {
for (lineno, line) in contents.lines().enumerate() {
match matches_by_line.get(&lineno) {
None => {
output.push_str(line);
@@ -155,7 +171,7 @@ fn add_noqa_inner(
output.push('\n');
// Only count if the new line is an actual edit.
if &formatted != line {
if formatted != line {
count += 1;
}
}
@@ -176,20 +192,20 @@ mod tests {
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::noqa::{add_noqa_inner, NO_QA_REGEX};
use crate::noqa::{add_noqa_inner, NO_QA_LINE_REGEX};
#[test]
fn regex() {
assert!(NO_QA_REGEX.is_match("# noqa"));
assert!(NO_QA_REGEX.is_match("# NoQA"));
assert!(NO_QA_LINE_REGEX.is_match("# noqa"));
assert!(NO_QA_LINE_REGEX.is_match("# NoQA"));
assert!(NO_QA_REGEX.is_match("# noqa: F401"));
assert!(NO_QA_REGEX.is_match("# NoQA: F401"));
assert!(NO_QA_REGEX.is_match("# noqa: F401, E501"));
assert!(NO_QA_LINE_REGEX.is_match("# noqa: F401"));
assert!(NO_QA_LINE_REGEX.is_match("# NoQA: F401"));
assert!(NO_QA_LINE_REGEX.is_match("# noqa: F401, E501"));
assert!(NO_QA_REGEX.is_match("# noqa:F401"));
assert!(NO_QA_REGEX.is_match("# NoQA:F401"));
assert!(NO_QA_REGEX.is_match("# noqa:F401, E501"));
assert!(NO_QA_LINE_REGEX.is_match("# noqa:F401"));
assert!(NO_QA_LINE_REGEX.is_match("# NoQA:F401"));
assert!(NO_QA_LINE_REGEX.is_match("# noqa:F401, E501"));
}
#[test]

View File

@@ -8,6 +8,7 @@ use rustpython_ast::{Constant, ExprKind, Location, StmtKind};
use crate::ast::types::Range;
use crate::ast::whitespace;
use crate::ast::whitespace::LinesWithTrailingNewline;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};
@@ -126,7 +127,7 @@ pub fn one_liner(checker: &mut Checker, definition: &Definition) {
{
let mut line_count = 0;
let mut non_empty_line_count = 0;
for line in string.lines() {
for line in LinesWithTrailingNewline::from(string) {
line_count += 1;
if !line.trim().is_empty() {
non_empty_line_count += 1;
@@ -136,7 +137,7 @@ pub fn one_liner(checker: &mut Checker, definition: &Definition) {
}
}
if non_empty_line_count == 1 && line_count > 1 {
if non_empty_line_count == 1 && (line_count > 1) {
checker.add_check(Check::new(
CheckKind::FitsOnOneLine,
Range::from_located(docstring),
@@ -387,12 +388,14 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
..
} = &docstring.node
{
let lines: Vec<&str> = string.lines().collect();
// Split the docstring into lines.
let lines: Vec<&str> = LinesWithTrailingNewline::from(string).collect();
if lines.len() <= 1 {
return;
}
let docstring_indent = whitespace::indentation(checker, docstring);
let mut has_seen_tab = docstring_indent.contains('\t');
let mut is_over_indented = true;
let mut over_indented_lines = vec![];
@@ -418,7 +421,9 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
if checker.settings.enabled.contains(&CheckCode::D207) {
// We report under-indentation on every line. This isn't great, but enables
// autofix.
if !is_blank && line_indent.len() < docstring_indent.len() {
if (i == lines.len() - 1 || !is_blank)
&& line_indent.len() < docstring_indent.len()
{
let mut check = Check::new(
CheckKind::NoUnderIndentation,
Range {
@@ -524,7 +529,7 @@ pub fn newline_after_last_paragraph(checker: &mut Checker, definition: &Definiti
} = &docstring.node
{
let mut line_count = 0;
for line in string.lines() {
for line in LinesWithTrailingNewline::from(string) {
if !line.trim().is_empty() {
line_count += 1;
}
@@ -570,7 +575,7 @@ pub fn no_surrounding_whitespace(checker: &mut Checker, definition: &Definition)
..
} = &docstring.node
{
let mut lines = string.lines();
let mut lines = LinesWithTrailingNewline::from(string);
if let Some(line) = lines.next() {
let trimmed = line.trim();
if trimmed.is_empty() {
@@ -627,7 +632,7 @@ pub fn multi_line_summary_start(checker: &mut Checker, definition: &Definition)
..
} = &docstring.node
{
if string.lines().nth(1).is_some() {
if LinesWithTrailingNewline::from(string).nth(1).is_some() {
if let Some(first_line) = checker
.locator
.slice_source_code_range(&Range::from_located(docstring))
@@ -871,7 +876,7 @@ pub fn sections(checker: &mut Checker, definition: &Definition) {
..
} = &docstring.node
{
let lines: Vec<&str> = string.lines().collect();
let lines: Vec<&str> = LinesWithTrailingNewline::from(string).collect();
if lines.len() < 2 {
return;
}

View File

@@ -17,6 +17,21 @@ expression: checks
end_location:
row: 232
column: 0
- kind: NoUnderIndentation
location:
row: 244
column: 0
end_location:
row: 244
column: 0
fix:
content: " "
location:
row: 244
column: 0
end_location:
row: 244
column: 0
- kind: NoUnderIndentation
location:
row: 440
@@ -32,4 +47,19 @@ expression: checks
end_location:
row: 440
column: 4
- kind: NoUnderIndentation
location:
row: 441
column: 0
end_location:
row: 441
column: 0
fix:
content: " "
location:
row: 441
column: 0
end_location:
row: 441
column: 4

View File

@@ -0,0 +1,15 @@
use rustpython_ast::{Expr, ExprKind};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// PGH001
/// Flag any direct call to the builtin `eval`, i.e. a call whose callee is the
/// bare name `eval` and where `eval` has not been shadowed by a local binding.
pub fn no_eval(checker: &mut Checker, func: &Expr) {
    if let ExprKind::Name { id, .. } = &func.node {
        // Only report when `eval` still resolves to the builtin.
        if id == "eval" && checker.is_builtin("eval") {
            checker.add_check(Check::new(CheckKind::NoEval, Range::from_located(func)));
        }
    }
}

30
src/pygrep_hooks/mod.rs Normal file
View File

@@ -0,0 +1,30 @@
pub mod checks;
// Snapshot tests for the pygrep-hooks rules: each fixture file is linted with a
// single rule enabled, and the resulting checks are compared against an `insta`
// snapshot.
#[cfg(test)]
mod tests {
    use std::convert::AsRef;
    use std::path::Path;

    use anyhow::Result;
    use test_case::test_case;

    use crate::checks::CheckCode;
    use crate::linter::test_path;
    use crate::settings;

    // One generated test per (rule, fixture) pair; the trailing string literal
    // is the generated test's name.
    #[test_case(CheckCode::PGH001, Path::new("PGH001_0.py"); "PGH001_0")]
    #[test_case(CheckCode::PGH001, Path::new("PGH001_1.py"); "PGH001_1")]
    fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
        // Snapshot name combines the rule code and the fixture filename.
        let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
        let mut checks = test_path(
            Path::new("./resources/test/fixtures/pygrep-hooks")
                .join(path)
                .as_path(),
            &settings::Settings::for_rule(check_code),
            true,
        )?;
        // Sort by location so the snapshot is deterministic regardless of the
        // order in which checks were emitted.
        checks.sort_by_key(|check| check.location);
        insta::assert_yaml_snapshot!(snapshot, checks);
        Ok(())
    }
}

View File

@@ -0,0 +1,21 @@
---
source: src/pygrep_hooks/mod.rs
expression: checks
---
- kind: NoEval
location:
row: 3
column: 0
end_location:
row: 3
column: 4
fix: ~
- kind: NoEval
location:
row: 9
column: 4
end_location:
row: 9
column: 8
fix: ~

View File

@@ -0,0 +1,6 @@
---
source: src/pygrep_hooks/mod.rs
expression: checks
---
[]

View File

@@ -11,14 +11,17 @@ mod tests {
use crate::linter::test_path;
use crate::Settings;
#[test_case(Path::new("await_outside_async.py"))]
fn checks(path: &Path) -> Result<()> {
#[test_case(CheckCode::PLC3002, Path::new("unnecessary_direct_lambda_call.py"); "PLC3002")]
#[test_case(CheckCode::PLE1142, Path::new("await_outside_async.py"); "PLE1142")]
#[test_case(CheckCode::PLR0206, Path::new("property_with_parameters.py"); "PLR0206")]
#[test_case(CheckCode::PLR1701, Path::new("consider_merging_isinstance.py"); "PLR1701")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}", path.to_string_lossy());
let mut checks = test_path(
Path::new("./resources/test/fixtures/pylint")
.join(path)
.as_path(),
&Settings::for_rules(vec![CheckCode::PLE1142]),
&Settings::for_rules(vec![check_code]),
true,
)?;
checks.sort_by_key(|check| check.location);

View File

@@ -1,10 +1,22 @@
use rustpython_ast::Expr;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_ast::{Arguments, Boolop, Expr, ExprKind, Stmt};
use crate::ast::types::{FunctionScope, Range, ScopeKind};
use crate::check_ast::Checker;
use crate::checks::CheckKind;
use crate::Check;
/// PLC3002
/// Flag calls whose callee is a lambda expression literal, e.g. `(lambda: 1)()`;
/// such expressions should simply be executed inline.
pub fn unnecessary_direct_lambda_call(checker: &mut Checker, expr: &Expr, func: &Expr) {
    if matches!(&func.node, ExprKind::Lambda { .. }) {
        checker.add_check(Check::new(
            CheckKind::UnnecessaryDirectLambdaCall,
            Range::from_located(expr),
        ));
    }
}
/// PLE1142
pub fn await_outside_async(checker: &mut Checker, expr: &Expr) {
if !checker
@@ -24,3 +36,73 @@ pub fn await_outside_async(checker: &mut Checker, expr: &Expr) {
));
}
}
/// PLR0206
/// Flag `@property`-decorated functions that declare parameters beyond `self`
/// (properties cannot receive arguments at call sites).
pub fn property_with_parameters(
    checker: &mut Checker,
    stmt: &Stmt,
    decorator_list: &[Expr],
    args: &Arguments,
) {
    // Only consider functions decorated with a bare `property` name.
    let has_property_decorator = decorator_list
        .iter()
        .any(|decorator| matches!(&decorator.node, ExprKind::Name { id, .. } if id == "property"));
    // Skip when undecorated, or when `property` no longer refers to the builtin.
    if !has_property_decorator || !checker.is_builtin("property") {
        return;
    }
    // Count positional, positional-only, and keyword-only parameters; more than
    // one means something beyond `self` was declared.
    let parameter_count = args
        .args
        .iter()
        .chain(args.posonlyargs.iter())
        .chain(args.kwonlyargs.iter())
        .count();
    if parameter_count > 1 {
        checker.add_check(Check::new(
            CheckKind::PropertyWithParameters,
            Range::from_located(stmt),
        ));
    }
}
/// PLR1701
/// Flag `or`-chains of `isinstance` calls on the same object that could be
/// merged into a single `isinstance(obj, (T1, T2, ...))` call.
pub fn consider_merging_isinstance(
    checker: &mut Checker,
    expr: &Expr,
    op: &Boolop,
    values: &[Expr],
) {
    // Only `or`-chains qualify, and only while `isinstance` is still the builtin.
    if !matches!(op, Boolop::Or) || !checker.is_builtin("isinstance") {
        return;
    }

    // Map each checked object (by its source representation) to the set of
    // types it is compared against across the chain.
    let mut types_per_obj: FxHashMap<String, FxHashSet<String>> = FxHashMap::default();
    for value in values {
        if let ExprKind::Call { func, args, .. } = &value.node {
            if !matches!(&func.node, ExprKind::Name { id, .. } if id == "isinstance") {
                continue;
            }
            // Only the two-argument form `isinstance(obj, types)` is relevant.
            if let [obj, types] = &args[..] {
                // A tuple second argument contributes each member type;
                // anything else contributes a single type.
                let type_names: Vec<String> = match &types.node {
                    ExprKind::Tuple { elts, .. } => {
                        elts.iter().map(std::string::ToString::to_string).collect()
                    }
                    _ => vec![types.to_string()],
                };
                types_per_obj
                    .entry(obj.to_string())
                    .or_default()
                    .extend(type_names);
            }
        }
    }

    // Report every object checked against more than one type, with the type
    // names sorted for deterministic output.
    for (obj, types) in types_per_obj {
        if types.len() > 1 {
            checker.add_check(Check::new(
                CheckKind::ConsiderMergingIsinstance(obj, types.into_iter().sorted().collect()),
                Range::from_located(expr),
            ));
        }
    }
}

View File

@@ -0,0 +1,78 @@
---
source: src/pylint/mod.rs
expression: checks
---
- kind:
ConsiderMergingIsinstance:
- "var[3]"
- - float
- int
location:
row: 15
column: 31
end_location:
row: 15
column: 96
fix: ~
- kind:
ConsiderMergingIsinstance:
- "var[4]"
- - float
- int
location:
row: 17
column: 37
end_location:
row: 17
column: 103
fix: ~
- kind:
ConsiderMergingIsinstance:
- "var[5]"
- - float
- int
location:
row: 19
column: 37
end_location:
row: 19
column: 73
fix: ~
- kind:
ConsiderMergingIsinstance:
- "var[10]"
- - list
- str
location:
row: 23
column: 38
end_location:
row: 23
column: 158
fix: ~
- kind:
ConsiderMergingIsinstance:
- "var[11]"
- - float
- int
location:
row: 24
column: 38
end_location:
row: 24
column: 95
fix: ~
- kind:
ConsiderMergingIsinstance:
- "var[12]"
- - float
- int
- list
location:
row: 30
column: 47
end_location:
row: 30
column: 75
fix: ~

View File

@@ -0,0 +1,29 @@
---
source: src/pylint/mod.rs
expression: checks
---
- kind: PropertyWithParameters
location:
row: 7
column: 4
end_location:
row: 10
column: 4
fix: ~
- kind: PropertyWithParameters
location:
row: 11
column: 4
end_location:
row: 14
column: 4
fix: ~
- kind: PropertyWithParameters
location:
row: 15
column: 4
end_location:
row: 19
column: 0
fix: ~

View File

@@ -0,0 +1,29 @@
---
source: src/pylint/mod.rs
expression: checks
---
- kind: UnnecessaryDirectLambdaCall
location:
row: 4
column: 4
end_location:
row: 4
column: 33
fix: ~
- kind: UnnecessaryDirectLambdaCall
location:
row: 5
column: 8
end_location:
row: 5
column: 27
fix: ~
- kind: UnnecessaryDirectLambdaCall
location:
row: 5
column: 29
end_location:
row: 5
column: 47
fix: ~

View File

@@ -34,4 +34,36 @@ expression: checks
end_location:
row: 35
column: 12
- kind:
UsePEP585Annotation: List
location:
row: 42
column: 26
end_location:
row: 42
column: 30
fix:
content: list
location:
row: 42
column: 26
end_location:
row: 42
column: 30
- kind:
UsePEP585Annotation: List
location:
row: 42
column: 37
end_location:
row: 42
column: 41
fix:
content: list
location:
row: 42
column: 37
end_location:
row: 42
column: 41

View File

@@ -17,4 +17,19 @@ expression: checks
end_location:
row: 40
column: 16
- kind: UsePEP604Annotation
location:
row: 42
column: 20
end_location:
row: 42
column: 47
fix:
content: "List[int] | List[str]"
location:
row: 42
column: 20
end_location:
row: 42
column: 47

View File

@@ -50,4 +50,28 @@ mod tests {
insta::assert_yaml_snapshot!(checks);
Ok(())
}
#[test]
fn flake8_noqa() -> Result<()> {
let mut checks = test_path(
Path::new("./resources/test/fixtures/ruff/flake8_noqa.py"),
&settings::Settings::for_rules(vec![CheckCode::F401, CheckCode::F841]),
true,
)?;
checks.sort_by_key(|check| check.location);
insta::assert_yaml_snapshot!(checks);
Ok(())
}
#[test]
fn ruff_noqa() -> Result<()> {
let mut checks = test_path(
Path::new("./resources/test/fixtures/ruff/ruff_noqa.py"),
&settings::Settings::for_rules(vec![CheckCode::F401, CheckCode::F841]),
true,
)?;
checks.sort_by_key(|check| check.location);
insta::assert_yaml_snapshot!(checks);
Ok(())
}
}

View File

@@ -0,0 +1,6 @@
---
source: src/rules/mod.rs
expression: checks
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/rules/mod.rs
expression: checks
---
[]