Compare commits

...

11 Commits

Author SHA1 Message Date
Charlie Marsh
124782771f Bump version to 0.0.130 2022-11-20 10:37:19 -05:00
Charlie Marsh
98cab5cdba Add class names to NamedTuple and TypedDict rules (#829) 2022-11-20 10:29:47 -05:00
Martin Lehoux
40f38c94a5 Implement U014: Convert NamedTuple function to class (#819) 2022-11-20 10:26:15 -05:00
Harutaka Kawamura
7839204bf7 Implement autofix for B010 (#823) 2022-11-20 10:14:29 -05:00
Jonathan Plasse
e63ea704f0 Adjust U011 start location (#828) 2022-11-20 10:13:29 -05:00
Charlie Marsh
4be09b45ea Bump version to 0.0.129 2022-11-19 19:52:40 -05:00
Harutaka Kawamura
13e8ed0a0a Implement autofix for E731 (#814) 2022-11-19 19:51:41 -05:00
Anders Kaseorg
4161d4ae32 Exempt parameters with immutable annotations from B006 (#821) 2022-11-19 19:46:08 -05:00
Charlie Marsh
99f7854d8c Mark nonlocal variables as used in parent scopes (#822) 2022-11-19 19:21:02 -05:00
Harutaka Kawamura
a580d1a858 Adjust UnusedNOQA start location (#817) 2022-11-19 09:30:02 -05:00
Martin Lehoux
86806a9e39 U013: Also convert typing.TypedDict (#810) 2022-11-19 09:29:05 -05:00
47 changed files with 1172 additions and 364 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.128
rev: v0.0.130
hooks:
- id: ruff

6
Cargo.lock generated
View File

@@ -940,7 +940,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.128-dev.0"
version = "0.0.130-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -2248,7 +2248,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.128"
version = "0.0.130"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -2298,7 +2298,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.128"
version = "0.0.130"
dependencies = [
"anyhow",
"clap 4.0.22",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.128"
version = "0.0.130"
edition = "2021"
[lib]

View File

@@ -380,7 +380,7 @@ For more, see [pycodestyle](https://pypi.org/project/pycodestyle/2.9.1/) on PyPI
| E714 | NotIsTest | Test for object identity should be `is not` | 🛠 |
| E721 | TypeComparison | Do not compare types, use `isinstance()` | |
| E722 | DoNotUseBareExcept | Do not use bare `except` | |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | |
| E731 | DoNotAssignLambda | Do not assign a lambda expression, use a def | 🛠 |
| E741 | AmbiguousVariableName | Ambiguous variable name: `...` | |
| E742 | AmbiguousClassName | Ambiguous class name: `...` | |
| E743 | AmbiguousFunctionName | Ambiguous function name: `...` | |
@@ -466,6 +466,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| U011 | UnnecessaryLRUCacheParams | Unnecessary parameters to `functools.lru_cache` | 🛠 |
| U012 | UnnecessaryEncodeUTF8 | Unnecessary call to `encode` as UTF-8 | 🛠 |
| U013 | ConvertTypedDictFunctionalToClass | Convert `TypedDict` functional syntax to class syntax | 🛠 |
| U014 | ConvertNamedTupleFunctionalToClass | Convert `NamedTuple` functional syntax to class syntax | 🛠 |
### pep8-naming
@@ -549,7 +550,7 @@ For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/
| B007 | UnusedLoopControlVariable | Loop control variable `i` not used within the loop body | 🛠 |
| B008 | FunctionCallArgumentDefault | Do not perform function call in argument defaults | |
| B009 | GetAttrWithConstant | Do not call `getattr` with a constant attribute value. It is not any safer than normal property access. | 🛠 |
| B010 | SetAttrWithConstant | Do not call `setattr` with a constant attribute value. It is not any safer than normal property access. | |
| B010 | SetAttrWithConstant | Do not call `setattr` with a constant attribute value. It is not any safer than normal property access. | 🛠 |
| B011 | DoNotAssertFalse | Do not `assert False` (`python -O` removes these calls), raise `AssertionError()` | 🛠 |
| B012 | JumpStatementInFinally | `return/continue/break` inside finally blocks cause exceptions to be silenced | |
| B013 | RedundantTupleInExceptionHandler | A length-one tuple literal is redundant. Write `except ValueError` instead of `except (ValueError,)`. | |
@@ -820,7 +821,7 @@ including:
- [`flake8-boolean-trap`](https://pypi.org/project/flake8-boolean-trap/)
- [`mccabe`](https://pypi.org/project/mccabe/)
- [`isort`](https://pypi.org/project/isort/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/33)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/33)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis Flake8:
@@ -852,7 +853,7 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`mccabe`](https://pypi.org/project/mccabe/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/33).
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/33).
If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, free to file an Issue.

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.128"
version = "0.0.130"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.128"
version = "0.0.130"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.128-dev.0"
version = "0.0.130-dev.0"
edition = "2021"
[lib]

View File

@@ -185,3 +185,23 @@ def nested_b008(a=random.randint(0, dt.datetime.now().year)):
# Ignore lambda contents since they are evaluated at call time.
def foo(f=lambda x: print(x)):
f(1)
from collections import abc
from typing import Annotated, Dict, Optional, Sequence, Union, Set
def immutable_annotations(
a: Sequence[int] | None = [],
b: Optional[abc.Mapping[int, int]] = {},
c: Annotated[Union[abc.Set[str], abc.Sized], "annotation"] = set(),
):
pass
def mutable_annotations(
a: list[int] | None = [],
b: Optional[Dict[int, int]] = {},
c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
):
pass

View File

@@ -34,3 +34,4 @@ setattr(foo, "bar", None)
setattr(foo, "_123abc", None)
setattr(foo, "abc123", None)
setattr(foo, r"abc123", None)
setattr(foo.bar, r"baz", None)

View File

@@ -35,3 +35,20 @@ def f4():
_ = 1
__ = 1
_discarded = 1
a = 1
def f5():
global a
# Used in `f7` via `nonlocal`.
b = 1
def f6():
# F841
b = 1
def f7():
nonlocal b

View File

@@ -18,6 +18,14 @@ def f() -> None:
# Invalid (and unimplemented)
d = 1 # noqa: F841, W191
# fmt: off
# Invalid - no space before #
d = 1# noqa: E501
# Invalid - many spaces before #
d = 1 # noqa: E501
# fmt: on
# Valid
_ = """Lorem ipsum dolor sit amet.

View File

@@ -1,4 +1,5 @@
from typing import TypedDict, NotRequired, Literal
import typing
# dict literal
MyType1 = TypedDict("MyType1", {"a": int, "b": str})
@@ -27,3 +28,6 @@ MyType9 = TypedDict("MyType9", {"in": int, "x-y": int})
# using Literal type
MyType10 = TypedDict("MyType10", {"key": Literal["value"]})
# using namespace TypedDict
MyType11 = typing.TypedDict("MyType11", {"key": int})

22
resources/test/fixtures/U014.py vendored Normal file
View File

@@ -0,0 +1,22 @@
from typing import NamedTuple
import typing
# with complex annotations
NT1 = NamedTuple("NT1", [("a", int), ("b", tuple[str, ...])])
# with default values as list
NT2 = NamedTuple(
"NT2",
[("a", int), ("b", str), ("c", list[bool])],
defaults=["foo", [True]],
)
# with namespace
NT3 = typing.NamedTuple("NT3", [("a", int), ("b", str)])
# with too many default values
NT4 = NamedTuple(
"NT4",
[("a", int), ("b", str)],
defaults=[1, "bar", "baz"],
)

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.128"
version = "0.0.130"
edition = "2021"
[dependencies]

View File

@@ -1,7 +1,10 @@
use fnv::{FnvHashMap, FnvHashSet};
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, StmtKind};
use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::SourceCodeLocator;
#[inline(always)]
fn collect_call_path_inner<'a>(expr: &'a Expr, parts: &mut Vec<&'a str>) {
@@ -261,6 +264,34 @@ pub fn to_absolute(relative: &Location, base: &Location) -> Location {
}
}
/// Return `true` if any non-whitespace content precedes `stmt` on its own line.
pub fn match_leading_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
    // Slice the source from column 0 up to the statement's start location.
    let leading = locator.slice_source_code_range(&Range {
        location: Location::new(stmt.location.row(), 0),
        end_location: stmt.location,
    });
    // Content exists unless the prefix is entirely whitespace.
    !leading.chars().all(char::is_whitespace)
}
/// Return `true` if non-comment, non-whitespace content follows `stmt` on the
/// same line.
pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
    let end = stmt.end_location.unwrap();
    // Slice from the statement's end to the start of the next row.
    let trailing = locator.slice_source_code_range(&Range {
        location: end,
        end_location: Location::new(end.row() + 1, 0),
    });
    for c in trailing.chars() {
        // A `#` starts a comment; anything after it doesn't count as content.
        if c == '#' {
            return false;
        }
        if !c.is_whitespace() {
            return true;
        }
    }
    false
}
#[cfg(test)]
mod tests {
use anyhow::Result;

View File

@@ -3,3 +3,4 @@ pub mod operations;
pub mod relocate;
pub mod types;
pub mod visitor;
pub mod whitespace;

View File

@@ -19,9 +19,7 @@ impl Range {
pub fn from_located<T>(located: &Located<T>) -> Self {
Range {
location: located.location,
end_location: located
.end_location
.expect("AST nodes should have end_location."),
end_location: located.end_location.unwrap(),
}
}
}
@@ -83,6 +81,7 @@ pub enum BindingKind {
Binding,
LoopVar,
Global,
Nonlocal,
Builtin,
ClassDefinition,
Definition,

View File

@@ -3,12 +3,6 @@ use rustpython_ast::{Located, Location};
use crate::ast::types::Range;
use crate::check_ast::Checker;
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
"ur\"\"\"", "ur'''", "u\"\"\"", "u'''", "r\"\"\"", "r'''", "\"\"\"", "'''",
];
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &["ur\"", "ur'", "u\"", "u'", "r\"", "r'", "\"", "'"];
/// Extract the leading words from a line of text.
pub fn leading_words(line: &str) -> String {
line.trim()

View File

@@ -213,21 +213,74 @@ where
// Pre-visit.
match &stmt.node {
StmtKind::Global { names } | StmtKind::Nonlocal { names } => {
let global_scope_id = self.scopes[GLOBAL_SCOPE_INDEX].id;
let scope =
&mut self.scopes[*(self.scope_stack.last().expect("No current scope found."))];
if scope.id != global_scope_id {
StmtKind::Global { names } => {
let scope_index = *self.scope_stack.last().expect("No current scope found.");
if scope_index != GLOBAL_SCOPE_INDEX {
let scope = &mut self.scopes[scope_index];
let usage = Some((scope.id, Range::from_located(stmt)));
for name in names {
// Add a binding to the current scope.
scope.values.insert(
name,
Binding {
kind: BindingKind::Global,
used: Some((global_scope_id, Range::from_located(stmt))),
used: usage,
range: Range::from_located(stmt),
},
);
}
// Mark the binding in the global scope as used.
for name in names {
if let Some(mut existing) = self.scopes[GLOBAL_SCOPE_INDEX]
.values
.get_mut(&name.as_str())
{
existing.used = usage;
}
}
}
if self.settings.enabled.contains(&CheckCode::E741) {
let location = Range::from_located(stmt);
self.add_checks(
names
.iter()
.filter_map(|name| {
pycodestyle::checks::ambiguous_variable_name(name, location)
})
.into_iter(),
);
}
}
StmtKind::Nonlocal { names } => {
let scope_index = *self.scope_stack.last().expect("No current scope found.");
if scope_index != GLOBAL_SCOPE_INDEX {
let scope = &mut self.scopes[scope_index];
let usage = Some((scope.id, Range::from_located(stmt)));
for name in names {
// Add a binding to the current scope.
scope.values.insert(
name,
Binding {
kind: BindingKind::Global,
used: usage,
range: Range::from_located(stmt),
},
);
}
// Mark the binding in the defining scopes as used too. (Skip the global scope
// and the current scope.)
for name in names {
for index in self.scope_stack.iter().skip(1).rev().skip(1) {
if let Some(mut existing) =
self.scopes[*index].values.get_mut(&name.as_str())
{
existing.used = usage;
}
}
}
}
if self.settings.enabled.contains(&CheckCode::E741) {
@@ -928,11 +981,7 @@ where
StmtKind::Assign { targets, value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
if let [target] = &targets[..] {
if let Some(check) =
pycodestyle::checks::do_not_assign_lambda(target, value, stmt)
{
self.add_check(check);
}
pycodestyle::plugins::do_not_assign_lambda(self, target, value, stmt)
}
}
if self.settings.enabled.contains(&CheckCode::U001) {
@@ -953,15 +1002,16 @@ where
self, stmt, targets, value,
);
}
if self.settings.enabled.contains(&CheckCode::U014) {
pyupgrade::plugins::convert_named_tuple_functional_to_class(
self, stmt, targets, value,
);
}
}
StmtKind::AnnAssign { target, value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
if let Some(value) = value {
if let Some(check) =
pycodestyle::checks::do_not_assign_lambda(target, value, stmt)
{
self.add_check(check);
}
pycodestyle::plugins::do_not_assign_lambda(self, target, value, stmt);
}
}
}

View File

@@ -103,7 +103,7 @@ pub fn check_lines(
matches.push(check.kind.code().as_ref());
ignored.push(index)
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
ignored.push(index);
@@ -133,7 +133,7 @@ pub fn check_lines(
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
} else {
@@ -170,7 +170,7 @@ pub fn check_lines(
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
}
(Directive::Codes(_, _, codes), matches) => {
(Directive::Codes(.., codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
} else {
@@ -186,7 +186,7 @@ pub fn check_lines(
if enforce_noqa {
for (row, (directive, matches)) in noqa_directives {
match directive {
Directive::All(start, end) => {
Directive::All(spaces, start, end) => {
if matches.is_empty() {
let mut check = Check::new(
CheckKind::UnusedNOQA(None),
@@ -197,14 +197,14 @@ pub fn check_lines(
);
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
check.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
}
line_checks.push(check);
}
}
Directive::Codes(start, end, codes) => {
Directive::Codes(spaces, start, end, codes) => {
let mut invalid_codes = vec![];
let mut valid_codes = vec![];
for code in codes {
@@ -226,12 +226,12 @@ pub fn check_lines(
if autofix.patch() && settings.fixable.contains(check.kind.code()) {
if valid_codes.is_empty() {
check.amend(Fix::deletion(
Location::new(row + 1, start),
Location::new(row + 1, start - spaces),
Location::new(row + 1, lines[row].chars().count()),
));
} else {
check.amend(Fix::replacement(
format!(" # noqa: {}", valid_codes.join(", ")),
format!("# noqa: {}", valid_codes.join(", ")),
Location::new(row + 1, start),
Location::new(row + 1, lines[row].chars().count()),
));

View File

@@ -170,6 +170,7 @@ pub enum CheckCode {
U011,
U012,
U013,
U014,
// pydocstyle
D100,
D101,
@@ -500,7 +501,8 @@ pub enum CheckKind {
UnnecessaryFutureImport(Vec<String>),
UnnecessaryLRUCacheParams,
UnnecessaryEncodeUTF8,
ConvertTypedDictFunctionalToClass,
ConvertTypedDictFunctionalToClass(String),
ConvertNamedTupleFunctionalToClass(String),
// pydocstyle
BlankLineAfterLastSection(String),
BlankLineAfterSection(String),
@@ -773,7 +775,8 @@ impl CheckCode {
CheckCode::U010 => CheckKind::UnnecessaryFutureImport(vec!["...".to_string()]),
CheckCode::U011 => CheckKind::UnnecessaryLRUCacheParams,
CheckCode::U012 => CheckKind::UnnecessaryEncodeUTF8,
CheckCode::U013 => CheckKind::ConvertTypedDictFunctionalToClass,
CheckCode::U013 => CheckKind::ConvertTypedDictFunctionalToClass("...".to_string()),
CheckCode::U014 => CheckKind::ConvertNamedTupleFunctionalToClass("...".to_string()),
// pydocstyle
CheckCode::D100 => CheckKind::PublicModule,
CheckCode::D101 => CheckKind::PublicClass,
@@ -1005,6 +1008,7 @@ impl CheckCode {
CheckCode::U011 => CheckCategory::Pyupgrade,
CheckCode::U012 => CheckCategory::Pyupgrade,
CheckCode::U013 => CheckCategory::Pyupgrade,
CheckCode::U014 => CheckCategory::Pyupgrade,
CheckCode::D100 => CheckCategory::Pydocstyle,
CheckCode::D101 => CheckCategory::Pydocstyle,
CheckCode::D102 => CheckCategory::Pydocstyle,
@@ -1227,7 +1231,8 @@ impl CheckKind {
CheckKind::UnnecessaryFutureImport(_) => &CheckCode::U010,
CheckKind::UnnecessaryLRUCacheParams => &CheckCode::U011,
CheckKind::UnnecessaryEncodeUTF8 => &CheckCode::U012,
CheckKind::ConvertTypedDictFunctionalToClass => &CheckCode::U013,
CheckKind::ConvertTypedDictFunctionalToClass(_) => &CheckCode::U013,
CheckKind::ConvertNamedTupleFunctionalToClass(_) => &CheckCode::U014,
// pydocstyle
CheckKind::BlankLineAfterLastSection(_) => &CheckCode::D413,
CheckKind::BlankLineAfterSection(_) => &CheckCode::D410,
@@ -1776,8 +1781,11 @@ impl CheckKind {
"Unnecessary parameters to `functools.lru_cache`".to_string()
}
CheckKind::UnnecessaryEncodeUTF8 => "Unnecessary call to `encode` as UTF-8".to_string(),
CheckKind::ConvertTypedDictFunctionalToClass => {
"Convert `TypedDict` functional syntax to class syntax".to_string()
CheckKind::ConvertTypedDictFunctionalToClass(name) => {
format!("Convert `{name}` from `TypedDict` functional to class syntax")
}
CheckKind::ConvertNamedTupleFunctionalToClass(name) => {
format!("Convert `{name}` from `NamedTuple` functional to class syntax")
}
// pydocstyle
CheckKind::FitsOnOneLine => "One-line docstring should fit on one line".to_string(),
@@ -2038,46 +2046,53 @@ impl CheckKind {
pub fn fixable(&self) -> bool {
matches!(
self,
CheckKind::AmbiguousUnicodeCharacterString(_, _)
| CheckKind::AmbiguousUnicodeCharacterDocstring(_, _)
| CheckKind::BlankLineAfterLastSection(_)
| CheckKind::BlankLineAfterSection(_)
CheckKind::AmbiguousUnicodeCharacterString(..)
| CheckKind::AmbiguousUnicodeCharacterDocstring(..)
| CheckKind::BlankLineAfterLastSection(..)
| CheckKind::BlankLineAfterSection(..)
| CheckKind::BlankLineAfterSummary
| CheckKind::BlankLineBeforeSection(_)
| CheckKind::CapitalizeSectionName(_)
| CheckKind::DashedUnderlineAfterSection(_)
| CheckKind::DeprecatedUnittestAlias(_, _)
| CheckKind::BlankLineBeforeSection(..)
| CheckKind::CapitalizeSectionName(..)
| CheckKind::ConvertNamedTupleFunctionalToClass(..)
| CheckKind::ConvertTypedDictFunctionalToClass(..)
| CheckKind::DashedUnderlineAfterSection(..)
| CheckKind::DeprecatedUnittestAlias(..)
| CheckKind::DoNotAssertFalse
| CheckKind::DuplicateHandlerException(_)
| CheckKind::DoNotAssignLambda
| CheckKind::DuplicateHandlerException(..)
| CheckKind::GetAttrWithConstant
| CheckKind::IsLiteral
| CheckKind::NewLineAfterLastParagraph
| CheckKind::NewLineAfterSectionName(_)
| CheckKind::NoBlankLineAfterFunction(_)
| CheckKind::NoBlankLineBeforeClass(_)
| CheckKind::NoBlankLineBeforeFunction(_)
| CheckKind::NoBlankLinesBetweenHeaderAndContent(_)
| CheckKind::NewLineAfterSectionName(..)
| CheckKind::NoBlankLineAfterFunction(..)
| CheckKind::NoBlankLineBeforeClass(..)
| CheckKind::NoBlankLineBeforeFunction(..)
| CheckKind::NoBlankLinesBetweenHeaderAndContent(..)
| CheckKind::NoOverIndentation
| CheckKind::NoSurroundingWhitespace
| CheckKind::NoUnderIndentation
| CheckKind::OneBlankLineAfterClass(_)
| CheckKind::OneBlankLineBeforeClass(_)
| CheckKind::NoneComparison(..)
| CheckKind::NotInTest
| CheckKind::NotIsTest
| CheckKind::OneBlankLineAfterClass(..)
| CheckKind::OneBlankLineBeforeClass(..)
| CheckKind::PEP3120UnnecessaryCodingComment
| CheckKind::PPrintFound
| CheckKind::PrintFound
| CheckKind::RaiseNotImplemented
| CheckKind::SectionNameEndsInColon(_)
| CheckKind::SectionNotOverIndented(_)
| CheckKind::SectionUnderlineAfterName(_)
| CheckKind::SectionUnderlineMatchesSectionLength(_)
| CheckKind::SectionUnderlineNotOverIndented(_)
| CheckKind::SectionNameEndsInColon(..)
| CheckKind::SectionNotOverIndented(..)
| CheckKind::SectionUnderlineAfterName(..)
| CheckKind::SectionUnderlineMatchesSectionLength(..)
| CheckKind::SectionUnderlineNotOverIndented(..)
| CheckKind::SetAttrWithConstant
| CheckKind::SuperCallWithParameters
| CheckKind::TypeOfPrimitive(_)
| CheckKind::UnnecessaryCollectionCall(_)
| CheckKind::UnnecessaryComprehension(_)
| CheckKind::TrueFalseComparison(..)
| CheckKind::TypeOfPrimitive(..)
| CheckKind::UnnecessaryCollectionCall(..)
| CheckKind::UnnecessaryComprehension(..)
| CheckKind::UnnecessaryEncodeUTF8
| CheckKind::ConvertTypedDictFunctionalToClass
| CheckKind::UnnecessaryFutureImport(_)
| CheckKind::UnnecessaryFutureImport(..)
| CheckKind::UnnecessaryGeneratorDict
| CheckKind::UnnecessaryGeneratorList
| CheckKind::UnnecessaryGeneratorSet
@@ -2085,18 +2100,18 @@ impl CheckKind {
| CheckKind::UnnecessaryListCall
| CheckKind::UnnecessaryListComprehensionDict
| CheckKind::UnnecessaryListComprehensionSet
| CheckKind::UnnecessaryLiteralDict(_)
| CheckKind::UnnecessaryLiteralSet(_)
| CheckKind::UnnecessaryLiteralWithinListCall(_)
| CheckKind::UnnecessaryLiteralWithinTupleCall(_)
| CheckKind::UnnecessaryLiteralDict(..)
| CheckKind::UnnecessaryLiteralSet(..)
| CheckKind::UnnecessaryLiteralWithinListCall(..)
| CheckKind::UnnecessaryLiteralWithinTupleCall(..)
| CheckKind::UnsortedImports
| CheckKind::UnusedImport(_, false)
| CheckKind::UnusedLoopControlVariable(_)
| CheckKind::UnusedNOQA(_)
| CheckKind::UsePEP585Annotation(_)
| CheckKind::UnusedLoopControlVariable(..)
| CheckKind::UnusedNOQA(..)
| CheckKind::UsePEP585Annotation(..)
| CheckKind::UsePEP604Annotation
| CheckKind::UselessMetaclassType
| CheckKind::UselessObjectInheritance(_)
| CheckKind::UselessObjectInheritance(..)
)
}
}

View File

@@ -297,6 +297,7 @@ pub enum CheckCodePrefix {
U011,
U012,
U013,
U014,
W,
W2,
W29,
@@ -1111,6 +1112,7 @@ impl CheckCodePrefix {
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
],
CheckCodePrefix::U0 => vec![
CheckCode::U001,
@@ -1125,6 +1127,7 @@ impl CheckCodePrefix {
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
],
CheckCodePrefix::U00 => vec![
CheckCode::U001,
@@ -1149,11 +1152,13 @@ impl CheckCodePrefix {
CheckCode::U011,
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
],
CheckCodePrefix::U010 => vec![CheckCode::U010],
CheckCodePrefix::U011 => vec![CheckCode::U011],
CheckCodePrefix::U012 => vec![CheckCode::U012],
CheckCodePrefix::U013 => vec![CheckCode::U013],
CheckCodePrefix::U014 => vec![CheckCode::U014],
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
CheckCodePrefix::W2 => vec![CheckCode::W292],
CheckCodePrefix::W29 => vec![CheckCode::W292],
@@ -1496,6 +1501,7 @@ impl CheckCodePrefix {
CheckCodePrefix::U011 => PrefixSpecificity::Explicit,
CheckCodePrefix::U012 => PrefixSpecificity::Explicit,
CheckCodePrefix::U013 => PrefixSpecificity::Explicit,
CheckCodePrefix::U014 => PrefixSpecificity::Explicit,
CheckCodePrefix::W => PrefixSpecificity::Category,
CheckCodePrefix::W2 => PrefixSpecificity::Hundreds,
CheckCodePrefix::W29 => PrefixSpecificity::Tens,

View File

@@ -0,0 +1,5 @@
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
"ur\"\"\"", "ur'''", "u\"\"\"", "u'''", "r\"\"\"", "r'''", "\"\"\"", "'''",
];
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &["ur\"", "ur'", "u\"", "u'", "r\"", "r'", "\"", "'"];

View File

@@ -1,7 +1,7 @@
pub mod constants;
pub mod definition;
pub mod extraction;
pub mod google;
pub mod helpers;
pub mod numpy;
pub mod sections;
pub mod styles;

View File

@@ -1,4 +1,4 @@
use crate::docstrings::helpers;
use crate::ast::whitespace;
use crate::docstrings::styles::SectionStyle;
#[derive(Debug)]
@@ -14,7 +14,7 @@ pub(crate) struct SectionContext<'a> {
fn suspected_as_section(line: &str, style: &SectionStyle) -> bool {
style
.lowercase_section_names()
.contains(&helpers::leading_words(line).to_lowercase().as_str())
.contains(&whitespace::leading_words(line).to_lowercase().as_str())
}
/// Check if the suspected context is really a section header.
@@ -64,7 +64,7 @@ pub(crate) fn section_contexts<'a>(
let mut contexts = vec![];
for lineno in suspected_section_indices {
let context = SectionContext {
section_name: helpers::leading_words(lines[lineno]),
section_name: whitespace::leading_words(lines[lineno]),
previous_line: lines[lineno - 1],
line: lines[lineno],
following_lines: &lines[lineno + 1..],

View File

@@ -1,12 +1,12 @@
use fnv::{FnvHashMap, FnvHashSet};
use rustpython_ast::{Arguments, Expr, ExprKind};
use rustpython_ast::{Arguments, Constant, Expr, ExprKind, Operator};
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
const MUTABLE_FUNCS: [(&str, &str); 7] = [
const MUTABLE_FUNCS: &[(&str, &str)] = &[
("", "dict"),
("", "list"),
("", "set"),
@@ -16,6 +16,47 @@ const MUTABLE_FUNCS: [(&str, &str); 7] = [
("collections", "deque"),
];
const IMMUTABLE_TYPES: &[(&str, &str)] = &[
("", "bool"),
("", "bytes"),
("", "complex"),
("", "float"),
("", "frozenset"),
("", "int"),
("", "object"),
("", "range"),
("", "str"),
("collections.abc", "Sized"),
("typing", "LiteralString"),
("typing", "Sized"),
];
const IMMUTABLE_GENERIC_TYPES: &[(&str, &str)] = &[
("", "tuple"),
("collections.abc", "ByteString"),
("collections.abc", "Collection"),
("collections.abc", "Container"),
("collections.abc", "Iterable"),
("collections.abc", "Mapping"),
("collections.abc", "Reversible"),
("collections.abc", "Sequence"),
("collections.abc", "Set"),
("typing", "AbstractSet"),
("typing", "ByteString"),
("typing", "Callable"),
("typing", "Collection"),
("typing", "Container"),
("typing", "FrozenSet"),
("typing", "Iterable"),
("typing", "Literal"),
("typing", "Mapping"),
("typing", "Never"),
("typing", "NoReturn"),
("typing", "Reversible"),
("typing", "Sequence"),
("typing", "Tuple"),
];
pub fn is_mutable_func(
expr: &Expr,
from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
@@ -27,34 +68,106 @@ pub fn is_mutable_func(
.any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
}
/// Return `true` if `expr` evaluates to a value that is known to be mutable:
/// a collection literal, a comprehension, or a call to a mutable constructor.
fn is_mutable_expr(
    expr: &Expr,
    from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
    import_aliases: &FnvHashMap<&str, &str>,
) -> bool {
    // Calls are mutable only when they construct a known-mutable type.
    if let ExprKind::Call { func, .. } = &expr.node {
        return is_mutable_func(func, from_imports, import_aliases);
    }
    // Collection literals and comprehensions always produce mutable values.
    matches!(
        &expr.node,
        ExprKind::List { .. }
            | ExprKind::Dict { .. }
            | ExprKind::Set { .. }
            | ExprKind::ListComp { .. }
            | ExprKind::DictComp { .. }
            | ExprKind::SetComp { .. }
    )
}
/// Return `true` if the annotation `expr` guarantees an immutable value
/// (recursing through `Union`, `Optional`, `Annotated`, and PEP 604 unions).
fn is_immutable_annotation(
    expr: &Expr,
    from_imports: &FnvHashMap<&str, FnvHashSet<&str>>,
    import_aliases: &FnvHashMap<&str, &str>,
) -> bool {
    match &expr.node {
        // Bare name or dotted attribute: resolve aliases, then check against
        // both known-immutable type lists.
        ExprKind::Name { .. } | ExprKind::Attribute { .. } => {
            let call_path = dealias_call_path(collect_call_paths(expr), import_aliases);
            IMMUTABLE_TYPES
                .iter()
                .chain(IMMUTABLE_GENERIC_TYPES)
                .any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
        }
        // Subscripted annotation, e.g. `Mapping[int, str]`, `Union[...]`,
        // `Optional[T]`, `Annotated[T, ...]`.
        ExprKind::Subscript { value, slice, .. } => {
            let call_path = dealias_call_path(collect_call_paths(value), import_aliases);
            if IMMUTABLE_GENERIC_TYPES
                .iter()
                .any(|(module, member)| match_call_path(&call_path, module, member, from_imports))
            {
                true
            } else if match_call_path(&call_path, "typing", "Union", from_imports) {
                // `Union[...]` is immutable only if every member is immutable.
                if let ExprKind::Tuple { elts, .. } = &slice.node {
                    elts.iter()
                        .all(|elt| is_immutable_annotation(elt, from_imports, import_aliases))
                } else {
                    false
                }
            } else if match_call_path(&call_path, "typing", "Optional", from_imports) {
                // `Optional[T]` reduces to checking `T` (the implicit `None` is
                // immutable).
                is_immutable_annotation(slice, from_imports, import_aliases)
            } else if match_call_path(&call_path, "typing", "Annotated", from_imports) {
                // For `Annotated[T, ...]`, only the first element (the actual
                // type) determines mutability.
                if let ExprKind::Tuple { elts, .. } = &slice.node {
                    elts.first().map_or(false, |elt| {
                        is_immutable_annotation(elt, from_imports, import_aliases)
                    })
                } else {
                    false
                }
            } else {
                false
            }
        }
        // PEP 604 union syntax (`A | B`): immutable iff both sides are.
        ExprKind::BinOp {
            left,
            op: Operator::BitOr,
            right,
        } => {
            is_immutable_annotation(left, from_imports, import_aliases)
                && is_immutable_annotation(right, from_imports, import_aliases)
        }
        // A literal `None` annotation is immutable.
        ExprKind::Constant {
            value: Constant::None,
            ..
        } => true,
        // Anything unrecognized is conservatively treated as mutable.
        _ => false,
    }
}
/// B006
pub fn mutable_argument_default(checker: &mut Checker, arguments: &Arguments) {
for expr in arguments
.defaults
// Scan in reverse order to right-align zip()
for (arg, default) in arguments
.kwonlyargs
.iter()
.chain(arguments.kw_defaults.iter())
.rev()
.zip(arguments.kw_defaults.iter().rev())
.chain(
arguments
.args
.iter()
.rev()
.chain(arguments.posonlyargs.iter().rev())
.zip(arguments.defaults.iter().rev()),
)
{
match &expr.node {
ExprKind::List { .. }
| ExprKind::Dict { .. }
| ExprKind::Set { .. }
| ExprKind::ListComp { .. }
| ExprKind::DictComp { .. }
| ExprKind::SetComp { .. } => {
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(expr),
));
}
ExprKind::Call { func, .. } => {
if is_mutable_func(func, &checker.from_imports, &checker.import_aliases) {
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(expr),
));
}
}
_ => {}
if is_mutable_expr(default, &checker.from_imports, &checker.import_aliases)
&& arg.node.annotation.as_ref().map_or(true, |expr| {
!is_immutable_annotation(expr, &checker.from_imports, &checker.import_aliases)
})
{
checker.add_check(Check::new(
CheckKind::MutableArgumentDefault,
Range::from_located(default),
));
}
}
}

View File

@@ -1,26 +1,61 @@
use rustpython_ast::{Constant, Expr, ExprKind};
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::code_gen::SourceGenerator;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// Generate source code for the assignment `obj.name = value`, or `None` if
/// code generation fails.
fn assignment(obj: &Expr, name: &str, value: &Expr) -> Option<String> {
    // Build the synthetic attribute target (`obj.name`) in store context.
    let target = Expr::new(
        Default::default(),
        Default::default(),
        ExprKind::Attribute {
            value: Box::new(obj.clone()),
            attr: name.to_string(),
            ctx: ExprContext::Store,
        },
    );
    // Wrap it in an `Assign` statement with the provided value.
    let stmt = Stmt::new(
        Default::default(),
        Default::default(),
        StmtKind::Assign {
            targets: vec![target],
            value: Box::new(value.clone()),
            type_comment: None,
        },
    );
    // Round-trip the statement through the source generator; any failure in
    // either step yields `None`.
    let mut generator = SourceGenerator::new();
    generator.unparse_stmt(&stmt).ok()?;
    generator.generate().ok()
}
/// B010
pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "setattr" {
if let [_, arg, _] = args {
if let [obj, name, value] = args {
if let ExprKind::Constant {
value: Constant::Str(value),
value: Constant::Str(name),
..
} = &arg.node
} = &name.node
{
if IDENTIFIER_REGEX.is_match(value) && !KWLIST.contains(&value.as_str()) {
checker.add_check(Check::new(
CheckKind::SetAttrWithConstant,
Range::from_located(expr),
));
if IDENTIFIER_REGEX.is_match(name) && !KWLIST.contains(&name.as_str()) {
let mut check =
Check::new(CheckKind::SetAttrWithConstant, Range::from_located(expr));
if checker.patch(check.kind.code()) {
if let Some(content) = assignment(obj, name, value) {
check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
));
}
}
checker.add_check(check);
}
}
}

View File

@@ -1,10 +1,11 @@
use rustpython_ast::{Location, Stmt};
use textwrap::{dedent, indent};
use crate::ast::helpers::{match_leading_content, match_trailing_content};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::autofix::{fixer, Fix};
use crate::checks::CheckKind;
use crate::docstrings::helpers::leading_space;
use crate::isort::{comments, format_imports};
use crate::{Check, Settings, SourceCodeLocator};
@@ -27,34 +28,6 @@ fn extract_indentation(body: &[&Stmt], locator: &SourceCodeLocator) -> String {
leading_space(&existing)
}
fn match_leading_content(body: &[&Stmt], locator: &SourceCodeLocator) -> bool {
let location = body.first().unwrap().location;
let range = Range {
location: Location::new(location.row(), 0),
end_location: location,
};
let prefix = locator.slice_source_code_range(&range);
prefix.chars().any(|char| !char.is_whitespace())
}
fn match_trailing_content(body: &[&Stmt], locator: &SourceCodeLocator) -> bool {
let end_location = body.last().unwrap().end_location.unwrap();
let range = Range {
location: end_location,
end_location: Location::new(end_location.row() + 1, 0),
};
let suffix = locator.slice_source_code_range(&range);
for char in suffix.chars() {
if char == '#' {
return false;
}
if !char.is_whitespace() {
return true;
}
}
false
}
/// I001
pub fn check_imports(
body: Vec<&Stmt>,
@@ -75,8 +48,8 @@ pub fn check_imports(
);
// Special-cases: there's leading or trailing content in the import block.
let has_leading_content = match_leading_content(&body, locator);
let has_trailing_content = match_trailing_content(&body, locator);
let has_leading_content = match_leading_content(body.first().unwrap(), locator);
let has_trailing_content = match_trailing_content(body.last().unwrap(), locator);
// Generate the sorted import block.
let expected = format_imports(

View File

@@ -519,6 +519,7 @@ mod tests {
#[test_case(CheckCode::U011, Path::new("U011_1.py"); "U011_1")]
#[test_case(CheckCode::U012, Path::new("U012.py"); "U012")]
#[test_case(CheckCode::U013, Path::new("U013.py"); "U013")]
#[test_case(CheckCode::U014, Path::new("U014.py"); "U014")]
#[test_case(CheckCode::W292, Path::new("W292_0.py"); "W292_0")]
#[test_case(CheckCode::W292, Path::new("W292_1.py"); "W292_1")]
#[test_case(CheckCode::W292, Path::new("W292_2.py"); "W292_2")]

View File

@@ -10,32 +10,40 @@ use regex::Regex;
use crate::checks::{Check, CheckCode};
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?P<noqa>\s*(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
.expect("Invalid regex")
Regex::new(
r"(?P<spaces>\s*)(?P<noqa>(?i:# noqa)(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)",
)
.expect("Invalid regex")
});
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));
#[derive(Debug)]
pub enum Directive<'a> {
None,
All(usize, usize),
Codes(usize, usize, Vec<&'a str>),
All(usize, usize, usize),
Codes(usize, usize, usize, Vec<&'a str>),
}
pub fn extract_noqa_directive(line: &str) -> Directive {
match NO_QA_REGEX.captures(line) {
Some(caps) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => Directive::Codes(
noqa.start(),
noqa.end(),
SPLIT_COMMA_REGEX
.split(codes.as_str())
.map(|code| code.trim())
.filter(|code| !code.is_empty())
.collect(),
),
None => Directive::All(noqa.start(), noqa.end()),
Some(caps) => match caps.name("spaces") {
Some(spaces) => match caps.name("noqa") {
Some(noqa) => match caps.name("codes") {
Some(codes) => Directive::Codes(
spaces.as_str().chars().count(),
noqa.start(),
noqa.end(),
SPLIT_COMMA_REGEX
.split(codes.as_str())
.map(|code| code.trim())
.filter(|code| !code.is_empty())
.collect(),
),
None => {
Directive::All(spaces.as_str().chars().count(), noqa.start(), noqa.end())
}
},
None => Directive::None,
},
None => Directive::None,
},
@@ -92,12 +100,14 @@ fn add_noqa_inner(
match extract_noqa_directive(line) {
Directive::None => {
output.push_str(line);
output.push_str(" # noqa: ");
}
Directive::All(_, start, _) | Directive::Codes(_, start, ..) => {
output.push_str(&line[..start]);
output.push_str("# noqa: ");
}
Directive::All(start, _) => output.push_str(&line[..start]),
Directive::Codes(start, ..) => output.push_str(&line[..start]),
};
let codes: Vec<&str> = codes.iter().map(|code| code.as_ref()).collect();
output.push_str(" # noqa: ");
output.push_str(&codes.join(", "));
output.push('\n');
count += 1;

View File

@@ -1,6 +1,6 @@
use itertools::izip;
use rustpython_ast::Location;
use rustpython_parser::ast::{Cmpop, Expr, ExprKind, Stmt};
use rustpython_parser::ast::{Cmpop, Expr, ExprKind};
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
@@ -46,19 +46,6 @@ pub fn ambiguous_function_name(name: &str, location: Range) -> Option<Check> {
}
}
/// E731
pub fn do_not_assign_lambda(target: &Expr, value: &Expr, stmt: &Stmt) -> Option<Check> {
if let ExprKind::Name { .. } = &target.node {
if let ExprKind::Lambda { .. } = &value.node {
return Some(Check::new(
CheckKind::DoNotAssignLambda,
Range::from_located(stmt),
));
}
}
None
}
/// E721
pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];

View File

@@ -1,8 +1,13 @@
use anyhow::Result;
use fnv::FnvHashMap;
use itertools::izip;
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Unaryop};
use log::error;
use rustpython_ast::{Arguments, Location, StmtKind};
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Stmt, Unaryop};
use crate::ast::helpers::{match_leading_content, match_trailing_content};
use crate::ast::types::Range;
use crate::ast::whitespace::leading_space;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind, RejectedCmpop};
@@ -260,3 +265,69 @@ pub fn not_tests(
}
}
}
fn function(name: &str, args: &Arguments, body: &Expr) -> Result<String> {
let body = Stmt::new(
Default::default(),
Default::default(),
StmtKind::Return {
value: Some(Box::new(body.clone())),
},
);
let func = Stmt::new(
Default::default(),
Default::default(),
StmtKind::FunctionDef {
name: name.to_string(),
args: Box::new(args.clone()),
body: vec![body],
decorator_list: vec![],
returns: None,
type_comment: None,
},
);
let mut generator = SourceGenerator::new();
generator.unparse_stmt(&func)?;
generator.generate().map_err(|e| e.into())
}
/// E731
pub fn do_not_assign_lambda(checker: &mut Checker, target: &Expr, value: &Expr, stmt: &Stmt) {
if let ExprKind::Name { id, .. } = &target.node {
if let ExprKind::Lambda { args, body } = &value.node {
let mut check = Check::new(CheckKind::DoNotAssignLambda, Range::from_located(stmt));
if checker.patch(check.kind.code()) {
if !match_leading_content(stmt, checker.locator)
&& !match_trailing_content(stmt, checker.locator)
{
match function(id, args, body) {
Ok(content) => {
let indentation =
&leading_space(&checker.locator.slice_source_code_range(&Range {
location: Location::new(stmt.location.row(), 0),
end_location: Location::new(stmt.location.row() + 1, 0),
}));
let mut indented = String::new();
for (idx, line) in content.lines().enumerate() {
if idx == 0 {
indented.push_str(line);
} else {
indented.push('\n');
indented.push_str(indentation);
indented.push_str(line);
}
}
check.amend(Fix::replacement(
indented,
stmt.location,
stmt.end_location.unwrap(),
));
}
Err(e) => error!("Failed to generate fix: {}", e),
}
}
}
checker.add_check(check);
}
}
}

View File

@@ -7,11 +7,12 @@ use regex::Regex;
use rustpython_ast::{Arg, Constant, ExprKind, Location, StmtKind};
use crate::ast::types::Range;
use crate::ast::whitespace;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::docstrings::constants;
use crate::docstrings::definition::{Definition, DefinitionKind};
use crate::docstrings::helpers;
use crate::docstrings::sections::{section_contexts, SectionContext};
use crate::docstrings::styles::SectionStyle;
use crate::visibility::{is_init, is_magic, is_overload, is_staticmethod, Visibility};
@@ -391,7 +392,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
return;
}
let docstring_indent = helpers::indentation(checker, docstring);
let docstring_indent = whitespace::indentation(checker, docstring);
let mut has_seen_tab = docstring_indent.contains('\t');
let mut is_over_indented = true;
let mut over_indented_lines = vec![];
@@ -408,7 +409,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
continue;
}
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
// We only report tab indentation once, so only check if we haven't seen a tab
// yet.
@@ -427,7 +428,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
);
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -464,7 +465,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
// If every line (except the last) is over-indented...
if is_over_indented {
for i in over_indented_lines {
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
if line_indent.len() > docstring_indent.len() {
// We report over-indentation on every line. This isn't great, but
// enables autofix.
@@ -477,7 +478,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
);
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -490,7 +491,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
// If the last line is over-indented...
if !lines.is_empty() {
let i = lines.len() - 1;
let line_indent = helpers::leading_space(lines[i]);
let line_indent = whitespace::leading_space(lines[i]);
if line_indent.len() > docstring_indent.len() {
let mut check = Check::new(
CheckKind::NoOverIndentation,
@@ -501,7 +502,7 @@ pub fn indent(checker: &mut Checker, definition: &Definition) {
);
if checker.patch(check.kind.code()) {
check.amend(Fix::replacement(
helpers::clean(&docstring_indent),
whitespace::clean(&docstring_indent),
Location::new(docstring.location.row() + i, 0),
Location::new(docstring.location.row() + i, line_indent.len()),
));
@@ -541,7 +542,7 @@ pub fn newline_after_last_paragraph(checker: &mut Checker, definition: &Definiti
// Insert a newline just before the end-quote(s).
let content = format!(
"\n{}",
helpers::clean(&helpers::indentation(checker, docstring))
whitespace::clean(&whitespace::indentation(checker, docstring))
);
check.amend(Fix::insertion(
content,
@@ -588,9 +589,9 @@ pub fn no_surrounding_whitespace(checker: &mut Checker, definition: &Definition)
.next()
.map(|line| line.to_lowercase())
{
for pattern in helpers::TRIPLE_QUOTE_PREFIXES
for pattern in constants::TRIPLE_QUOTE_PREFIXES
.iter()
.chain(helpers::SINGLE_QUOTE_PREFIXES)
.chain(constants::SINGLE_QUOTE_PREFIXES)
{
if first_line.starts_with(pattern) {
check.amend(Fix::replacement(
@@ -634,7 +635,7 @@ pub fn multi_line_summary_start(checker: &mut Checker, definition: &Definition)
.next()
.map(|line| line.to_lowercase())
{
if helpers::TRIPLE_QUOTE_PREFIXES.contains(&first_line.as_str()) {
if constants::TRIPLE_QUOTE_PREFIXES.contains(&first_line.as_str()) {
if checker.settings.enabled.contains(&CheckCode::D212) {
checker.add_check(Check::new(
CheckKind::MultiLineSummaryFirstLine,
@@ -920,7 +921,7 @@ fn blanks_and_section_underline(
// Add a dashed line (of the appropriate length) under the section header.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::insertion(
@@ -954,7 +955,7 @@ fn blanks_and_section_underline(
// Add a dashed line (of the appropriate length) under the section header.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::insertion(
@@ -1030,7 +1031,7 @@ fn blanks_and_section_underline(
// Replace the existing underline with a line of the appropriate length.
let content = format!(
"{}{}\n",
helpers::clean(&helpers::indentation(checker, docstring)),
whitespace::clean(&whitespace::indentation(checker, docstring)),
"-".repeat(context.section_name.len())
);
check.amend(Fix::replacement(
@@ -1057,8 +1058,8 @@ fn blanks_and_section_underline(
}
if checker.settings.enabled.contains(&CheckCode::D215) {
let leading_space = helpers::leading_space(non_empty_line);
let indentation = helpers::indentation(checker, docstring);
let leading_space = whitespace::leading_space(non_empty_line);
let indentation = whitespace::indentation(checker, docstring);
if leading_space.len() > indentation.len() {
let mut check = Check::new(
CheckKind::SectionUnderlineNotOverIndented(context.section_name.to_string()),
@@ -1067,7 +1068,7 @@ fn blanks_and_section_underline(
if checker.patch(check.kind.code()) {
// Replace the existing indentation with whitespace of the appropriate length.
check.amend(Fix::replacement(
helpers::clean(&indentation),
whitespace::clean(&indentation),
Location::new(
docstring.location.row()
+ context.original_index
@@ -1198,8 +1199,8 @@ fn common_section(
}
if checker.settings.enabled.contains(&CheckCode::D214) {
let leading_space = helpers::leading_space(context.line);
let indentation = helpers::indentation(checker, docstring);
let leading_space = whitespace::leading_space(context.line);
let indentation = whitespace::indentation(checker, docstring);
if leading_space.len() > indentation.len() {
let mut check = Check::new(
CheckKind::SectionNotOverIndented(context.section_name.to_string()),
@@ -1208,7 +1209,7 @@ fn common_section(
if checker.patch(check.kind.code()) {
// Replace the existing indentation with whitespace of the appropriate length.
check.amend(Fix::replacement(
helpers::clean(&indentation),
whitespace::clean(&indentation),
Location::new(docstring.location.row() + context.original_index, 0),
Location::new(
docstring.location.row() + context.original_index,
@@ -1400,13 +1401,13 @@ fn args_section(checker: &mut Checker, definition: &Definition, context: &Sectio
fn parameters_section(checker: &mut Checker, definition: &Definition, context: &SectionContext) {
// Collect the list of arguments documented in the docstring.
let mut docstring_args: FnvHashSet<&str> = FnvHashSet::default();
let section_level_indent = helpers::leading_space(context.line);
let section_level_indent = whitespace::leading_space(context.line);
for i in 1..context.following_lines.len() {
let current_line = context.following_lines[i - 1];
let current_leading_space = helpers::leading_space(current_line);
let current_leading_space = whitespace::leading_space(current_line);
let next_line = context.following_lines[i];
if current_leading_space == section_level_indent
&& (helpers::leading_space(next_line).len() > current_leading_space.len())
&& (whitespace::leading_space(next_line).len() > current_leading_space.len())
&& !next_line.trim().is_empty()
{
let parameters = if let Some(semi_index) = current_line.find(':') {

View File

@@ -162,12 +162,13 @@ pub fn unnecessary_lru_cache_params(
import_aliases,
)
{
let range = Range {
location: func.end_location.unwrap(),
end_location: expr.end_location.unwrap(),
};
// Ex) `functools.lru_cache()`
if keywords.is_empty() {
return Some(Check::new(
CheckKind::UnnecessaryLRUCacheParams,
Range::from_located(expr),
));
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
// Ex) `functools.lru_cache(maxsize=None)`
if target_version >= PythonVersion::Py39 && keywords.len() == 1 {
@@ -181,10 +182,7 @@ pub fn unnecessary_lru_cache_params(
}
)
{
return Some(Check::new(
CheckKind::UnnecessaryLRUCacheParams,
Range::from_located(expr),
));
return Some(Check::new(CheckKind::UnnecessaryLRUCacheParams, range));
}
}
}

View File

@@ -191,34 +191,3 @@ pub fn remove_unnecessary_future_import(
))
}
}
/// U011
pub fn remove_unnecessary_lru_cache_params(
locator: &SourceCodeLocator,
decor_at: &Location,
) -> Option<Fix> {
let contents = locator.slice_source_code_at(decor_at);
let mut fix_start = None;
let mut fix_end = None;
let mut count: usize = 0;
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
if matches!(tok, Tok::Lpar) {
if count == 0 {
fix_start = Some(helpers::to_absolute(&start, decor_at));
}
count += 1;
}
if matches!(tok, Tok::Rpar) {
count -= 1;
if count == 0 {
fix_end = Some(helpers::to_absolute(&end, decor_at));
break;
}
}
}
match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix::deletion(start, end)),
_ => None,
}
}

View File

@@ -0,0 +1,208 @@
use anyhow::{bail, Result};
use log::error;
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Keyword, Stmt, StmtKind};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::code_gen::SourceGenerator;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// Return the typename, args, keywords and mother class
fn match_named_tuple_assign<'a>(
checker: &Checker,
targets: &'a [Expr],
value: &'a Expr,
) -> Option<(&'a str, &'a [Expr], &'a [Keyword], &'a ExprKind)> {
if let Some(target) = targets.get(0) {
if let ExprKind::Name { id: typename, .. } = &target.node {
if let ExprKind::Call {
func,
args,
keywords,
} = &value.node
{
if match_module_member(
func,
"typing",
"NamedTuple",
&checker.from_imports,
&checker.import_aliases,
) {
return Some((typename, args, keywords, &func.node));
}
}
}
}
None
}
/// Generate a `StmtKind::AnnAssign` representing the provided property
/// definition.
fn create_property_assignment_stmt(
property: &str,
annotation: &ExprKind,
value: Option<&ExprKind>,
) -> Stmt {
Stmt::new(
Default::default(),
Default::default(),
StmtKind::AnnAssign {
target: Box::new(Expr::new(
Default::default(),
Default::default(),
ExprKind::Name {
id: property.to_string(),
ctx: ExprContext::Load,
},
)),
annotation: Box::new(Expr::new(
Default::default(),
Default::default(),
annotation.clone(),
)),
value: value
.map(|v| Box::new(Expr::new(Default::default(), Default::default(), v.clone()))),
simple: 1,
},
)
}
/// Match the `defaults` keyword in a `NamedTuple(...)` call.
fn match_defaults(keywords: &[Keyword]) -> Result<&[Expr]> {
match keywords.iter().find(|keyword| {
if let Some(arg) = &keyword.node.arg {
arg.as_str() == "defaults"
} else {
false
}
}) {
Some(defaults) => match &defaults.node.value.node {
ExprKind::List { elts, .. } => Ok(elts),
ExprKind::Tuple { elts, .. } => Ok(elts),
_ => bail!("Expected defaults to be `ExprKind::List` | `ExprKind::Tuple`"),
},
None => Ok(&[]),
}
}
/// Create a list of property assignments from the `NamedTuple` arguments.
fn create_properties_from_args(args: &[Expr], defaults: &[Expr]) -> Result<Vec<Stmt>> {
if let Some(fields) = args.get(1) {
if let ExprKind::List { elts, .. } = &fields.node {
let padded_defaults = if elts.len() >= defaults.len() {
std::iter::repeat(None)
.take(elts.len() - defaults.len())
.chain(defaults.iter().map(Some))
} else {
bail!("Defaults must be `None` or an iterable of at least the number of fields")
};
elts.iter()
.zip(padded_defaults)
.map(|(field, default)| {
if let ExprKind::Tuple { elts, .. } = &field.node {
if let [field_name, annotation] = elts.as_slice() {
if let ExprKind::Constant {
value: Constant::Str(property),
..
} = &field_name.node
{
if IDENTIFIER_REGEX.is_match(property)
&& !KWLIST.contains(&property.as_str())
{
Ok(create_property_assignment_stmt(
property,
&annotation.node,
default.map(|d| &d.node),
))
} else {
bail!("Invalid property name: {}", property)
}
} else {
bail!("Expected `field_name` to be `Constant::Str`")
}
} else {
bail!("Expected `elts` to have exactly two elements")
}
} else {
bail!("Expected `field` to be `ExprKind::Tuple`")
}
})
.collect()
} else {
bail!("Expected argument to be `ExprKind::List`")
}
} else {
Ok(vec![])
}
}
/// Generate a `StmtKind:ClassDef` statement based on the provided body and
/// keywords.
fn create_class_def_stmt(typename: &str, body: Vec<Stmt>, base_class: &ExprKind) -> Stmt {
Stmt::new(
Default::default(),
Default::default(),
StmtKind::ClassDef {
name: typename.to_string(),
bases: vec![Expr::new(
Default::default(),
Default::default(),
base_class.clone(),
)],
keywords: vec![],
body,
decorator_list: vec![],
},
)
}
fn convert_to_class(
stmt: &Stmt,
typename: &str,
body: Vec<Stmt>,
base_class: &ExprKind,
) -> Result<Fix> {
let mut generator = SourceGenerator::new();
generator.unparse_stmt(&create_class_def_stmt(typename, body, base_class))?;
let content = generator.generate()?;
Ok(Fix::replacement(
content,
stmt.location,
stmt.end_location.unwrap(),
))
}
/// U014
pub fn convert_named_tuple_functional_to_class(
checker: &mut Checker,
stmt: &Stmt,
targets: &[Expr],
value: &Expr,
) {
if let Some((typename, args, keywords, base_class)) =
match_named_tuple_assign(checker, targets, value)
{
match match_defaults(keywords) {
Ok(defaults) => {
if let Ok(properties) = create_properties_from_args(args, defaults) {
let mut check = Check::new(
CheckKind::ConvertNamedTupleFunctionalToClass(typename.to_string()),
Range::from_located(stmt),
);
if checker.patch(check.kind.code()) {
match convert_to_class(stmt, typename, properties, base_class) {
Ok(fix) => check.amend(fix),
Err(err) => error!("Failed to convert `NamedTuple`: {}", err),
}
}
checker.add_check(check);
}
}
Err(err) => error!("Failed to parse defaults: {}", err),
}
}
}

View File

@@ -2,6 +2,7 @@ use anyhow::{bail, Result};
use log::error;
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Keyword, KeywordData, Stmt, StmtKind};
use crate::ast::helpers::match_module_member;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
@@ -10,11 +11,13 @@ use crate::code_gen::SourceGenerator;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// Return the class name, arguments, and keywords for a `TypedDict` assignment.
/// Return the class name, arguments, keywords and base class for a `TypedDict`
/// assignment.
fn match_typed_dict_assign<'a>(
checker: &Checker,
targets: &'a [Expr],
value: &'a Expr,
) -> Option<(&'a str, &'a [Expr], &'a [Keyword])> {
) -> Option<(&'a str, &'a [Expr], &'a [Keyword], &'a ExprKind)> {
if let Some(target) = targets.get(0) {
if let ExprKind::Name { id: class_name, .. } = &target.node {
if let ExprKind::Call {
@@ -23,10 +26,14 @@ fn match_typed_dict_assign<'a>(
keywords,
} = &value.node
{
if let ExprKind::Name { id: func_name, .. } = &func.node {
if func_name == "TypedDict" {
return Some((class_name, args, keywords));
}
if match_module_member(
func,
"typing",
"TypedDict",
&checker.from_imports,
&checker.import_aliases,
) {
return Some((class_name, args, keywords, &func.node));
}
}
}
@@ -65,12 +72,13 @@ fn create_pass_stmt() -> Stmt {
Stmt::new(Default::default(), Default::default(), StmtKind::Pass)
}
/// Generate a `StmtKind:ClassDef` statement bsaed on the provided body and
/// keywords.
/// Generate a `StmtKind:ClassDef` statement based on the provided body,
/// keywords and base class.
fn create_class_def_stmt(
class_name: &str,
body: Vec<Stmt>,
total_keyword: Option<KeywordData>,
base_class: &ExprKind,
) -> Stmt {
let keywords = match total_keyword {
Some(keyword) => vec![Keyword::new(
@@ -88,10 +96,7 @@ fn create_class_def_stmt(
bases: vec![Expr::new(
Default::default(),
Default::default(),
ExprKind::Name {
id: "TypedDict".to_string(),
ctx: ExprContext::Load,
},
base_class.clone(),
)],
keywords,
body,
@@ -150,11 +155,11 @@ fn get_properties_from_keywords(keywords: &[Keyword]) -> Result<Vec<Stmt>> {
// The only way to have the `total` keyword is to use the args version, like:
// (`TypedDict('name', {'a': int}, total=True)`)
fn get_total_from_only_keyword(keywords: &[Keyword]) -> Option<KeywordData> {
fn get_total_from_only_keyword(keywords: &[Keyword]) -> Option<&KeywordData> {
match keywords.get(0) {
Some(keyword) => match &keyword.node.arg {
Some(arg) => match arg.as_str() {
"total" => Some(keyword.node.clone()),
"total" => Some(&keyword.node),
_ => None,
},
None => None,
@@ -171,7 +176,7 @@ fn get_properties_and_total(
// dict and keywords. For example, the following is illegal:
// MyType = TypedDict('MyType', {'a': int, 'b': str}, a=int, b=str)
if let Some(dict) = args.get(1) {
let total = get_total_from_only_keyword(keywords);
let total = get_total_from_only_keyword(keywords).cloned();
match &dict.node {
ExprKind::Dict { keys, values } => {
Ok((get_properties_from_dict_literal(keys, values)?, total))
@@ -188,15 +193,21 @@ fn get_properties_and_total(
}
}
/// Generate a `Fix` to convert a `TypedDict` to a functional class.
fn convert_to_functional_class(
/// Generate a `Fix` to convert a `TypedDict` from functional to class.
fn convert_to_class(
stmt: &Stmt,
class_name: &str,
body: Vec<Stmt>,
total_keyword: Option<KeywordData>,
base_class: &ExprKind,
) -> Result<Fix> {
let mut generator = SourceGenerator::new();
generator.unparse_stmt(&create_class_def_stmt(class_name, body, total_keyword))?;
generator.unparse_stmt(&create_class_def_stmt(
class_name,
body,
total_keyword,
base_class,
))?;
let content = generator.generate()?;
Ok(Fix::replacement(
content,
@@ -212,16 +223,18 @@ pub fn convert_typed_dict_functional_to_class(
targets: &[Expr],
value: &Expr,
) {
if let Some((class_name, args, keywords)) = match_typed_dict_assign(targets, value) {
if let Some((class_name, args, keywords, base_class)) =
match_typed_dict_assign(checker, targets, value)
{
match get_properties_and_total(args, keywords) {
Err(err) => error!("Failed to parse TypedDict: {}", err),
Ok((body, total_keyword)) => {
let mut check = Check::new(
CheckKind::ConvertTypedDictFunctionalToClass,
CheckKind::ConvertTypedDictFunctionalToClass(class_name.to_string()),
Range::from_located(stmt),
);
if checker.patch(check.kind.code()) {
match convert_to_functional_class(stmt, class_name, body, total_keyword) {
match convert_to_class(stmt, class_name, body, total_keyword, base_class) {
Ok(fix) => check.amend(fix),
Err(err) => error!("Failed to convert TypedDict: {}", err),
};

View File

@@ -1,3 +1,4 @@
pub use convert_named_tuple_functional_to_class::convert_named_tuple_functional_to_class;
pub use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
pub use deprecated_unittest_alias::deprecated_unittest_alias;
pub use super_call_with_parameters::super_call_with_parameters;
@@ -10,6 +11,7 @@ pub use use_pep604_annotation::use_pep604_annotation;
pub use useless_metaclass_type::useless_metaclass_type;
pub use useless_object_inheritance::useless_object_inheritance;
mod convert_named_tuple_functional_to_class;
mod convert_typed_dict_functional_to_class;
mod deprecated_unittest_alias;
mod super_call_with_parameters;

View File

@@ -1,7 +1,8 @@
use rustpython_parser::ast::Expr;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::pyupgrade::{checks, fixes};
use crate::pyupgrade::checks;
/// U011
pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Expr]) {
@@ -12,11 +13,7 @@ pub fn unnecessary_lru_cache_params(checker: &mut Checker, decorator_list: &[Exp
&checker.import_aliases,
) {
if checker.patch(check.kind.code()) {
if let Some(fix) =
fixes::remove_unnecessary_lru_cache_params(checker.locator, &check.location)
{
check.amend(fix);
}
check.amend(Fix::deletion(check.location, check.end_location));
}
checker.add_check(check);
}

View File

@@ -90,4 +90,28 @@ expression: checks
row: 170
column: 48
fix: ~
- kind: MutableArgumentDefault
location:
row: 203
column: 26
end_location:
row: 203
column: 28
fix: ~
- kind: MutableArgumentDefault
location:
row: 204
column: 34
end_location:
row: 204
column: 36
fix: ~
- kind: MutableArgumentDefault
location:
row: 205
column: 61
end_location:
row: 205
column: 66
fix: ~

View File

@@ -9,7 +9,16 @@ expression: checks
end_location:
row: 33
column: 25
fix: ~
fix:
patch:
content: foo.bar = None
location:
row: 33
column: 0
end_location:
row: 33
column: 25
applied: false
- kind: SetAttrWithConstant
location:
row: 34
@@ -17,7 +26,16 @@ expression: checks
end_location:
row: 34
column: 29
fix: ~
fix:
patch:
content: foo._123abc = None
location:
row: 34
column: 0
end_location:
row: 34
column: 29
applied: false
- kind: SetAttrWithConstant
location:
row: 35
@@ -25,7 +43,16 @@ expression: checks
end_location:
row: 35
column: 28
fix: ~
fix:
patch:
content: foo.abc123 = None
location:
row: 35
column: 0
end_location:
row: 35
column: 28
applied: false
- kind: SetAttrWithConstant
location:
row: 36
@@ -33,5 +60,31 @@ expression: checks
end_location:
row: 36
column: 29
fix: ~
fix:
patch:
content: foo.abc123 = None
location:
row: 36
column: 0
end_location:
row: 36
column: 29
applied: false
- kind: SetAttrWithConstant
location:
row: 37
column: 0
end_location:
row: 37
column: 30
fix:
patch:
content: foo.bar.baz = None
location:
row: 37
column: 0
end_location:
row: 37
column: 30
applied: false

View File

@@ -9,7 +9,16 @@ expression: checks
end_location:
row: 2
column: 19
fix: ~
fix:
patch:
content: "def f(x):\n return (2 * x)"
location:
row: 2
column: 0
end_location:
row: 2
column: 19
applied: false
- kind: DoNotAssignLambda
location:
row: 4
@@ -17,7 +26,16 @@ expression: checks
end_location:
row: 4
column: 19
fix: ~
fix:
patch:
content: "def f(x):\n return (2 * x)"
location:
row: 4
column: 0
end_location:
row: 4
column: 19
applied: false
- kind: DoNotAssignLambda
location:
row: 7
@@ -25,5 +43,14 @@ expression: checks
end_location:
row: 7
column: 29
fix: ~
fix:
patch:
content: "def this(y, z):\n return (2 * x)"
location:
row: 7
column: 4
end_location:
row: 7
column: 29
applied: false

View File

@@ -47,4 +47,13 @@ expression: checks
row: 21
column: 9
fix: ~
- kind:
UnusedVariable: b
location:
row: 51
column: 8
end_location:
row: 51
column: 9
fix: ~

View File

@@ -5,7 +5,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 5
column: 1
column: 10
end_location:
row: 5
column: 12
@@ -22,7 +22,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 11
column: 1
column: 20
end_location:
row: 11
column: 22
@@ -39,7 +39,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 16
column: 1
column: 10
end_location:
row: 16
column: 24
@@ -56,7 +56,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 21
column: 1
column: 20
end_location:
row: 21
column: 34
@@ -73,7 +73,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 27
column: 1
column: 10
end_location:
row: 28
column: 1
@@ -90,7 +90,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 33
column: 1
column: 10
end_location:
row: 35
column: 1
@@ -107,7 +107,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 40
column: 1
column: 20
end_location:
row: 42
column: 19
@@ -124,7 +124,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 47
column: 1
column: 20
end_location:
row: 51
column: 1
@@ -141,7 +141,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 56
column: 1
column: 20
end_location:
row: 62
column: 1
@@ -158,7 +158,7 @@ expression: checks
- kind: UnnecessaryLRUCacheParams
location:
row: 67
column: 1
column: 20
end_location:
row: 72
column: 1

View File

@@ -2,157 +2,184 @@
source: src/linter.rs
expression: checks
---
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType1
location:
row: 4
row: 5
column: 0
end_location:
row: 4
row: 5
column: 52
fix:
patch:
content: "class MyType1(TypedDict):\n a: int\n b: str"
location:
row: 4
row: 5
column: 0
end_location:
row: 4
row: 5
column: 52
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType2
location:
row: 7
row: 8
column: 0
end_location:
row: 7
row: 8
column: 50
fix:
patch:
content: "class MyType2(TypedDict):\n a: int\n b: str"
location:
row: 7
row: 8
column: 0
end_location:
row: 7
row: 8
column: 50
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType3
location:
row: 10
row: 11
column: 0
end_location:
row: 10
row: 11
column: 44
fix:
patch:
content: "class MyType3(TypedDict):\n a: int\n b: str"
location:
row: 10
row: 11
column: 0
end_location:
row: 10
row: 11
column: 44
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType4
location:
row: 13
row: 14
column: 0
end_location:
row: 13
row: 14
column: 30
fix:
patch:
content: "class MyType4(TypedDict):\n pass"
location:
row: 13
row: 14
column: 0
end_location:
row: 13
row: 14
column: 30
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType5
location:
row: 16
row: 17
column: 0
end_location:
row: 16
row: 17
column: 46
fix:
patch:
content: "class MyType5(TypedDict):\n a: 'hello'"
location:
row: 16
row: 17
column: 0
end_location:
row: 16
row: 17
column: 46
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType6
location:
row: 17
row: 18
column: 0
end_location:
row: 17
row: 18
column: 41
fix:
patch:
content: "class MyType6(TypedDict):\n a: 'hello'"
location:
row: 17
row: 18
column: 0
end_location:
row: 17
row: 18
column: 41
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType7
location:
row: 20
row: 21
column: 0
end_location:
row: 20
row: 21
column: 56
fix:
patch:
content: "class MyType7(TypedDict):\n a: NotRequired[dict]"
location:
row: 20
row: 21
column: 0
end_location:
row: 20
row: 21
column: 56
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType8
location:
row: 23
row: 24
column: 0
end_location:
row: 23
row: 24
column: 65
fix:
patch:
content: "class MyType8(TypedDict, total=False):\n x: int\n y: int"
location:
row: 23
row: 24
column: 0
end_location:
row: 23
row: 24
column: 65
applied: false
- kind: ConvertTypedDictFunctionalToClass
- kind:
ConvertTypedDictFunctionalToClass: MyType10
location:
row: 29
row: 30
column: 0
end_location:
row: 29
row: 30
column: 59
fix:
patch:
content: "class MyType10(TypedDict):\n key: Literal['value']"
location:
row: 29
row: 30
column: 0
end_location:
row: 29
row: 30
column: 59
applied: false
- kind:
ConvertTypedDictFunctionalToClass: MyType11
location:
row: 33
column: 0
end_location:
row: 33
column: 53
fix:
patch:
content: "class MyType11(typing.TypedDict):\n key: int"
location:
row: 33
column: 0
end_location:
row: 33
column: 53
applied: false

View File

@@ -0,0 +1,59 @@
---
source: src/linter.rs
expression: checks
---
- kind:
ConvertNamedTupleFunctionalToClass: NT1
location:
row: 5
column: 0
end_location:
row: 5
column: 61
fix:
patch:
content: "class NT1(NamedTuple):\n a: int\n b: tuple[str, ...]"
location:
row: 5
column: 0
end_location:
row: 5
column: 61
applied: false
- kind:
ConvertNamedTupleFunctionalToClass: NT2
location:
row: 8
column: 0
end_location:
row: 12
column: 1
fix:
patch:
content: "class NT2(NamedTuple):\n a: int\n b: str = 'foo'\n c: list[bool] = [True]"
location:
row: 8
column: 0
end_location:
row: 12
column: 1
applied: false
- kind:
ConvertNamedTupleFunctionalToClass: NT3
location:
row: 15
column: 0
end_location:
row: 15
column: 56
fix:
patch:
content: "class NT3(typing.NamedTuple):\n a: int\n b: str"
location:
row: 15
column: 0
end_location:
row: 15
column: 56
applied: false

View File

@@ -65,4 +65,13 @@ expression: checks
row: 37
column: 14
fix: ~
- kind:
UnusedVariable: b
location:
row: 51
column: 8
end_location:
row: 51
column: 9
fix: ~

View File

@@ -6,7 +6,7 @@ expression: checks
UnusedNOQA: ~
location:
row: 9
column: 9
column: 11
end_location:
row: 9
column: 17
@@ -25,7 +25,7 @@ expression: checks
- E501
location:
row: 13
column: 9
column: 11
end_location:
row: 13
column: 23
@@ -45,7 +45,7 @@ expression: checks
- E501
location:
row: 16
column: 9
column: 11
end_location:
row: 16
column: 29
@@ -61,16 +61,17 @@ expression: checks
applied: false
- kind:
UnusedNOQA:
- F841
- W191
location:
row: 19
column: 9
column: 11
end_location:
row: 19
column: 29
fix:
patch:
content: " # noqa: F841"
content: ""
location:
row: 19
column: 9
@@ -78,60 +79,107 @@ expression: checks
row: 19
column: 29
applied: false
- kind:
UnusedNOQA:
- E501
location:
row: 23
column: 9
end_location:
row: 23
column: 21
fix:
patch:
content: ""
location:
row: 23
column: 9
end_location:
row: 23
column: 21
applied: false
- kind:
UnusedVariable: d
location:
row: 26
column: 4
end_location:
row: 26
column: 5
fix: ~
- kind:
UnusedNOQA:
- E501
location:
row: 26
column: 32
end_location:
row: 26
column: 44
fix:
patch:
content: ""
location:
row: 26
column: 9
end_location:
row: 26
column: 44
applied: false
- kind:
UnusedNOQA:
- F841
location:
row: 44
column: 3
row: 52
column: 5
end_location:
row: 44
row: 52
column: 23
fix:
patch:
content: " # noqa: E501"
content: "# noqa: E501"
location:
row: 44
column: 3
row: 52
column: 5
end_location:
row: 44
row: 52
column: 23
applied: false
- kind:
UnusedNOQA:
- E501
location:
row: 52
column: 3
row: 60
column: 5
end_location:
row: 52
row: 60
column: 17
fix:
patch:
content: ""
location:
row: 52
row: 60
column: 3
end_location:
row: 52
row: 60
column: 17
applied: false
- kind:
UnusedNOQA: ~
location:
row: 60
column: 3
row: 68
column: 5
end_location:
row: 60
row: 68
column: 11
fix:
patch:
content: ""
location:
row: 60
row: 68
column: 3
end_location:
row: 60
row: 68
column: 11
applied: false