Compare commits

...

15 Commits

Author SHA1 Message Date
Charlie Marsh
a21fe716f2 Bump version to 0.0.113 2022-11-11 22:42:02 -05:00
Charlie Marsh
558883299a Default to isort's import sort logic (#691) 2022-11-11 22:41:39 -05:00
Charlie Marsh
048a13c795 Add a separate local folder category for imports (#690) 2022-11-11 22:12:48 -05:00
Anders Kaseorg
5a8b7c1d20 Implement flake8-2020 (sys.version, sys.version_info misuse) (#688) 2022-11-11 20:39:37 -05:00
Charlie Marsh
f8932ec12b Add some TODOs around import tracking 2022-11-11 19:07:40 -05:00
Charlie Marsh
2e7878ff48 Bump version to 0.0.112 2022-11-11 17:13:04 -05:00
Anders Kaseorg
5113ded22a Add ruff.__main__ wrapper to allow invocation as ‘python -m ruff’ (#687) 2022-11-11 15:53:42 -05:00
Anders Kaseorg
bf7bf7aa17 Only scan checks once in check_lines (#679) 2022-11-11 13:34:23 -05:00
Charlie Marsh
560c00ff9d Bump version to 0.0.111 2022-11-11 12:38:23 -05:00
Charlie Marsh
befe64a10e Support isort: skip, isort: on, and isort: off (#678) 2022-11-11 12:38:01 -05:00
Charlie Marsh
4eccfdeb69 Fix lambda handling for B010 (#685) 2022-11-11 11:18:23 -05:00
Charlie Marsh
4123ba9851 Add backticks around setattr 2022-11-11 11:08:22 -05:00
Harutaka Kawamura
e727c24f79 Implement autofix for B009 (#684) 2022-11-11 11:06:47 -05:00
Harutaka Kawamura
bd3b40688f Implement B010 (#683) 2022-11-11 10:26:37 -05:00
Charlie Marsh
b5549382a7 Clarify a few settings for isort behavior (#676) 2022-11-10 23:19:51 -05:00
69 changed files with 1668 additions and 400 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.110
rev: v0.0.113
hooks:
- id: ruff

14
Cargo.lock generated
View File

@@ -933,7 +933,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.110-dev.0"
version = "0.0.113-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -1622,6 +1622,12 @@ dependencies = [
"libc",
]
[[package]]
name = "nohash-hasher"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451"
[[package]]
name = "nom"
version = "5.1.2"
@@ -2234,11 +2240,12 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.110"
version = "0.0.113"
dependencies = [
"anyhow",
"assert_cmd",
"bincode",
"bitflags",
"cacache",
"chrono",
"clap 4.0.22",
@@ -2255,6 +2262,7 @@ dependencies = [
"itertools",
"libcst",
"log",
"nohash-hasher",
"notify",
"num-bigint",
"once_cell",
@@ -2279,7 +2287,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.110"
version = "0.0.113"
dependencies = [
"anyhow",
"clap 4.0.22",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.110"
version = "0.0.113"
edition = "2021"
[lib]
@@ -15,6 +15,7 @@ name = "ruff"
[dependencies]
anyhow = { version = "1.0.66" }
bincode = { version = "1.3.3" }
bitflags = { version = "1.3.2" }
chrono = { version = "0.4.21" }
clap = { version = "4.0.1", features = ["derive"] }
colored = { version = "2.0.0" }
@@ -26,6 +27,7 @@ glob = { version = "0.3.0" }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "a13ec97dd4eb925bde4d426c6e422582793b260c" }
log = { version = "0.4.17" }
nohash-hasher = { version = "0.2.0" }
notify = { version = "4.0.17" }
num-bigint = { version = "0.4.3" }
once_cell = { version = "1.16.0" }

View File

@@ -57,8 +57,9 @@ Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-mu
9. [flake8-print](#flake8-print)
10. [flake8-quotes](#flake8-quotes)
11. [flake8-annotations](#flake8-annotations)
12. [Ruff-specific rules](#ruff-specific-rules)
13. [Meta rules](#meta-rules)
12. [flake8-2020](#flake8-2020)
13. [Ruff-specific rules](#ruff-specific-rules)
14. [Meta rules](#meta-rules)
5. [Editor Integrations](#editor-integrations)
6. [FAQ](#faq)
7. [Development](#development)
@@ -98,7 +99,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.110
rev: v0.0.113
hooks:
- id: ruff
```
@@ -501,7 +502,8 @@ For more, see [flake8-bugbear](https://pypi.org/project/flake8-bugbear/22.10.27/
| B006 | MutableArgumentDefault | Do not use mutable data structures for argument defaults. | |
| B007 | UnusedLoopControlVariable | Loop control variable `i` not used within the loop body. | 🛠 |
| B008 | FunctionCallArgumentDefault | Do not perform function calls in argument defaults. | |
| B009 | GetAttrWithConstant | Do not call `getattr` with a constant attribute value, it is not any safer than normal property access. | |
| B009 | GetAttrWithConstant | Do not call `getattr` with a constant attribute value, it is not any safer than normal property access. | 🛠 |
| B010 | SetAttrWithConstant | Do not call `setattr` with a constant attribute value, it is not any safer than normal property access. | |
| B011 | DoNotAssertFalse | Do not `assert False` (`python -O` removes these calls), raise `AssertionError()` | 🛠 |
| B013 | RedundantTupleInExceptionHandler | A length-one tuple literal is redundant. Write `except ValueError:` instead of `except (ValueError,):`. | |
| B014 | DuplicateHandlerException | Exception handler with duplicate exception: `ValueError` | 🛠 |
@@ -560,6 +562,23 @@ For more, see [flake8-annotations](https://pypi.org/project/flake8-annotations/2
| ANN206 | MissingReturnTypeClassMethod | Missing return type annotation for classmethod `...` | |
| ANN401 | DynamicallyTypedExpression | Dynamically typed expressions (typing.Any) are disallowed in `...` | |
### flake8-2020
For more, see [flake8-2020](https://pypi.org/project/flake8-2020/1.7.0/) on PyPI.
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| YTT101 | SysVersionSlice3Referenced | `sys.version[:3]` referenced (python3.10), use `sys.version_info` | |
| YTT102 | SysVersion2Referenced | `sys.version[2]` referenced (python3.10), use `sys.version_info` | |
| YTT103 | SysVersionCmpStr3 | `sys.version` compared to string (python3.10), use `sys.version_info` | |
| YTT201 | SysVersionInfo0Eq3Referenced | `sys.version_info[0] == 3` referenced (python4), use `>=` | |
| YTT202 | SixPY3Referenced | `six.PY3` referenced (python4), use `not six.PY2` | |
| YTT203 | SysVersionInfo1CmpInt | `sys.version_info[1]` compared to integer (python4), compare `sys.version_info` to tuple | |
| YTT204 | SysVersionInfoMinorCmpInt | `sys.version_info.minor` compared to integer (python4), compare `sys.version_info` to tuple | |
| YTT301 | SysVersion0Referenced | `sys.version[0]` referenced (python10), use `sys.version_info` | |
| YTT302 | SysVersionCmpStr10 | `sys.version` compared to string (python10), use `sys.version_info` | |
| YTT303 | SysVersionSlice1Referenced | `sys.version[:1]` referenced (python10), use `sys.version_info` | |
### Ruff-specific rules
| Code | Name | Message | Fix |
@@ -665,7 +684,8 @@ including:
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (19/32)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (20/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/34)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
@@ -688,7 +708,8 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-annotations`](https://pypi.org/project/flake8-annotations/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (19/32)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (20/32)
- [`flake8-2020`](https://pypi.org/project/flake8-2020/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (14/34).
@@ -713,8 +734,11 @@ project. See [#283](https://github.com/charliermarsh/ruff/issues/283) for more.
### How does Ruff's import sorting compare to [`isort`](https://pypi.org/project/isort/)?
Ruff's import sorting is intended to be equivalent to `isort` when used `profile = "black"` and
`combine_as_imports = true`. Like `isort`, Ruff's import sorting is compatible with Black.
Ruff's import sorting is intended to be equivalent to `isort` when used with `profile = "black"` and a
few other settings (`combine_as_imports = true`, `order_by_type = false`, and
`case_sensitive = true`).
Like `isort`, Ruff's import sorting is compatible with Black.
Ruff is less configurable than `isort`, but supports the `known-first-party`, `known-third-party`,
`extra-standard-library`, and `src` settings, like so:

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.110"
version = "0.0.113"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.110"
version = "0.0.113"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.110-dev.0"
version = "0.0.113-dev.0"
edition = "2021"
[lib]

View File

@@ -1,7 +1,7 @@
"""
Should emit:
B009 - Line 17, 18, 19, 44
B010 - Line 28, 29, 30
B009 - Line 18, 19, 20, 21, 22
B010 - Line 33, 34, 35, 36
"""
# Valid getattr usage
@@ -11,37 +11,26 @@ getattr(foo, "bar{foo}".format(foo="a"), None)
getattr(foo, "bar{foo}".format(foo="a"))
getattr(foo, bar, None)
getattr(foo, "123abc")
getattr(foo, r"123\abc")
getattr(foo, "except")
# Invalid usage
getattr(foo, "bar")
getattr(foo, "_123abc")
getattr(foo, "abc123")
getattr(foo, r"abc123")
_ = lambda x: getattr(x, "bar")
# Valid setattr usage
setattr(foo, bar, None)
setattr(foo, "bar{foo}".format(foo="a"), None)
setattr(foo, "123abc", None)
setattr(foo, r"123\abc", None)
setattr(foo, "except", None)
_ = lambda x: setattr(x, "bar", 1)
# Invalid usage
setattr(foo, "bar", None)
setattr(foo, "_123abc", None)
setattr(foo, "abc123", None)
# Allow use of setattr within lambda expression
# since assignment is not valid in this context.
c = lambda x: setattr(x, "some_attr", 1)
class FakeCookieStore:
def __init__(self, has_setter):
self.cookie_filter = None
if has_setter:
self.setCookieFilter = lambda func: setattr(self, "cookie_filter", func)
# getattr is still flagged within lambda though
c = lambda x: getattr(x, "some_attr")
# should be replaced with
c = lambda x: x.some_attr
setattr(foo, r"abc123", None)

13
resources/test/fixtures/YTT101.py vendored Normal file
View File

@@ -0,0 +1,13 @@
import sys
from sys import version, version as v
print(sys.version)
print(sys.version[:3])
print(version[:3])
# ignore from imports with aliases, patches welcome
print(v[:3])
# the tool is timid and only flags certain numeric slices
i = 3
print(sys.version[:i])

5
resources/test/fixtures/YTT102.py vendored Normal file
View File

@@ -0,0 +1,5 @@
import sys
from sys import version
py_minor = sys.version[2]
py_minor = version[2]

8
resources/test/fixtures/YTT103.py vendored Normal file
View File

@@ -0,0 +1,8 @@
import sys
from sys import version
version < "3.5"
sys.version < "3.5"
sys.version <= "3.5"
sys.version > "3.5"
sys.version >= "3.5"

10
resources/test/fixtures/YTT201.py vendored Normal file
View File

@@ -0,0 +1,10 @@
import sys
from sys import version_info
print("{}.{}".format(*sys.version_info))
PY3 = sys.version_info[0] >= 3
PY3 = sys.version_info[0] == 3
PY3 = version_info[0] == 3
PY2 = sys.version_info[0] != 3
PY2 = version_info[0] != 3

7
resources/test/fixtures/YTT202.py vendored Normal file
View File

@@ -0,0 +1,7 @@
import six
from six import PY3
if six.PY3:
print("3")
if PY3:
print("3")

5
resources/test/fixtures/YTT203.py vendored Normal file
View File

@@ -0,0 +1,5 @@
import sys
from sys import version_info
sys.version_info[1] >= 5
version_info[1] < 6

5
resources/test/fixtures/YTT204.py vendored Normal file
View File

@@ -0,0 +1,5 @@
import sys
from sys import version_info
sys.version_info.minor <= 7
version_info.minor > 8

5
resources/test/fixtures/YTT301.py vendored Normal file
View File

@@ -0,0 +1,5 @@
import sys
from sys import version
py_major = sys.version[0]
py_major = version[0]

8
resources/test/fixtures/YTT302.py vendored Normal file
View File

@@ -0,0 +1,8 @@
import sys
from sys import version
version < "3"
sys.version < "3"
sys.version <= "3"
sys.version > "3"
sys.version >= "3"

5
resources/test/fixtures/YTT303.py vendored Normal file
View File

@@ -0,0 +1,5 @@
import sys
from sys import version
print(sys.version[:1])
print(version[:1])

View File

@@ -0,0 +1,12 @@
import StringIO
import glob
import os
import shutil
import tempfile
import time
from subprocess import PIPE, Popen, STDOUT
from module import Class, CONSTANT, function, BASIC, Apple
import foo
import FOO
import BAR
import bar

View File

@@ -0,0 +1,2 @@
[tool.ruff]
line-length = 88

View File

@@ -0,0 +1,4 @@
import sys
import leading_prefix
import os
from . import leading_prefix

10
resources/test/fixtures/isort/skip.py vendored Normal file
View File

@@ -0,0 +1,10 @@
# isort: off
import sys
import os
import collections
# isort: on
import sys
import os # isort: skip
import collections
import abc

0
ruff/__init__.py Normal file
View File

7
ruff/__main__.py Normal file
View File

@@ -0,0 +1,7 @@
import os
import sys
import sysconfig
if __name__ == "__main__":
ruff = os.path.join(sysconfig.get_path("scripts"), "ruff")
os.spawnv(os.P_WAIT, ruff, [ruff, *sys.argv[1:]])

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.110"
version = "0.0.113"
edition = "2021"
[dependencies]

View File

@@ -46,6 +46,7 @@ pub enum ScopeKind<'a> {
Generator,
Module,
Arg,
Lambda,
}
#[derive(Clone, Debug)]

View File

@@ -23,7 +23,6 @@ use crate::ast::{helpers, operations, visitor};
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::docstrings::definition::{Definition, DefinitionKind, Documentable};
use crate::isort::track::ImportTracker;
use crate::python::builtins::{BUILTINS, MAGIC_GLOBALS};
use crate::python::future::ALL_FEATURE_NAMES;
use crate::python::typing;
@@ -33,17 +32,19 @@ use crate::settings::Settings;
use crate::source_code_locator::SourceCodeLocator;
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
use crate::{
docstrings, flake8_annotations, flake8_bugbear, flake8_builtins, flake8_comprehensions,
flake8_print, pep8_naming, pycodestyle, pydocstyle, pyflakes, pyupgrade,
docstrings, flake8_2020, flake8_annotations, flake8_bugbear, flake8_builtins,
flake8_comprehensions, flake8_print, pep8_naming, pycodestyle, pydocstyle, pyflakes, pyupgrade,
};
const GLOBAL_SCOPE_INDEX: usize = 0;
const TRACK_FROM_IMPORTS: [&str; 10] = [
const TRACK_FROM_IMPORTS: [&str; 12] = [
"collections",
"collections.abc",
"contextlib",
"functools",
"re",
"six",
"sys",
"typing",
"typing.io",
"typing.re",
@@ -78,7 +79,6 @@ pub struct Checker<'a> {
deferred_functions: Vec<(&'a Stmt, Vec<usize>, Vec<usize>, VisibleScope)>,
deferred_lambdas: Vec<(&'a Expr, Vec<usize>, Vec<usize>)>,
deferred_assignments: Vec<usize>,
import_tracker: ImportTracker<'a>,
// Internal, derivative state.
visible_scope: VisibleScope,
in_f_string: Option<Range>,
@@ -117,7 +117,6 @@ impl<'a> Checker<'a> {
deferred_functions: Default::default(),
deferred_lambdas: Default::default(),
deferred_assignments: Default::default(),
import_tracker: ImportTracker::new(),
// Internal, derivative state.
visible_scope: VisibleScope {
modifier: Modifier::Module,
@@ -185,9 +184,6 @@ where
'b: 'a,
{
fn visit_stmt(&mut self, stmt: &'b Stmt) {
// Call-through to any composed visitors.
self.import_tracker.visit_stmt(stmt);
self.push_parent(stmt);
// Track whether we've seen docstrings, non-imports, etc.
@@ -990,6 +986,14 @@ where
if self.match_typing_module(value, "Literal") {
self.in_literal = true;
}
if self.settings.enabled.contains(&CheckCode::YTT101)
|| self.settings.enabled.contains(&CheckCode::YTT102)
|| self.settings.enabled.contains(&CheckCode::YTT301)
|| self.settings.enabled.contains(&CheckCode::YTT303)
{
flake8_2020::plugins::subscript(self, value, slice);
}
}
ExprKind::Tuple { elts, ctx } | ExprKind::List { elts, ctx } => {
if matches!(ctx, ExprContext::Store) {
@@ -1007,34 +1011,40 @@ where
}
}
}
ExprKind::Name { id, ctx } => match ctx {
ExprContext::Load => {
// Ex) List[...]
if self.settings.enabled.contains(&CheckCode::U006)
&& self.settings.target_version >= PythonVersion::Py39
&& typing::is_pep585_builtin(expr, self.from_imports.get("typing"))
{
pyupgrade::plugins::use_pep585_annotation(self, expr, id);
}
self.handle_node_load(expr);
}
ExprContext::Store => {
if self.settings.enabled.contains(&CheckCode::E741) {
if let Some(check) = pycodestyle::checks::ambiguous_variable_name(
id,
Range::from_located(expr),
) {
self.add_check(check);
ExprKind::Name { id, ctx } => {
match ctx {
ExprContext::Load => {
// Ex) List[...]
if self.settings.enabled.contains(&CheckCode::U006)
&& self.settings.target_version >= PythonVersion::Py39
&& typing::is_pep585_builtin(expr, self.from_imports.get("typing"))
{
pyupgrade::plugins::use_pep585_annotation(self, expr, id);
}
self.handle_node_load(expr);
}
ExprContext::Store => {
if self.settings.enabled.contains(&CheckCode::E741) {
if let Some(check) = pycodestyle::checks::ambiguous_variable_name(
id,
Range::from_located(expr),
) {
self.add_check(check);
}
}
self.check_builtin_shadowing(id, Range::from_located(expr), true);
self.check_builtin_shadowing(id, Range::from_located(expr), true);
self.handle_node_store(expr, self.current_parent());
self.handle_node_store(expr, self.current_parent());
}
ExprContext::Del => self.handle_node_delete(expr),
}
ExprContext::Del => self.handle_node_delete(expr),
},
if self.settings.enabled.contains(&CheckCode::YTT202) {
flake8_2020::plugins::name_or_attribute(self, expr);
}
}
ExprKind::Attribute { attr, .. } => {
// Ex) typing.List[...]
if self.settings.enabled.contains(&CheckCode::U006)
@@ -1043,6 +1053,10 @@ where
{
pyupgrade::plugins::use_pep585_annotation(self, expr, attr);
}
if self.settings.enabled.contains(&CheckCode::YTT202) {
flake8_2020::plugins::name_or_attribute(self, expr);
}
}
ExprKind::Call {
func,
@@ -1074,6 +1088,16 @@ where
if self.settings.enabled.contains(&CheckCode::B009) {
flake8_bugbear::plugins::getattr_with_constant(self, expr, func, args);
}
if self.settings.enabled.contains(&CheckCode::B010) {
if !self
.scope_stack
.iter()
.rev()
.any(|index| matches!(self.scopes[*index].kind, ScopeKind::Lambda))
{
flake8_bugbear::plugins::setattr_with_constant(self, expr, func, args);
}
}
if self.settings.enabled.contains(&CheckCode::B026) {
flake8_bugbear::plugins::star_arg_unpacking_after_keyword_arg(
self, args, keywords,
@@ -1418,6 +1442,15 @@ where
.into_iter(),
);
}
if self.settings.enabled.contains(&CheckCode::YTT103)
|| self.settings.enabled.contains(&CheckCode::YTT201)
|| self.settings.enabled.contains(&CheckCode::YTT203)
|| self.settings.enabled.contains(&CheckCode::YTT204)
|| self.settings.enabled.contains(&CheckCode::YTT302)
{
flake8_2020::plugins::compare(self, left, ops, comparators);
}
}
ExprKind::Constant {
value: Constant::Str(value),
@@ -1461,8 +1494,8 @@ where
for expr in &args.defaults {
self.visit_expr(expr);
}
self.push_scope(Scope::new(ScopeKind::Lambda))
}
ExprKind::ListComp { elt, generators } | ExprKind::SetComp { elt, generators } => {
if self.settings.enabled.contains(&CheckCode::C416) {
if let Some(check) = flake8_comprehensions::checks::unnecessary_comprehension(
@@ -1478,7 +1511,6 @@ where
}
self.push_scope(Scope::new(ScopeKind::Generator))
}
ExprKind::GeneratorExp { .. } | ExprKind::DictComp { .. } => {
self.push_scope(Scope::new(ScopeKind::Generator))
}
@@ -1649,7 +1681,8 @@ where
// Post-visit.
match &expr.node {
ExprKind::GeneratorExp { .. }
ExprKind::Lambda { .. }
| ExprKind::GeneratorExp { .. }
| ExprKind::ListComp { .. }
| ExprKind::DictComp { .. }
| ExprKind::SetComp { .. } => {
@@ -1664,9 +1697,6 @@ where
}
fn visit_excepthandler(&mut self, excepthandler: &'b Excepthandler) {
// Call-through to any composed visitors.
self.import_tracker.visit_excepthandler(excepthandler);
match &excepthandler.node {
ExcepthandlerKind::ExceptHandler { type_, name, .. } => {
if self.settings.enabled.contains(&CheckCode::E722) && type_.is_none() {
@@ -2244,7 +2274,7 @@ impl<'a> Checker<'a> {
while let Some((expr, scopes, parents)) = self.deferred_lambdas.pop() {
self.parent_stack = parents;
self.scope_stack = scopes;
self.push_scope(Scope::new(ScopeKind::Function(Default::default())));
self.push_scope(Scope::new(ScopeKind::Lambda));
if let ExprKind::Lambda { args, body } = &expr.node {
self.visit_arguments(args);

View File

@@ -1,5 +1,6 @@
//! Lint rules based on import analysis.
use nohash_hasher::IntSet;
use rustpython_parser::ast::Suite;
use crate::ast::visitor::Visitor;
@@ -30,10 +31,11 @@ fn check_import_blocks(
pub fn check_imports(
python_ast: &Suite,
locator: &SourceCodeLocator,
exclusions: &IntSet<usize>,
settings: &Settings,
autofix: &fixer::Mode,
) -> Vec<Check> {
let mut tracker = ImportTracker::new();
let mut tracker = ImportTracker::new(exclusions);
for stmt in python_ast {
tracker.visit_stmt(stmt);
}

View File

@@ -1,7 +1,6 @@
//! Lint rules based on checking raw physical lines.
use std::collections::BTreeMap;
use nohash_hasher::IntMap;
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_parser::ast::Location;
@@ -36,7 +35,7 @@ fn should_enforce_line_length(line: &str, length: usize, limit: usize) -> bool {
pub fn check_lines(
checks: &mut Vec<Check>,
contents: &str,
noqa_line_for: &[usize],
noqa_line_for: &IntMap<usize, usize>,
settings: &Settings,
autofix: &fixer::Mode,
) {
@@ -44,18 +43,23 @@ pub fn check_lines(
let enforce_line_too_long = settings.enabled.contains(&CheckCode::E501);
let enforce_noqa = settings.enabled.contains(&CheckCode::M001);
let mut noqa_directives: BTreeMap<usize, (Directive, Vec<&str>)> = BTreeMap::new();
let mut noqa_directives: IntMap<usize, (Directive, Vec<&str>)> = IntMap::default();
let mut line_checks = vec![];
let mut ignored = vec![];
checks.sort_by_key(|check| check.location);
let mut checks_iter = checks.iter().enumerate().peekable();
if let Some((_index, check)) = checks_iter.peek() {
assert!(check.location.row() >= 1);
}
let lines: Vec<&str> = contents.lines().collect();
for (lineno, line) in lines.iter().enumerate() {
// Grab the noqa (logical) line number for the current (physical) line.
// If there are newlines at the end of the file, they won't be represented in
// `noqa_line_for`, so fallback to the current line.
let noqa_lineno = noqa_line_for
.get(lineno)
.get(&lineno)
.map(|lineno| lineno - 1)
.unwrap_or(lineno);
@@ -90,26 +94,25 @@ pub fn check_lines(
}
// Remove any ignored checks.
// TODO(charlie): Only validate checks for the current line.
for (index, check) in checks.iter().enumerate() {
if check.location.row() == lineno + 1 {
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
while let Some((index, check)) =
checks_iter.next_if(|(_index, check)| check.location.row() == lineno + 1)
{
let noqa = noqa_directives
.entry(noqa_lineno)
.or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
match noqa {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
ignored.push(index)
}
(Directive::Codes(_, _, codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
ignored.push(index);
}
}
(Directive::None, _) => {}
match noqa {
(Directive::All(..), matches) => {
matches.push(check.kind.code().as_ref());
ignored.push(index)
}
(Directive::Codes(_, _, codes), matches) => {
if codes.contains(&check.kind.code().as_ref()) {
matches.push(check.kind.code().as_ref());
ignored.push(index);
}
}
(Directive::None, _) => {}
}
}
@@ -153,7 +156,7 @@ pub fn check_lines(
if let Some(line) = lines.last() {
let lineno = lines.len() - 1;
let noqa_lineno = noqa_line_for
.get(lineno)
.get(&lineno)
.map(|lineno| lineno - 1)
.unwrap_or(lineno);
@@ -257,6 +260,8 @@ pub fn check_lines(
#[cfg(test)]
mod tests {
use nohash_hasher::IntMap;
use super::check_lines;
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
@@ -265,7 +270,7 @@ mod tests {
#[test]
fn e501_non_ascii_char() {
let line = "'\u{4e9c}' * 2"; // 7 in UTF-32, 9 in UTF-8.
let noqa_line_for: Vec<usize> = vec![1];
let noqa_line_for: IntMap<usize, usize> = Default::default();
let check_with_max_line_length = |line_length: usize| {
let mut checks: Vec<Check> = vec![];
check_lines(

View File

@@ -85,6 +85,7 @@ pub enum CheckCode {
B007,
B008,
B009,
B010,
B011,
B013,
B014,
@@ -131,6 +132,17 @@ pub enum CheckCode {
ANN205,
ANN206,
ANN401,
// flake8-2020
YTT101,
YTT102,
YTT103,
YTT201,
YTT202,
YTT203,
YTT204,
YTT301,
YTT302,
YTT303,
// pyupgrade
U001,
U002,
@@ -228,6 +240,7 @@ pub enum CheckCategory {
Flake8Print,
Flake8Quotes,
Flake8Annotations,
Flake82020,
Ruff,
Meta,
}
@@ -244,6 +257,7 @@ impl CheckCategory {
CheckCategory::Flake8Print => "flake8-print",
CheckCategory::Flake8Quotes => "flake8-quotes",
CheckCategory::Flake8Annotations => "flake8-annotations",
CheckCategory::Flake82020 => "flake8-2020",
CheckCategory::Pyupgrade => "pyupgrade",
CheckCategory::Pydocstyle => "pydocstyle",
CheckCategory::PEP8Naming => "pep8-naming",
@@ -271,6 +285,7 @@ impl CheckCategory {
CheckCategory::Flake8Annotations => {
Some("https://pypi.org/project/flake8-annotations/2.9.1/")
}
CheckCategory::Flake82020 => Some("https://pypi.org/project/flake8-2020/1.7.0/"),
CheckCategory::Pyupgrade => Some("https://pypi.org/project/pyupgrade/3.2.0/"),
CheckCategory::Pydocstyle => Some("https://pypi.org/project/pydocstyle/6.1.1/"),
CheckCategory::PEP8Naming => Some("https://pypi.org/project/pep8-naming/0.13.2/"),
@@ -357,6 +372,7 @@ pub enum CheckKind {
UnusedLoopControlVariable(String),
FunctionCallArgumentDefault,
GetAttrWithConstant,
SetAttrWithConstant,
DoNotAssertFalse,
RedundantTupleInExceptionHandler(String),
DuplicateHandlerException(Vec<String>),
@@ -403,6 +419,17 @@ pub enum CheckKind {
MissingReturnTypeStaticMethod(String),
MissingReturnTypeClassMethod(String),
DynamicallyTypedExpression(String),
// flake8-2020
SysVersionSlice3Referenced,
SysVersion2Referenced,
SysVersionCmpStr3,
SysVersionInfo0Eq3Referenced,
SixPY3Referenced,
SysVersionInfo1CmpInt,
SysVersionInfoMinorCmpInt,
SysVersion0Referenced,
SysVersionCmpStr10,
SysVersionSlice1Referenced,
// pyupgrade
TypeOfPrimitive(Primitive),
UnnecessaryAbspath,
@@ -573,6 +600,7 @@ impl CheckCode {
CheckCode::B007 => CheckKind::UnusedLoopControlVariable("i".to_string()),
CheckCode::B008 => CheckKind::FunctionCallArgumentDefault,
CheckCode::B009 => CheckKind::GetAttrWithConstant,
CheckCode::B010 => CheckKind::SetAttrWithConstant,
CheckCode::B011 => CheckKind::DoNotAssertFalse,
CheckCode::B013 => {
CheckKind::RedundantTupleInExceptionHandler("ValueError".to_string())
@@ -634,6 +662,17 @@ impl CheckCode {
CheckCode::ANN205 => CheckKind::MissingReturnTypeStaticMethod("...".to_string()),
CheckCode::ANN206 => CheckKind::MissingReturnTypeClassMethod("...".to_string()),
CheckCode::ANN401 => CheckKind::DynamicallyTypedExpression("...".to_string()),
// flake8-2020
CheckCode::YTT101 => CheckKind::SysVersionSlice3Referenced,
CheckCode::YTT102 => CheckKind::SysVersion2Referenced,
CheckCode::YTT103 => CheckKind::SysVersionCmpStr3,
CheckCode::YTT201 => CheckKind::SysVersionInfo0Eq3Referenced,
CheckCode::YTT202 => CheckKind::SixPY3Referenced,
CheckCode::YTT203 => CheckKind::SysVersionInfo1CmpInt,
CheckCode::YTT204 => CheckKind::SysVersionInfoMinorCmpInt,
CheckCode::YTT301 => CheckKind::SysVersion0Referenced,
CheckCode::YTT302 => CheckKind::SysVersionCmpStr10,
CheckCode::YTT303 => CheckKind::SysVersionSlice1Referenced,
// pyupgrade
CheckCode::U001 => CheckKind::UselessMetaclassType,
CheckCode::U002 => CheckKind::UnnecessaryAbspath,
@@ -794,6 +833,7 @@ impl CheckCode {
CheckCode::B007 => CheckCategory::Flake8Bugbear,
CheckCode::B008 => CheckCategory::Flake8Bugbear,
CheckCode::B009 => CheckCategory::Flake8Bugbear,
CheckCode::B010 => CheckCategory::Flake8Bugbear,
CheckCode::B011 => CheckCategory::Flake8Bugbear,
CheckCode::B013 => CheckCategory::Flake8Bugbear,
CheckCode::B014 => CheckCategory::Flake8Bugbear,
@@ -836,6 +876,16 @@ impl CheckCode {
CheckCode::ANN205 => CheckCategory::Flake8Annotations,
CheckCode::ANN206 => CheckCategory::Flake8Annotations,
CheckCode::ANN401 => CheckCategory::Flake8Annotations,
CheckCode::YTT101 => CheckCategory::Flake82020,
CheckCode::YTT102 => CheckCategory::Flake82020,
CheckCode::YTT103 => CheckCategory::Flake82020,
CheckCode::YTT201 => CheckCategory::Flake82020,
CheckCode::YTT202 => CheckCategory::Flake82020,
CheckCode::YTT203 => CheckCategory::Flake82020,
CheckCode::YTT204 => CheckCategory::Flake82020,
CheckCode::YTT301 => CheckCategory::Flake82020,
CheckCode::YTT302 => CheckCategory::Flake82020,
CheckCode::YTT303 => CheckCategory::Flake82020,
CheckCode::U001 => CheckCategory::Pyupgrade,
CheckCode::U002 => CheckCategory::Pyupgrade,
CheckCode::U003 => CheckCategory::Pyupgrade,
@@ -978,6 +1028,7 @@ impl CheckKind {
CheckKind::UnusedLoopControlVariable(_) => &CheckCode::B007,
CheckKind::FunctionCallArgumentDefault => &CheckCode::B008,
CheckKind::GetAttrWithConstant => &CheckCode::B009,
CheckKind::SetAttrWithConstant => &CheckCode::B010,
CheckKind::DoNotAssertFalse => &CheckCode::B011,
CheckKind::RedundantTupleInExceptionHandler(_) => &CheckCode::B013,
CheckKind::DuplicateHandlerException(_) => &CheckCode::B014,
@@ -1024,6 +1075,17 @@ impl CheckKind {
CheckKind::MissingReturnTypeStaticMethod(_) => &CheckCode::ANN205,
CheckKind::MissingReturnTypeClassMethod(_) => &CheckCode::ANN206,
CheckKind::DynamicallyTypedExpression(_) => &CheckCode::ANN401,
// flake8-2020
CheckKind::SysVersionSlice3Referenced => &CheckCode::YTT101,
CheckKind::SysVersion2Referenced => &CheckCode::YTT102,
CheckKind::SysVersionCmpStr3 => &CheckCode::YTT103,
CheckKind::SysVersionInfo0Eq3Referenced => &CheckCode::YTT201,
CheckKind::SixPY3Referenced => &CheckCode::YTT202,
CheckKind::SysVersionInfo1CmpInt => &CheckCode::YTT203,
CheckKind::SysVersionInfoMinorCmpInt => &CheckCode::YTT204,
CheckKind::SysVersion0Referenced => &CheckCode::YTT301,
CheckKind::SysVersionCmpStr10 => &CheckCode::YTT302,
CheckKind::SysVersionSlice1Referenced => &CheckCode::YTT303,
// pyupgrade
CheckKind::TypeOfPrimitive(_) => &CheckCode::U003,
CheckKind::UnnecessaryAbspath => &CheckCode::U002,
@@ -1287,6 +1349,10 @@ impl CheckKind {
value, it is not any safer than normal property \
access."
.to_string(),
CheckKind::SetAttrWithConstant => "Do not call `setattr` with a constant attribute \
value, it is not any safer than normal property \
access."
.to_string(),
CheckKind::DoNotAssertFalse => "Do not `assert False` (`python -O` removes these \
calls), raise `AssertionError()`"
.to_string(),
@@ -1464,6 +1530,38 @@ impl CheckKind {
CheckKind::DynamicallyTypedExpression(name) => {
format!("Dynamically typed expressions (typing.Any) are disallowed in `{name}`")
}
// flake8-2020
CheckKind::SysVersionSlice3Referenced => {
"`sys.version[:3]` referenced (python3.10), use `sys.version_info`".to_string()
}
CheckKind::SysVersion2Referenced => {
"`sys.version[2]` referenced (python3.10), use `sys.version_info`".to_string()
}
CheckKind::SysVersionCmpStr3 => {
"`sys.version` compared to string (python3.10), use `sys.version_info`".to_string()
}
CheckKind::SysVersionInfo0Eq3Referenced => {
"`sys.version_info[0] == 3` referenced (python4), use `>=`".to_string()
}
CheckKind::SixPY3Referenced => {
"`six.PY3` referenced (python4), use `not six.PY2`".to_string()
}
CheckKind::SysVersionInfo1CmpInt => "`sys.version_info[1]` compared to integer \
(python4), compare `sys.version_info` to tuple"
.to_string(),
CheckKind::SysVersionInfoMinorCmpInt => "`sys.version_info.minor` compared to integer \
(python4), compare `sys.version_info` to \
tuple"
.to_string(),
CheckKind::SysVersion0Referenced => {
"`sys.version[0]` referenced (python10), use `sys.version_info`".to_string()
}
CheckKind::SysVersionCmpStr10 => {
"`sys.version` compared to string (python10), use `sys.version_info`".to_string()
}
CheckKind::SysVersionSlice1Referenced => {
"`sys.version[:1]` referenced (python10), use `sys.version_info`".to_string()
}
// pyupgrade
CheckKind::TypeOfPrimitive(primitive) => {
format!("Use `{}` instead of `type(...)`", primitive.builtin())
@@ -1734,6 +1832,7 @@ impl CheckKind {
| CheckKind::DeprecatedUnittestAlias(_, _)
| CheckKind::DoNotAssertFalse
| CheckKind::DuplicateHandlerException(_)
| CheckKind::GetAttrWithConstant
| CheckKind::IsLiteral
| CheckKind::NewLineAfterLastParagraph
| CheckKind::NewLineAfterSectionName(_)

View File

@@ -45,6 +45,7 @@ pub enum CheckCodePrefix {
B008,
B009,
B01,
B010,
B011,
B013,
B014,
@@ -270,6 +271,23 @@ pub enum CheckCodePrefix {
W6,
W60,
W605,
YTT,
YTT1,
YTT10,
YTT101,
YTT102,
YTT103,
YTT2,
YTT20,
YTT201,
YTT202,
YTT203,
YTT204,
YTT3,
YTT30,
YTT301,
YTT302,
YTT303,
}
#[derive(PartialEq, Eq, PartialOrd, Ord)]
@@ -342,6 +360,7 @@ impl CheckCodePrefix {
CheckCode::B007,
CheckCode::B008,
CheckCode::B009,
CheckCode::B010,
CheckCode::B011,
CheckCode::B013,
CheckCode::B014,
@@ -361,6 +380,7 @@ impl CheckCodePrefix {
CheckCode::B007,
CheckCode::B008,
CheckCode::B009,
CheckCode::B010,
CheckCode::B011,
CheckCode::B013,
CheckCode::B014,
@@ -390,6 +410,7 @@ impl CheckCodePrefix {
CheckCodePrefix::B008 => vec![CheckCode::B008],
CheckCodePrefix::B009 => vec![CheckCode::B009],
CheckCodePrefix::B01 => vec![
CheckCode::B010,
CheckCode::B011,
CheckCode::B013,
CheckCode::B014,
@@ -398,6 +419,7 @@ impl CheckCodePrefix {
CheckCode::B017,
CheckCode::B018,
],
CheckCodePrefix::B010 => vec![CheckCode::B010],
CheckCodePrefix::B011 => vec![CheckCode::B011],
CheckCodePrefix::B013 => vec![CheckCode::B013],
CheckCodePrefix::B014 => vec![CheckCode::B014],
@@ -1021,6 +1043,44 @@ impl CheckCodePrefix {
CheckCodePrefix::W6 => vec![CheckCode::W605],
CheckCodePrefix::W60 => vec![CheckCode::W605],
CheckCodePrefix::W605 => vec![CheckCode::W605],
CheckCodePrefix::YTT => vec![
CheckCode::YTT101,
CheckCode::YTT102,
CheckCode::YTT103,
CheckCode::YTT201,
CheckCode::YTT202,
CheckCode::YTT203,
CheckCode::YTT204,
CheckCode::YTT301,
CheckCode::YTT302,
CheckCode::YTT303,
],
CheckCodePrefix::YTT1 => vec![CheckCode::YTT101, CheckCode::YTT102, CheckCode::YTT103],
CheckCodePrefix::YTT10 => vec![CheckCode::YTT101, CheckCode::YTT102, CheckCode::YTT103],
CheckCodePrefix::YTT101 => vec![CheckCode::YTT101],
CheckCodePrefix::YTT102 => vec![CheckCode::YTT102],
CheckCodePrefix::YTT103 => vec![CheckCode::YTT103],
CheckCodePrefix::YTT2 => vec![
CheckCode::YTT201,
CheckCode::YTT202,
CheckCode::YTT203,
CheckCode::YTT204,
],
CheckCodePrefix::YTT20 => vec![
CheckCode::YTT201,
CheckCode::YTT202,
CheckCode::YTT203,
CheckCode::YTT204,
],
CheckCodePrefix::YTT201 => vec![CheckCode::YTT201],
CheckCodePrefix::YTT202 => vec![CheckCode::YTT202],
CheckCodePrefix::YTT203 => vec![CheckCode::YTT203],
CheckCodePrefix::YTT204 => vec![CheckCode::YTT204],
CheckCodePrefix::YTT3 => vec![CheckCode::YTT301, CheckCode::YTT302, CheckCode::YTT303],
CheckCodePrefix::YTT30 => vec![CheckCode::YTT301, CheckCode::YTT302, CheckCode::YTT303],
CheckCodePrefix::YTT301 => vec![CheckCode::YTT301],
CheckCodePrefix::YTT302 => vec![CheckCode::YTT302],
CheckCodePrefix::YTT303 => vec![CheckCode::YTT303],
}
}
}
@@ -1066,6 +1126,7 @@ impl CheckCodePrefix {
CheckCodePrefix::B008 => PrefixSpecificity::Explicit,
CheckCodePrefix::B009 => PrefixSpecificity::Explicit,
CheckCodePrefix::B01 => PrefixSpecificity::Tens,
CheckCodePrefix::B010 => PrefixSpecificity::Explicit,
CheckCodePrefix::B011 => PrefixSpecificity::Explicit,
CheckCodePrefix::B013 => PrefixSpecificity::Explicit,
CheckCodePrefix::B014 => PrefixSpecificity::Explicit,
@@ -1291,6 +1352,23 @@ impl CheckCodePrefix {
CheckCodePrefix::W6 => PrefixSpecificity::Hundreds,
CheckCodePrefix::W60 => PrefixSpecificity::Tens,
CheckCodePrefix::W605 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT => PrefixSpecificity::Category,
CheckCodePrefix::YTT1 => PrefixSpecificity::Hundreds,
CheckCodePrefix::YTT10 => PrefixSpecificity::Tens,
CheckCodePrefix::YTT101 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT102 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT103 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT2 => PrefixSpecificity::Hundreds,
CheckCodePrefix::YTT20 => PrefixSpecificity::Tens,
CheckCodePrefix::YTT201 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT202 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT203 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT204 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT3 => PrefixSpecificity::Hundreds,
CheckCodePrefix::YTT30 => PrefixSpecificity::Tens,
CheckCodePrefix::YTT301 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT302 => PrefixSpecificity::Explicit,
CheckCodePrefix::YTT303 => PrefixSpecificity::Explicit,
}
}
}

206
src/directives.rs Normal file
View File

@@ -0,0 +1,206 @@
//! Extract `# noqa` and `# isort: skip` directives from tokenized source.
use bitflags::bitflags;
use nohash_hasher::{IntMap, IntSet};
use rustpython_ast::Location;
use rustpython_parser::lexer::{LexResult, Tok};
use crate::ast::types::Range;
use crate::checks::LintSource;
use crate::{Settings, SourceCodeLocator};
bitflags! {
pub struct Flags: u32 {
const NOQA = 0b00000001;
const ISORT = 0b00000010;
}
}
impl Flags {
pub fn from_settings(settings: &Settings) -> Self {
if settings
.enabled
.iter()
.any(|check_code| matches!(check_code.lint_source(), LintSource::Imports))
{
Flags::NOQA | Flags::ISORT
} else {
Flags::NOQA
}
}
}
pub struct Directives {
pub noqa_line_for: IntMap<usize, usize>,
pub isort_exclusions: IntSet<usize>,
}
pub fn extract_directives(
lxr: &[LexResult],
locator: &SourceCodeLocator,
flags: &Flags,
) -> Directives {
Directives {
noqa_line_for: if flags.contains(Flags::NOQA) {
extract_noqa_line_for(lxr)
} else {
Default::default()
},
isort_exclusions: if flags.contains(Flags::ISORT) {
extract_isort_exclusions(lxr, locator)
} else {
Default::default()
},
}
}
/// Extract a mapping from logical line to noqa line.
pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
let mut noqa_line_for: IntMap<usize, usize> = IntMap::default();
for (start, tok, end) in lxr.iter().flatten() {
if matches!(tok, Tok::EndOfFile) {
break;
}
// For multi-line strings, we expect `noqa` directives on the last line of the
// string.
if matches!(tok, Tok::String { .. }) && end.row() > start.row() {
for i in start.row()..end.row() {
noqa_line_for.insert(i, end.row());
}
}
}
noqa_line_for
}
/// Extract a set of lines over which to disable isort.
pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
let mut exclusions: IntSet<usize> = IntSet::default();
let mut off: Option<&Location> = None;
for (start, tok, end) in lxr.iter().flatten() {
// TODO(charlie): Modify RustPython to include the comment text in the token.
if matches!(tok, Tok::Comment) {
let comment_text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
});
if off.is_some() {
if comment_text == "# isort: on" {
if let Some(start) = off {
for row in start.row() + 1..=end.row() {
exclusions.insert(row);
}
}
off = None;
}
} else {
if comment_text.contains("isort: skip") || comment_text.contains("isort:skip") {
exclusions.insert(start.row());
} else if comment_text == "# isort: off" {
off = Some(start);
}
}
} else if matches!(tok, Tok::EndOfFile) {
if let Some(start) = off {
for row in start.row() + 1..=end.row() {
exclusions.insert(row);
}
}
break;
}
}
exclusions
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use nohash_hasher::IntMap;
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use crate::directives::extract_noqa_line_for;
#[test]
fn extraction() -> Result<()> {
let empty: IntMap<usize, usize> = Default::default();
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"
x = 1
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = '''abc
def
ghi
'''
y = 2
z = x + 1",
)
.collect();
assert_eq!(
extract_noqa_line_for(&lxr),
IntMap::from_iter([(1, 4), (2, 4), (3, 4)])
);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = '''abc
def
ghi
'''
z = 2",
)
.collect();
assert_eq!(
extract_noqa_line_for(&lxr),
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = '''abc
def
ghi
'''",
)
.collect();
assert_eq!(
extract_noqa_line_for(&lxr),
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
);
Ok(())
}
}

1
src/flake8_2020/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod plugins;

192
src/flake8_2020/plugins.rs Normal file
View File

@@ -0,0 +1,192 @@
use num_bigint::BigInt;
use rustpython_ast::{Cmpop, Constant, Expr, ExprKind, Located};
use crate::ast::helpers::match_name_or_attr_from_module;
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};
fn is_sys(checker: &Checker, expr: &Expr, target: &str) -> bool {
match_name_or_attr_from_module(expr, target, "sys", checker.from_imports.get("sys"))
}
/// YTT101, YTT102, YTT301, YTT303
pub fn subscript(checker: &mut Checker, value: &Expr, slice: &Expr) {
if is_sys(checker, value, "version") {
match &slice.node {
ExprKind::Slice {
lower: None,
upper: Some(upper),
step: None,
..
} => {
if let ExprKind::Constant {
value: Constant::Int(i),
..
} = &upper.node
{
if *i == BigInt::from(1)
&& checker.settings.enabled.contains(&CheckCode::YTT303)
{
checker.add_check(Check::new(
CheckKind::SysVersionSlice1Referenced,
Range::from_located(value),
));
} else if *i == BigInt::from(3)
&& checker.settings.enabled.contains(&CheckCode::YTT101)
{
checker.add_check(Check::new(
CheckKind::SysVersionSlice3Referenced,
Range::from_located(value),
));
}
}
}
ExprKind::Constant {
value: Constant::Int(i),
..
} => {
if *i == BigInt::from(2) && checker.settings.enabled.contains(&CheckCode::YTT102) {
checker.add_check(Check::new(
CheckKind::SysVersion2Referenced,
Range::from_located(value),
));
} else if *i == BigInt::from(0)
&& checker.settings.enabled.contains(&CheckCode::YTT301)
{
checker.add_check(Check::new(
CheckKind::SysVersion0Referenced,
Range::from_located(value),
));
}
}
_ => {}
}
}
}
/// YTT103, YTT201, YTT203, YTT204, YTT302
pub fn compare(checker: &mut Checker, left: &Expr, ops: &[Cmpop], comparators: &[Expr]) {
match &left.node {
ExprKind::Subscript { value, slice, .. } if is_sys(checker, value, "version_info") => {
if let ExprKind::Constant {
value: Constant::Int(i),
..
} = &slice.node
{
if *i == BigInt::from(0) {
if let (
[Cmpop::Eq | Cmpop::NotEq],
[Located {
node:
ExprKind::Constant {
value: Constant::Int(n),
..
},
..
}],
) = (ops, comparators)
{
if *n == BigInt::from(3)
&& checker.settings.enabled.contains(&CheckCode::YTT201)
{
checker.add_check(Check::new(
CheckKind::SysVersionInfo0Eq3Referenced,
Range::from_located(left),
));
}
}
} else if *i == BigInt::from(1) {
if let (
[Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
[Located {
node:
ExprKind::Constant {
value: Constant::Int(_),
..
},
..
}],
) = (ops, comparators)
{
if checker.settings.enabled.contains(&CheckCode::YTT203) {
checker.add_check(Check::new(
CheckKind::SysVersionInfo1CmpInt,
Range::from_located(left),
));
}
}
}
}
}
ExprKind::Attribute { value, attr, .. }
if is_sys(checker, value, "version_info") && attr == "minor" =>
{
if let (
[Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
[Located {
node:
ExprKind::Constant {
value: Constant::Int(_),
..
},
..
}],
) = (ops, comparators)
{
if checker.settings.enabled.contains(&CheckCode::YTT204) {
checker.add_check(Check::new(
CheckKind::SysVersionInfoMinorCmpInt,
Range::from_located(left),
));
}
}
}
_ => {}
}
if is_sys(checker, left, "version") {
if let (
[Cmpop::Lt | Cmpop::LtE | Cmpop::Gt | Cmpop::GtE],
[Located {
node:
ExprKind::Constant {
value: Constant::Str(s),
..
},
..
}],
) = (ops, comparators)
{
if s.len() == 1 {
if checker.settings.enabled.contains(&CheckCode::YTT302) {
checker.add_check(Check::new(
CheckKind::SysVersionCmpStr10,
Range::from_located(left),
));
}
} else if checker.settings.enabled.contains(&CheckCode::YTT103) {
checker.add_check(Check::new(
CheckKind::SysVersionCmpStr3,
Range::from_located(left),
));
}
}
}
}
/// YTT202
pub fn name_or_attribute(checker: &mut Checker, expr: &Expr) {
if match_name_or_attr_from_module(expr, "PY3", "six", checker.from_imports.get("six"))
&& checker.settings.enabled.contains(&CheckCode::YTT202)
{
checker.add_check(Check::new(
CheckKind::SixPY3Referenced,
Range::from_located(expr),
));
}
}

View File

@@ -0,0 +1,5 @@
use once_cell::sync::Lazy;
use regex::Regex;
pub static IDENTIFIER_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^[A-Za-z_][A-Za-z0-9_]*$").unwrap());

View File

@@ -1 +1,2 @@
mod constants;
pub mod plugins;

View File

@@ -8,6 +8,7 @@ use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::flake8_bugbear::plugins::mutable_argument_default::is_mutable_func;
// TODO(charlie): Verify imports for each of the imported members.
const IMMUTABLE_FUNCS: [&str; 11] = [
"tuple",
"frozenset",

View File

@@ -1,30 +1,50 @@
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_ast::{Constant, Expr, ExprKind};
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::code_gen::SourceGenerator;
use crate::flake8_bugbear::constants::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
static IDENTIFIER_REGEX: Lazy<Regex> =
Lazy::new(|| Regex::new(r"^[A-Za-z_][A-Za-z0-9_]*$").unwrap());
fn attribute(value: &Expr, attr: &str) -> Expr {
Expr::new(
Default::default(),
Default::default(),
ExprKind::Attribute {
value: Box::new(value.clone()),
attr: attr.to_string(),
ctx: ExprContext::Load,
},
)
}
/// B009
pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "getattr" {
if let [_, arg] = args {
if let [obj, arg] = args {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node
{
if IDENTIFIER_REGEX.is_match(value) && !KWLIST.contains(&value.as_str()) {
checker.add_check(Check::new(
CheckKind::GetAttrWithConstant,
Range::from_located(expr),
));
let mut check =
Check::new(CheckKind::GetAttrWithConstant, Range::from_located(expr));
if checker.patch() {
let mut generator = SourceGenerator::new();
if let Ok(()) = generator.unparse_expr(&attribute(obj, value), 0) {
if let Ok(content) = generator.generate() {
check.amend(Fix::replacement(
content,
expr.location,
expr.end_location.unwrap(),
));
}
}
}
checker.add_check(check);
}
}
}

View File

@@ -7,6 +7,7 @@ pub use function_call_argument_default::function_call_argument_default;
pub use getattr_with_constant::getattr_with_constant;
pub use mutable_argument_default::mutable_argument_default;
pub use redundant_tuple_in_exception_handler::redundant_tuple_in_exception_handler;
pub use setattr_with_constant::setattr_with_constant;
pub use star_arg_unpacking_after_keyword_arg::star_arg_unpacking_after_keyword_arg;
pub use strip_with_multi_characters::strip_with_multi_characters;
pub use unary_prefix_increment::unary_prefix_increment;
@@ -24,6 +25,7 @@ mod function_call_argument_default;
mod getattr_with_constant;
mod mutable_argument_default;
mod redundant_tuple_in_exception_handler;
mod setattr_with_constant;
mod star_arg_unpacking_after_keyword_arg;
mod strip_with_multi_characters;
mod unary_prefix_increment;

View File

@@ -4,6 +4,7 @@ use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
// TODO(charlie): Verify imports for each of the imported members.
pub fn is_mutable_func(expr: &Expr) -> bool {
match &expr.node {
ExprKind::Name { id, .. }

View File

@@ -0,0 +1,29 @@
use rustpython_ast::{Constant, Expr, ExprKind};
use crate::ast::types::Range;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
use crate::flake8_bugbear::constants::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
/// B010
pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, args: &[Expr]) {
if let ExprKind::Name { id, .. } = &func.node {
if id == "setattr" {
if let [_, arg, _] = args {
if let ExprKind::Constant {
value: Constant::Str(value),
..
} = &arg.node
{
if IDENTIFIER_REGEX.is_match(value) && !KWLIST.contains(&value.as_str()) {
checker.add_check(Check::new(
CheckKind::SetAttrWithConstant,
Range::from_located(expr),
));
}
}
}
}
}
}

View File

@@ -12,16 +12,20 @@ pub enum ImportType {
StandardLibrary,
ThirdParty,
FirstParty,
LocalFolder,
}
pub fn categorize(
module_base: &str,
level: &Option<usize>,
src: &[PathBuf],
known_first_party: &BTreeSet<String>,
known_third_party: &BTreeSet<String>,
extra_standard_library: &BTreeSet<String>,
) -> ImportType {
if known_first_party.contains(module_base) {
if level.map(|level| level > 0).unwrap_or(false) {
ImportType::LocalFolder
} else if known_first_party.contains(module_base) {
ImportType::FirstParty
} else if known_third_party.contains(module_base) {
ImportType::ThirdParty
@@ -31,12 +35,10 @@ pub fn categorize(
import_type.clone()
} else if KNOWN_STANDARD_LIBRARY.contains(module_base) {
ImportType::StandardLibrary
} else if find_local(src, module_base) {
ImportType::FirstParty
} else {
if find_local(src, module_base) {
ImportType::FirstParty
} else {
ImportType::ThirdParty
}
ImportType::ThirdParty
}
}

View File

@@ -1,15 +1,18 @@
use std::collections::{BTreeMap, BTreeSet};
use std::path::PathBuf;
use itertools::Itertools;
use ropey::RopeBuilder;
use rustpython_ast::{Stmt, StmtKind};
use crate::isort::categorize::{categorize, ImportType};
use crate::isort::types::{AliasData, ImportBlock, ImportFromData, Importable};
use crate::isort::sorting::{member_key, module_key};
use crate::isort::types::{AliasData, ImportBlock, ImportFromData, Importable, OrderedImportBlock};
mod categorize;
pub mod plugins;
pub mod settings;
mod sorting;
pub mod track;
mod types;
@@ -62,6 +65,7 @@ fn categorize_imports<'a>(
for alias in block.import {
let import_type = categorize(
&alias.module_base(),
&None,
src,
known_first_party,
known_third_party,
@@ -77,6 +81,7 @@ fn categorize_imports<'a>(
for (import_from, aliases) in block.import_from {
let classification = categorize(
&import_from.module_base(),
import_from.level,
src,
known_first_party,
known_third_party,
@@ -91,7 +96,37 @@ fn categorize_imports<'a>(
block_by_type
}
pub fn sort_imports(
fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
let mut ordered: OrderedImportBlock = Default::default();
// Sort `StmtKind::Import`.
for import in block
.import
.into_iter()
.sorted_by_cached_key(|alias| module_key(alias.name))
{
ordered.import.push(import);
}
// Sort `StmtKind::ImportFrom`.
for (import_from, aliases) in
block
.import_from
.into_iter()
.sorted_by_cached_key(|(import_from, _)| {
import_from.module.as_ref().map(|module| module_key(module))
})
{
ordered.import_from.push((
import_from,
aliases
.into_iter()
.sorted_by_cached_key(|alias| member_key(alias.name))
.collect(),
));
}
ordered
}
pub fn format_imports(
block: Vec<&Stmt>,
line_length: &usize,
src: &[PathBuf],
@@ -114,45 +149,41 @@ pub fn sort_imports(
// Generate replacement source code.
let mut output = RopeBuilder::new();
let mut first_block = true;
for import_type in [
ImportType::Future,
ImportType::StandardLibrary,
ImportType::ThirdParty,
ImportType::FirstParty,
] {
if let Some(import_block) = block_by_type.get(&import_type) {
// Add a blank line between every section.
if !first_block {
output.append("\n");
for import_block in block_by_type.into_values() {
let import_block = sort_imports(import_block);
// Add a blank line between every section.
if !first_block {
output.append("\n");
} else {
first_block = false;
}
// Format `StmtKind::Import` statements.
for AliasData { name, asname } in import_block.import.iter() {
if let Some(asname) = asname {
output.append(&format!("import {} as {}\n", name, asname));
} else {
first_block = false;
output.append(&format!("import {}\n", name));
}
}
// Format `StmtKind::Import` statements.
for AliasData { name, asname } in import_block.import.iter() {
if let Some(asname) = asname {
output.append(&format!("import {} as {}\n", name, asname));
} else {
output.append(&format!("import {}\n", name));
}
}
// Format `StmtKind::ImportFrom` statements.
for (import_from, aliases) in import_block.import_from.iter() {
let prelude: String = format!("from {} import ", import_from.module_name());
let members: Vec<String> = aliases
.iter()
.map(|AliasData { name, asname }| {
if let Some(asname) = asname {
format!("{} as {}", name, asname)
} else {
name.to_string()
}
})
.collect();
// Format `StmtKind::ImportFrom` statements.
for (import_from, aliases) in import_block.import_from.iter() {
let prelude: String = format!("from {} import ", import_from.module_name());
let members: Vec<String> = aliases
.iter()
.map(|AliasData { name, asname }| {
if let Some(asname) = asname {
format!("{} as {}", name, asname)
} else {
name.to_string()
}
})
.collect();
// Can we fit the import on a single line?
let expected_len: usize =
// Can we fit the import on a single line?
let expected_len: usize =
// `from base import `
prelude.len()
// `member( as alias)?`
@@ -160,36 +191,35 @@ pub fn sort_imports(
// `, `
+ 2 * (members.len() - 1);
if expected_len <= *line_length {
// `from base import `
output.append(&prelude);
// `member( as alias)?(, )?`
for (index, part) in members.into_iter().enumerate() {
if index > 0 {
output.append(", ");
}
output.append(&part);
if expected_len <= *line_length {
// `from base import `
output.append(&prelude);
// `member( as alias)?(, )?`
for (index, part) in members.into_iter().enumerate() {
if index > 0 {
output.append(", ");
}
// `\n`
output.append("\n");
} else {
// `from base import (\n`
output.append(&prelude);
output.append("(");
output.append("\n");
output.append(&part);
}
// `\n`
output.append("\n");
} else {
// `from base import (\n`
output.append(&prelude);
output.append("(");
output.append("\n");
// ` member( as alias)?,\n`
for part in members {
output.append(INDENT);
output.append(&part);
output.append(",");
output.append("\n");
}
// `)\n`
output.append(")");
// ` member( as alias)?,\n`
for part in members {
output.append(INDENT);
output.append(&part);
output.append(",");
output.append("\n");
}
// `)\n`
output.append(")");
output.append("\n");
}
}
}
@@ -208,17 +238,20 @@ mod tests {
use crate::linter::test_path;
use crate::Settings;
#[test_case(Path::new("reorder_within_section.py"))]
#[test_case(Path::new("no_reorder_within_section.py"))]
#[test_case(Path::new("separate_future_imports.py"))]
#[test_case(Path::new("separate_third_party_imports.py"))]
#[test_case(Path::new("separate_first_party_imports.py"))]
#[test_case(Path::new("deduplicate_imports.py"))]
#[test_case(Path::new("combine_import_froms.py"))]
#[test_case(Path::new("preserve_indentation.py"))]
#[test_case(Path::new("deduplicate_imports.py"))]
#[test_case(Path::new("fit_line_length.py"))]
#[test_case(Path::new("import_from_after_import.py"))]
#[test_case(Path::new("leading_prefix.py"))]
#[test_case(Path::new("no_reorder_within_section.py"))]
#[test_case(Path::new("order_by_type.py"))]
#[test_case(Path::new("preserve_indentation.py"))]
#[test_case(Path::new("reorder_within_section.py"))]
#[test_case(Path::new("separate_first_party_imports.py"))]
#[test_case(Path::new("separate_future_imports.py"))]
#[test_case(Path::new("separate_local_folder_imports.py"))]
#[test_case(Path::new("separate_third_party_imports.py"))]
#[test_case(Path::new("skip.py"))]
#[test_case(Path::new("trailing_suffix.py"))]
fn isort(path: &Path) -> Result<()> {
let snapshot = format!("{}", path.to_string_lossy());

View File

@@ -5,7 +5,7 @@ use crate::ast::types::Range;
use crate::autofix::{fixer, Fix};
use crate::checks::CheckKind;
use crate::docstrings::helpers::leading_space;
use crate::isort::sort_imports;
use crate::isort::format_imports;
use crate::{Check, Settings, SourceCodeLocator};
fn extract_range(body: &[&Stmt]) -> Range {
@@ -62,7 +62,7 @@ pub fn check_imports(
let has_trailing_content = match_trailing_content(&body, locator);
// Generate the sorted import block.
let expected = sort_imports(
let expected = format_imports(
body,
&settings.line_length,
&settings.src,

View File

@@ -0,0 +1,22 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 1
column: 0
end_location:
row: 13
column: 0
fix:
patch:
content: "import glob\nimport os\nimport shutil\nimport tempfile\nimport time\nfrom subprocess import PIPE, STDOUT, Popen\n\nimport BAR\nimport bar\nimport FOO\nimport foo\nimport StringIO\nfrom module import BASIC, CONSTANT, Apple, Class, function\n"
location:
row: 1
column: 0
end_location:
row: 13
column: 0
applied: false

View File

@@ -0,0 +1,22 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 1
column: 0
end_location:
row: 5
column: 0
fix:
patch:
content: "import os\nimport sys\n\nimport leading_prefix\n\nfrom . import leading_prefix\n"
location:
row: 1
column: 0
end_location:
row: 5
column: 0
applied: false

View File

@@ -0,0 +1,22 @@
---
source: src/isort/mod.rs
expression: checks
---
- kind: UnsortedImports
location:
row: 9
column: 0
end_location:
row: 11
column: 0
fix:
patch:
content: "import abc\nimport collections\n"
location:
row: 9
column: 0
end_location:
row: 11
column: 0
applied: false

34
src/isort/sorting.rs Normal file
View File

@@ -0,0 +1,34 @@
/// See: https://github.com/PyCQA/isort/blob/12cc5fbd67eebf92eb2213b03c07b138ae1fb448/isort/sorting.py#L13
use crate::python::string;
#[derive(PartialOrd, Ord, PartialEq, Eq)]
pub enum Prefix {
Constants,
Classes,
Variables,
}
pub fn module_key(module_name: &str) -> String {
module_name.to_lowercase()
}
pub fn member_key(member_name: &str) -> (Prefix, String) {
(
if member_name.len() > 1 && string::is_upper(member_name) {
// Ex) `CONSTANT`
Prefix::Constants
} else if member_name
.chars()
.next()
.map(|char| char.is_uppercase())
.unwrap_or(false)
{
// Ex) `Class`
Prefix::Classes
} else {
// Ex) `variable`
Prefix::Variables
},
member_name.to_lowercase(),
)
}

View File

@@ -1,3 +1,4 @@
use nohash_hasher::IntSet;
use rustpython_ast::{
Alias, Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, Excepthandler,
ExcepthandlerKind, Expr, ExprContext, Keyword, MatchCase, Operator, Pattern, Stmt, StmtKind,
@@ -8,16 +9,19 @@ use crate::ast::visitor::Visitor;
#[derive(Debug)]
pub struct ImportTracker<'a> {
pub blocks: Vec<Vec<&'a Stmt>>,
exclusions: &'a IntSet<usize>,
blocks: Vec<Vec<&'a Stmt>>,
}
impl<'a> ImportTracker<'a> {
pub fn new() -> Self {
pub fn new(exclusions: &'a IntSet<usize>) -> Self {
Self {
exclusions,
blocks: vec![vec![]],
}
}
fn add_import(&mut self, stmt: &'a Stmt) {
fn track_import(&mut self, stmt: &'a Stmt) {
let index = self.blocks.len() - 1;
self.blocks[index].push(stmt);
}
@@ -43,8 +47,9 @@ where
if matches!(
stmt.node,
StmtKind::Import { .. } | StmtKind::ImportFrom { .. }
) {
self.add_import(stmt);
) && !self.exclusions.contains(&stmt.location.row())
{
self.track_import(stmt);
} else {
self.finalize();
}

View File

@@ -53,3 +53,11 @@ pub struct ImportBlock<'a> {
// Set of (name, asname).
pub import: BTreeSet<AliasData<'a>>,
}
#[derive(Debug, Default)]
pub struct OrderedImportBlock<'a> {
// Map from (module, level) to `AliasData`.
pub import_from: Vec<(ImportFromData<'a>, Vec<AliasData<'a>>)>,
// Set of (name, asname).
pub import: Vec<AliasData<'a>>,
}

View File

@@ -25,7 +25,9 @@ pub mod checks_gen;
pub mod cli;
pub mod code_gen;
mod cst;
mod directives;
mod docstrings;
mod flake8_2020;
pub mod flake8_annotations;
mod flake8_bugbear;
mod flake8_builtins;
@@ -74,8 +76,12 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(contents);
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Extract the `# noqa` and `# isort: skip` directives from the source.
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(&settings),
);
// Generate checks.
let checks = check_path(
@@ -83,7 +89,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
contents,
tokens,
&locator,
&noqa_line_for,
&directives,
&settings,
&if autofix { Mode::Generate } else { Mode::None },
)?;

View File

@@ -21,11 +21,12 @@ use crate::check_lines::check_lines;
use crate::check_tokens::check_tokens;
use crate::checks::{Check, CheckCode, CheckKind, LintSource};
use crate::code_gen::SourceGenerator;
use crate::directives::Directives;
use crate::message::Message;
use crate::noqa::add_noqa;
use crate::settings::Settings;
use crate::source_code_locator::SourceCodeLocator;
use crate::{cache, fs, noqa};
use crate::{cache, directives, fs};
/// Collect tokens up to and including the first error.
pub(crate) fn tokenize(contents: &str) -> Vec<LexResult> {
@@ -56,7 +57,7 @@ pub(crate) fn check_path(
contents: &str,
tokens: Vec<LexResult>,
locator: &SourceCodeLocator,
noqa_line_for: &[usize],
directives: &Directives,
settings: &Settings,
autofix: &fixer::Mode,
) -> Result<Vec<Check>> {
@@ -88,7 +89,13 @@ pub(crate) fn check_path(
checks.extend(check_ast(&python_ast, locator, settings, autofix, path));
}
if use_imports {
checks.extend(check_imports(&python_ast, locator, settings, autofix));
checks.extend(check_imports(
&python_ast,
locator,
&directives.isort_exclusions,
settings,
autofix,
));
}
}
Err(parse_error) => {
@@ -106,7 +113,13 @@ pub(crate) fn check_path(
}
// Run the lines-based checks.
check_lines(&mut checks, contents, noqa_line_for, settings, autofix);
check_lines(
&mut checks,
contents,
&directives.noqa_line_for,
settings,
autofix,
);
// Create path ignores.
if !checks.is_empty() && !settings.per_file_ignores.is_empty() {
@@ -134,8 +147,12 @@ pub fn lint_stdin(
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(stdin);
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Extract the `# noqa` and `# isort: skip` directives from the source.
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
);
// Generate checks.
let mut checks = check_path(
@@ -143,7 +160,7 @@ pub fn lint_stdin(
stdin,
tokens,
&locator,
&noqa_line_for,
&directives,
settings,
autofix,
)?;
@@ -188,8 +205,12 @@ pub fn lint_path(
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(&contents);
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Determine the noqa and isort exclusions.
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
);
// Generate checks.
let mut checks = check_path(
@@ -197,7 +218,7 @@ pub fn lint_path(
&contents,
tokens,
&locator,
&noqa_line_for,
&directives,
settings,
autofix,
)?;
@@ -230,8 +251,12 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(&contents);
// Determine the noqa line for every line in the source.
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
// Extract the `# noqa` and `# isort: skip` directives from the source.
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
);
// Generate checks.
let checks = check_path(
@@ -239,12 +264,12 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
&contents,
tokens,
&locator,
&noqa_line_for,
&directives,
settings,
&fixer::Mode::None,
)?;
add_noqa(&checks, &contents, &noqa_line_for, path)
add_noqa(&checks, &contents, &directives.noqa_line_for, path)
}
pub fn autoformat_path(path: &Path) -> Result<()> {
@@ -268,13 +293,17 @@ pub fn test_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Res
let contents = fs::read_file(path)?;
let tokens: Vec<LexResult> = tokenize(&contents);
let locator = SourceCodeLocator::new(&contents);
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
);
check_path(
path,
&contents,
tokens,
&locator,
&noqa_line_for,
&directives,
settings,
autofix,
)
@@ -305,6 +334,7 @@ mod tests {
#[test_case(CheckCode::B007, Path::new("B007.py"); "B007")]
#[test_case(CheckCode::B008, Path::new("B006_B008.py"); "B008")]
#[test_case(CheckCode::B009, Path::new("B009_B010.py"); "B009")]
#[test_case(CheckCode::B010, Path::new("B009_B010.py"); "B010")]
#[test_case(CheckCode::B011, Path::new("B011.py"); "B011")]
#[test_case(CheckCode::B013, Path::new("B013.py"); "B013")]
#[test_case(CheckCode::B014, Path::new("B014.py"); "B014")]
@@ -463,6 +493,16 @@ mod tests {
#[test_case(CheckCode::RUF001, Path::new("RUF001.py"); "RUF001")]
#[test_case(CheckCode::RUF002, Path::new("RUF002.py"); "RUF002")]
#[test_case(CheckCode::RUF003, Path::new("RUF003.py"); "RUF003")]
#[test_case(CheckCode::YTT101, Path::new("YTT101.py"); "YTT101")]
#[test_case(CheckCode::YTT102, Path::new("YTT102.py"); "YTT102")]
#[test_case(CheckCode::YTT103, Path::new("YTT103.py"); "YTT103")]
#[test_case(CheckCode::YTT201, Path::new("YTT201.py"); "YTT201")]
#[test_case(CheckCode::YTT202, Path::new("YTT202.py"); "YTT202")]
#[test_case(CheckCode::YTT203, Path::new("YTT203.py"); "YTT203")]
#[test_case(CheckCode::YTT204, Path::new("YTT204.py"); "YTT204")]
#[test_case(CheckCode::YTT301, Path::new("YTT301.py"); "YTT301")]
#[test_case(CheckCode::YTT302, Path::new("YTT302.py"); "YTT302")]
#[test_case(CheckCode::YTT303, Path::new("YTT303.py"); "YTT303")]
fn checks(check_code: CheckCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", check_code.as_ref(), path.to_string_lossy());
let mut checks = test_path(

View File

@@ -3,14 +3,14 @@ use std::fs;
use std::path::Path;
use anyhow::Result;
use nohash_hasher::IntMap;
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_parser::lexer::{LexResult, Tok};
use crate::checks::{Check, CheckCode};
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"(?i)(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
Regex::new(r"(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
.expect("Invalid regex")
});
static SPLIT_COMMA_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"[,\s]").expect("Invalid regex"));
@@ -43,30 +43,21 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
}
}
pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
let mut noqa_line_for: Vec<usize> = vec![];
for (start, tok, end) in lxr.iter().flatten() {
if matches!(tok, Tok::EndOfFile) {
break;
}
// For multi-line strings, we expect `noqa` directives on the last line of the
// string. By definition, we can't have multiple multi-line strings on
// the same line, so we don't need to verify that we haven't already
// traversed past the current line.
if matches!(tok, Tok::String { .. }) && end.row() > start.row() {
for i in (noqa_line_for.len())..(start.row() - 1) {
noqa_line_for.push(i + 1);
}
noqa_line_for.extend(vec![end.row(); (end.row() + 1) - start.row()]);
}
}
noqa_line_for
/// Add `# noqa` directives to `contents` for every check in `checks`, and
/// write the rewritten source back to `path`.
///
/// `noqa_line_for` maps a line number to the line on which its `noqa`
/// directive should be placed (used for multi-line strings, where the
/// directive belongs on the string's last line); lines absent from the map
/// take the directive on the line itself.
///
/// Returns the number of `noqa` directives added.
pub fn add_noqa(
checks: &[Check],
contents: &str,
noqa_line_for: &IntMap<usize, usize>,
path: &Path,
) -> Result<usize> {
// Compute the rewritten source first, then persist it in a single write.
let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
fs::write(path, output)?;
Ok(count)
}
fn add_noqa_inner(
checks: &[Check],
contents: &str,
noqa_line_for: &[usize],
noqa_line_for: &IntMap<usize, usize>,
) -> Result<(usize, String)> {
let lines: Vec<&str> = contents.lines().collect();
let mut matches_by_line: BTreeMap<usize, BTreeSet<&CheckCode>> = BTreeMap::new();
@@ -82,7 +73,7 @@ fn add_noqa_inner(
// If there are newlines at the end of the file, they won't be represented in
// `noqa_line_for`, so fallback to the current line.
let noqa_lineno = noqa_line_for
.get(lineno)
.get(&lineno)
.map(|lineno| lineno - 1)
.unwrap_or(lineno);
@@ -120,108 +111,20 @@ fn add_noqa_inner(
Ok((count, output))
}
pub fn add_noqa(
checks: &[Check],
contents: &str,
noqa_line_for: &[usize],
path: &Path,
) -> Result<usize> {
let (count, output) = add_noqa_inner(checks, contents, noqa_line_for)?;
fs::write(path, output)?;
Ok(count)
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use rustpython_parser::ast::Location;
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::noqa::{add_noqa_inner, extract_noqa_line_for};
#[test]
fn extraction() -> Result<()> {
let empty: Vec<usize> = Default::default();
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"
x = 1
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = 2
z = x + 1
",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = '''abc
def
ghi
'''
y = 2
z = x + 1",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), vec![4, 4, 4, 4]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = '''abc
def
ghi
'''
z = 2",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 5, 5, 5, 5]);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
"x = 1
y = '''abc
def
ghi
'''",
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), vec![1, 5, 5, 5, 5]);
Ok(())
}
use crate::noqa::add_noqa_inner;
#[test]
fn modification() -> Result<()> {
let checks = vec![];
let contents = "x = 1";
let noqa_line_for = vec![1];
let noqa_line_for = Default::default();
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 0);
assert_eq!(output.trim(), contents.trim());
@@ -234,7 +137,7 @@ ghi
},
)];
let contents = "x = 1";
let noqa_line_for = vec![1];
let noqa_line_for = Default::default();
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: F841".trim());
@@ -256,7 +159,7 @@ ghi
),
];
let contents = "x = 1 # noqa: E741";
let noqa_line_for = vec![1];
let noqa_line_for = Default::default();
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());
@@ -278,7 +181,7 @@ ghi
),
];
let contents = "x = 1 # noqa";
let noqa_line_for = vec![1];
let noqa_line_for = Default::default();
let (count, output) = add_noqa_inner(&checks, contents, &noqa_line_for)?;
assert_eq!(count, 1);
assert_eq!(output.trim(), "x = 1 # noqa: E741, F841".trim());

View File

@@ -5,6 +5,7 @@ use crate::checks::{Check, CheckKind};
use crate::pep8_naming::helpers;
use crate::pep8_naming::helpers::FunctionType;
use crate::pep8_naming::settings::Settings;
use crate::python::string;
/// N801
pub fn invalid_class_name(class_def: &Stmt, name: &str) -> Option<Check> {
@@ -133,7 +134,7 @@ pub fn constant_imported_as_non_constant(
name: &str,
asname: &str,
) -> Option<Check> {
if helpers::is_upper(name) && !helpers::is_upper(asname) {
if string::is_upper(name) && !string::is_upper(asname) {
return Some(Check::new(
CheckKind::ConstantImportedAsNonConstant(name.to_string(), asname.to_string()),
Range::from_located(import_from),
@@ -148,7 +149,7 @@ pub fn lowercase_imported_as_non_lowercase(
name: &str,
asname: &str,
) -> Option<Check> {
if !helpers::is_upper(name) && helpers::is_lower(name) && asname.to_lowercase() != asname {
if !string::is_upper(name) && string::is_lower(name) && asname.to_lowercase() != asname {
return Some(Check::new(
CheckKind::LowercaseImportedAsNonLowercase(name.to_string(), asname.to_string()),
Range::from_located(import_from),
@@ -163,7 +164,7 @@ pub fn camelcase_imported_as_lowercase(
name: &str,
asname: &str,
) -> Option<Check> {
if helpers::is_camelcase(name) && helpers::is_lower(asname) {
if helpers::is_camelcase(name) && string::is_lower(asname) {
return Some(Check::new(
CheckKind::CamelcaseImportedAsLowercase(name.to_string(), asname.to_string()),
Range::from_located(import_from),
@@ -179,8 +180,8 @@ pub fn camelcase_imported_as_constant(
asname: &str,
) -> Option<Check> {
if helpers::is_camelcase(name)
&& !helpers::is_lower(asname)
&& helpers::is_upper(asname)
&& !string::is_lower(asname)
&& string::is_upper(asname)
&& !helpers::is_acronym(name, asname)
{
return Some(Check::new(
@@ -230,8 +231,8 @@ pub fn camelcase_imported_as_acronym(
asname: &str,
) -> Option<Check> {
if helpers::is_camelcase(name)
&& !helpers::is_lower(asname)
&& helpers::is_upper(asname)
&& !string::is_lower(asname)
&& string::is_upper(asname)
&& helpers::is_acronym(name, asname)
{
return Some(Check::new(

View File

@@ -4,6 +4,7 @@ use rustpython_ast::{Expr, ExprKind};
use crate::ast::helpers::match_name_or_attr;
use crate::ast::types::{Scope, ScopeKind};
use crate::pep8_naming::settings::Settings;
use crate::python::string::{is_lower, is_upper};
const CLASS_METHODS: [&str; 3] = ["__new__", "__init_subclass__", "__class_getitem__"];
const METACLASS_BASES: [&str; 2] = ["type", "ABCMeta"];
@@ -59,30 +60,6 @@ pub fn function_type(
}
}
/// Return `true` if `s` contains no uppercase characters and at least one
/// lowercase character (uncased characters such as `_` and digits are
/// ignored, so `""` and `"_"` are not lowercase).
pub fn is_lower(s: &str) -> bool {
    s.chars().all(|c| !c.is_uppercase()) && s.chars().any(char::is_lowercase)
}
/// Return `true` if `s` contains no lowercase characters and at least one
/// uppercase character (uncased characters such as `_` and digits are
/// ignored, so `""` and `"_"` are not uppercase).
pub fn is_upper(s: &str) -> bool {
    s.chars().all(|c| !c.is_lowercase()) && s.chars().any(char::is_uppercase)
}
/// Return `true` if `name` is camel case: mixed case (neither all-lower nor
/// all-upper) with no underscores.
pub fn is_camelcase(name: &str) -> bool {
    !name.contains('_') && !is_lower(name) && !is_upper(name)
}
@@ -103,31 +80,7 @@ pub fn is_acronym(name: &str, asname: &str) -> bool {
#[cfg(test)]
mod tests {
use crate::pep8_naming::helpers::{
is_acronym, is_camelcase, is_lower, is_mixed_case, is_upper,
};
#[test]
fn test_is_lower() -> () {
assert!(is_lower("abc"));
assert!(is_lower("a_b_c"));
assert!(is_lower("a2c"));
assert!(!is_lower("aBc"));
assert!(!is_lower("ABC"));
assert!(!is_lower(""));
assert!(!is_lower("_"));
}
#[test]
fn test_is_upper() -> () {
assert!(is_upper("ABC"));
assert!(is_upper("A_B_C"));
assert!(is_upper("A2C"));
assert!(!is_upper("aBc"));
assert!(!is_upper("abc"));
assert!(!is_upper(""));
assert!(!is_upper("_"));
}
use crate::pep8_naming::helpers::{is_acronym, is_camelcase, is_mixed_case};
#[test]
fn test_is_camelcase() -> () {

View File

@@ -1,5 +1,6 @@
pub mod builtins;
pub mod future;
pub mod keyword;
pub mod string;
pub mod sys;
pub mod typing;

50
src/python/string.rs Normal file
View File

@@ -0,0 +1,50 @@
/// Return `true` if `s` contains no uppercase characters and at least one
/// lowercase character (uncased characters such as `_` and digits are
/// ignored, so `""` and `"_"` are not lowercase).
pub fn is_lower(s: &str) -> bool {
    s.chars().all(|c| !c.is_uppercase()) && s.chars().any(char::is_lowercase)
}
/// Return `true` if `s` contains no lowercase characters and at least one
/// uppercase character (uncased characters such as `_` and digits are
/// ignored, so `""` and `"_"` are not uppercase).
pub fn is_upper(s: &str) -> bool {
    s.chars().all(|c| !c.is_lowercase()) && s.chars().any(char::is_uppercase)
}
#[cfg(test)]
mod tests {
    use crate::python::string::{is_lower, is_upper};

    // Note: test functions previously declared an explicit `-> ()` return
    // type, which is redundant and flagged by clippy's `unused_unit` lint;
    // the unit return is now implicit.

    #[test]
    fn test_is_lower() {
        assert!(is_lower("abc"));
        assert!(is_lower("a_b_c"));
        assert!(is_lower("a2c"));
        assert!(!is_lower("aBc"));
        assert!(!is_lower("ABC"));
        assert!(!is_lower(""));
        assert!(!is_lower("_"));
    }

    #[test]
    fn test_is_upper() {
        assert!(is_upper("ABC"));
        assert!(is_upper("A_B_C"));
        assert!(is_upper("A2C"));
        assert!(!is_upper("aBc"));
        assert!(!is_upper("abc"));
        assert!(!is_upper(""));
        assert!(!is_upper("_"));
    }
}

View File

@@ -4,34 +4,87 @@ expression: checks
---
- kind: GetAttrWithConstant
location:
row: 17
row: 18
column: 0
end_location:
row: 17
row: 18
column: 19
fix: ~
fix:
patch:
content: foo.bar
location:
row: 18
column: 0
end_location:
row: 18
column: 19
applied: false
- kind: GetAttrWithConstant
location:
row: 18
row: 19
column: 0
end_location:
row: 18
row: 19
column: 23
fix: ~
fix:
patch:
content: foo._123abc
location:
row: 19
column: 0
end_location:
row: 19
column: 23
applied: false
- kind: GetAttrWithConstant
location:
row: 19
row: 20
column: 0
end_location:
row: 19
row: 20
column: 22
fix: ~
fix:
patch:
content: foo.abc123
location:
row: 20
column: 0
end_location:
row: 20
column: 22
applied: false
- kind: GetAttrWithConstant
location:
row: 45
row: 21
column: 0
end_location:
row: 21
column: 23
fix:
patch:
content: foo.abc123
location:
row: 21
column: 0
end_location:
row: 21
column: 23
applied: false
- kind: GetAttrWithConstant
location:
row: 22
column: 14
end_location:
row: 45
column: 37
fix: ~
row: 22
column: 31
fix:
patch:
content: x.bar
location:
row: 22
column: 14
end_location:
row: 22
column: 31
applied: false

View File

@@ -0,0 +1,37 @@
---
source: src/linter.rs
expression: checks
---
- kind: SetAttrWithConstant
location:
row: 33
column: 0
end_location:
row: 33
column: 25
fix: ~
- kind: SetAttrWithConstant
location:
row: 34
column: 0
end_location:
row: 34
column: 29
fix: ~
- kind: SetAttrWithConstant
location:
row: 35
column: 0
end_location:
row: 35
column: 28
fix: ~
- kind: SetAttrWithConstant
location:
row: 36
column: 0
end_location:
row: 36
column: 29
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionSlice3Referenced
location:
row: 6
column: 6
end_location:
row: 6
column: 17
fix: ~
- kind: SysVersionSlice3Referenced
location:
row: 7
column: 6
end_location:
row: 7
column: 13
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersion2Referenced
location:
row: 4
column: 11
end_location:
row: 4
column: 22
fix: ~
- kind: SysVersion2Referenced
location:
row: 5
column: 11
end_location:
row: 5
column: 18
fix: ~

View File

@@ -0,0 +1,45 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionCmpStr3
location:
row: 4
column: 0
end_location:
row: 4
column: 7
fix: ~
- kind: SysVersionCmpStr3
location:
row: 5
column: 0
end_location:
row: 5
column: 11
fix: ~
- kind: SysVersionCmpStr3
location:
row: 6
column: 0
end_location:
row: 6
column: 11
fix: ~
- kind: SysVersionCmpStr3
location:
row: 7
column: 0
end_location:
row: 7
column: 11
fix: ~
- kind: SysVersionCmpStr3
location:
row: 8
column: 0
end_location:
row: 8
column: 11
fix: ~

View File

@@ -0,0 +1,37 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionInfo0Eq3Referenced
location:
row: 7
column: 6
end_location:
row: 7
column: 25
fix: ~
- kind: SysVersionInfo0Eq3Referenced
location:
row: 8
column: 6
end_location:
row: 8
column: 21
fix: ~
- kind: SysVersionInfo0Eq3Referenced
location:
row: 9
column: 6
end_location:
row: 9
column: 25
fix: ~
- kind: SysVersionInfo0Eq3Referenced
location:
row: 10
column: 6
end_location:
row: 10
column: 21
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SixPY3Referenced
location:
row: 4
column: 3
end_location:
row: 4
column: 10
fix: ~
- kind: SixPY3Referenced
location:
row: 6
column: 3
end_location:
row: 6
column: 6
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionInfo1CmpInt
location:
row: 4
column: 0
end_location:
row: 4
column: 19
fix: ~
- kind: SysVersionInfo1CmpInt
location:
row: 5
column: 0
end_location:
row: 5
column: 15
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionInfoMinorCmpInt
location:
row: 4
column: 0
end_location:
row: 4
column: 22
fix: ~
- kind: SysVersionInfoMinorCmpInt
location:
row: 5
column: 0
end_location:
row: 5
column: 18
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersion0Referenced
location:
row: 4
column: 11
end_location:
row: 4
column: 22
fix: ~
- kind: SysVersion0Referenced
location:
row: 5
column: 11
end_location:
row: 5
column: 18
fix: ~

View File

@@ -0,0 +1,45 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionCmpStr10
location:
row: 4
column: 0
end_location:
row: 4
column: 7
fix: ~
- kind: SysVersionCmpStr10
location:
row: 5
column: 0
end_location:
row: 5
column: 11
fix: ~
- kind: SysVersionCmpStr10
location:
row: 6
column: 0
end_location:
row: 6
column: 11
fix: ~
- kind: SysVersionCmpStr10
location:
row: 7
column: 0
end_location:
row: 7
column: 11
fix: ~
- kind: SysVersionCmpStr10
location:
row: 8
column: 0
end_location:
row: 8
column: 11
fix: ~

View File

@@ -0,0 +1,21 @@
---
source: src/linter.rs
expression: checks
---
- kind: SysVersionSlice1Referenced
location:
row: 4
column: 6
end_location:
row: 4
column: 17
fix: ~
- kind: SysVersionSlice1Referenced
location:
row: 5
column: 6
end_location:
row: 5
column: 13
fix: ~

View File

@@ -17,7 +17,7 @@ impl<'a> SourceCodeLocator<'a> {
pub fn new(contents: &'a str) -> Self {
SourceCodeLocator {
contents,
rope: OnceCell::new(),
rope: Default::default(),
}
}