Compare commits

..

30 Commits

Author SHA1 Message Date
Charlie Marsh
5a3f06bab1 Bump version to 0.0.90 2022-10-29 18:34:38 -04:00
Charlie Marsh
db59d5b558 Use a single SourceCodeLocator everywhere (#510) 2022-10-29 18:23:24 -04:00
Charlie Marsh
2fcbf3ab62 Simplify SourceCodeLocator offset computation (#509) 2022-10-29 18:13:42 -04:00
Anders Kaseorg
fa9b10be72 Remove leading space from C416 message (#508) 2022-10-29 17:40:50 -04:00
Charlie Marsh
c495cef529 Move pyproject.toml logging to debug (#506) 2022-10-29 17:07:46 -04:00
Charlie Marsh
c0c8dff6ce Implement configuration options for pep8-naming (#505) 2022-10-29 17:00:30 -04:00
Charlie Marsh
80b00cc89f Add error code categories to table of contents (#504) 2022-10-29 16:39:55 -04:00
Charlie Marsh
934db3d179 Bump version to 0.0.89 2022-10-29 15:39:17 -04:00
Charlie Marsh
6a040a0405 Update checks_gen.rs 2022-10-29 15:39:02 -04:00
Harutaka Kawamura
2821ef0f69 Implement B013 (#503) 2022-10-29 15:36:29 -04:00
Harutaka Kawamura
343d931ddb Ignore unittest methods and functions in N802 (#502) 2022-10-29 15:36:09 -04:00
Harutaka Kawamura
3fc257f71b Implement N806, 815, 816, 818 (#501) 2022-10-29 15:35:56 -04:00
Charlie Marsh
6dbb0a17e9 Update README to reflect selection groups 2022-10-28 19:15:46 -04:00
Charlie Marsh
ae5ad6a4ac Bump version to 0.0.88 2022-10-28 19:11:04 -04:00
Charlie Marsh
549af6c584 Regenerate CheckCodePrefix 2022-10-28 19:10:45 -04:00
Charlie Marsh
9a799eb4e6 Bump version to 0.0.87 2022-10-28 19:00:03 -04:00
Anders Kaseorg
f260b873b6 Fix “not a char boundary” error with Unicode in extract_quote (#497) 2022-10-28 18:59:12 -04:00
Charlie Marsh
782a90b584 Add tests for resolve_codes (#498) 2022-10-28 18:58:46 -04:00
Charlie Marsh
7df903dc4d Move around and rename some of the Settings structs (#496) 2022-10-28 18:46:54 -04:00
Charlie Marsh
8fc5e91ec7 Enable prefix-based check code selection (#493) 2022-10-28 18:19:57 -04:00
Charlie Marsh
9ca1a2c273 Fix failing pyproject.toml test 2022-10-28 18:13:07 -04:00
Charlie Marsh
86265c1d7c Implement the flake8-quotes plugin (#495) 2022-10-28 17:52:11 -04:00
Charlie Marsh
a057c9a323 Move invalid_escape_sequence into pycodestyle (#494) 2022-10-28 12:20:11 -04:00
Trevor Gross
2e63bb6dcb Update hook id in README and in .pre-commit-config.yaml (#492) 2022-10-27 17:32:52 -04:00
Charlie Marsh
1b5db80b32 Update pre-commit invocation in README.md 2022-10-27 17:19:22 -04:00
Charlie Marsh
3f20cea402 Bump version to 0.0.86 2022-10-27 13:09:57 -04:00
Charlie Marsh
389fe1ff64 Avoid auto-fixing unused imports in __init__.py (#489) 2022-10-27 13:08:04 -04:00
Charlie Marsh
bad2d7ba85 Add example of per-file ignores to the README (#488) 2022-10-27 12:58:52 -04:00
Charlie Marsh
416aa298ac Allow whitespace in per-file ignore patterns (#487) 2022-10-27 12:55:28 -04:00
Charlie Marsh
a535b1adbf Replace compliance comments with check codes (#485) 2022-10-27 09:32:18 -04:00
108 changed files with 4286 additions and 895 deletions

View File

@@ -1,8 +1,8 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.40
rev: v0.0.90
hooks:
- id: lint
- id: ruff
- repo: https://github.com/abravalheri/validate-pyproject
rev: v0.10.1

12
Cargo.lock generated
View File

@@ -466,6 +466,15 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "codegen"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d"
dependencies = [
"indexmap",
]
[[package]]
name = "codespan-reporting"
version = "0.11.1"
@@ -2045,7 +2054,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.85"
version = "0.0.90"
dependencies = [
"anyhow",
"assert_cmd",
@@ -2054,6 +2063,7 @@ dependencies = [
"chrono",
"clap",
"clearscreen",
"codegen",
"colored",
"common-path",
"dirs 4.0.0",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.0.85"
version = "0.0.90"
edition = "2021"
[lib]
@@ -50,6 +50,7 @@ getrandom = { version = "0.2.7", features = ["js"] }
[dev-dependencies]
assert_cmd = { version = "2.0.4" }
codegen = { version = "0.2.0" }
insta = { version = "1.19.1", features = ["yaml"] }
test-case = { version = "2.2.2" }

185
README.md
View File

@@ -38,13 +38,25 @@ Read the [launch blog post](https://notes.crmarsh.com/python-tooling-could-be-mu
1. [Installation and Usage](#installation-and-usage)
2. [Configuration](#configuration)
3. [Supported Rules](#supported-rules)
4. [Editor Integrations](#editor-integrations)
5. [FAQ](#faq)
6. [Development](#development)
7. [Releases](#releases)
8. [Benchmarks](#benchmarks)
9. [License](#license)
10. [Contributing](#contributing)
1. [Pyflakes](#pyflakes)
2. [pycodestyle (error)](#pycodestyle-error)
3. [pycodestyle (warning)](#pycodestyle-warning)
4. [pydocstyle](#pydocstyle)
5. [pyupgrade](#pyupgrade)
6. [pep8-naming](#pep8-naming)
7. [flake8-comprehensions](#flake8-comprehensions)
8. [flake8-bugbear](#flake8-bugbear)
9. [flake8-builtins](#flake8-builtins)
10. [flake8-print](#flake8-print)
11. [flake8-quotes](#flake8-quotes)
12. [Meta rules](#meta-rules)
5. [Editor Integrations](#editor-integrations)
6. [FAQ](#faq)
7. [Development](#development)
8. [Releases](#releases)
9. [Benchmarks](#benchmarks)
10. [License](#license)
11. [Contributing](#contributing)
## Installation and Usage
@@ -77,11 +89,14 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.85
rev: v0.0.90
hooks:
- id: lint
- id: ruff
```
<!-- TODO(charlie): Remove this message a few versions after v0.0.86. -->
_Note: prior to `v0.0.86`, `ruff-pre-commit` used `lint` (rather than `ruff`) as the hook ID._
## Configuration
Ruff is configurable both via `pyproject.toml` and the command line.
@@ -91,13 +106,25 @@ For example, you could configure Ruff to only enforce a subset of rules with:
```toml
[tool.ruff]
line-length = 88
select = [
"F401",
"F403",
select = ["E", "F"]
ignore = ["E501"]
per-file-ignores = [
"__init__.py:F401",
"path/to/file.py:F401"
]
```
Alternatively, on the command-line:
Plugin configurations should be expressed as subsections, e.g.:
```toml
[tool.ruff]
line-length = 88
[tool.ruff.flake8-quotes]
docstring-quotes = "double"
```
Alternatively, common configuration settings can be provided via the command-line:
```shell
ruff path/to/code/ --select F401 --select F403
@@ -219,8 +246,7 @@ add `noqa` directives to all failing lines, with the appropriate error codes.**
## Supported Rules
By default, Ruff enables all `E`, `W`, and `F` error codes, which correspond to those built-in to
Flake8.
By default, Ruff enables all `E` and `F` error codes, which correspond to those built-in to Flake8.
The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` command-line option.
@@ -354,12 +380,16 @@ The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` com
| N803 | InvalidArgumentName | Argument name `...` should be lowercase | |
| N804 | InvalidFirstArgumentNameForClassMethod | First argument of a class method should be named `cls` | |
| N805 | InvalidFirstArgumentNameForMethod | First argument of a method should be named `self` | |
| N806 | NonLowercaseVariableInFunction | Variable `...` in function should be lowercase | |
| N807 | DunderFunctionName | Function name should not start and end with `__` | |
| N811 | ConstantImportedAsNonConstant | Constant `...` imported as non-constant `...` | |
| N812 | LowercaseImportedAsNonLowercase | Lowercase `...` imported as non-lowercase `...` | |
| N813 | CamelcaseImportedAsLowercase | Camelcase `...` imported as lowercase `...` | |
| N814 | CamelcaseImportedAsConstant | Camelcase `...` imported as constant `...` | |
| N815 | MixedCaseVariableInClassScope | Variable `mixedCase` in class scope should not be mixedCase | |
| N816 | MixedCaseVariableInGlobalScope | Variable `mixedCase` in global scope should not be mixedCase | |
| N817 | CamelcaseImportedAsAcronym | Camelcase `...` imported as acronym `...` | |
| N818 | ErrorSuffixOnExceptionName | Exception name `...` should be named with an Error suffix | |
### flake8-comprehensions
@@ -379,7 +409,7 @@ The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` com
| C413 | UnnecessaryCallAroundSorted | Unnecessary `(list\|reversed)` call around `sorted()` | |
| C414 | UnnecessaryDoubleCastOrProcess | Unnecessary `(list\|reversed\|set\|sorted\|tuple)` call within `(list\|set\|sorted\|tuple)()` | |
| C415 | UnnecessarySubscriptReversal | Unnecessary subscript reversal of iterable within `(reversed\|set\|sorted)()` | |
| C416 | UnnecessaryComprehension | Unnecessary `(list\|set)` comprehension (rewrite using `(list\|set)()`) | |
| C416 | UnnecessaryComprehension | Unnecessary `(list\|set)` comprehension (rewrite using `(list\|set)()`) | |
| C417 | UnnecessaryMap | Unnecessary `map` usage (rewrite using a `(list\|set\|dict)` comprehension) | |
### flake8-bugbear
@@ -389,6 +419,7 @@ The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` com
| B002 | UnaryPrefixIncrement | Python does not support the unary prefix increment. | |
| B007 | UnusedLoopControlVariable | Loop control variable `i` not used within the loop body. | 🛠 |
| B011 | DoNotAssertFalse | Do not `assert False` (`python -O` removes these calls), raise `AssertionError()` | 🛠 |
| B013 | RedundantTupleInExceptionHandler | A length-one tuple literal is redundant. Write `except ValueError:` instead of `except (ValueError,):`. | |
| B014 | DuplicateHandlerException | Exception handler with duplicate exception: `ValueError` | 🛠 |
| B017 | NoAssertRaisesException | `assertRaises(Exception):` should be considered evil. | |
| B025 | DuplicateTryBlockException | try-except block with duplicate exception `Exception` | |
@@ -408,6 +439,15 @@ The 🛠 emoji indicates that a rule is automatically fixable by the `--fix` com
| T201 | PrintFound | `print` found | 🛠 |
| T203 | PPrintFound | `pprint` found | 🛠 |
### flake8-quotes
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| Q000 | BadQuotesInlineString | Single quotes found but double quotes preferred | |
| Q001 | BadQuotesMultilineString | Single quote multiline found but double quotes preferred | |
| Q002 | BadQuotesDocstring | Single quote docstring found but double quotes preferred | |
| Q003 | AvoidQuoteEscape | Change outer quotes to avoid escaping inner quotes | |
### Meta rules
| Code | Name | Message | Fix |
@@ -474,13 +514,15 @@ Ruff re-implements some of the most popular Flake8 plugins and related code qual
including:
- [`pydocstyle`](https://pypi.org/project/pydocstyle/)
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`yesqa`](https://github.com/asottile/yesqa)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (9/32)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (10/32)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (8/34)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
@@ -495,10 +537,12 @@ Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis F
Today, Ruff can be used to replace Flake8 when used with any of the following plugins:
- [`pep8-naming`](https://pypi.org/project/pep8-naming/)
- [`flake8-docstrings`](https://pypi.org/project/flake8-docstrings/)
- [`flake8-builtins`](https://pypi.org/project/flake8-builtins/)
- [`flake8-super`](https://pypi.org/project/flake8-super/)
- [`flake8-print`](https://pypi.org/project/flake8-print/)
- [`flake8-quotes`](https://pypi.org/project/flake8-quotes/)
- [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/)
- [`flake8-bugbear`](https://pypi.org/project/flake8-bugbear/) (9/32)
@@ -529,35 +573,55 @@ Yes! To enable a specific docstring convention, start by enabling all `pydocstyl
then selectively disabling based on your [preferred convention](https://www.pydocstyle.org/en/latest/error_codes.html#default-conventions).
For example, if you're coming from `flake8-docstrings`, the following configuration is equivalent to
`--docstring-convention numpy`:
`--docstring-convention=numpy`:
```toml
[tool.ruff]
extend-select = [
"D100",
"D101",
"D102",
"D103",
"D104",
"D105",
"D106",
"D200",
"D201",
"D202",
extend-select = ["D"]
extend-ignore = [
"D107",
"D203",
"D212",
"D213",
"D402",
"D413",
"D415",
"D416",
"D417",
]
```
Similarly, the following is equivalent to `--docstring-convention=google`:
```toml
[tool.ruff]
extend-select = ["D"]
extend-ignore = [
"D203",
"D204",
"D205",
"D206",
"D207",
"D208",
"D209",
"D210",
"D211",
"D213",
"D215",
"D400",
"D404",
"D406",
"D407",
"D408",
"D409",
"D413",
]
```
Similarly, the following is equivalent to `--docstring-convention=pep8`:
```toml
[tool.ruff]
extend-select = ["D"]
extend-ignore = [
"D203",
"D212",
"D213",
"D214",
"D215",
"D300",
"D400",
"D402",
"D403",
"D404",
"D405",
"D406",
@@ -566,51 +630,10 @@ extend-select = [
"D409",
"D410",
"D411",
"D412",
"D413",
"D418",
"D419",
]
```
Similarly, the following is equivalent to `--docstring-convention google`:
```toml
[tool.ruff]
extend-select = [
"D100",
"D101",
"D102",
"D103",
"D104",
"D105",
"D106",
"D107",
"D200",
"D201",
"D202",
"D205",
"D206",
"D207",
"D208",
"D209",
"D210",
"D211",
"D212",
"D214",
"D300",
"D402",
"D403",
"D405",
"D410",
"D411",
"D412",
"D414",
"D415",
"D416",
"D417",
"D418",
"D419",
]
```

View File

@@ -0,0 +1,105 @@
//! Generate the CheckCodePrefix enum.
use std::collections::{BTreeMap, BTreeSet};
use codegen::{Scope, Type, Variant};
use itertools::Itertools;
use strum::IntoEnumIterator;
use ruff::checks::CheckCode;
/// Generate the `CheckCodePrefix` source: an enum with one variant per
/// distinct check-code prefix, plus `codes()` and `specificity()` lookup
/// impls, written to stdout for inclusion in the crate.
fn main() {
    // Build up a map from prefix to matching CheckCodes. BTree collections
    // keep the generated variants and match arms in a stable, sorted order.
    let mut prefix_to_codes: BTreeMap<String, BTreeSet<CheckCode>> = Default::default();
    for check_code in CheckCode::iter() {
        let as_ref = check_code.as_ref().to_string();
        // Register the code under every one of its prefixes ("E501" ->
        // "E", "E5", "E50", "E501").
        for i in 1..=as_ref.len() {
            let prefix = as_ref[..i].to_string();
            // `or_default()` replaces the redundant
            // `or_insert_with(|| Default::default())` closure.
            prefix_to_codes
                .entry(prefix)
                .or_default()
                .insert(check_code.clone());
        }
    }

    let mut scope = Scope::new();

    // Create the `CheckCodePrefix` definition.
    let mut gen = scope
        .new_enum("CheckCodePrefix")
        .vis("pub")
        .derive("EnumString")
        .derive("Debug")
        .derive("PartialEq")
        .derive("Eq")
        .derive("Clone")
        .derive("Serialize")
        .derive("Deserialize");
    for prefix in prefix_to_codes.keys() {
        gen = gen.push_variant(Variant::new(prefix.to_string()));
    }

    // Create the `PrefixSpecificity` definition. Variant order matters:
    // the derived `Ord` ranks Category < Hundreds < Tens < Explicit.
    scope
        .new_enum("PrefixSpecificity")
        .vis("pub")
        .derive("PartialEq")
        .derive("Eq")
        .derive("PartialOrd")
        .derive("Ord")
        .push_variant(Variant::new("Category"))
        .push_variant(Variant::new("Hundreds"))
        .push_variant(Variant::new("Tens"))
        .push_variant(Variant::new("Explicit"));

    // Create the `match` statement, to map from definition to relevant codes.
    let mut gen = scope
        .new_impl("CheckCodePrefix")
        .new_fn("codes")
        .arg_ref_self()
        .ret(Type::new("Vec<CheckCode>"))
        .vis("pub")
        .line("match self {");
    for (prefix, codes) in &prefix_to_codes {
        gen = gen.line(format!(
            "CheckCodePrefix::{prefix} => vec![{}],",
            codes
                .iter()
                .map(|code| format!("CheckCode::{}", code.as_ref()))
                .join(", ")
        ));
    }
    gen.line("}");

    // Create the `match` statement, to map from definition to specificity.
    let mut gen = scope
        .new_impl("CheckCodePrefix")
        .new_fn("specificity")
        .arg_ref_self()
        .ret(Type::new("PrefixSpecificity"))
        .vis("pub")
        .line("match self {");
    for prefix in prefix_to_codes.keys() {
        // Prefix length maps directly onto specificity ("E" -> Category,
        // "E5" -> Hundreds, "E50" -> Tens, "E501" -> Explicit). Any other
        // length means the code-naming scheme changed; fail loudly.
        let specificity = match prefix.len() {
            4 => "Explicit",
            3 => "Tens",
            2 => "Hundreds",
            1 => "Category",
            _ => panic!("Invalid prefix: {}", prefix),
        };
        gen = gen.line(format!(
            "CheckCodePrefix::{prefix} => PrefixSpecificity::{},",
            specificity
        ));
    }
    gen.line("}");

    // Emit a provenance header, the imports the generated code needs, and
    // then the generated module itself.
    println!("//! File automatically generated by examples/generate_check_code_prefix.rs.");
    println!();
    println!("use serde::{{Deserialize, Serialize}};");
    println!("use strum_macros::EnumString;");
    println!();
    println!("use crate::checks::CheckCode;");
    println!();
    println!("{}", scope.to_string());
}

8
resources/test/fixtures/B013.py vendored Normal file
View File

@@ -0,0 +1,8 @@
# Fixture for flake8-bugbear B013 (added in this changeset): a length-one
# tuple in an `except` clause is redundant; only the first handler should fire.
# NOTE(review): leading indentation appears lost in this capture -- each
# `pass` belongs inside its clause.
try:
pass
except (ValueError,):
pass
except AttributeError:
pass
except (ImportError, TypeError):
pass

View File

@@ -1,3 +1,6 @@
import unittest
def Bad():
pass
@@ -24,3 +27,15 @@ def _good():
def good_func():
pass
def tearDownModule():
pass
class Test(unittest.TestCase):
def tearDown(self):
return super().tearDown()
def testTest(self):
assert True

4
resources/test/fixtures/N806.py vendored Normal file
View File

@@ -0,0 +1,4 @@
# Fixture for pep8-naming N806: variables assigned in a function body should
# be lowercase; `Camel` and `CONSTANT` are the intended violations.
# NOTE(review): indentation lost in this capture -- the assignments belong
# inside `f`'s body.
def f():
lower = 0
Camel = 0
CONSTANT = 0

6
resources/test/fixtures/N815.py vendored Normal file
View File

@@ -0,0 +1,6 @@
# Fixture for pep8-naming N815: mixedCase variables in class scope.
# NOTE(review): indentation lost in this capture -- the assignments belong
# inside class `C`.
class C:
lower = 0
CONSTANT = 0
mixedCase = 0
_mixedCase = 0
mixed_Case = 0

5
resources/test/fixtures/N816.py vendored Normal file
View File

@@ -0,0 +1,5 @@
# Fixture for pep8-naming N816: mixedCase variables in global (module) scope.
lower = 0
CONSTANT = 0
mixedCase = 0
_mixedCase = 0
mixed_Case = 0

10
resources/test/fixtures/N818.py vendored Normal file
View File

@@ -0,0 +1,10 @@
# Fixture for pep8-naming N818: exception names should carry an "Error"
# suffix; `C` is the intended violation.
# NOTE(review): `pass` bodies lost their indentation in this capture.
class Error(Exception):
pass
class AnotherError(Exception):
pass
class C(Exception):
pass

View File

@@ -1,3 +1,5 @@
import os
print(__path__)
__all__ = ["a", "b", "c"]

View File

@@ -0,0 +1,38 @@
"""
Double quotes multiline module docstring
"""
"""
this is not a docstring
"""
l = []
class Cls:
"""
Double quotes multiline class docstring
"""
"""
this is not a docstring
"""
# The colon in the list indexing below is an edge case for the docstring scanner
def f(self, bar="""
definitely not a docstring""",
val=l[Cls():3]):
"""
Double quotes multiline function docstring
"""
some_expression = 'hello world'
"""
this is not a docstring
"""
if l:
"""
Looks like a docstring, but in reality it isn't - only modules, classes and functions
"""
pass

View File

@@ -0,0 +1,9 @@
# Presumably a flake8-quotes fixture: single-line double-quoted docstrings,
# including an inline docstring after a class header -- confirm against repo.
# NOTE(review): indentation lost in this capture.
class SingleLineDocstrings():
""" Double quotes single line class docstring """
""" Not a docstring """
def foo(self, bar="""not a docstring"""):
""" Double quotes single line method docstring"""
pass
class Nested(foo()[:]): """ inline docstring """; pass

View File

@@ -0,0 +1,22 @@
# Presumably a flake8-quotes fixture: double-quoted function docstrings vs.
# strings in default-argument position that must not be treated as
# docstrings -- confirm against repo layout.
# NOTE(review): indentation lost in this capture.
def foo():
"""function without params, single line docstring"""
""" not a docstring"""
return
def foo2():
"""
function without params, multiline docstring
"""
""" not a docstring"""
return
def fun_with_params_no_docstring(a, b="""
not a
""" """docstring"""):
pass
def fun_with_params_no_docstring2(a, b=c[foo():], c=\
""" not a docstring """):
pass

View File

@@ -0,0 +1,11 @@
"""
Double quotes multiline module docstring
"""
"""
this is not a docstring
"""
def foo():
pass
"""
this is not a docstring
"""

View File

@@ -0,0 +1,6 @@
""" Double quotes singleline module docstring """
""" this is not a docstring """
def foo():
pass
""" this is not a docstring """

View File

@@ -0,0 +1,40 @@
# Presumably a flake8-quotes docstring fixture: single-quoted docstrings at
# module/class/function level vs. look-alike strings that are NOT
# docstrings -- confirm against repo layout.
# NOTE(review): indentation lost in this capture.
'''
Single quotes multiline module docstring
'''
'''
this is not a docstring
'''
l = []
class Cls(MakeKlass('''
class params \t not a docstring
''')):
'''
Single quotes multiline class docstring
'''
'''
this is not a docstring
'''
# The colon in the list indexing below is an edge case for the docstring scanner
def f(self, bar='''
definitely not a docstring''',
val=l[Cls():3]):
'''
Single quotes multiline function docstring
'''
some_expression = 'hello world'
'''
this is not a docstring
'''
if l:
'''
Looks like a docstring, but in reality it isn't - only modules, classes and functions
'''
pass

View File

@@ -0,0 +1,9 @@
# Presumably a flake8-quotes fixture: single-line single-quoted docstrings,
# including an inline docstring after a class header -- confirm against repo.
# NOTE(review): indentation lost in this capture.
class SingleLineDocstrings():
''' Double quotes single line class docstring '''
''' Not a docstring '''
def foo(self, bar='''not a docstring'''):
''' Double quotes single line method docstring'''
pass
class Nested(foo()[:]): ''' inline docstring '''; pass

View File

@@ -0,0 +1,23 @@
# Presumably a flake8-quotes fixture: single-quoted function docstrings vs.
# strings in default-argument position that must not be treated as
# docstrings -- confirm against repo layout.
# NOTE(review): indentation lost in this capture.
def foo():
'''function without params, single line docstring'''
''' not a docstring'''
return
def foo2():
'''
function without params, multiline docstring
'''
''' not a docstring'''
return
def fun_with_params_no_docstring(a, b='''
not a
''' '''docstring'''):
pass
def fun_with_params_no_docstring2(a, b=c[foo():], c=\
''' not a docstring '''):
pass

View File

@@ -0,0 +1,11 @@
# Presumably a flake8-quotes fixture: only the first multiline string is the
# module docstring; the rest are plain expressions -- confirm against repo.
# (The docstring text itself says "Double quotes" despite single quotes --
# that is the fixture's own content, preserved verbatim.)
# NOTE(review): indentation lost in this capture.
'''
Double quotes multiline module docstring
'''
'''
this is not a docstring
'''
def foo():
pass
'''
this is not a docstring
'''

View File

@@ -0,0 +1,6 @@
# Presumably a flake8-quotes fixture: single-line module docstring vs.
# non-docstring strings -- confirm against repo layout.
# NOTE(review): indentation lost in this capture.
''' Double quotes singleline module docstring '''
''' this is not a docstring '''
def foo():
pass
''' this is not a docstring '''

View File

@@ -0,0 +1,2 @@
this_should_be_linted = "double quote string"
this_should_be_linted = u"double quote string"

View File

@@ -0,0 +1,5 @@
# Fixture for flake8-quotes Q003: the first line escapes inner quotes that
# could be avoided by switching the outer quote style; a raw string and
# strings whose inner quotes are unavoidable are fine.
this_should_raise_Q003 = 'This is a \'string\''
this_is_fine = '"This" is a \'string\''
this_is_fine = "This is a 'string'"
this_is_fine = "\"This\" is a 'string'"
this_is_fine = r'This is a \'string\''

View File

@@ -0,0 +1,9 @@
s = """ This "should"
be
"linted" """
s = ''' This "should"
"not" be
"linted" '''
s = """'This should not be linted due to having would-be quadruple end quote'"""

View File

@@ -0,0 +1 @@
# Fixture: the trailing `# noqa` directive suppresses the quote check here.
this_should_not_be_linted = "double quote string" # noqa

View File

@@ -0,0 +1,2 @@
# Presumably a flake8-quotes fixture: per the embedded text, strings whose
# outer quotes wrap the opposite inner quote style are ignored.
s = 'double "quotes" wrapped in singles are ignored'
s = "single 'quotes' wrapped in doubles are ignored"

View File

@@ -0,0 +1,2 @@
# Presumably a flake8-quotes fixture: single-quoted inline strings to be
# flagged under a double-quote preference -- confirm against repo layout.
# (The second literal's text says "double quote string" despite single
# quotes -- fixture content preserved verbatim.)
this_should_be_linted = 'single quote string'
this_should_be_linted = u'double quote string'

View File

@@ -0,0 +1,5 @@
this_should_raise_Q003 = "This is a \"string\""
this_is_fine = "'This' is a \"string\""
this_is_fine = 'This is a "string"'
this_is_fine = '\'This\' is a "string"'
this_is_fine = r"This is a \"string\""

View File

@@ -0,0 +1,9 @@
# Presumably a flake8-quotes multiline-string fixture (single-quote variant):
# per the embedded text, the single-quoted multiline should be flagged, the
# double-quoted one not -- confirm against repo layout.
s = ''' This 'should'
be
'linted' '''
s = """ This 'should'
'not' be
'linted' """
s = '''"This should not be linted due to having would-be quadruple end quote"'''

View File

@@ -0,0 +1 @@
# Fixture: the trailing `# noqa` directive suppresses the quote check here.
this_should_not_be_linted = 'single quote string' # noqa

View File

@@ -0,0 +1,2 @@
s = "single 'quotes' wrapped in doubles are ignored"
s = 'double "quotes" wrapped in singles are ignored'

View File

@@ -5,3 +5,34 @@ extend-exclude = [
"migrations",
"directory/also_excluded.py",
]
per-file-ignores = [
"__init__.py:F401",
]
[tool.ruff.flake8-quotes]
inline-quotes = "single"
multiline-quotes = "double"
docstring-quotes = "double"
avoid-escape = true
[tool.ruff.pep8-naming]
ignore-names = [
"setUp",
"tearDown",
"setUpClass",
"tearDownClass",
"setUpModule",
"tearDownModule",
"asyncSetUp",
"asyncTearDown",
"setUpTestData",
"failureException",
"longMessage",
"maxDiff",
]
classmethod-decorators = [
"classmethod",
]
staticmethod-decorators = [
"staticmethod",
]

View File

@@ -1,3 +1,4 @@
use once_cell::unsync::OnceCell;
use rustpython_parser::ast::{Constant, Expr, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::{BindingKind, Range, Scope};
@@ -120,45 +121,47 @@ pub fn is_unpacking_assignment(stmt: &Stmt) -> bool {
/// Struct used to efficiently slice source code at (row, column) Locations.
pub struct SourceCodeLocator<'a> {
content: &'a str,
offsets: Vec<Vec<usize>>,
contents: &'a str,
offsets: OnceCell<Vec<Vec<usize>>>,
}
impl<'a> SourceCodeLocator<'a> {
pub fn new(content: &'a str) -> Self {
pub fn new(contents: &'a str) -> Self {
SourceCodeLocator {
content,
offsets: Self::compute_offsets(content),
contents,
offsets: OnceCell::new(),
}
}
fn compute_offsets(content: &str) -> Vec<Vec<usize>> {
let mut offsets = vec![];
let mut offset = 0;
for line in content.lines() {
let mut newline = 0;
let mut line_offsets: Vec<usize> = vec![];
for (i, char) in line.char_indices() {
line_offsets.push(offset + i);
newline = i + char.len_utf8();
let mut offsets = vec![vec![]];
let mut line_index = 0;
for (i, char) in content.char_indices() {
offsets[line_index].push(i);
if char == '\n' {
line_index += 1;
offsets.push(vec![]);
}
line_offsets.push(offset + newline);
offsets.push(line_offsets);
offset += newline + 1;
}
offsets.push(vec![offset]);
offsets
}
fn get_or_init_offsets(&self) -> &Vec<Vec<usize>> {
self.offsets
.get_or_init(|| Self::compute_offsets(self.contents))
}
pub fn slice_source_code_at(&self, location: &Location) -> &'a str {
let offset = self.offsets[location.row() - 1][location.column() - 1];
&self.content[offset..]
let offsets = self.get_or_init_offsets();
let offset = offsets[location.row() - 1][location.column() - 1];
&self.contents[offset..]
}
pub fn slice_source_code_range(&self, range: &Range) -> &'a str {
let start = self.offsets[range.location.row() - 1][range.location.column() - 1];
let end = self.offsets[range.end_location.row() - 1][range.end_location.column() - 1];
&self.content[start..end]
let offsets = self.get_or_init_offsets();
let start = offsets[range.location.row() - 1][range.location.column() - 1];
let end = offsets[range.end_location.row() - 1][range.end_location.column() - 1];
&self.contents[start..end]
}
pub fn partition_source_code_at(
@@ -166,14 +169,15 @@ impl<'a> SourceCodeLocator<'a> {
outer: &Range,
inner: &Range,
) -> (&'a str, &'a str, &'a str) {
let outer_start = self.offsets[outer.location.row() - 1][outer.location.column() - 1];
let outer_end = self.offsets[outer.end_location.row() - 1][outer.end_location.column() - 1];
let inner_start = self.offsets[inner.location.row() - 1][inner.location.column() - 1];
let inner_end = self.offsets[inner.end_location.row() - 1][inner.end_location.column() - 1];
let offsets = self.get_or_init_offsets();
let outer_start = offsets[outer.location.row() - 1][outer.location.column() - 1];
let outer_end = offsets[outer.end_location.row() - 1][outer.end_location.column() - 1];
let inner_start = offsets[inner.location.row() - 1][inner.location.column() - 1];
let inner_end = offsets[inner.end_location.row() - 1][inner.end_location.column() - 1];
(
&self.content[outer_start..inner_start],
&self.content[inner_start..inner_end],
&self.content[inner_end..outer_end],
&self.contents[outer_start..inner_start],
&self.contents[inner_start..inner_end],
&self.contents[inner_end..outer_end],
)
}
}
@@ -184,15 +188,21 @@ mod tests {
#[test]
fn source_code_locator_init() {
let content = "x = 1\ny = 2\nz = x + y\n";
let locator = SourceCodeLocator::new(content);
let offsets = locator.get_or_init_offsets();
assert_eq!(offsets.len(), 4);
assert_eq!(offsets[0], [0, 1, 2, 3, 4, 5]);
assert_eq!(offsets[1], [6, 7, 8, 9, 10, 11]);
assert_eq!(offsets[2], [12, 13, 14, 15, 16, 17, 18, 19, 20, 21]);
assert!(offsets[3].is_empty());
let content = "# \u{4e9c}\nclass Foo:\n \"\"\".\"\"\"";
let locator = SourceCodeLocator::new(content);
assert_eq!(locator.offsets.len(), 4);
assert_eq!(locator.offsets[0], [0, 1, 2, 5]);
assert_eq!(locator.offsets[1], [6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
assert_eq!(
locator.offsets[2],
[17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]
);
assert_eq!(locator.offsets[3], [29]);
let offsets = locator.get_or_init_offsets();
assert_eq!(offsets.len(), 3);
assert_eq!(offsets[0], [0, 1, 2, 5]);
assert_eq!(offsets[1], [6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]);
assert_eq!(offsets[2], [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27]);
}
}

View File

@@ -5,7 +5,6 @@ use std::ops::Deref;
use std::path::Path;
use log::error;
use once_cell::unsync::OnceCell;
use rustpython_parser::ast::{
Arg, Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKind,
KeywordData, Operator, Stmt, StmtKind, Suite,
@@ -26,7 +25,8 @@ use crate::checks::{Check, CheckCode, CheckKind};
use crate::docstrings::definition::{Definition, DefinitionKind, Documentable};
use crate::python::builtins::{BUILTINS, MAGIC_GLOBALS};
use crate::python::future::ALL_FEATURE_NAMES;
use crate::settings::{PythonVersion, Settings};
use crate::settings::types::PythonVersion;
use crate::settings::Settings;
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
use crate::{
docstrings, flake8_bugbear, flake8_builtins, flake8_comprehensions, flake8_print, pep8_naming,
@@ -38,13 +38,11 @@ pub const GLOBAL_SCOPE_INDEX: usize = 0;
pub struct Checker<'a> {
// Input data.
path: &'a Path,
content: &'a str,
autofix: &'a fixer::Mode,
pub(crate) settings: &'a Settings,
pub(crate) locator: &'a SourceCodeLocator<'a>,
// Computed checks.
checks: Vec<Check>,
// Efficient source-code slicing.
locator: OnceCell<SourceCodeLocator<'a>>,
// Docstring tracking.
docstrings: Vec<(Definition<'a>, Visibility)>,
// Edit tracking.
@@ -78,14 +76,13 @@ impl<'a> Checker<'a> {
settings: &'a Settings,
autofix: &'a fixer::Mode,
path: &'a Path,
content: &'a str,
locator: &'a SourceCodeLocator,
) -> Checker<'a> {
Checker {
settings,
autofix,
path,
content,
locator: OnceCell::new(),
locator,
checks: Default::default(),
docstrings: Default::default(),
deletions: Default::default(),
@@ -113,12 +110,6 @@ impl<'a> Checker<'a> {
}
}
/// Get access to a lazily-initialized `SourceCodeLocator` for the file contents.
pub fn get_locator(&self) -> &SourceCodeLocator {
self.locator
.get_or_init(|| SourceCodeLocator::new(self.content))
}
/// Return `true` if a patch should be generated under the given autofix `Mode`.
pub fn patch(&self) -> bool {
self.autofix.patch()
@@ -235,7 +226,11 @@ where
}
if self.settings.enabled.contains(&CheckCode::N802) {
if let Some(check) = pep8_naming::checks::invalid_function_name(stmt, name) {
if let Some(check) = pep8_naming::checks::invalid_function_name(
stmt,
name,
&self.settings.pep8_naming,
) {
self.checks.push(check);
}
}
@@ -246,6 +241,7 @@ where
self.current_scope(),
decorator_list,
args,
&self.settings.pep8_naming,
)
{
self.checks.push(check);
@@ -257,6 +253,7 @@ where
self.current_scope(),
decorator_list,
args,
&self.settings.pep8_naming,
) {
self.checks.push(check);
}
@@ -362,6 +359,14 @@ where
}
}
if self.settings.enabled.contains(&CheckCode::N818) {
if let Some(check) =
pep8_naming::checks::error_suffix_on_exception_name(stmt, bases, name)
{
self.checks.push(check);
}
}
self.check_builtin_shadowing(
name,
self.locate_check(Range::from_located(stmt)),
@@ -705,6 +710,9 @@ where
{
flake8_bugbear::plugins::duplicate_exceptions(self, stmt, handlers);
}
if self.settings.enabled.contains(&CheckCode::B013) {
flake8_bugbear::plugins::redundant_tuple_in_exception_handler(self, handlers);
}
}
StmtKind::Assign { targets, value, .. } => {
if self.settings.enabled.contains(&CheckCode::E731) {
@@ -1798,6 +1806,30 @@ impl<'a> Checker<'a> {
}
}
if self.settings.enabled.contains(&CheckCode::N806) {
if let Some(check) =
pep8_naming::checks::non_lowercase_variable_in_function(current, expr, id)
{
self.checks.push(check);
}
}
if self.settings.enabled.contains(&CheckCode::N815) {
if let Some(check) =
pep8_naming::checks::mixed_case_variable_in_class_scope(current, expr, id)
{
self.checks.push(check);
}
}
if self.settings.enabled.contains(&CheckCode::N816) {
if let Some(check) =
pep8_naming::checks::mixed_case_variable_in_global_scope(current, expr, id)
{
self.checks.push(check);
}
}
if matches!(parent.node, StmtKind::AnnAssign { value: None, .. }) {
self.add_binding(
id.to_string(),
@@ -2104,7 +2136,7 @@ impl<'a> Checker<'a> {
ImportKind::ImportFrom => pyflakes::fixes::remove_unused_import_froms,
};
match removal_fn(self.get_locator(), &full_names, child, parent, &deleted) {
match removal_fn(self.locator, &full_names, child, parent, &deleted) {
Ok(fix) => Some(fix),
Err(e) => {
error!("Failed to fix unused imports: {}", e);
@@ -2115,15 +2147,27 @@ impl<'a> Checker<'a> {
None
};
let mut check = Check::new(
CheckKind::UnusedImport(full_names.into_iter().map(String::from).collect()),
self.locate_check(Range::from_located(child)),
);
if let Some(fix) = fix {
check.amend(fix);
if self.path.ends_with("__init__.py") {
self.checks.push(Check::new(
CheckKind::UnusedImport(
full_names.into_iter().map(String::from).collect(),
true,
),
self.locate_check(Range::from_located(child)),
));
} else {
let mut check = Check::new(
CheckKind::UnusedImport(
full_names.into_iter().map(String::from).collect(),
false,
),
self.locate_check(Range::from_located(child)),
);
if let Some(fix) = fix {
check.amend(fix);
}
self.checks.push(check);
}
self.checks.push(check);
}
}
}
@@ -2252,12 +2296,12 @@ impl<'a> Checker<'a> {
pub fn check_ast(
python_ast: &Suite,
contents: &str,
locator: &SourceCodeLocator,
settings: &Settings,
autofix: &fixer::Mode,
path: &Path,
) -> Vec<Check> {
let mut checker = Checker::new(settings, autofix, path, contents);
let mut checker = Checker::new(settings, autofix, path, locator);
checker.push_scope(Scope::new(ScopeKind::Module));
checker.bind_builtins();

View File

@@ -229,7 +229,7 @@ pub fn check_lines(
mod tests {
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::settings;
use crate::settings::Settings;
use super::check_lines;
@@ -243,9 +243,9 @@ mod tests {
&mut checks,
line,
&noqa_line_for,
&settings::Settings {
&Settings {
line_length,
..settings::Settings::for_rule(CheckCode::E501)
..Settings::for_rule(CheckCode::E501)
},
&fixer::Mode::Generate,
);

View File

@@ -1,123 +1,50 @@
//! Lint rules based on token traversal.
use rustpython_ast::Location;
use rustpython_parser::lexer::{LexResult, Tok};
use crate::ast::operations::SourceCodeLocator;
use crate::ast::types::Range;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::Settings;
// See: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
// Characters that may legally follow a backslash inside a non-raw Python
// string literal. A backslash followed by any other character is an invalid
// escape sequence (reported as W605).
const VALID_ESCAPE_SEQUENCES: &[char; 23] = &[
    '\n', '\\', '\'', '"', 'a', 'b', 'f', 'n', 'r', 't', 'v', '0', '1', '2', '3', '4', '5', '6',
    '7', 'x', // Escape sequences only recognized in string literals
    'N', 'u', 'U',
];
/// Return the quotation markers used for a String token.
/// Return the quotation markers used for a String token.
///
/// Checks for a triple-quote suffix (`'''` or `"""`) before a single-quote
/// suffix, since a triple-quoted token also ends with a single quote char.
///
/// Uses `str::ends_with` rather than byte-offset slicing: slicing at
/// `text.len() - 3` can land in the middle of a multi-byte (non-ASCII)
/// character and panic with "byte index is not a char boundary".
///
/// # Panics
/// Panics if the token does not end with a recognized quote marker, which
/// would indicate a malformed String token from the lexer.
fn extract_quote(text: &str) -> &str {
    for quote in ["'''", "\"\"\"", "'", "\""] {
        if text.ends_with(quote) {
            return quote;
        }
    }
    panic!("Unable to find quotation mark for String token.")
}
/// W605
fn invalid_escape_sequence(
locator: &SourceCodeLocator,
start: &Location,
end: &Location,
) -> Vec<Check> {
let mut checks = vec![];
let text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
});
// Determine whether the string is single- or triple-quoted.
let quote = extract_quote(text);
let quote_pos = text.find(quote).unwrap();
let prefix = text[..quote_pos].to_lowercase();
let body = &text[(quote_pos + quote.len())..(text.len() - quote.len())];
if !prefix.contains('r') {
let mut col_offset = 0;
let mut row_offset = 0;
let mut in_escape = false;
let mut chars = body.chars();
let mut current = chars.next();
let mut next = chars.next();
while let (Some(current_char), Some(next_char)) = (current, next) {
// If we see an escaped backslash, avoid treating the character _after_ the
// escaped backslash as itself an escaped character.
if in_escape {
in_escape = false;
} else {
in_escape = current_char == '\\' && next_char == '\\';
if current_char == '\\' && !VALID_ESCAPE_SEQUENCES.contains(&next_char) {
// Compute the location of the escape sequence by offsetting the location of the
// string token by the characters we've seen thus far.
let location = if row_offset == 0 {
Location::new(
start.row() + row_offset,
start.column() + prefix.len() + quote.len() + col_offset,
)
} else {
Location::new(start.row() + row_offset, col_offset + 1)
};
let end_location = Location::new(location.row(), location.column() + 1);
checks.push(Check::new(
CheckKind::InvalidEscapeSequence(next_char),
Range {
location,
end_location,
},
))
}
}
// Track the offset from the start position as we iterate over the body.
if current_char == '\n' {
col_offset = 0;
row_offset += 1;
} else {
col_offset += 1;
}
current = next;
next = chars.next();
}
}
checks
}
use crate::checks::{Check, CheckCode};
use crate::flake8_quotes::docstring_detection::StateMachine;
use crate::{flake8_quotes, pycodestyle, Settings};
pub fn check_tokens(
checks: &mut Vec<Check>,
contents: &str,
locator: &SourceCodeLocator,
tokens: &[LexResult],
settings: &Settings,
) {
// TODO(charlie): Use a shared SourceCodeLocator between this site and the AST traversal.
let locator = SourceCodeLocator::new(contents);
let enforce_invalid_escape_sequence = settings.enabled.contains(&CheckCode::W605);
let enforce_quotes = settings.enabled.contains(&CheckCode::Q000)
| settings.enabled.contains(&CheckCode::Q001)
| settings.enabled.contains(&CheckCode::Q002)
| settings.enabled.contains(&CheckCode::Q003);
let mut state_machine = StateMachine::new();
for (start, tok, end) in tokens.iter().flatten() {
// W605
if enforce_invalid_escape_sequence {
if matches!(tok, Tok::String { .. }) {
checks.extend(invalid_escape_sequence(&locator, start, end));
checks.extend(pycodestyle::checks::invalid_escape_sequence(
locator, start, end,
));
}
}
// flake8-quotes
if enforce_quotes {
let is_docstring = state_machine.consume(tok);
if matches!(tok, Tok::String { .. }) {
if let Some(check) = flake8_quotes::checks::quotes(
locator,
start,
end,
is_docstring,
&settings.flake8_quotes,
) {
if settings.enabled.contains(check.kind.code()) {
checks.push(check);
}
}
}
}
}

View File

@@ -7,6 +7,7 @@ use strum_macros::{AsRefStr, EnumIter, EnumString};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::flake8_quotes::settings::Quote;
use crate::pyupgrade::types::Primitive;
#[derive(
@@ -79,6 +80,7 @@ pub enum CheckCode {
B002,
B007,
B011,
B013,
B014,
B017,
B025,
@@ -102,6 +104,11 @@ pub enum CheckCode {
// flake8-print
T201,
T203,
// flake8-quotes
Q000,
Q001,
Q002,
Q003,
// pyupgrade
U001,
U002,
@@ -162,12 +169,16 @@ pub enum CheckCode {
N803,
N804,
N805,
N806,
N807,
N811,
N812,
N813,
N814,
N815,
N816,
N817,
N818,
// Meta
M001,
}
@@ -184,6 +195,7 @@ pub enum CheckCategory {
Flake8Bugbear,
Flake8Builtins,
Flake8Print,
Flake8Quotes,
Meta,
}
@@ -197,6 +209,7 @@ impl CheckCategory {
CheckCategory::Flake8Bugbear => "flake8-bugbear",
CheckCategory::Flake8Comprehensions => "flake8-comprehensions",
CheckCategory::Flake8Print => "flake8-print",
CheckCategory::Flake8Quotes => "flake8-quotes",
CheckCategory::Pyupgrade => "pyupgrade",
CheckCategory::Pydocstyle => "pydocstyle",
CheckCategory::PEP8Naming => "pep8-naming",
@@ -265,7 +278,7 @@ pub enum CheckKind {
UndefinedExport(String),
UndefinedLocal(String),
UndefinedName(String),
UnusedImport(Vec<String>),
UnusedImport(Vec<String>, bool),
UnusedVariable(String),
YieldOutsideFunction,
// flake8-builtins
@@ -276,6 +289,7 @@ pub enum CheckKind {
UnaryPrefixIncrement,
UnusedLoopControlVariable(String),
DoNotAssertFalse,
RedundantTupleInExceptionHandler(String),
DuplicateHandlerException(Vec<String>),
NoAssertRaisesException,
DuplicateTryBlockException(String),
@@ -299,6 +313,11 @@ pub enum CheckKind {
// flake8-print
PrintFound,
PPrintFound,
// flake8-quotes
BadQuotesInlineString(Quote),
BadQuotesMultilineString(Quote),
BadQuotesDocstring(Quote),
AvoidQuoteEscape,
// pyupgrade
TypeOfPrimitive(Primitive),
UnnecessaryAbspath,
@@ -359,12 +378,16 @@ pub enum CheckKind {
InvalidArgumentName(String),
InvalidFirstArgumentNameForClassMethod,
InvalidFirstArgumentNameForMethod,
NonLowercaseVariableInFunction(String),
DunderFunctionName,
ConstantImportedAsNonConstant(String, String),
LowercaseImportedAsNonLowercase(String, String),
CamelcaseImportedAsLowercase(String, String),
CamelcaseImportedAsConstant(String, String),
MixedCaseVariableInClassScope(String),
MixedCaseVariableInGlobalScope(String),
CamelcaseImportedAsAcronym(String, String),
ErrorSuffixOnExceptionName(String),
// Meta
UnusedNOQA(Option<Vec<String>>),
}
@@ -374,7 +397,11 @@ impl CheckCode {
pub fn lint_source(&self) -> &'static LintSource {
match self {
CheckCode::E501 | CheckCode::W292 | CheckCode::M001 => &LintSource::Lines,
CheckCode::W605 => &LintSource::Tokens,
CheckCode::W605
| CheckCode::Q000
| CheckCode::Q001
| CheckCode::Q002
| CheckCode::Q003 => &LintSource::Tokens,
CheckCode::E902 => &LintSource::FileSystem,
_ => &LintSource::AST,
}
@@ -402,7 +429,7 @@ impl CheckCode {
CheckCode::W292 => CheckKind::NoNewLineAtEndOfFile,
CheckCode::W605 => CheckKind::InvalidEscapeSequence('c'),
// pyflakes
CheckCode::F401 => CheckKind::UnusedImport(vec!["...".to_string()]),
CheckCode::F401 => CheckKind::UnusedImport(vec!["...".to_string()], false),
CheckCode::F402 => CheckKind::ImportShadowedByLoopVar("...".to_string(), 1),
CheckCode::F403 => CheckKind::ImportStarUsed("...".to_string()),
CheckCode::F404 => CheckKind::LateFutureImport,
@@ -440,6 +467,9 @@ impl CheckCode {
CheckCode::B002 => CheckKind::UnaryPrefixIncrement,
CheckCode::B007 => CheckKind::UnusedLoopControlVariable("i".to_string()),
CheckCode::B011 => CheckKind::DoNotAssertFalse,
CheckCode::B013 => {
CheckKind::RedundantTupleInExceptionHandler("ValueError".to_string())
}
CheckCode::B014 => CheckKind::DuplicateHandlerException(vec!["ValueError".to_string()]),
CheckCode::B017 => CheckKind::NoAssertRaisesException,
CheckCode::B025 => CheckKind::DuplicateTryBlockException("Exception".to_string()),
@@ -476,6 +506,11 @@ impl CheckCode {
// flake8-print
CheckCode::T201 => CheckKind::PrintFound,
CheckCode::T203 => CheckKind::PPrintFound,
// flake8-quotes
CheckCode::Q000 => CheckKind::BadQuotesInlineString(Quote::Double),
CheckCode::Q001 => CheckKind::BadQuotesMultilineString(Quote::Double),
CheckCode::Q002 => CheckKind::BadQuotesDocstring(Quote::Double),
CheckCode::Q003 => CheckKind::AvoidQuoteEscape,
// pyupgrade
CheckCode::U001 => CheckKind::UselessMetaclassType,
CheckCode::U002 => CheckKind::UnnecessaryAbspath,
@@ -545,6 +580,7 @@ impl CheckCode {
CheckCode::N803 => CheckKind::InvalidArgumentName("...".to_string()),
CheckCode::N804 => CheckKind::InvalidFirstArgumentNameForClassMethod,
CheckCode::N805 => CheckKind::InvalidFirstArgumentNameForMethod,
CheckCode::N806 => CheckKind::NonLowercaseVariableInFunction("...".to_string()),
CheckCode::N807 => CheckKind::DunderFunctionName,
CheckCode::N811 => {
CheckKind::ConstantImportedAsNonConstant("...".to_string(), "...".to_string())
@@ -558,9 +594,12 @@ impl CheckCode {
CheckCode::N814 => {
CheckKind::CamelcaseImportedAsConstant("...".to_string(), "...".to_string())
}
CheckCode::N815 => CheckKind::MixedCaseVariableInClassScope("mixedCase".to_string()),
CheckCode::N816 => CheckKind::MixedCaseVariableInGlobalScope("mixedCase".to_string()),
CheckCode::N817 => {
CheckKind::CamelcaseImportedAsAcronym("...".to_string(), "...".to_string())
}
CheckCode::N818 => CheckKind::ErrorSuffixOnExceptionName("...".to_string()),
// Meta
CheckCode::M001 => CheckKind::UnusedNOQA(None),
}
@@ -618,6 +657,7 @@ impl CheckCode {
CheckCode::B002 => CheckCategory::Flake8Bugbear,
CheckCode::B007 => CheckCategory::Flake8Bugbear,
CheckCode::B011 => CheckCategory::Flake8Bugbear,
CheckCode::B013 => CheckCategory::Flake8Bugbear,
CheckCode::B014 => CheckCategory::Flake8Bugbear,
CheckCode::B017 => CheckCategory::Flake8Bugbear,
CheckCode::B025 => CheckCategory::Flake8Bugbear,
@@ -639,6 +679,10 @@ impl CheckCode {
CheckCode::C417 => CheckCategory::Flake8Comprehensions,
CheckCode::T201 => CheckCategory::Flake8Print,
CheckCode::T203 => CheckCategory::Flake8Print,
CheckCode::Q000 => CheckCategory::Flake8Quotes,
CheckCode::Q001 => CheckCategory::Flake8Quotes,
CheckCode::Q002 => CheckCategory::Flake8Quotes,
CheckCode::Q003 => CheckCategory::Flake8Quotes,
CheckCode::U001 => CheckCategory::Pyupgrade,
CheckCode::U002 => CheckCategory::Pyupgrade,
CheckCode::U003 => CheckCategory::Pyupgrade,
@@ -696,12 +740,16 @@ impl CheckCode {
CheckCode::N803 => CheckCategory::PEP8Naming,
CheckCode::N804 => CheckCategory::PEP8Naming,
CheckCode::N805 => CheckCategory::PEP8Naming,
CheckCode::N806 => CheckCategory::PEP8Naming,
CheckCode::N807 => CheckCategory::PEP8Naming,
CheckCode::N811 => CheckCategory::PEP8Naming,
CheckCode::N812 => CheckCategory::PEP8Naming,
CheckCode::N813 => CheckCategory::PEP8Naming,
CheckCode::N814 => CheckCategory::PEP8Naming,
CheckCode::N815 => CheckCategory::PEP8Naming,
CheckCode::N816 => CheckCategory::PEP8Naming,
CheckCode::N817 => CheckCategory::PEP8Naming,
CheckCode::N818 => CheckCategory::PEP8Naming,
CheckCode::M001 => CheckCategory::Meta,
}
}
@@ -751,7 +799,7 @@ impl CheckKind {
CheckKind::UndefinedExport(_) => &CheckCode::F822,
CheckKind::UndefinedLocal(_) => &CheckCode::F823,
CheckKind::UndefinedName(_) => &CheckCode::F821,
CheckKind::UnusedImport(_) => &CheckCode::F401,
CheckKind::UnusedImport(_, _) => &CheckCode::F401,
CheckKind::UnusedVariable(_) => &CheckCode::F841,
CheckKind::YieldOutsideFunction => &CheckCode::F704,
// pycodestyle warnings
@@ -765,6 +813,7 @@ impl CheckKind {
CheckKind::UnaryPrefixIncrement => &CheckCode::B002,
CheckKind::UnusedLoopControlVariable(_) => &CheckCode::B007,
CheckKind::DoNotAssertFalse => &CheckCode::B011,
CheckKind::RedundantTupleInExceptionHandler(_) => &CheckCode::B013,
CheckKind::DuplicateHandlerException(_) => &CheckCode::B014,
CheckKind::NoAssertRaisesException => &CheckCode::B017,
CheckKind::DuplicateTryBlockException(_) => &CheckCode::B025,
@@ -788,6 +837,11 @@ impl CheckKind {
// flake8-print
CheckKind::PrintFound => &CheckCode::T201,
CheckKind::PPrintFound => &CheckCode::T203,
// flake8-quotes
CheckKind::BadQuotesInlineString(_) => &CheckCode::Q000,
CheckKind::BadQuotesMultilineString(_) => &CheckCode::Q001,
CheckKind::BadQuotesDocstring(_) => &CheckCode::Q002,
CheckKind::AvoidQuoteEscape => &CheckCode::Q003,
// pyupgrade
CheckKind::TypeOfPrimitive(_) => &CheckCode::U003,
CheckKind::UnnecessaryAbspath => &CheckCode::U002,
@@ -848,12 +902,16 @@ impl CheckKind {
CheckKind::InvalidArgumentName(_) => &CheckCode::N803,
CheckKind::InvalidFirstArgumentNameForClassMethod => &CheckCode::N804,
CheckKind::InvalidFirstArgumentNameForMethod => &CheckCode::N805,
CheckKind::NonLowercaseVariableInFunction(..) => &CheckCode::N806,
CheckKind::DunderFunctionName => &CheckCode::N807,
CheckKind::ConstantImportedAsNonConstant(..) => &CheckCode::N811,
CheckKind::LowercaseImportedAsNonLowercase(..) => &CheckCode::N812,
CheckKind::CamelcaseImportedAsLowercase(..) => &CheckCode::N813,
CheckKind::CamelcaseImportedAsConstant(..) => &CheckCode::N814,
CheckKind::MixedCaseVariableInClassScope(..) => &CheckCode::N815,
CheckKind::MixedCaseVariableInGlobalScope(..) => &CheckCode::N816,
CheckKind::CamelcaseImportedAsAcronym(..) => &CheckCode::N817,
CheckKind::ErrorSuffixOnExceptionName(..) => &CheckCode::N818,
// Meta
CheckKind::UnusedNOQA(_) => &CheckCode::M001,
}
@@ -980,9 +1038,13 @@ impl CheckKind {
CheckKind::UndefinedName(name) => {
format!("Undefined name `{name}`")
}
CheckKind::UnusedImport(names) => {
CheckKind::UnusedImport(names, in_init_py) => {
let names = names.iter().map(|name| format!("`{name}`")).join(", ");
format!("{names} imported but unused")
if *in_init_py {
format!("{names} imported but unused and missing from `__all__`")
} else {
format!("{names} imported but unused")
}
}
CheckKind::UnusedVariable(name) => {
format!("Local variable `{name}` is assigned to but never used")
@@ -1010,6 +1072,9 @@ impl CheckKind {
"Do not `assert False` (`python -O` removes these calls), raise `AssertionError()`"
.to_string()
}
CheckKind::RedundantTupleInExceptionHandler(name) => {
format!("A length-one tuple literal is redundant. Write `except {name}:` instead of `except ({name},):`.")
}
CheckKind::DuplicateHandlerException(names) => {
if names.len() == 1 {
let name = &names[0];
@@ -1085,7 +1150,7 @@ impl CheckKind {
format!("Unnecessary subscript reversal of iterable within `{func}()`")
}
CheckKind::UnnecessaryComprehension(obj_type) => {
format!(" Unnecessary `{obj_type}` comprehension (rewrite using `{obj_type}()`)")
format!("Unnecessary `{obj_type}` comprehension (rewrite using `{obj_type}()`)")
}
CheckKind::UnnecessaryMap(obj_type) => {
if obj_type == "generator" {
@@ -1097,6 +1162,26 @@ impl CheckKind {
// flake8-print
CheckKind::PrintFound => "`print` found".to_string(),
CheckKind::PPrintFound => "`pprint` found".to_string(),
// flake8-quotes
CheckKind::BadQuotesInlineString(quote) => {
match quote {
Quote::Single => "Double quotes found but single quotes preferred".to_string(),
Quote::Double => "Single quotes found but double quotes preferred".to_string(),
}
},
CheckKind::BadQuotesMultilineString(quote) => {
match quote {
Quote::Single => "Double quote multiline found but single quotes preferred".to_string(),
Quote::Double => "Single quote multiline found but double quotes preferred".to_string(),
}
},
CheckKind::BadQuotesDocstring(quote) => {
match quote {
Quote::Single => "Double quote docstring found but single quotes preferred".to_string(),
Quote::Double => "Single quote docstring found but double quotes preferred".to_string(),
}
},
CheckKind::AvoidQuoteEscape => "Change outer quotes to avoid escaping inner quotes".to_string(),
// pyupgrade
CheckKind::TypeOfPrimitive(primitive) => {
format!("Use `{}` instead of `type(...)`", primitive.builtin())
@@ -1250,6 +1335,9 @@ impl CheckKind {
CheckKind::InvalidFirstArgumentNameForMethod => {
"First argument of a method should be named `self`".to_string()
}
CheckKind::NonLowercaseVariableInFunction(name) => {
format!("Variable `{name}` in function should be lowercase")
}
CheckKind::DunderFunctionName => {
"Function name should not start and end with `__`".to_string()
}
@@ -1265,9 +1353,18 @@ impl CheckKind {
CheckKind::CamelcaseImportedAsConstant(name, asname) => {
format!("Camelcase `{name}` imported as constant `{asname}`")
}
CheckKind::MixedCaseVariableInClassScope(name) => {
format!("Variable `{name}` in class scope should not be mixedCase")
}
CheckKind::MixedCaseVariableInGlobalScope(name) => {
format!("Variable `{name}` in global scope should not be mixedCase")
}
CheckKind::CamelcaseImportedAsAcronym(name, asname) => {
format!("Camelcase `{name}` imported as acronym `{asname}`")
}
CheckKind::ErrorSuffixOnExceptionName(name) => {
format!("Exception name `{name}` should be named with an Error suffix")
}
// Meta
CheckKind::UnusedNOQA(codes) => match codes {
None => "Unused `noqa` directive".to_string(),
@@ -1338,7 +1435,7 @@ impl CheckKind {
| CheckKind::SuperCallWithParameters
| CheckKind::TypeOfPrimitive(_)
| CheckKind::UnnecessaryAbspath
| CheckKind::UnusedImport(_)
| CheckKind::UnusedImport(_, false)
| CheckKind::UnusedLoopControlVariable(_)
| CheckKind::UnusedNOQA(_)
| CheckKind::UsePEP585Annotation(_)

1101
src/checks_gen.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -5,11 +5,11 @@ use clap::{command, Parser};
use log::warn;
use regex::Regex;
use crate::checks::CheckCode;
use crate::checks_gen::CheckCodePrefix;
use crate::printer::SerializationFormat;
use crate::pyproject::StrCheckCodePair;
use crate::settings::PythonVersion;
use crate::RawSettings;
use crate::settings::configuration::Configuration;
use crate::settings::types::PythonVersion;
use crate::settings::types::StrCheckCodePair;
#[derive(Debug, Parser)]
#[command(author, about = "ruff: An extremely fast Python linter.")]
@@ -43,16 +43,16 @@ pub struct Cli {
pub no_cache: bool,
/// List of error codes to enable.
#[arg(long, value_delimiter = ',')]
pub select: Vec<CheckCode>,
pub select: Vec<CheckCodePrefix>,
/// Like --select, but adds additional error codes on top of the selected ones.
#[arg(long, value_delimiter = ',')]
pub extend_select: Vec<CheckCode>,
pub extend_select: Vec<CheckCodePrefix>,
/// List of error codes to ignore.
#[arg(long, value_delimiter = ',')]
pub ignore: Vec<CheckCode>,
pub ignore: Vec<CheckCodePrefix>,
/// Like --ignore, but adds additional error codes on top of the ignored ones.
#[arg(long, value_delimiter = ',')]
pub extend_ignore: Vec<CheckCode>,
pub extend_ignore: Vec<CheckCodePrefix>,
/// List of paths, used to exclude files and/or directories from checks.
#[arg(long, value_delimiter = ',')]
pub exclude: Vec<String>,
@@ -106,10 +106,10 @@ impl fmt::Display for Warnable {
/// Warn the user if they attempt to enable a code that won't be respected.
pub fn warn_on(
flag: Warnable,
codes: &[CheckCode],
cli_ignore: &[CheckCode],
cli_extend_ignore: &[CheckCode],
pyproject_settings: &RawSettings,
codes: &[CheckCodePrefix],
cli_ignore: &[CheckCodePrefix],
cli_extend_ignore: &[CheckCodePrefix],
pyproject_configuration: &Configuration,
pyproject_path: &Option<PathBuf>,
) {
for code in codes {
@@ -117,7 +117,7 @@ pub fn warn_on(
if cli_ignore.contains(code) {
warn!("{code:?} was passed to {flag}, but ignored via --ignore")
}
} else if pyproject_settings.ignore.contains(code) {
} else if pyproject_configuration.ignore.contains(code) {
if let Some(path) = pyproject_path {
warn!(
"{code:?} was passed to {flag}, but ignored by the `ignore` field in {}",
@@ -131,7 +131,7 @@ pub fn warn_on(
if cli_extend_ignore.contains(code) {
warn!("{code:?} was passed to {flag}, but ignored via --extend-ignore")
}
} else if pyproject_settings.extend_ignore.contains(code) {
} else if pyproject_configuration.extend_ignore.contains(code) {
if let Some(path) = pyproject_path {
warn!(
"{code:?} was passed to {flag}, but ignored by the `extend_ignore` field in {}",

View File

@@ -27,7 +27,7 @@ pub fn leading_space(line: &str) -> String {
/// Extract the leading indentation from a docstring.
pub fn indentation<'a>(checker: &'a Checker, docstring: &Expr) -> &'a str {
let range = Range::from_located(docstring);
checker.get_locator().slice_source_code_range(&Range {
checker.locator.slice_source_code_range(&Range {
location: Location::new(range.location.row(), 1),
end_location: Location::new(range.location.row(), range.location.column()),
})

View File

@@ -2,11 +2,13 @@ pub use assert_false::assert_false;
pub use assert_raises_exception::assert_raises_exception;
pub use duplicate_exceptions::duplicate_exceptions;
pub use duplicate_exceptions::duplicate_handler_exceptions;
pub use redundant_tuple_in_exception_handler::redundant_tuple_in_exception_handler;
pub use unary_prefix_increment::unary_prefix_increment;
pub use unused_loop_control_variable::unused_loop_control_variable;
mod assert_false;
mod assert_raises_exception;
mod duplicate_exceptions;
mod redundant_tuple_in_exception_handler;
mod unary_prefix_increment;
mod unused_loop_control_variable;

View File

@@ -0,0 +1,22 @@
use rustpython_ast::{Excepthandler, ExcepthandlerKind, ExprKind};
use crate::ast::types::{CheckLocator, Range};
use crate::check_ast::Checker;
use crate::checks::{Check, CheckKind};
/// B013: Flag `except` handlers whose exception type is a one-element tuple,
/// e.g. `except (ValueError,):`, which is redundant relative to writing
/// `except ValueError:` directly.
///
/// Pushes one check per offending handler onto the `checker`, located at the
/// tuple expression.
pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[Excepthandler]) {
    for handler in handlers {
        let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
        if let Some(type_) = type_ {
            if let ExprKind::Tuple { elts, .. } = &type_.node {
                if elts.len() == 1 {
                    // NOTE(review): `elts[0].to_string()` relies on the AST
                    // node's string conversion for the message — confirm this
                    // renders the exception name rather than a debug dump.
                    checker.add_check(Check::new(
                        CheckKind::RedundantTupleInExceptionHandler(elts[0].to_string()),
                        checker.locate_check(Range::from_located(type_)),
                    ));
                }
            }
        }
    }
}

View File

@@ -37,7 +37,7 @@ fn first_argument_with_matching_function<'a>(
Some(&args.first()?.node)
}
/// Check `list(generator)` compliance.
/// C400 (`list(generator)`)
pub fn unnecessary_generator_list(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("list", func, args)?;
if let ExprKind::GeneratorExp { .. } = argument {
@@ -49,7 +49,7 @@ pub fn unnecessary_generator_list(expr: &Expr, func: &Expr, args: &[Expr]) -> Op
None
}
/// Check `set(generator)` compliance.
/// C401 (`set(generator)`)
pub fn unnecessary_generator_set(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("set", func, args)?;
if let ExprKind::GeneratorExp { .. } = argument {
@@ -61,7 +61,7 @@ pub fn unnecessary_generator_set(expr: &Expr, func: &Expr, args: &[Expr]) -> Opt
None
}
/// Check `dict((x, y) for x, y in iterable)` compliance.
/// C402 (`dict((x, y) for x, y in iterable)`)
pub fn unnecessary_generator_dict(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("dict", func, args)?;
if let ExprKind::GeneratorExp { elt, .. } = argument {
@@ -78,7 +78,7 @@ pub fn unnecessary_generator_dict(expr: &Expr, func: &Expr, args: &[Expr]) -> Op
None
}
/// Check `set([...])` compliance.
/// C403 (`set([...])`)
pub fn unnecessary_list_comprehension_set(
expr: &Expr,
func: &Expr,
@@ -94,7 +94,7 @@ pub fn unnecessary_list_comprehension_set(
None
}
/// Check `dict([...])` compliance.
/// C404 (`dict([...])`)
pub fn unnecessary_list_comprehension_dict(
expr: &Expr,
func: &Expr,
@@ -115,7 +115,7 @@ pub fn unnecessary_list_comprehension_dict(
None
}
/// Check `set([1, 2])` compliance.
/// C405 (`set([1, 2])`)
pub fn unnecessary_literal_set(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("set", func, args)?;
let kind = match argument {
@@ -129,7 +129,7 @@ pub fn unnecessary_literal_set(expr: &Expr, func: &Expr, args: &[Expr]) -> Optio
))
}
/// Check `dict([(1, 2)])` compliance.
/// C406 (`dict([(1, 2)])`)
pub fn unnecessary_literal_dict(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = exactly_one_argument_with_matching_function("dict", func, args)?;
let (kind, elts) = match argument {
@@ -151,6 +151,7 @@ pub fn unnecessary_literal_dict(expr: &Expr, func: &Expr, args: &[Expr]) -> Opti
))
}
/// C408
pub fn unnecessary_collection_call(
expr: &Expr,
func: &Expr,
@@ -162,10 +163,10 @@ pub fn unnecessary_collection_call(
}
let id = function_name(func)?;
match id {
"dict" if keywords.is_empty() || keywords.iter().all(|kw| kw.node.arg.is_some()) => (),
"list" | "tuple" => {
// list() or tuple()
}
"dict" if keywords.is_empty() || keywords.iter().all(|kw| kw.node.arg.is_some()) => (),
_ => return None,
};
Some(Check::new(
@@ -174,6 +175,7 @@ pub fn unnecessary_collection_call(
))
}
/// C409
pub fn unnecessary_literal_within_tuple_call(
expr: &Expr,
func: &Expr,
@@ -191,6 +193,7 @@ pub fn unnecessary_literal_within_tuple_call(
))
}
/// C410
pub fn unnecessary_literal_within_list_call(
expr: &Expr,
func: &Expr,
@@ -208,6 +211,7 @@ pub fn unnecessary_literal_within_list_call(
))
}
/// C411
pub fn unnecessary_list_call(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let argument = first_argument_with_matching_function("list", func, args)?;
if let ExprKind::ListComp { .. } = argument {
@@ -219,6 +223,7 @@ pub fn unnecessary_list_call(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<
None
}
/// C413
pub fn unnecessary_call_around_sorted(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let outer = function_name(func)?;
if !(outer == "list" || outer == "reversed") {
@@ -235,6 +240,7 @@ pub fn unnecessary_call_around_sorted(expr: &Expr, func: &Expr, args: &[Expr]) -
None
}
/// C414
pub fn unnecessary_double_cast_or_process(
expr: &Expr,
func: &Expr,
@@ -274,6 +280,7 @@ pub fn unnecessary_double_cast_or_process(
None
}
/// C415
pub fn unnecessary_subscript_reversal(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
let first_arg = args.first()?;
let id = function_name(func)?;
@@ -309,6 +316,7 @@ pub fn unnecessary_subscript_reversal(expr: &Expr, func: &Expr, args: &[Expr]) -
None
}
/// C416
pub fn unnecessary_comprehension(
expr: &Expr,
elt: &Expr,
@@ -337,6 +345,7 @@ pub fn unnecessary_comprehension(
))
}
/// C417
pub fn unnecessary_map(expr: &Expr, func: &Expr, args: &[Expr]) -> Option<Check> {
fn new_check(kind: &str, expr: &Expr) -> Check {
Check::new(

298
src/flake8_quotes/checks.rs Normal file
View File

@@ -0,0 +1,298 @@
use rustpython_ast::Location;
use crate::ast::operations::SourceCodeLocator;
use crate::ast::types::Range;
use crate::checks::{Check, CheckKind};
use crate::flake8_quotes::settings::{Quote, Settings};
/// The single-character quote that matches the configured preference.
fn good_single(quote: &Quote) -> char {
    if matches!(quote, Quote::Single) {
        '\''
    } else {
        '"'
    }
}
/// The single-character quote opposite to the configured preference.
fn bad_single(quote: &Quote) -> char {
    if matches!(quote, Quote::Double) {
        '\''
    } else {
        '"'
    }
}
/// The triple-quote sequence that matches the configured preference.
fn good_multiline(quote: &Quote) -> &str {
    if matches!(quote, Quote::Single) {
        "'''"
    } else {
        "\"\"\""
    }
}
/// An acceptable ending for a multiline string under the configured
/// preference: the preferred single quote followed by the opposite
/// triple-quote closer.
fn good_multiline_ending(quote: &Quote) -> &str {
    if matches!(quote, Quote::Single) {
        "'\"\"\""
    } else {
        "\"'''"
    }
}
/// The docstring triple-quote sequence matching the configured preference.
fn good_docstring(quote: &Quote) -> &str {
    if matches!(quote, Quote::Single) {
        "'''"
    } else {
        "\"\"\""
    }
}
/// Q000–Q003: Check a single String token (spanning `start`..`end`) against
/// the configured quote preferences.
///
/// Returns at most one `Check`: a bad docstring quote (Q002), bad multiline
/// quote (Q001), bad inline quote (Q000), or avoidable escape (Q003),
/// depending on the string kind and `settings`. Returns `None` when the
/// token conforms.
pub fn quotes(
    locator: &SourceCodeLocator,
    start: &Location,
    end: &Location,
    is_docstring: bool,
    settings: &Settings,
) -> Option<Check> {
    let text = locator.slice_source_code_range(&Range {
        location: *start,
        end_location: *end,
    });

    // Remove any prefixes (e.g., remove `u` from `u"foo"`). The token always
    // ends with its quote character, so the first occurrence of that character
    // marks the end of the prefix.
    let last_quote_char = text.chars().last().unwrap();
    // NOTE(review): despite the name, this is a byte offset (from `str::find`);
    // slicing on it is safe because quote characters are ASCII.
    let first_quote_char = text.find(last_quote_char).unwrap();
    let prefix = &text[..first_quote_char].to_lowercase();
    let raw_text = &text[first_quote_char..];

    // Determine if the string is multiline-based: it is if it opens with three
    // identical quote characters.
    let is_multiline = if raw_text.len() >= 3 {
        let mut chars = raw_text.chars();
        let first = chars.next().unwrap();
        let second = chars.next().unwrap();
        let third = chars.next().unwrap();
        first == second && second == third
    } else {
        false
    };

    if is_docstring {
        // Docstrings only need to *contain* the preferred triple quote.
        if raw_text.contains(good_docstring(&settings.docstring_quotes)) {
            return None;
        }
        return Some(Check::new(
            CheckKind::BadQuotesDocstring(settings.docstring_quotes.clone()),
            Range {
                location: *start,
                end_location: *end,
            },
        ));
    } else if is_multiline {
        // If our string is or contains a known good string, ignore it.
        if raw_text.contains(good_multiline(&settings.multiline_quotes)) {
            return None;
        }

        // If our string ends with a known good ending, then ignore it.
        if raw_text.ends_with(good_multiline_ending(&settings.multiline_quotes)) {
            return None;
        }

        return Some(Check::new(
            CheckKind::BadQuotesMultilineString(settings.multiline_quotes.clone()),
            Range {
                location: *start,
                end_location: *end,
            },
        ));
    } else {
        // Single-line string: strip the opening and closing quote characters.
        let string_contents = &raw_text[1..raw_text.len() - 1];

        // If we're using the preferred quotation type, check for escapes.
        if last_quote_char == good_single(&settings.inline_quotes) {
            if !settings.avoid_escape || prefix.contains('r') {
                return None;
            }
            // Q003: the body contains the preferred quote (so it must be
            // escaped) while the opposite quote is free to use instead.
            if string_contents.contains(good_single(&settings.inline_quotes))
                && !string_contents.contains(bad_single(&settings.inline_quotes))
            {
                return Some(Check::new(
                    CheckKind::AvoidQuoteEscape,
                    Range {
                        location: *start,
                        end_location: *end,
                    },
                ));
            }
            return None;
        }

        // If we're not using the preferred type, only allow use to avoid escapes.
        if !string_contents.contains(good_single(&settings.inline_quotes)) {
            return Some(Check::new(
                CheckKind::BadQuotesInlineString(settings.inline_quotes.clone()),
                Range {
                    location: *start,
                    end_location: *end,
                },
            ));
        }
    }

    None
}
#[cfg(test)]
mod tests {
use std::path::Path;
use anyhow::Result;
use rustpython_parser::lexer::LexResult;
use test_case::test_case;
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::flake8_quotes::settings::Quote;
use crate::linter::tokenize;
use crate::{flake8_quotes, linter, Settings};
use crate::{fs, noqa};
/// Test helper: read the fixture at `path`, tokenize it, and run the full
/// linting pipeline with the given settings, returning the emitted checks.
fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
    let contents = fs::read_file(path)?;
    let tokens: Vec<LexResult> = tokenize(&contents);
    // noqa suppression is resolved per-line from the token stream.
    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
    linter::check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)
}
#[test_case(Path::new("doubles.py"))]
#[test_case(Path::new("doubles_escaped.py"))]
#[test_case(Path::new("doubles_multiline_string.py"))]
#[test_case(Path::new("doubles_noqa.py"))]
#[test_case(Path::new("doubles_wrapped.py"))]
/// Snapshot-test the flake8-quotes rules with single quotes preferred, over
/// fixtures that use double quotes (so they should trigger findings).
fn doubles(path: &Path) -> Result<()> {
    // One snapshot per fixture file, keyed by filename.
    let snapshot = format!("doubles_{}", path.to_string_lossy());
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/flake8_quotes")
            .join(path)
            .as_path(),
        &Settings {
            flake8_quotes: flake8_quotes::settings::Settings {
                inline_quotes: Quote::Single,
                multiline_quotes: Quote::Single,
                docstring_quotes: Quote::Single,
                avoid_escape: true,
            },
            ..Settings::for_rules(vec![
                CheckCode::Q000,
                CheckCode::Q001,
                CheckCode::Q002,
                CheckCode::Q003,
            ])
        },
        &fixer::Mode::Generate,
    )?;
    // Sort for deterministic snapshot output.
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(snapshot, checks);
    Ok(())
}
#[test_case(Path::new("singles.py"))]
#[test_case(Path::new("singles_escaped.py"))]
#[test_case(Path::new("singles_multiline_string.py"))]
#[test_case(Path::new("singles_noqa.py"))]
#[test_case(Path::new("singles_wrapped.py"))]
/// Snapshot-test the flake8-quotes rules with double quotes preferred, over
/// fixtures that use single quotes (so they should trigger findings).
fn singles(path: &Path) -> Result<()> {
    // One snapshot per fixture file, keyed by filename.
    let snapshot = format!("singles_{}", path.to_string_lossy());
    let mut checks = check_path(
        Path::new("./resources/test/fixtures/flake8_quotes")
            .join(path)
            .as_path(),
        &Settings {
            flake8_quotes: flake8_quotes::settings::Settings {
                inline_quotes: Quote::Double,
                multiline_quotes: Quote::Double,
                docstring_quotes: Quote::Double,
                avoid_escape: true,
            },
            ..Settings::for_rules(vec![
                CheckCode::Q000,
                CheckCode::Q001,
                CheckCode::Q002,
                CheckCode::Q003,
            ])
        },
        &fixer::Mode::Generate,
    )?;
    // Sort for deterministic snapshot output.
    checks.sort_by_key(|check| check.location);
    insta::assert_yaml_snapshot!(snapshot, checks);
    Ok(())
}
#[test_case(Path::new("docstring_doubles.py"))]
#[test_case(Path::new("docstring_doubles_module_multiline.py"))]
#[test_case(Path::new("docstring_doubles_module_singleline.py"))]
#[test_case(Path::new("docstring_doubles_class.py"))]
#[test_case(Path::new("docstring_doubles_function.py"))]
#[test_case(Path::new("docstring_singles.py"))]
#[test_case(Path::new("docstring_singles_module_multiline.py"))]
#[test_case(Path::new("docstring_singles_module_singleline.py"))]
#[test_case(Path::new("docstring_singles_class.py"))]
#[test_case(Path::new("docstring_singles_function.py"))]
fn double_docstring(path: &Path) -> Result<()> {
let snapshot = format!("double_docstring_{}", path.to_string_lossy());
let mut checks = check_path(
Path::new("./resources/test/fixtures/flake8_quotes")
.join(path)
.as_path(),
&Settings {
flake8_quotes: flake8_quotes::settings::Settings {
inline_quotes: Quote::Single,
multiline_quotes: Quote::Single,
docstring_quotes: Quote::Double,
avoid_escape: true,
},
..Settings::for_rules(vec![
CheckCode::Q000,
CheckCode::Q001,
CheckCode::Q002,
CheckCode::Q003,
])
},
&fixer::Mode::Generate,
)?;
checks.sort_by_key(|check| check.location);
insta::assert_yaml_snapshot!(snapshot, checks);
Ok(())
}
#[test_case(Path::new("docstring_doubles.py"))]
#[test_case(Path::new("docstring_doubles_module_multiline.py"))]
#[test_case(Path::new("docstring_doubles_module_singleline.py"))]
#[test_case(Path::new("docstring_doubles_class.py"))]
#[test_case(Path::new("docstring_doubles_function.py"))]
#[test_case(Path::new("docstring_singles.py"))]
#[test_case(Path::new("docstring_singles_module_multiline.py"))]
#[test_case(Path::new("docstring_singles_module_singleline.py"))]
#[test_case(Path::new("docstring_singles_class.py"))]
#[test_case(Path::new("docstring_singles_function.py"))]
fn single_docstring(path: &Path) -> Result<()> {
let snapshot = format!("single_docstring_{}", path.to_string_lossy());
let mut checks = check_path(
Path::new("./resources/test/fixtures/flake8_quotes")
.join(path)
.as_path(),
&Settings {
flake8_quotes: flake8_quotes::settings::Settings {
inline_quotes: Quote::Single,
multiline_quotes: Quote::Double,
docstring_quotes: Quote::Single,
avoid_escape: true,
},
..Settings::for_rules(vec![
CheckCode::Q000,
CheckCode::Q001,
CheckCode::Q002,
CheckCode::Q003,
])
},
&fixer::Mode::Generate,
)?;
checks.sort_by_key(|check| check.location);
insta::assert_yaml_snapshot!(snapshot, checks);
Ok(())
}
}

View File

@@ -0,0 +1,118 @@
//! Extract docstrings via tokenization.
//!
//! See: https://github.com/zheller/flake8-quotes/blob/ef0d9a90249a080e460b70ab62bf4b65e5aa5816/flake8_quotes/docstring_detection.py#L29
//!
//! TODO(charlie): Consolidate with the existing AST-based docstring extraction.
use rustpython_parser::lexer::Tok;
/// Token-stream state for docstring detection: which string literal, if any,
/// we are currently expecting to be a docstring.
#[derive(Debug)]
enum State {
    // Start of the module: first string gets marked as a docstring.
    ExpectModuleDocstring,
    // After seeing a class definition, we're waiting for the block colon (and do bracket counting).
    ExpectClassColon,
    // After seeing the block colon in a class definition, we expect a docstring.
    ExpectClassDocstring,
    // Same as ExpectClassColon, but for function definitions.
    ExpectFunctionColon,
    // Same as ExpectClassDocstring, but for function definitions.
    ExpectFunctionDocstring,
    // Skip tokens until we observe a `class` or `def`.
    Other,
}
/// Incremental detector fed one token at a time; reports which `Tok::String`
/// tokens are docstrings (module-, class-, or function-level).
pub struct StateMachine {
    // Current docstring expectation (see `State`).
    state: State,
    // Open-bracket depth, reset at each `class`/`def`, used to distinguish the
    // header-terminating colon from colons inside bracketed expressions.
    bracket_count: usize,
}
impl StateMachine {
    /// Create a detector positioned at the start of a module, where the first
    /// string literal is the module docstring.
    pub fn new() -> Self {
        Self {
            state: State::ExpectModuleDocstring,
            bracket_count: 0,
        }
    }

    /// Feed the next token. Returns `true` iff `tok` is a string that should
    /// be treated as a docstring.
    pub fn consume(&mut self, tok: &Tok) -> bool {
        match tok {
            // Structural tokens never affect docstring expectations.
            Tok::Newline | Tok::Indent | Tok::Dedent => false,
            // A string fulfills any pending docstring expectation.
            Tok::String { .. } => {
                if self.expects_docstring() {
                    self.state = State::Other;
                    true
                } else {
                    false
                }
            }
            // A `class`/`def` keyword opens a new header; restart bracket counting.
            Tok::Class => {
                self.state = State::ExpectClassColon;
                self.bracket_count = 0;
                false
            }
            Tok::Def => {
                self.state = State::ExpectFunctionColon;
                self.bracket_count = 0;
                false
            }
            // Only a colon at bracket depth zero terminates the header (colons
            // inside argument lists or subscripts don't count).
            Tok::Colon => {
                if self.bracket_count == 0 {
                    if matches!(self.state, State::ExpectClassColon) {
                        self.state = State::ExpectClassDocstring;
                    } else if matches!(self.state, State::ExpectFunctionColon) {
                        self.state = State::ExpectFunctionDocstring;
                    }
                }
                false
            }
            Tok::Lpar | Tok::Lbrace | Tok::Lsqb => {
                self.bracket_count += 1;
                self.cancel_expectation();
                false
            }
            Tok::Rpar | Tok::Rbrace | Tok::Rsqb => {
                // Saturate rather than panic (debug-mode underflow) on a stray
                // closing bracket in malformed input.
                self.bracket_count = self.bracket_count.saturating_sub(1);
                self.cancel_expectation();
                false
            }
            // Any other token means the next string is not a docstring.
            _ => {
                self.cancel_expectation();
                false
            }
        }
    }

    /// True if the machine is currently waiting for a docstring.
    fn expects_docstring(&self) -> bool {
        matches!(
            self.state,
            State::ExpectModuleDocstring
                | State::ExpectClassDocstring
                | State::ExpectFunctionDocstring
        )
    }

    /// Drop any pending docstring expectation.
    fn cancel_expectation(&mut self) {
        if self.expects_docstring() {
            self.state = State::Other;
        }
    }
}

impl Default for StateMachine {
    fn default() -> Self {
        Self::new()
    }
}

3
src/flake8_quotes/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
//! The flake8-quotes plugin: quote-consistency checks (Q000-Q003), their
//! settings, and token-based docstring detection.
pub mod checks;
pub mod docstring_detection;
pub mod settings;

View File

@@ -0,0 +1,49 @@
//! Settings for the `flake-quotes` plugin.
use serde::{Deserialize, Serialize};
/// A quote-character preference; serialized in kebab-case as
/// `single` / `double`.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub enum Quote {
    Single,
    Double,
}
/// User-provided flake8-quotes options; every field is optional and falls
/// back to the corresponding `Settings` default (see `Settings::from_options`).
#[derive(Debug, PartialEq, Eq, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    // Preferred quote for inline (single-line) strings.
    pub inline_quotes: Option<Quote>,
    // Preferred quote for multiline strings.
    pub multiline_quotes: Option<Quote>,
    // Preferred quote for docstrings.
    pub docstring_quotes: Option<Quote>,
    // Whether to allow the non-preferred quote when it avoids escapes.
    pub avoid_escape: Option<bool>,
}
/// Resolved flake8-quotes settings, with every option filled in.
#[derive(Debug)]
pub struct Settings {
    pub inline_quotes: Quote,
    pub multiline_quotes: Quote,
    pub docstring_quotes: Quote,
    pub avoid_escape: bool,
}
impl Settings {
pub fn from_options(options: Options) -> Self {
Self {
inline_quotes: options.inline_quotes.unwrap_or(Quote::Single),
multiline_quotes: options.multiline_quotes.unwrap_or(Quote::Double),
docstring_quotes: options.docstring_quotes.unwrap_or(Quote::Double),
avoid_escape: options.avoid_escape.unwrap_or(true),
}
}
}
impl Default for Settings {
    /// Default preferences: single quotes inline, double quotes for multiline
    /// strings and docstrings, and escape-avoidance enabled.
    fn default() -> Self {
        Self {
            inline_quotes: Quote::Single,
            multiline_quotes: Quote::Double,
            docstring_quotes: Quote::Double,
            avoid_escape: true,
        }
    }
}

View File

@@ -0,0 +1,50 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 5
column: 1
end_location:
row: 7
column: 4
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 16
column: 5
end_location:
row: 18
column: 8
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 21
column: 21
end_location:
row: 22
column: 38
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 30
column: 9
end_location:
row: 32
column: 12
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 35
column: 13
end_location:
row: 37
column: 16
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 3
column: 5
end_location:
row: 3
column: 28
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 5
column: 23
end_location:
row: 5
column: 44
fix: ~

View File

@@ -0,0 +1,50 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 3
column: 5
end_location:
row: 3
column: 27
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 11
column: 5
end_location:
row: 11
column: 27
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 15
column: 39
end_location:
row: 17
column: 4
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 17
column: 5
end_location:
row: 17
column: 20
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 21
column: 5
end_location:
row: 21
column: 28
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 4
column: 1
end_location:
row: 6
column: 4
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 9
column: 1
end_location:
row: 11
column: 4
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 2
column: 1
end_location:
row: 2
column: 32
fix: ~
- kind:
BadQuotesMultilineString: single
location:
row: 6
column: 1
end_location:
row: 6
column: 32
fix: ~

View File

@@ -0,0 +1,32 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: double
location:
row: 1
column: 1
end_location:
row: 3
column: 4
fix: ~
- kind:
BadQuotesDocstring: double
location:
row: 14
column: 5
end_location:
row: 16
column: 8
fix: ~
- kind:
BadQuotesDocstring: double
location:
row: 26
column: 9
end_location:
row: 28
column: 12
fix: ~

View File

@@ -0,0 +1,32 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: double
location:
row: 2
column: 5
end_location:
row: 2
column: 54
fix: ~
- kind:
BadQuotesDocstring: double
location:
row: 6
column: 9
end_location:
row: 6
column: 58
fix: ~
- kind:
BadQuotesDocstring: double
location:
row: 9
column: 29
end_location:
row: 9
column: 53
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: double
location:
row: 2
column: 5
end_location:
row: 2
column: 57
fix: ~
- kind:
BadQuotesDocstring: double
location:
row: 8
column: 5
end_location:
row: 10
column: 8
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: double
location:
row: 1
column: 1
end_location:
row: 3
column: 4
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: double
location:
row: 1
column: 1
end_location:
row: 1
column: 50
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesInlineString: single
location:
row: 1
column: 25
end_location:
row: 1
column: 46
fix: ~
- kind:
BadQuotesInlineString: single
location:
row: 2
column: 25
end_location:
row: 2
column: 47
fix: ~

View File

@@ -0,0 +1,13 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind: AvoidQuoteEscape
location:
row: 1
column: 26
end_location:
row: 1
column: 48
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: single
location:
row: 1
column: 5
end_location:
row: 3
column: 13
fix: ~

View File

@@ -0,0 +1,6 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
[]

View File

@@ -0,0 +1,32 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: single
location:
row: 1
column: 1
end_location:
row: 3
column: 4
fix: ~
- kind:
BadQuotesDocstring: single
location:
row: 12
column: 5
end_location:
row: 14
column: 8
fix: ~
- kind:
BadQuotesDocstring: single
location:
row: 24
column: 9
end_location:
row: 26
column: 12
fix: ~

View File

@@ -0,0 +1,32 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: single
location:
row: 2
column: 5
end_location:
row: 2
column: 54
fix: ~
- kind:
BadQuotesDocstring: single
location:
row: 6
column: 9
end_location:
row: 6
column: 58
fix: ~
- kind:
BadQuotesDocstring: single
location:
row: 9
column: 29
end_location:
row: 9
column: 53
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: single
location:
row: 2
column: 5
end_location:
row: 2
column: 57
fix: ~
- kind:
BadQuotesDocstring: single
location:
row: 8
column: 5
end_location:
row: 10
column: 8
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: single
location:
row: 1
column: 1
end_location:
row: 3
column: 4
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesDocstring: single
location:
row: 1
column: 1
end_location:
row: 1
column: 50
fix: ~

View File

@@ -0,0 +1,59 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 5
column: 1
end_location:
row: 7
column: 4
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 11
column: 21
end_location:
row: 13
column: 4
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 18
column: 5
end_location:
row: 20
column: 8
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 23
column: 21
end_location:
row: 24
column: 38
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 32
column: 9
end_location:
row: 34
column: 12
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 37
column: 13
end_location:
row: 39
column: 16
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 3
column: 5
end_location:
row: 3
column: 28
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 5
column: 23
end_location:
row: 5
column: 44
fix: ~

View File

@@ -0,0 +1,50 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 3
column: 5
end_location:
row: 3
column: 27
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 11
column: 5
end_location:
row: 11
column: 27
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 15
column: 39
end_location:
row: 17
column: 4
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 17
column: 5
end_location:
row: 17
column: 20
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 21
column: 5
end_location:
row: 21
column: 28
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 4
column: 1
end_location:
row: 6
column: 4
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 9
column: 1
end_location:
row: 11
column: 4
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 2
column: 1
end_location:
row: 2
column: 32
fix: ~
- kind:
BadQuotesMultilineString: double
location:
row: 6
column: 1
end_location:
row: 6
column: 32
fix: ~

View File

@@ -0,0 +1,23 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesInlineString: double
location:
row: 1
column: 25
end_location:
row: 1
column: 46
fix: ~
- kind:
BadQuotesInlineString: double
location:
row: 2
column: 25
end_location:
row: 2
column: 47
fix: ~

View File

@@ -0,0 +1,13 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind: AvoidQuoteEscape
location:
row: 1
column: 26
end_location:
row: 1
column: 48
fix: ~

View File

@@ -0,0 +1,14 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
- kind:
BadQuotesMultilineString: double
location:
row: 1
column: 5
end_location:
row: 3
column: 13
fix: ~

View File

@@ -0,0 +1,6 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
[]

View File

@@ -0,0 +1,6 @@
---
source: src/flake8_quotes/checks.rs
expression: checks
---
[]

View File

@@ -12,7 +12,7 @@ use path_absolutize::Absolutize;
use walkdir::{DirEntry, WalkDir};
use crate::checks::CheckCode;
use crate::settings::{FilePattern, PerFileIgnore};
use crate::settings::types::{FilePattern, PerFileIgnore};
/// Extract the absolute path and basename (as strings) from a Path.
fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
@@ -133,7 +133,7 @@ pub fn ignores_from_path<'a>(
[&pattern_code_pair.pattern].into_iter(),
)
})
.map(|pattern_code_pair| &pattern_code_pair.code)
.flat_map(|pattern_code_pair| &pattern_code_pair.codes)
.collect())
}
@@ -178,7 +178,7 @@ mod tests {
use path_absolutize::Absolutize;
use crate::fs::{extract_path_names, is_excluded, is_included};
use crate::settings::FilePattern;
use crate::settings::types::FilePattern;
#[test]
fn inclusions() {

View File

@@ -6,10 +6,13 @@ use anyhow::Result;
use log::debug;
use rustpython_parser::lexer::LexResult;
use settings::pyproject;
use settings::Settings;
use crate::autofix::fixer::Mode;
use crate::linter::{check_path, tokenize};
use crate::message::Message;
use crate::settings::{RawSettings, Settings};
use crate::settings::configuration::Configuration;
mod ast;
mod autofix;
@@ -18,6 +21,7 @@ pub mod check_ast;
mod check_lines;
mod check_tokens;
pub mod checks;
mod checks_gen;
pub mod cli;
pub mod code_gen;
mod cst;
@@ -26,6 +30,7 @@ mod flake8_bugbear;
mod flake8_builtins;
mod flake8_comprehensions;
mod flake8_print;
mod flake8_quotes;
pub mod fs;
pub mod linter;
pub mod logging;
@@ -36,14 +41,13 @@ pub mod printer;
mod pycodestyle;
mod pydocstyle;
mod pyflakes;
pub mod pyproject;
mod python;
mod pyupgrade;
pub mod settings;
pub mod visibility;
/// Run ruff over Python source code directly.
pub fn check(path: &Path, contents: &str, quiet: bool) -> Result<Vec<Message>> {
pub fn check(path: &Path, contents: &str) -> Result<Vec<Message>> {
// Find the project root and pyproject.toml.
let project_root = pyproject::find_project_root(&[path.to_path_buf()]);
match &project_root {
@@ -56,11 +60,8 @@ pub fn check(path: &Path, contents: &str, quiet: bool) -> Result<Vec<Message>> {
None => debug!("Unable to find pyproject.toml; using default settings..."),
};
let settings = Settings::from_raw(RawSettings::from_pyproject(
&pyproject,
&project_root,
quiet,
)?);
let settings =
Settings::from_configuration(Configuration::from_pyproject(&pyproject, &project_root)?);
// Tokenize once.
let tokens: Vec<LexResult> = tokenize(contents);

View File

@@ -9,6 +9,7 @@ use log::debug;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, parser};
use crate::ast::operations::SourceCodeLocator;
use crate::ast::types::Range;
use crate::autofix::fixer;
use crate::autofix::fixer::fix_file;
@@ -46,13 +47,16 @@ pub(crate) fn check_path(
// Aggregate all checks.
let mut checks: Vec<Check> = vec![];
// Initialize the SourceCodeLocator (which computes offsets lazily).
let locator = SourceCodeLocator::new(contents);
// Run the token-based checks.
if settings
.enabled
.iter()
.any(|check_code| matches!(check_code.lint_source(), LintSource::Tokens))
{
check_tokens(&mut checks, contents, &tokens, settings);
check_tokens(&mut checks, &locator, &tokens, settings);
}
// Run the AST-based checks.
@@ -63,7 +67,7 @@ pub(crate) fn check_path(
{
match parser::parse_program_tokens(tokens, "<filename>") {
Ok(python_ast) => {
checks.extend(check_ast(&python_ast, contents, settings, autofix, path))
checks.extend(check_ast(&python_ast, &locator, settings, autofix, path))
}
Err(parse_error) => {
if settings.enabled.contains(&CheckCode::E999) {
@@ -236,16 +240,12 @@ mod tests {
use crate::autofix::fixer;
use crate::checks::{Check, CheckCode};
use crate::linter;
use crate::linter::tokenize;
use crate::settings;
use crate::{fs, noqa};
use crate::{linter, Settings};
fn check_path(
path: &Path,
settings: &settings::Settings,
autofix: &fixer::Mode,
) -> Result<Vec<Check>> {
fn check_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Result<Vec<Check>> {
let contents = fs::read_file(path)?;
let tokens: Vec<LexResult> = tokenize(&contents);
let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
@@ -258,6 +258,7 @@ mod tests {
#[test_case(CheckCode::B002, Path::new("B002.py"); "B002")]
#[test_case(CheckCode::B007, Path::new("B007.py"); "B007")]
#[test_case(CheckCode::B011, Path::new("B011.py"); "B011")]
#[test_case(CheckCode::B013, Path::new("B013.py"); "B013")]
#[test_case(CheckCode::B014, Path::new("B014.py"); "B014")]
#[test_case(CheckCode::B017, Path::new("B017.py"); "B017")]
#[test_case(CheckCode::B025, Path::new("B025.py"); "B025")]
@@ -372,12 +373,16 @@ mod tests {
#[test_case(CheckCode::N803, Path::new("N803.py"); "N803")]
#[test_case(CheckCode::N804, Path::new("N804.py"); "N804")]
#[test_case(CheckCode::N805, Path::new("N805.py"); "N805")]
#[test_case(CheckCode::N806, Path::new("N806.py"); "N806")]
#[test_case(CheckCode::N807, Path::new("N807.py"); "N807")]
#[test_case(CheckCode::N811, Path::new("N811.py"); "N811")]
#[test_case(CheckCode::N812, Path::new("N812.py"); "N812")]
#[test_case(CheckCode::N813, Path::new("N813.py"); "N813")]
#[test_case(CheckCode::N814, Path::new("N814.py"); "N814")]
#[test_case(CheckCode::N815, Path::new("N815.py"); "N815")]
#[test_case(CheckCode::N816, Path::new("N816.py"); "N816")]
#[test_case(CheckCode::N817, Path::new("N817.py"); "N817")]
#[test_case(CheckCode::N818, Path::new("N818.py"); "N818")]
#[test_case(CheckCode::T201, Path::new("T201.py"); "T201")]
#[test_case(CheckCode::T203, Path::new("T203.py"); "T203")]
#[test_case(CheckCode::U001, Path::new("U001.py"); "U001")]

View File

@@ -25,10 +25,11 @@ use ruff::linter::{lint_path, lint_stdin};
use ruff::logging::set_up_logging;
use ruff::message::Message;
use ruff::printer::{Printer, SerializationFormat};
use ruff::pyproject::{self};
use ruff::settings::CurrentSettings;
use ruff::settings::RawSettings;
use ruff::settings::{FilePattern, PerFileIgnore, Settings};
use ruff::settings::configuration::Configuration;
use ruff::settings::pyproject;
use ruff::settings::types::{FilePattern, PerFileIgnore};
use ruff::settings::user::UserConfiguration;
use ruff::settings::Settings;
use ruff::tell_user;
#[cfg(feature = "update-informer")]
@@ -73,10 +74,14 @@ fn check_for_updates() {
}
}
fn show_settings(settings: RawSettings, project_root: Option<PathBuf>, pyproject: Option<PathBuf>) {
fn show_settings(
configuration: Configuration,
project_root: Option<PathBuf>,
pyproject: Option<PathBuf>,
) {
println!(
"{:#?}",
CurrentSettings::from_settings(settings, project_root, pyproject)
UserConfiguration::from_configuration(configuration, project_root, pyproject)
);
}
@@ -256,15 +261,15 @@ fn inner_main() -> Result<ExitCode> {
.map(|pair| PerFileIgnore::new(pair, &project_root))
.collect();
let mut settings = RawSettings::from_pyproject(&pyproject, &project_root, cli.quiet)?;
let mut configuration = Configuration::from_pyproject(&pyproject, &project_root)?;
if !exclude.is_empty() {
settings.exclude = exclude;
configuration.exclude = exclude;
}
if !extend_exclude.is_empty() {
settings.extend_exclude = extend_exclude;
configuration.extend_exclude = extend_exclude;
}
if !per_file_ignores.is_empty() {
settings.per_file_ignores = per_file_ignores;
configuration.per_file_ignores = per_file_ignores;
}
if !cli.select.is_empty() {
warn_on(
@@ -272,10 +277,10 @@ fn inner_main() -> Result<ExitCode> {
&cli.select,
&cli.ignore,
&cli.extend_ignore,
&settings,
&configuration,
&pyproject,
);
settings.select = cli.select;
configuration.select = cli.select;
}
if !cli.extend_select.is_empty() {
warn_on(
@@ -283,22 +288,22 @@ fn inner_main() -> Result<ExitCode> {
&cli.extend_select,
&cli.ignore,
&cli.extend_ignore,
&settings,
&configuration,
&pyproject,
);
settings.extend_select = cli.extend_select;
configuration.extend_select = cli.extend_select;
}
if !cli.ignore.is_empty() {
settings.ignore = cli.ignore;
configuration.ignore = cli.ignore;
}
if !cli.extend_ignore.is_empty() {
settings.extend_ignore = cli.extend_ignore;
configuration.extend_ignore = cli.extend_ignore;
}
if let Some(target_version) = cli.target_version {
settings.target_version = target_version;
configuration.target_version = target_version;
}
if let Some(dummy_variable_rgx) = cli.dummy_variable_rgx {
settings.dummy_variable_rgx = dummy_variable_rgx;
configuration.dummy_variable_rgx = dummy_variable_rgx;
}
if cli.show_settings && cli.show_files {
@@ -306,11 +311,11 @@ fn inner_main() -> Result<ExitCode> {
return Ok(ExitCode::FAILURE);
}
if cli.show_settings {
show_settings(settings, project_root, pyproject);
show_settings(configuration, project_root, pyproject);
return Ok(ExitCode::SUCCESS);
}
let settings = Settings::from_raw(settings);
let settings = Settings::from_configuration(configuration);
if cli.show_files {
show_files(&cli.files, &settings);

View File

@@ -169,7 +169,6 @@ y = 2
z = x + 1",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
@@ -179,7 +178,6 @@ y = 2
z = x + 1",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
@@ -189,7 +187,6 @@ z = x + 1
",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(
@@ -200,7 +197,6 @@ z = x + 1
",
)
.collect();
println!("{:?}", extract_noqa_line_for(&lxr));
assert_eq!(extract_noqa_line_for(&lxr), empty);
let lxr: Vec<LexResult> = lexer::make_tokenizer(

View File

@@ -1,9 +1,11 @@
use itertools::Itertools;
use rustpython_ast::{Arguments, Expr, ExprKind, Stmt};
use crate::ast::types::{Range, Scope, ScopeKind};
use crate::ast::types::{FunctionScope, Range, Scope, ScopeKind};
use crate::checks::{Check, CheckKind};
use crate::pep8_naming::settings::Settings;
/// N801
pub fn invalid_class_name(class_def: &Stmt, name: &str) -> Option<Check> {
let stripped = name.strip_prefix('_').unwrap_or(name);
if !stripped
@@ -21,8 +23,14 @@ pub fn invalid_class_name(class_def: &Stmt, name: &str) -> Option<Check> {
None
}
pub fn invalid_function_name(func_def: &Stmt, name: &str) -> Option<Check> {
if name.chars().any(|c| c.is_uppercase()) {
/// N802
pub fn invalid_function_name(func_def: &Stmt, name: &str, settings: &Settings) -> Option<Check> {
if !is_lower(name)
&& !settings
.ignore_names
.iter()
.any(|ignore_name| ignore_name == name)
{
return Some(Check::new(
CheckKind::InvalidFunctionName(name.to_string()),
Range::from_located(func_def),
@@ -31,8 +39,9 @@ pub fn invalid_function_name(func_def: &Stmt, name: &str) -> Option<Check> {
None
}
/// N803
pub fn invalid_argument_name(location: Range, name: &str) -> Option<Check> {
if name.chars().any(|c| c.is_uppercase()) {
if !is_lower(name) {
return Some(Check::new(
CheckKind::InvalidArgumentName(name.to_string()),
location,
@@ -41,10 +50,12 @@ pub fn invalid_argument_name(location: Range, name: &str) -> Option<Check> {
None
}
/// N804
pub fn invalid_first_argument_name_for_class_method(
scope: &Scope,
decorator_list: &[Expr],
args: &Arguments,
settings: &Settings,
) -> Option<Check> {
if !matches!(scope.kind, ScopeKind::Class) {
return None;
@@ -52,7 +63,7 @@ pub fn invalid_first_argument_name_for_class_method(
if decorator_list.iter().any(|decorator| {
if let ExprKind::Name { id, .. } = &decorator.node {
id == "classmethod"
settings.classmethod_decorators.contains(id)
} else {
false
}
@@ -69,10 +80,12 @@ pub fn invalid_first_argument_name_for_class_method(
None
}
/// N805
pub fn invalid_first_argument_name_for_method(
scope: &Scope,
decorator_list: &[Expr],
args: &Arguments,
settings: &Settings,
) -> Option<Check> {
if !matches!(scope.kind, ScopeKind::Class) {
return None;
@@ -80,7 +93,8 @@ pub fn invalid_first_argument_name_for_method(
if decorator_list.iter().any(|decorator| {
if let ExprKind::Name { id, .. } = &decorator.node {
id == "classmethod" || id == "staticmethod"
settings.classmethod_decorators.contains(id)
|| settings.staticmethod_decorators.contains(id)
} else {
false
}
@@ -99,6 +113,21 @@ pub fn invalid_first_argument_name_for_method(
None
}
/// N806
pub fn non_lowercase_variable_in_function(scope: &Scope, expr: &Expr, name: &str) -> Option<Check> {
if !matches!(scope.kind, ScopeKind::Function(FunctionScope { .. })) {
return None;
}
if !is_lower(name) {
return Some(Check::new(
CheckKind::NonLowercaseVariableInFunction(name.to_string()),
Range::from_located(expr),
));
}
None
}
/// N807
pub fn dunder_function_name(func_def: &Stmt, scope: &Scope, name: &str) -> Option<Check> {
if matches!(scope.kind, ScopeKind::Class) {
return None;
@@ -114,6 +143,136 @@ pub fn dunder_function_name(func_def: &Stmt, scope: &Scope, name: &str) -> Optio
None
}
/// N811
pub fn constant_imported_as_non_constant(
import_from: &Stmt,
name: &str,
asname: &str,
) -> Option<Check> {
if is_upper(name) && !is_upper(asname) {
return Some(Check::new(
CheckKind::ConstantImportedAsNonConstant(name.to_string(), asname.to_string()),
Range::from_located(import_from),
));
}
None
}
/// N812
pub fn lowercase_imported_as_non_lowercase(
import_from: &Stmt,
name: &str,
asname: &str,
) -> Option<Check> {
if is_lower(name) && asname.to_lowercase() != asname {
return Some(Check::new(
CheckKind::LowercaseImportedAsNonLowercase(name.to_string(), asname.to_string()),
Range::from_located(import_from),
));
}
None
}
/// N813
pub fn camelcase_imported_as_lowercase(
import_from: &Stmt,
name: &str,
asname: &str,
) -> Option<Check> {
if is_camelcase(name) && is_lower(asname) {
return Some(Check::new(
CheckKind::CamelcaseImportedAsLowercase(name.to_string(), asname.to_string()),
Range::from_located(import_from),
));
}
None
}
/// N814
pub fn camelcase_imported_as_constant(
import_from: &Stmt,
name: &str,
asname: &str,
) -> Option<Check> {
if is_camelcase(name) && is_upper(asname) && !is_acronym(name, asname) {
return Some(Check::new(
CheckKind::CamelcaseImportedAsConstant(name.to_string(), asname.to_string()),
Range::from_located(import_from),
));
}
None
}
/// N815: flag mixedCase variable names assigned in a class body.
pub fn mixed_case_variable_in_class_scope(scope: &Scope, expr: &Expr, name: &str) -> Option<Check> {
    // Only assignments directly in a class scope are in scope for this rule.
    if matches!(scope.kind, ScopeKind::Class) && is_mixed_case(name) {
        Some(Check::new(
            CheckKind::MixedCaseVariableInClassScope(name.to_string()),
            Range::from_located(expr),
        ))
    } else {
        None
    }
}
/// N816
pub fn mixed_case_variable_in_global_scope(
scope: &Scope,
expr: &Expr,
name: &str,
) -> Option<Check> {
if !matches!(scope.kind, ScopeKind::Module) {
return None;
}
if is_mixed_case(name) {
return Some(Check::new(
CheckKind::MixedCaseVariableInGlobalScope(name.to_string()),
Range::from_located(expr),
));
}
None
}
/// N817
pub fn camelcase_imported_as_acronym(
import_from: &Stmt,
name: &str,
asname: &str,
) -> Option<Check> {
if is_camelcase(name) && is_upper(asname) && is_acronym(name, asname) {
return Some(Check::new(
CheckKind::CamelcaseImportedAsAcronym(name.to_string(), asname.to_string()),
Range::from_located(import_from),
));
}
None
}
/// N818: classes that directly subclass `Exception` (by that literal base
/// name) should be named with an `Error` suffix.
pub fn error_suffix_on_exception_name(
    class_def: &Stmt,
    bases: &[Expr],
    name: &str,
) -> Option<Check> {
    // Does any base clause name `Exception` directly? (Attribute-qualified or
    // aliased bases are not detected here.)
    let subclasses_exception = bases.iter().any(|base| {
        if let ExprKind::Name { id, .. } = &base.node {
            id == "Exception"
        } else {
            false
        }
    });
    // Collapsed from nested `if`s: fire only for Exception subclasses whose
    // name lacks the `Error` suffix.
    if subclasses_exception && !name.ends_with("Error") {
        return Some(Check::new(
            CheckKind::ErrorSuffixOnExceptionName(name.to_string()),
            Range::from_located(class_def),
        ));
    }
    None
}
fn is_lower(s: &str) -> bool {
let mut cased = false;
for c in s.chars() {
@@ -138,86 +297,27 @@ fn is_upper(s: &str) -> bool {
cased
}
/// An UPPER_CASE constant imported under an alias that is not itself
/// upper-case, e.g. `from mod import MAX as max`.
pub fn constant_imported_as_non_constant(
    import_from: &Stmt,
    name: &str,
    asname: &str,
) -> Option<Check> {
    if !is_upper(name) || is_upper(asname) {
        return None;
    }
    Some(Check::new(
        CheckKind::ConstantImportedAsNonConstant(name.to_string(), asname.to_string()),
        Range::from_located(import_from),
    ))
}
/// A lowercase name imported under an alias that is not lowercase,
/// e.g. `from mod import foo as FOO`.
pub fn lowercase_imported_as_non_lowercase(
    import_from: &Stmt,
    name: &str,
    asname: &str,
) -> Option<Check> {
    // The alias test compares against `to_lowercase()` rather than using
    // `is_lower`, so an alias with any character that changes under
    // lowercasing triggers the check, even if it has no cased characters
    // that `is_lower` would count.
    if is_lower(name) && asname.to_lowercase() != asname {
        return Some(Check::new(
            CheckKind::LowercaseImportedAsNonLowercase(name.to_string(), asname.to_string()),
            Range::from_located(import_from),
        ));
    }
    None
}
/// Whether `name` looks like CamelCase: mixed case (neither all-lower nor
/// all-upper per the sibling helpers) and free of underscores.
fn is_camelcase(name: &str) -> bool {
    let uniform_case = is_lower(name) || is_upper(name);
    !uniform_case && !name.contains('_')
}
/// Whether `name` is mixedCase: not all-lowercase, but beginning (after a
/// single leading underscore, if present, is stripped) with a lowercase
/// character. Empty names and `"_"` yield `false`.
fn is_mixed_case(name: &str) -> bool {
    !is_lower(name)
        && name
            .strip_prefix('_')
            .unwrap_or(name)
            .chars()
            .next()
            // `map_or` with an eager constant default: the original
            // `map_or_else(|| false, …)` lazily computed a literal
            // (clippy: unnecessary_lazy_evaluations).
            .map_or(false, char::is_lowercase)
}
/// Whether `asname` is the acronym of `name`: exactly the uppercase
/// characters of `name`, in order (e.g. `"MLP"` for `"MultiLineParser"`).
fn is_acronym(name: &str, asname: &str) -> bool {
    // Compare the two character streams directly with `Iterator::eq`
    // instead of collecting the uppercase letters into a `String`
    // (the previous `itertools::join("")` allocated on every call and
    // pulled in itertools for a one-liner the standard library covers).
    name.chars().filter(|c| c.is_uppercase()).eq(asname.chars())
}
/// A CamelCase name imported under an all-lowercase alias,
/// e.g. `from mod import MyClass as myclass`.
pub fn camelcase_imported_as_lowercase(
    import_from: &Stmt,
    name: &str,
    asname: &str,
) -> Option<Check> {
    if is_camelcase(name) && is_lower(asname) {
        return Some(Check::new(
            CheckKind::CamelcaseImportedAsLowercase(name.to_string(), asname.to_string()),
            // The whole import statement is flagged.
            Range::from_located(import_from),
        ));
    }
    None
}
/// A CamelCase name imported under an UPPER_CASE alias,
/// e.g. `from mod import MyClass as MYCLASS`. Silent when the alias is
/// exactly the acronym of the name (handled by the acronym check below).
pub fn camelcase_imported_as_constant(
    import_from: &Stmt,
    name: &str,
    asname: &str,
) -> Option<Check> {
    if is_camelcase(name) && is_upper(asname) && !is_acronym(name, asname) {
        return Some(Check::new(
            CheckKind::CamelcaseImportedAsConstant(name.to_string(), asname.to_string()),
            Range::from_located(import_from),
        ));
    }
    None
}
/// A CamelCase name imported under an alias that is exactly its acronym,
/// e.g. `from mod import MultiLineParser as MLP`.
pub fn camelcase_imported_as_acronym(
    import_from: &Stmt,
    name: &str,
    asname: &str,
) -> Option<Check> {
    // Mirrors `camelcase_imported_as_constant`, except the `is_acronym`
    // condition is inverted — together the two partition the upper-case
    // aliases of CamelCase imports.
    if is_camelcase(name) && is_upper(asname) && is_acronym(name, asname) {
        return Some(Check::new(
            CheckKind::CamelcaseImportedAsAcronym(name.to_string(), asname.to_string()),
            Range::from_located(import_from),
        ));
    }
    None
}
#[cfg(test)]
mod tests {
use super::{is_acronym, is_camelcase, is_lower, is_upper};
use super::{is_acronym, is_camelcase, is_lower, is_mixed_case, is_upper};
#[test]
fn test_is_lower() -> () {
@@ -251,6 +351,17 @@ mod tests {
assert!(!is_camelcase("CAMEL_CASE"));
}
#[test]
fn test_is_mixed_case() -> () {
assert!(is_mixed_case("mixedCase"));
assert!(is_mixed_case("mixed_Case"));
assert!(is_mixed_case("_mixed_Case"));
assert!(!is_mixed_case("mixed_case"));
assert!(!is_mixed_case("MIXED_CASE"));
assert!(!is_mixed_case(""));
assert!(!is_mixed_case("_"));
}
#[test]
fn test_is_acronym() -> () {
assert!(is_acronym("AB", "AB"));

View File

@@ -1 +1,2 @@
pub mod checks;
pub mod settings;

View File

@@ -0,0 +1,63 @@
//! Settings for the `pep8-naming` plugin.
use serde::Deserialize;
const IGNORE_NAMES: [&str; 12] = [
"setUp",
"tearDown",
"setUpClass",
"tearDownClass",
"setUpModule",
"tearDownModule",
"asyncSetUp",
"asyncTearDown",
"setUpTestData",
"failureException",
"longMessage",
"maxDiff",
];
const CLASSMETHOD_DECORATORS: [&str; 1] = ["classmethod"];
const STATICMETHOD_DECORATORS: [&str; 1] = ["staticmethod"];
/// User-facing options for the pep8-naming plugin, as parsed from
/// `pyproject.toml` (kebab-case keys; unknown fields rejected).
/// Every field is optional — unset fields fall back to defaults when
/// resolved into `Settings`.
#[derive(Debug, PartialEq, Eq, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    // Names exempted from naming checks (defaults to the unittest-style list).
    pub ignore_names: Option<Vec<String>>,
    // Decorators treated as marking a classmethod.
    pub classmethod_decorators: Option<Vec<String>>,
    // Decorators treated as marking a staticmethod.
    pub staticmethod_decorators: Option<Vec<String>>,
}

/// Resolved pep8-naming settings: same fields as `Options`, but with all
/// defaults applied, so consumers never see `None`.
#[derive(Debug)]
pub struct Settings {
    pub ignore_names: Vec<String>,
    pub classmethod_decorators: Vec<String>,
    pub staticmethod_decorators: Vec<String>,
}
impl Settings {
pub fn from_options(options: Options) -> Self {
Self {
ignore_names: options
.ignore_names
.unwrap_or_else(|| IGNORE_NAMES.map(String::from).to_vec()),
classmethod_decorators: options
.classmethod_decorators
.unwrap_or_else(|| CLASSMETHOD_DECORATORS.map(String::from).to_vec()),
staticmethod_decorators: options
.staticmethod_decorators
.unwrap_or_else(|| STATICMETHOD_DECORATORS.map(String::from).to_vec()),
}
}
}
impl Default for Settings {
fn default() -> Self {
Self {
ignore_names: IGNORE_NAMES.map(String::from).to_vec(),
classmethod_decorators: CLASSMETHOD_DECORATORS.map(String::from).to_vec(),
staticmethod_decorators: STATICMETHOD_DECORATORS.map(String::from).to_vec(),
}
}
}

View File

@@ -1,6 +1,8 @@
use itertools::izip;
use rustpython_ast::Location;
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Unaryop};
use crate::ast::operations::SourceCodeLocator;
use crate::ast::types::{CheckLocator, Range};
use crate::checks::{Check, CheckKind, RejectedCmpop};
@@ -8,7 +10,7 @@ fn is_ambiguous_name(name: &str) -> bool {
name == "l" || name == "I" || name == "O"
}
/// Check AmbiguousVariableName compliance.
/// E741
pub fn ambiguous_variable_name(name: &str, location: Range) -> Option<Check> {
if is_ambiguous_name(name) {
Some(Check::new(
@@ -20,7 +22,7 @@ pub fn ambiguous_variable_name(name: &str, location: Range) -> Option<Check> {
}
}
/// Check AmbiguousClassName compliance.
/// E742
pub fn ambiguous_class_name(name: &str, location: Range) -> Option<Check> {
if is_ambiguous_name(name) {
Some(Check::new(
@@ -32,7 +34,7 @@ pub fn ambiguous_class_name(name: &str, location: Range) -> Option<Check> {
}
}
/// Check AmbiguousFunctionName compliance.
/// E743
pub fn ambiguous_function_name(name: &str, location: Range) -> Option<Check> {
if is_ambiguous_name(name) {
Some(Check::new(
@@ -44,7 +46,7 @@ pub fn ambiguous_function_name(name: &str, location: Range) -> Option<Check> {
}
}
/// Check DoNotAssignLambda compliance.
/// E731
pub fn do_not_assign_lambda(value: &Expr, location: Range) -> Option<Check> {
if let ExprKind::Lambda { .. } = &value.node {
Some(Check::new(CheckKind::DoNotAssignLambda, location))
@@ -53,7 +55,7 @@ pub fn do_not_assign_lambda(value: &Expr, location: Range) -> Option<Check> {
}
}
/// Check NotInTest and NotIsTest compliance.
/// E713, E714
pub fn not_tests(
op: &Unaryop,
operand: &Expr,
@@ -92,7 +94,7 @@ pub fn not_tests(
checks
}
/// Check TrueFalseComparison and NoneComparison compliance.
/// E711, E712
pub fn literal_comparisons(
left: &Expr,
ops: &[Cmpop],
@@ -201,7 +203,7 @@ pub fn literal_comparisons(
checks
}
/// Check TypeComparison compliance.
/// E721
pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
@@ -236,3 +238,92 @@ pub fn type_comparison(ops: &[Cmpop], comparators: &[Expr], location: Range) ->
checks
}
// See: https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals
const VALID_ESCAPE_SEQUENCES: &[char; 23] = &[
    '\n', '\\', '\'', '"', 'a', 'b', 'f', 'n', 'r', 't', 'v', '0', '1', '2', '3', '4', '5', '6',
    '7', 'x', // Escape sequences only recognized in string literals
    'N', 'u', 'U',
];

/// Return the quotation markers used for a String token.
///
/// Triple quotes are tried before single quotes so that a triple-quoted
/// token is not mistaken for a single-quoted one. Panics if the token does
/// not end with any recognized quote.
fn extract_quote(text: &str) -> &str {
    const QUOTES: [&str; 4] = ["'''", "\"\"\"", "'", "\""];
    QUOTES
        .into_iter()
        .find(|quote| text.ends_with(*quote))
        .expect("Unable to find quotation mark for String token.")
}
/// W605: invalid escape sequences in string literals.
///
/// Slices the string token between `start` and `end` out of the source,
/// then scans its body for backslashes followed by a character outside
/// `VALID_ESCAPE_SEQUENCES`, emitting one check per occurrence. Raw
/// strings (prefix containing `r`) are exempt.
pub fn invalid_escape_sequence(
    locator: &SourceCodeLocator,
    start: &Location,
    end: &Location,
) -> Vec<Check> {
    let mut checks = vec![];

    let text = locator.slice_source_code_range(&Range {
        location: *start,
        end_location: *end,
    });

    // Determine whether the string is single- or triple-quoted.
    let quote = extract_quote(text);
    let quote_pos = text.find(quote).unwrap();
    // Everything before the opening quote is the prefix (e.g. `rb`),
    // lowercased so the raw-string test below is case-insensitive.
    let prefix = text[..quote_pos].to_lowercase();
    // Body excludes prefix, opening quote, and closing quote.
    let body = &text[(quote_pos + quote.len())..(text.len() - quote.len())];

    if !prefix.contains('r') {
        let mut col_offset = 0;
        let mut row_offset = 0;
        // Walk the body as (current, next) character pairs; a trailing
        // lone character can never start an escape, so the pairwise walk
        // covers every candidate.
        let mut in_escape = false;
        let mut chars = body.chars();
        let mut current = chars.next();
        let mut next = chars.next();
        while let (Some(current_char), Some(next_char)) = (current, next) {
            // If we see an escaped backslash, avoid treating the character _after_ the
            // escaped backslash as itself an escaped character.
            if in_escape {
                in_escape = false;
            } else {
                in_escape = current_char == '\\' && next_char == '\\';
                if current_char == '\\' && !VALID_ESCAPE_SEQUENCES.contains(&next_char) {
                    // Compute the location of the escape sequence by offsetting the location of the
                    // string token by the characters we've seen thus far.
                    let location = if row_offset == 0 {
                        // Same line as the token start: offset past the
                        // prefix and opening quote too.
                        Location::new(
                            start.row() + row_offset,
                            start.column() + prefix.len() + quote.len() + col_offset,
                        )
                    } else {
                        // NOTE(review): the `+ 1` suggests columns are
                        // 1-based on continuation lines — confirm against
                        // `Location`'s convention.
                        Location::new(start.row() + row_offset, col_offset + 1)
                    };
                    // The range covers exactly the offending character.
                    let end_location = Location::new(location.row(), location.column() + 1);
                    checks.push(Check::new(
                        CheckKind::InvalidEscapeSequence(next_char),
                        Range {
                            location,
                            end_location,
                        },
                    ))
                }
            }

            // Track the offset from the start position as we iterate over the body.
            if current_char == '\n' {
                col_offset = 0;
                row_offset += 1;
            } else {
                col_offset += 1;
            }

            current = next;
            next = chars.next();
        }
    }

    checks
}

View File

@@ -162,7 +162,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
} = &docstring.node
{
if checker.settings.enabled.contains(&CheckCode::D201) {
let (before, _, _) = checker.get_locator().partition_source_code_at(
let (before, _, _) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@@ -190,7 +190,7 @@ pub fn blank_before_after_function(checker: &mut Checker, definition: &Definitio
}
if checker.settings.enabled.contains(&CheckCode::D202) {
let (_, _, after) = checker.get_locator().partition_source_code_at(
let (_, _, after) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@@ -253,7 +253,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
if checker.settings.enabled.contains(&CheckCode::D203)
|| checker.settings.enabled.contains(&CheckCode::D211)
{
let (before, _, _) = checker.get_locator().partition_source_code_at(
let (before, _, _) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@@ -300,7 +300,7 @@ pub fn blank_before_after_class(checker: &mut Checker, definition: &Definition)
}
if checker.settings.enabled.contains(&CheckCode::D204) {
let (_, _, after) = checker.get_locator().partition_source_code_at(
let (_, _, after) = checker.locator.partition_source_code_at(
&Range::from_located(parent),
&Range::from_located(docstring),
);
@@ -530,7 +530,7 @@ pub fn newline_after_last_paragraph(checker: &mut Checker, definition: &Definiti
}
if line_count > 1 {
let content = checker
.get_locator()
.locator
.slice_source_code_range(&Range::from_located(docstring));
if let Some(last_line) = content.lines().last().map(|line| line.trim()) {
if last_line != "\"\"\"" && last_line != "'''" {
@@ -583,7 +583,7 @@ pub fn no_surrounding_whitespace(checker: &mut Checker, definition: &Definition)
);
if checker.patch() {
if let Some(first_line) = checker
.get_locator()
.locator
.slice_source_code_range(&Range::from_located(docstring))
.lines()
.next()
@@ -629,7 +629,7 @@ pub fn multi_line_summary_start(checker: &mut Checker, definition: &Definition)
{
if string.lines().nth(1).is_some() {
if let Some(first_line) = checker
.get_locator()
.locator
.slice_source_code_range(&Range::from_located(docstring))
.lines()
.next()
@@ -665,7 +665,7 @@ pub fn triple_quotes(checker: &mut Checker, definition: &Definition) {
} = &docstring.node
{
if let Some(first_line) = checker
.get_locator()
.locator
.slice_source_code_range(&Range::from_located(docstring))
.lines()
.next()

View File

@@ -10,7 +10,7 @@ use rustpython_parser::ast::{
use crate::ast::types::{BindingKind, CheckLocator, FunctionScope, Range, Scope, ScopeKind};
use crate::checks::{Check, CheckKind};
/// Check IfTuple compliance.
/// F634
pub fn if_tuple(test: &Expr, location: Range) -> Option<Check> {
if let ExprKind::Tuple { elts, .. } = &test.node {
if !elts.is_empty() {
@@ -20,7 +20,7 @@ pub fn if_tuple(test: &Expr, location: Range) -> Option<Check> {
None
}
/// Check AssertTuple compliance.
/// F631
pub fn assert_tuple(test: &Expr, location: Range) -> Option<Check> {
if let ExprKind::Tuple { elts, .. } = &test.node {
if !elts.is_empty() {
@@ -30,7 +30,7 @@ pub fn assert_tuple(test: &Expr, location: Range) -> Option<Check> {
None
}
/// Check UnusedVariable compliance.
/// F841
pub fn unused_variables(
scope: &Scope,
locator: &dyn CheckLocator,
@@ -63,7 +63,7 @@ pub fn unused_variables(
checks
}
/// Check DefaultExceptNotLast compliance.
/// F707
pub fn default_except_not_last(handlers: &[Excepthandler]) -> Option<Check> {
for (idx, handler) in handlers.iter().enumerate() {
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
@@ -78,7 +78,7 @@ pub fn default_except_not_last(handlers: &[Excepthandler]) -> Option<Check> {
None
}
/// Check RaiseNotImplemented compliance.
/// F901
pub fn raise_not_implemented(expr: &Expr) -> Option<Check> {
match &expr.node {
ExprKind::Call { func, .. } => {
@@ -105,7 +105,7 @@ pub fn raise_not_implemented(expr: &Expr) -> Option<Check> {
None
}
/// Check DuplicateArgumentName compliance.
/// F831
pub fn duplicate_arguments(arguments: &Arguments) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
@@ -153,7 +153,7 @@ fn convert_to_value(expr: &Expr) -> Option<DictionaryKey> {
}
}
/// Check MultiValueRepeatedKeyLiteral and MultiValueRepeatedKeyVariable compliance.
/// F601, F602
pub fn repeated_keys(
keys: &[Expr],
check_repeated_literals: bool,
@@ -215,7 +215,7 @@ fn is_constant_non_singleton(expr: &Expr) -> bool {
is_constant(expr) && !is_singleton(expr)
}
/// Check IsLiteral compliance.
/// F632
pub fn is_literal(left: &Expr, ops: &[Cmpop], comparators: &[Expr], location: Range) -> Vec<Check> {
let mut checks: Vec<Check> = vec![];
@@ -232,7 +232,7 @@ pub fn is_literal(left: &Expr, ops: &[Cmpop], comparators: &[Expr], location: Ra
checks
}
/// Check TwoStarredExpressions and TooManyExpressionsInStarredAssignment compliance.
/// F621, F622
pub fn starred_expressions(
elts: &[Expr],
check_too_many_expressions: bool,
@@ -262,7 +262,7 @@ pub fn starred_expressions(
None
}
/// Check BreakOutsideLoop compliance.
/// F701
pub fn break_outside_loop(
stmt: &Stmt,
parents: &[&Stmt],
@@ -303,7 +303,7 @@ pub fn break_outside_loop(
}
}
/// Check ContinueOutsideLoop compliance.
/// F702
pub fn continue_outside_loop(
stmt: &Stmt,
parents: &[&Stmt],

View File

@@ -5,7 +5,7 @@ use crate::ast::types::{Binding, BindingKind, Range, Scope, ScopeKind};
use crate::checks::{Check, CheckKind};
use crate::pyupgrade::types::Primitive;
/// Check that `super()` has no args
/// U008
pub fn super_args(
scope: &Scope,
parents: &[&Stmt],
@@ -70,7 +70,7 @@ pub fn super_args(
None
}
/// Check UselessMetaclassType compliance.
/// U001
pub fn useless_metaclass_type(targets: &[Expr], value: &Expr, location: Range) -> Option<Check> {
if targets.len() == 1 {
if let ExprKind::Name { id, .. } = targets.first().map(|expr| &expr.node).unwrap() {
@@ -86,7 +86,7 @@ pub fn useless_metaclass_type(targets: &[Expr], value: &Expr, location: Range) -
None
}
/// Check UnnecessaryAbspath compliance.
/// U002
pub fn unnecessary_abspath(func: &Expr, args: &[Expr], location: Range) -> Option<Check> {
// Validate the arguments.
if args.len() == 1 {
@@ -106,7 +106,7 @@ pub fn unnecessary_abspath(func: &Expr, args: &[Expr], location: Range) -> Optio
None
}
/// Check UselessObjectInheritance compliance.
/// U004
pub fn useless_object_inheritance(name: &str, bases: &[Expr], scope: &Scope) -> Option<Check> {
for expr in bases {
if let ExprKind::Name { id, .. } = &expr.node {
@@ -131,7 +131,7 @@ pub fn useless_object_inheritance(name: &str, bases: &[Expr], scope: &Scope) ->
None
}
/// Check TypeOfPrimitive compliance.
/// U003
pub fn type_of_primitive(func: &Expr, args: &[Expr], location: Range) -> Option<Check> {
// Validate the arguments.
if args.len() == 1 {

View File

@@ -17,9 +17,7 @@ pub fn super_call_with_parameters(checker: &mut Checker, expr: &Expr, func: &Exp
.collect();
if let Some(mut check) = checks::super_args(scope, &parents, expr, func, args) {
if checker.patch() {
if let Some(fix) =
pyupgrade::fixes::remove_super_arguments(checker.get_locator(), expr)
{
if let Some(fix) = pyupgrade::fixes::remove_super_arguments(checker.locator, expr) {
check.amend(fix);
}
}

View File

@@ -15,7 +15,7 @@ pub fn useless_object_inheritance(
if let Some(mut check) = checks::useless_object_inheritance(name, bases, scope) {
if checker.patch() {
if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
checker.get_locator(),
checker.locator,
&stmt.location,
check.location,
bases,

View File

@@ -1,321 +0,0 @@
use std::collections::BTreeSet;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::{anyhow, Result};
use glob::Pattern;
use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use crate::checks::{CheckCategory, CheckCode};
use crate::fs;
use crate::pyproject::{load_config, StrCheckCodePair};
#[derive(Clone, Debug, PartialOrd, PartialEq, Eq, Serialize, Deserialize)]
pub enum PythonVersion {
Py33,
Py34,
Py35,
Py36,
Py37,
Py38,
Py39,
Py310,
Py311,
}
impl FromStr for PythonVersion {
    type Err = anyhow::Error;

    /// Parse a `pyproject.toml`-style version tag (`"py33"` … `"py311"`).
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let versions = [
            ("py33", PythonVersion::Py33),
            ("py34", PythonVersion::Py34),
            ("py35", PythonVersion::Py35),
            ("py36", PythonVersion::Py36),
            ("py37", PythonVersion::Py37),
            ("py38", PythonVersion::Py38),
            ("py39", PythonVersion::Py39),
            ("py310", PythonVersion::Py310),
            ("py311", PythonVersion::Py311),
        ];
        versions
            .into_iter()
            .find_map(|(tag, version)| (tag == string).then(|| version))
            .ok_or_else(|| anyhow!("Unknown version: {}", string))
    }
}
#[derive(Debug, Clone, Hash)]
pub enum FilePattern {
Simple(&'static str),
Complex(Pattern, Option<Pattern>),
}
impl FilePattern {
    /// Build an exclusion pattern from a user-supplied string, anchoring
    /// it to the project root (when known) so relative patterns match
    /// consistently regardless of the working directory.
    pub fn from_user(pattern: &str, project_root: &Option<PathBuf>) -> Self {
        let path = Path::new(pattern);
        let absolute_path = match project_root {
            Some(project_root) => fs::normalize_path_to(path, project_root),
            None => fs::normalize_path(path),
        };
        // NOTE(review): `expect` here means a malformed user glob aborts
        // the process rather than surfacing a configuration error.
        let absolute = Pattern::new(&absolute_path.to_string_lossy()).expect("Invalid pattern.");
        // A bare name (no path separator) is also matched against file
        // basenames, not just absolute paths.
        let basename = if !pattern.contains(std::path::MAIN_SEPARATOR) {
            Some(Pattern::new(pattern).expect("Invalid pattern."))
        } else {
            None
        };
        FilePattern::Complex(absolute, basename)
    }
}
#[derive(Debug, Clone, Hash)]
pub struct PerFileIgnore {
pub pattern: FilePattern,
pub code: CheckCode,
}
impl PerFileIgnore {
pub fn new(user_in: StrCheckCodePair, project_root: &Option<PathBuf>) -> Self {
let pattern = FilePattern::from_user(user_in.pattern.as_str(), project_root);
let code = user_in.code;
Self { pattern, code }
}
}
#[derive(Debug)]
pub struct RawSettings {
pub dummy_variable_rgx: Regex,
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
pub extend_ignore: Vec<CheckCode>,
pub extend_select: Vec<CheckCode>,
pub ignore: Vec<CheckCode>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
pub select: Vec<CheckCode>,
pub target_version: PythonVersion,
}
static DEFAULT_EXCLUDE: Lazy<Vec<FilePattern>> = Lazy::new(|| {
vec![
FilePattern::Simple(".bzr"),
FilePattern::Simple(".direnv"),
FilePattern::Simple(".eggs"),
FilePattern::Simple(".git"),
FilePattern::Simple(".hg"),
FilePattern::Simple(".mypy_cache"),
FilePattern::Simple(".nox"),
FilePattern::Simple(".pants.d"),
FilePattern::Simple(".ruff_cache"),
FilePattern::Simple(".svn"),
FilePattern::Simple(".tox"),
FilePattern::Simple(".venv"),
FilePattern::Simple("__pypackages__"),
FilePattern::Simple("_build"),
FilePattern::Simple("buck-out"),
FilePattern::Simple("build"),
FilePattern::Simple("dist"),
FilePattern::Simple("node_modules"),
FilePattern::Simple("venv"),
]
});
static DEFAULT_DUMMY_VARIABLE_RGX: Lazy<Regex> =
Lazy::new(|| Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap());
impl RawSettings {
pub fn from_pyproject(
pyproject: &Option<PathBuf>,
project_root: &Option<PathBuf>,
quiet: bool,
) -> Result<Self> {
let config = load_config(pyproject, quiet)?;
Ok(RawSettings {
dummy_variable_rgx: match config.dummy_variable_rgx {
Some(pattern) => Regex::new(&pattern)
.map_err(|e| anyhow!("Invalid dummy-variable-rgx value: {e}"))?,
None => DEFAULT_DUMMY_VARIABLE_RGX.clone(),
},
target_version: config.target_version.unwrap_or(PythonVersion::Py310),
exclude: config
.exclude
.map(|paths| {
paths
.iter()
.map(|path| FilePattern::from_user(path, project_root))
.collect()
})
.unwrap_or_else(|| DEFAULT_EXCLUDE.clone()),
extend_exclude: config
.extend_exclude
.iter()
.map(|path| FilePattern::from_user(path, project_root))
.collect(),
extend_ignore: config.extend_ignore,
select: config.select.unwrap_or_else(|| {
CheckCode::iter()
.filter(|code| {
matches!(
code.category(),
CheckCategory::PycodestyleError | CheckCategory::Pyflakes
)
})
.collect()
}),
extend_select: config.extend_select,
ignore: config.ignore,
line_length: config.line_length.unwrap_or(88),
per_file_ignores: config
.per_file_ignores
.into_iter()
.map(|pair| PerFileIgnore::new(pair, project_root))
.collect(),
})
}
}
#[derive(Debug)]
pub struct Settings {
pub dummy_variable_rgx: Regex,
pub enabled: BTreeSet<CheckCode>,
pub exclude: Vec<FilePattern>,
pub extend_exclude: Vec<FilePattern>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
pub target_version: PythonVersion,
}
impl Settings {
    /// Resolve `RawSettings` into effective settings: the enabled set is
    /// select + extend-select, minus ignore and extend-ignore.
    pub fn from_raw(settings: RawSettings) -> Self {
        // Materialize the set of enabled CheckCodes.
        let mut enabled: BTreeSet<CheckCode> = BTreeSet::new();
        enabled.extend(settings.select);
        enabled.extend(settings.extend_select);
        for code in &settings.ignore {
            enabled.remove(code);
        }
        for code in &settings.extend_ignore {
            enabled.remove(code);
        }
        Self {
            dummy_variable_rgx: settings.dummy_variable_rgx,
            enabled,
            exclude: settings.exclude,
            extend_exclude: settings.extend_exclude,
            line_length: settings.line_length,
            per_file_ignores: settings.per_file_ignores,
            // Bug fix: propagate the configured target version instead of
            // discarding it in favor of a hard-coded `Py310`.
            target_version: settings.target_version,
        }
    }

    /// Convenience constructor: settings with exactly one check enabled
    /// and every other field at its default.
    pub fn for_rule(check_code: CheckCode) -> Self {
        Self {
            dummy_variable_rgx: DEFAULT_DUMMY_VARIABLE_RGX.clone(),
            enabled: BTreeSet::from([check_code]),
            exclude: vec![],
            extend_exclude: vec![],
            line_length: 88,
            per_file_ignores: vec![],
            target_version: PythonVersion::Py310,
        }
    }

    /// Convenience constructor: settings with the given checks enabled
    /// and every other field at its default.
    pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
        Self {
            dummy_variable_rgx: DEFAULT_DUMMY_VARIABLE_RGX.clone(),
            enabled: BTreeSet::from_iter(check_codes),
            exclude: vec![],
            extend_exclude: vec![],
            line_length: 88,
            per_file_ignores: vec![],
            target_version: PythonVersion::Py310,
        }
    }
}
impl Hash for Settings {
    /// Hash the facets of the settings that affect check results, for use
    /// as (part of) a cache key.
    ///
    /// NOTE(review): `exclude`, `extend_exclude`, and `target_version` are
    /// omitted from the hash — confirm that is intentional for callers
    /// keying caches on `Settings`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.line_length.hash(state);
        // `Regex` itself is not `Hash`; hash its source pattern instead.
        self.dummy_variable_rgx.as_str().hash(state);
        for value in self.enabled.iter() {
            value.hash(state);
        }
        for value in self.per_file_ignores.iter() {
            value.hash(state);
        }
    }
}
/// Struct to render user-facing exclusion patterns.
#[derive(Debug)]
#[allow(dead_code)]
pub struct Exclusion {
basename: Option<String>,
absolute: Option<String>,
}
impl Exclusion {
pub fn from_file_pattern(file_pattern: FilePattern) -> Self {
match file_pattern {
FilePattern::Simple(basename) => Exclusion {
basename: Some(basename.to_string()),
absolute: None,
},
FilePattern::Complex(absolute, basename) => Exclusion {
basename: basename.map(|pattern| pattern.to_string()),
absolute: Some(absolute.to_string()),
},
}
}
}
/// Struct to render user-facing Settings.
#[derive(Debug)]
pub struct CurrentSettings {
pub dummy_variable_rgx: Regex,
pub exclude: Vec<Exclusion>,
pub extend_exclude: Vec<Exclusion>,
pub extend_ignore: Vec<CheckCode>,
pub extend_select: Vec<CheckCode>,
pub ignore: Vec<CheckCode>,
pub line_length: usize,
pub per_file_ignores: Vec<PerFileIgnore>,
pub select: Vec<CheckCode>,
pub target_version: PythonVersion,
pub project_root: Option<PathBuf>,
pub pyproject: Option<PathBuf>,
}
impl CurrentSettings {
pub fn from_settings(
settings: RawSettings,
project_root: Option<PathBuf>,
pyproject: Option<PathBuf>,
) -> Self {
Self {
dummy_variable_rgx: settings.dummy_variable_rgx,
exclude: settings
.exclude
.into_iter()
.map(Exclusion::from_file_pattern)
.collect(),
extend_exclude: settings
.extend_exclude
.into_iter()
.map(Exclusion::from_file_pattern)
.collect(),
extend_ignore: settings.extend_ignore,
extend_select: settings.extend_select,
ignore: settings.ignore,
line_length: settings.line_length,
per_file_ignores: settings.per_file_ignores,
select: settings.select,
target_version: settings.target_version,
project_root,
pyproject,
}
}
}

View File

@@ -0,0 +1,109 @@
//! User-provided program settings, taking into account pyproject.toml and command-line options.
//! Structure mirrors the user-facing representation of the various parameters.
use std::path::PathBuf;
use anyhow::{anyhow, Result};
use once_cell::sync::Lazy;
use regex::Regex;
use crate::checks_gen::CheckCodePrefix;
use crate::settings::pyproject::load_options;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{flake8_quotes, pep8_naming};
/// User-provided program settings after parsing, mirroring the user-facing
/// `Options` shape but with defaults applied. Still expressed in terms of
/// `CheckCodePrefix` selections; the prefix resolution into concrete check
/// codes happens later, in `Settings`.
#[derive(Debug)]
pub struct Configuration {
    pub dummy_variable_rgx: Regex,
    pub exclude: Vec<FilePattern>,
    pub extend_exclude: Vec<FilePattern>,
    pub extend_ignore: Vec<CheckCodePrefix>,
    pub extend_select: Vec<CheckCodePrefix>,
    pub ignore: Vec<CheckCodePrefix>,
    pub line_length: usize,
    pub per_file_ignores: Vec<PerFileIgnore>,
    pub select: Vec<CheckCodePrefix>,
    pub target_version: PythonVersion,
    // Plugins
    pub flake8_quotes: flake8_quotes::settings::Settings,
    pub pep8_naming: pep8_naming::settings::Settings,
}
static DEFAULT_EXCLUDE: Lazy<Vec<FilePattern>> = Lazy::new(|| {
vec![
FilePattern::Simple(".bzr"),
FilePattern::Simple(".direnv"),
FilePattern::Simple(".eggs"),
FilePattern::Simple(".git"),
FilePattern::Simple(".hg"),
FilePattern::Simple(".mypy_cache"),
FilePattern::Simple(".nox"),
FilePattern::Simple(".pants.d"),
FilePattern::Simple(".ruff_cache"),
FilePattern::Simple(".svn"),
FilePattern::Simple(".tox"),
FilePattern::Simple(".venv"),
FilePattern::Simple("__pypackages__"),
FilePattern::Simple("_build"),
FilePattern::Simple("buck-out"),
FilePattern::Simple("build"),
FilePattern::Simple("dist"),
FilePattern::Simple("node_modules"),
FilePattern::Simple("venv"),
]
});
static DEFAULT_DUMMY_VARIABLE_RGX: Lazy<Regex> =
Lazy::new(|| Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap());
impl Configuration {
    /// Load options from `pyproject.toml` (if any) and apply defaults for
    /// every unset field.
    ///
    /// Returns an error if the TOML cannot be loaded or the
    /// `dummy-variable-rgx` value is not a valid regex.
    pub fn from_pyproject(
        pyproject: &Option<PathBuf>,
        project_root: &Option<PathBuf>,
    ) -> Result<Self> {
        let options = load_options(pyproject)?;
        Ok(Configuration {
            dummy_variable_rgx: match options.dummy_variable_rgx {
                Some(pattern) => Regex::new(&pattern)
                    .map_err(|e| anyhow!("Invalid dummy-variable-rgx value: {e}"))?,
                None => DEFAULT_DUMMY_VARIABLE_RGX.clone(),
            },
            target_version: options.target_version.unwrap_or(PythonVersion::Py310),
            // User exclude patterns replace the defaults entirely;
            // `extend_exclude` is additive.
            exclude: options
                .exclude
                .map(|paths| {
                    paths
                        .iter()
                        .map(|path| FilePattern::from_user(path, project_root))
                        .collect()
                })
                .unwrap_or_else(|| DEFAULT_EXCLUDE.clone()),
            extend_exclude: options
                .extend_exclude
                .iter()
                .map(|path| FilePattern::from_user(path, project_root))
                .collect(),
            extend_ignore: options.extend_ignore,
            // Default selection: pycodestyle errors and pyflakes.
            select: options
                .select
                .unwrap_or_else(|| vec![CheckCodePrefix::E, CheckCodePrefix::F]),
            extend_select: options.extend_select,
            ignore: options.ignore,
            line_length: options.line_length.unwrap_or(88),
            per_file_ignores: options
                .per_file_ignores
                .into_iter()
                .map(|pair| PerFileIgnore::new(pair, project_root))
                .collect(),
            // Plugins
            flake8_quotes: options
                .flake8_quotes
                .map(flake8_quotes::settings::Settings::from_options)
                .unwrap_or_default(),
            pep8_naming: options
                .pep8_naming
                .map(pep8_naming::settings::Settings::from_options)
                .unwrap_or_default(),
        })
    }
}

165
src/settings/mod.rs Normal file
View File

@@ -0,0 +1,165 @@
//! Effective program settings, taking into account pyproject.toml and command-line options.
//! Structure is optimized for internal usage, as opposed to external visibility or parsing.
use std::collections::BTreeSet;
use std::hash::{Hash, Hasher};
use regex::Regex;
use crate::checks::CheckCode;
use crate::checks_gen::{CheckCodePrefix, PrefixSpecificity};
use crate::settings::configuration::Configuration;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{flake8_quotes, pep8_naming};
pub mod configuration;
pub mod options;
pub mod pyproject;
pub mod types;
pub mod user;
/// Effective program settings: `Configuration` with all check-code
/// prefixes resolved into the concrete set of enabled `CheckCode`s.
/// This is the form the checkers consume.
#[derive(Debug)]
pub struct Settings {
    pub dummy_variable_rgx: Regex,
    // Fully-resolved set of enabled checks (see `resolve_codes`).
    pub enabled: BTreeSet<CheckCode>,
    pub exclude: Vec<FilePattern>,
    pub extend_exclude: Vec<FilePattern>,
    pub line_length: usize,
    pub per_file_ignores: Vec<PerFileIgnore>,
    pub target_version: PythonVersion,
    // Plugins
    pub flake8_quotes: flake8_quotes::settings::Settings,
    pub pep8_naming: pep8_naming::settings::Settings,
}
impl Settings {
    /// Derive effective settings from a resolved `Configuration`,
    /// expanding select/ignore prefixes into the concrete enabled set.
    pub fn from_configuration(config: Configuration) -> Self {
        Self {
            dummy_variable_rgx: config.dummy_variable_rgx,
            enabled: resolve_codes(
                &config.select,
                &config.extend_select,
                &config.ignore,
                &config.extend_ignore,
            ),
            exclude: config.exclude,
            extend_exclude: config.extend_exclude,
            flake8_quotes: config.flake8_quotes,
            line_length: config.line_length,
            pep8_naming: config.pep8_naming,
            per_file_ignores: config.per_file_ignores,
            target_version: config.target_version,
        }
    }

    /// Convenience constructor: settings with exactly one check enabled.
    pub fn for_rule(check_code: CheckCode) -> Self {
        Self {
            // NOTE(review): this regex literal is duplicated below and in
            // settings/configuration.rs — consider a shared constant.
            dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
            enabled: BTreeSet::from([check_code]),
            exclude: vec![],
            extend_exclude: vec![],
            line_length: 88,
            per_file_ignores: vec![],
            target_version: PythonVersion::Py310,
            flake8_quotes: Default::default(),
            pep8_naming: Default::default(),
        }
    }

    /// Convenience constructor: settings with the given checks enabled.
    pub fn for_rules(check_codes: Vec<CheckCode>) -> Self {
        Self {
            dummy_variable_rgx: Regex::new("^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$").unwrap(),
            enabled: BTreeSet::from_iter(check_codes),
            exclude: vec![],
            extend_exclude: vec![],
            line_length: 88,
            per_file_ignores: vec![],
            target_version: PythonVersion::Py310,
            flake8_quotes: Default::default(),
            pep8_naming: Default::default(),
        }
    }
}
impl Hash for Settings {
fn hash<H: Hasher>(&self, state: &mut H) {
self.line_length.hash(state);
self.dummy_variable_rgx.as_str().hash(state);
for value in self.enabled.iter() {
value.hash(state);
}
for value in self.per_file_ignores.iter() {
value.hash(state);
}
}
}
/// Given a set of selected and ignored prefixes, resolve the set of enabled error codes.
///
/// Prefixes are applied from least to most specific, so a more specific
/// selection always wins over a less specific ignore (e.g. selecting
/// `W605` survives ignoring `W`), regardless of the order the user wrote
/// them in. At each specificity level, ignores are applied after selects.
fn resolve_codes(
    select: &[CheckCodePrefix],
    extend_select: &[CheckCodePrefix],
    ignore: &[CheckCodePrefix],
    extend_ignore: &[CheckCodePrefix],
) -> BTreeSet<CheckCode> {
    let mut codes: BTreeSet<CheckCode> = BTreeSet::new();
    for specificity in [
        PrefixSpecificity::Category,
        PrefixSpecificity::Hundreds,
        PrefixSpecificity::Tens,
        PrefixSpecificity::Explicit,
    ] {
        // Selections (base and extended) at this level...
        for prefix in select.iter().chain(extend_select) {
            if prefix.specificity() == specificity {
                codes.extend(prefix.codes());
            }
        }
        // ...then ignores (base and extended) at this level.
        for prefix in ignore.iter().chain(extend_ignore) {
            if prefix.specificity() == specificity {
                for code in prefix.codes() {
                    codes.remove(&code);
                }
            }
        }
    }
    codes
}
#[cfg(test)]
mod tests {
use std::collections::BTreeSet;
use crate::checks::CheckCode;
use crate::checks_gen::CheckCodePrefix;
use crate::settings::resolve_codes;
#[test]
fn resolver() {
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[], &[]);
let expected = BTreeSet::from_iter([CheckCode::W292, CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W6], &[], &[], &[]);
let expected = BTreeSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W], &[], &[CheckCodePrefix::W292], &[]);
let expected = BTreeSet::from_iter([CheckCode::W605]);
assert_eq!(actual, expected);
let actual = resolve_codes(&[CheckCodePrefix::W605], &[], &[CheckCodePrefix::W605], &[]);
let expected = BTreeSet::from_iter([]);
assert_eq!(actual, expected);
}
}

30
src/settings/options.rs Normal file
View File

@@ -0,0 +1,30 @@
//! Options that the user can provide via pyproject.toml.
use serde::Deserialize;
use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::{PythonVersion, StrCheckCodePair};
use crate::{flake8_quotes, pep8_naming};
/// The `[tool.ruff]` table as the user writes it in pyproject.toml.
/// Keys are kebab-case (e.g. `line-length`) and unknown keys are rejected
/// at parse time via `deny_unknown_fields`.
#[derive(Debug, PartialEq, Eq, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Options {
    /// Maximum line length; `None` when unset.
    pub line_length: Option<usize>,
    /// Exclusion patterns; `None` when unset.
    pub exclude: Option<Vec<String>>,
    /// Extra exclusion patterns (empty when omitted).
    #[serde(default)]
    pub extend_exclude: Vec<String>,
    /// Check-code prefixes to enable; `None` when unset.
    pub select: Option<Vec<CheckCodePrefix>>,
    /// Extra check-code prefixes to enable.
    #[serde(default)]
    pub extend_select: Vec<CheckCodePrefix>,
    /// Check-code prefixes to disable.
    #[serde(default)]
    pub ignore: Vec<CheckCodePrefix>,
    /// Extra check-code prefixes to disable.
    #[serde(default)]
    pub extend_ignore: Vec<CheckCodePrefix>,
    /// Per-file `"<pattern>:<code>"` ignore entries.
    #[serde(default)]
    pub per_file_ignores: Vec<StrCheckCodePair>,
    // Stored as the raw string here; presumably compiled to a regex
    // downstream — TODO confirm at the consumer.
    pub dummy_variable_rgx: Option<String>,
    /// Target Python version; `None` when unset.
    pub target_version: Option<PythonVersion>,
    // Plugins
    pub flake8_quotes: Option<flake8_quotes::settings::Options>,
    pub pep8_naming: Option<pep8_naming::settings::Options>,
}

View File

@@ -1,105 +1,27 @@
//! Utilities for locating (and extracting configuration from) a pyproject.toml.
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::{anyhow, Result};
use anyhow::Result;
use common_path::common_path_all;
use log::debug;
use path_absolutize::Absolutize;
use serde::de;
use serde::{Deserialize, Deserializer};
use serde::Deserialize;
use crate::checks::CheckCode;
use crate::fs;
use crate::settings::PythonVersion;
pub fn load_config(pyproject: &Option<PathBuf>, quiet: bool) -> Result<Config> {
match pyproject {
Some(pyproject) => Ok(parse_pyproject_toml(pyproject)?
.tool
.and_then(|tool| tool.ruff)
.unwrap_or_default()),
None => {
if !quiet {
eprintln!("No pyproject.toml found.");
eprintln!("Falling back to default configuration...");
}
Ok(Default::default())
}
}
}
/// The `[tool.ruff]` table as deserialized from pyproject.toml.
/// Keys are kebab-case; unknown keys are rejected via `deny_unknown_fields`.
#[derive(Debug, PartialEq, Eq, Deserialize, Default)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Config {
    /// Maximum line length; `None` when unset.
    pub line_length: Option<usize>,
    /// Exclusion patterns; `None` when unset.
    pub exclude: Option<Vec<String>>,
    /// Extra exclusion patterns (empty when omitted).
    #[serde(default)]
    pub extend_exclude: Vec<String>,
    /// Check codes to enable; `None` when unset.
    pub select: Option<Vec<CheckCode>>,
    /// Extra check codes to enable.
    #[serde(default)]
    pub extend_select: Vec<CheckCode>,
    /// Check codes to disable.
    #[serde(default)]
    pub ignore: Vec<CheckCode>,
    /// Extra check codes to disable.
    #[serde(default)]
    pub extend_ignore: Vec<CheckCode>,
    /// Per-file `"<pattern>:<code>"` ignore entries.
    #[serde(default)]
    pub per_file_ignores: Vec<StrCheckCodePair>,
    // Stored as the raw string here; presumably compiled to a regex
    // downstream — TODO confirm at the consumer.
    pub dummy_variable_rgx: Option<String>,
    /// Target Python version; `None` when unset.
    pub target_version: Option<PythonVersion>,
}
/// A raw `"<pattern>:<code>"` pair, as written in a per-file-ignore entry.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StrCheckCodePair {
    /// The file pattern (left of the colon).
    pub pattern: String,
    /// The check code (right of the colon).
    pub code: CheckCode,
}
impl StrCheckCodePair {
    // Human-readable description of the expected format, used in errors.
    const EXPECTED_PATTERN: &'static str = "<FilePattern>:<CheckCode> pattern";
}
impl<'de> Deserialize<'de> for StrCheckCodePair {
    /// Deserialize from a plain string, delegating to `FromStr`; a parse
    /// failure is reported as a serde "invalid value" error that names the
    /// expected `<FilePattern>:<CheckCode>` format.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let str_result = String::deserialize(deserializer)?;
        Self::from_str(str_result.as_str()).map_err(|_| {
            de::Error::invalid_value(
                de::Unexpected::Str(str_result.as_str()),
                &Self::EXPECTED_PATTERN,
            )
        })
    }
}
impl FromStr for StrCheckCodePair {
    type Err = anyhow::Error;

    /// Parse a `"<pattern>:<code>"` string into its two halves.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let tokens: Vec<&str> = string.split(':').collect();
        // Exactly one colon is required; anything else is malformed.
        let (pattern_str, code_string) = match tokens.as_slice() {
            [pattern, code] => (*pattern, *code),
            _ => return Err(anyhow!("Expected {}", Self::EXPECTED_PATTERN)),
        };
        Ok(Self {
            pattern: pattern_str.into(),
            code: CheckCode::from_str(code_string)?,
        })
    }
}
use crate::settings::options::Options;
#[derive(Debug, PartialEq, Eq, Deserialize)]
struct Tools {
ruff: Option<Config>,
ruff: Option<Options>,
}
#[derive(Debug, PartialEq, Eq, Deserialize)]
struct PyProject {
struct Pyproject {
tool: Option<Tools>,
}
fn parse_pyproject_toml(path: &Path) -> Result<PyProject> {
fn parse_pyproject_toml(path: &Path) -> Result<Pyproject> {
let contents = fs::read_file(path)?;
toml::from_str(&contents).map_err(|e| e.into())
}
@@ -148,6 +70,20 @@ pub fn find_project_root(sources: &[PathBuf]) -> Option<PathBuf> {
None
}
pub fn load_options(pyproject: &Option<PathBuf>) -> Result<Options> {
match pyproject {
Some(pyproject) => Ok(parse_pyproject_toml(pyproject)?
.tool
.and_then(|tool| tool.ruff)
.unwrap_or_default()),
None => {
debug!("No pyproject.toml found.");
debug!("Falling back to default configuration...");
Ok(Default::default())
}
}
}
#[cfg(test)]
mod tests {
use std::env::current_dir;
@@ -156,26 +92,27 @@ mod tests {
use anyhow::Result;
use crate::checks::CheckCode;
use crate::pyproject::{
find_project_root, find_pyproject_toml, parse_pyproject_toml, Config, PyProject, Tools,
use crate::checks_gen::CheckCodePrefix;
use crate::flake8_quotes::settings::Quote;
use crate::settings::pyproject::{
find_project_root, find_pyproject_toml, parse_pyproject_toml, Options, Pyproject, Tools,
};
use super::StrCheckCodePair;
use crate::settings::types::StrCheckCodePair;
use crate::{flake8_quotes, pep8_naming};
#[test]
fn deserialize() -> Result<()> {
let pyproject: PyProject = toml::from_str(r#""#)?;
let pyproject: Pyproject = toml::from_str(r#""#)?;
assert_eq!(pyproject.tool, None);
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
"#,
)?;
assert_eq!(pyproject.tool, Some(Tools { ruff: None }));
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
[tool.ruff]
@@ -184,7 +121,7 @@ mod tests {
assert_eq!(
pyproject.tool,
Some(Tools {
ruff: Some(Config {
ruff: Some(Options {
line_length: None,
exclude: None,
extend_exclude: vec![],
@@ -195,11 +132,13 @@ mod tests {
per_file_ignores: vec![],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: None,
pep8_naming: None,
})
})
);
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
[tool.ruff]
@@ -209,7 +148,7 @@ line-length = 79
assert_eq!(
pyproject.tool,
Some(Tools {
ruff: Some(Config {
ruff: Some(Options {
line_length: Some(79),
exclude: None,
extend_exclude: vec![],
@@ -220,11 +159,13 @@ line-length = 79
per_file_ignores: vec![],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: None,
pep8_naming: None,
})
})
);
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
[tool.ruff]
@@ -234,7 +175,7 @@ exclude = ["foo.py"]
assert_eq!(
pyproject.tool,
Some(Tools {
ruff: Some(Config {
ruff: Some(Options {
line_length: None,
exclude: Some(vec!["foo.py".to_string()]),
extend_exclude: vec![],
@@ -245,11 +186,13 @@ exclude = ["foo.py"]
per_file_ignores: vec![],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: None,
pep8_naming: None,
})
})
);
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
[tool.ruff]
@@ -259,22 +202,24 @@ select = ["E501"]
assert_eq!(
pyproject.tool,
Some(Tools {
ruff: Some(Config {
ruff: Some(Options {
line_length: None,
exclude: None,
extend_exclude: vec![],
select: Some(vec![CheckCode::E501]),
select: Some(vec![CheckCodePrefix::E501]),
extend_select: vec![],
ignore: vec![],
extend_ignore: vec![],
per_file_ignores: vec![],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: None,
pep8_naming: None,
})
})
);
let pyproject: PyProject = toml::from_str(
let pyproject: Pyproject = toml::from_str(
r#"
[tool.black]
[tool.ruff]
@@ -285,22 +230,24 @@ ignore = ["E501"]
assert_eq!(
pyproject.tool,
Some(Tools {
ruff: Some(Config {
ruff: Some(Options {
line_length: None,
exclude: None,
extend_exclude: vec![],
select: None,
extend_select: vec![CheckCode::M001],
ignore: vec![CheckCode::E501],
extend_select: vec![CheckCodePrefix::M001],
ignore: vec![CheckCodePrefix::E501],
extend_ignore: vec![],
per_file_ignores: vec![],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: None,
pep8_naming: None,
})
})
);
assert!(toml::from_str::<PyProject>(
assert!(toml::from_str::<Pyproject>(
r#"
[tool.black]
[tool.ruff]
@@ -309,7 +256,7 @@ line_length = 79
)
.is_err());
assert!(toml::from_str::<PyProject>(
assert!(toml::from_str::<Pyproject>(
r#"
[tool.black]
[tool.ruff]
@@ -318,7 +265,7 @@ select = ["E123"]
)
.is_err());
assert!(toml::from_str::<PyProject>(
assert!(toml::from_str::<Pyproject>(
r#"
[tool.black]
[tool.ruff]
@@ -350,7 +297,7 @@ other-attribute = 1
.expect("Unable to find tool.ruff.");
assert_eq!(
config,
Config {
Options {
line_length: Some(88),
exclude: None,
extend_exclude: vec![
@@ -362,9 +309,36 @@ other-attribute = 1
extend_select: vec![],
ignore: vec![],
extend_ignore: vec![],
per_file_ignores: vec![],
per_file_ignores: vec![StrCheckCodePair {
pattern: "__init__.py".to_string(),
code: CheckCodePrefix::F401
}],
dummy_variable_rgx: None,
target_version: None,
flake8_quotes: Some(flake8_quotes::settings::Options {
inline_quotes: Some(Quote::Single),
multiline_quotes: Some(Quote::Double),
docstring_quotes: Some(Quote::Double),
avoid_escape: Some(true),
}),
pep8_naming: Some(pep8_naming::settings::Options {
ignore_names: Some(vec![
"setUp".to_string(),
"tearDown".to_string(),
"setUpClass".to_string(),
"tearDownClass".to_string(),
"setUpModule".to_string(),
"tearDownModule".to_string(),
"asyncSetUp".to_string(),
"asyncTearDown".to_string(),
"setUpTestData".to_string(),
"failureException".to_string(),
"longMessage".to_string(),
"maxDiff".to_string(),
]),
classmethod_decorators: Some(vec!["classmethod".to_string()]),
staticmethod_decorators: Some(vec!["staticmethod".to_string()]),
}),
}
);
@@ -375,6 +349,8 @@ other-attribute = 1
fn str_check_code_pair_strings() {
let result = StrCheckCodePair::from_str("foo:E501");
assert!(result.is_ok());
let result = StrCheckCodePair::from_str("foo: E501");
assert!(result.is_ok());
let result = StrCheckCodePair::from_str("E501:foo");
assert!(result.is_err());
let result = StrCheckCodePair::from_str("E501");

125
src/settings/types.rs Normal file
View File

@@ -0,0 +1,125 @@
use std::collections::BTreeSet;
use std::hash::Hash;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use anyhow::{anyhow, Result};
use glob::Pattern;
use serde::{de, Deserialize, Deserializer, Serialize};
use crate::checks::CheckCode;
use crate::checks_gen::CheckCodePrefix;
use crate::fs;
/// A Python language version that can be set as `target-version`.
///
/// Variant order matters: the derived `PartialOrd` orders versions by
/// declaration order, i.e. Py33 < Py34 < ... < Py311.
#[derive(Clone, Debug, PartialOrd, PartialEq, Eq, Serialize, Deserialize)]
pub enum PythonVersion {
    Py33,
    Py34,
    Py35,
    Py36,
    Py37,
    Py38,
    Py39,
    Py310,
    Py311,
}
impl FromStr for PythonVersion {
type Err = anyhow::Error;
fn from_str(string: &str) -> Result<Self, Self::Err> {
match string {
"py33" => Ok(PythonVersion::Py33),
"py34" => Ok(PythonVersion::Py34),
"py35" => Ok(PythonVersion::Py35),
"py36" => Ok(PythonVersion::Py36),
"py37" => Ok(PythonVersion::Py37),
"py38" => Ok(PythonVersion::Py38),
"py39" => Ok(PythonVersion::Py39),
"py310" => Ok(PythonVersion::Py310),
"py311" => Ok(PythonVersion::Py311),
_ => Err(anyhow!("Unknown version: {}", string)),
}
}
}
/// A file-exclusion pattern: either a built-in basename literal (`Simple`)
/// or a user-supplied glob with an absolute form plus an optional
/// basename-only form (`Complex`).
#[derive(Debug, Clone, Hash)]
pub enum FilePattern {
    Simple(&'static str),
    Complex(Pattern, Option<Pattern>),
}

impl FilePattern {
    /// Compile a user-supplied pattern into a `Complex` variant, anchoring
    /// it under `project_root` when one is known.
    ///
    /// Panics on an invalid glob pattern (user input — surfaced via `expect`).
    pub fn from_user(pattern: &str, project_root: &Option<PathBuf>) -> Self {
        let path = Path::new(pattern);
        let resolved = project_root.as_ref().map_or_else(
            || fs::normalize_path(path),
            |root| fs::normalize_path_to(path, root),
        );
        let absolute = Pattern::new(&resolved.to_string_lossy()).expect("Invalid pattern.");
        // A separator-free pattern can additionally match on basename alone.
        let basename = (!pattern.contains(std::path::MAIN_SEPARATOR))
            .then(|| Pattern::new(pattern).expect("Invalid pattern."));
        FilePattern::Complex(absolute, basename)
    }
}
/// A compiled per-file ignore: a file pattern plus the set of check codes
/// suppressed for files matching it.
#[derive(Debug, Clone, Hash)]
pub struct PerFileIgnore {
    pub pattern: FilePattern,
    pub codes: BTreeSet<CheckCode>,
}

impl PerFileIgnore {
    /// Compile a raw `"<pattern>:<code>"` pair, expanding the code prefix
    /// into its concrete codes.
    pub fn new(user_in: StrCheckCodePair, project_root: &Option<PathBuf>) -> Self {
        Self {
            pattern: FilePattern::from_user(&user_in.pattern, project_root),
            codes: user_in.code.codes().into_iter().collect(),
        }
    }
}
/// A raw `"<pattern>:<code>"` pair, as written in a per-file-ignore entry.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StrCheckCodePair {
    /// The file pattern (left of the colon).
    pub pattern: String,
    /// The check-code prefix (right of the colon).
    pub code: CheckCodePrefix,
}
impl StrCheckCodePair {
    // Human-readable description of the expected format, used in errors.
    const EXPECTED_PATTERN: &'static str = "<FilePattern>:<CheckCode> pattern";
}
impl<'de> Deserialize<'de> for StrCheckCodePair {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let str_result = String::deserialize(deserializer)?;
Self::from_str(str_result.as_str()).map_err(|_| {
de::Error::invalid_value(
de::Unexpected::Str(str_result.as_str()),
&Self::EXPECTED_PATTERN,
)
})
}
}
impl FromStr for StrCheckCodePair {
    type Err = anyhow::Error;

    /// Parse a `"<pattern>:<code>"` string, trimming whitespace around both
    /// halves so that e.g. `"foo: E501"` is accepted.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let tokens: Vec<&str> = string.split(':').collect();
        // Exactly one colon is required; anything else is malformed.
        let (pattern_str, code_string) = match tokens.as_slice() {
            [pattern, code] => (pattern.trim(), code.trim()),
            _ => return Err(anyhow!("Expected {}", Self::EXPECTED_PATTERN)),
        };
        Ok(Self {
            pattern: pattern_str.into(),
            code: CheckCodePrefix::from_str(code_string)?,
        })
    }
}

86
src/settings/user.rs Normal file
View File

@@ -0,0 +1,86 @@
//! Structs to render user-facing settings.
use std::path::PathBuf;
use regex::Regex;
use crate::checks_gen::CheckCodePrefix;
use crate::settings::types::{FilePattern, PerFileIgnore, PythonVersion};
use crate::{flake8_quotes, pep8_naming, Configuration};
/// Renderable form of an exclusion pattern, split into its optional
/// basename-only and absolute-path components.
#[derive(Debug)]
#[allow(dead_code)]
pub struct Exclusion {
    basename: Option<String>,
    absolute: Option<String>,
}

impl Exclusion {
    /// Convert a `FilePattern` into its displayable parts: a `Simple`
    /// pattern has only a basename; a `Complex` one always has an absolute
    /// form and may also carry a basename form.
    pub fn from_file_pattern(file_pattern: FilePattern) -> Self {
        match file_pattern {
            FilePattern::Simple(name) => Self {
                basename: Some(name.to_string()),
                absolute: None,
            },
            FilePattern::Complex(absolute, basename) => Self {
                basename: basename.as_ref().map(ToString::to_string),
                absolute: Some(absolute.to_string()),
            },
        }
    }
}
/// Struct to render user-facing configuration.
///
/// Mirrors `Configuration`, but with exclusion patterns pre-converted to
/// displayable `Exclusion`s and the discovered project paths attached
/// (see `from_configuration`).
#[derive(Debug)]
pub struct UserConfiguration {
    pub dummy_variable_rgx: Regex,
    /// Exclusion patterns, rendered for display.
    pub exclude: Vec<Exclusion>,
    /// Extra exclusion patterns, rendered for display.
    pub extend_exclude: Vec<Exclusion>,
    pub extend_ignore: Vec<CheckCodePrefix>,
    pub extend_select: Vec<CheckCodePrefix>,
    pub ignore: Vec<CheckCodePrefix>,
    pub line_length: usize,
    pub per_file_ignores: Vec<PerFileIgnore>,
    pub select: Vec<CheckCodePrefix>,
    pub target_version: PythonVersion,
    // Plugins
    pub flake8_quotes: flake8_quotes::settings::Settings,
    pub pep8_naming: pep8_naming::settings::Settings,
    // Non-settings exposed to the user
    pub project_root: Option<PathBuf>,
    pub pyproject: Option<PathBuf>,
}
impl UserConfiguration {
pub fn from_configuration(
configuration: Configuration,
project_root: Option<PathBuf>,
pyproject: Option<PathBuf>,
) -> Self {
Self {
dummy_variable_rgx: configuration.dummy_variable_rgx,
exclude: configuration
.exclude
.into_iter()
.map(Exclusion::from_file_pattern)
.collect(),
extend_exclude: configuration
.extend_exclude
.into_iter()
.map(Exclusion::from_file_pattern)
.collect(),
extend_ignore: configuration.extend_ignore,
extend_select: configuration.extend_select,
ignore: configuration.ignore,
line_length: configuration.line_length,
per_file_ignores: configuration.per_file_ignores,
select: configuration.select,
target_version: configuration.target_version,
flake8_quotes: configuration.flake8_quotes,
pep8_naming: configuration.pep8_naming,
project_root,
pyproject,
}
}
}

View File

@@ -0,0 +1,14 @@
---
source: src/linter.rs
expression: checks
---
- kind:
RedundantTupleInExceptionHandler: ValueError
location:
row: 3
column: 9
end_location:
row: 3
column: 20
fix: ~

Some files were not shown because too many files have changed in this diff Show More