Compare commits

...

17 Commits

Author SHA1 Message Date
Charlie Marsh
9aeb5df5fe Bump version to 0.0.220 2023-01-12 17:57:04 -05:00
Charlie Marsh
7ffba7b552 Use absolute paths for GitHub and Gitlab annotations (#1837)
Note that the _annotation path_ is absolute, while the path encoded in
the message remains relative.

![Screen Shot 2023-01-12 at 5 54 11
PM](https://user-images.githubusercontent.com/1309177/212198531-63f15445-0f6a-471c-a64c-18ad2b6df0c7.png)

Closes #1835.
2023-01-12 17:54:34 -05:00
Charlie Marsh
06473bb1b5 Support for-else loops in SIM110 and SIM111 (#1834)
This PR adds support for `SIM110` and `SIM111` simplifications of the
form:

```py
def f():
    # SIM110
    for x in iterable:
        if check(x):
            return True
    else:
        return False
```
2023-01-12 17:04:58 -05:00
Ash Berlin-Taylor
bf5c048502 Airflow is now using ruff (#1833)
😀
2023-01-12 16:50:01 -05:00
Charlie Marsh
eaed08ae79 Skip SIM110/SIM111 fixes that create long lines 2023-01-12 16:21:54 -05:00
Charlie Marsh
e0fdc4c5e8 Avoid SIM110/SIM111 errors with else statements (#1832)
Closes #1831.
2023-01-12 16:17:27 -05:00
Charlie Marsh
590bec57f4 Fix typo in relative-imports-order option name 2023-01-12 15:57:58 -05:00
Charlie Marsh
3110d342c7 Implement isort's reverse_relative setting (#1826)
This PR implements `reverse-relative`, from isort, but renames it to
`relative-imports-order` with the respective values `closest-to-furthest`
and `furthest-to-closest`, the latter being the default.

Closes #1813.
2023-01-12 15:48:40 -05:00
nefrob
39aae28eb4 📝 Update readme example for adding isort required imports (#1824)
Fixes the example to use the ruff option name instead of the isort name.
2023-01-12 13:18:06 -05:00
Charlie Marsh
dcccfe2591 Avoid parsing pyproject.toml files when settings are fixed (#1827)
Apart from being wasteful, this can also cause problems (see the linked
issue).

Resolves #1812.
2023-01-12 13:15:44 -05:00
Martin Fischer
38f5e8f423 Decouple linter module from cache module 2023-01-12 13:09:59 -05:00
Martin Fischer
74f14182ea Decouple resolver module from cli::Overrides 2023-01-12 13:09:59 -05:00
Charlie Marsh
bbc1e7804e Don't trigger SIM401 for complex default values (#1825)
Resolves #1809.
2023-01-12 12:51:23 -05:00
messense
c6320b29e4 Implement autofix for flake8-quotes (#1810)
Resolves #1789
2023-01-12 12:42:28 -05:00
Maksudul Haque
1a90408e8c [flake8-bandit] Add Rule for S701 (jinja2 autoescape false) (#1815)
ref: https://github.com/charliermarsh/ruff/issues/1646

Co-authored-by: Charlie Marsh <charlie.r.marsh@gmail.com>
2023-01-12 11:59:20 -05:00
Jeroen Van Goey
07134c50c8 Add usage of ruff in pandas to README (#1811)
pandas now uses ruff for linting, see
https://github.com/pandas-dev/pandas/pull/50160
2023-01-12 10:55:21 -05:00
Charlie Marsh
b36d4a15b0 Modify visibility and shuffle around some modules (#1807) 2023-01-11 23:57:05 -05:00
202 changed files with 2536 additions and 1291 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.219
rev: v0.0.220
hooks:
- id: ruff

8
Cargo.lock generated
View File

@@ -735,7 +735,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.219-dev.0"
version = "0.0.220-dev.0"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1874,7 +1874,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.219"
version = "0.0.220"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1942,7 +1942,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.219"
version = "0.0.220"
dependencies = [
"anyhow",
"clap 4.0.32",
@@ -1962,7 +1962,7 @@ dependencies = [
[[package]]
name = "ruff_macros"
version = "0.0.219"
version = "0.0.220"
dependencies = [
"once_cell",
"proc-macro2",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.219"
version = "0.0.220"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = "2021"
rust-version = "1.65.0"
@@ -52,7 +52,7 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
quick-junit = { version = "0.3.2" }
regex = { version = "1.6.0" }
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
ruff_macros = { version = "0.0.219", path = "ruff_macros" }
ruff_macros = { version = "0.0.220", path = "ruff_macros" }
rustc-hash = { version = "1.1.0" }
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }

View File

@@ -46,7 +46,9 @@ imports, and more.
Ruff is extremely actively developed and used in major open-source projects like:
- [pandas](https://github.com/pandas-dev/pandas)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Apache Airflow](https://github.com/apache/airflow)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Zulip](https://github.com/zulip/zulip)
- [Pydantic](https://github.com/pydantic/pydantic)
@@ -180,7 +182,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.219'
rev: 'v0.0.220'
hooks:
- id: ruff
# Respect `exclude` and `extend-exclude` settings.
@@ -782,6 +784,7 @@ For more, see [flake8-bandit](https://pypi.org/project/flake8-bandit/4.1.1/) on
| S506 | UnsafeYAMLLoad | Probable use of unsafe `yaml.load`. Allows instantiation of arbitrary objects. Consider `yaml.safe_load`. | |
| S508 | SnmpInsecureVersion | The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able. | |
| S509 | SnmpWeakCryptography | You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure. | |
| S701 | Jinja2AutoescapeFalse | By default, jinja2 sets `autoescape` to `False`. Consider using `autoescape=True` or the `select_autoescape` function to mitigate XSS vulnerabilities. | |
### flake8-blind-except (BLE)
@@ -950,10 +953,10 @@ For more, see [flake8-quotes](https://pypi.org/project/flake8-quotes/3.3.1/) on
| Code | Name | Message | Fix |
| ---- | ---- | ------- | --- |
| Q000 | BadQuotesInlineString | Single quotes found but double quotes preferred | |
| Q001 | BadQuotesMultilineString | Single quote multiline found but double quotes preferred | |
| Q002 | BadQuotesDocstring | Single quote docstring found but double quotes preferred | |
| Q003 | AvoidQuoteEscape | Change outer quotes to avoid escaping inner quotes | |
| Q000 | BadQuotesInlineString | Single quotes found but double quotes preferred | 🛠 |
| Q001 | BadQuotesMultilineString | Single quote multiline found but double quotes preferred | 🛠 |
| Q002 | BadQuotesDocstring | Single quote docstring found but double quotes preferred | 🛠 |
| Q003 | AvoidQuoteEscape | Change outer quotes to avoid escaping inner quotes | 🛠 |
### flake8-return (RET)
@@ -3020,6 +3023,30 @@ order-by-type = true
---
#### [`relative-imports-order`](#relative-imports-order)
Whether to place "closer" imports (fewer `.` characters, most local)
before "further" imports (more `.` characters, least local), or vice
versa.
The default ("furthest-to-closest") is equivalent to isort's
`reverse-relative` default (`reverse-relative = false`); setting
this to "closest-to-furthest" is equivalent to isort's `reverse-relative
= true`.
**Default value**: `furthest-to-closest`
**Type**: `RelatveImportsOrder`
**Example usage**:
```toml
[tool.ruff.isort]
relative-imports-order = "closest-to-furthest"
```
---
#### [`required-imports`](#required-imports)
Add the specified import line to all files.
@@ -3032,7 +3059,7 @@ Add the specified import line to all files.
```toml
[tool.ruff.isort]
add-import = ["from __future__ import annotations"]
required-imports = ["from __future__ import annotations"]
```
---

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.219"
version = "0.0.220"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.219"
version = "0.0.220"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.219-dev.0"
version = "0.0.220-dev.0"
edition = "2021"
[lib]

View File

@@ -4,7 +4,7 @@ build-backend = "maturin"
[project]
name = "ruff"
version = "0.0.219"
version = "0.0.220"
description = "An extremely fast Python linter, written in Rust."
authors = [
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },

View File

@@ -0,0 +1,29 @@
import jinja2
from jinja2 import Environment, select_autoescape
templateLoader = jinja2.FileSystemLoader( searchpath="/" )
something = ''
Environment(loader=templateLoader, load=templateLoader, autoescape=True)
templateEnv = jinja2.Environment(autoescape=True,
loader=templateLoader )
Environment(loader=templateLoader, load=templateLoader, autoescape=something) # S701
templateEnv = jinja2.Environment(autoescape=False, loader=templateLoader ) # S701
Environment(loader=templateLoader,
load=templateLoader,
autoescape=False) # S701
Environment(loader=templateLoader, # S701
load=templateLoader)
Environment(loader=templateLoader, autoescape=select_autoescape())
Environment(loader=templateLoader,
autoescape=select_autoescape(['html', 'htm', 'xml']))
Environment(loader=templateLoader,
autoescape=jinja2.select_autoescape(['html', 'htm', 'xml']))
def fake_func():
return 'foobar'
Environment(loader=templateLoader, autoescape=fake_func()) # S701

View File

@@ -1,4 +1,5 @@
this_should_raise_Q003 = 'This is a \'string\''
this_should_raise_Q003 = 'This is \\ a \\\'string\''
this_is_fine = '"This" is a \'string\''
this_is_fine = "This is a 'string'"
this_is_fine = "\"This\" is a 'string'"

View File

@@ -1,5 +1,6 @@
def f():
for x in iterable: # SIM110
# SIM110
for x in iterable:
if check(x):
return True
return False
@@ -20,14 +21,16 @@ def f():
def f():
for x in iterable: # SIM111
# SIM111
for x in iterable:
if check(x):
return False
return True
def f():
for x in iterable: # SIM111
# SIM111
for x in iterable:
if not x.is_empty():
return False
return True
@@ -45,3 +48,70 @@ def f():
if check(x):
return "foo"
return "bar"
def f():
# SIM110
for x in iterable:
if check(x):
return True
else:
return False
def f():
# SIM111
for x in iterable:
if check(x):
return False
else:
return True
def f():
# SIM110
for x in iterable:
if check(x):
return True
else:
return False
return True
def f():
# SIM111
for x in iterable:
if check(x):
return False
else:
return True
return False
def f():
for x in iterable:
if check(x):
return True
elif x.is_empty():
return True
return False
def f():
for x in iterable:
if check(x):
return True
else:
return True
return False
def f():
for x in iterable:
if check(x):
return True
elif x.is_empty():
return True
else:
return True
return False

View File

@@ -1,10 +1,18 @@
def f():
for x in iterable: # SIM110
# SIM110
for x in iterable:
if check(x):
return True
return False
def f():
for x in iterable:
if check(x):
return True
return True
def f():
for el in [1, 2, 3]:
if is_true(el):
@@ -13,21 +21,97 @@ def f():
def f():
for x in iterable: # SIM111
# SIM111
for x in iterable:
if check(x):
return False
return True
def f():
for x in iterable: # SIM 111
# SIM111
for x in iterable:
if not x.is_empty():
return False
return True
def f():
for x in iterable:
if check(x):
return False
return False
def f():
for x in iterable:
if check(x):
return "foo"
return "bar"
def f():
# SIM110
for x in iterable:
if check(x):
return True
else:
return False
def f():
# SIM111
for x in iterable:
if check(x):
return False
else:
return True
def f():
# SIM110
for x in iterable:
if check(x):
return True
else:
return False
return True
def f():
# SIM111
for x in iterable:
if check(x):
return False
else:
return True
return False
def f():
for x in iterable:
if check(x):
return True
elif x.is_empty():
return True
return False
def f():
for x in iterable:
if check(x):
return True
else:
return True
return False
def f():
for x in iterable:
if check(x):
return True
elif x.is_empty():
return True
else:
return True
return False

View File

@@ -79,3 +79,9 @@ if key in a_dict:
else:
var2 = value2
var = "default"
# OK (complex default value)
if key in a_dict:
var = a_dict[key]
else:
var = foo()

View File

@@ -0,0 +1,3 @@
from ... import a
from .. import b
from . import c

View File

@@ -17,7 +17,7 @@ resources/test/project/examples/docs/docs/file.py:8:5: F841 Local variable `x` i
resources/test/project/project/file.py:1:8: F401 `os` imported but unused
resources/test/project/project/import_file.py:1:1: I001 Import block is un-sorted or un-formatted
Found 7 error(s).
6 potentially fixable with the --fix option.
7 potentially fixable with the --fix option.
```
Running from the project directory itself should exhibit the same behavior:
@@ -32,7 +32,7 @@ examples/docs/docs/file.py:8:5: F841 Local variable `x` is assigned to but never
project/file.py:1:8: F401 `os` imported but unused
project/import_file.py:1:1: I001 Import block is un-sorted or un-formatted
Found 7 error(s).
6 potentially fixable with the --fix option.
7 potentially fixable with the --fix option.
```
Running from the sub-package directory should exhibit the same behavior, but omit the top-level
@@ -43,7 +43,7 @@ files:
docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
Found 2 error(s).
1 potentially fixable with the --fix option.
2 potentially fixable with the --fix option.
```
`--config` should force Ruff to use the specified `pyproject.toml` for all files, and resolve
@@ -74,7 +74,7 @@ docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
docs/docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
excluded/script.py:5:5: F841 Local variable `x` is assigned to but never used
Found 4 error(s).
1 potentially fixable with the --fix option.
4 potentially fixable with the --fix option.
```
Passing an excluded directory directly should report errors in the contained files:

View File

@@ -820,6 +820,17 @@
"null"
]
},
"relative-imports-order": {
"description": "Whether to place \"closer\" imports (fewer `.` characters, most local) before \"further\" imports (more `.` characters, least local), or vice versa.\n\nThe default (\"furthest-to-closest\") is equivalent to isort's `reverse-relative` default (`reverse-relative = false`); setting this to \"closest-to-furthest\" is equivalent to isort's `reverse-relative = true`.",
"anyOf": [
{
"$ref": "#/definitions/RelatveImportsOrder"
},
{
"type": "null"
}
]
},
"required-imports": {
"description": "Add the specified import line to all files.",
"type": [
@@ -1007,6 +1018,24 @@
}
]
},
"RelatveImportsOrder": {
"oneOf": [
{
"description": "Place \"closer\" imports (fewer `.` characters, most local) before \"further\" imports (more `.` characters, least local).",
"type": "string",
"enum": [
"closest-to-furthest"
]
},
{
"description": "Place \"further\" imports (more `.` characters, least local) before \"closer\" imports (fewer `.` characters, most local).",
"type": "string",
"enum": [
"furthest-to-closest"
]
}
]
},
"RuleCodePrefix": {
"type": "string",
"enum": [
@@ -1515,6 +1544,9 @@
"S506",
"S508",
"S509",
"S7",
"S70",
"S701",
"SIM",
"SIM1",
"SIM10",

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.219"
version = "0.0.220"
edition = "2021"
[lib]

View File

@@ -5,9 +5,7 @@ use std::path::PathBuf;
use anyhow::Result;
use clap::Args;
use ruff::source_code_generator::SourceCodeGenerator;
use ruff::source_code_locator::SourceCodeLocator;
use ruff::source_code_style::SourceCodeStyleDetector;
use ruff::source_code::{Generator, Locator, Stylist};
use rustpython_parser::parser;
#[derive(Args)]
@@ -20,9 +18,9 @@ pub struct Cli {
pub fn main(cli: &Cli) -> Result<()> {
let contents = fs::read_to_string(&cli.file)?;
let python_ast = parser::parse_program(&contents, &cli.file.to_string_lossy())?;
let locator = SourceCodeLocator::new(&contents);
let stylist = SourceCodeStyleDetector::from_contents(&contents, &locator);
let mut generator: SourceCodeGenerator = (&stylist).into();
let locator = Locator::new(&contents);
let stylist = Stylist::from_contents(&contents, &locator);
let mut generator: Generator = (&stylist).into();
generator.unparse_suite(&python_ast);
println!("{}", generator.generate());
Ok(())

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_macros"
version = "0.0.219"
version = "0.0.220"
edition = "2021"
[lib]

View File

@@ -34,7 +34,7 @@ def main(*, plugin: str, url: str) -> None:
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/rules.rs"), "w+") as fp:
fp.write("use crate::checkers::ast::Checker;\n")
with open(os.path.join(ROOT_DIR, f"src/{dir_name(plugin)}/mod.rs"), "w+") as fp:
fp.write("pub mod rules;\n")
fp.write("pub(crate) mod rules;\n")
fp.write("\n")
fp.write(
"""#[cfg(test)]

View File

@@ -12,9 +12,7 @@ use rustpython_parser::lexer::Tok;
use rustpython_parser::token::StringKind;
use crate::ast::types::{Binding, BindingKind, Range};
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code_style::SourceCodeStyleDetector;
use crate::SourceCodeLocator;
use crate::source_code::{Generator, Locator, Stylist};
/// Create an `Expr` with default location from an `ExprKind`.
pub fn create_expr(node: ExprKind) -> Expr {
@@ -27,15 +25,15 @@ pub fn create_stmt(node: StmtKind) -> Stmt {
}
/// Generate source code from an `Expr`.
pub fn unparse_expr(expr: &Expr, stylist: &SourceCodeStyleDetector) -> String {
let mut generator: SourceCodeGenerator = stylist.into();
pub fn unparse_expr(expr: &Expr, stylist: &Stylist) -> String {
let mut generator: Generator = stylist.into();
generator.unparse_expr(expr, 0);
generator.generate()
}
/// Generate source code from an `Stmt`.
pub fn unparse_stmt(stmt: &Stmt, stylist: &SourceCodeStyleDetector) -> String {
let mut generator: SourceCodeGenerator = stylist.into();
pub fn unparse_stmt(stmt: &Stmt, stylist: &Stylist) -> String {
let mut generator: Generator = stylist.into();
generator.unparse_stmt(stmt);
generator.generate()
}
@@ -431,7 +429,7 @@ pub fn collect_arg_names<'a>(arguments: &'a Arguments) -> FxHashSet<&'a str> {
}
/// Returns `true` if a statement or expression includes at least one comment.
pub fn has_comments<T>(located: &Located<T>, locator: &SourceCodeLocator) -> bool {
pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
lexer::make_tokenizer(&locator.slice_source_code_range(&Range::from_located(located)))
.flatten()
.any(|(_, tok, _)| matches!(tok, Tok::Comment(..)))
@@ -483,14 +481,14 @@ pub fn to_absolute(relative: Location, base: Location) -> Location {
}
/// Return `true` if a `Stmt` has leading content.
pub fn match_leading_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
pub fn match_leading_content(stmt: &Stmt, locator: &Locator) -> bool {
let range = Range::new(Location::new(stmt.location.row(), 0), stmt.location);
let prefix = locator.slice_source_code_range(&range);
prefix.chars().any(|char| !char.is_whitespace())
}
/// Return `true` if a `Stmt` has trailing content.
pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
pub fn match_trailing_content(stmt: &Stmt, locator: &Locator) -> bool {
let range = Range::new(
stmt.end_location.unwrap(),
Location::new(stmt.end_location.unwrap().row() + 1, 0),
@@ -508,7 +506,7 @@ pub fn match_trailing_content(stmt: &Stmt, locator: &SourceCodeLocator) -> bool
}
/// Return the number of trailing empty lines following a statement.
pub fn count_trailing_lines(stmt: &Stmt, locator: &SourceCodeLocator) -> usize {
pub fn count_trailing_lines(stmt: &Stmt, locator: &Locator) -> usize {
let suffix =
locator.slice_source_code_at(&Location::new(stmt.end_location.unwrap().row() + 1, 0));
suffix
@@ -520,7 +518,7 @@ pub fn count_trailing_lines(stmt: &Stmt, locator: &SourceCodeLocator) -> usize {
/// Return the appropriate visual `Range` for any message that spans a `Stmt`.
/// Specifically, this method returns the range of a function or class name,
/// rather than that of the entire function or class body.
pub fn identifier_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Range {
pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
if matches!(
stmt.node,
StmtKind::ClassDef { .. }
@@ -539,7 +537,7 @@ pub fn identifier_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Range {
}
/// Like `identifier_range`, but accepts a `Binding`.
pub fn binding_range(binding: &Binding, locator: &SourceCodeLocator) -> Range {
pub fn binding_range(binding: &Binding, locator: &Locator) -> Range {
if matches!(
binding.kind,
BindingKind::ClassDefinition | BindingKind::FunctionDefinition
@@ -555,7 +553,7 @@ pub fn binding_range(binding: &Binding, locator: &SourceCodeLocator) -> Range {
}
// Return the ranges of `Name` tokens within a specified node.
pub fn find_names<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Vec<Range> {
pub fn find_names<T>(located: &Located<T>, locator: &Locator) -> Vec<Range> {
let contents = locator.slice_source_code_range(&Range::from_located(located));
lexer::make_tokenizer_located(&contents, located.location)
.flatten()
@@ -568,10 +566,7 @@ pub fn find_names<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Vec<R
}
/// Return the `Range` of `name` in `Excepthandler`.
pub fn excepthandler_name_range(
handler: &Excepthandler,
locator: &SourceCodeLocator,
) -> Option<Range> {
pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> Option<Range> {
let ExcepthandlerKind::ExceptHandler {
name, type_, body, ..
} = &handler.node;
@@ -594,7 +589,7 @@ pub fn excepthandler_name_range(
}
/// Return the `Range` of `except` in `Excepthandler`.
pub fn except_range(handler: &Excepthandler, locator: &SourceCodeLocator) -> Range {
pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
let ExcepthandlerKind::ExceptHandler { body, type_, .. } = &handler.node;
let end = if let Some(type_) = type_ {
type_.location
@@ -619,7 +614,7 @@ pub fn except_range(handler: &Excepthandler, locator: &SourceCodeLocator) -> Ran
}
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
pub fn find_useless_f_strings(expr: &Expr, locator: &SourceCodeLocator) -> Vec<(Range, Range)> {
pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
let contents = locator.slice_source_code_range(&Range::from_located(expr));
lexer::make_tokenizer_located(&contents, expr.location)
.flatten()
@@ -656,7 +651,7 @@ pub fn find_useless_f_strings(expr: &Expr, locator: &SourceCodeLocator) -> Vec<(
}
/// Return the `Range` of `else` in `For`, `AsyncFor`, and `While` statements.
pub fn else_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Option<Range> {
pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
match &stmt.node {
StmtKind::For { body, orelse, .. }
| StmtKind::AsyncFor { body, orelse, .. }
@@ -690,7 +685,7 @@ pub fn else_range(stmt: &Stmt, locator: &SourceCodeLocator) -> Option<Range> {
/// Return `true` if a `Stmt` appears to be part of a multi-statement line, with
/// other statements preceding it.
pub fn preceded_by_continuation(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
pub fn preceded_by_continuation(stmt: &Stmt, locator: &Locator) -> bool {
// Does the previous line end in a continuation? This will have a specific
// false-positive, which is that if the previous line ends in a comment, it
// will be treated as a continuation. So we should only use this information to
@@ -711,13 +706,13 @@ pub fn preceded_by_continuation(stmt: &Stmt, locator: &SourceCodeLocator) -> boo
/// Return `true` if a `Stmt` appears to be part of a multi-statement line, with
/// other statements preceding it.
pub fn preceded_by_multi_statement_line(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
pub fn preceded_by_multi_statement_line(stmt: &Stmt, locator: &Locator) -> bool {
match_leading_content(stmt, locator) || preceded_by_continuation(stmt, locator)
}
/// Return `true` if a `Stmt` appears to be part of a multi-statement line, with
/// other statements following it.
pub fn followed_by_multi_statement_line(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
pub fn followed_by_multi_statement_line(stmt: &Stmt, locator: &Locator) -> bool {
match_trailing_content(stmt, locator)
}
@@ -799,7 +794,7 @@ mod tests {
else_range, identifier_range, match_module_member, match_trailing_content,
};
use crate::ast::types::Range;
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code::Locator;
#[test]
fn builtin() -> Result<()> {
@@ -949,25 +944,25 @@ mod tests {
let contents = "x = 1";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert!(!match_trailing_content(stmt, &locator));
let contents = "x = 1; y = 2";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert!(match_trailing_content(stmt, &locator));
let contents = "x = 1 ";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert!(!match_trailing_content(stmt, &locator));
let contents = "x = 1 # Comment";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert!(!match_trailing_content(stmt, &locator));
let contents = r#"
@@ -977,7 +972,7 @@ y = 2
.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert!(!match_trailing_content(stmt, &locator));
Ok(())
@@ -988,7 +983,7 @@ y = 2
let contents = "def f(): pass".trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(1, 4), Location::new(1, 5),)
@@ -1002,7 +997,7 @@ def \
.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(2, 2), Location::new(2, 3),)
@@ -1011,7 +1006,7 @@ def \
let contents = "class Class(): pass".trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(1, 6), Location::new(1, 11),)
@@ -1020,7 +1015,7 @@ def \
let contents = "class Class: pass".trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(1, 6), Location::new(1, 11),)
@@ -1034,7 +1029,7 @@ class Class():
.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(2, 6), Location::new(2, 11),)
@@ -1043,7 +1038,7 @@ class Class():
let contents = r#"x = y + 1"#.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
identifier_range(stmt, &locator),
Range::new(Location::new(1, 0), Location::new(1, 9),)
@@ -1063,7 +1058,7 @@ else:
.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
let range = else_range(stmt, &locator).unwrap();
assert_eq!(range.location.row(), 3);
assert_eq!(range.location.column(), 0);

View File

@@ -74,7 +74,6 @@ pub enum ScopeKind<'a> {
Function(FunctionDef<'a>),
Generator,
Module,
Arg,
Lambda(Lambda<'a>),
}

View File

@@ -1,225 +1 @@
use std::borrow::Cow;
use std::collections::BTreeSet;
use itertools::Itertools;
use ropey::RopeBuilder;
use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
#[derive(Debug, Copy, Clone, Hash)]
pub enum Mode {
Generate,
Apply,
Diff,
None,
}
impl From<bool> for Mode {
fn from(value: bool) -> Self {
if value {
Mode::Apply
} else {
Mode::None
}
}
}
/// Auto-fix errors in a file, and write the fixed source code to disk.
pub fn fix_file<'a>(
diagnostics: &'a [Diagnostic],
locator: &'a SourceCodeLocator<'a>,
) -> Option<(Cow<'a, str>, usize)> {
if diagnostics.iter().all(|check| check.fix.is_none()) {
return None;
}
Some(apply_fixes(
diagnostics.iter().filter_map(|check| check.fix.as_ref()),
locator,
))
}
/// Apply a series of fixes.
fn apply_fixes<'a>(
fixes: impl Iterator<Item = &'a Fix>,
locator: &'a SourceCodeLocator<'a>,
) -> (Cow<'a, str>, usize) {
let mut output = RopeBuilder::new();
let mut last_pos: Location = Location::new(1, 0);
let mut applied: BTreeSet<&Fix> = BTreeSet::default();
let mut num_fixed: usize = 0;
for fix in fixes.sorted_by_key(|fix| fix.location) {
// If we already applied an identical fix as part of another correction, skip
// any re-application.
if applied.contains(&fix) {
num_fixed += 1;
continue;
}
// Best-effort approach: if this fix overlaps with a fix we've already applied,
// skip it.
if last_pos > fix.location {
continue;
}
// Add all contents from `last_pos` to `fix.location`.
let slice = locator.slice_source_code_range(&Range::new(last_pos, fix.location));
output.append(&slice);
// Add the patch itself.
output.append(&fix.content);
// Track that the fix was applied.
last_pos = fix.end_location;
applied.insert(fix);
num_fixed += 1;
}
// Add the remaining content.
let slice = locator.slice_source_code_at(&last_pos);
output.append(&slice);
(Cow::from(output.finish()), num_fixed)
}
#[cfg(test)]
mod tests {
use rustpython_parser::ast::Location;
use crate::autofix::fixer::apply_fixes;
use crate::autofix::Fix;
use crate::SourceCodeLocator;
// Applying an empty fix list to an empty file yields empty output and a
// zero fix count.
#[test]
fn empty_file() {
let fixes = vec![];
let locator = SourceCodeLocator::new(r#""#);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(contents, "");
assert_eq!(fixed, 0);
}
// A single replacement fix swaps the `object` base class for `Bar`
// (columns 8..14 on line 1), leaving the rest of the source untouched.
#[test]
fn apply_single_replacement() {
let fixes = vec![Fix {
content: "Bar".to_string(),
location: Location::new(1, 8),
end_location: Location::new(1, 14),
}];
let locator = SourceCodeLocator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A(Bar):
...
"#
.trim(),
);
assert_eq!(fixed, 1);
}
// A deletion fix (empty `content`) removes the `(object)` suffix entirely.
#[test]
fn apply_single_removal() {
let fixes = vec![Fix {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 15),
}];
let locator = SourceCodeLocator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim()
);
assert_eq!(fixed, 1);
}
// Two adjacent (non-overlapping) deletions are both applied: the second
// starts exactly where the first ends (column 16), so `fixed` is 2.
#[test]
fn apply_double_removal() {
let fixes = vec![
Fix {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 16),
},
Fix {
content: String::new(),
location: Location::new(1, 16),
end_location: Location::new(1, 23),
},
];
let locator = SourceCodeLocator::new(
r#"
class A(object, object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim()
);
assert_eq!(fixed, 2);
}
// The second fix (9..11) falls inside the range consumed by the first
// (7..15), so it is skipped and does not count toward `fixed`.
#[test]
fn ignore_overlapping_fixes() {
let fixes = vec![
Fix {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 15),
},
Fix {
content: "ignored".to_string(),
location: Location::new(1, 9),
end_location: Location::new(1, 11),
},
];
let locator = SourceCodeLocator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim(),
);
assert_eq!(fixed, 1);
}
}

View File

@@ -9,10 +9,10 @@ use crate::ast::helpers;
use crate::ast::helpers::to_absolute;
use crate::ast::types::Range;
use crate::ast::whitespace::LinesWithTrailingNewline;
use crate::autofix::Fix;
use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::match_module;
use crate::source_code_locator::SourceCodeLocator;
use crate::fix::Fix;
use crate::source_code::Locator;
/// Determine if a body contains only a single statement, taking into account
/// deleted.
@@ -78,7 +78,7 @@ fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool>
/// Return the location of a trailing semicolon following a `Stmt`, if it's part
/// of a multi-statement line.
fn trailing_semicolon(stmt: &Stmt, locator: &SourceCodeLocator) -> Option<Location> {
fn trailing_semicolon(stmt: &Stmt, locator: &Locator) -> Option<Location> {
let contents = locator.slice_source_code_at(&stmt.end_location.unwrap());
for (row, line) in LinesWithTrailingNewline::from(&contents).enumerate() {
let trimmed = line.trim();
@@ -100,7 +100,7 @@ fn trailing_semicolon(stmt: &Stmt, locator: &SourceCodeLocator) -> Option<Locati
}
/// Find the next valid break for a `Stmt` after a semicolon.
fn next_stmt_break(semicolon: Location, locator: &SourceCodeLocator) -> Location {
fn next_stmt_break(semicolon: Location, locator: &Locator) -> Location {
let start_location = Location::new(semicolon.row(), semicolon.column() + 1);
let contents = locator.slice_source_code_at(&start_location);
for (row, line) in LinesWithTrailingNewline::from(&contents).enumerate() {
@@ -133,7 +133,7 @@ fn next_stmt_break(semicolon: Location, locator: &SourceCodeLocator) -> Location
}
/// Return `true` if a `Stmt` occurs at the end of a file.
fn is_end_of_file(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
fn is_end_of_file(stmt: &Stmt, locator: &Locator) -> bool {
let contents = locator.slice_source_code_at(&stmt.end_location.unwrap());
contents.is_empty()
}
@@ -155,7 +155,7 @@ pub fn delete_stmt(
stmt: &Stmt,
parent: Option<&Stmt>,
deleted: &[&Stmt],
locator: &SourceCodeLocator,
locator: &Locator,
) -> Result<Fix> {
if parent
.map(|parent| is_lone_child(stmt, parent, deleted))
@@ -197,7 +197,7 @@ pub fn remove_unused_imports<'a>(
stmt: &Stmt,
parent: Option<&Stmt>,
deleted: &[&Stmt],
locator: &SourceCodeLocator,
locator: &Locator,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(stmt));
let mut tree = match_module(&module_text)?;
@@ -299,20 +299,20 @@ mod tests {
use rustpython_parser::parser;
use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code::Locator;
#[test]
fn find_semicolon() -> Result<()> {
let contents = "x = 1";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(trailing_semicolon(stmt, &locator), None);
let contents = "x = 1; y = 1";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt, &locator),
Some(Location::new(1, 5))
@@ -321,7 +321,7 @@ mod tests {
let contents = "x = 1 ; y = 1";
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt, &locator),
Some(Location::new(1, 6))
@@ -334,7 +334,7 @@ x = 1 \
.trim();
let program = parser::parse_program(contents, "<filename>")?;
let stmt = program.first().unwrap();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
trailing_semicolon(stmt, &locator),
Some(Location::new(2, 2))
@@ -346,14 +346,14 @@ x = 1 \
#[test]
fn find_next_stmt_break() {
let contents = "x = 1; y = 1";
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(Location::new(1, 4), &locator),
Location::new(1, 5)
);
let contents = "x = 1 ; y = 1";
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(Location::new(1, 5), &locator),
Location::new(1, 6)
@@ -364,7 +364,7 @@ x = 1 \
; y = 1
"#
.trim();
let locator = SourceCodeLocator::new(contents);
let locator = Locator::new(contents);
assert_eq!(
next_stmt_break(Location::new(2, 2), &locator),
Location::new(2, 4)

View File

@@ -1,38 +1,210 @@
use std::borrow::Cow;
use std::collections::BTreeSet;
use itertools::Itertools;
use ropey::RopeBuilder;
use rustpython_ast::Location;
use serde::{Deserialize, Serialize};
use crate::ast::types::Range;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code::Locator;
pub mod fixer;
pub mod helpers;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Fix {
pub content: String,
pub location: Location,
pub end_location: Location,
/// Auto-fix any fixable errors among `diagnostics`, returning the patched
/// source contents along with the number of fixes that were applied.
///
/// Returns `None` when no diagnostic carries a fix, so callers can skip
/// rewriting the file entirely. (Despite the old phrasing, this function
/// performs no disk I/O itself — writing is the caller's responsibility.)
pub fn fix_file<'a>(
diagnostics: &'a [Diagnostic],
locator: &'a Locator<'a>,
) -> Option<(Cow<'a, str>, usize)> {
// Fast path: avoid copying the source when there's nothing to fix.
if diagnostics.iter().all(|check| check.fix.is_none()) {
return None;
}
Some(apply_fixes(
diagnostics.iter().filter_map(|check| check.fix.as_ref()),
locator,
))
}
impl Fix {
pub fn deletion(start: Location, end: Location) -> Self {
Self {
/// Apply a series of fixes to the source held by `locator`, returning the
/// patched contents and the number of fixes applied.
///
/// Fixes are applied in ascending source order. A fix that overlaps the
/// range consumed by a previously applied fix is skipped (best-effort),
/// while an exact duplicate is counted as applied without re-patching.
fn apply_fixes<'a>(
fixes: impl Iterator<Item = &'a Fix>,
locator: &'a Locator<'a>,
) -> (Cow<'a, str>, usize) {
let mut output = RopeBuilder::new();
// Start of the not-yet-copied suffix of the original source.
let mut last_pos: Location = Location::new(1, 0);
// Fixes already applied, used to de-duplicate identical fixes.
let mut applied: BTreeSet<&Fix> = BTreeSet::default();
let mut num_fixed: usize = 0;
for fix in fixes.sorted_by_key(|fix| fix.location) {
// If we already applied an identical fix as part of another correction, skip
// any re-application — but still count it toward `num_fixed`.
if applied.contains(&fix) {
num_fixed += 1;
continue;
}
// Best-effort approach: if this fix overlaps with a fix we've already applied,
// skip it (it is neither applied nor counted).
if last_pos > fix.location {
continue;
}
// Add all contents from `last_pos` to `fix.location`.
let slice = locator.slice_source_code_range(&Range::new(last_pos, fix.location));
output.append(&slice);
// Add the patch itself.
output.append(&fix.content);
// Track that the fix was applied.
last_pos = fix.end_location;
applied.insert(fix);
num_fixed += 1;
}
// Add the remaining content.
let slice = locator.slice_source_code_at(&last_pos);
output.append(&slice);
(Cow::from(output.finish()), num_fixed)
}
#[cfg(test)]
mod tests {
use rustpython_parser::ast::Location;
use crate::autofix::apply_fixes;
use crate::fix::Fix;
use crate::source_code::Locator;
#[test]
fn empty_file() {
let fixes = vec![];
let locator = Locator::new(r#""#);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(contents, "");
assert_eq!(fixed, 0);
}
#[test]
fn apply_single_replacement() {
let fixes = vec![Fix {
content: "Bar".to_string(),
location: Location::new(1, 8),
end_location: Location::new(1, 14),
}];
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A(Bar):
...
"#
.trim(),
);
assert_eq!(fixed, 1);
}
#[test]
fn apply_single_removal() {
let fixes = vec![Fix {
content: String::new(),
location: start,
end_location: end,
}
location: Location::new(1, 7),
end_location: Location::new(1, 15),
}];
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim()
);
assert_eq!(fixed, 1);
}
pub fn replacement(content: String, start: Location, end: Location) -> Self {
Self {
content,
location: start,
end_location: end,
}
#[test]
fn apply_double_removal() {
let fixes = vec![
Fix {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 16),
},
Fix {
content: String::new(),
location: Location::new(1, 16),
end_location: Location::new(1, 23),
},
];
let locator = Locator::new(
r#"
class A(object, object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim()
);
assert_eq!(fixed, 2);
}
pub fn insertion(content: String, at: Location) -> Self {
Self {
content,
location: at,
end_location: at,
}
#[test]
fn ignore_overlapping_fixes() {
let fixes = vec![
Fix {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 15),
},
Fix {
content: "ignored".to_string(),
location: Location::new(1, 9),
end_location: Location::new(1, 11),
},
];
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let (contents, fixed) = apply_fixes(fixes.iter(), &locator);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim(),
);
assert_eq!(fixed, 1);
}
}

View File

@@ -1,3 +1,4 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
@@ -53,6 +54,7 @@ fn cache_key<P: AsRef<Path>>(path: P, settings: &Settings, autofix: flags::Autof
hasher.finish()
}
#[allow(dead_code)]
/// Initialize the cache at the specified `Path`.
pub fn init(path: &Path) -> Result<()> {
// Create the cache directories.

View File

@@ -33,8 +33,7 @@ use crate::python::typing::SubscriptKind;
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::types::PythonVersion;
use crate::settings::{flags, Settings};
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code_style::SourceCodeStyleDetector;
use crate::source_code::{Locator, Stylist};
use crate::violations::DeferralKeyword;
use crate::visibility::{module_visibility, transition_scope, Modifier, Visibility, VisibleScope};
use crate::{
@@ -59,8 +58,8 @@ pub struct Checker<'a> {
noqa: flags::Noqa,
pub(crate) settings: &'a Settings,
pub(crate) noqa_line_for: &'a IntMap<usize, usize>,
pub(crate) locator: &'a SourceCodeLocator<'a>,
pub(crate) style: &'a SourceCodeStyleDetector<'a>,
pub(crate) locator: &'a Locator<'a>,
pub(crate) style: &'a Stylist<'a>,
// Computed diagnostics.
pub(crate) diagnostics: Vec<Diagnostic>,
// Function and class definition tracking (e.g., for docstring enforcement).
@@ -110,8 +109,8 @@ impl<'a> Checker<'a> {
autofix: flags::Autofix,
noqa: flags::Noqa,
path: &'a Path,
locator: &'a SourceCodeLocator,
style: &'a SourceCodeStyleDetector,
locator: &'a Locator,
style: &'a Stylist,
) -> Checker<'a> {
Checker {
settings,
@@ -1311,8 +1310,15 @@ where
if self.settings.enabled.contains(&RuleCode::PLW0120) {
pylint::rules::useless_else_on_loop(self, stmt, body, orelse);
}
if self.settings.enabled.contains(&RuleCode::SIM118) {
flake8_simplify::rules::key_in_dict_for(self, target, iter);
if matches!(stmt.node, StmtKind::For { .. }) {
if self.settings.enabled.contains(&RuleCode::SIM110)
|| self.settings.enabled.contains(&RuleCode::SIM111)
{
flake8_simplify::rules::convert_for_loop_to_any_all(self, stmt, None);
}
if self.settings.enabled.contains(&RuleCode::SIM118) {
flake8_simplify::rules::key_in_dict_for(self, target, iter);
}
}
}
StmtKind::Try {
@@ -2024,6 +2030,17 @@ where
self.diagnostics.push(diagnostic);
}
}
if self.settings.enabled.contains(&RuleCode::S701) {
if let Some(diagnostic) = flake8_bandit::rules::jinja2_autoescape_false(
func,
args,
keywords,
&self.from_imports,
&self.import_aliases,
) {
self.diagnostics.push(diagnostic);
}
}
if self.settings.enabled.contains(&RuleCode::S106) {
self.diagnostics
.extend(flake8_bandit::rules::hardcoded_password_func_arg(keywords));
@@ -3158,7 +3175,7 @@ where
if matches!(stmt.node, StmtKind::For { .. })
&& matches!(sibling.node, StmtKind::Return { .. })
{
flake8_simplify::rules::convert_loop_to_any_all(self, stmt, sibling);
flake8_simplify::rules::convert_for_loop_to_any_all(self, stmt, Some(sibling));
}
}
}
@@ -3238,16 +3255,6 @@ impl<'a> Checker<'a> {
self.parents.iter().rev().nth(1)
}
/// Return the grandparent `Stmt` of the current `Stmt`, if any.
pub fn current_stmt_grandparent(&self) -> Option<&RefEquality<'a, Stmt>> {
self.parents.iter().rev().nth(2)
}
/// Return the current `Expr`.
pub fn current_expr(&self) -> Option<&RefEquality<'a, Expr>> {
self.exprs.iter().rev().next()
}
/// Return the parent `Expr` of the current `Expr`.
pub fn current_expr_parent(&self) -> Option<&RefEquality<'a, Expr>> {
self.exprs.iter().rev().nth(1)
@@ -4308,8 +4315,8 @@ impl<'a> Checker<'a> {
#[allow(clippy::too_many_arguments)]
pub fn check_ast(
python_ast: &Suite,
locator: &SourceCodeLocator,
stylist: &SourceCodeStyleDetector,
locator: &Locator,
stylist: &Stylist,
noqa_line_for: &IntMap<usize, usize>,
settings: &Settings,
autofix: flags::Autofix,

View File

@@ -10,16 +10,15 @@ use crate::isort;
use crate::isort::track::{Block, ImportTracker};
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::{flags, Settings};
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code_style::SourceCodeStyleDetector;
use crate::source_code::{Locator, Stylist};
#[allow(clippy::too_many_arguments)]
pub fn check_imports(
python_ast: &Suite,
locator: &SourceCodeLocator,
locator: &Locator,
directives: &IsortDirectives,
settings: &Settings,
stylist: &SourceCodeStyleDetector,
stylist: &Stylist,
autofix: flags::Autofix,
path: &Path,
package: Option<&Path>,

View File

@@ -6,7 +6,7 @@ use nohash_hasher::IntMap;
use rustpython_parser::ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::fix::Fix;
use crate::noqa::{is_file_exempt, Directive};
use crate::registry::{Diagnostic, DiagnosticKind, RuleCode, CODE_REDIRECTS};
use crate::settings::{flags, Settings};

View File

@@ -5,12 +5,12 @@ use rustpython_parser::lexer::{LexResult, Tok};
use crate::lex::docstring_detection::StateMachine;
use crate::registry::{Diagnostic, RuleCode};
use crate::ruff::rules::Context;
use crate::settings::flags;
use crate::source_code_locator::SourceCodeLocator;
use crate::{eradicate, flake8_implicit_str_concat, flake8_quotes, pycodestyle, ruff, Settings};
use crate::settings::{flags, Settings};
use crate::source_code::Locator;
use crate::{eradicate, flake8_implicit_str_concat, flake8_quotes, pycodestyle, ruff};
pub fn check_tokens(
locator: &SourceCodeLocator,
locator: &Locator,
tokens: &[LexResult],
settings: &Settings,
autofix: flags::Autofix,
@@ -67,7 +67,8 @@ pub fn check_tokens(
start,
end,
is_docstring,
&settings.flake8_quotes,
settings,
autofix,
) {
if settings.enabled.contains(diagnostic.kind.code()) {
diagnostics.push(diagnostic);

View File

@@ -4,12 +4,13 @@ use clap::{command, Parser};
use regex::Regex;
use rustc_hash::FxHashMap;
use crate::fs;
use crate::logging::LogLevel;
use crate::registry::{RuleCode, RuleCodePrefix};
use crate::resolver::ConfigProcessor;
use crate::settings::types::{
FilePattern, PatternPrefixPair, PerFileIgnore, PythonVersion, SerializationFormat,
};
use crate::{fs, mccabe};
#[derive(Debug, Parser)]
#[command(author, about = "Ruff: An extremely fast Python linter.")]
@@ -344,6 +345,87 @@ pub struct Overrides {
pub update_check: Option<bool>,
}
impl ConfigProcessor for &Overrides {
// Overlay CLI-provided options onto `config`: each field that was set on
// the command line (is `Some`) takes precedence over the corresponding
// value derived from `pyproject.toml`. Unset fields leave `config` alone.
fn process_config(&self, config: &mut crate::settings::configuration::Configuration) {
if let Some(cache_dir) = &self.cache_dir {
config.cache_dir = Some(cache_dir.clone());
}
if let Some(dummy_variable_rgx) = &self.dummy_variable_rgx {
config.dummy_variable_rgx = Some(dummy_variable_rgx.clone());
}
if let Some(exclude) = &self.exclude {
config.exclude = Some(exclude.clone());
}
// Note: `extend_exclude` is additive (appended, not replaced).
if let Some(extend_exclude) = &self.extend_exclude {
config.extend_exclude.extend(extend_exclude.clone());
}
if let Some(fix) = &self.fix {
config.fix = Some(*fix);
}
if let Some(fix_only) = &self.fix_only {
config.fix_only = Some(*fix_only);
}
if let Some(fixable) = &self.fixable {
config.fixable = Some(fixable.clone());
}
if let Some(format) = &self.format {
config.format = Some(*format);
}
if let Some(force_exclude) = &self.force_exclude {
config.force_exclude = Some(*force_exclude);
}
if let Some(ignore) = &self.ignore {
config.ignore = Some(ignore.clone());
}
if let Some(line_length) = &self.line_length {
config.line_length = Some(*line_length);
}
// `--max-complexity` maps onto the nested mccabe plugin options.
if let Some(max_complexity) = &self.max_complexity {
config.mccabe = Some(mccabe::settings::Options {
max_complexity: Some(*max_complexity),
});
}
if let Some(per_file_ignores) = &self.per_file_ignores {
config.per_file_ignores = Some(collect_per_file_ignores(per_file_ignores.clone()));
}
if let Some(respect_gitignore) = &self.respect_gitignore {
config.respect_gitignore = Some(*respect_gitignore);
}
if let Some(select) = &self.select {
config.select = Some(select.clone());
}
if let Some(show_source) = &self.show_source {
config.show_source = Some(*show_source);
}
if let Some(target_version) = &self.target_version {
config.target_version = Some(*target_version);
}
if let Some(unfixable) = &self.unfixable {
config.unfixable = Some(unfixable.clone());
}
if let Some(update_check) = &self.update_check {
config.update_check = Some(*update_check);
}
// Special-case: `extend_ignore` and `extend_select` are parallel arrays, so
// push an empty array if only one of the two is provided.
match (&self.extend_ignore, &self.extend_select) {
(Some(extend_ignore), Some(extend_select)) => {
config.extend_ignore.push(extend_ignore.clone());
config.extend_select.push(extend_select.clone());
}
(Some(extend_ignore), None) => {
config.extend_ignore.push(extend_ignore.clone());
config.extend_select.push(Vec::new());
}
(None, Some(extend_select)) => {
config.extend_ignore.push(Vec::new());
config.extend_select.push(extend_select.clone());
}
(None, None) => {}
}
}
}
/// Map the CLI settings to a `LogLevel`.
pub fn extract_log_level(cli: &Arguments) -> LogLevel {
if cli.silent {

View File

@@ -15,18 +15,18 @@ use rustpython_ast::Location;
use serde::Serialize;
use walkdir::WalkDir;
use crate::autofix::fixer;
use crate::cache::CACHE_DIR_NAME;
use crate::cli::Overrides;
use crate::diagnostics::{lint_path, lint_stdin, Diagnostics};
use crate::iterators::par_iter;
use crate::linter::{add_noqa_to_path, lint_path, lint_stdin, Diagnostics};
use crate::linter::add_noqa_to_path;
use crate::logging::LogLevel;
use crate::message::Message;
use crate::registry::RuleCode;
use crate::resolver::{FileDiscovery, PyprojectDiscovery};
use crate::settings::flags;
use crate::settings::types::SerializationFormat;
use crate::{cache, fs, packages, resolver, violations, warn_user_once};
use crate::{cache, fix, fs, packaging, resolver, violations, warn_user_once};
/// Run the linter over a collection of files.
pub fn run(
@@ -35,7 +35,7 @@ pub fn run(
file_strategy: &FileDiscovery,
overrides: &Overrides,
cache: flags::Cache,
autofix: fixer::Mode,
autofix: fix::FixMode,
) -> Result<Diagnostics> {
// Collect all the Python files to check.
let start = Instant::now();
@@ -77,7 +77,7 @@ pub fn run(
};
// Discover the package root for each Python file.
let package_roots = packages::detect_package_roots(
let package_roots = packaging::detect_package_roots(
&paths
.iter()
.flatten()
@@ -156,7 +156,7 @@ pub fn run_stdin(
pyproject_strategy: &PyprojectDiscovery,
file_strategy: &FileDiscovery,
overrides: &Overrides,
autofix: fixer::Mode,
autofix: fix::FixMode,
) -> Result<Diagnostics> {
if let Some(filename) = filename {
if !resolver::python_file_at_path(filename, pyproject_strategy, file_strategy, overrides)? {
@@ -169,7 +169,7 @@ pub fn run_stdin(
};
let package_root = filename
.and_then(Path::parent)
.and_then(packages::detect_package_root);
.and_then(packaging::detect_package_root);
let stdin = read_from_stdin()?;
let mut diagnostics = lint_stdin(filename, package_root, &stdin, settings, autofix)?;
diagnostics.messages.sort_unstable();

154
src/diagnostics.rs Normal file
View File

@@ -0,0 +1,154 @@
#![cfg_attr(target_family = "wasm", allow(dead_code))]
use std::fs::write;
use std::io;
use std::io::Write;
use std::ops::AddAssign;
use std::path::Path;
use anyhow::Result;
use log::debug;
use similar::TextDiff;
use crate::linter::{lint_fix, lint_only};
use crate::message::Message;
use crate::settings::{flags, Settings};
use crate::{cache, fix, fs};
/// The outcome of linting one input: the reported messages plus the number
/// of fixes that were applied along the way.
#[derive(Debug, Default)]
pub struct Diagnostics {
    pub messages: Vec<Message>,
    pub fixed: usize,
}

impl Diagnostics {
    /// Wrap a set of lint messages with a fix count of zero.
    pub fn new(messages: Vec<Message>) -> Self {
        Diagnostics { messages, fixed: 0 }
    }
}

impl AddAssign for Diagnostics {
    /// Merge another result set into this one: concatenate the messages and
    /// sum the fix counts.
    fn add_assign(&mut self, rhs: Self) {
        self.messages.extend(rhs.messages);
        self.fixed += rhs.fixed;
    }
}
/// Lint the source code at the given `Path`.
///
/// Consults the on-disk cache when `cache` is enabled, applies or diffs
/// autofixes according to `autofix`, and returns the resulting messages
/// along with the number of fixes applied.
///
/// # Errors
///
/// Returns an error if the settings are invalid, or if the file can't be
/// read, written, or stat'd.
pub fn lint_path(
path: &Path,
package: Option<&Path>,
settings: &Settings,
cache: flags::Cache,
autofix: fix::FixMode,
) -> Result<Diagnostics> {
// Validate the `Settings` and return any errors.
settings.validate()?;
// Check the cache.
// TODO(charlie): `fix::FixMode::Apply` and `fix::FixMode::Diff` both have
// side-effects that aren't captured in the cache. (In practice, it's fine
// to cache `fix::FixMode::Apply`, since a file either has no fixes, or we'll
// write the fixes to disk, thus invalidating the cache. But it's a bit hard
// to reason about. We need to come up with a better solution here.)
let metadata = if matches!(cache, flags::Cache::Enabled)
&& matches!(autofix, fix::FixMode::None | fix::FixMode::Generate)
{
let metadata = path.metadata()?;
if let Some(messages) = cache::get(path, &metadata, settings, autofix.into()) {
debug!("Cache hit for: {}", path.to_string_lossy());
return Ok(Diagnostics::new(messages));
}
Some(metadata)
} else {
None
};
// Read the file from disk.
let contents = fs::read_file(path)?;
// Lint the file.
let (messages, fixed) = if matches!(autofix, fix::FixMode::Apply | fix::FixMode::Diff) {
let (transformed, fixed, messages) = lint_fix(&contents, path, package, settings)?;
// Only touch disk (or stdout) when something actually changed.
if fixed > 0 {
if matches!(autofix, fix::FixMode::Apply) {
// Apply: overwrite the file with the fixed contents.
write(path, transformed)?;
} else if matches!(autofix, fix::FixMode::Diff) {
// Diff: print a unified diff to stdout instead of writing.
let mut stdout = io::stdout().lock();
TextDiff::from_lines(&contents, &transformed)
.unified_diff()
.header(&fs::relativize_path(path), &fs::relativize_path(path))
.to_writer(&mut stdout)?;
stdout.write_all(b"\n")?;
stdout.flush()?;
}
}
(messages, fixed)
} else {
let messages = lint_only(&contents, path, package, settings, autofix.into())?;
let fixed = 0;
(messages, fixed)
};
// Re-populate the cache.
if let Some(metadata) = metadata {
cache::set(path, &metadata, settings, autofix.into(), &messages);
}
Ok(Diagnostics { messages, fixed })
}
/// Generate `Diagnostic`s from source code content derived from stdin.
///
/// With `FixMode::Apply`, the (possibly fixed) contents are echoed to
/// stdout; with `FixMode::Diff`, a unified diff is printed instead, and
/// only when at least one fix was applied.
///
/// # Errors
///
/// Returns an error if the settings are invalid, linting fails, or writing
/// to stdout fails.
pub fn lint_stdin(
path: Option<&Path>,
package: Option<&Path>,
contents: &str,
settings: &Settings,
autofix: fix::FixMode,
) -> Result<Diagnostics> {
// Validate the `Settings` and return any errors.
settings.validate()?;
// Lint the inputs.
let (messages, fixed) = if matches!(autofix, fix::FixMode::Apply | fix::FixMode::Diff) {
// `"-"` stands in for stdin when no path was provided.
let (transformed, fixed, messages) = lint_fix(
contents,
path.unwrap_or_else(|| Path::new("-")),
package,
settings,
)?;
if matches!(autofix, fix::FixMode::Apply) {
// Write the contents to stdout, regardless of whether any errors were fixed.
io::stdout().write_all(transformed.as_bytes())?;
} else if matches!(autofix, fix::FixMode::Diff) {
// But only write a diff if it's non-empty.
if fixed > 0 {
let text_diff = TextDiff::from_lines(contents, &transformed);
let mut unified_diff = text_diff.unified_diff();
// Without a path there's nothing meaningful to label the diff with.
if let Some(path) = path {
unified_diff.header(&fs::relativize_path(path), &fs::relativize_path(path));
}
let mut stdout = io::stdout().lock();
unified_diff.to_writer(&mut stdout)?;
stdout.write_all(b"\n")?;
stdout.flush()?;
}
}
(messages, fixed)
} else {
let messages = lint_only(
contents,
path.unwrap_or_else(|| Path::new("-")),
package,
settings,
autofix.into(),
)?;
let fixed = 0;
(messages, fixed)
};
Ok(Diagnostics { messages, fixed })
}

View File

@@ -6,7 +6,7 @@ use rustpython_ast::Location;
use rustpython_parser::lexer::{LexResult, Tok};
use crate::registry::LintSource;
use crate::Settings;
use crate::settings::Settings;
bitflags! {
pub struct Flags: u32 {

View File

@@ -1,5 +1,5 @@
pub mod detection;
pub mod rules;
pub(crate) mod detection;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,11 +1,12 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::eradicate::detection::comment_contains_code;
use crate::registry::RuleCode;
use crate::settings::flags;
use crate::{violations, Diagnostic, Settings, SourceCodeLocator};
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::{flags, Settings};
use crate::source_code::Locator;
use crate::violations;
fn is_standalone_comment(line: &str) -> bool {
for char in line.chars() {
@@ -20,7 +21,7 @@ fn is_standalone_comment(line: &str) -> bool {
/// ERA001
pub fn commented_out_code(
locator: &SourceCodeLocator,
locator: &Locator,
start: Location,
end: Location,
settings: &Settings,

53
src/fix.rs Normal file
View File

@@ -0,0 +1,53 @@
use rustpython_ast::Location;
use serde::{Deserialize, Serialize};
/// How autofixes should be handled during a lint run: generated but not
/// applied, applied in place, rendered as a diff, or disabled entirely.
#[derive(Debug, Copy, Clone, Hash)]
pub enum FixMode {
    Generate,
    Apply,
    Diff,
    None,
}

impl From<bool> for FixMode {
    /// Map a boolean "fix" flag onto a mode: `true` applies fixes, `false`
    /// disables them.
    fn from(value: bool) -> Self {
        match value {
            true => FixMode::Apply,
            false => FixMode::None,
        }
    }
}
/// A single autofix: replace the source between `location` and
/// `end_location` with `content`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Fix {
/// The replacement text (empty for a pure deletion).
pub content: String,
/// Start of the source range to be replaced.
pub location: Location,
/// End of the source range to be replaced.
pub end_location: Location,
}
impl Fix {
pub fn deletion(start: Location, end: Location) -> Self {
Self {
content: String::new(),
location: start,
end_location: end,
}
}
pub fn replacement(content: String, start: Location, end: Location) -> Self {
Self {
content,
location: start,
end_location: end,
}
}
pub fn insertion(content: String, at: Location) -> Self {
Self {
content,
location: at,
end_location: at,
}
}
}

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -4,11 +4,11 @@ use rustpython_parser::lexer;
use rustpython_parser::lexer::Tok;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::source_code_locator::SourceCodeLocator;
use crate::fix::Fix;
use crate::source_code::Locator;
/// ANN204
pub fn add_return_none_annotation(locator: &SourceCodeLocator, stmt: &Stmt) -> Result<Fix> {
pub fn add_return_none_annotation(locator: &Locator, stmt: &Stmt) -> Result<Fix> {
let range = Range::from_located(stmt);
let contents = locator.slice_source_code_range(&range);

View File

@@ -1,6 +1,6 @@
mod fixes;
pub mod helpers;
pub mod rules;
pub(crate) mod helpers;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
@@ -9,9 +9,10 @@ mod tests {
use anyhow::Result;
use crate::flake8_annotations;
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::{flake8_annotations, Settings};
use crate::settings::Settings;
#[test]
fn defaults() -> Result<()> {

View File

@@ -8,9 +8,9 @@ use crate::checkers::ast::Checker;
use crate::docstrings::definition::{Definition, DefinitionKind};
use crate::flake8_annotations::fixes;
use crate::flake8_annotations::helpers::match_function_def;
use crate::registry::RuleCode;
use crate::registry::{Diagnostic, RuleCode};
use crate::visibility::Visibility;
use crate::{violations, visibility, Diagnostic};
use crate::{violations, visibility};
#[derive(Default)]
struct ReturnStatementVisitor<'a> {

View File

@@ -1,5 +1,5 @@
mod helpers;
pub mod rules;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
@@ -9,9 +9,10 @@ mod tests {
use anyhow::Result;
use test_case::test_case;
use crate::flake8_bandit;
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::{flake8_bandit, Settings};
use crate::settings::Settings;
#[test_case(RuleCode::S101, Path::new("S101.py"); "S101")]
#[test_case(RuleCode::S102, Path::new("S102.py"); "S102")]
@@ -27,6 +28,7 @@ mod tests {
#[test_case(RuleCode::S506, Path::new("S506.py"); "S506")]
#[test_case(RuleCode::S508, Path::new("S508.py"); "S508")]
#[test_case(RuleCode::S509, Path::new("S509.py"); "S509")]
#[test_case(RuleCode::S701, Path::new("S701.py"); "S701")]
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -0,0 +1,57 @@
use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_ast::{Expr, ExprKind, Keyword};
use rustpython_parser::ast::Constant;
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path, SimpleCallArgs};
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::violations;
/// S701: flag `jinja2.Environment(...)` calls whose `autoescape` argument
/// is missing, or is anything other than the literal `True` or a call to a
/// function named `select_autoescape`.
///
/// The `bool` payload on `Jinja2AutoescapeFalse` distinguishes "autoescape
/// was provided but looks unsafe" (`true`) from "autoescape was omitted
/// entirely" (`false`).
pub fn jinja2_autoescape_false(
func: &Expr,
args: &[Expr],
keywords: &[Keyword],
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
import_aliases: &FxHashMap<&str, &str>,
) -> Option<Diagnostic> {
// Only consider calls that resolve (through aliases) to `jinja2.Environment`.
if match_call_path(
&dealias_call_path(collect_call_paths(func), import_aliases),
"jinja2",
"Environment",
from_imports,
) {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(autoescape_arg) = call_args.get_argument("autoescape", None) {
match &autoescape_arg.node {
// `autoescape=True`: safe, nothing to report.
ExprKind::Constant {
value: Constant::Bool(true),
..
} => (),
// `autoescape=f(...)`: only a call to a function literally named
// `select_autoescape` is accepted.
// NOTE(review): if the callee isn't a bare name (e.g. an attribute
// access), this branch reports nothing — presumably intentional,
// but worth confirming.
ExprKind::Call { func, .. } => {
if let ExprKind::Name { id, .. } = &func.node {
if id.as_str() != "select_autoescape" {
return Some(Diagnostic::new(
violations::Jinja2AutoescapeFalse(true),
Range::from_located(autoescape_arg),
));
}
}
}
// Any other value (e.g. `False`, a variable): flag the argument.
_ => {
return Some(Diagnostic::new(
violations::Jinja2AutoescapeFalse(true),
Range::from_located(autoescape_arg),
))
}
}
} else {
// `autoescape` omitted entirely: flag the call itself.
return Some(Diagnostic::new(
violations::Jinja2AutoescapeFalse(false),
Range::from_located(func),
));
}
}
None
}

View File

@@ -9,6 +9,7 @@ pub use hardcoded_password_string::{
};
pub use hardcoded_tmp_directory::hardcoded_tmp_directory;
pub use hashlib_insecure_hash_functions::hashlib_insecure_hash_functions;
pub use jinja2_autoescape_false::jinja2_autoescape_false;
pub use request_with_no_cert_validation::request_with_no_cert_validation;
pub use request_without_timeout::request_without_timeout;
pub use snmp_insecure_version::snmp_insecure_version;
@@ -24,6 +25,7 @@ mod hardcoded_password_func_arg;
mod hardcoded_password_string;
mod hardcoded_tmp_directory;
mod hashlib_insecure_hash_functions;
mod jinja2_autoescape_false;
mod request_with_no_cert_validation;
mod request_without_timeout;
mod snmp_insecure_version;

View File

@@ -0,0 +1,55 @@
---
source: src/flake8_bandit/mod.rs
expression: diagnostics
---
- kind:
Jinja2AutoescapeFalse: true
location:
row: 9
column: 67
end_location:
row: 9
column: 76
fix: ~
parent: ~
- kind:
Jinja2AutoescapeFalse: true
location:
row: 10
column: 44
end_location:
row: 10
column: 49
fix: ~
parent: ~
- kind:
Jinja2AutoescapeFalse: true
location:
row: 13
column: 23
end_location:
row: 13
column: 28
fix: ~
parent: ~
- kind:
Jinja2AutoescapeFalse: false
location:
row: 15
column: 0
end_location:
row: 15
column: 11
fix: ~
parent: ~
- kind:
Jinja2AutoescapeFalse: true
location:
row: 29
column: 46
end_location:
row: 29
column: 57
fix: ~
parent: ~

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
@@ -8,9 +8,10 @@ mod tests {
use anyhow::Result;
use test_case::test_case;
use crate::flake8_bugbear;
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::{flake8_bugbear, Settings};
use crate::settings::Settings;
#[test_case(RuleCode::B002, Path::new("B002.py"); "B002")]
#[test_case(RuleCode::B003, Path::new("B003.py"); "B003")]

View File

@@ -1,10 +1,10 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code::Generator;
use crate::violations;
fn assertion_error(msg: Option<&Expr>) -> Stmt {
@@ -48,7 +48,7 @@ pub fn assert_false(checker: &mut Checker, stmt: &Stmt, test: &Expr, msg: Option
let mut diagnostic = Diagnostic::new(violations::DoNotAssertFalse, Range::from_located(test));
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_stmt(&assertion_error(msg));
diagnostic.amend(Fix::replacement(
generator.generate(),

View File

@@ -4,10 +4,10 @@ use rustpython_ast::{Excepthandler, ExcepthandlerKind, Expr, ExprContext, ExprKi
use crate::ast::helpers;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code::Generator;
use crate::violations;
fn type_pattern(elts: Vec<&Expr>) -> Expr {
@@ -55,7 +55,7 @@ fn duplicate_handler_exceptions<'a>(
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
if unique_elts.len() == 1 {
generator.unparse_expr(unique_elts[0], 0);
} else {

View File

@@ -1,12 +1,12 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code::Generator;
use crate::violations;
fn attribute(value: &Expr, attr: &str) -> Expr {
@@ -48,7 +48,7 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
let mut diagnostic =
Diagnostic::new(violations::GetAttrWithConstant, Range::from_located(expr));
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(&attribute(obj, value), 0);
diagnostic.amend(Fix::replacement(
generator.generate(),

View File

@@ -1,10 +1,10 @@
use rustpython_ast::{Excepthandler, ExcepthandlerKind, ExprKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code::Generator;
use crate::violations;
/// B013
@@ -24,7 +24,7 @@ pub fn redundant_tuple_in_exception_handler(checker: &mut Checker, handlers: &[E
Range::from_located(type_),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(elt, 0);
diagnostic.amend(Fix::replacement(
generator.generate(),

View File

@@ -1,16 +1,15 @@
use rustpython_ast::{Constant, Expr, ExprContext, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::python::identifiers::IDENTIFIER_REGEX;
use crate::python::keyword::KWLIST;
use crate::registry::Diagnostic;
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code_style::SourceCodeStyleDetector;
use crate::source_code::{Generator, Stylist};
use crate::violations;
fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &SourceCodeStyleDetector) -> String {
fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &Stylist) -> String {
let stmt = Stmt::new(
Location::default(),
Location::default(),
@@ -28,7 +27,7 @@ fn assignment(obj: &Expr, name: &str, value: &Expr, stylist: &SourceCodeStyleDet
type_comment: None,
},
);
let mut generator: SourceCodeGenerator = stylist.into();
let mut generator: Generator = stylist.into();
generator.unparse_stmt(&stmt);
generator.generate()
}

View File

@@ -4,8 +4,8 @@ use rustpython_ast::{Expr, ExprKind, Stmt};
use crate::ast::types::Range;
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::violations;

View File

@@ -1,5 +1,5 @@
pub mod rules;
pub mod types;
pub(crate) mod rules;
pub(crate) mod types;
#[cfg(test)]
mod tests {

View File

@@ -7,9 +7,9 @@ use libcst_native::{
};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::cst::matchers::{match_expr, match_module};
use crate::source_code_locator::SourceCodeLocator;
use crate::fix::Fix;
use crate::source_code::Locator;
fn match_call<'a, 'b>(expr: &'a mut Expr<'b>) -> Result<&'a mut Call<'b>> {
if let Expression::Call(call) = &mut expr.value {
@@ -29,7 +29,7 @@ fn match_arg<'a, 'b>(call: &'a Call<'b>) -> Result<&'a Arg<'b>> {
/// (C400) Convert `list(x for x in y)` to `[x for x in y]`.
pub fn fix_unnecessary_generator_list(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
// Expr(Call(GeneratorExp)))) -> Expr(ListComp)))
@@ -70,7 +70,7 @@ pub fn fix_unnecessary_generator_list(
/// (C401) Convert `set(x for x in y)` to `{x for x in y}`.
pub fn fix_unnecessary_generator_set(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
// Expr(Call(GeneratorExp)))) -> Expr(SetComp)))
@@ -112,7 +112,7 @@ pub fn fix_unnecessary_generator_set(
/// (C402) Convert `dict((x, x) for x in range(3))` to `{x: x for x in
/// range(3)}`.
pub fn fix_unnecessary_generator_dict(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
@@ -169,7 +169,7 @@ pub fn fix_unnecessary_generator_dict(
/// (C403) Convert `set([x for x in y])` to `{x for x in y}`.
pub fn fix_unnecessary_list_comprehension_set(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
// Expr(Call(ListComp)))) ->
@@ -210,7 +210,7 @@ pub fn fix_unnecessary_list_comprehension_set(
/// (C404) Convert `dict([(i, i) for i in range(3)])` to `{i: i for i in
/// range(3)}`.
pub fn fix_unnecessary_list_comprehension_dict(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
@@ -259,10 +259,7 @@ pub fn fix_unnecessary_list_comprehension_dict(
}
/// (C405) Convert `set((1, 2))` to `{1, 2}`.
pub fn fix_unnecessary_literal_set(
locator: &SourceCodeLocator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
pub fn fix_unnecessary_literal_set(locator: &Locator, expr: &rustpython_ast::Expr) -> Result<Fix> {
// Expr(Call(List|Tuple)))) -> Expr(Set)))
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
let mut tree = match_module(&module_text)?;
@@ -305,10 +302,7 @@ pub fn fix_unnecessary_literal_set(
}
/// (C406) Convert `dict([(1, 2)])` to `{1: 2}`.
pub fn fix_unnecessary_literal_dict(
locator: &SourceCodeLocator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
pub fn fix_unnecessary_literal_dict(locator: &Locator, expr: &rustpython_ast::Expr) -> Result<Fix> {
// Expr(Call(List|Tuple)))) -> Expr(Dict)))
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
let mut tree = match_module(&module_text)?;
@@ -374,7 +368,7 @@ pub fn fix_unnecessary_literal_dict(
/// (C408)
pub fn fix_unnecessary_collection_call(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
// Expr(Call("list" | "tuple" | "dict")))) -> Expr(List|Tuple|Dict)
@@ -483,7 +477,7 @@ pub fn fix_unnecessary_collection_call(
/// (C409) Convert `tuple([1, 2])` to `tuple(1, 2)`
pub fn fix_unnecessary_literal_within_tuple_call(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
@@ -537,7 +531,7 @@ pub fn fix_unnecessary_literal_within_tuple_call(
/// (C410) Convert `list([1, 2])` to `[1, 2]`
pub fn fix_unnecessary_literal_within_list_call(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
@@ -592,10 +586,7 @@ pub fn fix_unnecessary_literal_within_list_call(
}
/// (C411) Convert `list([i * i for i in x])` to `[i * i for i in x]`.
pub fn fix_unnecessary_list_call(
locator: &SourceCodeLocator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
pub fn fix_unnecessary_list_call(locator: &Locator, expr: &rustpython_ast::Expr) -> Result<Fix> {
// Expr(Call(List|Tuple)))) -> Expr(List|Tuple)))
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
let mut tree = match_module(&module_text)?;
@@ -619,7 +610,7 @@ pub fn fix_unnecessary_list_call(
/// (C413) Convert `reversed(sorted([2, 3, 1]))` to `sorted([2, 3, 1],
/// reverse=True)`.
pub fn fix_unnecessary_call_around_sorted(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));
@@ -701,7 +692,7 @@ pub fn fix_unnecessary_call_around_sorted(
/// (C416) Convert `[i for i in x]` to `list(x)`.
pub fn fix_unnecessary_comprehension(
locator: &SourceCodeLocator,
locator: &Locator,
expr: &rustpython_ast::Expr,
) -> Result<Fix> {
let module_text = locator.slice_source_code_range(&Range::from_located(expr));

View File

@@ -1,5 +1,5 @@
mod fixes;
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,5 +1,5 @@
pub mod rules;
pub mod types;
pub(crate) mod rules;
pub(crate) mod types;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -4,11 +4,11 @@ use rustpython_parser::lexer::{LexResult, Tok};
use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
use crate::source_code::Locator;
use crate::violations;
/// ISC001, ISC002
pub fn implicit(tokens: &[LexResult], locator: &SourceCodeLocator) -> Vec<Diagnostic> {
pub fn implicit(tokens: &[LexResult], locator: &Locator) -> Vec<Diagnostic> {
let mut diagnostics = vec![];
for ((a_start, a_tok, a_end), (b_start, b_tok, b_end)) in
tokens.iter().flatten().tuple_windows()

View File

@@ -1,17 +1,17 @@
pub mod rules;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
mod tests {
use std::path::Path;
use anyhow::Result;
use rustc_hash::FxHashMap;
use crate::flake8_import_conventions;
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::{flake8_import_conventions, Settings};
use crate::settings::Settings;
#[test]
fn defaults() -> Result<()> {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -4,8 +4,8 @@ use rustpython_ast::{Constant, Expr, ExprKind, Stmt, StmtKind};
use crate::ast::types::{Range, RefEquality};
use crate::autofix::helpers::delete_stmt;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
#[cfg(test)]
mod tests {

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
pub mod settings;
pub mod types;

View File

@@ -8,8 +8,8 @@ use crate::ast::helpers::unparse_stmt;
use crate::ast::types::Range;
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::violations;

View File

@@ -8,8 +8,8 @@ use crate::ast::helpers::{collect_arg_names, collect_call_paths};
use crate::ast::types::Range;
use crate::ast::visitor;
use crate::ast::visitor::Visitor;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;

View File

@@ -2,8 +2,8 @@ use rustpython_ast::{Expr, ExprKind, Location};
use super::helpers::{get_mark_decorators, get_mark_name};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;

View File

@@ -3,11 +3,11 @@ use rustpython_ast::{Constant, Expr, ExprContext, ExprKind};
use super::helpers::is_pytest_parametrize;
use crate::ast::helpers::create_expr;
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::flake8_pytest_style::types;
use crate::registry::{Diagnostic, RuleCode};
use crate::source_code_generator::SourceCodeGenerator;
use crate::source_code::Generator;
use crate::violations;
fn get_parametrize_decorator<'a>(checker: &Checker, decorators: &'a [Expr]) -> Option<&'a Expr> {
@@ -31,7 +31,7 @@ fn elts_to_csv(elts: &[Expr], checker: &Checker) -> Option<String> {
return None;
}
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(
&create_expr(ExprKind::Constant {
value: Constant::Str(elts.iter().fold(String::new(), |mut acc, elt| {
@@ -85,7 +85,7 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(
&create_expr(ExprKind::Tuple {
elts: names
@@ -115,7 +115,7 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(
&create_expr(ExprKind::List {
elts: names
@@ -157,7 +157,7 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(
&create_expr(ExprKind::List {
elts: elts.clone(),
@@ -206,7 +206,7 @@ fn check_names(checker: &mut Checker, expr: &Expr) {
Range::from_located(expr),
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(
&create_expr(ExprKind::Tuple {
elts: elts.clone(),
@@ -284,7 +284,7 @@ fn handle_single_name(checker: &mut Checker, expr: &Expr, value: &Expr) {
);
if checker.patch(diagnostic.kind.code()) {
let mut generator: SourceCodeGenerator = checker.style.into();
let mut generator: Generator = checker.style.into();
generator.unparse_expr(&create_expr(value.node.clone()), 0);
diagnostic.amend(Fix::replacement(
generator.generate(),

View File

@@ -1,4 +1,4 @@
pub mod rules;
pub(crate) mod rules;
pub mod settings;
#[cfg(test)]
@@ -8,10 +8,11 @@ mod tests {
use anyhow::Result;
use test_case::test_case;
use crate::flake8_quotes;
use crate::flake8_quotes::settings::Quote;
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::{flake8_quotes, Settings};
use crate::settings::Settings;
#[test_case(Path::new("doubles.py"))]
#[test_case(Path::new("doubles_escaped.py"))]

View File

@@ -1,9 +1,11 @@
use rustpython_ast::Location;
use crate::ast::types::Range;
use crate::flake8_quotes::settings::{Quote, Settings};
use crate::registry::Diagnostic;
use crate::source_code_locator::SourceCodeLocator;
use crate::fix::Fix;
use crate::flake8_quotes::settings::Quote;
use crate::registry::{Diagnostic, RuleCode};
use crate::settings::{flags, Settings};
use crate::source_code::Locator;
use crate::violations;
fn good_single(quote: &Quote) -> char {
@@ -42,12 +44,14 @@ fn good_docstring(quote: &Quote) -> &str {
}
pub fn quotes(
locator: &SourceCodeLocator,
locator: &Locator,
start: Location,
end: Location,
is_docstring: bool,
settings: &Settings,
autofix: flags::Autofix,
) -> Option<Diagnostic> {
let quotes_settings = &settings.flake8_quotes;
let text = locator.slice_source_code_range(&Range::new(start, end));
// Remove any prefixes (e.g., remove `u` from `u"foo"`).
@@ -68,54 +72,139 @@ pub fn quotes(
};
if is_docstring {
if raw_text.contains(good_docstring(&settings.docstring_quotes)) {
if raw_text.contains(good_docstring(&quotes_settings.docstring_quotes)) {
return None;
}
Some(Diagnostic::new(
violations::BadQuotesDocstring(settings.docstring_quotes.clone()),
let mut diagnostic = Diagnostic::new(
violations::BadQuotesDocstring(quotes_settings.docstring_quotes.clone()),
Range::new(start, end),
))
);
if matches!(autofix, flags::Autofix::Enabled) && settings.fixable.contains(&RuleCode::Q002)
{
let quote_count = if is_multiline { 3 } else { 1 };
let string_contents = &raw_text[quote_count..raw_text.len() - quote_count];
let quote = good_docstring(&quotes_settings.docstring_quotes).repeat(quote_count);
let mut fixed_contents =
String::with_capacity(prefix.len() + string_contents.len() + quote.len() * 2);
fixed_contents.push_str(prefix);
fixed_contents.push_str(&quote);
fixed_contents.push_str(string_contents);
fixed_contents.push_str(&quote);
diagnostic.amend(Fix::replacement(fixed_contents, start, end));
}
Some(diagnostic)
} else if is_multiline {
// If our string is or contains a known good string, ignore it.
if raw_text.contains(good_multiline(&settings.multiline_quotes)) {
if raw_text.contains(good_multiline(&quotes_settings.multiline_quotes)) {
return None;
}
// If our string ends with a known good ending, then ignore it.
if raw_text.ends_with(good_multiline_ending(&settings.multiline_quotes)) {
if raw_text.ends_with(good_multiline_ending(&quotes_settings.multiline_quotes)) {
return None;
}
Some(Diagnostic::new(
violations::BadQuotesMultilineString(settings.multiline_quotes.clone()),
let mut diagnostic = Diagnostic::new(
violations::BadQuotesMultilineString(quotes_settings.multiline_quotes.clone()),
Range::new(start, end),
))
);
if matches!(autofix, flags::Autofix::Enabled) && settings.fixable.contains(&RuleCode::Q001)
{
let string_contents = &raw_text[3..raw_text.len() - 3];
let quote = good_multiline(&quotes_settings.multiline_quotes);
let mut fixed_contents =
String::with_capacity(prefix.len() + string_contents.len() + quote.len() * 2);
fixed_contents.push_str(prefix);
fixed_contents.push_str(quote);
fixed_contents.push_str(string_contents);
fixed_contents.push_str(quote);
diagnostic.amend(Fix::replacement(fixed_contents, start, end));
}
Some(diagnostic)
} else {
let string_contents = &raw_text[1..raw_text.len() - 1];
// If we're using the preferred quotation type, check for escapes.
if last_quote_char == good_single(&settings.inline_quotes) {
if !settings.avoid_escape || prefix.contains('r') {
if last_quote_char == good_single(&quotes_settings.inline_quotes) {
if !quotes_settings.avoid_escape || prefix.contains('r') {
return None;
}
if string_contents.contains(good_single(&settings.inline_quotes))
&& !string_contents.contains(bad_single(&settings.inline_quotes))
if string_contents.contains(good_single(&quotes_settings.inline_quotes))
&& !string_contents.contains(bad_single(&quotes_settings.inline_quotes))
{
return Some(Diagnostic::new(
violations::AvoidQuoteEscape,
Range::new(start, end),
));
let mut diagnostic =
Diagnostic::new(violations::AvoidQuoteEscape, Range::new(start, end));
if matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&RuleCode::Q003)
{
let quote = bad_single(&quotes_settings.inline_quotes);
let mut fixed_contents =
String::with_capacity(prefix.len() + string_contents.len() + 2);
fixed_contents.push_str(prefix);
fixed_contents.push(quote);
let chars: Vec<char> = string_contents.chars().collect();
let mut backslash_count = 0;
for col_offset in 0..chars.len() {
let char = chars[col_offset];
if char != '\\' {
fixed_contents.push(char);
continue;
}
backslash_count += 1;
// If the previous character was also a backslash
if col_offset > 0 && chars[col_offset - 1] == '\\' && backslash_count == 2 {
fixed_contents.push(char);
// reset to 0
backslash_count = 0;
continue;
}
// If we're at the end of the line
if col_offset == chars.len() - 1 {
fixed_contents.push(char);
continue;
}
let next_char = chars[col_offset + 1];
// Remove quote escape
if next_char == '\'' || next_char == '"' {
// reset to 0
backslash_count = 0;
continue;
}
fixed_contents.push(char);
}
fixed_contents.push(quote);
diagnostic.amend(Fix::replacement(fixed_contents, start, end));
}
return Some(diagnostic);
}
return None;
}
// If we're not using the preferred type, only allow use to avoid escapes.
if !string_contents.contains(good_single(&settings.inline_quotes)) {
return Some(Diagnostic::new(
violations::BadQuotesInlineString(settings.inline_quotes.clone()),
if !string_contents.contains(good_single(&quotes_settings.inline_quotes)) {
let mut diagnostic = Diagnostic::new(
violations::BadQuotesInlineString(quotes_settings.inline_quotes.clone()),
Range::new(start, end),
));
);
if matches!(autofix, flags::Autofix::Enabled)
&& settings.fixable.contains(&RuleCode::Q000)
{
let quote = good_single(&quotes_settings.inline_quotes);
let mut fixed_contents =
String::with_capacity(prefix.len() + string_contents.len() + 2);
fixed_contents.push_str(prefix);
fixed_contents.push(quote);
fixed_contents.push_str(string_contents);
fixed_contents.push(quote);
diagnostic.amend(Fix::replacement(fixed_contents, start, end));
}
return Some(diagnostic);
}
None

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 7
column: 3
fix: ~
fix:
content: "'''\nthis is not a docstring\n'''"
location:
row: 5
column: 0
end_location:
row: 7
column: 3
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 18
column: 7
fix: ~
fix:
content: "'''\n this is not a docstring\n '''"
location:
row: 16
column: 4
end_location:
row: 18
column: 7
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -30,7 +44,14 @@ expression: checks
end_location:
row: 22
column: 37
fix: ~
fix:
content: "'''\n definitely not a docstring'''"
location:
row: 21
column: 20
end_location:
row: 22
column: 37
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -40,7 +61,14 @@ expression: checks
end_location:
row: 32
column: 11
fix: ~
fix:
content: "'''\n this is not a docstring\n '''"
location:
row: 30
column: 8
end_location:
row: 32
column: 11
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -50,6 +78,13 @@ expression: checks
end_location:
row: 37
column: 15
fix: ~
fix:
content: "'''\n Looks like a docstring, but in reality it isn't - only modules, classes and functions\n '''"
location:
row: 35
column: 12
end_location:
row: 37
column: 15
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 3
column: 27
fix: ~
fix:
content: "''' Not a docstring '''"
location:
row: 3
column: 4
end_location:
row: 3
column: 27
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 5
column: 43
fix: ~
fix:
content: "'''not a docstring'''"
location:
row: 5
column: 22
end_location:
row: 5
column: 43
parent: ~

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 3
column: 26
fix: ~
fix:
content: "''' not a docstring'''"
location:
row: 3
column: 4
end_location:
row: 3
column: 26
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 11
column: 26
fix: ~
fix:
content: "''' not a docstring'''"
location:
row: 11
column: 4
end_location:
row: 11
column: 26
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -30,7 +44,14 @@ expression: diagnostics
end_location:
row: 17
column: 3
fix: ~
fix:
content: "'''\n not a\n'''"
location:
row: 15
column: 38
end_location:
row: 17
column: 3
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -40,7 +61,14 @@ expression: diagnostics
end_location:
row: 17
column: 19
fix: ~
fix:
content: "'''docstring'''"
location:
row: 17
column: 4
end_location:
row: 17
column: 19
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -50,6 +78,13 @@ expression: diagnostics
end_location:
row: 22
column: 27
fix: ~
fix:
content: "''' not a docstring '''"
location:
row: 22
column: 4
end_location:
row: 22
column: 27
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 6
column: 3
fix: ~
fix:
content: "'''\nthis is not a docstring\n'''"
location:
row: 4
column: 0
end_location:
row: 6
column: 3
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 11
column: 3
fix: ~
fix:
content: "'''\nthis is not a docstring\n'''"
location:
row: 9
column: 0
end_location:
row: 11
column: 3
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 2
column: 31
fix: ~
fix:
content: "''' this is not a docstring '''"
location:
row: 2
column: 0
end_location:
row: 2
column: 31
parent: ~
- kind:
BadQuotesMultilineString: single
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 6
column: 31
fix: ~
fix:
content: "''' this is not a docstring '''"
location:
row: 6
column: 0
end_location:
row: 6
column: 31
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 3
column: 3
fix: ~
fix:
content: "\"\"\"\nSingle quotes multiline module docstring\n\"\"\""
location:
row: 1
column: 0
end_location:
row: 3
column: 3
parent: ~
- kind:
BadQuotesDocstring: double
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 16
column: 7
fix: ~
fix:
content: "\"\"\"\n Single quotes multiline class docstring\n \"\"\""
location:
row: 14
column: 4
end_location:
row: 16
column: 7
parent: ~
- kind:
BadQuotesDocstring: double
@@ -30,6 +44,13 @@ expression: checks
end_location:
row: 28
column: 11
fix: ~
fix:
content: "\"\"\"\n Single quotes multiline function docstring\n \"\"\""
location:
row: 26
column: 8
end_location:
row: 28
column: 11
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 2
column: 53
fix: ~
fix:
content: "\"\"\" Double quotes single line class docstring \"\"\""
location:
row: 2
column: 4
end_location:
row: 2
column: 53
parent: ~
- kind:
BadQuotesDocstring: double
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 6
column: 57
fix: ~
fix:
content: "\"\"\" Double quotes single line method docstring\"\"\""
location:
row: 6
column: 8
end_location:
row: 6
column: 57
parent: ~
- kind:
BadQuotesDocstring: double
@@ -30,6 +44,13 @@ expression: checks
end_location:
row: 9
column: 52
fix: ~
fix:
content: "\"\"\" inline docstring \"\"\""
location:
row: 9
column: 28
end_location:
row: 9
column: 52
parent: ~

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 2
column: 56
fix: ~
fix:
content: "\"\"\"function without params, single line docstring\"\"\""
location:
row: 2
column: 4
end_location:
row: 2
column: 56
parent: ~
- kind:
BadQuotesDocstring: double
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 10
column: 7
fix: ~
fix:
content: "\"\"\"\n function without params, multiline docstring\n \"\"\""
location:
row: 8
column: 4
end_location:
row: 10
column: 7
parent: ~
- kind:
BadQuotesDocstring: double
@@ -30,6 +44,13 @@ expression: diagnostics
end_location:
row: 27
column: 27
fix: ~
fix:
content: "\"Single line docstring\""
location:
row: 27
column: 4
end_location:
row: 27
column: 27
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: double
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 3
column: 3
fix: ~
fix:
content: "\"\"\"\nDouble quotes multiline module docstring\n\"\"\""
location:
row: 1
column: 0
end_location:
row: 3
column: 3
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: double
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 1
column: 49
fix: ~
fix:
content: "\"\"\" Double quotes singleline module docstring \"\"\""
location:
row: 1
column: 0
end_location:
row: 1
column: 49
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesInlineString: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 1
column: 45
fix: ~
fix:
content: "'double quote string'"
location:
row: 1
column: 24
end_location:
row: 1
column: 45
parent: ~
- kind:
BadQuotesInlineString: single
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 2
column: 46
fix: ~
fix:
content: "u'double quote string'"
location:
row: 2
column: 24
end_location:
row: 2
column: 46
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
AvoidQuoteEscape: ~
@@ -10,6 +10,30 @@ expression: checks
end_location:
row: 1
column: 47
fix: ~
fix:
content: "\"This is a 'string'\""
location:
row: 1
column: 25
end_location:
row: 1
column: 47
parent: ~
- kind:
AvoidQuoteEscape: ~
location:
row: 2
column: 25
end_location:
row: 2
column: 52
fix:
content: "\"This is \\\\ a \\\\'string'\""
location:
row: 2
column: 25
end_location:
row: 2
column: 52
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: single
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 3
column: 12
fix: ~
fix:
content: "''' This \"should\"\nbe\n\"linted\" '''"
location:
row: 1
column: 4
end_location:
row: 3
column: 12
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 3
column: 3
fix: ~
fix:
content: "'''\nDouble quotes multiline module docstring\n'''"
location:
row: 1
column: 0
end_location:
row: 3
column: 3
parent: ~
- kind:
BadQuotesDocstring: single
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 14
column: 7
fix: ~
fix:
content: "'''\n Double quotes multiline class docstring\n '''"
location:
row: 12
column: 4
end_location:
row: 14
column: 7
parent: ~
- kind:
BadQuotesDocstring: single
@@ -30,6 +44,13 @@ expression: checks
end_location:
row: 26
column: 11
fix: ~
fix:
content: "'''\n Double quotes multiline function docstring\n '''"
location:
row: 24
column: 8
end_location:
row: 26
column: 11
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: single
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 2
column: 53
fix: ~
fix:
content: "''' Double quotes single line class docstring '''"
location:
row: 2
column: 4
end_location:
row: 2
column: 53
parent: ~
- kind:
BadQuotesDocstring: single
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 6
column: 57
fix: ~
fix:
content: "''' Double quotes single line method docstring'''"
location:
row: 6
column: 8
end_location:
row: 6
column: 57
parent: ~
- kind:
BadQuotesDocstring: single
@@ -30,6 +44,13 @@ expression: checks
end_location:
row: 9
column: 52
fix: ~
fix:
content: "''' inline docstring '''"
location:
row: 9
column: 28
end_location:
row: 9
column: 52
parent: ~

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 2
column: 56
fix: ~
fix:
content: "'''function without params, single line docstring'''"
location:
row: 2
column: 4
end_location:
row: 2
column: 56
parent: ~
- kind:
BadQuotesDocstring: single
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 10
column: 7
fix: ~
fix:
content: "'''\n function without params, multiline docstring\n '''"
location:
row: 8
column: 4
end_location:
row: 10
column: 7
parent: ~
- kind:
BadQuotesDocstring: single
@@ -30,6 +44,13 @@ expression: diagnostics
end_location:
row: 27
column: 27
fix: ~
fix:
content: "'Single line docstring'"
location:
row: 27
column: 4
end_location:
row: 27
column: 27
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: single
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 3
column: 3
fix: ~
fix:
content: "'''\nDouble quotes multiline module docstring\n'''"
location:
row: 1
column: 0
end_location:
row: 3
column: 3
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesDocstring: single
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 1
column: 49
fix: ~
fix:
content: "''' Double quotes singleline module docstring '''"
location:
row: 1
column: 0
end_location:
row: 1
column: 49
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 7
column: 3
fix: ~
fix:
content: "\"\"\"\nthis is not a docstring\n\"\"\""
location:
row: 5
column: 0
end_location:
row: 7
column: 3
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -20,7 +27,14 @@ expression: checks
end_location:
row: 13
column: 3
fix: ~
fix:
content: "\"\"\"\n class params \\t not a docstring\n\"\"\""
location:
row: 11
column: 20
end_location:
row: 13
column: 3
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -30,7 +44,14 @@ expression: checks
end_location:
row: 20
column: 7
fix: ~
fix:
content: "\"\"\"\n this is not a docstring\n \"\"\""
location:
row: 18
column: 4
end_location:
row: 20
column: 7
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -40,7 +61,14 @@ expression: checks
end_location:
row: 24
column: 37
fix: ~
fix:
content: "\"\"\"\n definitely not a docstring\"\"\""
location:
row: 23
column: 20
end_location:
row: 24
column: 37
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -50,7 +78,14 @@ expression: checks
end_location:
row: 34
column: 11
fix: ~
fix:
content: "\"\"\"\n this is not a docstring\n \"\"\""
location:
row: 32
column: 8
end_location:
row: 34
column: 11
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -60,6 +95,13 @@ expression: checks
end_location:
row: 39
column: 15
fix: ~
fix:
content: "\"\"\"\n Looks like a docstring, but in reality it isn't - only modules, classes and functions\n \"\"\""
location:
row: 37
column: 12
end_location:
row: 39
column: 15
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 3
column: 27
fix: ~
fix:
content: "\"\"\" Not a docstring \"\"\""
location:
row: 3
column: 4
end_location:
row: 3
column: 27
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 5
column: 43
fix: ~
fix:
content: "\"\"\"not a docstring\"\"\""
location:
row: 5
column: 22
end_location:
row: 5
column: 43
parent: ~

View File

@@ -10,7 +10,14 @@ expression: diagnostics
end_location:
row: 3
column: 26
fix: ~
fix:
content: "\"\"\" not a docstring\"\"\""
location:
row: 3
column: 4
end_location:
row: 3
column: 26
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -20,7 +27,14 @@ expression: diagnostics
end_location:
row: 11
column: 26
fix: ~
fix:
content: "\"\"\" not a docstring\"\"\""
location:
row: 11
column: 4
end_location:
row: 11
column: 26
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -30,7 +44,14 @@ expression: diagnostics
end_location:
row: 17
column: 3
fix: ~
fix:
content: "\"\"\"\n not a\n\"\"\""
location:
row: 15
column: 38
end_location:
row: 17
column: 3
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -40,7 +61,14 @@ expression: diagnostics
end_location:
row: 17
column: 19
fix: ~
fix:
content: "\"\"\"docstring\"\"\""
location:
row: 17
column: 4
end_location:
row: 17
column: 19
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -50,6 +78,13 @@ expression: diagnostics
end_location:
row: 22
column: 27
fix: ~
fix:
content: "\"\"\" not a docstring \"\"\""
location:
row: 22
column: 4
end_location:
row: 22
column: 27
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 6
column: 3
fix: ~
fix:
content: "\"\"\"\nthis is not a docstring\n\"\"\""
location:
row: 4
column: 0
end_location:
row: 6
column: 3
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 11
column: 3
fix: ~
fix:
content: "\"\"\"\nthis is not a docstring\n\"\"\""
location:
row: 9
column: 0
end_location:
row: 11
column: 3
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 2
column: 31
fix: ~
fix:
content: "\"\"\" this is not a docstring \"\"\""
location:
row: 2
column: 0
end_location:
row: 2
column: 31
parent: ~
- kind:
BadQuotesMultilineString: double
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 6
column: 31
fix: ~
fix:
content: "\"\"\" this is not a docstring \"\"\""
location:
row: 6
column: 0
end_location:
row: 6
column: 31
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesInlineString: double
@@ -10,7 +10,14 @@ expression: checks
end_location:
row: 1
column: 45
fix: ~
fix:
content: "\"single quote string\""
location:
row: 1
column: 24
end_location:
row: 1
column: 45
parent: ~
- kind:
BadQuotesInlineString: double
@@ -20,6 +27,13 @@ expression: checks
end_location:
row: 2
column: 46
fix: ~
fix:
content: "u\"double quote string\""
location:
row: 2
column: 24
end_location:
row: 2
column: 46
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
AvoidQuoteEscape: ~
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 1
column: 47
fix: ~
fix:
content: "'This is a \"string\"'"
location:
row: 1
column: 25
end_location:
row: 1
column: 47
parent: ~

View File

@@ -1,6 +1,6 @@
---
source: src/flake8_quotes/mod.rs
expression: checks
expression: diagnostics
---
- kind:
BadQuotesMultilineString: double
@@ -10,6 +10,13 @@ expression: checks
end_location:
row: 3
column: 12
fix: ~
fix:
content: "\"\"\" This 'should'\nbe\n'linted' \"\"\""
location:
row: 1
column: 4
end_location:
row: 3
column: 12
parent: ~

View File

@@ -1,5 +1,5 @@
mod helpers;
pub mod rules;
pub(crate) mod rules;
mod visitor;
#[cfg(test)]
@@ -11,7 +11,7 @@ mod tests {
use crate::linter::test_path;
use crate::registry::RuleCode;
use crate::Settings;
use crate::settings::Settings;
#[test_case(RuleCode::RET501, Path::new("RET501.py"); "RET501")]
#[test_case(RuleCode::RET502, Path::new("RET502.py"); "RET502")]

View File

@@ -4,13 +4,13 @@ use rustpython_ast::{Constant, Expr, ExprKind, Location, Stmt, StmtKind};
use crate::ast::types::Range;
use crate::ast::visitor::Visitor;
use crate::ast::whitespace::indentation;
use crate::autofix::Fix;
use crate::checkers::ast::Checker;
use crate::fix::Fix;
use crate::flake8_return::helpers::result_exists;
use crate::flake8_return::visitor::{ReturnVisitor, Stack};
use crate::registry::RuleCode;
use crate::registry::{Diagnostic, RuleCode};
use crate::violations;
use crate::violations::Branch;
use crate::{violations, Diagnostic};
/// RET501
fn unnecessary_return_none(checker: &mut Checker, stack: &Stack) {

Some files were not shown because too many files have changed in this diff Show More