Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cfbd068dd5 | ||
|
|
8aed23fe0a | ||
|
|
c016c41c71 | ||
|
|
f1a5e53f06 | ||
|
|
1e94e0221f | ||
|
|
543865c96b | ||
|
|
b8e3f0bc13 | ||
|
|
643cedb200 | ||
|
|
91620c378a | ||
|
|
b732135795 | ||
|
|
9384a081f9 |
4
.github/workflows/ci.yaml
vendored
4
.github/workflows/ci.yaml
vendored
@@ -27,8 +27,8 @@ jobs:
|
||||
toolchain: nightly-2022-11-01
|
||||
override: true
|
||||
- uses: Swatinem/rust-cache@v1
|
||||
- run: cargo build --all --release
|
||||
- run: ./target/release/ruff_dev generate-all
|
||||
- run: cargo build --all
|
||||
- run: ./target/debug/ruff_dev generate-all
|
||||
- run: git diff --quiet README.md || echo "::error file=README.md::This file is outdated. Run 'cargo +nightly dev generate-all'."
|
||||
- run: git diff --quiet ruff.schema.json || echo "::error file=ruff.schema.json::This file is outdated. Run 'cargo +nightly dev generate-all'."
|
||||
- run: git diff --exit-code -- README.md ruff.schema.json
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
rev: v0.0.217
|
||||
rev: v0.0.218
|
||||
hooks:
|
||||
- id: ruff
|
||||
|
||||
|
||||
8
Cargo.lock
generated
8
Cargo.lock
generated
@@ -735,7 +735,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.217-dev.0"
|
||||
version = "0.0.218-dev.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1874,7 +1874,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -1942,7 +1942,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.0.32",
|
||||
@@ -1962,7 +1962,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
|
||||
@@ -6,7 +6,7 @@ members = [
|
||||
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.65.0"
|
||||
@@ -19,6 +19,7 @@ license = "MIT"
|
||||
[lib]
|
||||
name = "ruff"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
annotate-snippets = { version = "0.9.1", features = ["color"] }
|
||||
@@ -51,7 +52,7 @@ path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix
|
||||
quick-junit = { version = "0.3.2" }
|
||||
regex = { version = "1.6.0" }
|
||||
ropey = { version = "1.5.0", features = ["cr_lines", "simd"], default-features = false }
|
||||
ruff_macros = { version = "0.0.217", path = "ruff_macros" }
|
||||
ruff_macros = { version = "0.0.218", path = "ruff_macros" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
rustpython-ast = { features = ["unparse"], git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "d532160333ffeb6dbeca2c2728c2391cd1e53b7f" }
|
||||
|
||||
26
README.md
26
README.md
@@ -180,7 +180,7 @@ Ruff also works with [pre-commit](https://pre-commit.com):
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.217'
|
||||
rev: 'v0.0.218'
|
||||
hooks:
|
||||
- id: ruff
|
||||
# Respect `exclude` and `extend-exclude` settings.
|
||||
@@ -614,6 +614,7 @@ For more, see [isort](https://pypi.org/project/isort/5.10.1/) on PyPI.
|
||||
| Code | Name | Message | Fix |
|
||||
| ---- | ---- | ------- | --- |
|
||||
| I001 | UnsortedImports | Import block is un-sorted or un-formatted | 🛠 |
|
||||
| I002 | MissingRequiredImport | Missing required import: `from __future__ import ...` | 🛠 |
|
||||
|
||||
### pydocstyle (D)
|
||||
|
||||
@@ -701,6 +702,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
|
||||
| UP027 | RewriteListComprehension | Replace unpacked list comprehension with a generator expression | 🛠 |
|
||||
| UP028 | RewriteYieldFrom | Replace `yield` over `for` loop with `yield from` | 🛠 |
|
||||
| UP029 | UnnecessaryBuiltinImport | Unnecessary builtin import: `...` | 🛠 |
|
||||
| UP030 | FormatLiterals | Use implicit references for positional format fields | 🛠 |
|
||||
|
||||
### pep8-naming (N)
|
||||
|
||||
@@ -777,6 +779,8 @@ For more, see [flake8-bandit](https://pypi.org/project/flake8-bandit/4.1.1/) on
|
||||
| S324 | HashlibInsecureHashFunction | Probable use of insecure hash functions in `hashlib`: "..." | |
|
||||
| S501 | RequestWithNoCertValidation | Probable use of `...` call with `verify=False` disabling SSL certificate checks | |
|
||||
| S506 | UnsafeYAMLLoad | Probable use of unsafe `yaml.load`. Allows instantiation of arbitrary objects. Consider `yaml.safe_load`. | |
|
||||
| S508 | SnmpInsecureVersion | The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able. | |
|
||||
| S509 | SnmpWeakCryptography | You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure. | |
|
||||
|
||||
### flake8-blind-except (BLE)
|
||||
|
||||
@@ -980,6 +984,7 @@ For more, see [flake8-simplify](https://pypi.org/project/flake8-simplify/0.19.3/
|
||||
| SIM109 | CompareWithTuple | Use `value in (..., ...)` instead of `value == ... or value == ...` | 🛠 |
|
||||
| SIM110 | ConvertLoopToAny | Use `return any(x for x in y)` instead of `for` loop | 🛠 |
|
||||
| SIM111 | ConvertLoopToAll | Use `return all(x for x in y)` instead of `for` loop | 🛠 |
|
||||
| SIM112 | UseCapitalEnvironmentVariables | Use capitalized environment variable `...` instead of `...` | 🛠 |
|
||||
| SIM117 | MultipleWithStatements | Use a single `with` statement with multiple contexts instead of nested `with` statements | |
|
||||
| SIM118 | KeyInDict | Use `key in dict` instead of `key in dict.keys()` | 🛠 |
|
||||
| SIM201 | NegateEqualOp | Use `left != right` instead of `not left == right` | 🛠 |
|
||||
@@ -2998,6 +3003,23 @@ order-by-type = true
|
||||
|
||||
---
|
||||
|
||||
#### [`required-imports`](#required-imports)
|
||||
|
||||
Add the specified import line to all files.
|
||||
|
||||
**Default value**: `[]`
|
||||
|
||||
**Type**: `Vec<String>`
|
||||
|
||||
**Example usage**:
|
||||
|
||||
```toml
|
||||
[tool.ruff.isort]
|
||||
add-import = ["from __future__ import annotations"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### [`single-line-exclusions`](#single-line-exclusions)
|
||||
|
||||
One or more modules to exclude from the single line rule.
|
||||
@@ -3191,4 +3213,4 @@ MIT
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and hugely appreciated. To get started, check out the
|
||||
[contributing guidelines](https://github.com/charliermarsh/ruff/blob/main/.github/CONTRIBUTING.md).
|
||||
[contributing guidelines](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md).
|
||||
|
||||
4
flake8_to_ruff/Cargo.lock
generated
4
flake8_to_ruff/Cargo.lock
generated
@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8_to_ruff"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
@@ -1975,7 +1975,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bincode",
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.217-dev.0"
|
||||
version = "0.0.218-dev.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "flake8_to_ruff"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
anyhow = { version = "1.0.66" }
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "maturin"
|
||||
|
||||
[project]
|
||||
name = "ruff"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
description = "An extremely fast Python linter, written in Rust."
|
||||
authors = [
|
||||
{ name = "Charlie Marsh", email = "charlie.r.marsh@gmail.com" },
|
||||
|
||||
6
resources/test/fixtures/flake8_bandit/S508.py
vendored
Normal file
6
resources/test/fixtures/flake8_bandit/S508.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
from pysnmp.hlapi import CommunityData
|
||||
|
||||
CommunityData("public", mpModel=0) # S508
|
||||
CommunityData("public", mpModel=1) # S508
|
||||
|
||||
CommunityData("public", mpModel=2) # OK
|
||||
7
resources/test/fixtures/flake8_bandit/S509.py
vendored
Normal file
7
resources/test/fixtures/flake8_bandit/S509.py
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
from pysnmp.hlapi import UsmUserData
|
||||
|
||||
|
||||
insecure = UsmUserData("securityName") # S509
|
||||
auth_no_priv = UsmUserData("securityName", "authName") # S509
|
||||
|
||||
less_insecure = UsmUserData("securityName", "authName", "privName") # OK
|
||||
114
resources/test/fixtures/flake8_bugbear/B023.py
vendored
114
resources/test/fixtures/flake8_bugbear/B023.py
vendored
@@ -25,10 +25,10 @@ for x in range(3):
|
||||
|
||||
|
||||
def check_inside_functions_too():
|
||||
ls = [lambda: x for x in range(2)]
|
||||
st = {lambda: x for x in range(2)}
|
||||
gn = (lambda: x for x in range(2))
|
||||
dt = {x: lambda: x for x in range(2)}
|
||||
ls = [lambda: x for x in range(2)] # error
|
||||
st = {lambda: x for x in range(2)} # error
|
||||
gn = (lambda: x for x in range(2)) # error
|
||||
dt = {x: lambda: x for x in range(2)} # error
|
||||
|
||||
|
||||
async def pointless_async_iterable():
|
||||
@@ -37,9 +37,9 @@ async def pointless_async_iterable():
|
||||
|
||||
async def container_for_problems():
|
||||
async for x in pointless_async_iterable():
|
||||
functions.append(lambda: x)
|
||||
functions.append(lambda: x) # error
|
||||
|
||||
[lambda: x async for x in pointless_async_iterable()]
|
||||
[lambda: x async for x in pointless_async_iterable()] # error
|
||||
|
||||
|
||||
a = 10
|
||||
@@ -47,10 +47,10 @@ b = 0
|
||||
while True:
|
||||
a = a_ = a - 1
|
||||
b += 1
|
||||
functions.append(lambda: a)
|
||||
functions.append(lambda: a_)
|
||||
functions.append(lambda: b)
|
||||
functions.append(lambda: c) # not a name error because of late binding!
|
||||
functions.append(lambda: a) # error
|
||||
functions.append(lambda: a_) # error
|
||||
functions.append(lambda: b) # error
|
||||
functions.append(lambda: c) # error, but not a name error due to late binding
|
||||
c: bool = a > 3
|
||||
if not c:
|
||||
break
|
||||
@@ -58,7 +58,7 @@ while True:
|
||||
# Nested loops should not duplicate reports
|
||||
for j in range(2):
|
||||
for k in range(3):
|
||||
lambda: j * k
|
||||
lambda: j * k # error
|
||||
|
||||
|
||||
for j, k, l in [(1, 2, 3)]:
|
||||
@@ -80,3 +80,95 @@ for var in range(2):
|
||||
|
||||
for i in range(3):
|
||||
lambda: f"{i}"
|
||||
|
||||
|
||||
# `query` is defined in the function, so also defining it in the loop should be OK.
|
||||
for name in ["a", "b"]:
|
||||
query = name
|
||||
|
||||
def myfunc(x):
|
||||
query = x
|
||||
query_post = x
|
||||
_ = query
|
||||
_ = query_post
|
||||
|
||||
query_post = name # in case iteration order matters
|
||||
|
||||
|
||||
# Bug here because two dict comprehensions reference `name`, one of which is inside
|
||||
# the lambda. This should be totally fine, of course.
|
||||
_ = {
|
||||
k: v
|
||||
for k, v in reduce(
|
||||
lambda data, event: merge_mappings(
|
||||
[data, {name: f(caches, data, event) for name, f in xx}]
|
||||
),
|
||||
events,
|
||||
{name: getattr(group, name) for name in yy},
|
||||
).items()
|
||||
if k in backfill_fields
|
||||
}
|
||||
|
||||
|
||||
# OK to define lambdas if they're immediately consumed, typically as the `key=`
|
||||
# argument or in a consumed `filter()` (even if a comprehension is better style)
|
||||
for x in range(2):
|
||||
# It's not a complete get-out-of-linting-free construct - these should fail:
|
||||
min([None, lambda: x], key=repr)
|
||||
sorted([None, lambda: x], key=repr)
|
||||
any(filter(bool, [None, lambda: x]))
|
||||
list(filter(bool, [None, lambda: x]))
|
||||
all(reduce(bool, [None, lambda: x]))
|
||||
|
||||
# But all these should be OK:
|
||||
min(range(3), key=lambda y: x * y)
|
||||
max(range(3), key=lambda y: x * y)
|
||||
sorted(range(3), key=lambda y: x * y)
|
||||
|
||||
any(map(lambda y: x < y, range(3)))
|
||||
all(map(lambda y: x < y, range(3)))
|
||||
set(map(lambda y: x < y, range(3)))
|
||||
list(map(lambda y: x < y, range(3)))
|
||||
tuple(map(lambda y: x < y, range(3)))
|
||||
sorted(map(lambda y: x < y, range(3)))
|
||||
frozenset(map(lambda y: x < y, range(3)))
|
||||
|
||||
any(filter(lambda y: x < y, range(3)))
|
||||
all(filter(lambda y: x < y, range(3)))
|
||||
set(filter(lambda y: x < y, range(3)))
|
||||
list(filter(lambda y: x < y, range(3)))
|
||||
tuple(filter(lambda y: x < y, range(3)))
|
||||
sorted(filter(lambda y: x < y, range(3)))
|
||||
frozenset(filter(lambda y: x < y, range(3)))
|
||||
|
||||
any(reduce(lambda y: x | y, range(3)))
|
||||
all(reduce(lambda y: x | y, range(3)))
|
||||
set(reduce(lambda y: x | y, range(3)))
|
||||
list(reduce(lambda y: x | y, range(3)))
|
||||
tuple(reduce(lambda y: x | y, range(3)))
|
||||
sorted(reduce(lambda y: x | y, range(3)))
|
||||
frozenset(reduce(lambda y: x | y, range(3)))
|
||||
|
||||
import functools
|
||||
|
||||
any(functools.reduce(lambda y: x | y, range(3)))
|
||||
all(functools.reduce(lambda y: x | y, range(3)))
|
||||
set(functools.reduce(lambda y: x | y, range(3)))
|
||||
list(functools.reduce(lambda y: x | y, range(3)))
|
||||
tuple(functools.reduce(lambda y: x | y, range(3)))
|
||||
sorted(functools.reduce(lambda y: x | y, range(3)))
|
||||
frozenset(functools.reduce(lambda y: x | y, range(3)))
|
||||
|
||||
# OK because the lambda which references a loop variable is defined in a `return`
|
||||
# statement, and after we return the loop variable can't be redefined.
|
||||
# In principle we could do something fancy with `break`, but it's not worth it.
|
||||
def iter_f(names):
|
||||
for name in names:
|
||||
if exists(name):
|
||||
return lambda: name if exists(name) else None
|
||||
|
||||
if foo(name):
|
||||
return [lambda: name] # known false alarm
|
||||
|
||||
if False:
|
||||
return [lambda: i for i in range(3)] # error
|
||||
|
||||
19
resources/test/fixtures/flake8_simplify/SIM112.py
vendored
Normal file
19
resources/test/fixtures/flake8_simplify/SIM112.py
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import os
|
||||
|
||||
# Bad
|
||||
os.environ['foo']
|
||||
|
||||
os.environ.get('foo')
|
||||
|
||||
os.environ.get('foo', 'bar')
|
||||
|
||||
os.getenv('foo')
|
||||
|
||||
# Good
|
||||
os.environ['FOO']
|
||||
|
||||
os.environ.get('FOO')
|
||||
|
||||
os.environ.get('FOO', 'bar')
|
||||
|
||||
os.getenv('FOO')
|
||||
3
resources/test/fixtures/isort/required_imports/comment.py
vendored
Normal file
3
resources/test/fixtures/isort/required_imports/comment.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
x = 1
|
||||
3
resources/test/fixtures/isort/required_imports/docstring.py
vendored
Normal file
3
resources/test/fixtures/isort/required_imports/docstring.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
"""Hello, world!"""
|
||||
|
||||
x = 1
|
||||
1
resources/test/fixtures/isort/required_imports/docstring_only.py
vendored
Normal file
1
resources/test/fixtures/isort/required_imports/docstring_only.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
"""Hello, world!"""
|
||||
2
resources/test/fixtures/isort/required_imports/docstring_with_continuation.py
vendored
Normal file
2
resources/test/fixtures/isort/required_imports/docstring_with_continuation.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
"""Hello, world!"""; x = \
|
||||
1; y = 2
|
||||
1
resources/test/fixtures/isort/required_imports/docstring_with_semicolon.py
vendored
Normal file
1
resources/test/fixtures/isort/required_imports/docstring_with_semicolon.py
vendored
Normal file
@@ -0,0 +1 @@
|
||||
"""Hello, world!"""; x = 1
|
||||
0
resources/test/fixtures/isort/required_imports/empty.py
vendored
Normal file
0
resources/test/fixtures/isort/required_imports/empty.py
vendored
Normal file
2
resources/test/fixtures/isort/required_imports/existing_import.py
vendored
Normal file
2
resources/test/fixtures/isort/required_imports/existing_import.py
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
from __future__ import generator_stop
|
||||
import os
|
||||
36
resources/test/fixtures/pyupgrade/UP030_0.py
vendored
Normal file
36
resources/test/fixtures/pyupgrade/UP030_0.py
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# Invalid calls; errors expected.
|
||||
|
||||
"{0}" "{1}" "{2}".format(1, 2, 3)
|
||||
|
||||
"a {3} complicated {1} string with {0} {2}".format(
|
||||
"first", "second", "third", "fourth"
|
||||
)
|
||||
|
||||
'{0}'.format(1)
|
||||
|
||||
'{0:x}'.format(30)
|
||||
|
||||
x = '{0}'.format(1)
|
||||
|
||||
'''{0}\n{1}\n'''.format(1, 2)
|
||||
|
||||
x = "foo {0}" \
|
||||
"bar {1}".format(1, 2)
|
||||
|
||||
("{0}").format(1)
|
||||
|
||||
"\N{snowman} {0}".format(1)
|
||||
|
||||
'{' '0}'.format(1)
|
||||
|
||||
# These will not change because we are waiting for libcst to fix this issue:
|
||||
# https://github.com/Instagram/LibCST/issues/846
|
||||
print(
|
||||
'foo{0}'
|
||||
'bar{1}'.format(1, 2)
|
||||
)
|
||||
|
||||
print(
|
||||
'foo{0}' # ohai\n"
|
||||
'bar{1}'.format(1, 2)
|
||||
)
|
||||
23
resources/test/fixtures/pyupgrade/UP030_1.py
vendored
Normal file
23
resources/test/fixtures/pyupgrade/UP030_1.py
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
# Valid calls; no errors expected.
|
||||
|
||||
'{}'.format(1)
|
||||
|
||||
|
||||
x = ('{0} {1}',)
|
||||
|
||||
'{0} {0}'.format(1)
|
||||
|
||||
'{0:<{1}}'.format(1, 4)
|
||||
|
||||
f"{0}".format(a)
|
||||
|
||||
f"{0}".format(1)
|
||||
|
||||
print(f"{0}".format(1))
|
||||
|
||||
# I did not include the following tests because ruff does not seem to work with
|
||||
# invalid python syntax (which is a good thing)
|
||||
|
||||
# "{0}"format(1)
|
||||
# '{'.format(1)", "'}'.format(1)
|
||||
# ("{0}" # {1}\n"{2}").format(1, 2, 3)
|
||||
@@ -820,6 +820,16 @@
|
||||
"null"
|
||||
]
|
||||
},
|
||||
"required-imports": {
|
||||
"description": "Add the specified import line to all files.",
|
||||
"type": [
|
||||
"array",
|
||||
"null"
|
||||
],
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"single-line-exclusions": {
|
||||
"description": "One or more modules to exclude from the single line rule.",
|
||||
"type": [
|
||||
@@ -1268,6 +1278,7 @@
|
||||
"I0",
|
||||
"I00",
|
||||
"I001",
|
||||
"I002",
|
||||
"I2",
|
||||
"I25",
|
||||
"I252",
|
||||
@@ -1493,6 +1504,8 @@
|
||||
"S50",
|
||||
"S501",
|
||||
"S506",
|
||||
"S508",
|
||||
"S509",
|
||||
"SIM",
|
||||
"SIM1",
|
||||
"SIM10",
|
||||
@@ -1506,6 +1519,7 @@
|
||||
"SIM11",
|
||||
"SIM110",
|
||||
"SIM111",
|
||||
"SIM112",
|
||||
"SIM117",
|
||||
"SIM118",
|
||||
"SIM2",
|
||||
@@ -1592,6 +1606,8 @@
|
||||
"UP027",
|
||||
"UP028",
|
||||
"UP029",
|
||||
"UP03",
|
||||
"UP030",
|
||||
"W",
|
||||
"W2",
|
||||
"W29",
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
[package]
|
||||
name = "ruff_dev"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
name = "ruff_dev"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
anyhow = { version = "1.0.66" }
|
||||
clap = { version = "4.0.1", features = ["derive"] }
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
[package]
|
||||
name = "ruff_macros"
|
||||
version = "0.0.217"
|
||||
version = "0.0.218"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
once_cell = { version = "1.17.0" }
|
||||
|
||||
@@ -12,9 +12,12 @@
|
||||
)]
|
||||
#![forbid(unsafe_code)]
|
||||
|
||||
use syn::{parse_macro_input, DeriveInput};
|
||||
use proc_macro2::Span;
|
||||
use quote::quote;
|
||||
use syn::{parse_macro_input, DeriveInput, Ident};
|
||||
|
||||
mod config;
|
||||
mod prefixes;
|
||||
mod rule_code_prefix;
|
||||
|
||||
#[proc_macro_derive(ConfigurationOptions, attributes(option, doc, option_group))]
|
||||
@@ -34,3 +37,23 @@ pub fn derive_rule_code_prefix(input: proc_macro::TokenStream) -> proc_macro::To
|
||||
.unwrap_or_else(syn::Error::into_compile_error)
|
||||
.into()
|
||||
}
|
||||
|
||||
#[proc_macro]
|
||||
pub fn origin_by_code(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let ident = parse_macro_input!(item as Ident).to_string();
|
||||
let mut iter = prefixes::PREFIX_TO_ORIGIN.iter();
|
||||
let origin = loop {
|
||||
let (prefix, origin) = iter
|
||||
.next()
|
||||
.unwrap_or_else(|| panic!("code doesn't start with any recognized prefix: {ident}"));
|
||||
if ident.starts_with(prefix) {
|
||||
break origin;
|
||||
}
|
||||
};
|
||||
let prefix = Ident::new(origin, Span::call_site());
|
||||
|
||||
quote! {
|
||||
RuleOrigin::#prefix
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
53
ruff_macros/src/prefixes.rs
Normal file
53
ruff_macros/src/prefixes.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
// Longer prefixes should come first so that you can find an origin for a code
|
||||
// by simply picking the first entry that starts with the given prefix.
|
||||
|
||||
pub const PREFIX_TO_ORIGIN: &[(&str, &str)] = &[
|
||||
("ANN", "Flake8Annotations"),
|
||||
("ARG", "Flake8UnusedArguments"),
|
||||
("A", "Flake8Builtins"),
|
||||
("BLE", "Flake8BlindExcept"),
|
||||
("B", "Flake8Bugbear"),
|
||||
("C4", "Flake8Comprehensions"),
|
||||
("C9", "McCabe"),
|
||||
("DTZ", "Flake8Datetimez"),
|
||||
("D", "Pydocstyle"),
|
||||
("ERA", "Eradicate"),
|
||||
("EM", "Flake8ErrMsg"),
|
||||
("E", "Pycodestyle"),
|
||||
("FBT", "Flake8BooleanTrap"),
|
||||
("F", "Pyflakes"),
|
||||
("ICN", "Flake8ImportConventions"),
|
||||
("ISC", "Flake8ImplicitStrConcat"),
|
||||
("I", "Isort"),
|
||||
("N", "PEP8Naming"),
|
||||
("PD", "PandasVet"),
|
||||
("PGH", "PygrepHooks"),
|
||||
("PL", "Pylint"),
|
||||
("PT", "Flake8PytestStyle"),
|
||||
("Q", "Flake8Quotes"),
|
||||
("RET", "Flake8Return"),
|
||||
("SIM", "Flake8Simplify"),
|
||||
("S", "Flake8Bandit"),
|
||||
("T10", "Flake8Debugger"),
|
||||
("T20", "Flake8Print"),
|
||||
("TID", "Flake8TidyImports"),
|
||||
("UP", "Pyupgrade"),
|
||||
("W", "Pycodestyle"),
|
||||
("YTT", "Flake82020"),
|
||||
("PIE", "Flake8Pie"),
|
||||
("RUF", "Ruff"),
|
||||
];
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::PREFIX_TO_ORIGIN;
|
||||
|
||||
#[test]
|
||||
fn order() {
|
||||
for (idx, (prefix, _)) in PREFIX_TO_ORIGIN.iter().enumerate() {
|
||||
for (prior_prefix, _) in PREFIX_TO_ORIGIN[..idx].iter() {
|
||||
assert!(!prefix.starts_with(prior_prefix));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -116,33 +116,6 @@ impl Violation for %s {
|
||||
fp.write("\n")
|
||||
has_written = True
|
||||
|
||||
# Add the relevant code-to-origin pair to `src/registry.rs`.
|
||||
with open(os.path.join(ROOT_DIR, "src/registry.rs")) as fp:
|
||||
content = fp.read()
|
||||
|
||||
seen_impl = False
|
||||
has_written = False
|
||||
with open(os.path.join(ROOT_DIR, "src/registry.rs"), "w") as fp:
|
||||
for line in content.splitlines():
|
||||
fp.write(line)
|
||||
fp.write("\n")
|
||||
|
||||
if has_written:
|
||||
continue
|
||||
|
||||
if line.startswith("impl RuleCode"):
|
||||
seen_impl = True
|
||||
continue
|
||||
|
||||
if not seen_impl:
|
||||
continue
|
||||
|
||||
if line.strip() == f"// {origin}":
|
||||
indent = line.split("//")[0]
|
||||
fp.write(f"{indent}RuleCode::{code} => RuleOrigin::{pascal_case(origin)},")
|
||||
fp.write("\n")
|
||||
has_written = True
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
|
||||
@@ -714,6 +714,21 @@ pub fn followed_by_multi_statement_line(stmt: &Stmt, locator: &SourceCodeLocator
|
||||
match_trailing_content(stmt, locator)
|
||||
}
|
||||
|
||||
/// Return `true` if a `Stmt` is a docstring.
|
||||
pub fn is_docstring_stmt(stmt: &Stmt) -> bool {
|
||||
if let StmtKind::Expr { value } = &stmt.node {
|
||||
matches!(
|
||||
value.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::Str { .. },
|
||||
..
|
||||
}
|
||||
)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
/// A simple representation of a call's positional and keyword arguments.
|
||||
pub struct SimpleCallArgs<'a> {
|
||||
@@ -759,6 +774,11 @@ impl<'a> SimpleCallArgs<'a> {
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Get the number of positional and keyword arguments used.
|
||||
pub fn len(&self) -> usize {
|
||||
self.args.len() + self.kwargs.len()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -1405,6 +1405,9 @@ where
|
||||
if self.settings.enabled.contains(&RuleCode::B015) {
|
||||
flake8_bugbear::rules::useless_comparison(self, value);
|
||||
}
|
||||
if self.settings.enabled.contains(&RuleCode::SIM112) {
|
||||
flake8_simplify::rules::use_capital_environment_variables(self, value);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -1827,6 +1830,8 @@ where
|
||||
|| self.settings.enabled.contains(&RuleCode::F523)
|
||||
|| self.settings.enabled.contains(&RuleCode::F524)
|
||||
|| self.settings.enabled.contains(&RuleCode::F525)
|
||||
// pyupgrade
|
||||
|| self.settings.enabled.contains(&RuleCode::UP030)
|
||||
{
|
||||
if let ExprKind::Attribute { value, attr, .. } = &func.node {
|
||||
if let ExprKind::Constant {
|
||||
@@ -1873,6 +1878,10 @@ where
|
||||
self, &summary, location,
|
||||
);
|
||||
}
|
||||
|
||||
if self.settings.enabled.contains(&RuleCode::UP030) {
|
||||
pyupgrade::rules::format_literals(self, &summary, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1988,6 +1997,28 @@ where
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.enabled.contains(&RuleCode::S508) {
|
||||
if let Some(diagnostic) = flake8_bandit::rules::snmp_insecure_version(
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
&self.from_imports,
|
||||
&self.import_aliases,
|
||||
) {
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.enabled.contains(&RuleCode::S509) {
|
||||
if let Some(diagnostic) = flake8_bandit::rules::snmp_weak_cryptography(
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
&self.from_imports,
|
||||
&self.import_aliases,
|
||||
) {
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.enabled.contains(&RuleCode::S106) {
|
||||
self.diagnostics
|
||||
.extend(flake8_bandit::rules::hardcoded_password_func_arg(keywords));
|
||||
|
||||
@@ -7,33 +7,12 @@ use rustpython_parser::ast::Suite;
|
||||
use crate::ast::visitor::Visitor;
|
||||
use crate::directives::IsortDirectives;
|
||||
use crate::isort;
|
||||
use crate::isort::track::ImportTracker;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::isort::track::{Block, ImportTracker};
|
||||
use crate::registry::{Diagnostic, RuleCode};
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::source_code_style::SourceCodeStyleDetector;
|
||||
|
||||
fn check_import_blocks(
|
||||
tracker: ImportTracker,
|
||||
locator: &SourceCodeLocator,
|
||||
settings: &Settings,
|
||||
stylist: &SourceCodeStyleDetector,
|
||||
autofix: flags::Autofix,
|
||||
package: Option<&Path>,
|
||||
) -> Vec<Diagnostic> {
|
||||
let mut diagnostics = vec![];
|
||||
for block in tracker.into_iter() {
|
||||
if !block.imports.is_empty() {
|
||||
if let Some(diagnostic) =
|
||||
isort::rules::check_imports(&block, locator, settings, stylist, autofix, package)
|
||||
{
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
diagnostics
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn check_imports(
|
||||
python_ast: &Suite,
|
||||
@@ -45,9 +24,33 @@ pub fn check_imports(
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
) -> Vec<Diagnostic> {
|
||||
let mut tracker = ImportTracker::new(locator, directives, path);
|
||||
for stmt in python_ast {
|
||||
tracker.visit_stmt(stmt);
|
||||
// Extract all imports from the AST.
|
||||
let tracker = {
|
||||
let mut tracker = ImportTracker::new(locator, directives, path);
|
||||
for stmt in python_ast {
|
||||
tracker.visit_stmt(stmt);
|
||||
}
|
||||
tracker
|
||||
};
|
||||
let blocks: Vec<&Block> = tracker.iter().collect();
|
||||
|
||||
// Enforce import rules.
|
||||
let mut diagnostics = vec![];
|
||||
if settings.enabled.contains(&RuleCode::I001) {
|
||||
for block in &blocks {
|
||||
if !block.imports.is_empty() {
|
||||
if let Some(diagnostic) = isort::rules::organize_imports(
|
||||
block, locator, settings, stylist, autofix, package,
|
||||
) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
check_import_blocks(tracker, locator, settings, stylist, autofix, package)
|
||||
if settings.enabled.contains(&RuleCode::I002) {
|
||||
diagnostics.extend(isort::rules::add_required_imports(
|
||||
&blocks, python_ast, locator, settings, autofix,
|
||||
));
|
||||
}
|
||||
diagnostics
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
use anyhow::{bail, Result};
|
||||
use libcst_native::{Expr, Import, ImportFrom, Module, SmallStatement, Statement};
|
||||
use libcst_native::{
|
||||
Call, Expr, Expression, Import, ImportFrom, Module, SmallStatement, Statement,
|
||||
};
|
||||
|
||||
pub fn match_module(module_text: &str) -> Result<Module> {
|
||||
match libcst_native::parse_module(module_text, None) {
|
||||
@@ -8,6 +10,13 @@ pub fn match_module(module_text: &str) -> Result<Module> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn match_expression(expression_text: &str) -> Result<Expression> {
|
||||
match libcst_native::parse_expression(expression_text) {
|
||||
Ok(expression) => Ok(expression),
|
||||
Err(_) => bail!("Failed to extract CST from source"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn match_expr<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut Expr<'b>> {
|
||||
if let Some(Statement::Simple(expr)) = module.body.first_mut() {
|
||||
if let Some(SmallStatement::Expr(expr)) = expr.body.first_mut() {
|
||||
@@ -43,3 +52,11 @@ pub fn match_import_from<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut I
|
||||
bail!("Expected Statement::Simple")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn match_call<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut Call<'b>> {
|
||||
if let Expression::Call(call) = expression {
|
||||
Ok(call)
|
||||
} else {
|
||||
bail!("Expected SmallStatement::Expr")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ impl Flags {
|
||||
pub struct IsortDirectives {
|
||||
pub exclusions: IntSet<usize>,
|
||||
pub splits: Vec<usize>,
|
||||
pub skip_file: bool,
|
||||
}
|
||||
|
||||
pub struct Directives {
|
||||
@@ -89,17 +90,11 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
|
||||
pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
let mut exclusions: IntSet<usize> = IntSet::default();
|
||||
let mut splits: Vec<usize> = Vec::default();
|
||||
let mut skip_file: bool = false;
|
||||
let mut off: Option<Location> = None;
|
||||
let mut last: Option<Location> = None;
|
||||
for &(start, ref tok, end) in lxr.iter().flatten() {
|
||||
last = Some(end);
|
||||
|
||||
// No need to keep processing, but we do need to determine the last token.
|
||||
if skip_file {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Tok::Comment(comment_text) = tok else {
|
||||
continue;
|
||||
};
|
||||
@@ -111,7 +106,10 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
if comment_text == "# isort: split" {
|
||||
splits.push(start.row());
|
||||
} else if comment_text == "# isort: skip_file" || comment_text == "# isort:skip_file" {
|
||||
skip_file = true;
|
||||
return IsortDirectives {
|
||||
skip_file: true,
|
||||
..IsortDirectives::default()
|
||||
};
|
||||
} else if off.is_some() {
|
||||
if comment_text == "# isort: on" {
|
||||
if let Some(start) = off {
|
||||
@@ -130,14 +128,7 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
}
|
||||
}
|
||||
|
||||
if skip_file {
|
||||
// Enforce `isort: skip_file`.
|
||||
if let Some(end) = last {
|
||||
for row in 1..=end.row() {
|
||||
exclusions.insert(row);
|
||||
}
|
||||
}
|
||||
} else if let Some(start) = off {
|
||||
if let Some(start) = off {
|
||||
// Enforce unterminated `isort: off`.
|
||||
if let Some(end) = last {
|
||||
for row in start.row() + 1..=end.row() {
|
||||
@@ -145,7 +136,11 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
}
|
||||
}
|
||||
}
|
||||
IsortDirectives { exclusions, splits }
|
||||
IsortDirectives {
|
||||
exclusions,
|
||||
splits,
|
||||
..IsortDirectives::default()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -283,10 +278,7 @@ x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([1, 2, 3, 4])
|
||||
);
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
|
||||
let contents = "# isort: off
|
||||
x = 1
|
||||
@@ -295,10 +287,7 @@ y = 2
|
||||
# isort: skip_file
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([1, 2, 3, 4, 5, 6])
|
||||
);
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -25,6 +25,8 @@ mod tests {
|
||||
#[test_case(RuleCode::S324, Path::new("S324.py"); "S324")]
|
||||
#[test_case(RuleCode::S501, Path::new("S501.py"); "S501")]
|
||||
#[test_case(RuleCode::S506, Path::new("S506.py"); "S506")]
|
||||
#[test_case(RuleCode::S508, Path::new("S508.py"); "S508")]
|
||||
#[test_case(RuleCode::S509, Path::new("S509.py"); "S509")]
|
||||
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -11,6 +11,8 @@ pub use hardcoded_tmp_directory::hardcoded_tmp_directory;
|
||||
pub use hashlib_insecure_hash_functions::hashlib_insecure_hash_functions;
|
||||
pub use request_with_no_cert_validation::request_with_no_cert_validation;
|
||||
pub use request_without_timeout::request_without_timeout;
|
||||
pub use snmp_insecure_version::snmp_insecure_version;
|
||||
pub use snmp_weak_cryptography::snmp_weak_cryptography;
|
||||
pub use unsafe_yaml_load::unsafe_yaml_load;
|
||||
|
||||
mod assert_used;
|
||||
@@ -24,4 +26,6 @@ mod hardcoded_tmp_directory;
|
||||
mod hashlib_insecure_hash_functions;
|
||||
mod request_with_no_cert_validation;
|
||||
mod request_without_timeout;
|
||||
mod snmp_insecure_version;
|
||||
mod snmp_weak_cryptography;
|
||||
mod unsafe_yaml_load;
|
||||
|
||||
40
src/flake8_bandit/rules/snmp_insecure_version.rs
Normal file
40
src/flake8_bandit/rules/snmp_insecure_version.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
use num_traits::{One, Zero};
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_ast::{Expr, ExprKind, Keyword};
|
||||
use rustpython_parser::ast::Constant;
|
||||
|
||||
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path, SimpleCallArgs};
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violations;
|
||||
|
||||
/// S508
|
||||
pub fn snmp_insecure_version(
|
||||
func: &Expr,
|
||||
args: &[Expr],
|
||||
keywords: &[Keyword],
|
||||
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
|
||||
import_aliases: &FxHashMap<&str, &str>,
|
||||
) -> Option<Diagnostic> {
|
||||
let call_path = dealias_call_path(collect_call_paths(func), import_aliases);
|
||||
|
||||
if match_call_path(&call_path, "pysnmp.hlapi", "CommunityData", from_imports) {
|
||||
let call_args = SimpleCallArgs::new(args, keywords);
|
||||
|
||||
if let Some(mp_model_arg) = call_args.get_argument("mpModel", None) {
|
||||
if let ExprKind::Constant {
|
||||
value: Constant::Int(value),
|
||||
..
|
||||
} = &mp_model_arg.node
|
||||
{
|
||||
if value.is_zero() || value.is_one() {
|
||||
return Some(Diagnostic::new(
|
||||
violations::SnmpInsecureVersion,
|
||||
Range::from_located(mp_model_arg),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
30
src/flake8_bandit/rules/snmp_weak_cryptography.rs
Normal file
30
src/flake8_bandit/rules/snmp_weak_cryptography.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_ast::{Expr, Keyword};
|
||||
|
||||
use crate::ast::helpers::{collect_call_paths, dealias_call_path, match_call_path, SimpleCallArgs};
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violations;
|
||||
|
||||
/// S509
|
||||
pub fn snmp_weak_cryptography(
|
||||
func: &Expr,
|
||||
args: &[Expr],
|
||||
keywords: &[Keyword],
|
||||
from_imports: &FxHashMap<&str, FxHashSet<&str>>,
|
||||
import_aliases: &FxHashMap<&str, &str>,
|
||||
) -> Option<Diagnostic> {
|
||||
let call_path = dealias_call_path(collect_call_paths(func), import_aliases);
|
||||
|
||||
if match_call_path(&call_path, "pysnmp.hlapi", "UsmUserData", from_imports) {
|
||||
let call_args = SimpleCallArgs::new(args, keywords);
|
||||
|
||||
if call_args.len() < 3 {
|
||||
return Some(Diagnostic::new(
|
||||
violations::SnmpWeakCryptography,
|
||||
Range::from_located(func),
|
||||
));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
---
|
||||
source: src/flake8_bandit/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
SnmpInsecureVersion: ~
|
||||
location:
|
||||
row: 3
|
||||
column: 32
|
||||
end_location:
|
||||
row: 3
|
||||
column: 33
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
SnmpInsecureVersion: ~
|
||||
location:
|
||||
row: 4
|
||||
column: 32
|
||||
end_location:
|
||||
row: 4
|
||||
column: 33
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,25 @@
|
||||
---
|
||||
source: src/flake8_bandit/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
SnmpWeakCryptography: ~
|
||||
location:
|
||||
row: 4
|
||||
column: 11
|
||||
end_location:
|
||||
row: 4
|
||||
column: 22
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
SnmpWeakCryptography: ~
|
||||
location:
|
||||
row: 5
|
||||
column: 15
|
||||
end_location:
|
||||
row: 5
|
||||
column: 26
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -12,7 +12,9 @@ use crate::violations;
|
||||
#[derive(Default)]
|
||||
struct LoadedNamesVisitor<'a> {
|
||||
// Tuple of: name, defining expression, and defining range.
|
||||
names: Vec<(&'a str, &'a Expr, Range)>,
|
||||
loaded: Vec<(&'a str, &'a Expr, Range)>,
|
||||
// Tuple of: name, defining expression, and defining range.
|
||||
stored: Vec<(&'a str, &'a Expr, Range)>,
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all used identifiers in a statement.
|
||||
@@ -22,12 +24,11 @@ where
|
||||
{
|
||||
fn visit_expr(&mut self, expr: &'b Expr) {
|
||||
match &expr.node {
|
||||
ExprKind::JoinedStr { .. } => {
|
||||
visitor::walk_expr(self, expr);
|
||||
}
|
||||
ExprKind::Name { id, ctx } if matches!(ctx, ExprContext::Load) => {
|
||||
self.names.push((id, expr, Range::from_located(expr)));
|
||||
}
|
||||
ExprKind::Name { id, ctx } => match ctx {
|
||||
ExprContext::Load => self.loaded.push((id, expr, Range::from_located(expr))),
|
||||
ExprContext::Store => self.stored.push((id, expr, Range::from_located(expr))),
|
||||
ExprContext::Del => {}
|
||||
},
|
||||
_ => visitor::walk_expr(self, expr),
|
||||
}
|
||||
}
|
||||
@@ -36,6 +37,7 @@ where
|
||||
#[derive(Default)]
|
||||
struct SuspiciousVariablesVisitor<'a> {
|
||||
names: Vec<(&'a str, &'a Expr, Range)>,
|
||||
safe_functions: Vec<&'a Expr>,
|
||||
}
|
||||
|
||||
/// `Visitor` to collect all suspicious variables (those referenced in
|
||||
@@ -50,45 +52,90 @@ where
|
||||
| StmtKind::AsyncFunctionDef { args, body, .. } => {
|
||||
// Collect all loaded variable names.
|
||||
let mut visitor = LoadedNamesVisitor::default();
|
||||
for stmt in body {
|
||||
visitor.visit_stmt(stmt);
|
||||
}
|
||||
visitor.visit_body(body);
|
||||
|
||||
// Collect all argument names.
|
||||
let arg_names = collect_arg_names(args);
|
||||
let mut arg_names = collect_arg_names(args);
|
||||
arg_names.extend(visitor.stored.iter().map(|(id, ..)| id));
|
||||
|
||||
// Treat any non-arguments as "suspicious".
|
||||
self.names.extend(
|
||||
visitor
|
||||
.names
|
||||
.into_iter()
|
||||
.loaded
|
||||
.iter()
|
||||
.filter(|(id, ..)| !arg_names.contains(id)),
|
||||
);
|
||||
}
|
||||
_ => visitor::walk_stmt(self, stmt),
|
||||
StmtKind::Return { value: Some(value) } => {
|
||||
// Mark `return lambda: x` as safe.
|
||||
if matches!(value.node, ExprKind::Lambda { .. }) {
|
||||
self.safe_functions.push(value);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
visitor::walk_stmt(self, stmt);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'b Expr) {
|
||||
match &expr.node {
|
||||
ExprKind::Lambda { args, body } => {
|
||||
// Collect all loaded variable names.
|
||||
let mut visitor = LoadedNamesVisitor::default();
|
||||
visitor.visit_expr(body);
|
||||
|
||||
// Collect all argument names.
|
||||
let arg_names = collect_arg_names(args);
|
||||
|
||||
// Treat any non-arguments as "suspicious".
|
||||
self.names.extend(
|
||||
visitor
|
||||
.names
|
||||
.into_iter()
|
||||
.filter(|(id, ..)| !arg_names.contains(id)),
|
||||
);
|
||||
ExprKind::Call {
|
||||
func,
|
||||
args,
|
||||
keywords,
|
||||
} => {
|
||||
if let ExprKind::Name { id, .. } = &func.node {
|
||||
if id == "filter" || id == "reduce" || id == "map" {
|
||||
for arg in args {
|
||||
if matches!(arg.node, ExprKind::Lambda { .. }) {
|
||||
self.safe_functions.push(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let ExprKind::Attribute { value, attr, .. } = &func.node {
|
||||
if attr == "reduce" {
|
||||
if let ExprKind::Name { id, .. } = &value.node {
|
||||
if id == "functools" {
|
||||
for arg in args {
|
||||
if matches!(arg.node, ExprKind::Lambda { .. }) {
|
||||
self.safe_functions.push(arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for keyword in keywords {
|
||||
if keyword.node.arg.as_ref().map_or(false, |arg| arg == "key")
|
||||
&& matches!(keyword.node.value.node, ExprKind::Lambda { .. })
|
||||
{
|
||||
self.safe_functions.push(&keyword.node.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => visitor::walk_expr(self, expr),
|
||||
ExprKind::Lambda { args, body } => {
|
||||
if !self.safe_functions.contains(&expr) {
|
||||
// Collect all loaded variable names.
|
||||
let mut visitor = LoadedNamesVisitor::default();
|
||||
visitor.visit_expr(body);
|
||||
|
||||
// Collect all argument names.
|
||||
let mut arg_names = collect_arg_names(args);
|
||||
arg_names.extend(visitor.stored.iter().map(|(id, ..)| id));
|
||||
|
||||
// Treat any non-arguments as "suspicious".
|
||||
self.names.extend(
|
||||
visitor
|
||||
.loaded
|
||||
.iter()
|
||||
.filter(|(id, ..)| !arg_names.contains(id)),
|
||||
);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
visitor::walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
source: src/flake8_bugbear/mod.rs
|
||||
expression: checks
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
@@ -172,4 +172,74 @@ expression: checks
|
||||
column: 16
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
location:
|
||||
row: 117
|
||||
column: 23
|
||||
end_location:
|
||||
row: 117
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
location:
|
||||
row: 118
|
||||
column: 26
|
||||
end_location:
|
||||
row: 118
|
||||
column: 27
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
location:
|
||||
row: 119
|
||||
column: 36
|
||||
end_location:
|
||||
row: 119
|
||||
column: 37
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
location:
|
||||
row: 120
|
||||
column: 37
|
||||
end_location:
|
||||
row: 120
|
||||
column: 38
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: x
|
||||
location:
|
||||
row: 121
|
||||
column: 36
|
||||
end_location:
|
||||
row: 121
|
||||
column: 37
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: name
|
||||
location:
|
||||
row: 171
|
||||
column: 28
|
||||
end_location:
|
||||
row: 171
|
||||
column: 32
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FunctionUsesLoopVariable: i
|
||||
location:
|
||||
row: 174
|
||||
column: 28
|
||||
end_location:
|
||||
row: 174
|
||||
column: 29
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ use super::helpers::{
|
||||
get_mark_decorators, get_mark_name, is_abstractmethod_decorator, is_pytest_fixture,
|
||||
is_pytest_yield_fixture, keyword_is_literal,
|
||||
};
|
||||
use crate::ast::helpers::{collect_arg_names, collect_call_paths, identifier_range};
|
||||
use crate::ast::helpers::{collect_arg_names, collect_call_paths};
|
||||
use crate::ast::types::Range;
|
||||
use crate::ast::visitor;
|
||||
use crate::ast::visitor::Visitor;
|
||||
@@ -156,33 +156,19 @@ fn check_fixture_returns(checker: &mut Checker, func: &Stmt, func_name: &str, bo
|
||||
&& visitor.has_return_with_value
|
||||
&& func_name.starts_with('_')
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
violations::IncorrectFixtureNameUnderscore(func_name.to_string()),
|
||||
Range::from_located(func),
|
||||
);
|
||||
if checker.patch(diagnostic.kind.code()) {
|
||||
let func_name_range = identifier_range(func, checker.locator);
|
||||
let num_underscores = func_name.len() - func_name.trim_start_matches('_').len();
|
||||
diagnostic.amend(Fix::deletion(
|
||||
func_name_range.location,
|
||||
func_name_range.location.with_col_offset(num_underscores),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
));
|
||||
} else if checker.settings.enabled.contains(&RuleCode::PT004)
|
||||
&& !visitor.has_return_with_value
|
||||
&& !visitor.has_yield_from
|
||||
&& !func_name.starts_with('_')
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
violations::MissingFixtureNameUnderscore(func_name.to_string()),
|
||||
Range::from_located(func),
|
||||
);
|
||||
if checker.patch(diagnostic.kind.code()) {
|
||||
let func_name_range = identifier_range(func, checker.locator);
|
||||
diagnostic.amend(Fix::insertion("_".to_string(), func_name_range.location));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
));
|
||||
}
|
||||
|
||||
if checker.settings.enabled.contains(&RuleCode::PT022) {
|
||||
|
||||
@@ -10,14 +10,7 @@ expression: diagnostics
|
||||
end_location:
|
||||
row: 52
|
||||
column: 30
|
||||
fix:
|
||||
content: _
|
||||
location:
|
||||
row: 51
|
||||
column: 4
|
||||
end_location:
|
||||
row: 51
|
||||
column: 4
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingFixtureNameUnderscore: activate_context
|
||||
@@ -27,13 +20,6 @@ expression: diagnostics
|
||||
end_location:
|
||||
row: 58
|
||||
column: 13
|
||||
fix:
|
||||
content: _
|
||||
location:
|
||||
row: 56
|
||||
column: 4
|
||||
end_location:
|
||||
row: 56
|
||||
column: 4
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -10,14 +10,7 @@ expression: diagnostics
|
||||
end_location:
|
||||
row: 42
|
||||
column: 12
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 41
|
||||
column: 4
|
||||
end_location:
|
||||
row: 41
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
IncorrectFixtureNameUnderscore: _activate_context
|
||||
@@ -27,14 +20,7 @@ expression: diagnostics
|
||||
end_location:
|
||||
row: 48
|
||||
column: 21
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 46
|
||||
column: 4
|
||||
end_location:
|
||||
row: 46
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
IncorrectFixtureNameUnderscore: _activate_context
|
||||
@@ -44,13 +30,6 @@ expression: diagnostics
|
||||
end_location:
|
||||
row: 57
|
||||
column: 34
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 52
|
||||
column: 4
|
||||
end_location:
|
||||
row: 52
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ mod tests {
|
||||
#[test_case(RuleCode::SIM109, Path::new("SIM109.py"); "SIM109")]
|
||||
#[test_case(RuleCode::SIM110, Path::new("SIM110.py"); "SIM110")]
|
||||
#[test_case(RuleCode::SIM111, Path::new("SIM111.py"); "SIM111")]
|
||||
#[test_case(RuleCode::SIM112, Path::new("SIM112.py"); "SIM112")]
|
||||
#[test_case(RuleCode::SIM117, Path::new("SIM117.py"); "SIM117")]
|
||||
#[test_case(RuleCode::SIM201, Path::new("SIM201.py"); "SIM201")]
|
||||
#[test_case(RuleCode::SIM202, Path::new("SIM202.py"); "SIM202")]
|
||||
|
||||
106
src/flake8_simplify/rules/ast_expr.rs
Normal file
106
src/flake8_simplify/rules/ast_expr.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use rustpython_ast::{Constant, Expr, ExprKind};
|
||||
|
||||
use crate::ast::helpers::{create_expr, match_module_member, unparse_expr};
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::{Diagnostic, RuleCode};
|
||||
use crate::violations;
|
||||
|
||||
/// SIM112
|
||||
pub fn use_capital_environment_variables(checker: &mut Checker, expr: &Expr) {
|
||||
// check `os.environ['foo']`
|
||||
if let ExprKind::Subscript { .. } = &expr.node {
|
||||
check_os_environ_subscript(checker, expr);
|
||||
return;
|
||||
}
|
||||
|
||||
// check `os.environ.get('foo')` and `os.getenv('foo')``
|
||||
let is_os_environ_get = match_module_member(
|
||||
expr,
|
||||
"os.environ",
|
||||
"get",
|
||||
&checker.from_imports,
|
||||
&checker.import_aliases,
|
||||
);
|
||||
let is_os_getenv = match_module_member(
|
||||
expr,
|
||||
"os",
|
||||
"getenv",
|
||||
&checker.from_imports,
|
||||
&checker.import_aliases,
|
||||
);
|
||||
if !(is_os_environ_get || is_os_getenv) {
|
||||
return;
|
||||
}
|
||||
|
||||
let ExprKind::Call { args, .. } = &expr.node else {
|
||||
return;
|
||||
};
|
||||
let Some(arg) = args.get(0) else {
|
||||
return;
|
||||
};
|
||||
let ExprKind::Constant { value: Constant::Str(env_var), kind } = &arg.node else {
|
||||
return;
|
||||
};
|
||||
let capital_env_var = env_var.to_ascii_uppercase();
|
||||
if &capital_env_var == env_var {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
violations::UseCapitalEnvironmentVariables(capital_env_var.clone(), env_var.clone()),
|
||||
Range::from_located(arg),
|
||||
);
|
||||
if checker.patch(&RuleCode::SIM112) {
|
||||
let new_env_var = create_expr(ExprKind::Constant {
|
||||
value: capital_env_var.into(),
|
||||
kind: kind.clone(),
|
||||
});
|
||||
diagnostic.amend(Fix::replacement(
|
||||
unparse_expr(&new_env_var, checker.style),
|
||||
arg.location,
|
||||
arg.end_location.unwrap(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
fn check_os_environ_subscript(checker: &mut Checker, expr: &Expr) {
|
||||
let ExprKind::Subscript { value, slice, .. } = &expr.node else {
|
||||
return;
|
||||
};
|
||||
let ExprKind::Attribute { value: attr_value, attr, .. } = &value.node else {
|
||||
return;
|
||||
};
|
||||
let ExprKind::Name { id, .. } = &attr_value.node else {
|
||||
return;
|
||||
};
|
||||
if id != "os" || attr != "environ" {
|
||||
return;
|
||||
}
|
||||
let ExprKind::Constant { value: Constant::Str(env_var), kind } = &slice.node else {
|
||||
return;
|
||||
};
|
||||
let capital_env_var = env_var.to_ascii_uppercase();
|
||||
if &capital_env_var == env_var {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
violations::UseCapitalEnvironmentVariables(capital_env_var.clone(), env_var.clone()),
|
||||
Range::from_located(slice),
|
||||
);
|
||||
if checker.patch(&RuleCode::SIM112) {
|
||||
let new_env_var = create_expr(ExprKind::Constant {
|
||||
value: capital_env_var.into(),
|
||||
kind: kind.clone(),
|
||||
});
|
||||
diagnostic.amend(Fix::replacement(
|
||||
unparse_expr(&new_env_var, checker.style),
|
||||
slice.location,
|
||||
slice.end_location.unwrap(),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
pub use ast_bool_op::{
|
||||
a_and_not_a, a_or_not_a, and_false, compare_with_tuple, duplicate_isinstance_call, or_true,
|
||||
};
|
||||
pub use ast_expr::use_capital_environment_variables;
|
||||
pub use ast_for::convert_loop_to_any_all;
|
||||
pub use ast_if::{nested_if_statements, return_bool_condition_directly, use_ternary_operator};
|
||||
pub use ast_ifexp::{
|
||||
@@ -14,6 +15,7 @@ pub use use_contextlib_suppress::use_contextlib_suppress;
|
||||
pub use yoda_conditions::yoda_conditions;
|
||||
|
||||
mod ast_bool_op;
|
||||
mod ast_expr;
|
||||
mod ast_for;
|
||||
mod ast_if;
|
||||
mod ast_ifexp;
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
---
|
||||
source: src/flake8_simplify/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
UseCapitalEnvironmentVariables:
|
||||
- FOO
|
||||
- foo
|
||||
location:
|
||||
row: 4
|
||||
column: 11
|
||||
end_location:
|
||||
row: 4
|
||||
column: 16
|
||||
fix:
|
||||
content: "'FOO'"
|
||||
location:
|
||||
row: 4
|
||||
column: 11
|
||||
end_location:
|
||||
row: 4
|
||||
column: 16
|
||||
parent: ~
|
||||
- kind:
|
||||
UseCapitalEnvironmentVariables:
|
||||
- FOO
|
||||
- foo
|
||||
location:
|
||||
row: 6
|
||||
column: 15
|
||||
end_location:
|
||||
row: 6
|
||||
column: 20
|
||||
fix:
|
||||
content: "'FOO'"
|
||||
location:
|
||||
row: 6
|
||||
column: 15
|
||||
end_location:
|
||||
row: 6
|
||||
column: 20
|
||||
parent: ~
|
||||
- kind:
|
||||
UseCapitalEnvironmentVariables:
|
||||
- FOO
|
||||
- foo
|
||||
location:
|
||||
row: 8
|
||||
column: 15
|
||||
end_location:
|
||||
row: 8
|
||||
column: 20
|
||||
fix:
|
||||
content: "'FOO'"
|
||||
location:
|
||||
row: 8
|
||||
column: 15
|
||||
end_location:
|
||||
row: 8
|
||||
column: 20
|
||||
parent: ~
|
||||
- kind:
|
||||
UseCapitalEnvironmentVariables:
|
||||
- FOO
|
||||
- foo
|
||||
location:
|
||||
row: 10
|
||||
column: 10
|
||||
end_location:
|
||||
row: 10
|
||||
column: 15
|
||||
fix:
|
||||
content: "'FOO'"
|
||||
location:
|
||||
row: 10
|
||||
column: 10
|
||||
end_location:
|
||||
row: 10
|
||||
column: 15
|
||||
parent: ~
|
||||
|
||||
@@ -1,19 +1,6 @@
|
||||
use rustpython_ast::{Constant, ExprKind, Stmt, StmtKind};
|
||||
|
||||
/// Return `true` if a `Stmt` is a docstring.
|
||||
fn is_docstring_stmt(stmt: &Stmt) -> bool {
|
||||
if let StmtKind::Expr { value } = &stmt.node {
|
||||
matches!(
|
||||
value.node,
|
||||
ExprKind::Constant {
|
||||
value: Constant::Str { .. },
|
||||
..
|
||||
}
|
||||
)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
use crate::ast::helpers::is_docstring_stmt;
|
||||
|
||||
/// Return `true` if a `Stmt` is a "empty": a `pass`, `...`, `raise
|
||||
/// NotImplementedError`, or `raise NotImplemented` (with or without arguments).
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use rustpython_ast::Stmt;
|
||||
use rustpython_ast::{Location, Stmt};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
|
||||
use crate::ast::helpers::is_docstring_stmt;
|
||||
use crate::ast::types::Range;
|
||||
use crate::isort::types::TrailingComma;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
@@ -86,3 +87,122 @@ pub fn has_comment_break(stmt: &Stmt, locator: &SourceCodeLocator) -> bool {
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Find the end of the last docstring.
|
||||
fn match_docstring_end(body: &[Stmt]) -> Option<Location> {
|
||||
let mut iter = body.iter();
|
||||
let Some(mut stmt) = iter.next() else {
|
||||
return None;
|
||||
};
|
||||
if !is_docstring_stmt(stmt) {
|
||||
return None;
|
||||
}
|
||||
for next in iter {
|
||||
if !is_docstring_stmt(next) {
|
||||
break;
|
||||
}
|
||||
stmt = next;
|
||||
}
|
||||
Some(stmt.end_location.unwrap())
|
||||
}
|
||||
|
||||
/// Find the end of the first token that isn't a docstring, comment, or
|
||||
/// whitespace.
|
||||
pub fn find_splice_location(body: &[Stmt], locator: &SourceCodeLocator) -> Location {
|
||||
// Find the first AST node that isn't a docstring.
|
||||
let mut splice = match_docstring_end(body).unwrap_or_default();
|
||||
|
||||
// Find the first token that isn't a comment or whitespace.
|
||||
let contents = locator.slice_source_code_at(&splice);
|
||||
for (.., tok, end) in lexer::make_tokenizer(&contents).flatten() {
|
||||
if matches!(tok, Tok::Comment(..) | Tok::Newline) {
|
||||
splice = end;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
splice
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::isort::helpers::find_splice_location;
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
|
||||
fn splice_contents(contents: &str) -> Result<Location> {
|
||||
let program = parser::parse_program(contents, "<filename>")?;
|
||||
let locator = SourceCodeLocator::new(contents);
|
||||
Ok(find_splice_location(&program, &locator))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn splice() -> Result<()> {
|
||||
let contents = "";
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
|
||||
|
||||
let contents = r#"
|
||||
"""Hello, world!"""
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
|
||||
|
||||
let contents = r#"
|
||||
"""Hello, world!"""
|
||||
"""Hello, world!"""
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(2, 19));
|
||||
|
||||
let contents = r#"
|
||||
x = 1
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
|
||||
|
||||
let contents = r#"
|
||||
#!/usr/bin/env python3
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 22));
|
||||
|
||||
let contents = r#"
|
||||
#!/usr/bin/env python3
|
||||
"""Hello, world!"""
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(2, 19));
|
||||
|
||||
let contents = r#"
|
||||
"""Hello, world!"""
|
||||
#!/usr/bin/env python3
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(2, 22));
|
||||
|
||||
let contents = r#"
|
||||
"""%s""" % "Hello, world!"
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
|
||||
|
||||
let contents = r#"
|
||||
"""Hello, world!"""; x = 1
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
|
||||
|
||||
let contents = r#"
|
||||
"""Hello, world!"""; x = 1; y = \
|
||||
2
|
||||
"#
|
||||
.trim();
|
||||
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -828,4 +828,99 @@ mod tests {
|
||||
insta::assert_yaml_snapshot!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("docstring.py"))]
|
||||
#[test_case(Path::new("docstring_only.py"))]
|
||||
#[test_case(Path::new("empty.py"))]
|
||||
fn required_import(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("required_import_{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("./resources/test/fixtures/isort/required_imports")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&Settings {
|
||||
src: vec![Path::new("resources/test/fixtures/isort").to_path_buf()],
|
||||
isort: isort::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
"from __future__ import annotations".to_string()
|
||||
]),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
..Settings::for_rule(RuleCode::I002)
|
||||
},
|
||||
)?;
|
||||
insta::assert_yaml_snapshot!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("docstring.py"))]
|
||||
#[test_case(Path::new("docstring_only.py"))]
|
||||
#[test_case(Path::new("empty.py"))]
|
||||
fn required_imports(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("required_imports_{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("./resources/test/fixtures/isort/required_imports")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&Settings {
|
||||
src: vec![Path::new("resources/test/fixtures/isort").to_path_buf()],
|
||||
isort: isort::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
"from __future__ import annotations".to_string(),
|
||||
"from __future__ import generator_stop".to_string(),
|
||||
]),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
..Settings::for_rule(RuleCode::I002)
|
||||
},
|
||||
)?;
|
||||
insta::assert_yaml_snapshot!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("docstring.py"))]
|
||||
#[test_case(Path::new("docstring_only.py"))]
|
||||
#[test_case(Path::new("empty.py"))]
|
||||
fn combined_required_imports(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("combined_required_imports_{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("./resources/test/fixtures/isort/required_imports")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&Settings {
|
||||
src: vec![Path::new("resources/test/fixtures/isort").to_path_buf()],
|
||||
isort: isort::settings::Settings {
|
||||
required_imports: BTreeSet::from(["from __future__ import annotations, \
|
||||
generator_stop"
|
||||
.to_string()]),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
..Settings::for_rule(RuleCode::I002)
|
||||
},
|
||||
)?;
|
||||
insta::assert_yaml_snapshot!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Path::new("docstring.py"))]
|
||||
#[test_case(Path::new("docstring_only.py"))]
|
||||
#[test_case(Path::new("empty.py"))]
|
||||
fn straight_required_import(path: &Path) -> Result<()> {
|
||||
let snapshot = format!("straight_required_import_{}", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("./resources/test/fixtures/isort/required_imports")
|
||||
.join(path)
|
||||
.as_path(),
|
||||
&Settings {
|
||||
src: vec![Path::new("resources/test/fixtures/isort").to_path_buf()],
|
||||
isort: isort::settings::Settings {
|
||||
required_imports: BTreeSet::from(["import os".to_string()]),
|
||||
..isort::settings::Settings::default()
|
||||
},
|
||||
..Settings::for_rule(RuleCode::I002)
|
||||
},
|
||||
)?;
|
||||
insta::assert_yaml_snapshot!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
222
src/isort/rules/add_required_imports.rs
Normal file
222
src/isort/rules/add_required_imports.rs
Normal file
@@ -0,0 +1,222 @@
|
||||
use std::fmt;
|
||||
|
||||
use log::error;
|
||||
use rustpython_ast::{Location, StmtKind, Suite};
|
||||
|
||||
use crate::ast::helpers::is_docstring_stmt;
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::isort::helpers;
|
||||
use crate::isort::track::Block;
|
||||
use crate::registry::{Diagnostic, RuleCode};
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code_locator::SourceCodeLocator;
|
||||
use crate::violations;
|
||||
|
||||
/// An import alias: the module/member name plus an optional `as` binding.
struct Alias<'a> {
    name: &'a str,
    as_name: Option<&'a str>,
}

/// A required `from ... import ...` statement, decomposed into its parts.
struct ImportFrom<'a> {
    module: Option<&'a str>,
    name: Alias<'a>,
    // Number of leading dots (the relative-import level), if any.
    level: Option<&'a usize>,
}

/// A required `import ...` statement.
struct Import<'a> {
    name: Alias<'a>,
}

/// Either form of a required import.
enum AnyImport<'a> {
    Import(Import<'a>),
    ImportFrom(ImportFrom<'a>),
}

impl fmt::Display for ImportFrom<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "from ")?;
        if let Some(level) = self.level {
            write!(f, "{}", ".".repeat(*level))?;
        }
        if let Some(module) = self.module {
            write!(f, "{module}")?;
        }
        write!(f, " import {}", self.name.name)?;
        // Bug fix: the `as` alias was matched by `contains` but never
        // rendered, so a required `from x import y as z` previously generated
        // an incomplete fix (missing ` as z`).
        if let Some(as_name) = self.name.as_name {
            write!(f, " as {as_name}")?;
        }
        Ok(())
    }
}

impl fmt::Display for Import<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "import {}", self.name.name)?;
        if let Some(as_name) = self.name.as_name {
            write!(f, " as {as_name}")?;
        }
        Ok(())
    }
}

impl fmt::Display for AnyImport<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            AnyImport::Import(import) => write!(f, "{import}"),
            AnyImport::ImportFrom(import_from) => write!(f, "{import_from}"),
        }
    }
}
|
||||
|
||||
fn contains(block: &Block, required_import: &AnyImport) -> bool {
|
||||
block.imports.iter().any(|import| match required_import {
|
||||
AnyImport::Import(required_import) => {
|
||||
let StmtKind::Import {
|
||||
names,
|
||||
} = &import.node else {
|
||||
return false;
|
||||
};
|
||||
names.iter().any(|alias| {
|
||||
alias.node.name == required_import.name.name
|
||||
&& alias.node.asname.as_deref() == required_import.name.as_name
|
||||
})
|
||||
}
|
||||
AnyImport::ImportFrom(required_import) => {
|
||||
let StmtKind::ImportFrom {
|
||||
module,
|
||||
names,
|
||||
level,
|
||||
} = &import.node else {
|
||||
return false;
|
||||
};
|
||||
module.as_deref() == required_import.module
|
||||
&& level.as_ref() == required_import.level
|
||||
&& names.iter().any(|alias| {
|
||||
alias.node.name == required_import.name.name
|
||||
&& alias.node.asname.as_deref() == required_import.name.as_name
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn add_required_import(
|
||||
required_import: &AnyImport,
|
||||
blocks: &[&Block],
|
||||
python_ast: &Suite,
|
||||
locator: &SourceCodeLocator,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
) -> Option<Diagnostic> {
|
||||
// If the import is already present in a top-level block, don't add it.
|
||||
if blocks
|
||||
.iter()
|
||||
.filter(|block| !block.nested)
|
||||
.any(|block| contains(block, required_import))
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
// Don't add imports to semantically-empty files.
|
||||
if python_ast.iter().all(is_docstring_stmt) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Always insert the diagnostic at top-of-file.
|
||||
let required_import = required_import.to_string();
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
violations::MissingRequiredImport(required_import.clone()),
|
||||
Range::new(Location::default(), Location::default()),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled) && settings.fixable.contains(&RuleCode::I002) {
|
||||
// Determine the location at which the import should be inserted.
|
||||
let splice = helpers::find_splice_location(python_ast, locator);
|
||||
|
||||
// Generate the edit.
|
||||
let mut contents = String::with_capacity(required_import.len() + 1);
|
||||
|
||||
// If we're inserting beyond the start of the file, we add
|
||||
// a newline _before_, since the splice represents the _end_ of the last
|
||||
// irrelevant token (e.g., the end of a comment or the end of
|
||||
// docstring). This ensures that we properly handle awkward cases like
|
||||
// docstrings that are followed by semicolons.
|
||||
if splice > Location::default() {
|
||||
contents.push('\n');
|
||||
}
|
||||
contents.push_str(&required_import);
|
||||
|
||||
// If we're inserting at the start of the file, add a trailing newline instead.
|
||||
if splice == Location::default() {
|
||||
contents.push('\n');
|
||||
}
|
||||
|
||||
// Construct the fix.
|
||||
diagnostic.amend(Fix::insertion(contents, splice));
|
||||
}
|
||||
Some(diagnostic)
|
||||
}
|
||||
|
||||
/// I002
|
||||
pub fn add_required_imports(
|
||||
blocks: &[&Block],
|
||||
python_ast: &Suite,
|
||||
locator: &SourceCodeLocator,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
) -> Vec<Diagnostic> {
|
||||
settings
|
||||
.isort
|
||||
.required_imports
|
||||
.iter()
|
||||
.flat_map(|required_import| {
|
||||
let Ok(body) = rustpython_parser::parser::parse_program(required_import, "<filename>") else {
|
||||
error!("Failed to parse required import: `{}`", required_import);
|
||||
return vec![];
|
||||
};
|
||||
if body.is_empty() || body.len() > 1 {
|
||||
error!("Expected require import to contain a single statement: `{}`", required_import);
|
||||
return vec![];
|
||||
}
|
||||
|
||||
match &body[0].node {
|
||||
StmtKind::ImportFrom { module, names, level } => {
|
||||
names.iter().filter_map(|name| {
|
||||
add_required_import(
|
||||
&AnyImport::ImportFrom(ImportFrom {
|
||||
module: module.as_ref().map(String::as_str),
|
||||
name: Alias {
|
||||
name: name.node.name.as_str(),
|
||||
as_name: name.node.asname.as_deref(),
|
||||
},
|
||||
level: level.as_ref(),
|
||||
}),
|
||||
blocks,
|
||||
python_ast,
|
||||
locator,
|
||||
settings,
|
||||
autofix,
|
||||
)
|
||||
}).collect()
|
||||
}
|
||||
StmtKind::Import { names } => {
|
||||
names.iter().filter_map(|name| {
|
||||
add_required_import(
|
||||
&AnyImport::Import(Import {
|
||||
name: Alias {
|
||||
name: name.node.name.as_str(),
|
||||
as_name: name.node.asname.as_deref(),
|
||||
},
|
||||
}),
|
||||
blocks,
|
||||
python_ast,
|
||||
locator,
|
||||
settings,
|
||||
autofix,
|
||||
)
|
||||
}).collect()
|
||||
}
|
||||
_ => {
|
||||
error!("Expected required import to be in import-from style: `{}`", required_import);
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
5
src/isort/rules/mod.rs
Normal file
5
src/isort/rules/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub use add_required_imports::add_required_imports;
|
||||
pub use organize_imports::organize_imports;
|
||||
|
||||
pub mod add_required_imports;
|
||||
pub mod organize_imports;
|
||||
@@ -27,7 +27,7 @@ fn extract_indentation_range(body: &[&Stmt]) -> Range {
|
||||
}
|
||||
|
||||
/// I001
|
||||
pub fn check_imports(
|
||||
pub fn organize_imports(
|
||||
block: &Block,
|
||||
locator: &SourceCodeLocator,
|
||||
settings: &Settings,
|
||||
@@ -129,36 +129,47 @@ pub struct Options {
|
||||
/// A list of modules to consider standard-library, in addition to those
|
||||
/// known to Ruff in advance.
|
||||
pub extra_standard_library: Option<Vec<String>>,
|
||||
#[option(
|
||||
default = r#"[]"#,
|
||||
value_type = "Vec<String>",
|
||||
example = r#"
|
||||
add-import = ["from __future__ import annotations"]
|
||||
"#
|
||||
)]
|
||||
/// Add the specified import line to all files.
|
||||
pub required_imports: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Settings {
|
||||
pub required_imports: BTreeSet<String>,
|
||||
pub combine_as_imports: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub extra_standard_library: BTreeSet<String>,
|
||||
pub force_single_line: bool,
|
||||
pub order_by_type: bool,
|
||||
pub force_sort_within_sections: bool,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub known_first_party: BTreeSet<String>,
|
||||
pub known_third_party: BTreeSet<String>,
|
||||
pub extra_standard_library: BTreeSet<String>,
|
||||
pub order_by_type: bool,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub split_on_trailing_comma: bool,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
required_imports: BTreeSet::new(),
|
||||
combine_as_imports: false,
|
||||
force_wrap_aliases: false,
|
||||
split_on_trailing_comma: true,
|
||||
extra_standard_library: BTreeSet::new(),
|
||||
force_single_line: false,
|
||||
order_by_type: true,
|
||||
force_sort_within_sections: false,
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
force_wrap_aliases: false,
|
||||
known_first_party: BTreeSet::new(),
|
||||
known_third_party: BTreeSet::new(),
|
||||
extra_standard_library: BTreeSet::new(),
|
||||
order_by_type: true,
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
split_on_trailing_comma: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -166,20 +177,21 @@ impl Default for Settings {
|
||||
impl From<Options> for Settings {
|
||||
fn from(options: Options) -> Self {
|
||||
Self {
|
||||
required_imports: BTreeSet::from_iter(options.required_imports.unwrap_or_default()),
|
||||
combine_as_imports: options.combine_as_imports.unwrap_or(false),
|
||||
force_wrap_aliases: options.force_wrap_aliases.unwrap_or(false),
|
||||
split_on_trailing_comma: options.split_on_trailing_comma.unwrap_or(true),
|
||||
force_single_line: options.force_single_line.unwrap_or(false),
|
||||
order_by_type: options.order_by_type.unwrap_or(true),
|
||||
force_sort_within_sections: options.force_sort_within_sections.unwrap_or(false),
|
||||
single_line_exclusions: BTreeSet::from_iter(
|
||||
options.single_line_exclusions.unwrap_or_default(),
|
||||
),
|
||||
known_first_party: BTreeSet::from_iter(options.known_first_party.unwrap_or_default()),
|
||||
known_third_party: BTreeSet::from_iter(options.known_third_party.unwrap_or_default()),
|
||||
extra_standard_library: BTreeSet::from_iter(
|
||||
options.extra_standard_library.unwrap_or_default(),
|
||||
),
|
||||
force_single_line: options.force_single_line.unwrap_or(false),
|
||||
force_sort_within_sections: options.force_sort_within_sections.unwrap_or(false),
|
||||
force_wrap_aliases: options.force_wrap_aliases.unwrap_or(false),
|
||||
known_first_party: BTreeSet::from_iter(options.known_first_party.unwrap_or_default()),
|
||||
known_third_party: BTreeSet::from_iter(options.known_third_party.unwrap_or_default()),
|
||||
order_by_type: options.order_by_type.unwrap_or(true),
|
||||
single_line_exclusions: BTreeSet::from_iter(
|
||||
options.single_line_exclusions.unwrap_or_default(),
|
||||
),
|
||||
split_on_trailing_comma: options.split_on_trailing_comma.unwrap_or(true),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -187,16 +199,17 @@ impl From<Options> for Settings {
|
||||
impl From<Settings> for Options {
|
||||
fn from(settings: Settings) -> Self {
|
||||
Self {
|
||||
required_imports: Some(settings.required_imports.into_iter().collect()),
|
||||
combine_as_imports: Some(settings.combine_as_imports),
|
||||
force_wrap_aliases: Some(settings.force_wrap_aliases),
|
||||
split_on_trailing_comma: Some(settings.split_on_trailing_comma),
|
||||
extra_standard_library: Some(settings.extra_standard_library.into_iter().collect()),
|
||||
force_single_line: Some(settings.force_single_line),
|
||||
order_by_type: Some(settings.order_by_type),
|
||||
force_sort_within_sections: Some(settings.force_sort_within_sections),
|
||||
single_line_exclusions: Some(settings.single_line_exclusions.into_iter().collect()),
|
||||
force_wrap_aliases: Some(settings.force_wrap_aliases),
|
||||
known_first_party: Some(settings.known_first_party.into_iter().collect()),
|
||||
known_third_party: Some(settings.known_third_party.into_iter().collect()),
|
||||
extra_standard_library: Some(settings.extra_standard_library.into_iter().collect()),
|
||||
order_by_type: Some(settings.order_by_type),
|
||||
single_line_exclusions: Some(settings.single_line_exclusions.into_iter().collect()),
|
||||
split_on_trailing_comma: Some(settings.split_on_trailing_comma),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingRequiredImport: from __future__ import annotations
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nfrom __future__ import annotations"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingRequiredImport: from __future__ import generator_stop
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nfrom __future__ import generator_stop"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingRequiredImport: from __future__ import annotations
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nfrom __future__ import annotations"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,39 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingRequiredImport: from __future__ import annotations
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nfrom __future__ import annotations"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingRequiredImport: from __future__ import generator_stop
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nfrom __future__ import generator_stop"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingRequiredImport: import os
|
||||
location:
|
||||
row: 1
|
||||
column: 0
|
||||
end_location:
|
||||
row: 1
|
||||
column: 0
|
||||
fix:
|
||||
content: "\nimport os"
|
||||
location:
|
||||
row: 1
|
||||
column: 19
|
||||
end_location:
|
||||
row: 1
|
||||
column: 19
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/isort/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -20,6 +20,7 @@ pub enum Trailer {
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Block<'a> {
|
||||
pub nested: bool,
|
||||
pub imports: Vec<&'a Stmt>,
|
||||
pub trailer: Option<Trailer>,
|
||||
}
|
||||
@@ -52,6 +53,7 @@ impl<'a> ImportTracker<'a> {
|
||||
fn track_import(&mut self, stmt: &'a Stmt) {
|
||||
let index = self.blocks.len() - 1;
|
||||
self.blocks[index].imports.push(stmt);
|
||||
self.blocks[index].nested = self.nested;
|
||||
}
|
||||
|
||||
fn trailer_for(&self, stmt: &'a Stmt) -> Option<Trailer> {
|
||||
@@ -105,8 +107,11 @@ impl<'a> ImportTracker<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_iter(self) -> impl IntoIterator<Item = Block<'a>> {
|
||||
self.blocks.into_iter()
|
||||
pub fn iter<'b>(&'a self) -> impl Iterator<Item = &'b Block<'a>>
|
||||
where
|
||||
'a: 'b,
|
||||
{
|
||||
self.blocks.iter()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -84,10 +84,11 @@ pub(crate) fn check_path(
|
||||
.enabled
|
||||
.iter()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::AST));
|
||||
let use_imports = settings
|
||||
.enabled
|
||||
.iter()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
|
||||
let use_imports = !directives.isort.skip_file
|
||||
&& settings
|
||||
.enabled
|
||||
.iter()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
|
||||
if use_ast || use_imports {
|
||||
match rustpython_helpers::parse_program_tokens(tokens, "<filename>") {
|
||||
Ok(python_ast) => {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
//! Implements helper functions for using vendored/format.rs
|
||||
use std::convert::TryFrom;
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use rustpython_common::format::{
|
||||
FieldName, FieldType, FormatParseError, FormatPart, FormatString, FromTemplate,
|
||||
};
|
||||
@@ -21,10 +20,12 @@ pub(crate) fn error_to_string(err: &FormatParseError) -> String {
|
||||
.to_string()
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct FormatSummary {
|
||||
pub autos: FxHashSet<usize>,
|
||||
pub indexes: FxHashSet<usize>,
|
||||
pub keywords: FxHashSet<String>,
|
||||
pub autos: Vec<usize>,
|
||||
pub indexes: Vec<usize>,
|
||||
pub keywords: Vec<String>,
|
||||
pub has_nested_parts: bool,
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for FormatSummary {
|
||||
@@ -33,9 +34,10 @@ impl TryFrom<&str> for FormatSummary {
|
||||
fn try_from(literal: &str) -> Result<Self, Self::Error> {
|
||||
let format_string = FormatString::from_str(literal)?;
|
||||
|
||||
let mut autos = FxHashSet::default();
|
||||
let mut indexes = FxHashSet::default();
|
||||
let mut keywords = FxHashSet::default();
|
||||
let mut autos = Vec::new();
|
||||
let mut indexes = Vec::new();
|
||||
let mut keywords = Vec::new();
|
||||
let mut has_nested_parts = false;
|
||||
|
||||
for format_part in format_string.format_parts {
|
||||
let FormatPart::Field {
|
||||
@@ -47,9 +49,9 @@ impl TryFrom<&str> for FormatSummary {
|
||||
};
|
||||
let parsed = FieldName::parse(&field_name)?;
|
||||
match parsed.field_type {
|
||||
FieldType::Auto => autos.insert(autos.len()),
|
||||
FieldType::Index(i) => indexes.insert(i),
|
||||
FieldType::Keyword(k) => keywords.insert(k),
|
||||
FieldType::Auto => autos.push(autos.len()),
|
||||
FieldType::Index(i) => indexes.push(i),
|
||||
FieldType::Keyword(k) => keywords.push(k),
|
||||
};
|
||||
|
||||
let nested = FormatString::from_str(&format_spec)?;
|
||||
@@ -59,10 +61,11 @@ impl TryFrom<&str> for FormatSummary {
|
||||
};
|
||||
let parsed = FieldName::parse(&field_name)?;
|
||||
match parsed.field_type {
|
||||
FieldType::Auto => autos.insert(autos.len()),
|
||||
FieldType::Index(i) => indexes.insert(i),
|
||||
FieldType::Keyword(k) => keywords.insert(k),
|
||||
FieldType::Auto => autos.push(autos.len()),
|
||||
FieldType::Index(i) => indexes.push(i),
|
||||
FieldType::Keyword(k) => keywords.push(k),
|
||||
};
|
||||
has_nested_parts = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -70,6 +73,7 @@ impl TryFrom<&str> for FormatSummary {
|
||||
autos,
|
||||
indexes,
|
||||
keywords,
|
||||
has_nested_parts,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -82,9 +86,9 @@ mod tests {
|
||||
fn test_format_summary() {
|
||||
let literal = "foo{foo}a{}b{2}c{2}d{1}{}{}e{bar}{foo}f{spam}";
|
||||
|
||||
let expected_autos = [0usize, 1usize, 2usize].into_iter().collect();
|
||||
let expected_indexes = [1usize, 2usize].into_iter().collect();
|
||||
let expected_keywords = ["foo", "bar", "spam"]
|
||||
let expected_autos = [0usize, 1usize, 2usize].to_vec();
|
||||
let expected_indexes = [2usize, 2usize, 1usize].to_vec();
|
||||
let expected_keywords: Vec<_> = ["foo", "bar", "foo", "spam"]
|
||||
.into_iter()
|
||||
.map(String::from)
|
||||
.collect();
|
||||
@@ -94,15 +98,16 @@ mod tests {
|
||||
assert_eq!(format_summary.autos, expected_autos);
|
||||
assert_eq!(format_summary.indexes, expected_indexes);
|
||||
assert_eq!(format_summary.keywords, expected_keywords);
|
||||
assert!(!format_summary.has_nested_parts);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_summary_nested() {
|
||||
let literal = "foo{foo}a{:{}{}}b{2:{3}{4}}c{2}d{1}{}e{bar:{spam}{eggs}}";
|
||||
|
||||
let expected_autos = [0usize, 1usize, 2usize, 3usize].into_iter().collect();
|
||||
let expected_indexes = [1usize, 2usize, 3usize, 4usize].into_iter().collect();
|
||||
let expected_keywords = ["foo", "bar", "spam", "eggs"]
|
||||
let expected_autos = [0usize, 1usize, 2usize, 3usize].to_vec();
|
||||
let expected_indexes = [2usize, 3usize, 4usize, 2usize, 1usize].to_vec();
|
||||
let expected_keywords: Vec<_> = ["foo", "bar", "spam", "eggs"]
|
||||
.into_iter()
|
||||
.map(String::from)
|
||||
.collect();
|
||||
@@ -112,6 +117,7 @@ mod tests {
|
||||
assert_eq!(format_summary.autos, expected_autos);
|
||||
assert_eq!(format_summary.indexes, expected_indexes);
|
||||
assert_eq!(format_summary.keywords, expected_keywords);
|
||||
assert!(format_summary.has_nested_parts);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -50,6 +50,8 @@ mod tests {
|
||||
#[test_case(RuleCode::UP028, Path::new("UP028_0.py"); "UP028_0")]
|
||||
#[test_case(RuleCode::UP028, Path::new("UP028_1.py"); "UP028_1")]
|
||||
#[test_case(RuleCode::UP029, Path::new("UP029.py"); "UP029")]
|
||||
#[test_case(RuleCode::UP030, Path::new("UP030_0.py"); "UP030_0")]
|
||||
#[test_case(RuleCode::UP030, Path::new("UP030_1.py"); "UP030_1")]
|
||||
fn rules(rule_code: RuleCode, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
118
src/pyupgrade/rules/format_literals.rs
Normal file
118
src/pyupgrade/rules/format_literals.rs
Normal file
@@ -0,0 +1,118 @@
|
||||
use anyhow::{anyhow, bail, Result};
|
||||
use libcst_native::{Arg, Codegen, CodegenState, Expression};
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_ast::Expr;
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::autofix::Fix;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::cst::matchers::{match_call, match_expression};
|
||||
use crate::pyflakes::format::FormatSummary;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violations;
|
||||
|
||||
// An opening curly brace, followed by any integer, followed by any text,
|
||||
// followed by a closing brace.
|
||||
static FORMAT_SPECIFIER: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"\{(?P<int>\d+)(?P<fmt>.*?)}").unwrap());
|
||||
|
||||
/// Returns a string without the format specifiers.
|
||||
/// Ex. "Hello {0} {1}" -> "Hello {} {}"
|
||||
fn remove_specifiers(raw_specifiers: &str) -> String {
|
||||
FORMAT_SPECIFIER
|
||||
.replace_all(raw_specifiers, "{$fmt}")
|
||||
.to_string()
|
||||
}
|
||||
|
||||
/// Return the corrected argument vector.
|
||||
fn generate_arguments<'a>(
|
||||
old_args: &[Arg<'a>],
|
||||
correct_order: &'a [usize],
|
||||
) -> Result<Vec<Arg<'a>>> {
|
||||
let mut new_args: Vec<Arg> = Vec::with_capacity(old_args.len());
|
||||
for (idx, given) in correct_order.iter().enumerate() {
|
||||
// We need to keep the formatting in the same order but move the values.
|
||||
let values = old_args
|
||||
.get(*given)
|
||||
.ok_or_else(|| anyhow!("Failed to extract argument at: {given}"))?;
|
||||
let formatting = old_args
|
||||
.get(idx)
|
||||
.ok_or_else(|| anyhow!("Failed to extract argument at: {idx}"))?;
|
||||
let new_arg = Arg {
|
||||
value: values.value.clone(),
|
||||
comma: formatting.comma.clone(),
|
||||
equal: None,
|
||||
keyword: None,
|
||||
star: values.star,
|
||||
whitespace_after_star: formatting.whitespace_after_star.clone(),
|
||||
whitespace_after_arg: formatting.whitespace_after_arg.clone(),
|
||||
};
|
||||
new_args.push(new_arg);
|
||||
}
|
||||
Ok(new_args)
|
||||
}
|
||||
|
||||
/// Returns the corrected function call.
|
||||
fn generate_call(module_text: &str, correct_order: &[usize]) -> Result<String> {
|
||||
let mut expression = match_expression(module_text)?;
|
||||
let mut call = match_call(&mut expression)?;
|
||||
|
||||
// Fix the call arguments.
|
||||
call.args = generate_arguments(&call.args, correct_order)?;
|
||||
|
||||
// Fix the string itself.
|
||||
let Expression::Attribute(item) = &*call.func else {
|
||||
panic!("Expected: Expression::Attribute")
|
||||
};
|
||||
|
||||
let mut state = CodegenState::default();
|
||||
item.codegen(&mut state);
|
||||
let cleaned = remove_specifiers(&state.to_string());
|
||||
|
||||
call.func = Box::new(match_expression(&cleaned)?);
|
||||
|
||||
let mut state = CodegenState::default();
|
||||
expression.codegen(&mut state);
|
||||
if module_text == state.to_string() {
|
||||
// Ex) `'{' '0}'.format(1)`
|
||||
bail!("Failed to generate call expression for: {module_text}")
|
||||
}
|
||||
Ok(state.to_string())
|
||||
}
|
||||
|
||||
/// UP030
|
||||
pub(crate) fn format_literals(checker: &mut Checker, summary: &FormatSummary, expr: &Expr) {
|
||||
// The format we expect is, e.g.: `"{0} {1}".format(...)`
|
||||
if summary.has_nested_parts {
|
||||
return;
|
||||
}
|
||||
if !summary.keywords.is_empty() {
|
||||
return;
|
||||
}
|
||||
if !summary.autos.is_empty() {
|
||||
return;
|
||||
}
|
||||
if !(0..summary.indexes.len()).all(|index| summary.indexes.contains(&index)) {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(violations::FormatLiterals, Range::from_located(expr));
|
||||
if checker.patch(diagnostic.kind.code()) {
|
||||
// Currently, the only issue we know of is in LibCST:
|
||||
// https://github.com/Instagram/LibCST/issues/846
|
||||
if let Ok(contents) = generate_call(
|
||||
&checker
|
||||
.locator
|
||||
.slice_source_code_range(&Range::from_located(expr)),
|
||||
&summary.indexes,
|
||||
) {
|
||||
diagnostic.amend(Fix::replacement(
|
||||
contents,
|
||||
expr.location,
|
||||
expr.end_location.unwrap(),
|
||||
));
|
||||
};
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
@@ -1,34 +1,35 @@
|
||||
pub use convert_named_tuple_functional_to_class::convert_named_tuple_functional_to_class;
|
||||
pub use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
|
||||
pub use datetime_utc_alias::datetime_utc_alias;
|
||||
pub use deprecated_unittest_alias::deprecated_unittest_alias;
|
||||
pub use native_literals::native_literals;
|
||||
pub(crate) use convert_named_tuple_functional_to_class::convert_named_tuple_functional_to_class;
|
||||
pub(crate) use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
|
||||
pub(crate) use datetime_utc_alias::datetime_utc_alias;
|
||||
pub(crate) use deprecated_unittest_alias::deprecated_unittest_alias;
|
||||
pub(crate) use format_literals::format_literals;
|
||||
pub(crate) use native_literals::native_literals;
|
||||
use once_cell::sync::Lazy;
|
||||
pub use open_alias::open_alias;
|
||||
pub use os_error_alias::os_error_alias;
|
||||
pub use redundant_open_modes::redundant_open_modes;
|
||||
pub(crate) use open_alias::open_alias;
|
||||
pub(crate) use os_error_alias::os_error_alias;
|
||||
pub(crate) use redundant_open_modes::redundant_open_modes;
|
||||
use regex::Regex;
|
||||
pub use remove_six_compat::remove_six_compat;
|
||||
pub use replace_stdout_stderr::replace_stdout_stderr;
|
||||
pub use replace_universal_newlines::replace_universal_newlines;
|
||||
pub use rewrite_c_element_tree::replace_c_element_tree;
|
||||
pub use rewrite_mock_import::{rewrite_mock_attribute, rewrite_mock_import};
|
||||
pub use rewrite_unicode_literal::rewrite_unicode_literal;
|
||||
pub use rewrite_yield_from::rewrite_yield_from;
|
||||
pub(crate) use remove_six_compat::remove_six_compat;
|
||||
pub(crate) use replace_stdout_stderr::replace_stdout_stderr;
|
||||
pub(crate) use replace_universal_newlines::replace_universal_newlines;
|
||||
pub(crate) use rewrite_c_element_tree::replace_c_element_tree;
|
||||
pub(crate) use rewrite_mock_import::{rewrite_mock_attribute, rewrite_mock_import};
|
||||
pub(crate) use rewrite_unicode_literal::rewrite_unicode_literal;
|
||||
pub(crate) use rewrite_yield_from::rewrite_yield_from;
|
||||
use rustpython_ast::Location;
|
||||
use rustpython_parser::ast::{ArgData, Expr, ExprKind, Stmt, StmtKind};
|
||||
pub use super_call_with_parameters::super_call_with_parameters;
|
||||
pub use type_of_primitive::type_of_primitive;
|
||||
pub use typing_text_str_alias::typing_text_str_alias;
|
||||
pub use unnecessary_builtin_import::unnecessary_builtin_import;
|
||||
pub use unnecessary_encode_utf8::unnecessary_encode_utf8;
|
||||
pub use unnecessary_future_import::unnecessary_future_import;
|
||||
pub use unnecessary_lru_cache_params::unnecessary_lru_cache_params;
|
||||
pub use unpack_list_comprehension::unpack_list_comprehension;
|
||||
pub use use_pep585_annotation::use_pep585_annotation;
|
||||
pub use use_pep604_annotation::use_pep604_annotation;
|
||||
pub use useless_metaclass_type::useless_metaclass_type;
|
||||
pub use useless_object_inheritance::useless_object_inheritance;
|
||||
pub(crate) use super_call_with_parameters::super_call_with_parameters;
|
||||
pub(crate) use type_of_primitive::type_of_primitive;
|
||||
pub(crate) use typing_text_str_alias::typing_text_str_alias;
|
||||
pub(crate) use unnecessary_builtin_import::unnecessary_builtin_import;
|
||||
pub(crate) use unnecessary_encode_utf8::unnecessary_encode_utf8;
|
||||
pub(crate) use unnecessary_future_import::unnecessary_future_import;
|
||||
pub(crate) use unnecessary_lru_cache_params::unnecessary_lru_cache_params;
|
||||
pub(crate) use unpack_list_comprehension::unpack_list_comprehension;
|
||||
pub(crate) use use_pep585_annotation::use_pep585_annotation;
|
||||
pub(crate) use use_pep604_annotation::use_pep604_annotation;
|
||||
pub(crate) use useless_metaclass_type::useless_metaclass_type;
|
||||
pub(crate) use useless_object_inheritance::useless_object_inheritance;
|
||||
|
||||
use crate::ast::helpers::{self};
|
||||
use crate::ast::types::{Range, Scope, ScopeKind};
|
||||
@@ -40,6 +41,7 @@ mod convert_named_tuple_functional_to_class;
|
||||
mod convert_typed_dict_functional_to_class;
|
||||
mod datetime_utc_alias;
|
||||
mod deprecated_unittest_alias;
|
||||
mod format_literals;
|
||||
mod native_literals;
|
||||
mod open_alias;
|
||||
mod os_error_alias;
|
||||
|
||||
@@ -0,0 +1,188 @@
|
||||
---
|
||||
source: src/pyupgrade/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 3
|
||||
column: 0
|
||||
end_location:
|
||||
row: 3
|
||||
column: 33
|
||||
fix:
|
||||
content: "\"{}\" \"{}\" \"{}\".format(1, 2, 3)"
|
||||
location:
|
||||
row: 3
|
||||
column: 0
|
||||
end_location:
|
||||
row: 3
|
||||
column: 33
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 7
|
||||
column: 1
|
||||
fix:
|
||||
content: "\"a {} complicated {} string with {} {}\".format(\n \"fourth\", \"second\", \"first\", \"third\"\n)"
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 7
|
||||
column: 1
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 9
|
||||
column: 0
|
||||
end_location:
|
||||
row: 9
|
||||
column: 15
|
||||
fix:
|
||||
content: "'{}'.format(1)"
|
||||
location:
|
||||
row: 9
|
||||
column: 0
|
||||
end_location:
|
||||
row: 9
|
||||
column: 15
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 11
|
||||
column: 0
|
||||
end_location:
|
||||
row: 11
|
||||
column: 18
|
||||
fix:
|
||||
content: "'{:x}'.format(30)"
|
||||
location:
|
||||
row: 11
|
||||
column: 0
|
||||
end_location:
|
||||
row: 11
|
||||
column: 18
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 13
|
||||
column: 4
|
||||
end_location:
|
||||
row: 13
|
||||
column: 19
|
||||
fix:
|
||||
content: "'{}'.format(1)"
|
||||
location:
|
||||
row: 13
|
||||
column: 4
|
||||
end_location:
|
||||
row: 13
|
||||
column: 19
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 15
|
||||
column: 0
|
||||
end_location:
|
||||
row: 15
|
||||
column: 29
|
||||
fix:
|
||||
content: "'''{}\\n{}\\n'''.format(1, 2)"
|
||||
location:
|
||||
row: 15
|
||||
column: 0
|
||||
end_location:
|
||||
row: 15
|
||||
column: 29
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 17
|
||||
column: 4
|
||||
end_location:
|
||||
row: 18
|
||||
column: 26
|
||||
fix:
|
||||
content: "\"foo {}\" \\\n \"bar {}\".format(1, 2)"
|
||||
location:
|
||||
row: 17
|
||||
column: 4
|
||||
end_location:
|
||||
row: 18
|
||||
column: 26
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 20
|
||||
column: 0
|
||||
end_location:
|
||||
row: 20
|
||||
column: 17
|
||||
fix:
|
||||
content: "(\"{}\").format(1)"
|
||||
location:
|
||||
row: 20
|
||||
column: 0
|
||||
end_location:
|
||||
row: 20
|
||||
column: 17
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 22
|
||||
column: 0
|
||||
end_location:
|
||||
row: 22
|
||||
column: 27
|
||||
fix:
|
||||
content: "\"\\N{snowman} {}\".format(1)"
|
||||
location:
|
||||
row: 22
|
||||
column: 0
|
||||
end_location:
|
||||
row: 22
|
||||
column: 27
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 24
|
||||
column: 0
|
||||
end_location:
|
||||
row: 24
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 29
|
||||
column: 4
|
||||
end_location:
|
||||
row: 30
|
||||
column: 25
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
FormatLiterals: ~
|
||||
location:
|
||||
row: 34
|
||||
column: 4
|
||||
end_location:
|
||||
row: 35
|
||||
column: 25
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: src/pyupgrade/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
411
src/registry.rs
411
src/registry.rs
@@ -54,6 +54,14 @@ macro_rules! define_rule_mapping {
|
||||
)+
|
||||
}
|
||||
}
|
||||
|
||||
pub fn origin(&self) -> RuleOrigin {
|
||||
match self {
|
||||
$(
|
||||
RuleCode::$code => ruff_macros::origin_by_code!($code),
|
||||
)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticKind {
|
||||
@@ -295,6 +303,7 @@ define_rule_mapping!(
|
||||
SIM109 => violations::CompareWithTuple,
|
||||
SIM110 => violations::ConvertLoopToAny,
|
||||
SIM111 => violations::ConvertLoopToAll,
|
||||
SIM112 => violations::UseCapitalEnvironmentVariables,
|
||||
SIM117 => violations::MultipleWithStatements,
|
||||
SIM118 => violations::KeyInDict,
|
||||
SIM201 => violations::NegateEqualOp,
|
||||
@@ -337,6 +346,7 @@ define_rule_mapping!(
|
||||
UP027 => violations::RewriteListComprehension,
|
||||
UP028 => violations::RewriteYieldFrom,
|
||||
UP029 => violations::UnnecessaryBuiltinImport,
|
||||
UP030 => violations::FormatLiterals,
|
||||
// pydocstyle
|
||||
D100 => violations::PublicModule,
|
||||
D101 => violations::PublicClass,
|
||||
@@ -401,6 +411,7 @@ define_rule_mapping!(
|
||||
N818 => violations::ErrorSuffixOnExceptionName,
|
||||
// isort
|
||||
I001 => violations::UnsortedImports,
|
||||
I002 => violations::MissingRequiredImport,
|
||||
// eradicate
|
||||
ERA001 => violations::CommentedOutCode,
|
||||
// flake8-bandit
|
||||
@@ -416,6 +427,8 @@ define_rule_mapping!(
|
||||
S324 => violations::HashlibInsecureHashFunction,
|
||||
S501 => violations::RequestWithNoCertValidation,
|
||||
S506 => violations::UnsafeYAMLLoad,
|
||||
S508 => violations::SnmpInsecureVersion,
|
||||
S509 => violations::SnmpWeakCryptography,
|
||||
// flake8-boolean-trap
|
||||
FBT001 => violations::BooleanPositionalArgInFunctionDefinition,
|
||||
FBT002 => violations::BooleanDefaultValueInFunctionDefinition,
|
||||
@@ -786,406 +799,10 @@ impl RuleCode {
|
||||
| RuleCode::RUF002
|
||||
| RuleCode::RUF003 => &LintSource::Tokens,
|
||||
RuleCode::E902 => &LintSource::FileSystem,
|
||||
RuleCode::I001 => &LintSource::Imports,
|
||||
RuleCode::I001 | RuleCode::I002 => &LintSource::Imports,
|
||||
_ => &LintSource::AST,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn origin(&self) -> RuleOrigin {
|
||||
#[allow(clippy::match_same_arms)]
|
||||
match self {
|
||||
// flake8-builtins
|
||||
RuleCode::A001 => RuleOrigin::Flake8Builtins,
|
||||
RuleCode::A002 => RuleOrigin::Flake8Builtins,
|
||||
RuleCode::A003 => RuleOrigin::Flake8Builtins,
|
||||
// flake8-annotations
|
||||
RuleCode::ANN001 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN002 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN003 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN101 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN102 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN201 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN202 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN204 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN205 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN206 => RuleOrigin::Flake8Annotations,
|
||||
RuleCode::ANN401 => RuleOrigin::Flake8Annotations,
|
||||
// flake8-unused-arguments
|
||||
RuleCode::ARG001 => RuleOrigin::Flake8UnusedArguments,
|
||||
RuleCode::ARG002 => RuleOrigin::Flake8UnusedArguments,
|
||||
RuleCode::ARG003 => RuleOrigin::Flake8UnusedArguments,
|
||||
RuleCode::ARG004 => RuleOrigin::Flake8UnusedArguments,
|
||||
RuleCode::ARG005 => RuleOrigin::Flake8UnusedArguments,
|
||||
// flake8-bugbear
|
||||
RuleCode::B002 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B003 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B004 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B005 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B006 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B007 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B008 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B009 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B010 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B011 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B012 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B013 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B014 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B015 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B016 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B017 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B018 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B019 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B020 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B021 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B022 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B023 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B024 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B025 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B026 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B027 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B904 => RuleOrigin::Flake8Bugbear,
|
||||
RuleCode::B905 => RuleOrigin::Flake8Bugbear,
|
||||
// flake8-blind-except
|
||||
RuleCode::BLE001 => RuleOrigin::Flake8BlindExcept,
|
||||
// flake8-comprehensions
|
||||
RuleCode::C400 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C401 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C402 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C403 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C404 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C405 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C406 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C408 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C409 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C410 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C411 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C413 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C414 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C415 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C416 => RuleOrigin::Flake8Comprehensions,
|
||||
RuleCode::C417 => RuleOrigin::Flake8Comprehensions,
|
||||
// mccabe
|
||||
RuleCode::C901 => RuleOrigin::McCabe,
|
||||
// pydocstyle
|
||||
RuleCode::D100 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D101 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D102 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D103 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D104 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D105 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D106 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D107 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D200 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D201 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D202 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D203 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D204 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D205 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D206 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D207 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D208 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D209 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D210 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D211 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D212 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D213 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D214 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D215 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D300 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D301 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D400 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D402 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D403 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D404 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D405 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D406 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D407 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D408 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D409 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D410 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D411 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D412 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D413 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D414 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D415 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D416 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D417 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D418 => RuleOrigin::Pydocstyle,
|
||||
RuleCode::D419 => RuleOrigin::Pydocstyle,
|
||||
// flake8-datetimez
|
||||
RuleCode::DTZ001 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ002 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ003 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ004 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ005 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ006 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ007 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ011 => RuleOrigin::Flake8Datetimez,
|
||||
RuleCode::DTZ012 => RuleOrigin::Flake8Datetimez,
|
||||
// pycodestyle (errors)
|
||||
RuleCode::E401 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E402 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E501 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E711 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E712 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E713 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E714 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E721 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E722 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E731 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E741 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E742 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E743 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E902 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::E999 => RuleOrigin::Pycodestyle,
|
||||
// flake8-errmsg
|
||||
RuleCode::EM101 => RuleOrigin::Flake8ErrMsg,
|
||||
RuleCode::EM102 => RuleOrigin::Flake8ErrMsg,
|
||||
RuleCode::EM103 => RuleOrigin::Flake8ErrMsg,
|
||||
// eradicate
|
||||
RuleCode::ERA001 => RuleOrigin::Eradicate,
|
||||
// pyflakes
|
||||
RuleCode::F401 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F402 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F403 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F404 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F405 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F406 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F407 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F501 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F502 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F503 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F504 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F505 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F506 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F507 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F508 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F509 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F521 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F522 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F523 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F524 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F525 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F541 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F601 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F602 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F621 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F622 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F631 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F632 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F633 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F634 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F701 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F702 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F704 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F706 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F707 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F722 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F811 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F821 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F822 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F823 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F841 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F842 => RuleOrigin::Pyflakes,
|
||||
RuleCode::F901 => RuleOrigin::Pyflakes,
|
||||
// flake8-boolean-trap
|
||||
RuleCode::FBT001 => RuleOrigin::Flake8BooleanTrap,
|
||||
RuleCode::FBT002 => RuleOrigin::Flake8BooleanTrap,
|
||||
RuleCode::FBT003 => RuleOrigin::Flake8BooleanTrap,
|
||||
// isort
|
||||
RuleCode::I001 => RuleOrigin::Isort,
|
||||
// flake8-import-conventions
|
||||
RuleCode::ICN001 => RuleOrigin::Flake8ImportConventions,
|
||||
// flake8-implicit-str-concat
|
||||
RuleCode::ISC001 => RuleOrigin::Flake8ImplicitStrConcat,
|
||||
RuleCode::ISC002 => RuleOrigin::Flake8ImplicitStrConcat,
|
||||
RuleCode::ISC003 => RuleOrigin::Flake8ImplicitStrConcat,
|
||||
// pep8-naming
|
||||
RuleCode::N801 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N802 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N803 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N804 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N805 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N806 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N807 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N811 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N812 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N813 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N814 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N815 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N816 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N817 => RuleOrigin::PEP8Naming,
|
||||
RuleCode::N818 => RuleOrigin::PEP8Naming,
|
||||
// pandas-vet
|
||||
RuleCode::PD002 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD003 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD004 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD007 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD008 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD009 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD010 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD011 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD012 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD013 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD015 => RuleOrigin::PandasVet,
|
||||
RuleCode::PD901 => RuleOrigin::PandasVet,
|
||||
// pygrep-hooks
|
||||
RuleCode::PGH001 => RuleOrigin::PygrepHooks,
|
||||
RuleCode::PGH002 => RuleOrigin::PygrepHooks,
|
||||
RuleCode::PGH003 => RuleOrigin::PygrepHooks,
|
||||
RuleCode::PGH004 => RuleOrigin::PygrepHooks,
|
||||
// pylint
|
||||
RuleCode::PLC0414 => RuleOrigin::Pylint,
|
||||
RuleCode::PLC2201 => RuleOrigin::Pylint,
|
||||
RuleCode::PLC3002 => RuleOrigin::Pylint,
|
||||
RuleCode::PLE0117 => RuleOrigin::Pylint,
|
||||
RuleCode::PLE0118 => RuleOrigin::Pylint,
|
||||
RuleCode::PLE1142 => RuleOrigin::Pylint,
|
||||
RuleCode::PLR0206 => RuleOrigin::Pylint,
|
||||
RuleCode::PLR0402 => RuleOrigin::Pylint,
|
||||
RuleCode::PLR1701 => RuleOrigin::Pylint,
|
||||
RuleCode::PLR1722 => RuleOrigin::Pylint,
|
||||
RuleCode::PLW0120 => RuleOrigin::Pylint,
|
||||
RuleCode::PLW0602 => RuleOrigin::Pylint,
|
||||
// flake8-pytest-style
|
||||
RuleCode::PT001 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT002 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT003 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT004 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT005 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT006 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT007 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT008 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT009 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT010 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT011 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT012 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT013 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT015 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT016 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT017 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT018 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT019 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT020 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT021 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT022 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT023 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT024 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT025 => RuleOrigin::Flake8PytestStyle,
|
||||
RuleCode::PT026 => RuleOrigin::Flake8PytestStyle,
|
||||
// flake8-quotes
|
||||
RuleCode::Q000 => RuleOrigin::Flake8Quotes,
|
||||
RuleCode::Q001 => RuleOrigin::Flake8Quotes,
|
||||
RuleCode::Q002 => RuleOrigin::Flake8Quotes,
|
||||
RuleCode::Q003 => RuleOrigin::Flake8Quotes,
|
||||
// flake8-return
|
||||
RuleCode::RET501 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET502 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET503 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET504 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET505 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET506 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET507 => RuleOrigin::Flake8Return,
|
||||
RuleCode::RET508 => RuleOrigin::Flake8Return,
|
||||
// flake8-bandit
|
||||
RuleCode::S101 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S102 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S103 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S104 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S105 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S106 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S107 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S108 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S113 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S324 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S501 => RuleOrigin::Flake8Bandit,
|
||||
RuleCode::S506 => RuleOrigin::Flake8Bandit,
|
||||
// flake8-simplify
|
||||
RuleCode::SIM103 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM101 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM102 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM105 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM107 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM108 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM109 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM110 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM111 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM117 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM118 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM201 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM202 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM208 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM210 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM211 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM212 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM220 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM221 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM222 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM223 => RuleOrigin::Flake8Simplify,
|
||||
RuleCode::SIM300 => RuleOrigin::Flake8Simplify,
|
||||
// flake8-debugger
|
||||
RuleCode::T100 => RuleOrigin::Flake8Debugger,
|
||||
// flake8-print
|
||||
RuleCode::T201 => RuleOrigin::Flake8Print,
|
||||
RuleCode::T203 => RuleOrigin::Flake8Print,
|
||||
// flake8-tidy-imports
|
||||
RuleCode::TID251 => RuleOrigin::Flake8TidyImports,
|
||||
RuleCode::TID252 => RuleOrigin::Flake8TidyImports,
|
||||
// pyupgrade
|
||||
RuleCode::UP001 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP003 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP004 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP005 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP006 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP007 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP008 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP009 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP010 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP011 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP012 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP013 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP014 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP015 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP016 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP017 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP018 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP019 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP020 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP021 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP022 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP023 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP024 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP025 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP026 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP027 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP028 => RuleOrigin::Pyupgrade,
|
||||
RuleCode::UP029 => RuleOrigin::Pyupgrade,
|
||||
// pycodestyle (warnings)
|
||||
RuleCode::W292 => RuleOrigin::Pycodestyle,
|
||||
RuleCode::W605 => RuleOrigin::Pycodestyle,
|
||||
// flake8-2020
|
||||
RuleCode::YTT101 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT102 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT103 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT201 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT202 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT203 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT204 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT301 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT302 => RuleOrigin::Flake82020,
|
||||
RuleCode::YTT303 => RuleOrigin::Flake82020,
|
||||
// flake8-pie
|
||||
RuleCode::PIE790 => RuleOrigin::Flake8Pie,
|
||||
RuleCode::PIE794 => RuleOrigin::Flake8Pie,
|
||||
RuleCode::PIE807 => RuleOrigin::Flake8Pie,
|
||||
// Ruff
|
||||
RuleCode::RUF001 => RuleOrigin::Ruff,
|
||||
RuleCode::RUF002 => RuleOrigin::Ruff,
|
||||
RuleCode::RUF003 => RuleOrigin::Ruff,
|
||||
RuleCode::RUF004 => RuleOrigin::Ruff,
|
||||
RuleCode::RUF100 => RuleOrigin::Ruff,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticKind {
|
||||
|
||||
@@ -2677,6 +2677,24 @@ impl Violation for SysVersionSlice1Referenced {
|
||||
}
|
||||
|
||||
// flake8-simplify
|
||||
define_violation!(
|
||||
pub struct UseCapitalEnvironmentVariables(pub String, pub String);
|
||||
);
|
||||
impl AlwaysAutofixableViolation for UseCapitalEnvironmentVariables {
|
||||
fn message(&self) -> String {
|
||||
let UseCapitalEnvironmentVariables(expected, original) = self;
|
||||
format!("Use capitalized environment variable `{expected}` instead of `{original}`")
|
||||
}
|
||||
|
||||
fn autofix_title(&self) -> String {
|
||||
let UseCapitalEnvironmentVariables(expected, original) = self;
|
||||
format!("Replace `{original}` with `{expected}`")
|
||||
}
|
||||
|
||||
fn placeholder() -> Self {
|
||||
UseCapitalEnvironmentVariables("...".to_string(), "...".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
define_violation!(
|
||||
pub struct DuplicateIsinstanceCall(pub String);
|
||||
@@ -3625,6 +3643,23 @@ impl AlwaysAutofixableViolation for UnnecessaryBuiltinImport {
|
||||
}
|
||||
}
|
||||
|
||||
define_violation!(
|
||||
pub struct FormatLiterals;
|
||||
);
|
||||
impl AlwaysAutofixableViolation for FormatLiterals {
|
||||
fn message(&self) -> String {
|
||||
"Use implicit references for positional format fields".to_string()
|
||||
}
|
||||
|
||||
fn autofix_title(&self) -> String {
|
||||
"Remove explicit positional indexes".to_string()
|
||||
}
|
||||
|
||||
fn placeholder() -> Self {
|
||||
FormatLiterals
|
||||
}
|
||||
}
|
||||
|
||||
// pydocstyle
|
||||
|
||||
define_violation!(
|
||||
@@ -4583,6 +4618,25 @@ impl AlwaysAutofixableViolation for UnsortedImports {
|
||||
}
|
||||
}
|
||||
|
||||
define_violation!(
|
||||
pub struct MissingRequiredImport(pub String);
|
||||
);
|
||||
impl AlwaysAutofixableViolation for MissingRequiredImport {
|
||||
fn message(&self) -> String {
|
||||
let MissingRequiredImport(name) = self;
|
||||
format!("Missing required import: `{name}`")
|
||||
}
|
||||
|
||||
fn autofix_title(&self) -> String {
|
||||
let MissingRequiredImport(name) = self;
|
||||
format!("Insert required import: `{name}`")
|
||||
}
|
||||
|
||||
fn placeholder() -> Self {
|
||||
MissingRequiredImport("from __future__ import ...".to_string())
|
||||
}
|
||||
}
|
||||
|
||||
// eradicate
|
||||
|
||||
define_violation!(
|
||||
@@ -4792,6 +4846,33 @@ impl Violation for UnsafeYAMLLoad {
|
||||
}
|
||||
}
|
||||
|
||||
define_violation!(
|
||||
pub struct SnmpInsecureVersion;
|
||||
);
|
||||
impl Violation for SnmpInsecureVersion {
|
||||
fn message(&self) -> String {
|
||||
"The use of SNMPv1 and SNMPv2 is insecure. Use SNMPv3 if able.".to_string()
|
||||
}
|
||||
|
||||
fn placeholder() -> Self {
|
||||
SnmpInsecureVersion
|
||||
}
|
||||
}
|
||||
|
||||
define_violation!(
|
||||
pub struct SnmpWeakCryptography;
|
||||
);
|
||||
impl Violation for SnmpWeakCryptography {
|
||||
fn message(&self) -> String {
|
||||
"You should not use SNMPv3 without encryption. `noAuthNoPriv` & `authNoPriv` is insecure."
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn placeholder() -> Self {
|
||||
SnmpWeakCryptography
|
||||
}
|
||||
}
|
||||
|
||||
// flake8-boolean-trap
|
||||
|
||||
define_violation!(
|
||||
|
||||
Reference in New Issue
Block a user