Compare commits

19 commits: github-292...v0.0.254

| Author | SHA1 | Date |
|---|---|---|
|  | 889c05c87e |  |
|  | bbbc44336e |  |
|  | d216b2aaa8 |  |
|  | 367cc43c42 |  |
|  | b5b26d5a3e |  |
|  | dedf8aa5cc |  |
|  | eb42ce9319 |  |
|  | cdbe2ee496 |  |
|  | d1288dc2b1 |  |
|  | 3bcffb5bdd |  |
|  | 98209be8aa |  |
|  | a03fa93c3a |  |
|  | 4de3882088 |  |
|  | 3a98b68dc0 |  |
|  | 7e291e542d |  |
|  | 6f649d6579 |  |
|  | 508bc605a5 |  |
|  | ffdf6e35e6 |  |
|  | 886992c6c2 |  |
15  .github/workflows/ci.yaml  vendored
@@ -109,21 +109,6 @@ jobs:
./scripts/add_rule.py --name FirstRule --code TST001 --linter test
- run: cargo check

maturin-build:
name: "maturin build"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup show
- uses: Swatinem/rust-cache@v1
- uses: actions/setup-python@v4
with:
python-version: "3.11"
- run: pip install maturin
- run: maturin build -b bin
- run: python scripts/transform_readme.py --target pypi

typos:
name: "spell check"
runs-on: ubuntu-latest
85  Cargo.lock  generated
@@ -770,7 +770,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"

[[package]]
name = "flake8-to-ruff"
version = "0.0.253"
version = "0.0.254"
dependencies = [
"anyhow",
"clap 4.1.6",
@@ -1080,12 +1080,6 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"

[[package]]
name = "joinery"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72167d68f5fce3b8655487b8038691a3c9984ee769590f93f2a631f4ad64e4f5"

[[package]]
name = "js-sys"
version = "0.3.61"
@@ -1405,7 +1399,6 @@ dependencies = [
"autocfg",
"num-integer",
"num-traits",
"serde",
]

[[package]]
@@ -1415,7 +1408,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d"
dependencies = [
"num-traits",
"serde",
]

[[package]]
@@ -1447,27 +1439,6 @@ dependencies = [
"libc",
]

[[package]]
name = "num_enum"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e0072973714303aa6e3631c7e8e777970cf4bdd25dc4932e41031027b8bcc4e"
dependencies = [
"num_enum_derive",
]

[[package]]
name = "num_enum_derive"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0629cbd6b897944899b1f10496d9c4a7ac5878d45fd61bc22e9e79bfbbc29597"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn",
]

[[package]]
name = "once_cell"
version = "1.17.1"
@@ -1757,16 +1728,6 @@ dependencies = [
"termtree",
]

[[package]]
name = "proc-macro-crate"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34"
dependencies = [
"once_cell",
"toml_edit",
]

[[package]]
name = "proc-macro-error"
version = "1.0.4"
@@ -1981,7 +1942,7 @@ dependencies = [

[[package]]
name = "ruff"
version = "0.0.253"
version = "0.0.254"
dependencies = [
"anyhow",
"bisection",
@@ -2015,6 +1976,7 @@ dependencies = [
"path-absolutize",
"regex",
"result-like",
"ruff_cache",
"ruff_macros",
"ruff_python",
"ruff_rustpython",
@@ -2032,15 +1994,25 @@ dependencies = [
"test-case",
"textwrap",
"thiserror",
"titlecase",
"toml",
"wasm-bindgen",
"wasm-bindgen-test",
]

[[package]]
name = "ruff_cache"
version = "0.0.0"
dependencies = [
"filetime",
"globset",
"itertools",
"regex",
"ruff_macros",
]

[[package]]
name = "ruff_cli"
version = "0.0.253"
version = "0.0.254"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -2066,6 +2038,7 @@ dependencies = [
"rayon",
"regex",
"ruff",
"ruff_cache",
"rustc-hash",
"serde",
"serde_json",
@@ -2231,7 +2204,7 @@ dependencies = [
[[package]]
name = "rustpython-ast"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
source = "git+https://github.com/RustPython/RustPython.git?rev=822f6a9fa6b9142413634858c4c6908f678ce887#822f6a9fa6b9142413634858c4c6908f678ce887"
dependencies = [
"num-bigint",
"rustpython-compiler-core",
@@ -2240,7 +2213,7 @@ dependencies = [
[[package]]
name = "rustpython-common"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
source = "git+https://github.com/RustPython/RustPython.git?rev=822f6a9fa6b9142413634858c4c6908f678ce887#822f6a9fa6b9142413634858c4c6908f678ce887"
dependencies = [
"ascii",
"bitflags",
@@ -2265,24 +2238,21 @@ dependencies = [
[[package]]
name = "rustpython-compiler-core"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
source = "git+https://github.com/RustPython/RustPython.git?rev=822f6a9fa6b9142413634858c4c6908f678ce887#822f6a9fa6b9142413634858c4c6908f678ce887"
dependencies = [
"bincode",
"bitflags",
"bstr 0.2.17",
"itertools",
"lz4_flex",
"num-bigint",
"num-complex",
"num_enum",
"serde",
"thiserror",
]

[[package]]
name = "rustpython-parser"
version = "0.2.0"
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
source = "git+https://github.com/RustPython/RustPython.git?rev=822f6a9fa6b9142413634858c4c6908f678ce887#822f6a9fa6b9142413634858c4c6908f678ce887"
dependencies = [
"ahash",
"anyhow",
@@ -2297,7 +2267,7 @@ dependencies = [
"rustc-hash",
"rustpython-ast",
"rustpython-compiler-core",
"thiserror",
"serde",
"tiny-keccak",
"unic-emoji-char",
"unic-ucd-ident",
@@ -2752,17 +2722,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

[[package]]
name = "titlecase"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38397a8cdb017cfeb48bf6c154d6de975ac69ffeed35980fde199d2ee0842042"
dependencies = [
"joinery",
"lazy_static",
"regex",
]

[[package]]
name = "toml"
version = "0.6.0"
@@ -2949,7 +2908,7 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "unicode_names2"
version = "0.6.0"
source = "git+https://github.com/youknowone/unicode_names2.git?tag=v0.6.0+character-alias#4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde"
source = "git+https://github.com/youknowone/unicode_names2.git?rev=4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde#4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde"
dependencies = [
"phf",
]
@@ -14,8 +14,8 @@ libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e
once_cell = { version = "1.16.0" }
regex = { version = "1.6.0" }
rustc-hash = { version = "1.1.0" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "822f6a9fa6b9142413634858c4c6908f678ce887" }
rustpython-parser = { features = ["lalrpop", "serde"], git = "https://github.com/RustPython/RustPython.git", rev = "822f6a9fa6b9142413634858c4c6908f678ce887" }
schemars = { version = "0.8.11" }
serde = { version = "1.0.147", features = ["derive"] }
serde_json = { version = "1.0.87" }
@@ -137,7 +137,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.0.253'
rev: 'v0.0.254'
hooks:
- id: ruff
```
@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.253"
version = "0.0.254"
edition = { workspace = true }
rust-version = { workspace = true }
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.0.253"
version = "0.0.254"
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
edition = { workspace = true }
rust-version = { workspace = true }
@@ -19,6 +19,7 @@ doctest = false
ruff_macros = { path = "../ruff_macros" }
ruff_python = { path = "../ruff_python" }
ruff_rustpython = { path = "../ruff_rustpython" }
ruff_cache = { path = "../ruff_cache" }

anyhow = { workspace = true }
bisection = { version = "0.1.0" }
@@ -58,7 +59,6 @@ strum = { workspace = true }
strum_macros = { workspace = true }
textwrap = { version = "0.16.0" }
thiserror = { version = "1.0" }
titlecase = { version = "2.2.1" }
toml = { workspace = true }

# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
33  crates/ruff/resources/test/fixtures/flake8_pyi/PYI033.py  vendored  Normal file
@@ -0,0 +1,33 @@
# From https://github.com/PyCQA/flake8-pyi/blob/4212bec43dbc4020a59b90e2957c9488575e57ba/tests/type_comments.pyi

from collections.abc import Sequence
from typing import TypeAlias

A: TypeAlias = None # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
B: TypeAlias = None # type: str # And here's an extra comment about why it's that type # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
C: TypeAlias = None #type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
D: TypeAlias = None # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
E: TypeAlias = None# type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
F: TypeAlias = None#type:int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")

def func(
arg1, # type: dict[str, int] # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
arg2 # type: Sequence[bytes] # And here's some more info about this arg # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
): ...

class Foo:
Attr: TypeAlias = None # type: set[str] # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")

G: TypeAlias = None # type: ignore
H: TypeAlias = None # type: ignore[attr-defined]
I: TypeAlias = None #type: ignore
J: TypeAlias = None # type: ignore
K: TypeAlias = None# type: ignore
L: TypeAlias = None#type:ignore

# Whole line commented out # type: int
M: TypeAlias = None # type: can't parse me!

class Bar:
N: TypeAlias = None # type: can't parse me either!
# This whole line is commented out and indented # type: str
33  crates/ruff/resources/test/fixtures/flake8_pyi/PYI033.pyi  vendored  Normal file
@@ -0,0 +1,33 @@
# From https://github.com/PyCQA/flake8-pyi/blob/4212bec43dbc4020a59b90e2957c9488575e57ba/tests/type_comments.pyi

from collections.abc import Sequence
from typing import TypeAlias

A: TypeAlias = None # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
B: TypeAlias = None # type: str # And here's an extra comment about why it's that type # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
C: TypeAlias = None #type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
D: TypeAlias = None # type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
E: TypeAlias = None# type: int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
F: TypeAlias = None#type:int # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")

def func(
arg1, # type: dict[str, int] # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
arg2 # type: Sequence[bytes] # And here's some more info about this arg # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")
): ...

class Foo:
Attr: TypeAlias = None # type: set[str] # Y033 Do not use type comments in stubs (e.g. use "x: int" instead of "x = ... # type: int")

G: TypeAlias = None # type: ignore
H: TypeAlias = None # type: ignore[attr-defined]
I: TypeAlias = None #type: ignore
J: TypeAlias = None # type: ignore
K: TypeAlias = None# type: ignore
L: TypeAlias = None#type:ignore

# Whole line commented out # type: int
M: TypeAlias = None # type: can't parse me!

class Bar:
N: TypeAlias = None # type: can't parse me either!
# This whole line is commented out and indented # type: str
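The two fixtures above exercise the new PYI033 check (`TypeCommentInStub`), which flags `# type:` comments in stub (`.pyi`) files while leaving `# type: ignore` suppressions alone. As a minimal sketch of the rewrite the Y033 message asks for (the variable names below are hypothetical, not taken from the fixtures):

```python
# Flagged in a stub: the type lives in a trailing comment.
timeout = ...  # type: int

# Preferred form per the Y033 message: an inline annotation.
timeout: int

# Suppression comments are not type comments and are left untouched.
retries = ...  # type: ignore
```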
@@ -293,3 +293,30 @@ def x(y):

def foo(baz: str) -> str:
return baz

def end_of_statement():
def example():
if True:
return ""

def example():
if True:
return ""

def example():
if True:
return "" # type: ignore

def example():
if True:
return "" ;

def example():
if True:
return "" \
; # type: ignore
@@ -1,7 +1,8 @@
from typing import TYPE_CHECKING, Any, ClassVar

import attrs

from ....import unknown
from ..protocol import commands, definitions, responses
from ..server import example
from .. import server
14  crates/ruff/resources/test/fixtures/pycodestyle/E21.py  vendored  Normal file
@@ -0,0 +1,14 @@
#: E211
spam (1)
#: E211 E211
dict ['key'] = list [index]
#: E211
dict['key'] ['subkey'] = list[index]
#: Okay
spam(1)
dict['key'] = list[index]

# This is not prohibited by PEP8, but avoid it.
class Foo (Bar, Baz):
pass
@@ -2,6 +2,8 @@

from functools import cached_property

from gi.repository import GObject

# Bad examples

def bad_liouiwnlkjl():
@@ -76,6 +78,11 @@ class Thingy:
"""This property method docstring does not need to be written in imperative mood."""
return self._beep

@GObject.Property
def good_custom_property(self):
"""This property method docstring does not need to be written in imperative mood."""
return self._beep

@cached_property
def good_cached_property(self):
"""This property method docstring does not need to be written in imperative mood."""
23  crates/ruff/resources/test/fixtures/pyflakes/F821_11.py  vendored  Normal file
@@ -0,0 +1,23 @@
"""Test case: strings used within calls within type annotations."""

from typing import Callable

import bpy
from mypy_extensions import VarArg

from foo import Bar

class LightShow(bpy.types.Operator):
label = "Create Character"
name = "lightshow.letter_creation"

filepath: bpy.props.StringProperty(subtype="FILE_PATH") # OK

def f(x: Callable[[VarArg("os")], None]): # F821
pass

f(Callable[["Bar"], None])
f(Callable[["Baz"], None])
25  crates/ruff/resources/test/fixtures/pyflakes/F821_12.py  vendored  Normal file
@@ -0,0 +1,25 @@
"""Test case: strings used within calls within type annotations."""

from __future__ import annotations

from typing import Callable

import bpy
from mypy_extensions import VarArg

from foo import Bar

class LightShow(bpy.types.Operator):
label = "Create Character"
name = "lightshow.letter_creation"

filepath: bpy.props.StringProperty(subtype="FILE_PATH") # OK

def f(x: Callable[[VarArg("os")], None]): # F821
pass

f(Callable[["Bar"], None])
f(Callable[["Baz"], None])
@@ -25,22 +25,7 @@ avoid-escape = true
max-complexity = 10

[tool.ruff.pep8-naming]
ignore-names = [
"setUp",
"tearDown",
"setUpClass",
"tearDownClass",
"setUpModule",
"tearDownModule",
"asyncSetUp",
"asyncTearDown",
"setUpTestData",
"failureException",
"longMessage",
"maxDiff",
]
classmethod-decorators = ["classmethod", "pydantic.validator"]
staticmethod-decorators = ["staticmethod"]
classmethod-decorators = ["pydantic.validator"]

[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "parents"
7  crates/ruff/resources/test/fixtures/pyupgrade/UP038.py  vendored  Normal file
@@ -0,0 +1,7 @@
isinstance(1, (int, float)) # UP038
issubclass("yes", (int, float, str)) # UP038

isinstance(1, int) # OK
issubclass("yes", int) # OK
isinstance(1, int | float) # OK
issubclass("yes", int | str) # OK
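This fixture drives the new UP038 rule (`IsinstanceWithTuple`), which the checker only reports when the target version is Python 3.10 or later (see the `use_pep604_isinstance` call added further down). A rough sketch of the change it encourages, assuming a 3.10+ target:

```python
# Flagged: a tuple of types as the second argument.
isinstance(1, (int, float))           # UP038
issubclass("yes", (int, float, str))  # UP038

# Preferred PEP 604 form: a union built with the | operator.
isinstance(1, int | float)
issubclass("yes", int | float | str)
```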
@@ -32,9 +32,10 @@ pub fn classify(
checker
.resolve_call_path(map_callable(expr))
.map_or(false, |call_path| {
staticmethod_decorators
.iter()
.any(|decorator| call_path == to_call_path(decorator))
call_path.as_slice() == ["", "staticmethod"]
|| staticmethod_decorators
.iter()
.any(|decorator| call_path == to_call_path(decorator))
})
}) {
FunctionType::StaticMethod
@@ -51,6 +52,7 @@ pub fn classify(
|| decorator_list.iter().any(|expr| {
// The method is decorated with a class method decorator (like `@classmethod`).
checker.resolve_call_path(map_callable(expr)).map_or(false, |call_path| {
call_path.as_slice() == ["", "classmethod"] ||
classmethod_decorators
.iter()
.any(|decorator| call_path == to_call_path(decorator))
@@ -1113,6 +1113,32 @@ pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
range
}

/// Given a statement, find its "logical end".
///
/// For example: the statement could be following by a trailing semicolon, by an end-of-line
/// comment, or by any number of continuation lines (and then by a comment, and so on).
pub fn end_of_statement(stmt: &Stmt, locator: &Locator) -> Location {
let contents = locator.skip(stmt.end_location.unwrap());

// End-of-file, so just return the end of the statement.
if contents.is_empty() {
return stmt.end_location.unwrap();
}

// Otherwise, find the end of the last line that's "part of" the statement.
for (lineno, line) in contents.lines().enumerate() {
if line.ends_with('\\') {
continue;
}
return to_absolute(
Location::new(lineno + 1, line.chars().count()),
stmt.end_location.unwrap(),
);
}

unreachable!("Expected to find end-of-statement")
}

/// Return the `Range` of the first `Elif` or `Else` token in an `If` statement.
pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
let StmtKind::If { body, orelse, .. } = &stmt.node else {
@@ -310,6 +310,26 @@ impl<'a> Checker<'a> {
}
}

/// Visit an [`Expr`], and treat it as a type definition.
macro_rules! visit_type_definition {
($self:ident, $expr:expr) => {{
let prev_in_type_definition = $self.in_type_definition;
$self.in_type_definition = true;
$self.visit_expr($expr);
$self.in_type_definition = prev_in_type_definition;
}};
}

/// Visit an [`Expr`], and treat it as _not_ a type definition.
macro_rules! visit_non_type_definition {
($self:ident, $expr:expr) => {{
let prev_in_type_definition = $self.in_type_definition;
$self.in_type_definition = false;
$self.visit_expr($expr);
$self.in_type_definition = prev_in_type_definition;
}};
}

impl<'a, 'b> Visitor<'b> for Checker<'a>
where
'b: 'a,
@@ -755,33 +775,67 @@ where
for expr in decorator_list {
self.visit_expr(expr);
}

// If we're in a class or module scope, then the annotation needs to be
// available at runtime.
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
let runtime_annotation = !self.annotations_future_enabled
&& matches!(
self.current_scope().kind,
ScopeKind::Class(..) | ScopeKind::Module
);

for arg in &args.posonlyargs {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
}
for arg in &args.args {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
}
if let Some(arg) = &args.vararg {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
}
for arg in &args.kwonlyargs {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
}
if let Some(arg) = &args.kwarg {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
}
for expr in returns {
self.visit_annotation(expr);
if runtime_annotation {
visit_type_definition!(self, expr);
} else {
self.visit_annotation(expr);
};
}
for expr in &args.kw_defaults {
self.visit_expr(expr);
@@ -2202,23 +2256,20 @@ where
// If we're in a class or module scope, then the annotation needs to be
// available at runtime.
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
if !self.annotations_future_enabled
let runtime_annotation = !self.annotations_future_enabled
&& matches!(
self.current_scope().kind,
ScopeKind::Class(..) | ScopeKind::Module
)
{
self.in_type_definition = true;
self.visit_expr(annotation);
self.in_type_definition = false;
);

if runtime_annotation {
visit_type_definition!(self, annotation);
} else {
self.visit_annotation(annotation);
}
if let Some(expr) = value {
if self.match_typing_expr(annotation, "TypeAlias") {
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = false;
visit_type_definition!(self, expr);
} else {
self.visit_expr(expr);
}
@@ -2275,12 +2326,9 @@ where

fn visit_annotation(&mut self, expr: &'b Expr) {
let prev_in_annotation = self.in_annotation;
let prev_in_type_definition = self.in_type_definition;
self.in_annotation = true;
self.in_type_definition = true;
self.visit_expr(expr);
visit_type_definition!(self, expr);
self.in_annotation = prev_in_annotation;
self.in_type_definition = prev_in_type_definition;
}

fn visit_expr(&mut self, expr: &'b Expr) {
@@ -2312,7 +2360,6 @@ where
self.push_expr(expr);

let prev_in_literal = self.in_literal;
let prev_in_type_definition = self.in_type_definition;

// Pre-visit.
match &expr.node {
@@ -2584,6 +2631,11 @@ where
if self.settings.rules.enabled(&Rule::OSErrorAlias) {
pyupgrade::rules::os_error_alias(self, &expr);
}
if self.settings.rules.enabled(&Rule::IsinstanceWithTuple)
&& self.settings.target_version >= PythonVersion::Py310
{
pyupgrade::rules::use_pep604_isinstance(self, expr, func, args);
}

// flake8-print
if self.settings.rules.enabled(&Rule::PrintFound)
@@ -3485,32 +3537,7 @@ where
flake8_pie::rules::prefer_list_builtin(self, expr);
}

// Visit the arguments, but avoid the body, which will be deferred.
for arg in &args.posonlyargs {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
}
}
for arg in &args.args {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
}
}
if let Some(arg) = &args.vararg {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
}
}
for arg in &args.kwonlyargs {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
}
}
if let Some(arg) = &args.kwarg {
if let Some(expr) = &arg.node.annotation {
self.visit_annotation(expr);
}
}
// Visit the default arguments, but avoid the body, which will be deferred.
for expr in &args.kw_defaults {
self.visit_expr(expr);
}
@@ -3608,9 +3635,16 @@ where
Some(Callable::NamedTuple)
} else if self.match_typing_call_path(&call_path, "TypedDict") {
Some(Callable::TypedDict)
} else if ["Arg", "DefaultArg", "NamedArg", "DefaultNamedArg"]
.iter()
.any(|target| call_path.as_slice() == ["mypy_extensions", target])
} else if [
"Arg",
"DefaultArg",
"NamedArg",
"DefaultNamedArg",
"VarArg",
"KwArg",
]
.iter()
.any(|target| call_path.as_slice() == ["mypy_extensions", target])
{
Some(Callable::MypyExtension)
} else {
@@ -3621,17 +3655,13 @@ where
Some(Callable::ForwardRef) => {
self.visit_expr(func);
for expr in args {
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, expr);
}
}
Some(Callable::Cast) => {
self.visit_expr(func);
if !args.is_empty() {
self.in_type_definition = true;
self.visit_expr(&args[0]);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, &args[0]);
}
for expr in args.iter().skip(1) {
self.visit_expr(expr);
@@ -3640,29 +3670,21 @@ where
Some(Callable::NewType) => {
self.visit_expr(func);
for expr in args.iter().skip(1) {
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, expr);
}
}
Some(Callable::TypeVar) => {
self.visit_expr(func);
for expr in args.iter().skip(1) {
self.in_type_definition = true;
self.visit_expr(expr);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, expr);
}
for keyword in keywords {
let KeywordData { arg, value } = &keyword.node;
if let Some(id) = arg {
if id == "bound" {
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, value);
} else {
self.in_type_definition = false;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_non_type_definition!(self, value);
}
}
}
@@ -3679,16 +3701,8 @@ where
ExprKind::List { elts, .. }
| ExprKind::Tuple { elts, .. } => {
if elts.len() == 2 {
self.in_type_definition = false;

self.visit_expr(&elts[0]);
self.in_type_definition =
prev_in_type_definition;

self.in_type_definition = true;
self.visit_expr(&elts[1]);
self.in_type_definition =
prev_in_type_definition;
visit_non_type_definition!(self, &elts[0]);
visit_type_definition!(self, &elts[1]);
}
}
_ => {}
@@ -3702,9 +3716,7 @@ where
// Ex) NamedTuple("a", a=int)
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, value);
}
}
Some(Callable::TypedDict) => {
@@ -3714,14 +3726,10 @@ where
if args.len() > 1 {
if let ExprKind::Dict { keys, values } = &args[1].node {
for key in keys.iter().flatten() {
self.in_type_definition = false;
self.visit_expr(key);
self.in_type_definition = prev_in_type_definition;
visit_non_type_definition!(self, key);
}
for value in values {
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, value);
}
}
}
@@ -3729,9 +3737,7 @@ where
// Ex) TypedDict("a", a=int)
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, value);
}
}
Some(Callable::MypyExtension) => {
@@ -3739,39 +3745,39 @@ where

if let Some(arg) = args.first() {
// Ex) DefaultNamedArg(bool | None, name="some_prop_name")
self.in_type_definition = true;
self.visit_expr(arg);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, arg);

for arg in args.iter().skip(1) {
self.in_type_definition = false;
self.visit_expr(arg);
self.in_type_definition = prev_in_type_definition;
visit_non_type_definition!(self, arg);
}
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
self.in_type_definition = false;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_non_type_definition!(self, value);
}
} else {
// Ex) DefaultNamedArg(type="bool", name="some_prop_name")
for keyword in keywords {
let KeywordData { value, arg, .. } = &keyword.node;
if arg.as_ref().map_or(false, |arg| arg == "type") {
self.in_type_definition = true;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, value);
} else {
self.in_type_definition = false;
self.visit_expr(value);
self.in_type_definition = prev_in_type_definition;
visit_non_type_definition!(self, value);
}
}
}
}
None => {
visitor::walk_expr(self, expr);
// If we're in a type definition, we need to treat the arguments to any
// other callables as non-type definitions (i.e., we don't want to treat
// any strings as deferred type definitions).
self.visit_expr(func);
for arg in args {
visit_non_type_definition!(self, arg);
}
for keyword in keywords {
let KeywordData { value, .. } = &keyword.node;
visit_non_type_definition!(self, value);
}
}
}
}
@@ -3797,9 +3803,7 @@ where
// Ex) Optional[int]
SubscriptKind::AnnotatedSubscript => {
self.visit_expr(value);
self.in_type_definition = true;
self.visit_expr(slice);
self.in_type_definition = prev_in_type_definition;
visit_type_definition!(self, slice);
self.visit_expr_context(ctx);
}
// Ex) Annotated[int, "Hello, world!"]
@@ -3810,11 +3814,9 @@ where
if let ExprKind::Tuple { elts, ctx } = &slice.node {
if let Some(expr) = elts.first() {
self.visit_expr(expr);
self.in_type_definition = false;
for expr in elts.iter().skip(1) {
self.visit_expr(expr);
visit_non_type_definition!(self, expr);
}
self.in_type_definition = prev_in_type_definition;
self.visit_expr_context(ctx);
}
} else {
@@ -3849,7 +3851,6 @@ where
_ => {}
};

self.in_type_definition = prev_in_type_definition;
self.in_literal = prev_in_literal;

self.pop_expr();
@@ -5611,7 +5612,11 @@ impl<'a> Checker<'a> {
pydocstyle::rules::ends_with_period(self, &docstring);
}
if self.settings.rules.enabled(&Rule::NonImperativeMood) {
pydocstyle::rules::non_imperative_mood(self, &docstring);
pydocstyle::rules::non_imperative_mood(
self,
&docstring,
&self.settings.pydocstyle.property_decorators,
);
}
if self.settings.rules.enabled(&Rule::NoSignature) {
pydocstyle::rules::no_signature(self, &docstring);
@@ -1,17 +1,20 @@
#![allow(dead_code, unused_imports, unused_variables)]

use bisection::bisect_left;
use itertools::Itertools;
use rustpython_parser::ast::Location;
use rustpython_parser::lexer::LexResult;

use crate::ast::types::Range;
use crate::registry::Diagnostic;
use crate::registry::{Diagnostic, Rule};
use crate::rules::pycodestyle::logical_lines::{iter_logical_lines, TokenFlags};
use crate::rules::pycodestyle::rules::{
extraneous_whitespace, indentation, missing_whitespace_after_keyword, space_around_operator,
whitespace_around_keywords, whitespace_around_named_parameter_equals,
whitespace_before_comment,
extraneous_whitespace, indentation, missing_whitespace_after_keyword,
missing_whitespace_around_operator, space_around_operator, whitespace_around_keywords,
whitespace_around_named_parameter_equals, whitespace_before_comment,
whitespace_before_parameters,
};
use crate::settings::Settings;
use crate::settings::{flags, Settings};
use crate::source_code::{Locator, Stylist};

/// Return the amount of indentation, expanding tabs to the next multiple of 8.
@@ -40,6 +43,7 @@ pub fn check_logical_lines(
locator: &Locator,
stylist: &Stylist,
settings: &Settings,
autofix: flags::Autofix,
) -> Vec<Diagnostic> {
let mut diagnostics = vec![];
@@ -147,6 +151,32 @@ pub fn check_logical_lines(
});
}
}
for (location, kind) in missing_whitespace_around_operator(&line.tokens) {
if settings.rules.enabled(kind.rule()) {
diagnostics.push(Diagnostic {
kind,
location,
end_location: location,
fix: None,
parent: None,
});
}
}
}

if line.flags.contains(TokenFlags::BRACKET) {
#[cfg(feature = "logical_lines")]
let should_fix =
autofix.into() && settings.rules.should_fix(&Rule::WhitespaceBeforeParameters);

#[cfg(not(feature = "logical_lines"))]
let should_fix = false;

for diagnostic in whitespace_before_parameters(&line.tokens, should_fix) {
if settings.rules.enabled(diagnostic.kind.rule()) {
diagnostics.push(diagnostic);
}
}
}

for (index, kind) in indentation(
@@ -7,8 +7,8 @@ use crate::lex::docstring_detection::StateMachine;
use crate::registry::{Diagnostic, Rule};
use crate::rules::ruff::rules::Context;
use crate::rules::{
eradicate, flake8_commas, flake8_implicit_str_concat, flake8_quotes, pycodestyle, pyupgrade,
ruff,
eradicate, flake8_commas, flake8_implicit_str_concat, flake8_pyi, flake8_quotes, pycodestyle,
pyupgrade, ruff,
};
use crate::settings::{flags, Settings};
use crate::source_code::Locator;
@@ -18,6 +18,7 @@ pub fn check_tokens(
tokens: &[LexResult],
settings: &Settings,
autofix: flags::Autofix,
is_interface_definition: bool,
) -> Vec<Diagnostic> {
let mut diagnostics: Vec<Diagnostic> = vec![];
@@ -55,6 +56,7 @@ pub fn check_tokens(
.enabled(&Rule::TrailingCommaOnBareTupleProhibited)
|| settings.rules.enabled(&Rule::TrailingCommaProhibited);
let enforce_extraneous_parenthesis = settings.rules.enabled(&Rule::ExtraneousParentheses);
let enforce_type_comment_in_stub = settings.rules.enabled(&Rule::TypeCommentInStub);

// RUF001, RUF002, RUF003
if enforce_ambiguous_unicode_character {
@@ -161,5 +163,10 @@ pub fn check_tokens(
);
}

// PYI033
if enforce_type_comment_in_stub && is_interface_definition {
diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(tokens));
}

diagnostics
}
@@ -29,6 +29,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E203") => Rule::WhitespaceBeforePunctuation,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E211") => Rule::WhitespaceBeforeParameters,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E221") => Rule::MultipleSpacesBeforeOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E222") => Rule::MultipleSpacesAfterOperator,
@@ -37,6 +39,14 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E224") => Rule::TabAfterOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E225") => Rule::MissingWhitespaceAroundOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E226") => Rule::MissingWhitespaceAroundArithmeticOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E227") => Rule::MissingWhitespaceAroundBitwiseOrShiftOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E228") => Rule::MissingWhitespaceAroundModuloOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E251") => Rule::UnexpectedSpacesAroundKeywordParameterEquals,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E252") => Rule::MissingWhitespaceAroundParameterEquals,
@@ -344,6 +354,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pyupgrade, "035") => Rule::ImportReplacements,
(Pyupgrade, "036") => Rule::OutdatedVersionBlock,
(Pyupgrade, "037") => Rule::QuotedAnnotation,
(Pyupgrade, "038") => Rule::IsinstanceWithTuple,

// pydocstyle
(Pydocstyle, "100") => Rule::PublicModule,
@@ -500,6 +511,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Pyi, "011") => Rule::TypedArgumentSimpleDefaults,
(Flake8Pyi, "014") => Rule::ArgumentSimpleDefaults,
(Flake8Pyi, "021") => Rule::DocstringInStub,
(Flake8Pyi, "033") => Rule::TypeCommentInStub,

// flake8-pytest-style
(Flake8PytestStyle, "001") => Rule::IncorrectFixtureParenthesesStyle,
@@ -1,70 +1,36 @@
//! Abstractions for Google-style docstrings.

use once_cell::sync::Lazy;
use rustc_hash::FxHashSet;
use crate::docstrings::sections::SectionKind;

pub(crate) static GOOGLE_SECTION_NAMES: Lazy<FxHashSet<&'static str>> = Lazy::new(|| {
FxHashSet::from_iter([
"Args",
"Arguments",
"Attention",
"Attributes",
"Caution",
"Danger",
"Error",
"Example",
"Examples",
"Hint",
"Important",
"Keyword Args",
"Keyword Arguments",
"Methods",
"Note",
"Notes",
"Return",
"Returns",
"Raises",
"References",
"See Also",
"Tip",
"Todo",
"Warning",
"Warnings",
"Warns",
"Yield",
"Yields",
])
});

pub(crate) static LOWERCASE_GOOGLE_SECTION_NAMES: Lazy<FxHashSet<&'static str>> = Lazy::new(|| {
FxHashSet::from_iter([
"args",
"arguments",
"attention",
"attributes",
"caution",
"danger",
"error",
"example",
"examples",
"hint",
"important",
"keyword args",
"keyword arguments",
"methods",
"note",
"notes",
"return",
"returns",
"raises",
"references",
"see also",
"tip",
"todo",
"warning",
"warnings",
"warns",
"yield",
"yields",
])
});
pub(crate) static GOOGLE_SECTIONS: &[SectionKind] = &[
SectionKind::Attributes,
SectionKind::Examples,
SectionKind::Methods,
SectionKind::Notes,
SectionKind::Raises,
SectionKind::References,
SectionKind::Returns,
SectionKind::SeeAlso,
SectionKind::Yields,
// Google-only
SectionKind::Args,
SectionKind::Arguments,
SectionKind::Attention,
SectionKind::Caution,
SectionKind::Danger,
SectionKind::Error,
SectionKind::Example,
SectionKind::Hint,
SectionKind::Important,
SectionKind::KeywordArgs,
SectionKind::KeywordArguments,
SectionKind::Note,
SectionKind::Notes,
SectionKind::Return,
SectionKind::Tip,
SectionKind::Todo,
SectionKind::Warning,
SectionKind::Warnings,
SectionKind::Warns,
SectionKind::Yield,
];
@@ -1,40 +1,20 @@
//! Abstractions for NumPy-style docstrings.

use once_cell::sync::Lazy;
use rustc_hash::FxHashSet;
use crate::docstrings::sections::SectionKind;

pub(crate) static LOWERCASE_NUMPY_SECTION_NAMES: Lazy<FxHashSet<&'static str>> = Lazy::new(|| {
FxHashSet::from_iter([
"short summary",
"extended summary",
"parameters",
"returns",
"yields",
"other parameters",
"raises",
"see also",
"notes",
"references",
"examples",
"attributes",
"methods",
])
});

pub(crate) static NUMPY_SECTION_NAMES: Lazy<FxHashSet<&'static str>> = Lazy::new(|| {
FxHashSet::from_iter([
"Short Summary",
"Extended Summary",
"Parameters",
"Returns",
"Yields",
"Other Parameters",
"Raises",
"See Also",
"Notes",
"References",
"Examples",
"Attributes",
"Methods",
])
});
pub(crate) static NUMPY_SECTIONS: &[SectionKind] = &[
SectionKind::Attributes,
SectionKind::Examples,
SectionKind::Methods,
SectionKind::Notes,
SectionKind::Raises,
SectionKind::References,
SectionKind::Returns,
SectionKind::SeeAlso,
SectionKind::Yields,
// NumPy-only
SectionKind::ExtendedSummary,
SectionKind::OtherParameters,
SectionKind::Parameters,
SectionKind::ShortSummary,
];
@@ -1,8 +1,126 @@
use strum_macros::EnumIter;

use crate::ast::whitespace;
use crate::docstrings::styles::SectionStyle;

#[derive(EnumIter, PartialEq, Eq, Debug, Clone, Copy)]
pub enum SectionKind {
Args,
Arguments,
Attention,
Attributes,
Caution,
Danger,
Error,
Example,
Examples,
ExtendedSummary,
Hint,
Important,
KeywordArgs,
KeywordArguments,
Methods,
Note,
Notes,
OtherParameters,
Parameters,
Raises,
References,
Return,
Returns,
SeeAlso,
ShortSummary,
Tip,
Todo,
Warning,
Warnings,
Warns,
Yield,
Yields,
}

impl SectionKind {
pub fn from_str(s: &str) -> Option<Self> {
match s.to_ascii_lowercase().as_str() {
"args" => Some(Self::Args),
"arguments" => Some(Self::Arguments),
"attention" => Some(Self::Attention),
"attributes" => Some(Self::Attributes),
"caution" => Some(Self::Caution),
"danger" => Some(Self::Danger),
"error" => Some(Self::Error),
"example" => Some(Self::Example),
"examples" => Some(Self::Examples),
"extended summary" => Some(Self::ExtendedSummary),
"hint" => Some(Self::Hint),
"important" => Some(Self::Important),
"keyword args" => Some(Self::KeywordArgs),
"keyword arguments" => Some(Self::KeywordArguments),
"methods" => Some(Self::Methods),
"note" => Some(Self::Note),
"notes" => Some(Self::Notes),
"other parameters" => Some(Self::OtherParameters),
"parameters" => Some(Self::Parameters),
"raises" => Some(Self::Raises),
"references" => Some(Self::References),
"return" => Some(Self::Return),
"returns" => Some(Self::Returns),
"see also" => Some(Self::SeeAlso),
"short summary" => Some(Self::ShortSummary),
"tip" => Some(Self::Tip),
"todo" => Some(Self::Todo),
"warning" => Some(Self::Warning),
"warnings" => Some(Self::Warnings),
"warns" => Some(Self::Warns),
"yield" => Some(Self::Yield),
"yields" => Some(Self::Yields),
_ => None,
}
}

pub fn as_str(self) -> &'static str {
match self {
Self::Args => "Args",
Self::Arguments => "Arguments",
Self::Attention => "Attention",
Self::Attributes => "Attributes",
Self::Caution => "Caution",
Self::Danger => "Danger",
Self::Error => "Error",
Self::Example => "Example",
Self::Examples => "Examples",
Self::ExtendedSummary => "Extended Summary",
Self::Hint => "Hint",
Self::Important => "Important",
Self::KeywordArgs => "Keyword Args",
Self::KeywordArguments => "Keyword Arguments",
Self::Methods => "Methods",
Self::Note => "Note",
Self::Notes => "Notes",
Self::OtherParameters => "Other Parameters",
Self::Parameters => "Parameters",
Self::Raises => "Raises",
Self::References => "References",
Self::Return => "Return",
Self::Returns => "Returns",
Self::SeeAlso => "See Also",
Self::ShortSummary => "Short Summary",
Self::Tip => "Tip",
Self::Todo => "Todo",
Self::Warning => "Warning",
Self::Warnings => "Warnings",
Self::Warns => "Warns",
Self::Yield => "Yield",
Self::Yields => "Yields",
}
}
}
#[derive(Debug)]
pub(crate) struct SectionContext<'a> {
/// The "kind" of the section, e.g. "SectionKind::Args" or "SectionKind::Returns".
pub(crate) kind: SectionKind,
/// The name of the section as it appears in the docstring, e.g. "Args" or "Returns".
pub(crate) section_name: &'a str,
pub(crate) previous_line: &'a str,
pub(crate) line: &'a str,
@@ -11,10 +129,13 @@ pub(crate) struct SectionContext<'a> {
pub(crate) original_index: usize,
}

fn suspected_as_section(line: &str, style: &SectionStyle) -> bool {
style
.lowercase_section_names()
.contains(&whitespace::leading_words(line).to_lowercase().as_str())
fn suspected_as_section(line: &str, style: &SectionStyle) -> Option<SectionKind> {
if let Some(kind) = SectionKind::from_str(whitespace::leading_words(line)) {
if style.sections().contains(&kind) {
return Some(kind);
}
}
None
}

/// Check if the suspected context is really a section header.
@@ -49,21 +170,15 @@ pub(crate) fn section_contexts<'a>(
lines: &'a [&'a str],
style: &SectionStyle,
) -> Vec<SectionContext<'a>> {
let suspected_section_indices: Vec<usize> = lines
let mut contexts = vec![];
for (kind, lineno) in lines
.iter()
.enumerate()
.filter_map(|(lineno, line)| {
if lineno > 0 && suspected_as_section(line, style) {
Some(lineno)
} else {
None
}
})
.collect();

let mut contexts = vec![];
for lineno in suspected_section_indices {
.skip(1)
.filter_map(|(lineno, line)| suspected_as_section(line, style).map(|kind| (kind, lineno)))
{
let context = SectionContext {
kind,
section_name: whitespace::leading_words(lines[lineno]),
previous_line: lines[lineno - 1],
line: lines[lineno],
@@ -76,11 +191,12 @@ pub(crate) fn section_contexts<'a>(
}
}

let mut truncated_contexts = vec![];
let mut truncated_contexts = Vec::with_capacity(contexts.len());
let mut end: Option<usize> = None;
for context in contexts.into_iter().rev() {
let next_end = context.original_index;
truncated_contexts.push(SectionContext {
kind: context.kind,
section_name: context.section_name,
previous_line: context.previous_line,
line: context.line,
@@ -1,8 +1,6 @@
use once_cell::sync::Lazy;
use rustc_hash::FxHashSet;

use crate::docstrings::google::{GOOGLE_SECTION_NAMES, LOWERCASE_GOOGLE_SECTION_NAMES};
use crate::docstrings::numpy::{LOWERCASE_NUMPY_SECTION_NAMES, NUMPY_SECTION_NAMES};
use crate::docstrings::google::GOOGLE_SECTIONS;
use crate::docstrings::numpy::NUMPY_SECTIONS;
use crate::docstrings::sections::SectionKind;

pub(crate) enum SectionStyle {
Numpy,
@@ -10,17 +8,10 @@ pub(crate) enum SectionStyle {
}

impl SectionStyle {
pub(crate) fn section_names(&self) -> &Lazy<FxHashSet<&'static str>> {
pub(crate) fn sections(&self) -> &[SectionKind] {
match self {
SectionStyle::Numpy => &NUMPY_SECTION_NAMES,
SectionStyle::Google => &GOOGLE_SECTION_NAMES,
}
}

pub(crate) fn lowercase_section_names(&self) -> &Lazy<FxHashSet<&'static str>> {
match self {
SectionStyle::Numpy => &LOWERCASE_NUMPY_SECTION_NAMES,
SectionStyle::Google => &LOWERCASE_GOOGLE_SECTION_NAMES,
SectionStyle::Numpy => NUMPY_SECTIONS,
SectionStyle::Google => GOOGLE_SECTIONS,
}
}
}
@@ -577,6 +577,7 @@ mod tests {
pydocstyle: Some(pydocstyle::settings::Options {
convention: Some(Convention::Numpy),
ignore_decorators: None,
property_decorators: None,
}),
..default_options([Linter::Pydocstyle.into()])
});
@@ -1,13 +1,12 @@
use std::ops::Deref;
use std::path::{Path, PathBuf};

use anyhow::{anyhow, Result};
use globset::GlobMatcher;
use log::debug;
use path_absolutize::{path_dedot, Absolutize};
use rustc_hash::FxHashSet;

use crate::registry::Rule;
use crate::settings::hashable::{HashableGlobMatcher, HashableHashSet};

/// Extract the absolute path and basename (as strings) from a Path.
pub fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
@@ -25,11 +24,7 @@ pub fn extract_path_names(path: &Path) -> Result<(&str, &str)> {
/// Create a set with codes matching the pattern/code pairs.
pub(crate) fn ignores_from_path<'a>(
path: &Path,
pattern_code_pairs: &'a [(
HashableGlobMatcher,
HashableGlobMatcher,
HashableHashSet<Rule>,
)],
pattern_code_pairs: &'a [(GlobMatcher, GlobMatcher, FxHashSet<Rule>)],
) -> FxHashSet<&'a Rule> {
let (file_path, file_basename) = extract_path_names(path).expect("Unable to parse filename");
pattern_code_pairs
@@ -39,8 +34,8 @@ pub(crate) fn ignores_from_path<'a>(
debug!(
"Adding per-file ignores for {:?} due to basename match on {:?}: {:?}",
path,
basename.deref().glob().regex(),
&**codes
basename.glob().regex(),
codes
);
return Some(codes.iter());
}
@@ -48,8 +43,8 @@ pub(crate) fn ignores_from_path<'a>(
debug!(
"Adding per-file ignores for {:?} due to absolute match on {:?}: {:?}",
path,
absolute.deref().glob().regex(),
&**codes
absolute.glob().regex(),
codes
);
return Some(codes.iter());
}
@@ -13,7 +13,6 @@ pub use violation::{AutofixKind, Availability as AutofixAvailability};

mod ast;
mod autofix;
pub mod cache;
mod checkers;
mod codes;
mod cst;
@@ -21,6 +21,7 @@ use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::message::{Message, Source};
use crate::noqa::{add_noqa, rule_is_ignored};
use crate::registry::{Diagnostic, Rule};
use crate::resolver::is_interface_definition_path;
use crate::rules::pycodestyle;
use crate::settings::{flags, Settings};
use crate::source_code::{Indexer, Locator, Stylist};
@@ -83,7 +84,14 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_tokens())
{
diagnostics.extend(check_tokens(locator, &tokens, settings, autofix));
let is_interface_definition = is_interface_definition_path(path);
diagnostics.extend(check_tokens(
locator,
&tokens,
settings,
autofix,
is_interface_definition,
));
}

// Run the filesystem-based rules.
@@ -101,7 +109,13 @@ pub fn check_path(
.iter_enabled()
.any(|rule_code| rule_code.lint_source().is_logical_lines())
{
diagnostics.extend(check_logical_lines(&tokens, locator, stylist, settings));
diagnostics.extend(check_logical_lines(
&tokens,
locator,
stylist,
settings,
flags::Autofix::Enabled,
));
}

// Run the AST-based rules.
@@ -57,12 +57,22 @@ ruff_macros::register_rules!(
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MultipleSpacesBeforeKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundOperator,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundArithmeticOperator,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundBitwiseOrShiftOperator,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundModuloOperator,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::TabAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::WhitespaceBeforeParameters,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::TabBeforeKeyword,
|
||||
rules::pycodestyle::rules::MultipleImportsOnOneLine,
|
||||
rules::pycodestyle::rules::ModuleImportNotAtTopOfFile,
|
||||
@@ -331,6 +341,7 @@ ruff_macros::register_rules!(
|
||||
rules::pyupgrade::rules::ImportReplacements,
|
||||
rules::pyupgrade::rules::OutdatedVersionBlock,
|
||||
rules::pyupgrade::rules::QuotedAnnotation,
|
||||
rules::pyupgrade::rules::IsinstanceWithTuple,
|
||||
// pydocstyle
|
||||
rules::pydocstyle::rules::PublicModule,
|
||||
rules::pydocstyle::rules::PublicClass,
|
||||
@@ -474,6 +485,7 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_pyi::rules::DocstringInStub,
|
||||
rules::flake8_pyi::rules::TypedArgumentSimpleDefaults,
|
||||
rules::flake8_pyi::rules::ArgumentSimpleDefaults,
|
||||
rules::flake8_pyi::rules::TypeCommentInStub,
|
||||
// flake8-pytest-style
|
||||
rules::flake8_pytest_style::rules::IncorrectFixtureParenthesesStyle,
|
||||
rules::flake8_pytest_style::rules::FixturePositionalArgs,
|
||||
@@ -827,19 +839,25 @@ impl Rule {
|
||||
| Rule::MultipleStatementsOnOneLineColon
|
||||
| Rule::UselessSemicolon
|
||||
| Rule::MultipleStatementsOnOneLineSemicolon
|
||||
| Rule::TrailingCommaProhibited => &LintSource::Tokens,
|
||||
| Rule::TrailingCommaProhibited
|
||||
| Rule::TypeCommentInStub => &LintSource::Tokens,
|
||||
Rule::IOError => &LintSource::Io,
|
||||
Rule::UnsortedImports | Rule::MissingRequiredImport => &LintSource::Imports,
|
||||
Rule::ImplicitNamespacePackage | Rule::InvalidModuleName => &LintSource::Filesystem,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
Rule::IndentationWithInvalidMultiple
|
||||
| Rule::IndentationWithInvalidMultipleComment
|
||||
| Rule::MissingWhitespaceAfterKeyword
|
||||
| Rule::MissingWhitespaceAroundArithmeticOperator
|
||||
| Rule::MissingWhitespaceAroundBitwiseOrShiftOperator
|
||||
| Rule::MissingWhitespaceAroundModuloOperator
|
||||
| Rule::MissingWhitespaceAroundOperator
|
||||
| Rule::MissingWhitespaceAroundParameterEquals
|
||||
| Rule::MultipleLeadingHashesForBlockComment
|
||||
| Rule::MultipleSpacesAfterKeyword
|
||||
| Rule::MultipleSpacesAfterOperator
|
||||
| Rule::MultipleSpacesBeforeKeyword
|
||||
| Rule::MultipleSpacesBeforeOperator
|
||||
| Rule::MissingWhitespaceAfterKeyword
|
||||
| Rule::NoIndentedBlock
|
||||
| Rule::NoIndentedBlockComment
|
||||
| Rule::NoSpaceAfterBlockComment
|
||||
@@ -852,10 +870,10 @@ impl Rule {
|
||||
| Rule::TooFewSpacesBeforeInlineComment
|
||||
| Rule::UnexpectedIndentation
|
||||
| Rule::UnexpectedIndentationComment
|
||||
| Rule::UnexpectedSpacesAroundKeywordParameterEquals
|
||||
| Rule::WhitespaceAfterOpenBracket
|
||||
| Rule::WhitespaceBeforeCloseBracket
|
||||
| Rule::UnexpectedSpacesAroundKeywordParameterEquals
|
||||
| Rule::MissingWhitespaceAroundParameterEquals
|
||||
| Rule::WhitespaceBeforeParameters
|
||||
| Rule::WhitespaceBeforePunctuation => &LintSource::LogicalLines,
|
||||
_ => &LintSource::Ast,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
//! Settings for the `flake8-annotations` plugin.
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -60,7 +61,7 @@ pub struct Options {
|
||||
pub ignore_fully_untyped: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Settings {
|
||||
pub mypy_init_return: bool,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-bandit` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -45,7 +45,7 @@ pub struct Options {
|
||||
pub check_typed_exception: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub hardcoded_tmp_directory: Vec<String>,
|
||||
pub check_typed_exception: bool,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-bugbear` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -26,7 +26,7 @@ pub struct Options {
|
||||
pub extend_immutable_calls: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub extend_immutable_calls: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-builtins` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -22,7 +22,7 @@ pub struct Options {
|
||||
pub builtins_ignorelist: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub builtins_ignorelist: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-comprehensions` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -22,7 +22,7 @@ pub struct Options {
|
||||
pub allow_dict_calls_with_keyword_arguments: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub allow_dict_calls_with_keyword_arguments: bool,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-errmsg` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -18,7 +18,7 @@ pub struct Options {
|
||||
pub max_string_length: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub max_string_length: usize,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-implicit-str-concat` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -32,7 +32,7 @@ pub struct Options {
|
||||
pub allow_multiline: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub allow_multiline: bool,
|
||||
}
|
||||
|
||||
@@ -1,14 +1,10 @@
|
||||
//! Settings for import conventions.
|
||||
|
||||
use std::hash::Hash;
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use rustc_hash::FxHashMap;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::settings::hashable::HashableHashMap;
|
||||
|
||||
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("altair", "alt"),
|
||||
("matplotlib", "mpl"),
|
||||
@@ -64,9 +60,9 @@ pub struct Options {
|
||||
pub extend_aliases: Option<FxHashMap<String, String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub aliases: HashableHashMap<String, String>,
|
||||
pub aliases: FxHashMap<String, String>,
|
||||
}
|
||||
|
||||
fn default_aliases() -> FxHashMap<String, String> {
|
||||
@@ -90,7 +86,7 @@ fn resolve_aliases(options: Options) -> FxHashMap<String, String> {
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
aliases: default_aliases().into(),
|
||||
aliases: default_aliases(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -98,7 +94,7 @@ impl Default for Settings {
|
||||
impl From<Options> for Settings {
|
||||
fn from(options: Options) -> Self {
|
||||
Self {
|
||||
aliases: resolve_aliases(options).into(),
|
||||
aliases: resolve_aliases(options),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -106,7 +102,7 @@ impl From<Options> for Settings {
|
||||
impl From<Settings> for Options {
|
||||
fn from(settings: Settings) -> Self {
|
||||
Self {
|
||||
aliases: Some(settings.aliases.into()),
|
||||
aliases: Some(settings.aliases),
|
||||
extend_aliases: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,6 +31,8 @@ mod tests {
|
||||
#[test_case(Rule::ArgumentSimpleDefaults, Path::new("PYI014.pyi"))]
|
||||
#[test_case(Rule::DocstringInStub, Path::new("PYI021.py"))]
|
||||
#[test_case(Rule::DocstringInStub, Path::new("PYI021.pyi"))]
|
||||
#[test_case(Rule::TypeCommentInStub, Path::new("PYI033.py"))]
|
||||
#[test_case(Rule::TypeCommentInStub, Path::new("PYI033.pyi"))]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -7,6 +7,7 @@ pub use simple_defaults::{
|
||||
argument_simple_defaults, typed_argument_simple_defaults, ArgumentSimpleDefaults,
|
||||
TypedArgumentSimpleDefaults,
|
||||
};
|
||||
pub use type_comment_in_stub::{type_comment_in_stub, TypeCommentInStub};
|
||||
pub use unrecognized_platform::{
|
||||
unrecognized_platform, UnrecognizedPlatformCheck, UnrecognizedPlatformName,
|
||||
};
|
||||
@@ -17,4 +18,5 @@ mod non_empty_stub_body;
|
||||
mod pass_statement_stub_body;
|
||||
mod prefix_type_params;
|
||||
mod simple_defaults;
|
||||
mod type_comment_in_stub;
|
||||
mod unrecognized_platform;
|
||||
|
||||
@@ -0,0 +1,64 @@
use once_cell::sync::Lazy;
use regex::Regex;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

use ruff_macros::{define_violation, derive_message_formats};

use crate::registry::Diagnostic;
use crate::violation::Violation;
use crate::Range;

define_violation!(
    /// ## What it does
    /// Checks for the use of type comments (e.g., `x = 1 # type: int`) in stub
    /// files.
    ///
    /// ## Why is this bad?
    /// Stub (`.pyi`) files should use type annotations directly, rather
    /// than type comments, even if they're intended to support Python 2, since
    /// stub files are not executed at runtime. The one exception is `# type: ignore`.
    ///
    /// ## Example
    /// ```python
    /// x = 1 # type: int
    /// ```
    ///
    /// Use instead:
    /// ```python
    /// x: int = 1
    /// ```
    pub struct TypeCommentInStub;
);
impl Violation for TypeCommentInStub {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Don't use type comments in stub file")
    }
}

static TYPE_COMMENT_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^#\s*type:\s*([^#]+)(\s*#.*?)?$").unwrap());
static TYPE_IGNORE_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^#\s*type:\s*ignore([^#]+)?(\s*#.*?)?$").unwrap());

/// PYI033
pub fn type_comment_in_stub(tokens: &[LexResult]) -> Vec<Diagnostic> {
    let mut diagnostics = vec![];

    for token in tokens.iter().flatten() {
        if let (location, Tok::Comment(comment), end_location) = token {
            if TYPE_COMMENT_REGEX.is_match(comment) && !TYPE_IGNORE_REGEX.is_match(comment) {
                diagnostics.push(Diagnostic::new(
                    TypeCommentInStub,
                    Range {
                        location: *location,
                        end_location: *end_location,
                    },
                ));
            }
        }
    }

    diagnostics
}

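As a quick illustration of the two regexes above, this standalone snippet (using the same `regex` and `once_cell` crates as the new file) shows which comments PYI033 would flag: ordinary `# type:` comments match the first pattern, while `# type: ignore` comments are exempted by the second.

```rust
use once_cell::sync::Lazy;
use regex::Regex;

static TYPE_COMMENT_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^#\s*type:\s*([^#]+)(\s*#.*?)?$").unwrap());
static TYPE_IGNORE_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^#\s*type:\s*ignore([^#]+)?(\s*#.*?)?$").unwrap());

// Mirrors the condition in `type_comment_in_stub`: a comment is flagged only if
// it is a type comment and not a `# type: ignore` directive.
fn is_flagged(comment: &str) -> bool {
    TYPE_COMMENT_REGEX.is_match(comment) && !TYPE_IGNORE_REGEX.is_match(comment)
}

fn main() {
    assert!(is_flagged("# type: int"));
    assert!(is_flagged("# type: List[int]  # a trailing note"));
    assert!(!is_flagged("# type: ignore[assignment]"));
    assert!(!is_flagged("# not a type comment"));
}
```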
@@ -0,0 +1,6 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_pyi/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/flake8_pyi/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 6
|
||||
column: 21
|
||||
end_location:
|
||||
row: 6
|
||||
column: 127
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 7
|
||||
column: 21
|
||||
end_location:
|
||||
row: 7
|
||||
column: 183
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 8
|
||||
column: 21
|
||||
end_location:
|
||||
row: 8
|
||||
column: 126
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 9
|
||||
column: 21
|
||||
end_location:
|
||||
row: 9
|
||||
column: 132
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 10
|
||||
column: 19
|
||||
end_location:
|
||||
row: 10
|
||||
column: 128
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 11
|
||||
column: 19
|
||||
end_location:
|
||||
row: 11
|
||||
column: 123
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 14
|
||||
column: 11
|
||||
end_location:
|
||||
row: 14
|
||||
column: 128
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 15
|
||||
column: 10
|
||||
end_location:
|
||||
row: 15
|
||||
column: 172
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 19
|
||||
column: 28
|
||||
end_location:
|
||||
row: 19
|
||||
column: 139
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 29
|
||||
column: 21
|
||||
end_location:
|
||||
row: 29
|
||||
column: 44
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
TypeCommentInStub: ~
|
||||
location:
|
||||
row: 32
|
||||
column: 25
|
||||
end_location:
|
||||
row: 32
|
||||
column: 55
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-pytest-style` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -113,7 +113,7 @@ pub struct Options {
|
||||
pub mark_parentheses: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub fixture_parentheses: bool,
|
||||
pub parametrize_names_type: types::ParametrizeNameType,
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
pub enum ParametrizeNameType {
|
||||
#[serde(rename = "csv")]
|
||||
Csv,
|
||||
@@ -29,7 +30,7 @@ impl Display for ParametrizeNameType {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
pub enum ParametrizeValuesType {
|
||||
#[serde(rename = "tuple")]
|
||||
Tuple,
|
||||
@@ -52,7 +53,7 @@ impl Display for ParametrizeValuesType {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
pub enum ParametrizeValuesRowType {
|
||||
#[serde(rename = "tuple")]
|
||||
Tuple,
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
//! Settings for the `flake8-quotes` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Quote {
|
||||
/// Use single quotes.
|
||||
@@ -71,7 +71,7 @@ pub struct Options {
|
||||
pub avoid_escape: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub inline_quotes: Quote,
|
||||
pub multiline_quotes: Quote,
|
||||
|
||||
@@ -3,7 +3,7 @@ use rustpython_parser::ast::{Constant, Expr, ExprKind, Location, Stmt, StmtKind}

use ruff_macros::{define_violation, derive_message_formats};

use crate::ast::helpers::elif_else_range;
use crate::ast::helpers::{elif_else_range, end_of_statement};
use crate::ast::types::Range;
use crate::ast::visitor::Visitor;
use crate::ast::whitespace::indentation;
@@ -216,7 +216,12 @@ fn implicit_return(checker: &mut Checker, stmt: &Stmt) {
                content.push_str(checker.stylist.line_ending().as_str());
                content.push_str(indent);
                content.push_str("return None");
                diagnostic.amend(Fix::insertion(content, stmt.end_location.unwrap()));
                // This is the last statement in the function. So it has to be followed by
                // a newline, or comments, or nothing.
                diagnostic.amend(Fix::insertion(
                    content,
                    end_of_statement(stmt, checker.locator),
                ));
            }
        }
        checker.diagnostics.push(diagnostic);
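A minimal sketch of the idea behind the new insertion point (this is not the real `end_of_statement` helper, whose implementation is not shown in this diff): the autofix has to land after the physical end of the last statement, past any trailing comment, so that the inserted `return None` ends up on its own line instead of splitting the existing one.

```rust
/// Hypothetical stand-in for the `end_of_statement` helper used above: return the
/// byte offset of the end of the physical line containing `stmt_end`, i.e. past
/// any trailing comment on that line.
fn end_of_physical_line(source: &str, stmt_end: usize) -> usize {
    match source[stmt_end..].find('\n') {
        Some(offset) => stmt_end + offset,
        None => source.len(),
    }
}

fn main() {
    let line = "    x += 1  # trailing comment\n";
    let stmt_end = "    x += 1".len();
    // Inserting at `stmt_end` would push the comment onto the new `return None`
    // line; inserting at the end of the physical line keeps both intact.
    assert_eq!(end_of_physical_line(line, stmt_end), line.len() - 1);
}
```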
@@ -251,7 +256,10 @@ fn implicit_return(checker: &mut Checker, stmt: &Stmt) {
|
||||
content.push_str(checker.stylist.line_ending().as_str());
|
||||
content.push_str(indent);
|
||||
content.push_str("return None");
|
||||
diagnostic.amend(Fix::insertion(content, stmt.end_location.unwrap()));
|
||||
diagnostic.amend(Fix::insertion(
|
||||
content,
|
||||
end_of_statement(stmt, checker.locator),
|
||||
));
|
||||
}
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
@@ -287,7 +295,10 @@ fn implicit_return(checker: &mut Checker, stmt: &Stmt) {
|
||||
content.push_str(checker.stylist.line_ending().as_str());
|
||||
content.push_str(indent);
|
||||
content.push_str("return None");
|
||||
diagnostic.amend(Fix::insertion(content, stmt.end_location.unwrap()));
|
||||
diagnostic.amend(Fix::insertion(
|
||||
content,
|
||||
end_of_statement(stmt, checker.locator),
|
||||
));
|
||||
}
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
|
||||
@@ -31,10 +31,10 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 27
|
||||
column: 15
|
||||
column: 24
|
||||
end_location:
|
||||
row: 27
|
||||
column: 15
|
||||
column: 24
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
@@ -48,10 +48,10 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 36
|
||||
column: 11
|
||||
column: 20
|
||||
end_location:
|
||||
row: 36
|
||||
column: 11
|
||||
column: 20
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
@@ -82,10 +82,10 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 52
|
||||
column: 15
|
||||
column: 24
|
||||
end_location:
|
||||
row: 52
|
||||
column: 15
|
||||
column: 24
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
@@ -99,10 +99,10 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 59
|
||||
column: 22
|
||||
column: 31
|
||||
end_location:
|
||||
row: 59
|
||||
column: 22
|
||||
column: 31
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
@@ -116,10 +116,10 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 66
|
||||
column: 21
|
||||
column: 30
|
||||
end_location:
|
||||
row: 66
|
||||
column: 21
|
||||
column: 30
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
@@ -235,9 +235,94 @@ expression: diagnostics
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 291
|
||||
column: 19
|
||||
column: 28
|
||||
end_location:
|
||||
row: 291
|
||||
column: 19
|
||||
column: 28
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
location:
|
||||
row: 300
|
||||
column: 8
|
||||
end_location:
|
||||
row: 301
|
||||
column: 21
|
||||
fix:
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 301
|
||||
column: 21
|
||||
end_location:
|
||||
row: 301
|
||||
column: 21
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
location:
|
||||
row: 305
|
||||
column: 8
|
||||
end_location:
|
||||
row: 306
|
||||
column: 21
|
||||
fix:
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 306
|
||||
column: 21
|
||||
end_location:
|
||||
row: 306
|
||||
column: 21
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
location:
|
||||
row: 310
|
||||
column: 8
|
||||
end_location:
|
||||
row: 311
|
||||
column: 21
|
||||
fix:
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 311
|
||||
column: 37
|
||||
end_location:
|
||||
row: 311
|
||||
column: 37
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
location:
|
||||
row: 315
|
||||
column: 8
|
||||
end_location:
|
||||
row: 316
|
||||
column: 21
|
||||
fix:
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 316
|
||||
column: 24
|
||||
end_location:
|
||||
row: 316
|
||||
column: 24
|
||||
parent: ~
|
||||
- kind:
|
||||
ImplicitReturn: ~
|
||||
location:
|
||||
row: 320
|
||||
column: 8
|
||||
end_location:
|
||||
row: 321
|
||||
column: 21
|
||||
fix:
|
||||
content: "\n return None"
|
||||
location:
|
||||
row: 322
|
||||
column: 33
|
||||
end_location:
|
||||
row: 322
|
||||
column: 33
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-self` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -28,7 +28,7 @@ pub struct Options {
|
||||
pub ignore_names: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub ignore_names: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use ruff_macros::{define_violation, derive_message_formats, CacheKey};
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Alias, Expr, Located};
|
||||
use schemars::JsonSchema;
|
||||
@@ -7,12 +7,11 @@ use serde::{Deserialize, Serialize};
|
||||
use crate::ast::types::{CallPath, Range};
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::settings::hashable::HashableHashMap;
|
||||
use crate::violation::Violation;
|
||||
|
||||
pub type Settings = HashableHashMap<String, ApiBan>;
|
||||
pub type Settings = FxHashMap<String, ApiBan>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub struct ApiBan {
|
||||
/// The message to display when the API is used.
|
||||
@@ -147,8 +146,7 @@ mod tests {
|
||||
msg: "Use typing_extensions.TypedDict instead.".to_string(),
|
||||
},
|
||||
),
|
||||
])
|
||||
.into(),
|
||||
]),
|
||||
..Default::default()
|
||||
},
|
||||
..Settings::for_rules(vec![Rule::BannedApi])
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
//! Rules from [flake8-tidy-imports](https://pypi.org/project/flake8-tidy-imports/).
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
pub mod options;
|
||||
|
||||
pub mod banned_api;
|
||||
pub mod relative_imports;
|
||||
|
||||
#[derive(Debug, Hash, Default)]
|
||||
#[derive(Debug, CacheKey, Default)]
|
||||
pub struct Settings {
|
||||
pub ban_relative_imports: relative_imports::Settings,
|
||||
pub banned_api: banned_api::Settings,
|
||||
|
||||
@@ -48,7 +48,7 @@ impl From<Options> for Settings {
|
||||
fn from(options: Options) -> Self {
|
||||
Self {
|
||||
ban_relative_imports: options.ban_relative_imports.unwrap_or(Strictness::Parents),
|
||||
banned_api: options.banned_api.unwrap_or_default().into(),
|
||||
banned_api: options.banned_api.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -57,7 +57,7 @@ impl From<Settings> for Options {
|
||||
fn from(settings: Settings) -> Self {
|
||||
Self {
|
||||
ban_relative_imports: Some(settings.ban_relative_imports),
|
||||
banned_api: Some(settings.banned_api.into()),
|
||||
banned_api: Some(settings.banned_api),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ use rustpython_parser::ast::{Stmt, StmtKind};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use ruff_macros::{define_violation, derive_message_formats, CacheKey};
|
||||
use ruff_python::identifiers::is_module_name;
|
||||
|
||||
use crate::ast::helpers::{create_stmt, from_relative_import, unparse_stmt};
|
||||
@@ -15,7 +15,7 @@ use crate::violation::{AutofixKind, Availability, Violation};
|
||||
|
||||
pub type Settings = Strictness;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema, Default)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, JsonSchema, Default)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Strictness {
|
||||
/// Ban imports that extend into the parent module or beyond.
|
||||
@@ -94,6 +94,10 @@ fn fix_banned_relative_import(
) -> Option<Fix> {
    // Only fix if the module path is known.
    if let Some(mut parts) = module_path.cloned() {
        if *level? >= parts.len() {
            return None;
        }

        // Remove relative level from module path.
        for _ in 0..*level? {
            parts.pop();

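To make the new guard and the pop loop above concrete, here is a minimal sketch (hypothetical helper and sample module path, not the real `fix_banned_relative_import`) of the level-stripping step: bail out when the relative level would climb past the known module path, otherwise drop `level` trailing components.

```rust
// Hypothetical helper mirroring the hunk above.
fn strip_relative_level(mut parts: Vec<String>, level: usize) -> Option<Vec<String>> {
    // Same guard as in the hunk: a level that reaches past the module path
    // cannot be resolved, so there is nothing to fix.
    if level >= parts.len() {
        return None;
    }
    // Remove `level` trailing components, mirroring the `parts.pop()` loop.
    for _ in 0..level {
        parts.pop();
    }
    Some(parts)
}

fn main() {
    // "app" is a made-up leaf module inside the my_package.sublib package seen
    // in the snapshots below.
    let parts = vec!["my_package".into(), "sublib".into(), "app".into()];
    assert_eq!(
        strip_relative_level(parts, 1),
        Some(vec!["my_package".to_string(), "sublib".to_string()])
    );
}
```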
@@ -10,50 +10,25 @@ expression: diagnostics
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
column: 55
|
||||
fix:
|
||||
content: "from my_package.sublib.protocol import commands, definitions, responses"
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
column: 55
|
||||
column: 23
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
RelativeImports:
|
||||
strictness: parents
|
||||
location:
|
||||
row: 5
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
row: 6
|
||||
column: 55
|
||||
fix:
|
||||
content: "from my_package.sublib.protocol import commands, definitions, responses"
|
||||
location:
|
||||
row: 5
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
column: 55
|
||||
parent: ~
|
||||
- kind:
|
||||
RelativeImports:
|
||||
strictness: parents
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
column: 55
|
||||
fix:
|
||||
content: "from my_package.sublib.protocol import commands, definitions, responses"
|
||||
location:
|
||||
row: 5
|
||||
column: 0
|
||||
end_location:
|
||||
row: 5
|
||||
row: 6
|
||||
column: 55
|
||||
parent: ~
|
||||
- kind:
|
||||
@@ -64,32 +39,68 @@ expression: diagnostics
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 55
|
||||
fix:
|
||||
content: "from my_package.sublib.protocol import commands, definitions, responses"
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 55
|
||||
parent: ~
|
||||
- kind:
|
||||
RelativeImports:
|
||||
strictness: parents
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 55
|
||||
fix:
|
||||
content: "from my_package.sublib.protocol import commands, definitions, responses"
|
||||
location:
|
||||
row: 6
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
column: 55
|
||||
parent: ~
|
||||
- kind:
|
||||
RelativeImports:
|
||||
strictness: parents
|
||||
location:
|
||||
row: 7
|
||||
column: 0
|
||||
end_location:
|
||||
row: 7
|
||||
column: 28
|
||||
fix:
|
||||
content: from my_package.sublib.server import example
|
||||
location:
|
||||
row: 6
|
||||
row: 7
|
||||
column: 0
|
||||
end_location:
|
||||
row: 6
|
||||
row: 7
|
||||
column: 28
|
||||
parent: ~
|
||||
- kind:
|
||||
RelativeImports:
|
||||
strictness: parents
|
||||
location:
|
||||
row: 7
|
||||
row: 8
|
||||
column: 0
|
||||
end_location:
|
||||
row: 7
|
||||
row: 8
|
||||
column: 21
|
||||
fix:
|
||||
content: from my_package.sublib import server
|
||||
location:
|
||||
row: 7
|
||||
row: 8
|
||||
column: 0
|
||||
end_location:
|
||||
row: 7
|
||||
row: 8
|
||||
column: 21
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-type-checking` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -36,7 +36,7 @@ pub struct Options {
|
||||
pub exempt_modules: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub strict: bool,
|
||||
pub exempt_modules: Vec<String>,
|
||||
|
||||
@@ -1,6 +1,38 @@
|
||||
---
|
||||
source: src/rules/flake8_type_checking/mod.rs
|
||||
source: crates/ruff/src/rules/flake8_type_checking/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
[]
|
||||
- kind:
|
||||
RuntimeImportInTypeCheckingBlock:
|
||||
full_name: typing.List
|
||||
location:
|
||||
row: 4
|
||||
column: 23
|
||||
end_location:
|
||||
row: 4
|
||||
column: 27
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
RuntimeImportInTypeCheckingBlock:
|
||||
full_name: typing.Sequence
|
||||
location:
|
||||
row: 4
|
||||
column: 29
|
||||
end_location:
|
||||
row: 4
|
||||
column: 37
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
RuntimeImportInTypeCheckingBlock:
|
||||
full_name: typing.Set
|
||||
location:
|
||||
row: 4
|
||||
column: 39
|
||||
end_location:
|
||||
row: 4
|
||||
column: 42
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `flake8-unused-arguments` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -22,7 +22,7 @@ pub struct Options {
|
||||
pub ignore_variadic_names: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub ignore_variadic_names: bool,
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ define_violation!(
|
||||
impl Violation for PathlibAbspath {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.abspath` should be replaced by `.resolve()`")
|
||||
format!("`os.path.abspath()` should be replaced by `Path.resolve()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,7 @@ define_violation!(
|
||||
impl Violation for PathlibChmod {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.chmod` should be replaced by `.chmod()`")
|
||||
format!("`os.chmod()` should be replaced by `Path.chmod()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ define_violation!(
|
||||
impl Violation for PathlibMakedirs {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.makedirs` should be replaced by `.mkdir(parents=True)`")
|
||||
format!("`os.makedirs()` should be replaced by `Path.mkdir(parents=True)`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,7 +42,7 @@ define_violation!(
|
||||
impl Violation for PathlibMkdir {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.mkdir` should be replaced by `.mkdir()`")
|
||||
format!("`os.mkdir()` should be replaced by `Path.mkdir()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,7 +53,7 @@ define_violation!(
|
||||
impl Violation for PathlibRename {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.rename` should be replaced by `.rename()`")
|
||||
format!("`os.rename()` should be replaced by `Path.rename()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -64,7 +64,7 @@ define_violation!(
|
||||
impl Violation for PathlibReplace {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.replace`should be replaced by `.replace()`")
|
||||
format!("`os.replace()` should be replaced by `Path.replace()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ define_violation!(
|
||||
impl Violation for PathlibRmdir {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.rmdir` should be replaced by `.rmdir()`")
|
||||
format!("`os.rmdir()` should be replaced by `Path.rmdir()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,7 +86,7 @@ define_violation!(
|
||||
impl Violation for PathlibRemove {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.remove` should be replaced by `.unlink()`")
|
||||
format!("`os.remove()` should be replaced by `Path.unlink()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ define_violation!(
|
||||
impl Violation for PathlibUnlink {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.unlink` should be replaced by `.unlink()`")
|
||||
format!("`os.unlink()` should be replaced by `Path.unlink()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ define_violation!(
|
||||
impl Violation for PathlibGetcwd {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.getcwd` should be replaced by `Path.cwd()`")
|
||||
format!("`os.getcwd()` should be replaced by `Path.cwd()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,7 +119,7 @@ define_violation!(
|
||||
impl Violation for PathlibExists {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.exists` should be replaced by `.exists()`")
|
||||
format!("`os.path.exists()` should be replaced by `Path.exists()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,7 +130,7 @@ define_violation!(
|
||||
impl Violation for PathlibExpanduser {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.expanduser` should be replaced by `.expanduser()`")
|
||||
format!("`os.path.expanduser()` should be replaced by `Path.expanduser()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -141,7 +141,7 @@ define_violation!(
|
||||
impl Violation for PathlibIsDir {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.isdir` should be replaced by `.is_dir()`")
|
||||
format!("`os.path.isdir()` should be replaced by `Path.is_dir()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -152,7 +152,7 @@ define_violation!(
|
||||
impl Violation for PathlibIsFile {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.isfile` should be replaced by `.is_file()`")
|
||||
format!("`os.path.isfile()` should be replaced by `Path.is_file()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -163,7 +163,7 @@ define_violation!(
|
||||
impl Violation for PathlibIsLink {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.islink` should be replaced by `.is_symlink()`")
|
||||
format!("`os.path.islink()` should be replaced by `Path.is_symlink()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -174,7 +174,7 @@ define_violation!(
|
||||
impl Violation for PathlibReadlink {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.readlink` should be replaced by `.readlink()`")
|
||||
format!("`os.readlink()` should be replaced by `Path.readlink()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -185,7 +185,9 @@ define_violation!(
|
||||
impl Violation for PathlibStat {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.stat` should be replaced by `.stat()` or `.owner()` or `.group()`")
|
||||
format!(
|
||||
"`os.stat()` should be replaced by `Path.stat()`, `Path.owner()`, or `Path.group()`"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -196,7 +198,7 @@ define_violation!(
|
||||
impl Violation for PathlibIsAbs {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.isabs` should be replaced by `.is_absolute()`")
|
||||
format!("`os.path.isabs()` should be replaced by `Path.is_absolute()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -207,7 +209,7 @@ define_violation!(
|
||||
impl Violation for PathlibJoin {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.join` should be replaced by foo_path / \"bar\"")
|
||||
format!("`os.path.join()` should be replaced by `Path` with `/` operator")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -218,7 +220,7 @@ define_violation!(
|
||||
impl Violation for PathlibBasename {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.basename` should be replaced by `.name`")
|
||||
format!("`os.path.basename()` should be replaced by `Path.name`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -229,7 +231,7 @@ define_violation!(
|
||||
impl Violation for PathlibDirname {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.dirname` should be replaced by `.parent`")
|
||||
format!("`os.path.dirname()` should be replaced by `Path.parent`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -240,7 +242,7 @@ define_violation!(
|
||||
impl Violation for PathlibSamefile {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.samefile` should be replaced by `.samefile()`")
|
||||
format!("`os.path.samefile()` should be replaced by `Path.samefile()`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -251,7 +253,7 @@ define_violation!(
|
||||
impl Violation for PathlibSplitext {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`os.path.splitext` should be replaced by `.suffix`")
|
||||
format!("`os.path.splitext()` should be replaced by `Path.suffix`")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,7 +264,7 @@ define_violation!(
|
||||
impl Violation for PathlibOpen {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`open(\"foo\")` should be replaced by `Path(\"foo\").open()`")
|
||||
format!("`open()` should be replaced by `Path.open()`")
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use log::debug;
|
||||
use ruff_macros::CacheKey;
|
||||
use ruff_python::sys::KNOWN_STANDARD_LIBRARY;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -12,7 +13,17 @@ use super::types::{ImportBlock, Importable};
|
||||
use crate::settings::types::PythonVersion;
|
||||
|
||||
#[derive(
|
||||
Debug, PartialOrd, Ord, PartialEq, Eq, Clone, Serialize, Deserialize, JsonSchema, Hash, EnumIter,
|
||||
Debug,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
PartialEq,
|
||||
Eq,
|
||||
Clone,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
JsonSchema,
|
||||
CacheKey,
|
||||
EnumIter,
|
||||
)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum ImportType {
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::categorize::ImportType;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum RelativeImportsOrder {
|
||||
/// Place "closer" imports (fewer `.` characters, most local) before
|
||||
@@ -265,7 +265,7 @@ pub struct Options {
|
||||
pub forced_separate: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct Settings {
|
||||
pub required_imports: BTreeSet<String>,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `mccabe` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -25,7 +25,7 @@ pub struct Options {
|
||||
pub max_complexity: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub max_complexity: usize,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//! Settings for the `pep8-naming` plugin.
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -19,10 +19,6 @@ const IGNORE_NAMES: [&str; 12] = [
|
||||
"maxDiff",
|
||||
];
|
||||
|
||||
const CLASSMETHOD_DECORATORS: [&str; 1] = ["classmethod"];
|
||||
|
||||
const STATICMETHOD_DECORATORS: [&str; 1] = ["staticmethod"];
|
||||
|
||||
#[derive(
|
||||
Debug, PartialEq, Eq, Serialize, Deserialize, Default, ConfigurationOptions, JsonSchema,
|
||||
)]
|
||||
@@ -42,34 +38,38 @@ pub struct Options {
|
||||
/// A list of names to ignore when considering `pep8-naming` violations.
|
||||
pub ignore_names: Option<Vec<String>>,
|
||||
#[option(
|
||||
default = r#"["classmethod"]"#,
|
||||
default = r#"[]"#,
|
||||
value_type = "list[str]",
|
||||
example = r#"
|
||||
# Allow Pydantic's `@validator` decorator to trigger class method treatment.
|
||||
classmethod-decorators = ["classmethod", "pydantic.validator"]
|
||||
classmethod-decorators = ["pydantic.validator"]
|
||||
"#
|
||||
)]
|
||||
/// A list of decorators that, when applied to a method, indicate that the
|
||||
/// method should be treated as a class method. For example, Ruff will
|
||||
/// expect that any method decorated by a decorator in this list takes a
|
||||
/// `cls` argument as its first argument.
|
||||
/// method should be treated as a class method (in addition to the builtin
|
||||
/// `@classmethod`).
|
||||
///
|
||||
/// For example, Ruff will expect that any method decorated by a decorator
|
||||
/// in this list takes a `cls` argument as its first argument.
|
||||
pub classmethod_decorators: Option<Vec<String>>,
|
||||
#[option(
|
||||
default = r#"["staticmethod"]"#,
|
||||
default = r#"[]"#,
|
||||
value_type = "list[str]",
|
||||
example = r#"
|
||||
# Allow a shorthand alias, `@stcmthd`, to trigger static method treatment.
|
||||
staticmethod-decorators = ["staticmethod", "stcmthd"]
|
||||
staticmethod-decorators = ["stcmthd"]
|
||||
"#
|
||||
)]
|
||||
/// A list of decorators that, when applied to a method, indicate that the
|
||||
/// method should be treated as a static method. For example, Ruff will
|
||||
/// expect that any method decorated by a decorator in this list has no
|
||||
/// `self` or `cls` argument.
|
||||
/// method should be treated as a static method (in addition to the builtin
|
||||
/// `@staticmethod`).
|
||||
///
|
||||
/// For example, Ruff will expect that any method decorated by a decorator
|
||||
/// in this list has no `self` or `cls` argument.
|
||||
pub staticmethod_decorators: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub ignore_names: Vec<String>,
|
||||
pub classmethod_decorators: Vec<String>,
|
||||
@@ -80,8 +80,8 @@ impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
ignore_names: IGNORE_NAMES.map(String::from).to_vec(),
|
||||
classmethod_decorators: CLASSMETHOD_DECORATORS.map(String::from).to_vec(),
|
||||
staticmethod_decorators: STATICMETHOD_DECORATORS.map(String::from).to_vec(),
|
||||
classmethod_decorators: Vec::new(),
|
||||
staticmethod_decorators: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -92,12 +92,8 @@ impl From<Options> for Settings {
|
||||
ignore_names: options
|
||||
.ignore_names
|
||||
.unwrap_or_else(|| IGNORE_NAMES.map(String::from).to_vec()),
|
||||
classmethod_decorators: options
|
||||
.classmethod_decorators
|
||||
.unwrap_or_else(|| CLASSMETHOD_DECORATORS.map(String::from).to_vec()),
|
||||
staticmethod_decorators: options
|
||||
.staticmethod_decorators
|
||||
.unwrap_or_else(|| STATICMETHOD_DECORATORS.map(String::from).to_vec()),
|
||||
classmethod_decorators: options.classmethod_decorators.unwrap_or_default(),
|
||||
staticmethod_decorators: options.staticmethod_decorators.unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,3 +156,71 @@ pub fn is_op_token(token: &Tok) -> bool {
|
||||
| Tok::Colon
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_skip_comment_token(token: &Tok) -> bool {
|
||||
matches!(
|
||||
token,
|
||||
Tok::Newline | Tok::Indent | Tok::Dedent | Tok::NonLogicalNewline | Tok::Comment { .. }
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_soft_keyword_token(token: &Tok) -> bool {
|
||||
matches!(token, Tok::Match | Tok::Case)
|
||||
}
|
||||
|
||||
pub fn is_arithmetic_token(token: &Tok) -> bool {
|
||||
matches!(
|
||||
token,
|
||||
Tok::DoubleStar | Tok::Star | Tok::Plus | Tok::Minus | Tok::Slash | Tok::At
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_ws_optional_token(token: &Tok) -> bool {
|
||||
is_arithmetic_token(token)
|
||||
|| matches!(
|
||||
token,
|
||||
Tok::CircumFlex
|
||||
| Tok::Amper
|
||||
| Tok::Vbar
|
||||
| Tok::LeftShift
|
||||
| Tok::RightShift
|
||||
| Tok::Percent
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_ws_needed_token(token: &Tok) -> bool {
|
||||
matches!(
|
||||
token,
|
||||
Tok::DoubleStarEqual
|
||||
| Tok::StarEqual
|
||||
| Tok::SlashEqual
|
||||
| Tok::DoubleSlashEqual
|
||||
| Tok::PlusEqual
|
||||
| Tok::MinusEqual
|
||||
| Tok::NotEqual
|
||||
| Tok::Less
|
||||
| Tok::Greater
|
||||
| Tok::PercentEqual
|
||||
| Tok::CircumflexEqual
|
||||
| Tok::AmperEqual
|
||||
| Tok::VbarEqual
|
||||
| Tok::EqEqual
|
||||
| Tok::LessEqual
|
||||
| Tok::GreaterEqual
|
||||
| Tok::LeftShiftEqual
|
||||
| Tok::RightShiftEqual
|
||||
| Tok::Equal
|
||||
| Tok::And
|
||||
| Tok::Or
|
||||
| Tok::In
|
||||
| Tok::Is
|
||||
| Tok::Rarrow
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_unary_token(token: &Tok) -> bool {
|
||||
matches!(
|
||||
token,
|
||||
Tok::Plus | Tok::Minus | Tok::Star | Tok::DoubleStar | Tok::RightShift
|
||||
)
|
||||
}
|
||||
|
||||
@@ -13,12 +13,13 @@ mod tests {
|
||||
use insta::assert_yaml_snapshot;
|
||||
use test_case::test_case;
|
||||
|
||||
use super::settings::Settings;
|
||||
use crate::registry::Rule;
|
||||
use crate::settings;
|
||||
use crate::source_code::LineEnding;
|
||||
use crate::test::test_path;
|
||||
|
||||
use super::settings::Settings;
|
||||
|
||||
#[test_case(Rule::AmbiguousClassName, Path::new("E742.py"))]
|
||||
#[test_case(Rule::AmbiguousFunctionName, Path::new("E743.py"))]
|
||||
#[test_case(Rule::AmbiguousVariableName, Path::new("E741.py"))]
|
||||
@@ -95,12 +96,20 @@ mod tests {
|
||||
#[test_case(Rule::TabAfterOperator, Path::new("E22.py"))]
|
||||
#[test_case(Rule::TabBeforeKeyword, Path::new("E27.py"))]
|
||||
#[test_case(Rule::TabBeforeOperator, Path::new("E22.py"))]
|
||||
#[test_case(Rule::MissingWhitespaceAroundOperator, Path::new("E22.py"))]
|
||||
#[test_case(Rule::MissingWhitespaceAroundArithmeticOperator, Path::new("E22.py"))]
|
||||
#[test_case(
|
||||
Rule::MissingWhitespaceAroundBitwiseOrShiftOperator,
|
||||
Path::new("E22.py")
|
||||
)]
|
||||
#[test_case(Rule::MissingWhitespaceAroundModuloOperator, Path::new("E22.py"))]
|
||||
#[test_case(Rule::TooFewSpacesBeforeInlineComment, Path::new("E26.py"))]
|
||||
#[test_case(Rule::UnexpectedIndentation, Path::new("E11.py"))]
|
||||
#[test_case(Rule::UnexpectedIndentationComment, Path::new("E11.py"))]
|
||||
#[test_case(Rule::WhitespaceAfterOpenBracket, Path::new("E20.py"))]
|
||||
#[test_case(Rule::WhitespaceBeforeCloseBracket, Path::new("E20.py"))]
|
||||
#[test_case(Rule::WhitespaceBeforePunctuation, Path::new("E20.py"))]
|
||||
#[test_case(Rule::WhitespaceBeforeParameters, Path::new("E21.py"))]
|
||||
#[test_case(
|
||||
Rule::UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
Path::new("E25.py")
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#![allow(dead_code)]
|
||||
#![allow(dead_code, unused_imports, unused_variables)]
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#![allow(dead_code)]
|
||||
#![allow(dead_code, unused_imports, unused_variables)]
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#![allow(dead_code, unused_imports)]
|
||||
#![allow(dead_code, unused_imports, unused_variables)]
|
||||
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
@@ -0,0 +1,196 @@
|
||||
#![allow(dead_code, unused_imports, unused_variables)]
|
||||
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::registry::DiagnosticKind;
|
||||
use crate::rules::pycodestyle::helpers::{
|
||||
is_arithmetic_token, is_keyword_token, is_op_token, is_singleton_token, is_skip_comment_token,
|
||||
is_soft_keyword_token, is_unary_token, is_ws_needed_token, is_ws_optional_token,
|
||||
};
|
||||
use crate::violation::Violation;
|
||||
|
||||
// E225
|
||||
define_violation!(
|
||||
pub struct MissingWhitespaceAroundOperator;
|
||||
);
|
||||
impl Violation for MissingWhitespaceAroundOperator {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Missing whitespace around operator")
|
||||
}
|
||||
}
|
||||
|
||||
// E226
|
||||
define_violation!(
|
||||
pub struct MissingWhitespaceAroundArithmeticOperator;
|
||||
);
|
||||
impl Violation for MissingWhitespaceAroundArithmeticOperator {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Missing whitespace around arithmetic operator")
|
||||
}
|
||||
}
|
||||
|
||||
// E227
|
||||
define_violation!(
|
||||
pub struct MissingWhitespaceAroundBitwiseOrShiftOperator;
|
||||
);
|
||||
impl Violation for MissingWhitespaceAroundBitwiseOrShiftOperator {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("Missing whitespace around bitwise or shift operator")
|
||||
}
|
||||
}
|
||||
|
||||
// E228
|
||||
define_violation!(
    pub struct MissingWhitespaceAroundModuloOperator;
);
impl Violation for MissingWhitespaceAroundModuloOperator {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("Missing whitespace around modulo operator")
    }
}

/// E225, E226, E227, E228
#[cfg(feature = "logical_lines")]
#[allow(clippy::if_same_then_else)]
pub fn missing_whitespace_around_operator(
    tokens: &[(Location, &Tok, Location)],
) -> Vec<(Location, DiagnosticKind)> {
    let mut diagnostics = vec![];

    let mut needs_space_main: Option<bool> = Some(false);
    let mut needs_space_aux: Option<bool> = None;
    let mut prev_end_aux: Option<&Location> = None;
    let mut parens = 0;
    let mut prev_type: Option<&Tok> = None;
    let mut prev_end: Option<&Location> = None;

    for (start, token, end) in tokens {
        if is_skip_comment_token(token) {
            continue;
        }
        if **token == Tok::Lpar || **token == Tok::Lambda {
            parens += 1;
        } else if **token == Tok::Rpar {
            parens -= 1;
        }
        let needs_space = (needs_space_main.is_some() && needs_space_main.unwrap())
            || needs_space_aux.is_some()
            || prev_end_aux.is_some();
        if needs_space {
            if Some(start) != prev_end {
                if !(needs_space_main.is_some() && needs_space_main.unwrap())
                    && (needs_space_aux.is_none() || !needs_space_aux.unwrap())
                {
                    diagnostics.push((
                        *(prev_end_aux.unwrap()),
                        MissingWhitespaceAroundOperator.into(),
                    ));
                }
                needs_space_main = Some(false);
                needs_space_aux = None;
                prev_end_aux = None;
            } else if **token == Tok::Greater
                && (prev_type == Some(&Tok::Less) || prev_type == Some(&Tok::Minus))
            {
                // Tolerate the "<>" operator, even if running Python 3
                // Deal with Python 3's annotated return value "->"
            } else if prev_type == Some(&Tok::Slash)
                && (**token == Tok::Comma || **token == Tok::Rpar || **token == Tok::Colon)
                || (prev_type == Some(&Tok::Rpar) && **token == Tok::Colon)
            {
                // Tolerate the "/" operator in function definition
                // For more info see PEP570
            } else {
                if (needs_space_main.is_some() && needs_space_main.unwrap())
                    || (needs_space_aux.is_some() && needs_space_aux.unwrap())
                {
                    diagnostics
                        .push((*(prev_end.unwrap()), MissingWhitespaceAroundOperator.into()));
                } else if prev_type != Some(&Tok::DoubleStar) {
                    if prev_type == Some(&Tok::Percent) {
                        diagnostics.push((
                            *(prev_end_aux.unwrap()),
                            MissingWhitespaceAroundModuloOperator.into(),
                        ));
                    } else if !is_arithmetic_token(prev_type.unwrap()) {
                        diagnostics.push((
                            *(prev_end_aux.unwrap()),
                            MissingWhitespaceAroundBitwiseOrShiftOperator.into(),
                        ));
                    } else {
                        diagnostics.push((
                            *(prev_end_aux.unwrap()),
                            MissingWhitespaceAroundArithmeticOperator.into(),
                        ));
                    }
                }
                needs_space_main = Some(false);
                needs_space_aux = None;
                prev_end_aux = None;
            }
        } else if (is_op_token(token) || matches!(token, Tok::Name { .. })) && prev_end.is_some() {
            if **token == Tok::Equal && parens > 0 {
                // Allow keyword args or defaults: foo(bar=None).
            } else if is_ws_needed_token(token) {
                needs_space_main = Some(true);
                needs_space_aux = None;
                prev_end_aux = None;
            } else if is_unary_token(token) {
                // Check if the operator is used as a binary operator
                // Allow unary operators: -123, -x, +1.
                // Allow argument unpacking: foo(*args, **kwargs)
                if (prev_type.is_some()
                    && is_op_token(prev_type.unwrap())
                    && (prev_type == Some(&Tok::Rpar)
                        || prev_type == Some(&Tok::Rsqb)
                        || prev_type == Some(&Tok::Rbrace)))
                    || (!is_op_token(prev_type.unwrap()) && !is_keyword_token(prev_type.unwrap()))
                        && (!is_soft_keyword_token(prev_type.unwrap()))
                {
                    needs_space_main = None;
                    needs_space_aux = None;
                    prev_end_aux = None;
                }
            } else if is_ws_optional_token(token) {
                needs_space_main = None;
                needs_space_aux = None;
                prev_end_aux = None;
            }

            if needs_space_main.is_none() {
                // Surrounding space is optional, but ensure that
                // trailing space matches opening space
                needs_space_main = None;
                prev_end_aux = prev_end;
                needs_space_aux = Some(Some(start) != prev_end_aux);
            } else if needs_space_main.is_some()
                && needs_space_main.unwrap()
                && Some(start) == prev_end_aux
            {
                // A needed opening space was not found
                diagnostics.push((*(prev_end.unwrap()), MissingWhitespaceAroundOperator.into()));
                needs_space_main = Some(false);
                needs_space_aux = None;
                prev_end_aux = None;
            }
        }
        prev_type = Some(*token);
        prev_end = Some(end);
    }

    diagnostics
}

#[cfg(not(feature = "logical_lines"))]
pub fn missing_whitespace_around_operator(
    _tokens: &[(Location, &Tok, Location)],
) -> Vec<(Location, DiagnosticKind)> {
    vec![]
}
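For orientation (not part of the diff): the violation structs above map onto pycodestyle's E225/E226/E227/E228 codes, gated behind the `logical_lines` feature. A minimal sketch of Python code each rule is meant to flag, assuming standard pycodestyle semantics; the example identifiers are hypothetical, not the repository's fixture:

```python
x=1                     # E225: missing whitespace around operator (`=`)
submitted +=1           # E225: missing whitespace around operator (`+=`)
hypot2 = x*x + y*y      # E226: missing whitespace around arithmetic operator (`*`)
flags = flags<<2        # E227: missing whitespace around bitwise or shift operator (`<<`)
remainder = value%10    # E228: missing whitespace around modulo operator (`%`)

x = 1                   # OK: whitespace on both sides of the operator
```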
@@ -21,7 +21,6 @@ pub use indentation::{
    NoIndentedBlock, NoIndentedBlockComment, OverIndented, UnexpectedIndentation,
    UnexpectedIndentationComment,
};

pub use indentation_contains_tabs::{indentation_contains_tabs, IndentationContainsTabs};
pub use invalid_escape_sequence::{invalid_escape_sequence, InvalidEscapeSequence};
pub use lambda_assignment::{lambda_assignment, LambdaAssignment};
@@ -30,6 +29,11 @@ pub use literal_comparisons::{literal_comparisons, NoneComparison, TrueFalseComp
pub use missing_whitespace_after_keyword::{
    missing_whitespace_after_keyword, MissingWhitespaceAfterKeyword,
};
pub use missing_whitespace_around_operator::{
    missing_whitespace_around_operator, MissingWhitespaceAroundArithmeticOperator,
    MissingWhitespaceAroundBitwiseOrShiftOperator, MissingWhitespaceAroundModuloOperator,
    MissingWhitespaceAroundOperator,
};
pub use mixed_spaces_and_tabs::{mixed_spaces_and_tabs, MixedSpacesAndTabs};
pub use no_newline_at_end_of_file::{no_newline_at_end_of_file, NoNewLineAtEndOfFile};
pub use not_tests::{not_tests, NotInTest, NotIsTest};
@@ -45,15 +49,15 @@ pub use whitespace_around_keywords::{
    whitespace_around_keywords, MultipleSpacesAfterKeyword, MultipleSpacesBeforeKeyword,
    TabAfterKeyword, TabBeforeKeyword,
};
pub use whitespace_before_comment::{
    whitespace_before_comment, MultipleLeadingHashesForBlockComment, NoSpaceAfterBlockComment,
    NoSpaceAfterInlineComment, TooFewSpacesBeforeInlineComment,
};

pub use whitespace_around_named_parameter_equals::{
    whitespace_around_named_parameter_equals, MissingWhitespaceAroundParameterEquals,
    UnexpectedSpacesAroundKeywordParameterEquals,
};
pub use whitespace_before_comment::{
    whitespace_before_comment, MultipleLeadingHashesForBlockComment, NoSpaceAfterBlockComment,
    NoSpaceAfterInlineComment, TooFewSpacesBeforeInlineComment,
};
pub use whitespace_before_parameters::{whitespace_before_parameters, WhitespaceBeforeParameters};

mod ambiguous_class_name;
mod ambiguous_function_name;
@@ -71,6 +75,7 @@ mod lambda_assignment;
mod line_too_long;
mod literal_comparisons;
mod missing_whitespace_after_keyword;
mod missing_whitespace_around_operator;
mod mixed_spaces_and_tabs;
mod no_newline_at_end_of_file;
mod not_tests;
@@ -80,3 +85,4 @@ mod type_comparison;
mod whitespace_around_keywords;
mod whitespace_around_named_parameter_equals;
mod whitespace_before_comment;
mod whitespace_before_parameters;

@@ -1,4 +1,4 @@
#![allow(dead_code)]
#![allow(dead_code, unused_imports, unused_variables)]

use once_cell::sync::Lazy;
use regex::Regex;

@@ -1,4 +1,4 @@
#![allow(dead_code)]
#![allow(dead_code, unused_imports, unused_variables)]

use once_cell::sync::Lazy;
use regex::Regex;

@@ -1,4 +1,4 @@
#![allow(dead_code)]
#![allow(dead_code, unused_imports, unused_variables)]

use rustpython_parser::ast::Location;
use rustpython_parser::Tok;

@@ -1,4 +1,4 @@
#![allow(dead_code)]
#![allow(dead_code, unused_imports, unused_variables)]

use ruff_macros::{define_violation, derive_message_formats};
use rustpython_parser::ast::Location;

@@ -0,0 +1,77 @@
#![allow(dead_code, unused_imports, unused_variables)]

use rustpython_parser::ast::Location;
use rustpython_parser::Tok;

use ruff_macros::{define_violation, derive_message_formats};

use crate::ast::types::Range;
use crate::fix::Fix;
use crate::registry::Diagnostic;
use crate::rules::pycodestyle::helpers::{is_keyword_token, is_op_token, is_soft_keyword_token};
use crate::violation::AlwaysAutofixableViolation;

define_violation!(
    pub struct WhitespaceBeforeParameters {
        pub bracket: String,
    }
);

impl AlwaysAutofixableViolation for WhitespaceBeforeParameters {
    #[derive_message_formats]
    fn message(&self) -> String {
        let WhitespaceBeforeParameters { bracket } = self;
        format!("Whitespace before {bracket}")
    }

    fn autofix_title(&self) -> String {
        let WhitespaceBeforeParameters { bracket } = self;
        format!("Removed whitespace before {bracket}")
    }
}

/// E211
#[cfg(feature = "logical_lines")]
pub fn whitespace_before_parameters(
    tokens: &[(Location, &Tok, Location)],
    autofix: bool,
) -> Vec<Diagnostic> {
    let mut diagnostics = vec![];
    let (_, mut prev_token, mut prev_end) = tokens.first().unwrap();
    for (idx, (start, tok, end)) in tokens.iter().enumerate() {
        if is_op_token(tok)
            && (**tok == Tok::Lpar || **tok == Tok::Lsqb)
            && *start != prev_end
            && (matches!(prev_token, Tok::Name { .. })
                || matches!(prev_token, Tok::Rpar | Tok::Rsqb | Tok::Rbrace))
            && (idx < 2 || *(tokens[idx - 2].1) != Tok::Class)
            && !is_keyword_token(tok)
            && !is_soft_keyword_token(tok)
        {
            let start = Location::new(prev_end.row(), prev_end.column());
            let end = Location::new(end.row(), end.column() - 1);

            let kind: WhitespaceBeforeParameters = WhitespaceBeforeParameters {
                bracket: tok.to_string(),
            };

            let mut diagnostic = Diagnostic::new(kind, Range::new(start, end));

            if autofix {
                diagnostic.amend(Fix::deletion(start, end));
            }
            diagnostics.push(diagnostic);
        }
        prev_token = *tok;
        prev_end = *end;
    }
    diagnostics
}

#[cfg(not(feature = "logical_lines"))]
pub fn whitespace_before_parameters(
    _tokens: &[(Location, &Tok, Location)],
    _autofix: bool,
) -> Vec<Diagnostic> {
    vec![]
}
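Again for orientation only: E211 flags whitespace between a name (or a closing bracket) and the `(` or `[` that follows it, and the autofix deletes the gap. A hedged illustration of the behavior encoded above (hypothetical snippet, not the repository's fixture):

```python
spam (1)            # E211: whitespace before '(' -> autofixed to `spam(1)`
dict ['key']        # E211: whitespace before '[' -> autofixed to `dict['key']`
dict['key'] ['x']   # E211: whitespace before the second '['

spam(1)             # OK
class Foo (Bar):    # tolerated: the token two positions back is `class`
    pass
```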
@@ -1,6 +1,6 @@
//! Settings for the `pycodestyle` plugin.

use ruff_macros::ConfigurationOptions;
use ruff_macros::{CacheKey, ConfigurationOptions};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

@@ -32,7 +32,7 @@ pub struct Options {
    pub ignore_overlong_task_comments: Option<bool>,
}

#[derive(Debug, Default, Hash)]
#[derive(Debug, Default, CacheKey)]
pub struct Settings {
    pub max_doc_length: Option<usize>,
    pub ignore_overlong_task_comments: bool,

@@ -0,0 +1,77 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pycodestyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
WhitespaceBeforeParameters:
|
||||
bracket: "'('"
|
||||
location:
|
||||
row: 2
|
||||
column: 4
|
||||
end_location:
|
||||
row: 2
|
||||
column: 5
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 2
|
||||
column: 4
|
||||
end_location:
|
||||
row: 2
|
||||
column: 5
|
||||
parent: ~
|
||||
- kind:
|
||||
WhitespaceBeforeParameters:
|
||||
bracket: "'['"
|
||||
location:
|
||||
row: 4
|
||||
column: 4
|
||||
end_location:
|
||||
row: 4
|
||||
column: 5
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 4
|
||||
column: 4
|
||||
end_location:
|
||||
row: 4
|
||||
column: 5
|
||||
parent: ~
|
||||
- kind:
|
||||
WhitespaceBeforeParameters:
|
||||
bracket: "'['"
|
||||
location:
|
||||
row: 4
|
||||
column: 19
|
||||
end_location:
|
||||
row: 4
|
||||
column: 20
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 4
|
||||
column: 19
|
||||
end_location:
|
||||
row: 4
|
||||
column: 20
|
||||
parent: ~
|
||||
- kind:
|
||||
WhitespaceBeforeParameters:
|
||||
bracket: "'['"
|
||||
location:
|
||||
row: 6
|
||||
column: 11
|
||||
end_location:
|
||||
row: 6
|
||||
column: 12
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 6
|
||||
column: 11
|
||||
end_location:
|
||||
row: 6
|
||||
column: 12
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,215 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pycodestyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 54
|
||||
column: 12
|
||||
end_location:
|
||||
row: 54
|
||||
column: 12
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 58
|
||||
column: 3
|
||||
end_location:
|
||||
row: 58
|
||||
column: 3
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 60
|
||||
column: 7
|
||||
end_location:
|
||||
row: 60
|
||||
column: 7
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 62
|
||||
column: 11
|
||||
end_location:
|
||||
row: 62
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 64
|
||||
column: 9
|
||||
end_location:
|
||||
row: 64
|
||||
column: 9
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 66
|
||||
column: 8
|
||||
end_location:
|
||||
row: 66
|
||||
column: 8
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 68
|
||||
column: 14
|
||||
end_location:
|
||||
row: 68
|
||||
column: 14
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 70
|
||||
column: 11
|
||||
end_location:
|
||||
row: 70
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 72
|
||||
column: 14
|
||||
end_location:
|
||||
row: 72
|
||||
column: 14
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 74
|
||||
column: 11
|
||||
end_location:
|
||||
row: 74
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 76
|
||||
column: 2
|
||||
end_location:
|
||||
row: 76
|
||||
column: 2
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 76
|
||||
column: 3
|
||||
end_location:
|
||||
row: 76
|
||||
column: 3
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 78
|
||||
column: 2
|
||||
end_location:
|
||||
row: 78
|
||||
column: 2
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 78
|
||||
column: 5
|
||||
end_location:
|
||||
row: 78
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 88
|
||||
column: 7
|
||||
end_location:
|
||||
row: 88
|
||||
column: 7
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 90
|
||||
column: 5
|
||||
end_location:
|
||||
row: 90
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 92
|
||||
column: 2
|
||||
end_location:
|
||||
row: 92
|
||||
column: 2
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 94
|
||||
column: 3
|
||||
end_location:
|
||||
row: 94
|
||||
column: 3
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 98
|
||||
column: 8
|
||||
end_location:
|
||||
row: 98
|
||||
column: 8
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 100
|
||||
column: 6
|
||||
end_location:
|
||||
row: 100
|
||||
column: 6
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundOperator: ~
|
||||
location:
|
||||
row: 154
|
||||
column: 12
|
||||
end_location:
|
||||
row: 154
|
||||
column: 12
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,125 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pycodestyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 92
|
||||
column: 3
|
||||
end_location:
|
||||
row: 92
|
||||
column: 3
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 94
|
||||
column: 4
|
||||
end_location:
|
||||
row: 94
|
||||
column: 4
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 96
|
||||
column: 4
|
||||
end_location:
|
||||
row: 96
|
||||
column: 4
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 98
|
||||
column: 10
|
||||
end_location:
|
||||
row: 98
|
||||
column: 10
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 100
|
||||
column: 10
|
||||
end_location:
|
||||
row: 100
|
||||
column: 10
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 106
|
||||
column: 6
|
||||
end_location:
|
||||
row: 106
|
||||
column: 6
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 106
|
||||
column: 14
|
||||
end_location:
|
||||
row: 106
|
||||
column: 14
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 110
|
||||
column: 5
|
||||
end_location:
|
||||
row: 110
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 112
|
||||
column: 5
|
||||
end_location:
|
||||
row: 112
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 114
|
||||
column: 10
|
||||
end_location:
|
||||
row: 114
|
||||
column: 10
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 114
|
||||
column: 16
|
||||
end_location:
|
||||
row: 114
|
||||
column: 16
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundArithmeticOperator: ~
|
||||
location:
|
||||
row: 116
|
||||
column: 11
|
||||
end_location:
|
||||
row: 116
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pycodestyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingWhitespaceAroundBitwiseOrShiftOperator: ~
|
||||
location:
|
||||
row: 121
|
||||
column: 11
|
||||
end_location:
|
||||
row: 121
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundBitwiseOrShiftOperator: ~
|
||||
location:
|
||||
row: 123
|
||||
column: 11
|
||||
end_location:
|
||||
row: 123
|
||||
column: 11
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundBitwiseOrShiftOperator: ~
|
||||
location:
|
||||
row: 125
|
||||
column: 5
|
||||
end_location:
|
||||
row: 125
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundBitwiseOrShiftOperator: ~
|
||||
location:
|
||||
row: 127
|
||||
column: 5
|
||||
end_location:
|
||||
row: 127
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundBitwiseOrShiftOperator: ~
|
||||
location:
|
||||
row: 129
|
||||
column: 5
|
||||
end_location:
|
||||
row: 129
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pycodestyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
MissingWhitespaceAroundModuloOperator: ~
|
||||
location:
|
||||
row: 131
|
||||
column: 5
|
||||
end_location:
|
||||
row: 131
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundModuloOperator: ~
|
||||
location:
|
||||
row: 133
|
||||
column: 9
|
||||
end_location:
|
||||
row: 133
|
||||
column: 9
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
MissingWhitespaceAroundModuloOperator: ~
|
||||
location:
|
||||
row: 135
|
||||
column: 25
|
||||
end_location:
|
||||
row: 135
|
||||
column: 25
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -77,6 +77,9 @@ mod tests {
|
||||
pydocstyle: Settings {
|
||||
convention: None,
|
||||
ignore_decorators: BTreeSet::from_iter(["functools.wraps".to_string()]),
|
||||
property_decorators: BTreeSet::from_iter([
|
||||
"gi.repository.GObject.Property".to_string()
|
||||
]),
|
||||
},
|
||||
..settings::Settings::for_rule(rule_code)
|
||||
},
|
||||
@@ -95,6 +98,7 @@ mod tests {
|
||||
pydocstyle: Settings {
|
||||
convention: None,
|
||||
ignore_decorators: BTreeSet::new(),
|
||||
property_decorators: BTreeSet::new(),
|
||||
},
|
||||
..settings::Settings::for_rule(Rule::UndocumentedParam)
|
||||
},
|
||||
@@ -112,6 +116,7 @@ mod tests {
|
||||
pydocstyle: Settings {
|
||||
convention: Some(Convention::Google),
|
||||
ignore_decorators: BTreeSet::new(),
|
||||
property_decorators: BTreeSet::new(),
|
||||
},
|
||||
..settings::Settings::for_rule(Rule::UndocumentedParam)
|
||||
},
|
||||
@@ -129,6 +134,7 @@ mod tests {
|
||||
pydocstyle: Settings {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: BTreeSet::new(),
|
||||
property_decorators: BTreeSet::new(),
|
||||
},
|
||||
..settings::Settings::for_rule(Rule::UndocumentedParam)
|
||||
},
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::docstrings::definition::Docstring;
|
||||
use crate::docstrings::styles::SectionStyle;
|
||||
use crate::docstrings::sections::SectionKind;
|
||||
use crate::fix::Fix;
|
||||
use crate::message::Location;
|
||||
use crate::registry::Diagnostic;
|
||||
@@ -40,15 +42,13 @@ pub fn ends_with_period(checker: &mut Checker, docstring: &Docstring) {
|
||||
}
|
||||
|
||||
// Avoid false-positives: `Args:`, etc.
|
||||
for style in [SectionStyle::Google, SectionStyle::Numpy] {
|
||||
for section_name in style.section_names().iter() {
|
||||
if let Some(suffix) = trimmed.strip_suffix(section_name) {
|
||||
if suffix.is_empty() {
|
||||
return;
|
||||
}
|
||||
if suffix == ":" {
|
||||
return;
|
||||
}
|
||||
for section_kind in SectionKind::iter() {
|
||||
if let Some(suffix) = trimmed.strip_suffix(section_kind.as_str()) {
|
||||
if suffix.is_empty() {
|
||||
return;
|
||||
}
|
||||
if suffix == ":" {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::docstrings::definition::Docstring;
|
||||
use crate::docstrings::styles::SectionStyle;
|
||||
use crate::docstrings::sections::SectionKind;
|
||||
use crate::fix::Fix;
|
||||
use crate::message::Location;
|
||||
use crate::registry::Diagnostic;
|
||||
@@ -40,15 +42,13 @@ pub fn ends_with_punctuation(checker: &mut Checker, docstring: &Docstring) {
|
||||
}
|
||||
|
||||
// Avoid false-positives: `Args:`, etc.
|
||||
for style in [SectionStyle::Google, SectionStyle::Numpy] {
|
||||
for section_name in style.section_names().iter() {
|
||||
if let Some(suffix) = trimmed.strip_suffix(section_name) {
|
||||
if suffix.is_empty() {
|
||||
return;
|
||||
}
|
||||
if suffix == ":" {
|
||||
return;
|
||||
}
|
||||
for section_kind in SectionKind::iter() {
|
||||
if let Some(suffix) = trimmed.strip_suffix(section_kind.as_str()) {
|
||||
if suffix.is_empty() {
|
||||
return;
|
||||
}
|
||||
if suffix == ":" {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
use std::collections::BTreeSet;

use imperative::Mood;
use once_cell::sync::Lazy;
use ruff_macros::{define_violation, derive_message_formats};

use crate::ast::cast;
use crate::ast::types::Range;
use crate::ast::helpers::to_call_path;
use crate::ast::types::{CallPath, Range};
use crate::checkers::ast::Checker;
use crate::docstrings::definition::{DefinitionKind, Docstring};
use crate::registry::Diagnostic;
@@ -14,7 +17,11 @@ use crate::visibility::{is_property, is_test};
static MOOD: Lazy<Mood> = Lazy::new(Mood::new);

/// D401
pub fn non_imperative_mood(checker: &mut Checker, docstring: &Docstring) {
pub fn non_imperative_mood(
    checker: &mut Checker,
    docstring: &Docstring,
    property_decorators: &BTreeSet<String>,
) {
    let (
        DefinitionKind::Function(parent)
        | DefinitionKind::NestedFunction(parent)
@@ -22,7 +29,15 @@ pub fn non_imperative_mood(checker: &mut Checker, docstring: &Docstring) {
    ) = &docstring.kind else {
        return;
    };
    if is_test(cast::name(parent)) || is_property(checker, cast::decorator_list(parent)) {

    let property_decorators = property_decorators
        .iter()
        .map(|decorator| to_call_path(decorator))
        .collect::<Vec<CallPath>>();

    if is_test(cast::name(parent))
        || is_property(checker, cast::decorator_list(parent), &property_decorators)
    {
        return;
    }

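The new `property_decorators` argument threads the `pydocstyle.property-decorators` setting (added further down in this diff) into D401, so methods behind user-specified property decorators are treated like `@property` and may keep a non-imperative summary line. A hedged sketch of the intended effect, reusing the decorator path from the option's documented example:

```python
from gi.repository import GObject

class Widget:
    @GObject.Property
    def label(self):
        """Returns the current label."""   # exempt from D401 when
        return self._label                 # "gi.repository.GObject.Property" is configured

    def refresh(self):
        """Returns the widget to its initial state."""  # D401: first word should be
        ...                                             # imperative, e.g. "Return ..."
```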
@@ -1,17 +1,18 @@
|
||||
use itertools::Itertools;
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustc_hash::FxHashSet;
|
||||
use rustpython_parser::ast::StmtKind;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use crate::ast::helpers::identifier_range;
|
||||
use crate::ast::types::Range;
|
||||
use crate::ast::whitespace::LinesWithTrailingNewline;
|
||||
use crate::ast::{cast, whitespace};
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::docstrings::definition::{DefinitionKind, Docstring};
|
||||
use crate::docstrings::sections::{section_contexts, SectionContext};
|
||||
use crate::docstrings::sections::{section_contexts, SectionContext, SectionKind};
|
||||
use crate::docstrings::styles::SectionStyle;
|
||||
use crate::fix::Fix;
|
||||
use crate::message::Location;
|
||||
@@ -289,18 +290,46 @@ pub fn sections(checker: &mut Checker, docstring: &Docstring, convention: Option
            }
        }
        Some(Convention::Pep257) | None => {
            // First, interpret as NumPy-style sections.
            let mut found_numpy_section = false;
            for context in &section_contexts(&lines, &SectionStyle::Numpy) {
                found_numpy_section = true;
                numpy_section(checker, docstring, context);
            // There are some overlapping section names, between the Google and NumPy conventions
            // (e.g., "Returns", "Raises"). Break ties by checking for the presence of some of the
            // section names that are unique to each convention.

            // If the docstring contains `Args:` or `Arguments:`, use the Google convention.
            let google_sections = section_contexts(&lines, &SectionStyle::Google);
            if google_sections
                .iter()
                .any(|context| matches!(context.kind, SectionKind::Arguments | SectionKind::Args))
            {
                for context in &google_sections {
                    google_section(checker, docstring, context);
                }
                return;
            }

            // If no such sections were identified, interpret as Google-style sections.
            if !found_numpy_section {
                for context in &section_contexts(&lines, &SectionStyle::Google) {
            // If the docstring contains `Parameters:` or `Other Parameters:`, use the NumPy
            // convention.
            let numpy_sections = section_contexts(&lines, &SectionStyle::Numpy);
            if numpy_sections.iter().any(|context| {
                matches!(
                    context.kind,
                    SectionKind::Parameters | SectionKind::OtherParameters
                )
            }) {
                for context in &numpy_sections {
                    numpy_section(checker, docstring, context);
                }
                return;
            }

            // Otherwise, use whichever convention matched more sections.
            if google_sections.len() > numpy_sections.len() {
                for context in &google_sections {
                    google_section(checker, docstring, context);
                }
            } else {
                for context in &numpy_sections {
                    numpy_section(checker, docstring, context);
                }
            }
        }
    }
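The comments in the hunk above describe the new tie-breaking scheme for the default/PEP 257 path: `Returns` and `Raises` exist in both the Google and NumPy conventions, so the checker first looks for headers unique to one style (`Args:`/`Arguments:` for Google, `Parameters`/`Other Parameters` for NumPy) and only then falls back to whichever style matched more sections. Two hypothetical docstrings showing the shapes being distinguished:

```python
def google_style(a, b):
    """Summarize the inputs.

    Args:
        a: First operand.
        b: Second operand.

    Returns:
        The combined value.
    """

def numpy_style(a, b):
    """Summarize the inputs.

    Parameters
    ----------
    a : int
    b : int

    Returns
    -------
    int
    """
```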
@@ -614,47 +643,37 @@ fn blanks_and_section_underline(
|
||||
}
|
||||
}
|
||||
|
||||
fn common_section(
|
||||
checker: &mut Checker,
|
||||
docstring: &Docstring,
|
||||
context: &SectionContext,
|
||||
style: &SectionStyle,
|
||||
) {
|
||||
fn common_section(checker: &mut Checker, docstring: &Docstring, context: &SectionContext) {
|
||||
if checker.settings.rules.enabled(&Rule::CapitalizeSectionName) {
|
||||
if !style.section_names().contains(&context.section_name) {
|
||||
let capitalized_section_name = titlecase::titlecase(context.section_name);
|
||||
if style
|
||||
.section_names()
|
||||
.contains(capitalized_section_name.as_str())
|
||||
{
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
CapitalizeSectionName {
|
||||
name: context.section_name.to_string(),
|
||||
},
|
||||
Range::from_located(docstring.expr),
|
||||
);
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
// Replace the section title with the capitalized variant. This requires
|
||||
// locating the start and end of the section name.
|
||||
if let Some(index) = context.line.find(context.section_name) {
|
||||
// Map from bytes to characters.
|
||||
let section_name_start = &context.line[..index].chars().count();
|
||||
let section_name_length = &context.section_name.chars().count();
|
||||
diagnostic.amend(Fix::replacement(
|
||||
capitalized_section_name,
|
||||
Location::new(
|
||||
docstring.expr.location.row() + context.original_index,
|
||||
*section_name_start,
|
||||
),
|
||||
Location::new(
|
||||
docstring.expr.location.row() + context.original_index,
|
||||
section_name_start + section_name_length,
|
||||
),
|
||||
));
|
||||
}
|
||||
let capitalized_section_name = context.kind.as_str();
|
||||
if context.section_name != capitalized_section_name {
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
CapitalizeSectionName {
|
||||
name: context.section_name.to_string(),
|
||||
},
|
||||
Range::from_located(docstring.expr),
|
||||
);
|
||||
if checker.patch(diagnostic.kind.rule()) {
|
||||
// Replace the section title with the capitalized variant. This requires
|
||||
// locating the start and end of the section name.
|
||||
if let Some(index) = context.line.find(context.section_name) {
|
||||
// Map from bytes to characters.
|
||||
let section_name_start = &context.line[..index].chars().count();
|
||||
let section_name_length = &context.section_name.chars().count();
|
||||
diagnostic.amend(Fix::replacement(
|
||||
capitalized_section_name.to_string(),
|
||||
Location::new(
|
||||
docstring.expr.location.row() + context.original_index,
|
||||
*section_name_start,
|
||||
),
|
||||
Location::new(
|
||||
docstring.expr.location.row() + context.original_index,
|
||||
section_name_start + section_name_length,
|
||||
),
|
||||
));
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -933,7 +952,7 @@ fn parameters_section(checker: &mut Checker, docstring: &Docstring, context: &Se
|
||||
}
|
||||
|
||||
fn numpy_section(checker: &mut Checker, docstring: &Docstring, context: &SectionContext) {
|
||||
common_section(checker, docstring, context, &SectionStyle::Numpy);
|
||||
common_section(checker, docstring, context);
|
||||
|
||||
if checker
|
||||
.settings
|
||||
@@ -977,15 +996,14 @@ fn numpy_section(checker: &mut Checker, docstring: &Docstring, context: &Section
|
||||
}
|
||||
|
||||
if checker.settings.rules.enabled(&Rule::UndocumentedParam) {
|
||||
let capitalized_section_name = titlecase::titlecase(context.section_name);
|
||||
if capitalized_section_name == "Parameters" {
|
||||
if matches!(context.kind, SectionKind::Parameters) {
|
||||
parameters_section(checker, docstring, context);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn google_section(checker: &mut Checker, docstring: &Docstring, context: &SectionContext) {
|
||||
common_section(checker, docstring, context, &SectionStyle::Google);
|
||||
common_section(checker, docstring, context);
|
||||
|
||||
if checker
|
||||
.settings
|
||||
@@ -1030,8 +1048,7 @@ fn google_section(checker: &mut Checker, docstring: &Docstring, context: &Sectio
|
||||
}
|
||||
|
||||
if checker.settings.rules.enabled(&Rule::UndocumentedParam) {
|
||||
let capitalized_section_name = titlecase::titlecase(context.section_name);
|
||||
if capitalized_section_name == "Args" || capitalized_section_name == "Arguments" {
|
||||
if matches!(context.kind, SectionKind::Args | SectionKind::Arguments) {
|
||||
args_section(checker, docstring, context);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
use std::collections::BTreeSet;
|
||||
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::registry::Rule;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, JsonSchema, CacheKey)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum Convention {
|
||||
/// Use Google-style docstrings.
|
||||
@@ -94,15 +94,29 @@ pub struct Options {
|
||||
"#
|
||||
)]
|
||||
/// Ignore docstrings for functions or methods decorated with the
|
||||
/// specified decorators. Unlike the `pydocstyle`, Ruff accepts an array
|
||||
/// of fully-qualified module identifiers, instead of a regular expression.
|
||||
/// specified fully-qualified decorators.
|
||||
pub ignore_decorators: Option<Vec<String>>,
|
||||
#[option(
|
||||
default = r#"[]"#,
|
||||
value_type = "list[str]",
|
||||
example = r#"
|
||||
property-decorators = ["gi.repository.GObject.Property"]
|
||||
"#
|
||||
)]
|
||||
/// A list of decorators that, when applied to a method, indicate that the
|
||||
/// method should be treated as a property (in addition to the builtin
|
||||
/// `@property` and standard-library `@functools.cached_property`).
|
||||
///
|
||||
/// For example, Ruff will expect that any method decorated by a decorator
|
||||
/// in this list can use a non-imperative summary line.
|
||||
pub property_decorators: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Hash)]
|
||||
#[derive(Debug, Default, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub convention: Option<Convention>,
|
||||
pub ignore_decorators: BTreeSet<String>,
|
||||
pub property_decorators: BTreeSet<String>,
|
||||
}
|
||||
|
||||
impl From<Options> for Settings {
|
||||
@@ -110,6 +124,9 @@ impl From<Options> for Settings {
|
||||
Self {
|
||||
convention: options.convention,
|
||||
ignore_decorators: BTreeSet::from_iter(options.ignore_decorators.unwrap_or_default()),
|
||||
property_decorators: BTreeSet::from_iter(
|
||||
options.property_decorators.unwrap_or_default(),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -119,6 +136,7 @@ impl From<Settings> for Options {
|
||||
Self {
|
||||
convention: settings.convention,
|
||||
ignore_decorators: Some(settings.ignore_decorators.into_iter().collect()),
|
||||
property_decorators: Some(settings.property_decorators.into_iter().collect()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,74 +1,74 @@
|
||||
---
|
||||
source: src/rules/pydocstyle/mod.rs
|
||||
source: crates/ruff/src/rules/pydocstyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
NonImperativeMood: Returns foo.
|
||||
location:
|
||||
row: 8
|
||||
row: 10
|
||||
column: 4
|
||||
end_location:
|
||||
row: 8
|
||||
row: 10
|
||||
column: 22
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: Constructor for a foo.
|
||||
location:
|
||||
row: 12
|
||||
row: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 12
|
||||
row: 14
|
||||
column: 32
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: Constructor for a boa.
|
||||
location:
|
||||
row: 16
|
||||
row: 18
|
||||
column: 4
|
||||
end_location:
|
||||
row: 20
|
||||
row: 22
|
||||
column: 7
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: Runs something
|
||||
location:
|
||||
row: 24
|
||||
row: 26
|
||||
column: 4
|
||||
end_location:
|
||||
row: 24
|
||||
row: 26
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: "Runs other things, nested"
|
||||
location:
|
||||
row: 27
|
||||
row: 29
|
||||
column: 8
|
||||
end_location:
|
||||
row: 27
|
||||
row: 29
|
||||
column: 39
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: Writes a logical line that
|
||||
location:
|
||||
row: 33
|
||||
row: 35
|
||||
column: 4
|
||||
end_location:
|
||||
row: 35
|
||||
row: 37
|
||||
column: 7
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
NonImperativeMood: This method docstring should be written in imperative mood.
|
||||
location:
|
||||
row: 72
|
||||
row: 74
|
||||
column: 8
|
||||
end_location:
|
||||
row: 72
|
||||
row: 74
|
||||
column: 73
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -38,40 +38,4 @@ expression: diagnostics
|
||||
row: 218
|
||||
column: 11
|
||||
parent: ~
|
||||
- kind:
|
||||
NewLineAfterSectionName:
|
||||
name: Returns
|
||||
location:
|
||||
row: 252
|
||||
column: 4
|
||||
end_location:
|
||||
row: 262
|
||||
column: 7
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 257
|
||||
column: 11
|
||||
end_location:
|
||||
row: 257
|
||||
column: 12
|
||||
parent: ~
|
||||
- kind:
|
||||
NewLineAfterSectionName:
|
||||
name: Raises
|
||||
location:
|
||||
row: 252
|
||||
column: 4
|
||||
end_location:
|
||||
row: 262
|
||||
column: 7
|
||||
fix:
|
||||
content: ""
|
||||
location:
|
||||
row: 259
|
||||
column: 10
|
||||
end_location:
|
||||
row: 259
|
||||
column: 11
|
||||
parent: ~
|
||||
|
||||
|
||||
@@ -56,6 +56,24 @@ expression: diagnostics
|
||||
row: 219
|
||||
column: 0
|
||||
parent: ~
|
||||
- kind:
|
||||
DashedUnderlineAfterSection:
|
||||
name: Args
|
||||
location:
|
||||
row: 252
|
||||
column: 4
|
||||
end_location:
|
||||
row: 262
|
||||
column: 7
|
||||
fix:
|
||||
content: " ----\n"
|
||||
location:
|
||||
row: 255
|
||||
column: 0
|
||||
end_location:
|
||||
row: 255
|
||||
column: 0
|
||||
parent: ~
|
||||
- kind:
|
||||
DashedUnderlineAfterSection:
|
||||
name: Returns
|
||||
|
||||
@@ -2,6 +2,32 @@
|
||||
source: crates/ruff/src/rules/pydocstyle/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
- y
|
||||
- z
|
||||
location:
|
||||
row: 1
|
||||
column: 4
|
||||
end_location:
|
||||
row: 1
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
- y
|
||||
- z
|
||||
location:
|
||||
row: 14
|
||||
column: 4
|
||||
end_location:
|
||||
row: 14
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
@@ -15,6 +41,31 @@ expression: diagnostics
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
- y
|
||||
- z
|
||||
location:
|
||||
row: 39
|
||||
column: 4
|
||||
end_location:
|
||||
row: 39
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
- y
|
||||
location:
|
||||
row: 52
|
||||
column: 4
|
||||
end_location:
|
||||
row: 52
|
||||
column: 5
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndocumentedParam:
|
||||
names:
|
||||
|
||||
@@ -104,6 +104,8 @@ mod tests {
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_8.pyi"); "F821_8")]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_9.py"); "F821_9")]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_10.py"); "F821_10")]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_11.py"); "F821_11")]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_12.py"); "F821_12")]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_0.py"); "F822_0")]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_1.py"); "F822_1")]
|
||||
#[test_case(Rule::UndefinedExport, Path::new("F822_2.py"); "F822_2")]
|
||||
@@ -129,7 +131,7 @@ mod tests {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes/F841_0.py"),
|
||||
&settings::Settings {
|
||||
dummy_variable_rgx: Regex::new(r"^z$").unwrap().into(),
|
||||
dummy_variable_rgx: Regex::new(r"^z$").unwrap(),
|
||||
..settings::Settings::for_rule(Rule::UnusedVariable)
|
||||
},
|
||||
)?;
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pyflakes/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
UndefinedName:
|
||||
name: os
|
||||
location:
|
||||
row: 18
|
||||
column: 26
|
||||
end_location:
|
||||
row: 18
|
||||
column: 30
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndefinedName:
|
||||
name: Baz
|
||||
location:
|
||||
row: 23
|
||||
column: 12
|
||||
end_location:
|
||||
row: 23
|
||||
column: 17
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
---
|
||||
source: crates/ruff/src/rules/pyflakes/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
UndefinedName:
|
||||
name: os
|
||||
location:
|
||||
row: 20
|
||||
column: 26
|
||||
end_location:
|
||||
row: 20
|
||||
column: 30
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
UndefinedName:
|
||||
name: Baz
|
||||
location:
|
||||
row: 25
|
||||
column: 12
|
||||
end_location:
|
||||
row: 25
|
||||
column: 17
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -99,7 +99,7 @@ mod tests {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pylint/too_many_arguments_params.py"),
|
||||
&Settings {
|
||||
dummy_variable_rgx: Regex::new(r"skip_.*").unwrap().into(),
|
||||
dummy_variable_rgx: Regex::new(r"skip_.*").unwrap(),
|
||||
..Settings::for_rules(vec![Rule::TooManyArguments])
|
||||
},
|
||||
)?;
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use regex::Regex;
|
||||
use std::{fmt, iter};
|
||||
|
||||
use rustpython_parser::ast::{Expr, ExprContext, ExprKind, Stmt, StmtKind, Withitem};
|
||||
@@ -12,7 +13,6 @@ use crate::ast::visitor;
|
||||
use crate::ast::visitor::Visitor;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::settings::hashable::HashableRegex;
|
||||
use crate::violation::Violation;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
|
||||
@@ -142,7 +142,7 @@ struct ExprWithInnerBindingKind<'a> {
|
||||
}
|
||||
|
||||
struct InnerForWithAssignTargetsVisitor<'a> {
|
||||
dummy_variable_rgx: &'a HashableRegex,
|
||||
dummy_variable_rgx: &'a Regex,
|
||||
assignment_targets: Vec<ExprWithInnerBindingKind<'a>>,
|
||||
}
|
||||
|
||||
@@ -213,7 +213,7 @@ where
|
||||
|
||||
fn assignment_targets_from_expr<'a, U>(
|
||||
expr: &'a Expr<U>,
|
||||
dummy_variable_rgx: &'a HashableRegex,
|
||||
dummy_variable_rgx: &'a Regex,
|
||||
) -> Box<dyn Iterator<Item = &'a Expr<U>> + 'a> {
|
||||
// The Box is necessary to ensure the match arms have the same return type - we can't use
|
||||
// a cast to "impl Iterator", since at the time of writing that is only allowed for
|
||||
@@ -266,7 +266,7 @@ fn assignment_targets_from_expr<'a, U>(
|
||||
|
||||
fn assignment_targets_from_with_items<'a, U>(
|
||||
items: &'a [Withitem<U>],
|
||||
dummy_variable_rgx: &'a HashableRegex,
|
||||
dummy_variable_rgx: &'a Regex,
|
||||
) -> impl Iterator<Item = &'a Expr<U>> + 'a {
|
||||
items
|
||||
.iter()
|
||||
@@ -280,7 +280,7 @@ fn assignment_targets_from_with_items<'a, U>(
|
||||
|
||||
fn assignment_targets_from_assign_targets<'a, U>(
|
||||
targets: &'a [Expr<U>],
|
||||
dummy_variable_rgx: &'a HashableRegex,
|
||||
dummy_variable_rgx: &'a Regex,
|
||||
) -> impl Iterator<Item = &'a Expr<U>> + 'a {
|
||||
targets
|
||||
.iter()
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
//! Settings for the `pylint` plugin.
|
||||
|
||||
use std::hash::Hash;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use ruff_macros::ConfigurationOptions;
|
||||
use ruff_macros::{CacheKey, ConfigurationOptions};
|
||||
use rustpython_parser::ast::Constant;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, JsonSchema)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, JsonSchema)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
pub enum ConstantType {
|
||||
Bytes,
|
||||
@@ -72,7 +70,7 @@ pub struct Options {
|
||||
pub max_statements: Option<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Hash)]
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub allow_magic_value_types: Vec<ConstantType>,
|
||||
pub max_args: usize,
|
||||
|
||||
@@ -66,6 +66,7 @@ mod tests {
|
||||
#[test_case(Rule::OutdatedVersionBlock, Path::new("UP036_3.py"); "UP036_3")]
|
||||
#[test_case(Rule::OutdatedVersionBlock, Path::new("UP036_4.py"); "UP036_4")]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037.py"); "UP037")]
|
||||
#[test_case(Rule::IsinstanceWithTuple, Path::new("UP038.py"); "UP038")]
|
||||
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = path.to_string_lossy().to_string();
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -167,7 +167,7 @@ fn try_convert_to_f_string(checker: &Checker, expr: &Expr) -> Option<String> {
|
||||
match part {
|
||||
FormatPart::Field {
|
||||
field_name,
|
||||
preconversion_spec,
|
||||
conversion_spec,
|
||||
format_spec,
|
||||
} => {
|
||||
converted.push('{');
|
||||
@@ -213,9 +213,9 @@ fn try_convert_to_f_string(checker: &Checker, expr: &Expr) -> Option<String> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(preconversion_spec) = preconversion_spec {
|
||||
if let Some(conversion_spec) = conversion_spec {
|
||||
converted.push('!');
|
||||
converted.push(preconversion_spec);
|
||||
converted.push(conversion_spec);
|
||||
}
|
||||
|
||||
if !format_spec.is_empty() {
|
||||
|
||||
@@ -39,6 +39,7 @@ pub(crate) use unnecessary_future_import::{unnecessary_future_import, Unnecessar
|
||||
pub(crate) use unpack_list_comprehension::{unpack_list_comprehension, RewriteListComprehension};
|
||||
pub(crate) use use_pep585_annotation::{use_pep585_annotation, DeprecatedCollectionType};
|
||||
pub(crate) use use_pep604_annotation::{use_pep604_annotation, TypingUnion};
|
||||
pub(crate) use use_pep604_isinstance::{use_pep604_isinstance, IsinstanceWithTuple};
|
||||
pub(crate) use useless_metaclass_type::{useless_metaclass_type, UselessMetaclassType};
|
||||
pub(crate) use useless_object_inheritance::{useless_object_inheritance, UselessObjectInheritance};
|
||||
|
||||
@@ -75,5 +76,6 @@ mod unnecessary_future_import;
|
||||
mod unpack_list_comprehension;
|
||||
mod use_pep585_annotation;
|
||||
mod use_pep604_annotation;
|
||||
mod use_pep604_isinstance;
|
||||
mod useless_metaclass_type;
|
||||
mod useless_object_inheritance;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.