Compare commits

..

2 Commits

Author SHA1 Message Date
Aria Desires
a9526fe0a5 serde_json is no longer optional 2025-12-08 15:17:28 -05:00
Aria Desires
e733a87bd7 Teach ty check to ask uv to sync the venv of a PEP-723 script 2025-12-08 15:00:37 -05:00
350 changed files with 8083 additions and 11910 deletions

View File

@@ -298,7 +298,7 @@ jobs:
# sync, not just public items. Eventually we should do this for all
# crates; for now add crates here as they are warning-clean to prevent
# regression.
- run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db -p ruff_python_formatter --document-private-items
- run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items
env:
# Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
RUSTDOCFLAGS: "-D warnings"

View File

@@ -47,7 +47,6 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
shared-key: "mypy-primer"
workspaces: "ruff"
- name: Install Rust toolchain
@@ -87,7 +86,6 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
workspaces: "ruff"
shared-key: "mypy-primer"
- name: Install Rust toolchain
run: rustup show
@@ -107,54 +105,3 @@ jobs:
with:
name: mypy_primer_memory_diff
path: mypy_primer_memory.diff
# Runs mypy_primer twice against the same ty version to catch any non-deterministic behavior (ideally).
# The job is disabled for now because there are some non-deterministic diagnostics.
mypy_primer_same_revision:
name: Run mypy_primer on same revision
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
timeout-minutes: 20
# TODO: Enable once we've fixed the non-deterministic diagnostics
if: false
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: ruff
fetch-depth: 0
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
workspaces: "ruff"
shared-key: "mypy-primer"
- name: Install Rust toolchain
run: rustup show
- name: Run determinism check
env:
BASE_REVISION: ${{ github.event.pull_request.head.sha }}
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
CLICOLOR_FORCE: "1"
DIFF_FILE: mypy_primer_determinism.diff
run: |
cd ruff
scripts/mypy_primer.sh
- name: Check for non-determinism
run: |
# Remove ANSI color codes for checking
sed -e 's/\x1b\[[0-9;]*m//g' mypy_primer_determinism.diff > mypy_primer_determinism_clean.diff
# Check if there are any differences (non-determinism)
if [ -s mypy_primer_determinism_clean.diff ]; then
echo "ERROR: Non-deterministic output detected!"
echo "The following differences were found when running ty twice on the same commit:"
cat mypy_primer_determinism_clean.diff
exit 1
else
echo "✓ Output is deterministic"
fi

33
Cargo.lock generated
View File

@@ -1016,7 +1016,7 @@ dependencies = [
"libc",
"option-ext",
"redox_users",
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1108,7 +1108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -1238,9 +1238,9 @@ dependencies = [
[[package]]
name = "get-size-derive2"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab21d7bd2c625f2064f04ce54bcb88bc57c45724cde45cba326d784e22d3f71a"
checksum = "ff47daa61505c85af126e9dd64af6a342a33dc0cccfe1be74ceadc7d352e6efd"
dependencies = [
"attribute-derive",
"quote",
@@ -1249,15 +1249,14 @@ dependencies = [
[[package]]
name = "get-size2"
version = "0.7.3"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879272b0de109e2b67b39fcfe3d25fdbba96ac07e44a254f5a0b4d7ff55340cb"
checksum = "ac7bb8710e1f09672102be7ddf39f764d8440ae74a9f4e30aaa4820dcdffa4af"
dependencies = [
"compact_str",
"get-size-derive2",
"hashbrown 0.16.1",
"indexmap",
"ordermap",
"smallvec",
]
@@ -1764,7 +1763,7 @@ dependencies = [
"portable-atomic",
"portable-atomic-util",
"serde_core",
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -2234,9 +2233,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "ordermap"
version = "1.0.0"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed637741ced8fb240855d22a2b4f208dab7a06bcce73380162e5253000c16758"
checksum = "b100f7dd605611822d30e182214d3c02fdefce2d801d23993f6b6ba6ca1392af"
dependencies = [
"indexmap",
"serde",
@@ -3349,7 +3348,6 @@ dependencies = [
"compact_str",
"get-size2",
"insta",
"itertools 0.14.0",
"memchr",
"ruff_annotate_snippets",
"ruff_python_ast",
@@ -3573,7 +3571,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -3591,7 +3589,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0"
dependencies = [
"boxcar",
"compact_str",
@@ -3602,7 +3600,6 @@ dependencies = [
"indexmap",
"intrusive-collections",
"inventory",
"ordermap",
"parking_lot",
"portable-atomic",
"rustc-hash",
@@ -3616,12 +3613,12 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0"
[[package]]
name = "salsa-macros"
version = "0.24.0"
source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1"
source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0"
dependencies = [
"proc-macro2",
"quote",
@@ -3975,7 +3972,7 @@ dependencies = [
"getrandom 0.3.4",
"once_cell",
"rustix",
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]
@@ -5029,7 +5026,7 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
"windows-sys 0.61.0",
"windows-sys 0.59.0",
]
[[package]]

View File

@@ -88,7 +88,7 @@ etcetera = { version = "0.11.0" }
fern = { version = "0.7.0" }
filetime = { version = "0.2.23" }
getrandom = { version = "0.3.1" }
get-size2 = { version = "0.7.3", features = [
get-size2 = { version = "0.7.0", features = [
"derive",
"smallvec",
"hashbrown",
@@ -129,7 +129,7 @@ memchr = { version = "2.7.1" }
mimalloc = { version = "0.1.39" }
natord = { version = "1.0.9" }
notify = { version = "8.0.0" }
ordermap = { version = "1.0.0" }
ordermap = { version = "0.5.0" }
path-absolutize = { version = "3.1.1" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "55e5e7d32fa3fc189276f35bb04c9438f9aedbd1", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",

View File

@@ -1440,78 +1440,6 @@ def function():
Ok(())
}
#[test]
// Exercises the interaction of `# noqa` comments, `# ruff: disable[...]` range
// suppressions, and the `--ignore-noqa` / `--preview` CLI flags:
//   1. default run: the `# noqa` is honored, the range suppression is not,
//      so only the `sys` import is reported;
//   2. `--preview`: the range suppression is also honored — no violations;
//   3. `--ignore-noqa --preview`: both suppressions are bypassed and both
//      unused imports are reported.
fn ignore_noqa() -> Result<()> {
let fixture = CliTest::new()?;
// Restrict the run to F401 (unused import) so the snapshots stay focused.
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
// One violation suppressed by an inline `# noqa`, one by a range suppression.
fixture.write_file(
"noqa.py",
r#"
import os # noqa: F401
# ruff: disable[F401]
import sys
"#,
)?;
// without --ignore-noqa
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py"),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
");
// with --preview only: the range suppression now silences the remaining F401
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--preview"]),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
// with --ignore-noqa --preview
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--ignore-noqa", "--preview"]),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:2:8: F401 [*] `os` imported but unused
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 2 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
");
Ok(())
}
#[test]
fn add_noqa() -> Result<()> {
let fixture = CliTest::new()?;
@@ -1704,100 +1632,6 @@ def unused(x): # noqa: ANN001, ARG001, D103
Ok(())
}
#[test]
// Verifies that `--add-noqa` leaves a file untouched when a file-level
// `# ruff: noqa F401` comment is already present: the command succeeds with
// empty output and the final `insta` snapshot asserts the file content is
// byte-identical to what was written.
fn add_noqa_existing_file_level_noqa() -> Result<()> {
let fixture = CliTest::new()?;
// Enable only F401 so the only candidate violation is the unused import.
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
// The file-level suppression already covers the `os` import below it.
fixture.write_file(
"noqa.py",
r#"
# ruff: noqa F401
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
// The file must not have gained any per-line `# noqa` comments.
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: noqa F401
import os
");
Ok(())
}
#[test]
// Companion to the file-level-noqa test: verifies that `--add-noqa` also
// leaves a file untouched when a `# ruff: disable[F401]` range suppression
// already covers the would-be violation — the run succeeds silently and the
// snapshot asserts the file content is unchanged.
fn add_noqa_existing_range_suppression() -> Result<()> {
let fixture = CliTest::new()?;
// Enable only F401 so the only candidate violation is the unused import.
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
// The range suppression already covers the `os` import below it.
fixture.write_file(
"noqa.py",
r#"
# ruff: disable[F401]
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
// The file must not have gained any per-line `# noqa` comments.
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: disable[F401]
import os
");
Ok(())
}
#[test]
fn add_noqa_multiline_comment() -> Result<()> {
let fixture = CliTest::new()?;

View File

@@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13030,
13000,
);
static TANJUN: Benchmark = Benchmark::new(

View File

@@ -42,14 +42,13 @@ impl<'a> Collector<'a> {
impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
fn visit_stmt(&mut self, stmt: &'ast Stmt) {
match stmt {
Stmt::ImportFrom(import_from) => {
let ast::StmtImportFrom {
names,
module,
level,
range: _,
node_index: _,
} = &**import_from;
Stmt::ImportFrom(ast::StmtImportFrom {
names,
module,
level,
range: _,
node_index: _,
}) => {
let module = module.as_deref();
let level = *level;
for alias in names {
@@ -88,26 +87,24 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
}
}
}
Stmt::Import(import_stmt) => {
let ast::StmtImport {
names,
range: _,
node_index: _,
} = &**import_stmt;
Stmt::Import(ast::StmtImport {
names,
range: _,
node_index: _,
}) => {
for alias in names {
if let Some(module_name) = ModuleName::new(alias.name.as_str()) {
self.imports.push(CollectedImport::Import(module_name));
}
}
}
Stmt::If(if_stmt) => {
let ast::StmtIf {
test,
body,
elif_else_clauses,
range: _,
node_index: _,
} = &**if_stmt;
Stmt::If(ast::StmtIf {
test,
body,
elif_else_clauses,
range: _,
node_index: _,
}) => {
// Skip TYPE_CHECKING blocks if not requested
if self.type_checking_imports || !is_type_checking_condition(test) {
self.visit_body(body);

View File

@@ -199,9 +199,6 @@ def bytes_okay(value=bytes(1)):
def int_okay(value=int("12")):
pass
# Allow immutable slice()
def slice_okay(value=slice(1,2)):
pass
# Allow immutable complex() value
def complex_okay(value=complex(1,2)):

View File

@@ -218,26 +218,3 @@ def should_not_fail(payload, Args):
Args:
The other arguments.
"""
# Test cases for Unpack[TypedDict] kwargs
from typing import TypedDict
from typing_extensions import Unpack
class User(TypedDict):
id: int
name: str
def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs.
Args:
query: some arg
"""
def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs but missing query arg documentation.
Args:
**kwargs: keyword arguments
"""

View File

@@ -2,40 +2,15 @@ from abc import ABC, abstractmethod
from contextlib import suppress
class MyError(Exception):
...
class MySubError(MyError):
...
class MyValueError(ValueError):
...
class MyUserWarning(UserWarning):
...
# Violation test cases with builtin errors: PLW0133
# Test case 1: Useless exception statement
def func():
AssertionError("This is an assertion error") # PLW0133
MyError("This is a custom error") # PLW0133
MySubError("This is a custom error") # PLW0133
MyValueError("This is a custom value error") # PLW0133
# Test case 2: Useless exception statement in try-except block
def func():
try:
Exception("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
except Exception as err:
pass
@@ -44,9 +19,6 @@ def func():
def func():
if True:
RuntimeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 4: Useless exception statement in class
@@ -54,18 +26,12 @@ def func():
class Class:
def __init__(self):
TypeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 5: Useless exception statement in function
def func():
def inner():
IndexError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
inner()
@@ -74,9 +40,6 @@ def func():
def func():
while True:
KeyError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 7: Useless exception statement in abstract class
@@ -85,58 +48,27 @@ def func():
@abstractmethod
def method(self):
NotImplementedError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 8: Useless exception statement inside context manager
def func():
with suppress(Exception):
with suppress(AttributeError):
AttributeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 9: Useless exception statement in parentheses
def func():
(RuntimeError("This is an exception")) # PLW0133
(MyError("This is an exception")) # PLW0133
(MySubError("This is an exception")) # PLW0133
(MyValueError("This is an exception")) # PLW0133
# Test case 10: Useless exception statement in continuation
def func():
x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
x = 1; (MyError("This is an exception")); y = 2 # PLW0133
x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
# Test case 11: Useless warning statement
def func():
UserWarning("This is a user warning") # PLW0133
MyUserWarning("This is a custom user warning") # PLW0133
# Test case 12: Useless exception statement at module level
import builtins
builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
PythonFinalizationError("Added in Python 3.13") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
UserWarning("This is a user warning") # PLW0133
MyUserWarning("This is a custom user warning") # PLW0133
UserWarning("This is an assertion error") # PLW0133
# Non-violation test cases: PLW0133
@@ -187,3 +119,10 @@ def func():
def func():
with suppress(AttributeError):
raise AttributeError("This is an exception") # OK
import builtins
builtins.TypeError("still an exception even though it's an Attribute")
PythonFinalizationError("Added in Python 3.13")

View File

@@ -1,88 +0,0 @@
def f():
# These should both be ignored by the range suppression.
# ruff: disable[E741, F841]
I = 1
# ruff: enable[E741, F841]
def f():
# These should both be ignored by the implicit range suppression.
# Should also generate an "unmatched suppression" warning.
# ruff:disable[E741,F841]
I = 1
def f():
# Neither warning is ignored, and an "unmatched suppression"
# should be generated.
I = 1
# ruff: enable[E741, F841]
def f():
# One should be ignored by the range suppression, and
# the other logged to the user.
# ruff: disable[E741]
I = 1
# ruff: enable[E741]
def f():
# Test interleaved range suppressions. The first and last
# lines should each log a different warning, while the
# middle line should be completely silenced.
# ruff: disable[E741]
l = 0
# ruff: disable[F841]
O = 1
# ruff: enable[E741]
I = 2
# ruff: enable[F841]
def f():
# Neither of these are ignored and warnings are
# logged to user
# ruff: disable[E501]
I = 1
# ruff: enable[E501]
def f():
# These should both be ignored by the range suppression,
# and an unused noqa diagnostic should be logged.
# ruff:disable[E741,F841]
I = 1 # noqa: E741,F841
# ruff:enable[E741,F841]
def f():
# TODO: Duplicate codes should be counted as duplicate, not unused
# ruff: disable[F841, F841]
foo = 0
def f():
# Overlapping range suppressions, one should be marked as used,
# and the other should trigger an unused suppression diagnostic
# ruff: disable[F841]
# ruff: disable[F841]
foo = 0
def f():
# Multiple codes but only one is used
# ruff: disable[E741, F401, F841]
foo = 0
def f():
# Multiple codes but only two are used
# ruff: disable[E741, F401, F841]
I = 0
def f():
# Multiple codes but none are used
# ruff: disable[E741, F401, F841]
print("hello")

View File

@@ -17,12 +17,11 @@ use ruff_python_ast::PythonVersion;
/// Run lint rules over a [`Stmt`] syntax node.
pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
match stmt {
Stmt::Global(global) => {
let ast::StmtGlobal {
names,
range: _,
node_index: _,
} = &**global;
Stmt::Global(ast::StmtGlobal {
names,
range: _,
node_index: _,
}) => {
if checker.is_rule_enabled(Rule::GlobalAtModuleLevel) {
pylint::rules::global_at_module_level(checker, stmt);
}
@@ -32,12 +31,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
}
Stmt::Nonlocal(nonlocal) => {
let ast::StmtNonlocal {
Stmt::Nonlocal(
nonlocal @ ast::StmtNonlocal {
names,
range: _,
node_index: _,
} = &**nonlocal;
},
) => {
if checker.is_rule_enabled(Rule::AmbiguousVariableName) {
for name in names {
pycodestyle::rules::ambiguous_variable_name(checker, name, name.range());
@@ -47,8 +47,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::nonlocal_and_global(checker, nonlocal);
}
}
Stmt::FunctionDef(function_def) => {
let ast::StmtFunctionDef {
Stmt::FunctionDef(
function_def @ ast::StmtFunctionDef {
is_async,
name,
decorator_list,
@@ -58,7 +58,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
type_params: _,
range: _,
node_index: _,
} = &**function_def;
},
) => {
if checker.is_rule_enabled(Rule::DjangoNonLeadingReceiverDecorator) {
flake8_django::rules::non_leading_receiver_decorator(checker, decorator_list);
}
@@ -320,7 +321,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::in_function(checker, name, body);
}
if checker.is_rule_enabled(Rule::ReimplementedOperator) {
refurb::rules::reimplemented_operator(checker, &(&**function_def).into());
refurb::rules::reimplemented_operator(checker, &function_def.into());
}
if checker.is_rule_enabled(Rule::SslWithBadDefaults) {
flake8_bandit::rules::ssl_with_bad_defaults(checker, function_def);
@@ -355,8 +356,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::return_in_init(checker, stmt);
}
}
Stmt::ClassDef(class_def) => {
let ast::StmtClassDef {
Stmt::ClassDef(
class_def @ ast::StmtClassDef {
name,
arguments,
type_params: _,
@@ -364,7 +365,8 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
body,
range: _,
node_index: _,
} = &**class_def;
},
) => {
if checker.is_rule_enabled(Rule::NoClassmethodDecorator) {
pylint::rules::no_classmethod_decorator(checker, stmt);
}
@@ -524,12 +526,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
ruff::rules::implicit_class_var_in_dataclass(checker, class_def);
}
}
Stmt::Import(import) => {
let ast::StmtImport {
names,
range: _,
node_index: _,
} = &**import;
Stmt::Import(ast::StmtImport {
names,
range: _,
node_index: _,
}) => {
if checker.is_rule_enabled(Rule::MultipleImportsOnOneLine) {
pycodestyle::rules::multiple_imports_on_one_line(checker, stmt, names);
}
@@ -577,7 +578,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_tidy_imports::rules::banned_module_level_imports(checker, stmt);
}
for alias in &import.names {
for alias in names {
if checker.is_rule_enabled(Rule::NonAsciiImportName) {
pylint::rules::non_ascii_module_import(checker, alias);
}
@@ -603,7 +604,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.is_rule_enabled(Rule::ManualFromImport) {
pylint::rules::manual_from_import(checker, stmt, alias, &import.names);
pylint::rules::manual_from_import(checker, stmt, alias, names);
}
if checker.is_rule_enabled(Rule::ImportSelf) {
pylint::rules::import_self(checker, alias, checker.module.qualified_name());
@@ -680,9 +681,17 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
}
Stmt::ImportFrom(import_from) => {
let level = import_from.level;
let module = import_from.module.as_deref();
Stmt::ImportFrom(
import_from @ ast::StmtImportFrom {
names,
module,
level,
range: _,
node_index: _,
},
) => {
let level = *level;
let module = module.as_deref();
if checker.is_rule_enabled(Rule::ModuleImportNotAtTopOfFile) {
pycodestyle::rules::module_import_not_at_top_of_file(checker, stmt);
}
@@ -690,7 +699,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::import_outside_top_level(checker, stmt);
}
if checker.is_rule_enabled(Rule::GlobalStatement) {
for name in &import_from.names {
for name in names {
if let Some(asname) = name.asname.as_ref() {
pylint::rules::global_statement(checker, asname);
} else {
@@ -699,7 +708,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.is_rule_enabled(Rule::NonAsciiImportName) {
for alias in &import_from.names {
for alias in names {
pylint::rules::non_ascii_module_import(checker, alias);
}
}
@@ -715,7 +724,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if checker.is_rule_enabled(Rule::UnnecessaryBuiltinImport) {
if let Some(module) = module {
pyupgrade::rules::unnecessary_builtin_import(
checker, stmt, module, &import_from.names, level,
checker, stmt, module, names, level,
);
}
}
@@ -751,7 +760,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
&stmt,
);
for alias in &import_from.names {
for alias in names {
if &alias.name == "*" {
continue;
}
@@ -780,7 +789,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_pyi::rules::from_future_import(checker, import_from);
}
}
for alias in &import_from.names {
for alias in names {
if module != Some("__future__") && &alias.name == "*" {
// F403
checker.report_diagnostic_if_enabled(
@@ -881,7 +890,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
checker,
level,
module,
&import_from.names,
names,
checker.module.qualified_name(),
);
}
@@ -897,14 +906,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_pyi::rules::bytestring_import(checker, import_from);
}
}
Stmt::Raise(raise) => {
Stmt::Raise(raise @ ast::StmtRaise { exc, .. }) => {
if checker.is_rule_enabled(Rule::RaiseNotImplemented) {
if let Some(expr) = &raise.exc {
if let Some(expr) = exc {
pyflakes::rules::raise_not_implemented(checker, expr);
}
}
if checker.is_rule_enabled(Rule::RaiseLiteral) {
if let Some(exc) = &raise.exc {
if let Some(exc) = exc {
flake8_bugbear::rules::raise_literal(checker, exc);
}
}
@@ -913,34 +922,34 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
Rule::FStringInException,
Rule::DotFormatInException,
]) {
if let Some(exc) = &raise.exc {
if let Some(exc) = exc {
flake8_errmsg::rules::string_in_exception(checker, stmt, exc);
}
}
if checker.is_rule_enabled(Rule::OSErrorAlias) {
if let Some(item) = &raise.exc {
if let Some(item) = exc {
pyupgrade::rules::os_error_alias_raise(checker, item);
}
}
if checker.is_rule_enabled(Rule::TimeoutErrorAlias) {
if checker.target_version() >= PythonVersion::PY310 {
if let Some(item) = &raise.exc {
if let Some(item) = exc {
pyupgrade::rules::timeout_error_alias_raise(checker, item);
}
}
}
if checker.is_rule_enabled(Rule::RaiseVanillaClass) {
if let Some(expr) = &raise.exc {
if let Some(expr) = exc {
tryceratops::rules::raise_vanilla_class(checker, expr);
}
}
if checker.is_rule_enabled(Rule::RaiseVanillaArgs) {
if let Some(expr) = &raise.exc {
if let Some(expr) = exc {
tryceratops::rules::raise_vanilla_args(checker, expr);
}
}
if checker.is_rule_enabled(Rule::UnnecessaryParenOnRaiseException) {
if let Some(expr) = &raise.exc {
if let Some(expr) = exc {
flake8_raise::rules::unnecessary_paren_on_raise_exception(checker, expr);
}
}
@@ -948,9 +957,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::misplaced_bare_raise(checker, raise);
}
}
Stmt::AugAssign(aug_assign) => {
Stmt::AugAssign(aug_assign @ ast::StmtAugAssign { target, .. }) => {
if checker.is_rule_enabled(Rule::GlobalStatement) {
if let Expr::Name(ast::ExprName { id, .. }) = aug_assign.target.as_ref() {
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
pylint::rules::global_statement(checker, id);
}
}
@@ -958,7 +967,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
ruff::rules::sort_dunder_all_aug_assign(checker, aug_assign);
}
}
Stmt::If(if_) => {
Stmt::If(
if_ @ ast::StmtIf {
test,
elif_else_clauses,
..
},
) => {
if checker.is_rule_enabled(Rule::TooManyNestedBlocks) {
pylint::rules::too_many_nested_blocks(checker, stmt);
}
@@ -1021,33 +1036,33 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
Rule::PatchVersionComparison,
Rule::WrongTupleLengthVersionComparison,
]) {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = if_.test.as_ref() {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = test.as_ref() {
for value in values {
flake8_pyi::rules::unrecognized_version_info(checker, value);
}
} else {
flake8_pyi::rules::unrecognized_version_info(checker, &if_.test);
flake8_pyi::rules::unrecognized_version_info(checker, test);
}
}
if checker.any_rule_enabled(&[
Rule::UnrecognizedPlatformCheck,
Rule::UnrecognizedPlatformName,
]) {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = if_.test.as_ref() {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = test.as_ref() {
for value in values {
flake8_pyi::rules::unrecognized_platform(checker, value);
}
} else {
flake8_pyi::rules::unrecognized_platform(checker, &if_.test);
flake8_pyi::rules::unrecognized_platform(checker, test);
}
}
if checker.is_rule_enabled(Rule::ComplexIfStatementInStub) {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = if_.test.as_ref() {
if let Expr::BoolOp(ast::ExprBoolOp { values, .. }) = test.as_ref() {
for value in values {
flake8_pyi::rules::complex_if_statement_in_stub(checker, value);
}
} else {
flake8_pyi::rules::complex_if_statement_in_stub(checker, &if_.test);
flake8_pyi::rules::complex_if_statement_in_stub(checker, test);
}
}
}
@@ -1076,10 +1091,10 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
let has_else_clause = if_.elif_else_clauses.iter().any(|clause| clause.test.is_none());
let has_else_clause = elif_else_clauses.iter().any(|clause| clause.test.is_none());
bad_version_info_comparison(checker, if_.test.as_ref(), has_else_clause);
for clause in &if_.elif_else_clauses {
bad_version_info_comparison(checker, test.as_ref(), has_else_clause);
for clause in elif_else_clauses {
if let Some(test) = clause.test.as_ref() {
bad_version_info_comparison(checker, test, has_else_clause);
}
@@ -1090,37 +1105,44 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
ruff::rules::if_key_in_dict_del(checker, if_);
}
if checker.is_rule_enabled(Rule::NeedlessElse) {
ruff::rules::needless_else(checker, (&**if_).into());
ruff::rules::needless_else(checker, if_.into());
}
}
Stmt::Assert(assert_stmt) => {
Stmt::Assert(
assert_stmt @ ast::StmtAssert {
test,
msg,
range: _,
node_index: _,
},
) => {
if !checker.semantic.in_type_checking_block() {
if checker.is_rule_enabled(Rule::Assert) {
flake8_bandit::rules::assert_used(checker, stmt);
}
}
if checker.is_rule_enabled(Rule::AssertTuple) {
pyflakes::rules::assert_tuple(checker, stmt, &assert_stmt.test);
pyflakes::rules::assert_tuple(checker, stmt, test);
}
if checker.is_rule_enabled(Rule::AssertFalse) {
flake8_bugbear::rules::assert_false(checker, stmt, &assert_stmt.test, assert_stmt.msg.as_deref());
flake8_bugbear::rules::assert_false(checker, stmt, test, msg.as_deref());
}
if checker.is_rule_enabled(Rule::PytestAssertAlwaysFalse) {
flake8_pytest_style::rules::assert_falsy(checker, stmt, &assert_stmt.test);
flake8_pytest_style::rules::assert_falsy(checker, stmt, test);
}
if checker.is_rule_enabled(Rule::PytestCompositeAssertion) {
flake8_pytest_style::rules::composite_condition(
checker,
stmt,
&assert_stmt.test,
assert_stmt.msg.as_deref(),
test,
msg.as_deref(),
);
}
if checker.is_rule_enabled(Rule::AssertOnStringLiteral) {
pylint::rules::assert_on_string_literal(checker, &assert_stmt.test);
pylint::rules::assert_on_string_literal(checker, test);
}
if checker.is_rule_enabled(Rule::InvalidMockAccess) {
pygrep_hooks::rules::non_existent_mock_method(checker, &assert_stmt.test);
pygrep_hooks::rules::non_existent_mock_method(checker, test);
}
if checker.is_rule_enabled(Rule::AssertWithPrintMessage) {
ruff::rules::assert_with_print_message(checker, assert_stmt);
@@ -1129,18 +1151,18 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
ruff::rules::invalid_assert_message_literal_argument(checker, assert_stmt);
}
}
Stmt::With(with_stmt) => {
Stmt::With(with_stmt @ ast::StmtWith { items, body, .. }) => {
if checker.is_rule_enabled(Rule::TooManyNestedBlocks) {
pylint::rules::too_many_nested_blocks(checker, stmt);
}
if checker.is_rule_enabled(Rule::AssertRaisesException) {
flake8_bugbear::rules::assert_raises_exception(checker, &with_stmt.items);
flake8_bugbear::rules::assert_raises_exception(checker, items);
}
if checker.is_rule_enabled(Rule::PytestRaisesWithMultipleStatements) {
flake8_pytest_style::rules::complex_raises(checker, stmt, &with_stmt.items, &with_stmt.body);
flake8_pytest_style::rules::complex_raises(checker, stmt, items, body);
}
if checker.is_rule_enabled(Rule::PytestWarnsWithMultipleStatements) {
flake8_pytest_style::rules::complex_warns(checker, stmt, &with_stmt.items, &with_stmt.body);
flake8_pytest_style::rules::complex_warns(checker, stmt, items, body);
}
if checker.is_rule_enabled(Rule::MultipleWithStatements) {
flake8_simplify::rules::multiple_with_statements(
@@ -1162,10 +1184,10 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::useless_with_lock(checker, with_stmt);
}
if checker.is_rule_enabled(Rule::CancelScopeNoCheckpoint) {
flake8_async::rules::cancel_scope_no_checkpoint(checker, with_stmt, &with_stmt.items);
flake8_async::rules::cancel_scope_no_checkpoint(checker, with_stmt, items);
}
}
Stmt::While(while_stmt) => {
Stmt::While(while_stmt @ ast::StmtWhile { body, orelse, .. }) => {
if checker.is_rule_enabled(Rule::TooManyNestedBlocks) {
pylint::rules::too_many_nested_blocks(checker, stmt);
}
@@ -1173,19 +1195,29 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_bugbear::rules::function_uses_loop_variable(checker, &Node::Stmt(stmt));
}
if checker.is_rule_enabled(Rule::UselessElseOnLoop) {
pylint::rules::useless_else_on_loop(checker, stmt, &while_stmt.body, &while_stmt.orelse);
pylint::rules::useless_else_on_loop(checker, stmt, body, orelse);
}
if checker.is_rule_enabled(Rule::TryExceptInLoop) {
perflint::rules::try_except_in_loop(checker, &while_stmt.body);
perflint::rules::try_except_in_loop(checker, body);
}
if checker.is_rule_enabled(Rule::AsyncBusyWait) {
flake8_async::rules::async_busy_wait(checker, while_stmt);
}
if checker.is_rule_enabled(Rule::NeedlessElse) {
ruff::rules::needless_else(checker, (&**while_stmt).into());
ruff::rules::needless_else(checker, while_stmt.into());
}
}
Stmt::For(for_stmt) => {
Stmt::For(
for_stmt @ ast::StmtFor {
target,
body,
iter,
orelse,
is_async,
range: _,
node_index: _,
},
) => {
if checker.is_rule_enabled(Rule::TooManyNestedBlocks) {
pylint::rules::too_many_nested_blocks(checker, stmt);
}
@@ -1203,25 +1235,25 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
checker.analyze.for_loops.push(checker.semantic.snapshot());
}
if checker.is_rule_enabled(Rule::LoopVariableOverridesIterator) {
flake8_bugbear::rules::loop_variable_overrides_iterator(checker, &for_stmt.target, &for_stmt.iter);
flake8_bugbear::rules::loop_variable_overrides_iterator(checker, target, iter);
}
if checker.is_rule_enabled(Rule::FunctionUsesLoopVariable) {
flake8_bugbear::rules::function_uses_loop_variable(checker, &Node::Stmt(stmt));
}
if checker.is_rule_enabled(Rule::ReuseOfGroupbyGenerator) {
flake8_bugbear::rules::reuse_of_groupby_generator(checker, &for_stmt.target, &for_stmt.body, &for_stmt.iter);
flake8_bugbear::rules::reuse_of_groupby_generator(checker, target, body, iter);
}
if checker.is_rule_enabled(Rule::UselessElseOnLoop) {
pylint::rules::useless_else_on_loop(checker, stmt, &for_stmt.body, &for_stmt.orelse);
pylint::rules::useless_else_on_loop(checker, stmt, body, orelse);
}
if checker.is_rule_enabled(Rule::RedefinedLoopName) {
pylint::rules::redefined_loop_name(checker, stmt);
}
if checker.is_rule_enabled(Rule::IterationOverSet) {
pylint::rules::iteration_over_set(checker, &for_stmt.iter);
pylint::rules::iteration_over_set(checker, iter);
}
if checker.is_rule_enabled(Rule::DictIterMissingItems) {
pylint::rules::dict_iter_missing_items(checker, &for_stmt.target, &for_stmt.iter);
pylint::rules::dict_iter_missing_items(checker, target, iter);
}
if checker.is_rule_enabled(Rule::ManualListCopy) {
perflint::rules::manual_list_copy(checker, for_stmt);
@@ -1231,7 +1263,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::modified_iterating_set(checker, for_stmt);
}
if checker.is_rule_enabled(Rule::UnnecessaryListCast) {
perflint::rules::unnecessary_list_cast(checker, &for_stmt.iter, &for_stmt.body);
perflint::rules::unnecessary_list_cast(checker, iter, body);
}
if checker.is_rule_enabled(Rule::UnnecessaryListIndexLookup) {
pylint::rules::unnecessary_list_index_lookup(checker, for_stmt);
@@ -1242,7 +1274,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
if checker.is_rule_enabled(Rule::ReadlinesInFor) {
refurb::rules::readlines_in_for(checker, for_stmt);
}
if !for_stmt.is_async {
if !*is_async {
if checker.is_rule_enabled(Rule::ReimplementedBuiltin) {
flake8_simplify::rules::convert_for_loop_to_any_all(checker, stmt);
}
@@ -1250,7 +1282,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_simplify::rules::key_in_dict_for(checker, for_stmt);
}
if checker.is_rule_enabled(Rule::TryExceptInLoop) {
perflint::rules::try_except_in_loop(checker, &for_stmt.body);
perflint::rules::try_except_in_loop(checker, body);
}
if checker.is_rule_enabled(Rule::ForLoopSetMutations) {
refurb::rules::for_loop_set_mutations(checker, for_stmt);
@@ -1260,133 +1292,141 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
if checker.is_rule_enabled(Rule::NeedlessElse) {
ruff::rules::needless_else(checker, (&**for_stmt).into());
ruff::rules::needless_else(checker, for_stmt.into());
}
}
Stmt::Try(try_stmt) => {
Stmt::Try(
try_stmt @ ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
},
) => {
if checker.is_rule_enabled(Rule::TooManyNestedBlocks) {
pylint::rules::too_many_nested_blocks(checker, stmt);
}
if checker.is_rule_enabled(Rule::JumpStatementInFinally) {
flake8_bugbear::rules::jump_statement_in_finally(checker, &try_stmt.finalbody);
flake8_bugbear::rules::jump_statement_in_finally(checker, finalbody);
}
if checker.is_rule_enabled(Rule::ContinueInFinally) {
if checker.target_version() <= PythonVersion::PY38 {
pylint::rules::continue_in_finally(checker, &try_stmt.finalbody);
pylint::rules::continue_in_finally(checker, finalbody);
}
}
if checker.is_rule_enabled(Rule::DefaultExceptNotLast) {
pyflakes::rules::default_except_not_last(checker, &try_stmt.handlers, checker.locator);
pyflakes::rules::default_except_not_last(checker, handlers, checker.locator);
}
if checker.any_rule_enabled(&[
Rule::DuplicateHandlerException,
Rule::DuplicateTryBlockException,
]) {
flake8_bugbear::rules::duplicate_exceptions(checker, &try_stmt.handlers);
flake8_bugbear::rules::duplicate_exceptions(checker, handlers);
}
if checker.is_rule_enabled(Rule::RedundantTupleInExceptionHandler) {
flake8_bugbear::rules::redundant_tuple_in_exception_handler(checker, &try_stmt.handlers);
flake8_bugbear::rules::redundant_tuple_in_exception_handler(checker, handlers);
}
if checker.is_rule_enabled(Rule::OSErrorAlias) {
pyupgrade::rules::os_error_alias_handlers(checker, &try_stmt.handlers);
pyupgrade::rules::os_error_alias_handlers(checker, handlers);
}
if checker.is_rule_enabled(Rule::TimeoutErrorAlias) {
if checker.target_version() >= PythonVersion::PY310 {
pyupgrade::rules::timeout_error_alias_handlers(checker, &try_stmt.handlers);
pyupgrade::rules::timeout_error_alias_handlers(checker, handlers);
}
}
if checker.is_rule_enabled(Rule::PytestAssertInExcept) {
flake8_pytest_style::rules::assert_in_exception_handler(checker, &try_stmt.handlers);
flake8_pytest_style::rules::assert_in_exception_handler(checker, handlers);
}
if checker.is_rule_enabled(Rule::SuppressibleException) {
flake8_simplify::rules::suppressible_exception(
checker, stmt, &try_stmt.body, &try_stmt.handlers, &try_stmt.orelse, &try_stmt.finalbody,
checker, stmt, body, handlers, orelse, finalbody,
);
}
if checker.is_rule_enabled(Rule::ReturnInTryExceptFinally) {
flake8_simplify::rules::return_in_try_except_finally(
checker, &try_stmt.body, &try_stmt.handlers, &try_stmt.finalbody,
checker, body, handlers, finalbody,
);
}
if checker.is_rule_enabled(Rule::TryConsiderElse) {
tryceratops::rules::try_consider_else(checker, &try_stmt.body, &try_stmt.orelse, &try_stmt.handlers);
tryceratops::rules::try_consider_else(checker, body, orelse, handlers);
}
if checker.is_rule_enabled(Rule::VerboseRaise) {
tryceratops::rules::verbose_raise(checker, &try_stmt.handlers);
tryceratops::rules::verbose_raise(checker, handlers);
}
if checker.is_rule_enabled(Rule::VerboseLogMessage) {
tryceratops::rules::verbose_log_message(checker, &try_stmt.handlers);
tryceratops::rules::verbose_log_message(checker, handlers);
}
if checker.is_rule_enabled(Rule::RaiseWithinTry) {
tryceratops::rules::raise_within_try(checker, &try_stmt.body, &try_stmt.handlers);
tryceratops::rules::raise_within_try(checker, body, handlers);
}
if checker.is_rule_enabled(Rule::UselessTryExcept) {
tryceratops::rules::useless_try_except(checker, &try_stmt.handlers);
tryceratops::rules::useless_try_except(checker, handlers);
}
if checker.is_rule_enabled(Rule::ErrorInsteadOfException) {
tryceratops::rules::error_instead_of_exception(checker, &try_stmt.handlers);
tryceratops::rules::error_instead_of_exception(checker, handlers);
}
if checker.is_rule_enabled(Rule::NeedlessElse) {
ruff::rules::needless_else(checker, (&**try_stmt).into());
ruff::rules::needless_else(checker, try_stmt.into());
}
}
Stmt::Assign(assign) => {
Stmt::Assign(assign @ ast::StmtAssign { targets, value, .. }) => {
if checker.is_rule_enabled(Rule::SelfOrClsAssignment) {
for target in &assign.targets {
for target in targets {
pylint::rules::self_or_cls_assignment(checker, target);
}
}
if checker.is_rule_enabled(Rule::RedeclaredAssignedName) {
pylint::rules::redeclared_assigned_name(checker, &assign.targets);
pylint::rules::redeclared_assigned_name(checker, targets);
}
if checker.is_rule_enabled(Rule::LambdaAssignment) {
if let [target] = &assign.targets[..] {
pycodestyle::rules::lambda_assignment(checker, target, &assign.value, None, stmt);
if let [target] = &targets[..] {
pycodestyle::rules::lambda_assignment(checker, target, value, None, stmt);
}
}
if checker.is_rule_enabled(Rule::AssignmentToOsEnviron) {
flake8_bugbear::rules::assignment_to_os_environ(checker, &assign.targets);
flake8_bugbear::rules::assignment_to_os_environ(checker, targets);
}
if checker.is_rule_enabled(Rule::HardcodedPasswordString) {
flake8_bandit::rules::assign_hardcoded_password_string(checker, &assign.value, &assign.targets);
flake8_bandit::rules::assign_hardcoded_password_string(checker, value, targets);
}
if checker.is_rule_enabled(Rule::GlobalStatement) {
for target in &assign.targets {
if let Expr::Name(name_expr) = target {
pylint::rules::global_statement(checker, &name_expr.id);
for target in targets {
if let Expr::Name(ast::ExprName { id, .. }) = target {
pylint::rules::global_statement(checker, id);
}
}
}
if checker.is_rule_enabled(Rule::UselessMetaclassType) {
pyupgrade::rules::useless_metaclass_type(checker, stmt, &assign.value, &assign.targets);
pyupgrade::rules::useless_metaclass_type(checker, stmt, value, targets);
}
if checker.is_rule_enabled(Rule::ConvertTypedDictFunctionalToClass) {
pyupgrade::rules::convert_typed_dict_functional_to_class(
checker, stmt, &assign.targets, &assign.value,
checker, stmt, targets, value,
);
}
if checker.is_rule_enabled(Rule::ConvertNamedTupleFunctionalToClass) {
pyupgrade::rules::convert_named_tuple_functional_to_class(
checker, stmt, &assign.targets, &assign.value,
checker, stmt, targets, value,
);
}
if checker.is_rule_enabled(Rule::PandasDfVariableName) {
pandas_vet::rules::assignment_to_df(checker, &assign.targets);
pandas_vet::rules::assignment_to_df(checker, targets);
}
if checker.is_rule_enabled(Rule::AirflowVariableNameTaskIdMismatch) {
airflow::rules::variable_name_task_id(checker, &assign.targets, &assign.value);
airflow::rules::variable_name_task_id(checker, targets, value);
}
if checker.is_rule_enabled(Rule::SelfAssigningVariable) {
pylint::rules::self_assignment(checker, assign);
}
if checker.is_rule_enabled(Rule::TypeParamNameMismatch) {
pylint::rules::type_param_name_mismatch(checker, &assign.value, &assign.targets);
pylint::rules::type_param_name_mismatch(checker, value, targets);
}
if checker.is_rule_enabled(Rule::TypeNameIncorrectVariance) {
pylint::rules::type_name_incorrect_variance(checker, &assign.value);
pylint::rules::type_name_incorrect_variance(checker, value);
}
if checker.is_rule_enabled(Rule::TypeBivariance) {
pylint::rules::type_bivariance(checker, &assign.value);
pylint::rules::type_bivariance(checker, value);
}
if checker.is_rule_enabled(Rule::NonAugmentedAssignment) {
pylint::rules::non_augmented_assignment(checker, assign);
@@ -1409,14 +1449,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
.any(|scope| scope.kind.is_function())
{
if checker.is_rule_enabled(Rule::UnprefixedTypeParam) {
flake8_pyi::rules::prefix_type_params(checker, &assign.value, &assign.targets);
flake8_pyi::rules::prefix_type_params(checker, value, targets);
}
if checker.is_rule_enabled(Rule::AssignmentDefaultInStub) {
flake8_pyi::rules::assignment_default_in_stub(checker, &assign.targets, &assign.value);
flake8_pyi::rules::assignment_default_in_stub(checker, targets, value);
}
if checker.is_rule_enabled(Rule::UnannotatedAssignmentInStub) {
flake8_pyi::rules::unannotated_assignment_in_stub(
checker, &assign.targets, &assign.value,
checker, targets, value,
);
}
if checker.is_rule_enabled(Rule::ComplexAssignmentInStub) {
@@ -1424,7 +1464,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
if checker.is_rule_enabled(Rule::TypeAliasWithoutAnnotation) {
flake8_pyi::rules::type_alias_without_annotation(
checker, &assign.value, &assign.targets,
checker, value, targets,
);
}
}
@@ -1437,10 +1477,15 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pyupgrade::rules::non_pep695_type_alias_type(checker, assign);
}
}
Stmt::AnnAssign(assign_stmt) => {
let target = &assign_stmt.target;
let annotation = &assign_stmt.annotation;
if let Some(value) = &assign_stmt.value {
Stmt::AnnAssign(
assign_stmt @ ast::StmtAnnAssign {
target,
value,
annotation,
..
},
) => {
if let Some(value) = value {
if checker.is_rule_enabled(Rule::LambdaAssignment) {
pycodestyle::rules::lambda_assignment(
checker,
@@ -1461,7 +1506,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_bugbear::rules::unintentional_type_annotation(
checker,
target,
assign_stmt.value.as_deref(),
value.as_deref(),
stmt,
);
}
@@ -1469,7 +1514,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pyupgrade::rules::non_pep695_type_alias(checker, assign_stmt);
}
if checker.is_rule_enabled(Rule::HardcodedPasswordString) {
if let Some(value) = assign_stmt.value.as_deref() {
if let Some(value) = value.as_deref() {
flake8_bandit::rules::assign_hardcoded_password_string(
checker,
value,
@@ -1481,7 +1526,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
ruff::rules::sort_dunder_all_ann_assign(checker, assign_stmt);
}
if checker.source_type.is_stub() {
if let Some(value) = &assign_stmt.value {
if let Some(value) = value {
if checker.is_rule_enabled(Rule::AssignmentDefaultInStub) {
// Ignore assignments in function bodies; those are covered by other rules.
if !checker
@@ -1518,8 +1563,7 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
}
}
}
Stmt::TypeAlias(type_alias) => {
let name = &type_alias.name;
Stmt::TypeAlias(ast::StmtTypeAlias { name, .. }) => {
if checker.is_rule_enabled(Rule::SnakeCaseTypeAlias) {
flake8_pyi::rules::snake_case_type_alias(checker, name);
}
@@ -1527,12 +1571,17 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
flake8_pyi::rules::t_suffixed_type_alias(checker, name);
}
}
Stmt::Delete(delete) => {
let targets = &delete.targets;
Stmt::Delete(
delete @ ast::StmtDelete {
targets,
range: _,
node_index: _,
},
) => {
if checker.is_rule_enabled(Rule::GlobalStatement) {
for target in targets {
if let Expr::Name(name_expr) = target {
pylint::rules::global_statement(checker, &name_expr.id);
if let Expr::Name(ast::ExprName { id, .. }) = target {
pylint::rules::global_statement(checker, id);
}
}
}
@@ -1569,13 +1618,12 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
pylint::rules::useless_exception_statement(checker, expr);
}
}
Stmt::Match(match_stmt) => {
let ast::StmtMatch {
subject: _,
cases,
range: _,
node_index: _,
} = &**match_stmt;
Stmt::Match(ast::StmtMatch {
subject: _,
cases,
range: _,
node_index: _,
}) => {
if checker.is_rule_enabled(Rule::NanComparison) {
pylint::rules::nan_comparison_match(checker, cases);
}

View File

@@ -437,15 +437,6 @@ impl<'a> Checker<'a> {
}
}
/// Returns the [`Tokens`] for the parsed source file.
///
///
/// Unlike [`Self::tokens`], this method always returns
/// the tokens for the current file, even when within a parsed type annotation.
pub(crate) fn source_tokens(&self) -> &'a Tokens {
self.parsed.tokens()
}
/// The [`Locator`] for the current file, which enables extraction of source code from byte
/// offsets.
pub(crate) const fn locator(&self) -> &'a Locator<'a> {
@@ -782,10 +773,7 @@ impl SemanticSyntaxContext for Checker<'_> {
for scope in self.semantic.current_scopes() {
match scope.kind {
ScopeKind::Class(_) | ScopeKind::Lambda(_) => return false,
ScopeKind::Function(function_def) => {
let is_async = &function_def.is_async;
return *is_async;
}
ScopeKind::Function(ast::StmtFunctionDef { is_async, .. }) => return *is_async,
ScopeKind::Generator { .. }
| ScopeKind::Module
| ScopeKind::Type
@@ -873,13 +861,9 @@ impl SemanticSyntaxContext for Checker<'_> {
for parent in self.semantic.current_statements().skip(1) {
match parent {
Stmt::For(node) => {
if !node.orelse.contains(child) {
return true;
}
}
Stmt::While(node) => {
if !node.orelse.contains(child) {
Stmt::For(ast::StmtFor { orelse, .. })
| Stmt::While(ast::StmtWhile { orelse, .. }) => {
if !orelse.contains(child) {
return true;
}
}
@@ -895,8 +879,7 @@ impl SemanticSyntaxContext for Checker<'_> {
fn is_bound_parameter(&self, name: &str) -> bool {
match self.semantic.current_scope().kind {
ScopeKind::Function(function_def) => {
let parameters = &function_def.parameters;
ScopeKind::Function(ast::StmtFunctionDef { parameters, .. }) => {
parameters.includes(name)
}
ScopeKind::Class(_)
@@ -940,13 +923,12 @@ impl<'a> Visitor<'a> for Checker<'a> {
{
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
}
Stmt::ImportFrom(node) => {
Stmt::ImportFrom(ast::StmtImportFrom { module, names, .. }) => {
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
// Allow __future__ imports until we see a non-__future__ import.
if let Some("__future__") = node.module.as_deref() {
if node
.names
if let Some("__future__") = module.as_deref() {
if names
.iter()
.any(|alias| alias.name.as_str() == "annotations")
{
@@ -990,22 +972,20 @@ impl<'a> Visitor<'a> for Checker<'a> {
// Step 1: Binding
match stmt {
Stmt::AugAssign(node) => {
let ast::StmtAugAssign {
target,
op: _,
value: _,
range: _,
node_index: _,
} = &**node;
Stmt::AugAssign(ast::StmtAugAssign {
target,
op: _,
value: _,
range: _,
node_index: _,
}) => {
self.handle_node_load(target);
}
Stmt::Import(node) => {
let ast::StmtImport {
names,
range: _,
node_index: _,
} = &**node;
Stmt::Import(ast::StmtImport {
names,
range: _,
node_index: _,
}) => {
if self.semantic.at_top_level() {
self.importer.visit_import(stmt);
}
@@ -1054,14 +1034,13 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}
}
Stmt::ImportFrom(node) => {
let ast::StmtImportFrom {
names,
module,
level,
range: _,
node_index: _,
} = &**node;
Stmt::ImportFrom(ast::StmtImportFrom {
names,
module,
level,
range: _,
node_index: _,
}) => {
if self.semantic.at_top_level() {
self.importer.visit_import(stmt);
}
@@ -1122,12 +1101,11 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}
}
Stmt::Global(node) => {
let ast::StmtGlobal {
names,
range: _,
node_index: _,
} = &**node;
Stmt::Global(ast::StmtGlobal {
names,
range: _,
node_index: _,
}) => {
if !self.semantic.scope_id.is_global() {
for name in names {
let binding_id = self.semantic.global_scope().get(name);
@@ -1149,12 +1127,11 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
}
}
Stmt::Nonlocal(node) => {
let ast::StmtNonlocal {
names,
range: _,
node_index: _,
} = &**node;
Stmt::Nonlocal(ast::StmtNonlocal {
names,
range: _,
node_index: _,
}) => {
if !self.semantic.scope_id.is_global() {
for name in names {
if let Some((scope_id, binding_id)) = self.semantic.nonlocal(name) {
@@ -1188,13 +1165,17 @@ impl<'a> Visitor<'a> for Checker<'a> {
// Step 2: Traversal
match stmt {
Stmt::FunctionDef(function_def) => {
let name = &function_def.name;
let body = &function_def.body;
let parameters = &function_def.parameters;
let decorator_list = &function_def.decorator_list;
let returns = &function_def.returns;
let type_params = &function_def.type_params;
Stmt::FunctionDef(
function_def @ ast::StmtFunctionDef {
name,
body,
parameters,
decorator_list,
returns,
type_params,
..
},
) => {
// Visit the decorators and arguments, but avoid the body, which will be
// deferred.
for decorator in decorator_list {
@@ -1323,12 +1304,16 @@ impl<'a> Visitor<'a> for Checker<'a> {
BindingFlags::empty(),
);
}
Stmt::ClassDef(class_def) => {
let name = &class_def.name;
let body = &class_def.body;
let arguments = &class_def.arguments;
let decorator_list = &class_def.decorator_list;
let type_params = &class_def.type_params;
Stmt::ClassDef(
class_def @ ast::StmtClassDef {
name,
body,
arguments,
decorator_list,
type_params,
..
},
) => {
for decorator in decorator_list {
self.visit_decorator(decorator);
}
@@ -1375,20 +1360,30 @@ impl<'a> Visitor<'a> for Checker<'a> {
BindingFlags::empty(),
);
}
Stmt::TypeAlias(node) => {
Stmt::TypeAlias(ast::StmtTypeAlias {
range: _,
node_index: _,
name,
type_params,
value,
}) => {
self.semantic.push_scope(ScopeKind::Type);
if let Some(type_params) = &node.type_params {
if let Some(type_params) = type_params {
self.visit_type_params(type_params);
}
self.visit_deferred_type_alias_value(&node.value);
self.visit_deferred_type_alias_value(value);
self.semantic.pop_scope();
self.visit_expr(&node.name);
self.visit_expr(name);
}
Stmt::Try(try_node) => {
let body = &try_node.body;
let handlers = &try_node.handlers;
let orelse = &try_node.orelse;
let finalbody = &try_node.finalbody;
Stmt::Try(
try_node @ ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
},
) => {
// Iterate over the `body`, then the `handlers`, then the `orelse`, then the
// `finalbody`, but treat the body and the `orelse` as a single branch for
// flow analysis purposes.
@@ -1414,60 +1409,64 @@ impl<'a> Visitor<'a> for Checker<'a> {
self.visit_body(finalbody);
self.semantic.pop_branch();
}
Stmt::AnnAssign(node) => {
Stmt::AnnAssign(ast::StmtAnnAssign {
target,
annotation,
value,
..
}) => {
match AnnotationContext::from_model(
&self.semantic,
self.settings(),
self.target_version(),
) {
AnnotationContext::RuntimeRequired => {
self.visit_runtime_required_annotation(&node.annotation);
self.visit_runtime_required_annotation(annotation);
}
AnnotationContext::RuntimeEvaluated
if flake8_type_checking::helpers::is_dataclass_meta_annotation(
&node.annotation,
annotation,
self.semantic(),
) =>
{
self.visit_runtime_required_annotation(&node.annotation);
self.visit_runtime_required_annotation(annotation);
}
AnnotationContext::RuntimeEvaluated => {
self.visit_runtime_evaluated_annotation(&node.annotation);
self.visit_runtime_evaluated_annotation(annotation);
}
AnnotationContext::TypingOnly
if flake8_type_checking::helpers::is_dataclass_meta_annotation(
&node.annotation,
annotation,
self.semantic(),
) =>
{
if let Expr::Subscript(subscript) = &*node.annotation {
if let Expr::Subscript(subscript) = &**annotation {
// Ex) `InitVar[str]`
self.visit_runtime_required_annotation(&subscript.value);
self.visit_annotation(&subscript.slice);
} else {
// Ex) `InitVar`
self.visit_runtime_required_annotation(&node.annotation);
self.visit_runtime_required_annotation(annotation);
}
}
AnnotationContext::TypingOnly => self.visit_annotation(&node.annotation),
AnnotationContext::TypingOnly => self.visit_annotation(annotation),
}
if let Some(expr) = &node.value {
if self.semantic.match_typing_expr(&node.annotation, "TypeAlias") {
if let Some(expr) = value {
if self.semantic.match_typing_expr(annotation, "TypeAlias") {
self.visit_annotated_type_alias_value(expr);
} else {
self.visit_expr(expr);
}
}
self.visit_expr(&node.target);
self.visit_expr(target);
}
Stmt::Assert(node) => {
let ast::StmtAssert {
test,
msg,
range: _,
node_index: _,
} = &**node;
Stmt::Assert(ast::StmtAssert {
test,
msg,
range: _,
node_index: _,
}) => {
let snapshot = self.semantic.flags;
self.semantic.flags |= SemanticModelFlags::ASSERT_STATEMENT;
self.visit_boolean_test(test);
@@ -1476,14 +1475,13 @@ impl<'a> Visitor<'a> for Checker<'a> {
}
self.semantic.flags = snapshot;
}
Stmt::With(node) => {
let ast::StmtWith {
items,
body,
is_async: _,
range: _,
node_index: _,
} = &**node;
Stmt::With(ast::StmtWith {
items,
body,
is_async: _,
range: _,
node_index: _,
}) => {
for item in items {
self.visit_with_item(item);
}
@@ -1491,22 +1489,26 @@ impl<'a> Visitor<'a> for Checker<'a> {
self.visit_body(body);
self.semantic.pop_branch();
}
Stmt::While(node) => {
let ast::StmtWhile {
test,
body,
orelse,
range: _,
node_index: _,
} = &**node;
Stmt::While(ast::StmtWhile {
test,
body,
orelse,
range: _,
node_index: _,
}) => {
self.visit_boolean_test(test);
self.visit_body(body);
self.visit_body(orelse);
}
Stmt::If(stmt_if) => {
let test = &stmt_if.test;
let body = &stmt_if.body;
let elif_else_clauses = &stmt_if.elif_else_clauses;
Stmt::If(
stmt_if @ ast::StmtIf {
test,
body,
elif_else_clauses,
range: _,
node_index: _,
},
) => {
self.visit_boolean_test(test);
self.semantic.push_branch();
@@ -1531,14 +1533,14 @@ impl<'a> Visitor<'a> for Checker<'a> {
if self.semantic().at_top_level() || self.semantic().current_scope().kind.is_class() {
match stmt {
Stmt::Assign(node) => {
if let [Expr::Name(_)] = node.targets.as_slice() {
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
if let [Expr::Name(_)] = targets.as_slice() {
self.docstring_state =
DocstringState::Expected(ExpectedDocstringKind::Attribute);
}
}
Stmt::AnnAssign(node) => {
if node.target.is_name_expr() {
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if target.is_name_expr() {
self.docstring_state =
DocstringState::Expected(ExpectedDocstringKind::Attribute);
}
@@ -2679,13 +2681,13 @@ impl<'a> Checker<'a> {
match parent {
Stmt::TypeAlias(_) => flags.insert(BindingFlags::DEFERRED_TYPE_ALIAS),
Stmt::AnnAssign(node) => {
Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. }) => {
// TODO: It is a bit unfortunate that we do this check twice
// maybe we should change how we visit this statement
// so the semantic flag for the type alias sticks around
// until after we've handled this store, so we can check
// the flag instead of duplicating this check
if self.semantic.match_typing_expr(&node.annotation, "TypeAlias") {
if self.semantic.match_typing_expr(annotation, "TypeAlias") {
flags.insert(BindingFlags::ANNOTATED_TYPE_ALIAS);
}
}
@@ -2696,22 +2698,22 @@ impl<'a> Checker<'a> {
if scope.kind.is_module()
&& match parent {
Stmt::Assign(node) => {
if let Some(Expr::Name(ast::ExprName { id, .. })) = node.targets.first() {
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
id == "__all__"
} else {
false
}
}
Stmt::AugAssign(node) => {
if let Expr::Name(ast::ExprName { id, .. }) = node.target.as_ref() {
Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
id == "__all__"
} else {
false
}
}
Stmt::AnnAssign(node) => {
if let Expr::Name(ast::ExprName { id, .. }) = node.target.as_ref() {
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
id == "__all__"
} else {
false
@@ -2754,8 +2756,10 @@ impl<'a> Checker<'a> {
// Match the left-hand side of an annotated assignment without a value,
// like `x` in `x: int`. N.B. In stub files, these should be viewed
// as assignments on par with statements such as `x: int = 5`.
if matches!(parent, Stmt::AnnAssign(node) if node.value.is_none())
&& !self.semantic.in_annotation()
if matches!(
parent,
Stmt::AnnAssign(ast::StmtAnnAssign { value: None, .. })
) && !self.semantic.in_annotation()
{
self.add_binding(id, expr.range(), BindingKind::Annotation, flags);
return;
@@ -3027,16 +3031,19 @@ impl<'a> Checker<'a> {
let stmt = self.semantic.current_statement();
let Stmt::FunctionDef(node) = stmt else {
let Stmt::FunctionDef(ast::StmtFunctionDef {
body, parameters, ..
}) = stmt
else {
unreachable!("Expected Stmt::FunctionDef")
};
self.with_semantic_checker(|semantic, context| semantic.visit_stmt(stmt, context));
self.visit_parameters(&node.parameters);
self.visit_parameters(parameters);
// Set the docstring state before visiting the function body.
self.docstring_state = DocstringState::Expected(ExpectedDocstringKind::Function);
self.visit_body(&node.body);
self.visit_body(body);
}
}
self.semantic.restore(snapshot);

View File

@@ -12,20 +12,17 @@ use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
};
use crate::preview::is_range_suppressions_enabled;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::suppression::Suppressions;
use crate::{Edit, Fix, Locator};
use super::ast::LintContext;
/// RUF100
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
context: &mut LintContext,
path: &Path,
@@ -34,7 +31,6 @@ pub(crate) fn check_noqa(
noqa_line_for: &NoqaMapping,
analyze_directives: bool,
settings: &LinterSettings,
suppressions: &Suppressions,
) -> Vec<usize> {
// Identify any codes that are globally exempted (within the current file).
let file_noqa_directives =
@@ -44,7 +40,7 @@ pub(crate) fn check_noqa(
let mut noqa_directives =
NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);
if file_noqa_directives.is_empty() && noqa_directives.is_empty() && suppressions.is_empty() {
if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
return Vec::new();
}
@@ -64,19 +60,11 @@ pub(crate) fn check_noqa(
continue;
}
// Apply file-level suppressions first
if exemption.contains_secondary_code(code) {
ignored_diagnostics.push(index);
continue;
}
// Apply ranged suppressions next
if is_range_suppressions_enabled(settings) && suppressions.check_diagnostic(diagnostic) {
ignored_diagnostics.push(index);
continue;
}
// Apply end-of-line noqa suppressions last
let noqa_offsets = diagnostic
.parent()
.into_iter()
@@ -119,9 +107,6 @@ pub(crate) fn check_noqa(
}
}
// Diagnostics for unused/invalid range suppressions
suppressions.check_suppressions(context, locator);
// Enforce that the noqa directive was actually used (RUF100), unless RUF100 was itself
// suppressed.
if context.is_rule_enabled(Rule::UnusedNOQA)
@@ -143,13 +128,8 @@ pub(crate) fn check_noqa(
Directive::All(directive) => {
if matches.is_empty() {
let edit = delete_comment(directive.range(), locator);
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: None,
kind: ruff::rules::UnusedNOQAKind::Noqa,
},
directive.range(),
);
let mut diagnostic = context
.report_diagnostic(UnusedNOQA { codes: None }, directive.range());
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);
diagnostic.set_fix(Fix::safe_edit(edit));
}
@@ -244,7 +224,6 @@ pub(crate) fn check_noqa(
.map(|code| (*code).to_string())
.collect(),
}),
kind: ruff::rules::UnusedNOQAKind::Noqa,
},
directive.range(),
);

View File

@@ -3,13 +3,14 @@
use anyhow::{Context, Result};
use ruff_python_ast::AnyNodeRef;
use ruff_python_ast::token::{self, Tokens, parenthesized_range};
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt};
use ruff_python_codegen::Stylist;
use ruff_python_index::Indexer;
use ruff_python_trivia::textwrap::dedent_to;
use ruff_python_trivia::{
PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content, is_python_whitespace,
CommentRanges, PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content,
is_python_whitespace,
};
use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -127,8 +128,8 @@ pub(crate) fn make_redundant_alias<'a>(
stmt: &Stmt,
) -> Vec<Edit> {
let aliases = match stmt {
Stmt::Import(node) => &node.names,
Stmt::ImportFrom(node) => &node.names,
Stmt::Import(ast::StmtImport { names, .. }) => names,
Stmt::ImportFrom(ast::StmtImportFrom { names, .. }) => names,
_ => {
return Vec::new();
}
@@ -208,7 +209,7 @@ pub(crate) fn remove_argument<T: Ranged>(
arguments: &Arguments,
parentheses: Parentheses,
source: &str,
tokens: &Tokens,
comment_ranges: &CommentRanges,
) -> Result<Edit> {
// Partition into arguments before and after the argument to remove.
let (before, after): (Vec<_>, Vec<_>) = arguments
@@ -223,7 +224,7 @@ pub(crate) fn remove_argument<T: Ranged>(
.context("Unable to find argument")?;
let parenthesized_range =
token::parenthesized_range(arg.value().into(), arguments.into(), tokens)
parenthesized_range(arg.value().into(), arguments.into(), comment_ranges, source)
.unwrap_or(arg.range());
if !after.is_empty() {
@@ -269,14 +270,25 @@ pub(crate) fn remove_argument<T: Ranged>(
///
/// The new argument will be inserted before the first existing keyword argument in `arguments`, if
/// there are any present. Otherwise, the new argument is added to the end of the argument list.
pub(crate) fn add_argument(argument: &str, arguments: &Arguments, tokens: &Tokens) -> Edit {
pub(crate) fn add_argument(
argument: &str,
arguments: &Arguments,
comment_ranges: &CommentRanges,
source: &str,
) -> Edit {
if let Some(ast::Keyword { range, value, .. }) = arguments.keywords.first() {
let keyword = parenthesized_range(value.into(), arguments.into(), tokens).unwrap_or(*range);
let keyword = parenthesized_range(value.into(), arguments.into(), comment_ranges, source)
.unwrap_or(*range);
Edit::insertion(format!("{argument}, "), keyword.start())
} else if let Some(last) = arguments.arguments_source_order().last() {
// Case 1: existing arguments, so append after the last argument.
let last = parenthesized_range(last.value().into(), arguments.into(), tokens)
.unwrap_or(last.range());
let last = parenthesized_range(
last.value().into(),
arguments.into(),
comment_ranges,
source,
)
.unwrap_or(last.range());
Edit::insertion(format!(", {argument}"), last.end())
} else {
// Case 2: no arguments. Add argument, without any trailing comma.
@@ -404,46 +416,43 @@ fn is_only<T: PartialEq>(vec: &[T], value: &T) -> bool {
/// Determine if a child is the only statement in its body.
fn is_lone_child(child: &Stmt, parent: &Stmt) -> bool {
match parent {
Stmt::FunctionDef(node) => {
if is_only(&node.body, child) {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. })
| Stmt::ClassDef(ast::StmtClassDef { body, .. })
| Stmt::With(ast::StmtWith { body, .. }) => {
if is_only(body, child) {
return true;
}
}
Stmt::ClassDef(node) => {
if is_only(&node.body, child) {
Stmt::For(ast::StmtFor { body, orelse, .. })
| Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
if is_only(body, child) || is_only(orelse, child) {
return true;
}
}
Stmt::With(node) => {
if is_only(&node.body, child) {
return true;
}
}
Stmt::For(node) => {
if is_only(&node.body, child) || is_only(&node.orelse, child) {
return true;
}
}
Stmt::While(node) => {
if is_only(&node.body, child) || is_only(&node.orelse, child) {
return true;
}
}
Stmt::If(node) => {
if is_only(&node.body, child)
|| node
.elif_else_clauses
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
if is_only(body, child)
|| elif_else_clauses
.iter()
.any(|ast::ElifElseClause { body, .. }| is_only(body, child))
{
return true;
}
}
Stmt::Try(node) => {
if is_only(&node.body, child)
|| is_only(&node.orelse, child)
|| is_only(&node.finalbody, child)
|| node.handlers.iter().any(|handler| match handler {
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
if is_only(body, child)
|| is_only(orelse, child)
|| is_only(finalbody, child)
|| handlers.iter().any(|handler| match handler {
ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
body, ..
}) => is_only(body, child),
@@ -452,8 +461,8 @@ fn is_lone_child(child: &Stmt, parent: &Stmt) -> bool {
return true;
}
}
Stmt::Match(node) => {
if node.cases.iter().any(|case| is_only(&case.body, child)) {
Stmt::Match(ast::StmtMatch { cases, .. }) => {
if cases.iter().any(|case| is_only(&case.body, child)) {
return true;
}
}

View File

@@ -236,10 +236,9 @@ impl<'a> Importer<'a> {
semantic: &SemanticModel<'a>,
type_checking_block: &Stmt,
) -> Option<&'a Stmt> {
let Stmt::If(node) = type_checking_block else {
let Stmt::If(ast::StmtIf { test, .. }) = type_checking_block else {
return None;
};
let test = &node.test;
let mut source = test;
while let Expr::Attribute(ast::ExprAttribute { value, .. }) = source.as_ref() {
@@ -454,10 +453,17 @@ impl<'a> Importer<'a> {
if stmt.start() >= at {
break;
}
if let Stmt::ImportFrom(node) = stmt {
if node.level == 0
&& node.module.as_ref().is_some_and(|name| name == module)
&& node.names.iter().all(|alias| alias.name.as_str() != "*")
if let Stmt::ImportFrom(ast::StmtImportFrom {
module: name,
names,
level,
range: _,
node_index: _,
}) = stmt
{
if *level == 0
&& name.as_ref().is_some_and(|name| name == module)
&& names.iter().all(|alias| alias.name.as_str() != "*")
{
import_from = Some(*stmt);
}

View File

@@ -32,7 +32,6 @@ use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Locator, directives, fs};
pub(crate) mod float;
@@ -129,7 +128,6 @@ pub fn check_path(
source_type: PySourceType,
parsed: &Parsed<ModModule>,
target_version: TargetVersion,
suppressions: &Suppressions,
) -> Vec<Diagnostic> {
// Aggregate all diagnostics.
let mut context = LintContext::new(path, locator.contents(), settings);
@@ -341,7 +339,6 @@ pub fn check_path(
&directives.noqa_line_for,
parsed.has_valid_syntax(),
settings,
suppressions,
);
if noqa.is_enabled() {
for index in ignored.iter().rev() {
@@ -403,9 +400,6 @@ pub fn add_noqa_to_path(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics, ignoring any existing `noqa` directives.
let diagnostics = check_path(
path,
@@ -420,7 +414,6 @@ pub fn add_noqa_to_path(
source_type,
&parsed,
target_version,
&suppressions,
);
// Add any missing `# noqa` pragmas.
@@ -434,7 +427,6 @@ pub fn add_noqa_to_path(
&directives.noqa_line_for,
stylist.line_ending(),
reason,
&suppressions,
)
}
@@ -469,9 +461,6 @@ pub fn lint_only(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics.
let diagnostics = check_path(
path,
@@ -486,7 +475,6 @@ pub fn lint_only(
source_type,
&parsed,
target_version,
&suppressions,
);
LinterResult {
@@ -578,9 +566,6 @@ pub fn lint_fix<'a>(
&indexer,
);
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics.
let diagnostics = check_path(
path,
@@ -595,7 +580,6 @@ pub fn lint_fix<'a>(
source_type,
&parsed,
target_version,
&suppressions,
);
if iterations == 0 {
@@ -785,7 +769,6 @@ mod tests {
use crate::registry::Rule;
use crate::settings::LinterSettings;
use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet};
use crate::{Locator, assert_diagnostics, directives, settings};
@@ -961,7 +944,6 @@ mod tests {
&locator,
&indexer,
);
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let mut diagnostics = check_path(
path,
None,
@@ -975,7 +957,6 @@ mod tests {
source_type,
&parsed,
target_version,
&suppressions,
);
diagnostics.sort_by(Diagnostic::ruff_start_ordering);
diagnostics

View File

@@ -20,14 +20,12 @@ use crate::Locator;
use crate::fs::relativize_path;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::suppression::Suppressions;
/// Generates an array of edits that matches the length of `messages`.
/// Each potential edit in the array is paired, in order, with the associated diagnostic.
/// Each edit will add a `noqa` comment to the appropriate line in the source to hide
/// the diagnostic. These edits may conflict with each other and should not be applied
/// simultaneously.
#[expect(clippy::too_many_arguments)]
pub fn generate_noqa_edits(
path: &Path,
diagnostics: &[Diagnostic],
@@ -36,19 +34,11 @@ pub fn generate_noqa_edits(
external: &[String],
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
suppressions: &Suppressions,
) -> Vec<Option<Edit>> {
let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
let exemption = FileExemption::from(&file_directives);
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
}
@@ -735,7 +725,6 @@ pub(crate) fn add_noqa(
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
suppressions: &Suppressions,
) -> Result<usize> {
let (count, output) = add_noqa_inner(
path,
@@ -746,7 +735,6 @@ pub(crate) fn add_noqa(
noqa_line_for,
line_ending,
reason,
suppressions,
);
fs::write(path, output)?;
@@ -763,7 +751,6 @@ fn add_noqa_inner(
noqa_line_for: &NoqaMapping,
line_ending: LineEnding,
reason: Option<&str>,
suppressions: &Suppressions,
) -> (usize, String) {
let mut count = 0;
@@ -773,14 +760,7 @@ fn add_noqa_inner(
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for);
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
@@ -879,7 +859,6 @@ fn find_noqa_comments<'a>(
exemption: &'a FileExemption,
directives: &'a NoqaDirectives,
noqa_line_for: &NoqaMapping,
suppressions: &'a Suppressions,
) -> Vec<Option<NoqaComment<'a>>> {
// List of noqa comments, ordered to match up with `messages`
let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
@@ -896,12 +875,6 @@ fn find_noqa_comments<'a>(
continue;
}
// Apply ranged suppressions next
if suppressions.check_diagnostic(message) {
comments_by_line.push(None);
continue;
}
// Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = message.parent() {
if let Some(directive_line) =
@@ -1280,7 +1253,6 @@ mod tests {
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
use crate::rules::pyflakes::rules::UnusedVariable;
use crate::rules::pyupgrade::rules::PrintfStringFormatting;
use crate::suppression::Suppressions;
use crate::{Edit, Violation};
use crate::{Locator, generate_noqa_edits};
@@ -2876,7 +2848,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 0);
assert_eq!(output, format!("{contents}"));
@@ -2901,7 +2872,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n");
@@ -2933,7 +2903,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@@ -2965,7 +2934,6 @@ mod tests {
&noqa_line_for,
LineEnding::Lf,
None,
&Suppressions::default(),
);
assert_eq!(count, 0);
assert_eq!(output, "x = 1 # noqa");
@@ -2988,7 +2956,6 @@ print(
let messages = [PrintfStringFormatting
.into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)];
let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits(
path,
&messages,
@@ -2997,7 +2964,6 @@ print(
&[],
&noqa_line_for,
LineEnding::Lf,
&suppressions,
);
assert_eq!(
edits,
@@ -3021,7 +2987,6 @@ bar =
[UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)];
let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits(
path,
&messages,
@@ -3030,7 +2995,6 @@ bar =
&[],
&noqa_line_for,
LineEnding::Lf,
&suppressions,
);
assert_eq!(
edits,

View File

@@ -9,11 +9,6 @@ use crate::settings::LinterSettings;
// Rule-specific behavior
// https://github.com/astral-sh/ruff/pull/21382
pub(crate) const fn is_custom_exception_checking_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/15541
pub(crate) const fn is_suspicious_function_reference_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@@ -291,8 +286,3 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/21623
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@@ -281,10 +281,12 @@ impl Renamer {
) -> Option<Edit> {
let statement = binding.statement(semantic)?;
let value = match statement {
ast::Stmt::Assign(node) => &node.value,
ast::Stmt::AnnAssign(node) => node.value.as_ref()?,
_ => return None,
let (ast::Stmt::Assign(ast::StmtAssign { value, .. })
| ast::Stmt::AnnAssign(ast::StmtAnnAssign {
value: Some(value), ..
})) = statement
else {
return None;
};
let ast::ExprCall {

View File

@@ -448,10 +448,11 @@ fn is_kwarg_parameter(semantic: &SemanticModel, name: &ExprName) -> bool {
return false;
};
let binding = semantic.binding(binding_id);
let Some(Stmt::FunctionDef(node)) = binding.statement(semantic) else {
let Some(Stmt::FunctionDef(StmtFunctionDef { parameters, .. })) = binding.statement(semantic)
else {
return false;
};
node.parameters
parameters
.kwarg
.as_deref()
.is_some_and(|kwarg| kwarg.name.as_str() == name.id.as_str())

View File

@@ -91,8 +91,8 @@ pub(crate) fn fastapi_redundant_response_model(checker: &Checker, function_def:
response_model_arg,
&call.arguments,
Parentheses::Preserve,
checker.source(),
checker.tokens(),
checker.locator().contents(),
checker.comment_ranges(),
)
.map(Fix::unsafe_edit)
});

View File

@@ -2,7 +2,7 @@
//!
//! See: <https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html>
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::Stmt;
use ruff_python_ast::{self as ast, Stmt};
use ruff_text_size::Ranged;
use crate::Violation;
@@ -371,8 +371,7 @@ pub(crate) fn suspicious_imports(checker: &Checker, stmt: &Stmt) {
}
match stmt {
Stmt::Import(node) => {
let names = &node.names;
Stmt::Import(ast::StmtImport { names, .. }) => {
for name in names {
match name.name.as_str() {
"telnetlib" => {
@@ -422,9 +421,8 @@ pub(crate) fn suspicious_imports(checker: &Checker, stmt: &Stmt) {
}
}
}
Stmt::ImportFrom(node) => {
let Some(identifier) = &node.module else { return };
let names = &node.names;
Stmt::ImportFrom(ast::StmtImportFrom { module, names, .. }) => {
let Some(identifier) = module else { return };
match identifier.as_str() {
"telnetlib" => {
checker.report_diagnostic_if_enabled(

View File

@@ -154,12 +154,10 @@ impl<'a> StatementVisitor<'a> for ReraiseVisitor<'a> {
return;
}
match stmt {
Stmt::Raise(node) => {
let exc = node.exc.as_deref();
let cause = node.cause.as_deref();
Stmt::Raise(ast::StmtRaise { exc, cause, .. }) => {
// except Exception [as <name>]:
// raise [<exc> [from <cause>]]
let reraised = match (self.name, exc, cause) {
let reraised = match (self.name, exc.as_deref(), cause.as_deref()) {
// `raise`
(_, None, None) => true,
// `raise SomeExc from <name>`

View File

@@ -173,21 +173,24 @@ pub(crate) fn abstract_base_class(
// If an ABC declares an attribute by providing a type annotation
// but does not actually assign a value for that attribute,
// assume it is intended to be an "abstract attribute"
if let Stmt::AnnAssign(node) = stmt {
if node.value.is_none() {
has_abstract_method = true;
continue;
}
if matches!(
stmt,
Stmt::AnnAssign(ast::StmtAnnAssign { value: None, .. })
) {
has_abstract_method = true;
continue;
}
let Stmt::FunctionDef(node) = stmt else {
let Stmt::FunctionDef(ast::StmtFunctionDef {
decorator_list,
body,
name: method_name,
..
}) = stmt
else {
continue;
};
let decorator_list = &node.decorator_list;
let body = &node.body;
let method_name = &node.name;
let has_abstract_decorator = is_abstract(decorator_list, checker.semantic());
has_abstract_method |= has_abstract_decorator;

View File

@@ -51,7 +51,7 @@ impl AlwaysFixableViolation for AssertFalse {
}
fn assertion_error(msg: Option<&Expr>) -> Stmt {
Stmt::Raise(Box::new(ast::StmtRaise {
Stmt::Raise(ast::StmtRaise {
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
exc: Some(Box::new(Expr::Call(ast::ExprCall {
@@ -75,7 +75,7 @@ fn assertion_error(msg: Option<&Expr>) -> Stmt {
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}))),
cause: None,
}))
})
}
/// B011

View File

@@ -114,14 +114,14 @@ pub(crate) fn class_as_data_structure(checker: &Checker, class_def: &ast::StmtCl
// assignment of a name to an attribute.
fn is_simple_assignment_to_attribute(stmt: &ast::Stmt) -> bool {
match stmt {
ast::Stmt::Assign(node) => {
let [target] = node.targets.as_slice() else {
ast::Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
let [target] = targets.as_slice() else {
return false;
};
target.is_attribute_expr() && node.value.is_name_expr()
target.is_attribute_expr() && value.is_name_expr()
}
ast::Stmt::AnnAssign(node) => {
node.target.is_attribute_expr() && node.value.as_ref().is_some_and(|val| val.is_name_expr())
ast::Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
target.is_attribute_expr() && value.as_ref().is_some_and(|val| val.is_name_expr())
}
_ => false,
}

View File

@@ -86,10 +86,12 @@ struct SuspiciousVariablesVisitor<'a> {
impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt {
Stmt::FunctionDef(node) => {
Stmt::FunctionDef(ast::StmtFunctionDef {
parameters, body, ..
}) => {
// Collect all loaded variable names.
let mut visitor = LoadedNamesVisitor::default();
visitor.visit_body(&node.body);
visitor.visit_body(body);
// Treat any non-arguments as "suspicious".
self.names
@@ -98,7 +100,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
return false;
}
if node.parameters.includes(&loaded.id) {
if parameters.includes(&loaded.id) {
return false;
}
@@ -240,26 +242,18 @@ impl<'a> Visitor<'a> for AssignedNamesVisitor<'a> {
}
match stmt {
Stmt::Assign(node) => {
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
for expr in &node.targets {
for expr in targets {
visitor.visit_expr(expr);
}
self.names.extend(visitor.names);
}
Stmt::AugAssign(node) => {
Stmt::AugAssign(ast::StmtAugAssign { target, .. })
| Stmt::AnnAssign(ast::StmtAnnAssign { target, .. })
| Stmt::For(ast::StmtFor { target, .. }) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
visitor.visit_expr(&node.target);
self.names.extend(visitor.names);
}
Stmt::AnnAssign(node) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
visitor.visit_expr(&node.target);
self.names.extend(visitor.names);
}
Stmt::For(node) => {
let mut visitor = NamesFromAssignmentsVisitor::default();
visitor.visit_expr(&node.target);
visitor.visit_expr(target);
self.names.extend(visitor.names);
}
_ => {}

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::Stmt;
use ruff_python_ast::{self as ast, Stmt};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_text_size::Ranged;
@@ -71,23 +71,15 @@ fn walk_stmt(checker: &Checker, body: &[Stmt], f: fn(&Stmt) -> bool) {
);
}
match stmt {
Stmt::While(node) => {
walk_stmt(checker, &node.body, Stmt::is_return_stmt);
Stmt::While(ast::StmtWhile { body, .. }) | Stmt::For(ast::StmtFor { body, .. }) => {
walk_stmt(checker, body, Stmt::is_return_stmt);
}
Stmt::For(node) => {
walk_stmt(checker, &node.body, Stmt::is_return_stmt);
Stmt::If(ast::StmtIf { body, .. })
| Stmt::Try(ast::StmtTry { body, .. })
| Stmt::With(ast::StmtWith { body, .. }) => {
walk_stmt(checker, body, f);
}
Stmt::If(node) => {
walk_stmt(checker, &node.body, f);
}
Stmt::Try(node) => {
walk_stmt(checker, &node.body, f);
}
Stmt::With(node) => {
walk_stmt(checker, &node.body, f);
}
Stmt::Match(node) => {
let cases = &node.cases;
Stmt::Match(ast::StmtMatch { cases, .. }) => {
for case in cases {
walk_stmt(checker, &case.body, f);
}

View File

@@ -5,7 +5,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::name::UnqualifiedName;
use ruff_python_ast::{
self as ast, Expr, ExprAttribute, ExprCall, ExprSubscript, ExprTuple, Stmt, StmtFor,
Expr, ExprAttribute, ExprCall, ExprSubscript, ExprTuple, Stmt, StmtAssign, StmtAugAssign,
StmtDelete, StmtFor, StmtIf,
visitor::{self, Visitor},
};
use ruff_text_size::TextRange;
@@ -241,39 +242,43 @@ impl<'a> Visitor<'a> for LoopMutationsVisitor<'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt {
// Ex) `del items[0]`
Stmt::Delete(node) => {
let ast::StmtDelete {
range,
targets,
node_index: _,
} = &**node;
Stmt::Delete(StmtDelete {
range,
targets,
node_index: _,
}) => {
self.handle_delete(*range, targets);
visitor::walk_stmt(self, stmt);
}
// Ex) `items[0] = 1`
Stmt::Assign(node) => {
self.handle_assign(node.range, &node.targets);
Stmt::Assign(StmtAssign { range, targets, .. }) => {
self.handle_assign(*range, targets);
visitor::walk_stmt(self, stmt);
}
// Ex) `items += [1]`
Stmt::AugAssign(node) => {
self.handle_aug_assign(node.range, &node.target);
Stmt::AugAssign(StmtAugAssign { range, target, .. }) => {
self.handle_aug_assign(*range, target);
visitor::walk_stmt(self, stmt);
}
// Ex) `if True: items.append(1)`
Stmt::If(node) => {
Stmt::If(StmtIf {
test,
body,
elif_else_clauses,
..
}) => {
// Handle the `if` branch.
self.branch += 1;
self.branches.push(self.branch);
self.visit_expr(&node.test);
self.visit_body(&node.body);
self.visit_expr(test);
self.visit_body(body);
self.branches.pop();
// Handle the `elif` and `else` branches.
for clause in &node.elif_else_clauses {
for clause in elif_else_clauses {
self.branch += 1;
self.branches.push(self.branch);
if let Some(test) = &clause.test {

View File

@@ -74,7 +74,12 @@ pub(crate) fn map_without_explicit_strict(checker: &Checker, call: &ast::ExprCal
checker
.report_diagnostic(MapWithoutExplicitStrict, call.range())
.set_fix(Fix::applicable_edit(
add_argument("strict=False", &call.arguments, checker.tokens()),
add_argument(
"strict=False",
&call.arguments,
checker.comment_ranges(),
checker.locator().contents(),
),
Applicability::Unsafe,
));
}

View File

@@ -3,7 +3,7 @@ use std::fmt::Write;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::is_docstring_stmt;
use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Expr, ParameterWithDefault};
use ruff_python_semantic::SemanticModel;
use ruff_python_semantic::analyze::function_type::is_stub;
@@ -166,7 +166,12 @@ fn move_initialization(
return None;
}
let range = match parenthesized_range(default.into(), parameter.into(), checker.tokens()) {
let range = match parenthesized_range(
default.into(),
parameter.into(),
checker.comment_ranges(),
checker.source(),
) {
Some(range) => range,
None => default.range(),
};
@@ -189,8 +194,13 @@ fn move_initialization(
"{} = {}",
parameter.parameter.name(),
locator.slice(
parenthesized_range(default.into(), parameter.into(), checker.tokens())
.unwrap_or(default.range())
parenthesized_range(
default.into(),
parameter.into(),
checker.comment_ranges(),
checker.source()
)
.unwrap_or(default.range())
)
);
} else {

View File

@@ -92,7 +92,12 @@ pub(crate) fn no_explicit_stacklevel(checker: &Checker, call: &ast::ExprCall) {
}
let mut diagnostic = checker.report_diagnostic(NoExplicitStacklevel, call.func.range());
let edit = add_argument("stacklevel=2", &call.arguments, checker.tokens());
let edit = add_argument(
"stacklevel=2",
&call.arguments,
checker.comment_ranges(),
checker.locator().contents(),
);
diagnostic.set_fix(Fix::unsafe_edit(edit));
}

View File

@@ -119,11 +119,13 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
return;
}
match stmt {
Stmt::For(node) => {
if self.name_matches(&node.target) {
Stmt::For(ast::StmtFor {
target, iter, body, ..
}) => {
if self.name_matches(target) {
self.overridden = true;
} else {
if self.name_matches(&node.iter) {
if self.name_matches(iter) {
self.increment_usage_count(1);
// This could happen when the group is being looped
// over multiple times:
@@ -134,30 +136,36 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
// for item in group:
// ...
if self.usage_count > 1 {
self.exprs.push(&node.iter);
self.exprs.push(iter);
}
}
self.nested = true;
visitor::walk_body(self, &node.body);
visitor::walk_body(self, body);
self.nested = false;
}
}
Stmt::While(node) => {
Stmt::While(ast::StmtWhile { body, .. }) => {
self.nested = true;
visitor::walk_body(self, &node.body);
visitor::walk_body(self, body);
self.nested = false;
}
Stmt::If(node) => {
Stmt::If(ast::StmtIf {
test,
body,
elif_else_clauses,
range: _,
node_index: _,
}) => {
// base if plus branches
let mut if_stack = Vec::with_capacity(1 + node.elif_else_clauses.len());
let mut if_stack = Vec::with_capacity(1 + elif_else_clauses.len());
// Initialize the vector with the count for the if branch.
if_stack.push(0);
self.counter_stack.push(if_stack);
self.visit_expr(&node.test);
self.visit_body(&node.body);
self.visit_expr(test);
self.visit_body(body);
for clause in &node.elif_else_clauses {
for clause in elif_else_clauses {
self.counter_stack.last_mut().unwrap().push(0);
self.visit_elif_else_clause(clause);
}
@@ -169,10 +177,15 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
self.increment_usage_count(max_count);
}
}
Stmt::Match(node) => {
self.counter_stack.push(Vec::with_capacity(node.cases.len()));
self.visit_expr(&node.subject);
for match_case in &node.cases {
Stmt::Match(ast::StmtMatch {
subject,
cases,
range: _,
node_index: _,
}) => {
self.counter_stack.push(Vec::with_capacity(cases.len()));
self.visit_expr(subject);
for match_case in cases {
self.counter_stack.last_mut().unwrap().push(0);
self.visit_match_case(match_case);
}
@@ -183,17 +196,17 @@ impl<'a> Visitor<'a> for GroupNameFinder<'a> {
self.increment_usage_count(max_count);
}
}
Stmt::Assign(node) => {
if node.targets.iter().any(|target| self.name_matches(target)) {
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
if targets.iter().any(|target| self.name_matches(target)) {
self.overridden = true;
} else {
self.visit_expr(&node.value);
self.visit_expr(value);
}
}
Stmt::AnnAssign(node) => {
if self.name_matches(&node.target) {
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
if self.name_matches(target) {
self.overridden = true;
} else if let Some(expr) = &node.value {
} else if let Some(expr) = value {
self.visit_expr(expr);
}
}

View File

@@ -66,7 +66,7 @@ impl AlwaysFixableViolation for SetAttrWithConstant {
}
fn assignment(obj: &Expr, name: &str, value: &Expr, generator: Generator) -> String {
let stmt = Stmt::Assign(Box::new(ast::StmtAssign {
let stmt = Stmt::Assign(ast::StmtAssign {
targets: vec![Expr::Attribute(ast::ExprAttribute {
value: Box::new(obj.clone()),
attr: Identifier::new(name.to_string(), TextRange::default()),
@@ -77,7 +77,7 @@ fn assignment(obj: &Expr, name: &str, value: &Expr, generator: Generator) -> Str
value: Box::new(value.clone()),
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}));
});
generator.stmt(&stmt)
}

View File

@@ -70,7 +70,12 @@ pub(crate) fn zip_without_explicit_strict(checker: &Checker, call: &ast::ExprCal
checker
.report_diagnostic(ZipWithoutExplicitStrict, call.range())
.set_fix(Fix::applicable_edit(
add_argument("strict=False", &call.arguments, checker.tokens()),
add_argument(
"strict=False",
&call.arguments,
checker.comment_ranges(),
checker.locator().contents(),
),
Applicability::Unsafe,
));
}

View File

@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:242:20
--> B006_B008.py:239:20
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
help: Replace with `None`; initialize within function
239 |
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
236 |
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]):
242 + def nested_combo(a=None):
243 | pass
244 |
245 |
239 + def nested_combo(a=None):
240 | pass
241 |
242 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:27
--> B006_B008.py:276:27
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
275 | def mutable_annotations(
276 | a: list[int] | None = [],
| ^^
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
276 |
277 |
278 | def mutable_annotations(
273 |
274 |
275 | def mutable_annotations(
- a: list[int] | None = [],
279 + a: list[int] | None = None,
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:280:35
--> B006_B008.py:277:35
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
| ^^
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
277 |
278 | def mutable_annotations(
279 | a: list[int] | None = [],
274 |
275 | def mutable_annotations(
276 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {},
280 + b: Optional[Dict[int, int]] = None,
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
277 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:281:62
--> B006_B008.py:278:62
|
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
|
help: Replace with `None`; initialize within function
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
284 | pass
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
281 | pass
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:282:80
--> B006_B008.py:279:80
|
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
283 | ):
284 | pass
280 | ):
281 | pass
|
help: Replace with `None`; initialize within function
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
283 | ):
284 | pass
285 |
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ):
281 | pass
282 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:287:52
--> B006_B008.py:284:52
|
287 | def single_line_func_wrong(value: dict[str, str] = {}):
284 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
288 | """Docstring"""
285 | """Docstring"""
|
help: Replace with `None`; initialize within function
284 | pass
285 |
281 | pass
282 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 |
- def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
287 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:291:52
--> B006_B008.py:288:52
|
291 | def single_line_func_wrong(value: dict[str, str] = {}):
288 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
292 | """Docstring"""
293 | ...
289 | """Docstring"""
290 | ...
|
help: Replace with `None`; initialize within function
288 | """Docstring"""
289 |
290 |
285 | """Docstring"""
286 |
287 |
- def single_line_func_wrong(value: dict[str, str] = {}):
291 + def single_line_func_wrong(value: dict[str, str] = None):
292 | """Docstring"""
293 | ...
294 |
288 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring"""
290 | ...
291 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:296:52
--> B006_B008.py:293:52
|
296 | def single_line_func_wrong(value: dict[str, str] = {}):
293 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
297 | """Docstring"""; ...
294 | """Docstring"""; ...
|
help: Replace with `None`; initialize within function
293 | ...
294 |
290 | ...
291 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 |
- def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
296 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:300:52
--> B006_B008.py:297:52
|
300 | def single_line_func_wrong(value: dict[str, str] = {}):
297 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
301 | """Docstring"""; \
302 | ...
298 | """Docstring"""; \
299 | ...
|
help: Replace with `None`; initialize within function
297 | """Docstring"""; ...
298 |
299 |
294 | """Docstring"""; ...
295 |
296 |
- def single_line_func_wrong(value: dict[str, str] = {}):
300 + def single_line_func_wrong(value: dict[str, str] = None):
301 | """Docstring"""; \
302 | ...
303 |
297 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \
299 | ...
300 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:305:52
--> B006_B008.py:302:52
|
305 | def single_line_func_wrong(value: dict[str, str] = {
302 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^
306 | | # This is a comment
307 | | }):
303 | | # This is a comment
304 | | }):
| |_^
308 | """Docstring"""
305 | """Docstring"""
|
help: Replace with `None`; initialize within function
302 | ...
303 |
304 |
299 | ...
300 |
301 |
- def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment
- }):
305 + def single_line_func_wrong(value: dict[str, str] = None):
306 | """Docstring"""
307 |
308 |
302 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring"""
304 |
305 |
note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:311:52
--> B006_B008.py:308:52
|
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^
312 | : \
313 | """Docstring"""
309 | : \
310 | """Docstring"""
|
help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:316:52
--> B006_B008.py:313:52
|
316 | def single_line_func_wrong(value: dict[str, str] = {}):
313 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
317 | """Docstring without newline"""
314 | """Docstring without newline"""
|
help: Replace with `None`; initialize within function
313 | """Docstring"""
314 |
315 |
310 | """Docstring"""
311 |
312 |
- def single_line_func_wrong(value: dict[str, str] = {}):
316 + def single_line_func_wrong(value: dict[str, str] = None):
317 | """Docstring without newline"""
313 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior

View File

@@ -53,39 +53,39 @@ B008 Do not perform function call in argument defaults; instead, perform the cal
|
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:242:31
--> B006_B008.py:239:31
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:248:22
--> B006_B008.py:245:22
|
246 | # Don't flag nested B006 since we can't guarantee that
247 | # it isn't made mutable by the outer operation.
248 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
243 | # Don't flag nested B006 since we can't guarantee that
244 | # it isn't made mutable by the outer operation.
245 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
249 | pass
246 | pass
|
B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:253:19
--> B006_B008.py:250:19
|
252 | # B008-ception.
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
249 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
254 | pass
251 | pass
|
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:253:37
--> B006_B008.py:250:37
|
252 | # B008-ception.
253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
249 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^
254 | pass
251 | pass
|

View File

@@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:242:20
--> B006_B008.py:239:20
|
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
242 | def nested_combo(a=[float(3), dt.datetime.now()]):
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
243 | pass
240 | pass
|
help: Replace with `None`; initialize within function
239 |
240 | # B006 and B008
241 | # We should handle arbitrary nesting of these B008.
236 |
237 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]):
242 + def nested_combo(a=None):
243 | pass
244 |
245 |
239 + def nested_combo(a=None):
240 | pass
241 |
242 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:27
--> B006_B008.py:276:27
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
275 | def mutable_annotations(
276 | a: list[int] | None = [],
| ^^
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
276 |
277 |
278 | def mutable_annotations(
273 |
274 |
275 | def mutable_annotations(
- a: list[int] | None = [],
279 + a: list[int] | None = None,
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:280:35
--> B006_B008.py:277:35
|
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
| ^^
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
|
help: Replace with `None`; initialize within function
277 |
278 | def mutable_annotations(
279 | a: list[int] | None = [],
274 |
275 | def mutable_annotations(
276 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {},
280 + b: Optional[Dict[int, int]] = None,
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
277 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:281:62
--> B006_B008.py:278:62
|
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
|
help: Replace with `None`; initialize within function
278 | def mutable_annotations(
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
275 | def mutable_annotations(
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
283 | ):
284 | pass
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ):
281 | pass
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:282:80
--> B006_B008.py:279:80
|
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^
283 | ):
284 | pass
280 | ):
281 | pass
|
help: Replace with `None`; initialize within function
279 | a: list[int] | None = [],
280 | b: Optional[Dict[int, int]] = {},
281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
276 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
283 | ):
284 | pass
285 |
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ):
281 | pass
282 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:287:52
--> B006_B008.py:284:52
|
287 | def single_line_func_wrong(value: dict[str, str] = {}):
284 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
288 | """Docstring"""
285 | """Docstring"""
|
help: Replace with `None`; initialize within function
284 | pass
285 |
281 | pass
282 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 |
- def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
287 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:291:52
--> B006_B008.py:288:52
|
291 | def single_line_func_wrong(value: dict[str, str] = {}):
288 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
292 | """Docstring"""
293 | ...
289 | """Docstring"""
290 | ...
|
help: Replace with `None`; initialize within function
288 | """Docstring"""
289 |
290 |
285 | """Docstring"""
286 |
287 |
- def single_line_func_wrong(value: dict[str, str] = {}):
291 + def single_line_func_wrong(value: dict[str, str] = None):
292 | """Docstring"""
293 | ...
294 |
288 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring"""
290 | ...
291 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:296:52
--> B006_B008.py:293:52
|
296 | def single_line_func_wrong(value: dict[str, str] = {}):
293 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
297 | """Docstring"""; ...
294 | """Docstring"""; ...
|
help: Replace with `None`; initialize within function
293 | ...
294 |
290 | ...
291 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 |
- def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
296 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:300:52
--> B006_B008.py:297:52
|
300 | def single_line_func_wrong(value: dict[str, str] = {}):
297 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
301 | """Docstring"""; \
302 | ...
298 | """Docstring"""; \
299 | ...
|
help: Replace with `None`; initialize within function
297 | """Docstring"""; ...
298 |
299 |
294 | """Docstring"""; ...
295 |
296 |
- def single_line_func_wrong(value: dict[str, str] = {}):
300 + def single_line_func_wrong(value: dict[str, str] = None):
301 | """Docstring"""; \
302 | ...
303 |
297 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \
299 | ...
300 |
note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:305:52
--> B006_B008.py:302:52
|
305 | def single_line_func_wrong(value: dict[str, str] = {
302 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^
306 | | # This is a comment
307 | | }):
303 | | # This is a comment
304 | | }):
| |_^
308 | """Docstring"""
305 | """Docstring"""
|
help: Replace with `None`; initialize within function
302 | ...
303 |
304 |
299 | ...
300 |
301 |
- def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment
- }):
305 + def single_line_func_wrong(value: dict[str, str] = None):
306 | """Docstring"""
307 |
308 |
302 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring"""
304 |
305 |
note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:311:52
--> B006_B008.py:308:52
|
311 | def single_line_func_wrong(value: dict[str, str] = {}) \
308 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^
312 | : \
313 | """Docstring"""
309 | : \
310 | """Docstring"""
|
help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:316:52
--> B006_B008.py:313:52
|
316 | def single_line_func_wrong(value: dict[str, str] = {}):
313 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^
317 | """Docstring without newline"""
314 | """Docstring without newline"""
|
help: Replace with `None`; initialize within function
313 | """Docstring"""
314 |
315 |
310 | """Docstring"""
311 |
312 |
- def single_line_func_wrong(value: dict[str, str] = {}):
316 + def single_line_func_wrong(value: dict[str, str] = None):
317 | """Docstring without newline"""
313 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior

View File

@@ -2,8 +2,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_python_ast::ExprGenerator;
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::token::parenthesized_range;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;
@@ -142,9 +142,13 @@ pub(crate) fn unnecessary_generator_list(checker: &Checker, call: &ast::ExprCall
if *parenthesized {
// The generator's range will include the innermost parentheses, but it could be
// surrounded by additional parentheses.
let range =
parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens())
.unwrap_or(argument.range());
let range = parenthesized_range(
argument.into(),
(&call.arguments).into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(argument.range());
// The generator always parenthesizes the expression; trim the parentheses.
let generator = checker.generator().expr(argument);

View File

@@ -2,8 +2,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_python_ast::ExprGenerator;
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::token::parenthesized_range;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;
@@ -147,9 +147,13 @@ pub(crate) fn unnecessary_generator_set(checker: &Checker, call: &ast::ExprCall)
if *parenthesized {
// The generator's range will include the innermost parentheses, but it could be
// surrounded by additional parentheses.
let range =
parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens())
.unwrap_or(argument.range());
let range = parenthesized_range(
argument.into(),
(&call.arguments).into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(argument.range());
// The generator always parenthesizes the expression; trim the parentheses.
let generator = checker.generator().expr(argument);

View File

@@ -1,7 +1,7 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::token::TokenKind;
use ruff_python_ast::token::parenthesized_range;
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::checkers::ast::Checker;
@@ -89,9 +89,13 @@ pub(crate) fn unnecessary_list_comprehension_set(checker: &Checker, call: &ast::
// If the list comprehension is parenthesized, remove the parentheses in addition to
// removing the brackets.
let replacement_range =
parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens())
.unwrap_or_else(|| argument.range());
let replacement_range = parenthesized_range(
argument.into(),
(&call.arguments).into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or_else(|| argument.range());
let span = argument.range().add_start(one).sub_end(one);
let replacement =

View File

@@ -59,20 +59,16 @@ pub(crate) fn all_with_model_form(checker: &Checker, class_def: &ast::StmtClassD
}
for element in &class_def.body {
let Stmt::ClassDef(class_def_inner) = element else {
let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
continue;
};
let name = &class_def_inner.name;
let body = &class_def_inner.body;
if name != "Meta" {
continue;
}
for element in body {
let Stmt::Assign(assign) = element else {
let Stmt::Assign(ast::StmtAssign { targets, value, .. }) = element else {
continue;
};
let targets = &assign.targets;
let value = &assign.value;
for target in targets {
let Expr::Name(ast::ExprName { id, .. }) = target else {
continue;

View File

@@ -57,19 +57,16 @@ pub(crate) fn exclude_with_model_form(checker: &Checker, class_def: &ast::StmtCl
}
for element in &class_def.body {
let Stmt::ClassDef(class_def_inner) = element else {
let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
continue;
};
let name = &class_def_inner.name;
let body = &class_def_inner.body;
if name != "Meta" {
continue;
}
for element in body {
let Stmt::Assign(assign) = element else {
let Stmt::Assign(ast::StmtAssign { targets, .. }) = element else {
continue;
};
let targets = &assign.targets;
for target in targets {
let Expr::Name(ast::ExprName { id, .. }) = target else {
continue;

View File

@@ -72,7 +72,7 @@ pub(crate) fn model_without_dunder_str(checker: &Checker, class_def: &ast::StmtC
fn has_dunder_method(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
analyze::class::any_super_class(class_def, semantic, &|class_def| {
class_def.body.iter().any(|val| match val {
Stmt::FunctionDef(node) => node.name.as_str() == "__str__",
Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => name == "__str__",
_ => false,
})
})
@@ -90,25 +90,24 @@ fn is_non_abstract_model(class_def: &ast::StmtClassDef, semantic: &SemanticModel
/// Check if class is abstract, in terms of Django model inheritance.
fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool {
for element in &class_def.body {
let Stmt::ClassDef(node) = element else {
let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
continue;
};
if node.name.as_str() != "Meta" {
if name != "Meta" {
continue;
}
for element in &node.body {
for element in body {
match element {
Stmt::Assign(assign) => {
if assign
.targets
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
if targets
.iter()
.any(|target| is_abstract_true_assignment(target, Some(&assign.value)))
.any(|target| is_abstract_true_assignment(target, Some(value)))
{
return true;
}
}
Stmt::AnnAssign(ann_assign) => {
if is_abstract_true_assignment(&ann_assign.target, ann_assign.value.as_deref()) {
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
if is_abstract_true_assignment(target, value.as_deref()) {
return true;
}
}

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::{Expr, Stmt};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::is_const_true;
@@ -62,13 +62,10 @@ pub(crate) fn nullable_model_string_field(checker: &Checker, body: &[Stmt]) {
for statement in body {
let value = match statement {
Stmt::Assign(assign) => &assign.value,
Stmt::AnnAssign(ann_assign) => {
match &ann_assign.value {
Some(value) => value,
None => continue,
}
}
Stmt::Assign(ast::StmtAssign { value, .. }) => value,
Stmt::AnnAssign(ast::StmtAnnAssign {
value: Some(value), ..
}) => value,
_ => continue,
};

View File

@@ -153,13 +153,13 @@ impl fmt::Display for ContentType {
fn get_element_type(element: &Stmt, semantic: &SemanticModel) -> Option<ContentType> {
match element {
Stmt::Assign(node) => {
if let Expr::Call(ast::ExprCall { func, .. }) = node.value.as_ref() {
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
if let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() {
if helpers::is_model_field(func, semantic) {
return Some(ContentType::FieldDeclaration);
}
}
let expr = node.targets.first()?;
let expr = targets.first()?;
let Expr::Name(ast::ExprName { id, .. }) = expr else {
return None;
};
@@ -169,14 +169,14 @@ fn get_element_type(element: &Stmt, semantic: &SemanticModel) -> Option<ContentT
None
}
}
Stmt::ClassDef(node) => {
if node.name.as_str() == "Meta" {
Stmt::ClassDef(ast::StmtClassDef { name, .. }) => {
if name == "Meta" {
Some(ContentType::MetaClass)
} else {
None
}
}
Stmt::FunctionDef(node) => match node.name.as_str() {
Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => match name.as_str() {
name if is_dunder(name) => Some(ContentType::MagicMethod),
"save" => Some(ContentType::SaveMethod),
"get_absolute_url" => Some(ContentType::GetAbsoluteUrlMethod),

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Expr, Operator};
use ruff_python_trivia::is_python_whitespace;
use ruff_source_file::LineRanges;
@@ -88,7 +88,13 @@ pub(crate) fn explicit(checker: &Checker, expr: &Expr) {
checker.report_diagnostic(ExplicitStringConcatenation, expr.range());
let is_parenthesized = |expr: &Expr| {
parenthesized_range(expr.into(), bin_op.into(), checker.tokens()).is_some()
parenthesized_range(
expr.into(),
bin_op.into(),
checker.comment_ranges(),
checker.source(),
)
.is_some()
};
// If either `left` or `right` is parenthesized, generating
// a fix would be too involved. Just report the diagnostic.

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::Stmt;
use ruff_python_ast::{Stmt, StmtTry};
use ruff_python_semantic::SemanticModel;
use ruff_text_size::{Ranged, TextSize};
@@ -8,10 +8,9 @@ pub(super) fn outside_handlers(offset: TextSize, semantic: &SemanticModel) -> bo
break;
}
let Stmt::Try(try_stmt) = stmt else {
let Stmt::Try(StmtTry { handlers, .. }) = stmt else {
continue;
};
let handlers = &try_stmt.handlers;
if handlers
.iter()

View File

@@ -111,6 +111,7 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall
}
let arguments = &call.arguments;
let source = checker.source();
let mut diagnostic = checker.report_diagnostic(ExcInfoOutsideExceptHandler, exc_info.range);
@@ -119,8 +120,8 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall
exc_info,
arguments,
Parentheses::Preserve,
checker.source(),
checker.tokens(),
source,
checker.comment_ranges(),
)?;
Ok(Fix::unsafe_edit(edit))
});

View File

@@ -2,7 +2,7 @@ use rustc_hash::FxHashSet;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::any_over_expr;
use ruff_python_ast::{Expr, Stmt};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
@@ -59,15 +59,15 @@ pub(crate) fn duplicate_class_field_definition(checker: &Checker, body: &[Stmt])
for stmt in body {
// Extract the property name from the assignment statement.
let target = match stmt {
Stmt::Assign(assign_stmt) => {
if let [Expr::Name(id)] = assign_stmt.targets.as_slice() {
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
if let [Expr::Name(id)] = targets.as_slice() {
id
} else {
continue;
}
}
Stmt::AnnAssign(ann_assign_stmt) => {
if let Expr::Name(id) = ann_assign_stmt.target.as_ref() {
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if let Expr::Name(id) = target.as_ref() {
id
} else {
continue;
@@ -78,20 +78,20 @@ pub(crate) fn duplicate_class_field_definition(checker: &Checker, body: &[Stmt])
// If this is an unrolled augmented assignment (e.g., `x = x + 1`), skip it.
match stmt {
Stmt::Assign(assign_stmt) => {
if any_over_expr(assign_stmt.value.as_ref(), &|expr| {
Stmt::Assign(ast::StmtAssign { value, .. }) => {
if any_over_expr(value.as_ref(), &|expr| {
expr.as_name_expr().is_some_and(|name| name.id == target.id)
}) {
continue;
}
}
Stmt::AnnAssign(ann_assign_stmt) => {
if let Some(value) = &ann_assign_stmt.value {
if any_over_expr(value.as_ref(), &|expr| {
expr.as_name_expr().is_some_and(|name| name.id == target.id)
}) {
continue;
}
Stmt::AnnAssign(ast::StmtAnnAssign {
value: Some(value), ..
}) => {
if any_over_expr(value.as_ref(), &|expr| {
expr.as_name_expr().is_some_and(|name| name.id == target.id)
}) {
continue;
}
}
_ => continue,

View File

@@ -58,11 +58,11 @@ impl Violation for NonUniqueEnums {
pub(crate) fn non_unique_enums(checker: &Checker, parent: &Stmt, body: &[Stmt]) {
let semantic = checker.semantic();
let Stmt::ClassDef(class_def) = parent else {
let Stmt::ClassDef(parent) = parent else {
return;
};
if !class_def.bases().iter().any(|expr| {
if !parent.bases().iter().any(|expr| {
semantic
.resolve_qualified_name(expr)
.is_some_and(|qualified_name| matches!(qualified_name.segments(), ["enum", "Enum"]))
@@ -72,10 +72,9 @@ pub(crate) fn non_unique_enums(checker: &Checker, parent: &Stmt, body: &[Stmt])
let mut seen_targets: FxHashSet<ComparableExpr> = FxHashSet::default();
for stmt in body {
let Stmt::Assign(assign_stmt) = stmt else {
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
continue;
};
let value = &assign_stmt.value;
if is_call_to_enum_auto(semantic, value) {
continue;

View File

@@ -2,7 +2,7 @@ use itertools::Itertools;
use rustc_hash::{FxBuildHasher, FxHashSet};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Expr};
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_text_size::Ranged;
@@ -129,8 +129,8 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) {
keyword,
&call.arguments,
Parentheses::Preserve,
checker.source(),
checker.tokens(),
checker.locator().contents(),
checker.comment_ranges(),
)
.map(Fix::safe_edit)
});
@@ -158,7 +158,8 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) {
parenthesized_range(
value.into(),
dict.into(),
checker.tokens()
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(value.range())
)

View File

@@ -73,11 +73,11 @@ pub(crate) fn unnecessary_range_start(checker: &Checker, call: &ast::ExprCall) {
let mut diagnostic = checker.report_diagnostic(UnnecessaryRangeStart, start.range());
diagnostic.try_set_fix(|| {
remove_argument(
start,
&start,
&call.arguments,
Parentheses::Preserve,
checker.source(),
checker.tokens(),
checker.locator().contents(),
checker.comment_ranges(),
)
.map(Fix::safe_edit)
});

View File

@@ -160,16 +160,20 @@ fn generate_fix(
) -> anyhow::Result<Fix> {
let locator = checker.locator();
let source = locator.contents();
let tokens = checker.tokens();
let deletion = remove_argument(
generic_base,
arguments,
Parentheses::Preserve,
source,
tokens,
checker.comment_ranges(),
)?;
let insertion = add_argument(locator.slice(generic_base), arguments, tokens);
let insertion = add_argument(
locator.slice(generic_base),
arguments,
checker.comment_ranges(),
source,
);
Ok(Fix::unsafe_edits(deletion, [insertion]))
}

View File

@@ -5,7 +5,7 @@ use ruff_python_ast::{
helpers::{pep_604_union, typing_optional},
name::Name,
operator_precedence::OperatorPrecedence,
token::{Tokens, parenthesized_range},
parenthesize::parenthesized_range,
};
use ruff_python_semantic::analyze::typing::{traverse_literal, traverse_union};
use ruff_text_size::{Ranged, TextRange};
@@ -243,12 +243,16 @@ fn create_fix(
let union_expr = pep_604_union(&[new_literal_expr, none_expr]);
// Check if we need parentheses to preserve operator precedence
let content =
if needs_parentheses_for_precedence(semantic, literal_expr, checker.tokens()) {
format!("({})", checker.generator().expr(&union_expr))
} else {
checker.generator().expr(&union_expr)
};
let content = if needs_parentheses_for_precedence(
semantic,
literal_expr,
checker.comment_ranges(),
checker.source(),
) {
format!("({})", checker.generator().expr(&union_expr))
} else {
checker.generator().expr(&union_expr)
};
let union_edit = Edit::range_replacement(content, literal_expr.range());
Fix::applicable_edit(union_edit, applicability)
@@ -274,7 +278,8 @@ enum UnionKind {
fn needs_parentheses_for_precedence(
semantic: &ruff_python_semantic::SemanticModel,
literal_expr: &Expr,
tokens: &Tokens,
comment_ranges: &ruff_python_trivia::CommentRanges,
source: &str,
) -> bool {
// Get the parent expression to check if we're in a context that needs parentheses
let Some(parent_expr) = semantic.current_expression_parent() else {
@@ -282,7 +287,14 @@ fn needs_parentheses_for_precedence(
};
// Check if the literal expression is already parenthesized
if parenthesized_range(literal_expr.into(), parent_expr.into(), tokens).is_some() {
if parenthesized_range(
literal_expr.into(),
parent_expr.into(),
comment_ranges,
source,
)
.is_some()
{
return false; // Already parenthesized, don't add more
}

View File

@@ -1,3 +1,4 @@
use ruff_python_ast as ast;
use ruff_python_ast::Stmt;
use ruff_macros::{ViolationMetadata, derive_message_formats};
@@ -43,15 +44,17 @@ impl AlwaysFixableViolation for StrOrReprDefinedInStub {
/// PYI029
pub(crate) fn str_or_repr_defined_in_stub(checker: &Checker, stmt: &Stmt) {
let Stmt::FunctionDef(func_def) = stmt else {
let Stmt::FunctionDef(ast::StmtFunctionDef {
name,
decorator_list,
returns,
parameters,
..
}) = stmt
else {
return;
};
let name = &func_def.name;
let decorator_list = &func_def.decorator_list;
let returns = &func_def.returns;
let parameters = &func_def.parameters;
let Some(returns) = returns else {
return;
};

View File

@@ -196,14 +196,15 @@ pub(crate) fn unused_private_type_var(checker: &Checker, scope: &Scope) {
let Some(source) = binding.source else {
continue;
};
let stmt = checker.semantic().statement(source);
let Stmt::Assign(assign) = stmt else {
let stmt @ Stmt::Assign(ast::StmtAssign { targets, value, .. }) =
checker.semantic().statement(source)
else {
continue;
};
let [Expr::Name(ast::ExprName { id, .. })] = &assign.targets[..] else {
let [Expr::Name(ast::ExprName { id, .. })] = &targets[..] else {
continue;
};
let Expr::Call(ast::ExprCall { func, .. }) = assign.value.as_ref() else {
let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() else {
continue;
};
@@ -316,16 +317,18 @@ pub(crate) fn unused_private_type_alias(checker: &Checker, scope: &Scope) {
fn extract_type_alias_name<'a>(stmt: &'a ast::Stmt, semantic: &SemanticModel) -> Option<&'a str> {
match stmt {
ast::Stmt::AnnAssign(ann_assign) => {
let ast::ExprName { id, .. } = ann_assign.target.as_name_expr()?;
if semantic.match_typing_expr(&ann_assign.annotation, "TypeAlias") {
ast::Stmt::AnnAssign(ast::StmtAnnAssign {
target, annotation, ..
}) => {
let ast::ExprName { id, .. } = target.as_name_expr()?;
if semantic.match_typing_expr(annotation, "TypeAlias") {
Some(id)
} else {
None
}
}
ast::Stmt::TypeAlias(type_alias) => {
let ast::ExprName { id, .. } = type_alias.name.as_name_expr()?;
ast::Stmt::TypeAlias(ast::StmtTypeAlias { name, .. }) => {
let ast::ExprName { id, .. } = name.as_name_expr()?;
Some(id)
}
_ => None,
@@ -385,9 +388,9 @@ fn extract_typeddict_name<'a>(stmt: &'a Stmt, semantic: &SemanticModel) -> Optio
// class Bar(typing.TypedDict, typing.Generic[T]):
// y: T
// ```
Stmt::ClassDef(class_def) => {
Stmt::ClassDef(class_def @ ast::StmtClassDef { name, .. }) => {
if class_def.bases().iter().any(is_typeddict) {
Some(&class_def.name)
Some(name)
} else {
None
}
@@ -399,12 +402,12 @@ fn extract_typeddict_name<'a>(stmt: &'a Stmt, semantic: &SemanticModel) -> Optio
// import typing
// Baz = typing.TypedDict("Baz", {"z": bytes})
// ```
Stmt::Assign(assign) => {
let [target] = assign.targets.as_slice() else {
Stmt::Assign(ast::StmtAssign { targets, value, .. }) => {
let [target] = targets.as_slice() else {
return None;
};
let ast::ExprName { id, .. } = target.as_name_expr()?;
let ast::ExprCall { func, .. } = assign.value.as_call_expr()?;
let ast::ExprCall { func, .. } = value.as_call_expr()?;
if is_typeddict(func) { Some(id) } else { None }
}
_ => None,

View File

@@ -10,7 +10,7 @@ use libcst_native::{
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::Truthiness;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::{
self as ast, AnyNodeRef, Arguments, BoolOp, ExceptHandler, Expr, Keyword, Stmt, UnaryOp,
@@ -303,7 +303,8 @@ pub(crate) fn unittest_assertion(
parenthesized_range(
expr.into(),
checker.semantic().current_statement().into(),
checker.tokens(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(expr.range()),
)));
@@ -369,10 +370,10 @@ impl Violation for PytestUnittestRaisesAssertion {
/// PT027
pub(crate) fn unittest_raises_assertion_call(checker: &Checker, call: &ast::ExprCall) {
// Bindings in `with` statements are handled by `unittest_raises_assertion_bindings`.
if let Stmt::With(with_stmt) = checker.semantic().current_statement() {
if let Stmt::With(ast::StmtWith { items, .. }) = checker.semantic().current_statement() {
let call_ref = AnyNodeRef::from(call);
if with_stmt.items.iter().any(|item| {
if items.iter().any(|item| {
AnyNodeRef::from(&item.context_expr).ptr_eq(call_ref) && item.optional_vars.is_some()
}) {
return;
@@ -390,11 +391,7 @@ pub(crate) fn unittest_raises_assertion_binding(checker: &Checker, binding: &Bin
let semantic = checker.semantic();
let Some(stmt) = binding.statement(semantic) else {
return;
};
let Stmt::With(with) = stmt else {
let Some(Stmt::With(with)) = binding.statement(semantic) else {
return;
};

View File

@@ -768,8 +768,8 @@ fn check_fixture_decorator(checker: &Checker, func_name: &str, decorator: &Decor
keyword,
arguments,
edits::Parentheses::Preserve,
checker.source(),
checker.tokens(),
checker.locator().contents(),
checker.comment_ranges(),
)
.map(Fix::unsafe_edit)
});

View File

@@ -2,9 +2,10 @@ use rustc_hash::{FxBuildHasher, FxHashMap};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::token::{Tokens, parenthesized_range};
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Expr, ExprCall, ExprContext, StringLiteralFlags};
use ruff_python_codegen::Generator;
use ruff_python_trivia::CommentRanges;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::{Ranged, TextRange, TextSize};
@@ -321,8 +322,18 @@ fn elts_to_csv(elts: &[Expr], generator: Generator, flags: StringLiteralFlags) -
/// ```
///
/// This method assumes that the first argument is a string.
fn get_parametrize_name_range(call: &ExprCall, expr: &Expr, tokens: &Tokens) -> Option<TextRange> {
parenthesized_range(expr.into(), (&call.arguments).into(), tokens)
fn get_parametrize_name_range(
call: &ExprCall,
expr: &Expr,
comment_ranges: &CommentRanges,
source: &str,
) -> Option<TextRange> {
parenthesized_range(
expr.into(),
(&call.arguments).into(),
comment_ranges,
source,
)
}
/// PT006
@@ -338,8 +349,13 @@ fn check_names(checker: &Checker, call: &ExprCall, expr: &Expr, argvalues: &Expr
if names.len() > 1 {
match names_type {
types::ParametrizeNameType::Tuple => {
let name_range = get_parametrize_name_range(call, expr, checker.tokens())
.unwrap_or(expr.range());
let name_range = get_parametrize_name_range(
call,
expr,
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(expr.range());
let mut diagnostic = checker.report_diagnostic(
PytestParametrizeNamesWrongType {
single_argument: false,
@@ -370,8 +386,13 @@ fn check_names(checker: &Checker, call: &ExprCall, expr: &Expr, argvalues: &Expr
)));
}
types::ParametrizeNameType::List => {
let name_range = get_parametrize_name_range(call, expr, checker.tokens())
.unwrap_or(expr.range());
let name_range = get_parametrize_name_range(
call,
expr,
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(expr.range());
let mut diagnostic = checker.report_diagnostic(
PytestParametrizeNamesWrongType {
single_argument: false,

View File

@@ -220,11 +220,11 @@ pub(crate) fn complex_raises(checker: &Checker, stmt: &Stmt, items: &[WithItem],
if raises_called {
let is_too_complex = if let [stmt] = body {
match stmt {
Stmt::With(with_stmt) => is_non_trivial_with_body(&with_stmt.body),
Stmt::With(ast::StmtWith { body, .. }) => is_non_trivial_with_body(body),
// Allow function and class definitions to test decorators.
Stmt::ClassDef(_) | Stmt::FunctionDef(_) => false,
// Allow empty `for` loops to test iterators.
Stmt::For(for_stmt) => match &for_stmt.body[..] {
Stmt::For(ast::StmtFor { body, .. }) => match &body[..] {
[Stmt::Pass(_)] => false,
[Stmt::Expr(ast::StmtExpr { value, .. })] => !value.is_ellipsis_literal_expr(),
_ => true,

View File

@@ -162,12 +162,12 @@ impl TryFrom<&str> for UnittestAssert {
}
fn assert(expr: &Expr, msg: Option<&Expr>) -> Stmt {
Stmt::Assert(Box::new(ast::StmtAssert {
Stmt::Assert(ast::StmtAssert {
test: Box::new(expr.clone()),
msg: msg.map(|msg| Box::new(msg.clone())),
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}))
})
}
fn compare(left: &Expr, cmp_op: CmpOp, right: &Expr) -> Expr {

View File

@@ -206,11 +206,11 @@ pub(crate) fn complex_warns(checker: &Checker, stmt: &Stmt, items: &[WithItem],
if warns_called {
let is_too_complex = if let [stmt] = body {
match stmt {
Stmt::With(with_stmt) => is_non_trivial_with_body(&with_stmt.body),
Stmt::With(ast::StmtWith { body, .. }) => is_non_trivial_with_body(body),
// Allow function and class definitions to test decorators.
Stmt::ClassDef(_) | Stmt::FunctionDef(_) => false,
// Allow empty `for` loops to test iterators.
Stmt::For(for_stmt) => match &for_stmt.body[..] {
Stmt::For(ast::StmtFor { body, .. }) => match &body[..] {
[Stmt::Pass(_)] => false,
[Stmt::Expr(ast::StmtExpr { value, .. })] => !value.is_ellipsis_literal_expr(),
_ => true,

View File

@@ -448,12 +448,12 @@ fn is_noreturn_func(func: &Expr, semantic: &SemanticModel) -> bool {
return false;
};
let Stmt::FunctionDef(node) = semantic.statement(node_id)
let Stmt::FunctionDef(ast::StmtFunctionDef { returns, .. }) = semantic.statement(node_id)
else {
return false;
};
let Some(returns) = node.returns.as_ref() else {
let Some(returns) = returns.as_ref() else {
return false;
};
@@ -481,16 +481,19 @@ fn add_return_none(checker: &Checker, stmt: &Stmt, range: TextRange) {
fn has_implicit_return(checker: &Checker, stmt: &Stmt) -> bool {
match stmt {
Stmt::If(node) => {
if node
.body
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
if body
.last()
.is_some_and(|last| has_implicit_return(checker, last))
{
return true;
}
if node.elif_else_clauses.iter().any(|clause| {
if elif_else_clauses.iter().any(|clause| {
clause
.body
.last()
@@ -501,33 +504,25 @@ fn has_implicit_return(checker: &Checker, stmt: &Stmt) -> bool {
// Check if we don't have an else clause
matches!(
node.elif_else_clauses.last(),
elif_else_clauses.last(),
None | Some(ast::ElifElseClause { test: Some(_), .. })
)
}
Stmt::Assert(node) if is_const_false(&node.test) => false,
Stmt::While(node) if is_const_true(&node.test) => false,
Stmt::For(node) => {
if let Some(last_stmt) = node.orelse.last() {
Stmt::Assert(ast::StmtAssert { test, .. }) if is_const_false(test) => false,
Stmt::While(ast::StmtWhile { test, .. }) if is_const_true(test) => false,
Stmt::For(ast::StmtFor { orelse, .. }) | Stmt::While(ast::StmtWhile { orelse, .. }) => {
if let Some(last_stmt) = orelse.last() {
has_implicit_return(checker, last_stmt)
} else {
true
}
}
Stmt::While(node) => {
if let Some(last_stmt) = node.orelse.last() {
has_implicit_return(checker, last_stmt)
} else {
true
}
}
Stmt::Match(node) => node.cases.iter().any(|case| {
Stmt::Match(ast::StmtMatch { cases, .. }) => cases.iter().any(|case| {
case.body
.last()
.is_some_and(|last| has_implicit_return(checker, last))
}),
Stmt::With(node) => node
.body
Stmt::With(ast::StmtWith { body, .. }) => body
.last()
.is_some_and(|last_stmt| has_implicit_return(checker, last_stmt)),
Stmt::Return(_) | Stmt::Raise(_) | Stmt::Try(_) => false,

View File

@@ -62,11 +62,11 @@ impl<'semantic, 'data> ReturnVisitor<'semantic, 'data> {
impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt {
Stmt::ClassDef(node) => {
Stmt::ClassDef(ast::StmtClassDef { decorator_list, .. }) => {
// Visit the decorators, etc.
self.sibling = Some(stmt);
self.parents.push(stmt);
for decorator in &node.decorator_list {
for decorator in decorator_list {
visitor::walk_decorator(self, decorator);
}
self.parents.pop();
@@ -74,15 +74,12 @@ impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
// But don't recurse into the body.
return;
}
Stmt::FunctionDef(node) => {
let ast::StmtFunctionDef {
parameters,
decorator_list,
returns,
range: _,
node_index: _,
..
} = &**node;
Stmt::FunctionDef(ast::StmtFunctionDef {
parameters,
decorator_list,
returns,
..
}) => {
// Visit the decorators, etc.
self.sibling = Some(stmt);
self.parents.push(stmt);
@@ -98,30 +95,24 @@ impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
// But don't recurse into the body.
return;
}
Stmt::Global(node) => {
let ast::StmtGlobal {
names,
range: _,
node_index: _,
} = &**node;
Stmt::Global(ast::StmtGlobal {
names,
range: _,
node_index: _,
})
| Stmt::Nonlocal(ast::StmtNonlocal {
names,
range: _,
node_index: _,
}) => {
self.stack
.non_locals
.extend(names.iter().map(Identifier::as_str));
}
Stmt::Nonlocal(node) => {
let ast::StmtNonlocal {
names,
range: _,
node_index: _,
} = &**node;
self.stack
.non_locals
.extend(names.iter().map(Identifier::as_str));
}
Stmt::AnnAssign(node) => {
Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => {
// Ex) `x: int`
if node.value.is_none() {
if let Expr::Name(name) = node.target.as_ref() {
if value.is_none() {
if let Expr::Name(name) = target.as_ref() {
self.stack.annotations.insert(name.id.as_str());
}
}
@@ -149,11 +140,11 @@ impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
// x = f.read()
// return x
// ```
Stmt::With(with_node) => {
Stmt::With(with) => {
if let Some(stmt_assign) =
with_node.body.last().and_then(Stmt::as_assign_stmt)
with.body.last().and_then(Stmt::as_assign_stmt)
{
if !has_conditional_body(with_node, self.semantic) {
if !has_conditional_body(with, self.semantic) {
self.stack.assignment_return.push((
stmt_assign,
stmt_return,
@@ -168,14 +159,11 @@ impl<'a> Visitor<'a> for ReturnVisitor<'_, 'a> {
self.stack.returns.push(stmt_return);
}
Stmt::If(node) => {
let ast::StmtIf {
body,
elif_else_clauses,
range: _,
node_index: _,
..
} = &**node;
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
if let Some(first) = elif_else_clauses.first() {
self.stack.elifs_elses.push((body, first));
}

View File

@@ -10,7 +10,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::helpers::{Truthiness, contains_effect};
use ruff_python_ast::name::Name;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_codegen::Generator;
use ruff_python_semantic::SemanticModel;
@@ -800,9 +800,14 @@ fn is_short_circuit(
edit = Some(get_short_circuit_edit(
value,
TextRange::new(
parenthesized_range(furthest.into(), expr.into(), checker.tokens())
.unwrap_or(furthest.range())
.start(),
parenthesized_range(
furthest.into(),
expr.into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(furthest.range())
.start(),
expr.end(),
),
short_circuit_truthiness,
@@ -823,9 +828,14 @@ fn is_short_circuit(
edit = Some(get_short_circuit_edit(
next_value,
TextRange::new(
parenthesized_range(furthest.into(), expr.into(), checker.tokens())
.unwrap_or(furthest.range())
.start(),
parenthesized_range(
furthest.into(),
expr.into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(furthest.range())
.start(),
expr.end(),
),
short_circuit_truthiness,

View File

@@ -4,7 +4,7 @@ use ruff_text_size::{Ranged, TextRange};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::{is_const_false, is_const_true};
use ruff_python_ast::name::Name;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use crate::checkers::ast::Checker;
use crate::{AlwaysFixableViolation, Edit, Fix, FixAvailability, Violation};
@@ -171,8 +171,13 @@ pub(crate) fn if_expr_with_true_false(
checker
.locator()
.slice(
parenthesized_range(test.into(), expr.into(), checker.tokens())
.unwrap_or(test.range()),
parenthesized_range(
test.into(),
expr.into(),
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(test.range()),
)
.to_string(),
expr.range(),

View File

@@ -140,10 +140,10 @@ fn is_dunder_method(name: &str) -> bool {
}
fn is_exception_check(stmt: &Stmt) -> bool {
let Stmt::If(node) = stmt else {
let Stmt::If(ast::StmtIf { body, .. }) = stmt else {
return false;
};
matches!(node.body.as_slice(), [Stmt::Raise(_)])
matches!(body.as_slice(), [Stmt::Raise(_)])
}
/// SIM201

View File

@@ -68,10 +68,18 @@ impl Violation for MultipleWithStatements {
/// Returns a boolean indicating whether it's an async with statement, the items
/// and body.
fn next_with(body: &[Stmt]) -> Option<(bool, &[WithItem], &[Stmt])> {
let [Stmt::With(node)] = body else {
let [
Stmt::With(ast::StmtWith {
is_async,
items,
body,
..
}),
] = body
else {
return None;
};
Some((node.is_async, &node.items, &node.body))
Some((*is_async, items, body))
}
/// Check if `with_items` contains a single item which should not necessarily be
@@ -131,8 +139,8 @@ pub(crate) fn multiple_with_statements(
// with B(), C():
// print("hello")
// ```
if let Some(Stmt::With(node)) = with_parent {
if node.body.len() == 1 {
if let Some(Stmt::With(ast::StmtWith { body, .. })) = with_parent {
if body.len() == 1 {
return;
}
}

View File

@@ -230,13 +230,21 @@ fn nested_if_body(stmt_if: &ast::StmtIf) -> Option<NestedIf<'_>> {
/// ...
/// ```
fn find_last_nested_if(body: &[Stmt]) -> Option<&Expr> {
let [Stmt::If(node)] = body else {
let [
Stmt::If(ast::StmtIf {
test,
body: inner_body,
elif_else_clauses,
..
}),
] = body
else {
return None;
};
if !node.elif_else_clauses.is_empty() {
if !elif_else_clauses.is_empty() {
return None;
}
find_last_nested_if(&node.body).or(Some(&node.test))
find_last_nested_if(inner_body).or(Some(test))
}
/// Returns `true` if an expression is an `if __name__ == "__main__":` check.

View File

@@ -165,18 +165,20 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) {
/// If the statement is an index increment statement (e.g., `i += 1`), return
/// the name of the index variable.
fn match_index_increment(stmt: &Stmt) -> Option<&ast::ExprName> {
let Stmt::AugAssign(node) = stmt else {
let Stmt::AugAssign(ast::StmtAugAssign {
target,
op: Operator::Add,
value,
..
}) = stmt
else {
return None;
};
if !matches!(node.op, Operator::Add) {
return None;
}
let name = node.target.as_name_expr()?;
let name = target.as_name_expr()?;
if matches!(
node.value.as_ref(),
value.as_ref(),
Expr::NumberLiteral(ast::ExprNumberLiteral {
value: Number::Int(Int::ONE),
..

View File

@@ -98,16 +98,26 @@ pub(crate) fn if_else_block_instead_of_dict_get(checker: &Checker, stmt_if: &ast
let [else_body_stmt] = else_body.as_slice() else {
return;
};
let Stmt::Assign(body_node) = &body_stmt else {
let Stmt::Assign(ast::StmtAssign {
targets: body_var,
value: body_value,
..
}) = &body_stmt
else {
return;
};
let [body_var] = body_node.targets.as_slice() else {
let [body_var] = body_var.as_slice() else {
return;
};
let Stmt::Assign(orelse_node) = &else_body_stmt else {
let Stmt::Assign(ast::StmtAssign {
targets: orelse_var,
value: orelse_value,
..
}) = &else_body_stmt
else {
return;
};
let [orelse_var] = orelse_node.targets.as_slice() else {
let [orelse_var] = orelse_var.as_slice() else {
return;
};
@@ -133,8 +143,8 @@ pub(crate) fn if_else_block_instead_of_dict_get(checker: &Checker, stmt_if: &ast
}
let (expected_var, expected_value, default_var, default_value) = match ops[..] {
[CmpOp::In] => (body_var, &body_node.value, orelse_var, orelse_node.value.as_ref()),
[CmpOp::NotIn] => (orelse_var, &orelse_node.value, body_var, body_node.value.as_ref()),
[CmpOp::In] => (body_var, body_value, orelse_var, orelse_value.as_ref()),
[CmpOp::NotIn] => (orelse_var, orelse_value, body_var, body_value.as_ref()),
_ => {
return;
}

View File

@@ -112,13 +112,27 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
else {
return;
};
let [Stmt::Assign(body_node)] = body.as_slice() else {
let [
Stmt::Assign(ast::StmtAssign {
targets: body_targets,
value: body_value,
..
}),
] = body.as_slice()
else {
return;
};
let [Stmt::Assign(else_node)] = else_body.as_slice() else {
let [
Stmt::Assign(ast::StmtAssign {
targets: else_targets,
value: else_value,
..
}),
] = else_body.as_slice()
else {
return;
};
let ([body_target], [else_target]) = (body_node.targets.as_slice(), else_node.targets.as_slice()) else {
let ([body_target], [else_target]) = (body_targets.as_slice(), else_targets.as_slice()) else {
return;
};
let Expr::Name(ast::ExprName { id: body_id, .. }) = body_target else {
@@ -134,13 +148,13 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
// Avoid suggesting ternary for `if (yield ...)`-style checks.
// TODO(charlie): Fix precedence handling for yields in generator.
if matches!(
body_node.value.as_ref(),
body_value.as_ref(),
Expr::Yield(_) | Expr::YieldFrom(_) | Expr::Await(_)
) {
return;
}
if matches!(
else_node.value.as_ref(),
else_value.as_ref(),
Expr::Yield(_) | Expr::YieldFrom(_) | Expr::Await(_)
) {
return;
@@ -176,20 +190,20 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
// - If `test == not body_value`, replace with `target_var = body_value and else_value`
// - If `not test == body_value`, replace with `target_var = body_value and else_value`
// - Otherwise, replace with `target_var = body_value if test else else_value`
let (contents, assignment_kind) = match (test, &body_node.value) {
(test_node, body_val_node)
if ComparableExpr::from(test_node) == ComparableExpr::from(body_val_node.as_ref())
let (contents, assignment_kind) = match (test, body_value) {
(test_node, body_node)
if ComparableExpr::from(test_node) == ComparableExpr::from(body_node)
&& !contains_effect(test_node, |id| checker.semantic().has_builtin_binding(id)) =>
{
let target_var = &body_target;
let binary = assignment_binary_or(target_var, &body_node.value, &else_node.value);
let binary = assignment_binary_or(target_var, body_value, else_value);
(checker.generator().stmt(&binary), AssignmentKind::Binary)
}
(test_node, body_val_node)
(test_node, body_node)
if (test_node.as_unary_op_expr().is_some_and(|op_expr| {
op_expr.op.is_not()
&& ComparableExpr::from(&op_expr.operand) == ComparableExpr::from(body_val_node.as_ref())
}) || body_val_node.as_ref().as_unary_op_expr().is_some_and(|op_expr| {
&& ComparableExpr::from(&op_expr.operand) == ComparableExpr::from(body_node)
}) || body_node.as_unary_op_expr().is_some_and(|op_expr| {
op_expr.op.is_not()
&& ComparableExpr::from(&op_expr.operand) == ComparableExpr::from(test_node)
})) && !contains_effect(test_node, |id| {
@@ -197,12 +211,12 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
}) =>
{
let target_var = &body_target;
let binary = assignment_binary_and(target_var, &body_node.value, &else_node.value);
let binary = assignment_binary_and(target_var, body_value, else_value);
(checker.generator().stmt(&binary), AssignmentKind::Binary)
}
_ => {
let target_var = &body_target;
let ternary = assignment_ternary(target_var, &body_node.value, test, &else_node.value);
let ternary = assignment_ternary(target_var, body_value, test, else_value);
(checker.generator().stmt(&ternary), AssignmentKind::Ternary)
}
};

View File

@@ -4,10 +4,10 @@ use anyhow::Result;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::comparable::ComparableStmt;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::stmt_if::{IfElifBranch, if_elif_branches};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::{self as ast, Expr};
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_python_trivia::{CommentRanges, SimpleTokenKind, SimpleTokenizer};
use ruff_source_file::LineRanges;
use ruff_text_size::{Ranged, TextRange};
@@ -99,7 +99,7 @@ pub(crate) fn if_with_same_arms(checker: &Checker, stmt_if: &ast::StmtIf) {
&current_branch,
following_branch,
checker.locator(),
checker.tokens(),
checker.comment_ranges(),
)
});
}
@@ -111,7 +111,7 @@ fn merge_branches(
current_branch: &IfElifBranch,
following_branch: &IfElifBranch,
locator: &Locator,
tokens: &ruff_python_ast::token::Tokens,
comment_ranges: &CommentRanges,
) -> Result<Fix> {
// Identify the colon (`:`) at the end of the current branch's test.
let Some(current_branch_colon) =
@@ -127,9 +127,12 @@ fn merge_branches(
);
// If the following test isn't parenthesized, consider parenthesizing it.
let following_branch_test = if let Some(range) =
parenthesized_range(following_branch.test.into(), stmt_if.into(), tokens)
{
let following_branch_test = if let Some(range) = parenthesized_range(
following_branch.test.into(),
stmt_if.into(),
comment_ranges,
locator.contents(),
) {
Cow::Borrowed(locator.slice(range))
} else if matches!(
following_branch.test,
@@ -150,19 +153,24 @@ fn merge_branches(
//
// For example, if the current test is `x if x else y`, we should parenthesize it to
// `(x if x else y) or ...`.
let parenthesize_edit =
if matches!(
current_branch.test,
Expr::Lambda(_) | Expr::Named(_) | Expr::If(_)
) && parenthesized_range(current_branch.test.into(), stmt_if.into(), tokens).is_none()
{
Some(Edit::range_replacement(
format!("({})", locator.slice(current_branch.test)),
current_branch.test.range(),
))
} else {
None
};
let parenthesize_edit = if matches!(
current_branch.test,
Expr::Lambda(_) | Expr::Named(_) | Expr::If(_)
) && parenthesized_range(
current_branch.test.into(),
stmt_if.into(),
comment_ranges,
locator.contents(),
)
.is_none()
{
Some(Edit::range_replacement(
format!("({})", locator.slice(current_branch.test)),
current_branch.test.range(),
))
} else {
None
};
Ok(Fix::safe_edits(
deletion_edit,

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::AnyNodeRef;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Arguments, CmpOp, Comprehension, Expr};
use ruff_python_semantic::analyze::typing;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
@@ -90,10 +90,20 @@ fn key_in_dict(checker: &Checker, left: &Expr, right: &Expr, operator: CmpOp, pa
}
// Extract the exact range of the left and right expressions.
let left_range =
parenthesized_range(left.into(), parent, checker.tokens()).unwrap_or(left.range());
let right_range =
parenthesized_range(right.into(), parent, checker.tokens()).unwrap_or(right.range());
let left_range = parenthesized_range(
left.into(),
parent,
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(left.range());
let right_range = parenthesized_range(
right.into(),
parent,
checker.comment_ranges(),
checker.locator().contents(),
)
.unwrap_or(right.range());
let mut diagnostic = checker.report_diagnostic(
InDictKeys {

View File

@@ -92,9 +92,15 @@ impl Violation for NeedlessBool {
/// SIM103
pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
let Stmt::If(stmt_if) = stmt else { return };
let ast::StmtIf {
test: if_test,
body: if_body,
elif_else_clauses,
..
} = stmt_if;
// Extract an `if` or `elif` (that returns) followed by an else (that returns the same value)
let (if_test, if_body, else_body, range) = match stmt_if.elif_else_clauses.as_slice() {
let (if_test, if_body, else_body, range) = match elif_else_clauses.as_slice() {
// if-else case:
// ```python
// if x > 0:
@@ -109,8 +115,8 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
..
},
] => (
stmt_if.test.as_ref(),
stmt_if.body.as_slice(),
if_test.as_ref(),
if_body,
else_body.as_slice(),
stmt_if.range(),
),
@@ -137,7 +143,7 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
},
] => (
elif_test,
elif_body.as_slice(),
elif_body,
else_body.as_slice(),
TextRange::new(elif_range.start(), else_range.end()),
),
@@ -149,7 +155,7 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
// ```
[] => {
// Fetching the next sibling is expensive, so do some validation early.
if is_one_line_return_bool(&stmt_if.body).is_none() {
if is_one_line_return_bool(if_body).is_none() {
return;
}
@@ -169,8 +175,8 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
}
(
stmt_if.test.as_ref(),
stmt_if.body.as_slice(),
if_test.as_ref(),
if_body,
std::slice::from_ref(next_stmt),
TextRange::new(stmt_if.start(), next_stmt.end()),
)
@@ -225,7 +231,7 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
op: ast::UnaryOp::Not,
operand,
..
}) => Some(operand.clone()),
}) => Some((**operand).clone()),
Expr::Compare(ast::ExprCompare {
ops,
@@ -246,26 +252,26 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
unreachable!("Single comparison with multiple comparators");
};
Some(Box::new(Expr::Compare(ast::ExprCompare {
Some(Expr::Compare(ast::ExprCompare {
ops: Box::new([op.negate()]),
left: left.clone(),
comparators: Box::new([right.clone()]),
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
})))
}))
}
_ => Some(Box::new(Expr::UnaryOp(ast::ExprUnaryOp {
_ => Some(Expr::UnaryOp(ast::ExprUnaryOp {
op: ast::UnaryOp::Not,
operand: Box::new(if_test.clone()),
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}))),
})),
}
} else if if_test.is_compare_expr() {
// If the condition is a comparison, we can replace it with the condition, since we
// know it's a boolean.
Some(Box::new(if_test.clone()))
Some(if_test.clone())
} else if checker.semantic().has_builtin_binding("bool") {
// Otherwise, we need to wrap the condition in a call to `bool`.
let func_node = ast::ExprName {
@@ -285,7 +291,7 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
};
Some(Box::new(Expr::Call(call_node)))
Some(Expr::Call(call_node))
} else {
None
}
@@ -294,7 +300,7 @@ pub(crate) fn needless_bool(checker: &Checker, stmt: &Stmt) {
// Generate the replacement `return` statement.
let replacement = condition.as_ref().map(|expr| {
Stmt::Return(ast::StmtReturn {
value: Some(expr.clone()),
value: Some(Box::new(expr.clone())),
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
})

View File

@@ -68,8 +68,8 @@ fn match_async_exit_stack(semantic: &SemanticModel) -> bool {
return false;
}
for parent in semantic.current_statements() {
if let Stmt::With(node) = parent {
for item in &node.items {
if let Stmt::With(ast::StmtWith { items, .. }) = parent {
for item in items {
if let Expr::Call(ast::ExprCall { func, .. }) = &item.context_expr {
if semantic
.resolve_qualified_name(func)
@@ -102,8 +102,8 @@ fn match_exit_stack(semantic: &SemanticModel) -> bool {
return false;
}
for parent in semantic.current_statements() {
if let Stmt::With(node) = parent {
for item in &node.items {
if let Stmt::With(ast::StmtWith { items, .. }) = parent {
for item in items {
if let Expr::Call(ast::ExprCall { func, .. }) = &item.context_expr {
if semantic
.resolve_qualified_name(func)

View File

@@ -269,25 +269,27 @@ struct Terminal<'a> {
}
fn match_loop(stmt: &Stmt) -> Option<Loop<'_>> {
let Stmt::For(for_stmt) = stmt else {
let Stmt::For(ast::StmtFor {
body, target, iter, ..
}) = stmt
else {
return None;
};
let ast::StmtFor {
body, target, iter, ..
} = &**for_stmt;
// The loop itself should contain a single `if` statement, with a single `return` statement in
// the body.
let [Stmt::If(if_stmt)] = body.as_slice() else {
let [
Stmt::If(ast::StmtIf {
body: nested_body,
test: nested_test,
elif_else_clauses: nested_elif_else_clauses,
range: _,
node_index: _,
}),
] = body.as_slice()
else {
return None;
};
let ast::StmtIf {
body: nested_body,
test: nested_test,
elif_else_clauses: nested_elif_else_clauses,
range: _,
node_index: _,
} = &**if_stmt;
if !nested_elif_else_clauses.is_empty() {
return None;
}
@@ -324,10 +326,9 @@ fn match_loop(stmt: &Stmt) -> Option<Loop<'_>> {
/// return False
/// ```
fn match_else_return(stmt: &Stmt) -> Option<Terminal<'_>> {
let Stmt::For(for_stmt) = stmt else {
let Stmt::For(ast::StmtFor { orelse, .. }) = stmt else {
return None;
};
let ast::StmtFor { orelse, .. } = &**for_stmt;
// The `else` block has to contain a single `return True` or `return False`.
let [
@@ -365,10 +366,9 @@ fn match_else_return(stmt: &Stmt) -> Option<Terminal<'_>> {
/// return False
/// ```
fn match_sibling_return<'a>(stmt: &'a Stmt, sibling: &'a Stmt) -> Option<Terminal<'a>> {
let Stmt::For(for_stmt) = stmt else {
let Stmt::For(ast::StmtFor { orelse, .. }) = stmt else {
return None;
};
let ast::StmtFor { orelse, .. } = &**for_stmt;
// The loop itself shouldn't have an `else` block.
if !orelse.is_empty() {

View File

@@ -4,8 +4,7 @@ use ruff_python_ast::{self as ast, Expr, Stmt};
pub(super) fn has_slots(body: &[Stmt]) -> bool {
for stmt in body {
match stmt {
Stmt::Assign(assign) => {
let targets = &assign.targets;
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
for target in targets {
if let Expr::Name(ast::ExprName { id, .. }) = target {
if id.as_str() == "__slots__" {
@@ -14,8 +13,7 @@ pub(super) fn has_slots(body: &[Stmt]) -> bool {
}
}
}
Stmt::AnnAssign(ann_assign) => {
let target = &ann_assign.target;
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
if id.as_str() == "__slots__" {
return true;

View File

@@ -92,11 +92,9 @@ impl<'a> BannedModuleImportPolicies<'a> {
pub(crate) fn new(stmt: &'a Stmt, checker: &Checker) -> Self {
match stmt {
Stmt::Import(import) => Self::Import(import),
Stmt::ImportFrom(import) => {
let module = &import.module;
let level = import.level;
Stmt::ImportFrom(import @ StmtImportFrom { module, level, .. }) => {
let module = resolve_imported_module_path(
level,
*level,
module.as_deref(),
checker.module.qualified_name(),
);

View File

@@ -91,10 +91,9 @@ fn fix_banned_relative_import(
return None;
}
let Stmt::ImportFrom(import_from) = stmt else {
let Stmt::ImportFrom(ast::StmtImportFrom { names, .. }) = stmt else {
panic!("Expected Stmt::ImportFrom");
};
let names = &import_from.names;
let node = ast::StmtImportFrom {
module: Some(Identifier::new(
module_path.to_string(),

View File

@@ -11,7 +11,7 @@ use crate::registry::Rule;
use crate::rules::flake8_type_checking::helpers::quote_type_expression;
use crate::{AlwaysFixableViolation, Edit, Fix, FixAvailability, Violation};
use ruff_python_ast::PythonVersion;
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
/// ## What it does
/// Checks if [PEP 613] explicit type aliases contain references to
@@ -158,10 +158,10 @@ pub(crate) fn unquoted_type_alias(checker: &Checker, binding: &Binding) {
return;
}
let Some(Stmt::AnnAssign(node)) = binding.statement(checker.semantic()) else {
return;
};
let Some(expr) = &node.value else {
let Some(Stmt::AnnAssign(ast::StmtAnnAssign {
value: Some(expr), ..
})) = binding.statement(checker.semantic())
else {
return;
};
@@ -295,20 +295,21 @@ pub(crate) fn quoted_type_alias(
let range = annotation_expr.range();
let mut diagnostic = checker.report_diagnostic(QuotedTypeAlias, range);
let fix_string = annotation_expr.value.to_string();
let fix_string = if (fix_string.contains('\n') || fix_string.contains('\r'))
&& parenthesized_range(
// Check for parentheses outside the string ("""...""")
// Check for parenthesis outside string ("""...""")
annotation_expr.into(),
checker.semantic().current_statement().into(),
checker.source_tokens(),
checker.comment_ranges(),
checker.locator().contents(),
)
.is_none()
&& parenthesized_range(
// Check for parentheses inside the string """(...)"""
// Check for parenthesis inside string """(...)"""
expr.into(),
annotation_expr.into(),
checker.tokens(),
checker.comment_ranges(),
checker.locator().contents(),
)
.is_none()
{

View File

@@ -1,5 +1,5 @@
use ruff_python_ast as ast;
use ruff_python_ast::{Parameter, Parameters, Stmt, StmtExpr, StmtFunctionDef};
use ruff_python_ast::{Parameter, Parameters, Stmt, StmtExpr, StmtFunctionDef, StmtRaise};
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_semantic::analyze::{function_type, visibility};
@@ -389,20 +389,14 @@ pub(crate) fn is_not_implemented_stub_with_variable(
_ => &function_def.body,
};
let [stmt1, stmt2] = statements else {
return false;
};
let Stmt::Assign(assign_node) = stmt1 else {
return false;
};
let targets = &assign_node.targets;
let value = &assign_node.value;
let Stmt::Raise(raise_node) = stmt2 else {
return false;
};
let Some(exception) = &raise_node.exc else {
let [
Stmt::Assign(ast::StmtAssign { targets, value, .. }),
Stmt::Raise(StmtRaise {
exc: Some(exception),
..
}),
] = statements
else {
return false;
};

View File

@@ -1,9 +1,10 @@
use std::ops::Range;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{Expr, ExprBinOp, ExprCall, Operator};
use ruff_python_semantic::SemanticModel;
use ruff_python_trivia::CommentRanges;
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
@@ -88,7 +89,11 @@ pub(crate) fn path_constructor_current_directory(
let mut diagnostic = checker.report_diagnostic(PathConstructorCurrentDirectory, arg.range());
match parent_and_next_path_fragment_range(checker.semantic(), checker.tokens()) {
match parent_and_next_path_fragment_range(
checker.semantic(),
checker.comment_ranges(),
checker.source(),
) {
Some((parent_range, next_fragment_range)) => {
let next_fragment_expr = checker.locator().slice(next_fragment_range);
let call_expr = checker.locator().slice(call.range());
@@ -111,7 +116,7 @@ pub(crate) fn path_constructor_current_directory(
arguments,
Parentheses::Preserve,
checker.source(),
checker.tokens(),
checker.comment_ranges(),
)?;
Ok(Fix::applicable_edit(edit, applicability(call.range())))
}),
@@ -120,7 +125,8 @@ pub(crate) fn path_constructor_current_directory(
fn parent_and_next_path_fragment_range(
semantic: &SemanticModel,
tokens: &ruff_python_ast::token::Tokens,
comment_ranges: &CommentRanges,
source: &str,
) -> Option<(TextRange, TextRange)> {
let parent = semantic.current_expression_parent()?;
@@ -136,6 +142,6 @@ fn parent_and_next_path_fragment_range(
Some((
parent.range(),
parenthesized_range(right.into(), parent.into(), tokens).unwrap_or(range),
parenthesized_range(right.into(), parent.into(), comment_ranges, source).unwrap_or(range),
))
}

View File

@@ -23,13 +23,11 @@ pub(crate) fn annotate_imports<'a>(
.iter()
.map(|import| {
match import {
Stmt::Import(import_stmt) => {
let ast::StmtImport {
names,
range,
node_index: _,
} = &**import_stmt;
Stmt::Import(ast::StmtImport {
names,
range,
node_index: _,
}) => {
// Find comments above.
let mut atop = vec![];
while let Some(comment) =
@@ -60,15 +58,13 @@ pub(crate) fn annotate_imports<'a>(
inline,
}
}
Stmt::ImportFrom(import_from_stmt) => {
let ast::StmtImportFrom {
module,
names,
level,
range: _,
node_index: _,
} = &**import_from_stmt;
Stmt::ImportFrom(ast::StmtImportFrom {
module,
names,
level,
range: _,
node_index: _,
}) => {
// Find comments above.
let mut atop = vec![];
while let Some(comment) =

View File

@@ -183,77 +183,87 @@ impl<'a> StatementVisitor<'a> for BlockBuilder<'a> {
let prev_nested = self.nested;
self.nested = true;
match stmt {
Stmt::FunctionDef(node) => {
for stmt in &node.body {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. }) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
}
Stmt::ClassDef(node) => {
for stmt in &node.body {
Stmt::ClassDef(ast::StmtClassDef { body, .. }) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
}
Stmt::For(node) => {
for stmt in &node.body {
Stmt::For(ast::StmtFor { body, orelse, .. }) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
for stmt in &node.orelse {
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize(None);
}
Stmt::While(node) => {
for stmt in &node.body {
Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
for stmt in &node.orelse {
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize(None);
}
Stmt::If(node) => {
for stmt in &node.body {
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
for clause in &node.elif_else_clauses {
for clause in elif_else_clauses {
self.visit_elif_else_clause(clause);
}
}
Stmt::With(node) => {
for stmt in &node.body {
Stmt::With(ast::StmtWith { body, .. }) => {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
}
Stmt::Match(node) => {
for match_case in &node.cases {
Stmt::Match(ast::StmtMatch { cases, .. }) => {
for match_case in cases {
self.visit_match_case(match_case);
}
}
Stmt::Try(node) => {
for except_handler in &node.handlers {
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
for except_handler in handlers {
self.visit_except_handler(except_handler);
}
for stmt in &node.body {
for stmt in body {
self.visit_stmt(stmt);
}
self.finalize(None);
for stmt in &node.orelse {
for stmt in orelse {
self.visit_stmt(stmt);
}
self.finalize(None);
for stmt in &node.finalbody {
for stmt in finalbody {
self.visit_stmt(stmt);
}
self.finalize(None);

View File

@@ -59,30 +59,30 @@ impl AlwaysFixableViolation for MissingRequiredImport {
fn includes_import(stmt: &Stmt, target: &NameImport) -> bool {
match target {
NameImport::Import(target) => {
let Stmt::Import(import_stmt) = &stmt else {
return false;
};
let ast::StmtImport {
let Stmt::Import(ast::StmtImport {
names,
range: _,
node_index: _,
} = &**import_stmt;
}) = &stmt
else {
return false;
};
names.iter().any(|alias| {
alias.name == target.name.name
&& alias.asname.as_deref() == target.name.as_name.as_deref()
})
}
NameImport::ImportFrom(target) => {
let Stmt::ImportFrom(import_from_stmt) = &stmt else {
return false;
};
let ast::StmtImportFrom {
let Stmt::ImportFrom(ast::StmtImportFrom {
module,
names,
level,
range: _,
node_index: _,
} = &**import_from_stmt;
}) = &stmt
else {
return false;
};
module.as_deref() == target.module.as_deref()
&& *level == target.level
&& names.iter().any(|alias| {

View File

@@ -71,35 +71,39 @@ fn get_complexity_number(stmts: &[Stmt]) -> usize {
let mut complexity = 0;
for stmt in stmts {
match stmt {
Stmt::If(if_stmt) => {
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
..
}) => {
complexity += 1;
complexity += get_complexity_number(&if_stmt.body);
for clause in &if_stmt.elif_else_clauses {
complexity += get_complexity_number(body);
for clause in elif_else_clauses {
if clause.test.is_some() {
complexity += 1;
}
complexity += get_complexity_number(&clause.body);
}
}
Stmt::For(for_stmt) => {
Stmt::For(ast::StmtFor { body, orelse, .. }) => {
complexity += 1;
complexity += get_complexity_number(&for_stmt.body);
complexity += get_complexity_number(&for_stmt.orelse);
complexity += get_complexity_number(body);
complexity += get_complexity_number(orelse);
}
Stmt::With(with_stmt) => {
complexity += get_complexity_number(&with_stmt.body);
Stmt::With(ast::StmtWith { body, .. }) => {
complexity += get_complexity_number(body);
}
Stmt::While(while_stmt) => {
Stmt::While(ast::StmtWhile { body, orelse, .. }) => {
complexity += 1;
complexity += get_complexity_number(&while_stmt.body);
complexity += get_complexity_number(&while_stmt.orelse);
complexity += get_complexity_number(body);
complexity += get_complexity_number(orelse);
}
Stmt::Match(match_stmt) => {
for case in &match_stmt.cases {
Stmt::Match(ast::StmtMatch { cases, .. }) => {
for case in cases {
complexity += 1;
complexity += get_complexity_number(&case.body);
}
if let Some(last_case) = match_stmt.cases.last() {
if let Some(last_case) = cases.last() {
// The complexity of an irrefutable pattern is similar to an `else` block of an `if` statement.
//
// For example:
@@ -117,14 +121,20 @@ fn get_complexity_number(stmts: &[Stmt]) -> usize {
}
}
}
Stmt::Try(try_stmt) => {
complexity += get_complexity_number(&try_stmt.body);
if !try_stmt.orelse.is_empty() {
Stmt::Try(ast::StmtTry {
body,
handlers,
orelse,
finalbody,
..
}) => {
complexity += get_complexity_number(body);
if !orelse.is_empty() {
complexity += 1;
}
complexity += get_complexity_number(&try_stmt.orelse);
complexity += get_complexity_number(&try_stmt.finalbody);
for handler in &try_stmt.handlers {
complexity += get_complexity_number(orelse);
complexity += get_complexity_number(finalbody);
for handler in handlers {
complexity += 1;
let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler {
body, ..
@@ -132,12 +142,12 @@ fn get_complexity_number(stmts: &[Stmt]) -> usize {
complexity += get_complexity_number(body);
}
}
Stmt::FunctionDef(func_def) => {
Stmt::FunctionDef(ast::StmtFunctionDef { body, .. }) => {
complexity += 1;
complexity += get_complexity_number(&func_def.body);
complexity += get_complexity_number(body);
}
Stmt::ClassDef(class_def) => {
complexity += get_complexity_number(&class_def.body);
Stmt::ClassDef(ast::StmtClassDef { body, .. }) => {
complexity += get_complexity_number(body);
}
_ => {}
}

View File

@@ -3,7 +3,7 @@ use ruff_python_ast::name::QualifiedName;
use ruff_python_ast::statement_visitor::StatementVisitor;
use ruff_python_ast::visitor::Visitor;
use ruff_python_ast::visitor::{walk_expr, walk_stmt};
use ruff_python_ast::{Alias, Stmt, statement_visitor};
use ruff_python_ast::{Alias, Stmt, StmtImportFrom, statement_visitor};
use ruff_python_semantic::SemanticModel;
/// AST visitor that searches an AST tree for [`ast::StmtImportFrom`] nodes
@@ -28,9 +28,7 @@ impl StatementVisitor<'_> for ImportSearcher<'_> {
if self.found_import {
return;
}
if let Stmt::ImportFrom(import_from) = stmt {
let module = &import_from.module;
let names = &import_from.names;
if let Stmt::ImportFrom(StmtImportFrom { module, names, .. }) = stmt {
if module.as_ref().is_some_and(|module| module == self.module)
&& names.iter().any(|Alias { name, .. }| name == self.name)
{

View File

@@ -1,7 +1,8 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::helpers::is_const_true;
use ruff_python_ast::token::{Tokens, parenthesized_range};
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{self as ast, Keyword, Stmt};
use ruff_python_trivia::CommentRanges;
use ruff_text_size::Ranged;
use crate::Locator;
@@ -90,7 +91,7 @@ pub(crate) fn inplace_argument(checker: &Checker, call: &ast::ExprCall) {
call,
keyword,
statement,
checker.tokens(),
checker.comment_ranges(),
checker.locator(),
) {
diagnostic.set_fix(fix);
@@ -110,16 +111,21 @@ fn convert_inplace_argument_to_assignment(
call: &ast::ExprCall,
keyword: &Keyword,
statement: &Stmt,
tokens: &Tokens,
comment_ranges: &CommentRanges,
locator: &Locator,
) -> Option<Fix> {
// Add the assignment.
let attr = call.func.as_attribute_expr()?;
let insert_assignment = Edit::insertion(
format!("{name} = ", name = locator.slice(attr.value.range())),
parenthesized_range(call.into(), statement.into(), tokens)
.unwrap_or(call.range())
.start(),
parenthesized_range(
call.into(),
statement.into(),
comment_ranges,
locator.contents(),
)
.unwrap_or(call.range())
.start(),
);
// Remove the `inplace` argument.
@@ -128,7 +134,7 @@ fn convert_inplace_argument_to_assignment(
&call.arguments,
Parentheses::Preserve,
locator.contents(),
tokens,
comment_ranges,
)
.ok()?;

View File

@@ -25,10 +25,10 @@ pub(super) fn is_acronym(name: &str, asname: &str) -> bool {
/// Returns `true` if the statement is an assignment to a named tuple.
pub(super) fn is_named_tuple_assignment(stmt: &Stmt, semantic: &SemanticModel) -> bool {
let Stmt::Assign(node) = stmt else {
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
return false;
};
let Expr::Call(ast::ExprCall { func, .. }) = node.value.as_ref() else {
let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() else {
return false;
};
semantic
@@ -45,10 +45,10 @@ pub(super) fn is_typed_dict_assignment(stmt: &Stmt, semantic: &SemanticModel) ->
return false;
}
let Stmt::Assign(node) = stmt else {
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
return false;
};
let Expr::Call(ast::ExprCall { func, .. }) = node.value.as_ref() else {
let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() else {
return false;
};
semantic.match_typing_expr(func, "TypedDict")
@@ -60,10 +60,10 @@ pub(super) fn is_type_var_assignment(stmt: &Stmt, semantic: &SemanticModel) -> b
return false;
}
let Stmt::Assign(node) = stmt else {
let Stmt::Assign(ast::StmtAssign { value, .. }) = stmt else {
return false;
};
let Expr::Call(ast::ExprCall { func, .. }) = node.value.as_ref() else {
let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() else {
return false;
};
semantic
@@ -77,8 +77,8 @@ pub(super) fn is_type_var_assignment(stmt: &Stmt, semantic: &SemanticModel) -> b
/// Returns `true` if the statement is an assignment to a `TypeAlias`.
pub(super) fn is_type_alias_assignment(stmt: &Stmt, semantic: &SemanticModel) -> bool {
match stmt {
Stmt::AnnAssign(node) => {
semantic.match_typing_expr(&node.annotation, "TypeAlias")
Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. }) => {
semantic.match_typing_expr(annotation, "TypeAlias")
}
Stmt::TypeAlias(_) => true,
_ => false,
@@ -157,15 +157,11 @@ pub(super) fn is_django_model_import(name: &str, stmt: &Stmt, semantic: &Semanti
}
match stmt {
Stmt::AnnAssign(node) => {
if let Some(value) = &node.value {
match_model_import(name, value.as_ref(), semantic)
} else {
false
}
}
Stmt::Assign(node) => {
match_model_import(name, node.value.as_ref(), semantic)
Stmt::AnnAssign(ast::StmtAnnAssign {
value: Some(value), ..
}) => match_model_import(name, value.as_ref(), semantic),
Stmt::Assign(ast::StmtAssign { value, .. }) => {
match_model_import(name, value.as_ref(), semantic)
}
_ => false,
}

View File

@@ -92,16 +92,23 @@ pub(crate) fn manual_dict_comprehension(checker: &Checker, for_stmt: &ast::StmtF
// if idx % 2 == 0:
// result[name] = idx
// ```
[Stmt::If(node)] => {
[
Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
test,
..
}),
] => {
// TODO(charlie): If there's an `else` clause, verify that the `else` has the
// same structure.
if !node.elif_else_clauses.is_empty() {
if !elif_else_clauses.is_empty() {
return;
}
let [stmt] = node.body.as_slice() else {
let [stmt] = body.as_slice() else {
return;
};
(stmt, Some(&node.test))
(stmt, Some(test))
}
// ```python
// for idx, name in enumerate(names):
@@ -111,12 +118,15 @@ pub(crate) fn manual_dict_comprehension(checker: &Checker, for_stmt: &ast::StmtF
_ => return,
};
let Stmt::Assign(node) = stmt else {
let Stmt::Assign(ast::StmtAssign {
targets,
value,
range,
node_index: _,
}) = stmt
else {
return;
};
let targets = &node.targets;
let value = &node.value;
let range = &node.range;
let [
Expr::Subscript(ast::ExprSubscript {
@@ -202,8 +212,8 @@ pub(crate) fn manual_dict_comprehension(checker: &Checker, for_stmt: &ast::StmtF
if is_fix_manual_dict_comprehension_enabled(checker.settings()) {
let binding_stmt = binding.statement(checker.semantic());
let binding_value = binding_stmt.and_then(|binding_stmt| match binding_stmt {
ast::Stmt::AnnAssign(node) => node.value.as_deref(),
ast::Stmt::Assign(node) => Some(&node.value),
ast::Stmt::AnnAssign(assign) => assign.value.as_deref(),
ast::Stmt::Assign(assign) => Some(&assign.value),
_ => None,
});
@@ -233,7 +243,7 @@ pub(crate) fn manual_dict_comprehension(checker: &Checker, for_stmt: &ast::StmtF
// but not necessarily, so this needs to be manually fixed. This does not apply when using an update.
let binding_has_one_target = binding_stmt.is_some_and(|binding_stmt| match binding_stmt {
ast::Stmt::AnnAssign(_) => true,
ast::Stmt::Assign(node) => node.targets.len() == 1,
ast::Stmt::Assign(assign) => assign.targets.len() == 1,
_ => false,
});
// If the binding gets used in between the assignment and the for loop, a comprehension is no longer safe

View File

@@ -109,14 +109,21 @@ pub(crate) fn manual_list_comprehension(checker: &Checker, for_stmt: &ast::StmtF
// if z:
// filtered.append(x)
// ```
[ast::Stmt::If(node)] => {
if !node.elif_else_clauses.is_empty() {
[
ast::Stmt::If(ast::StmtIf {
body,
elif_else_clauses,
test,
..
}),
] => {
if !elif_else_clauses.is_empty() {
return;
}
let [stmt] = node.body.as_slice() else {
let [stmt] = body.as_slice() else {
return;
};
(stmt, Some(&node.test))
(stmt, Some(test))
}
// ```python
// for x in y:
@@ -260,8 +267,8 @@ pub(crate) fn manual_list_comprehension(checker: &Checker, for_stmt: &ast::StmtF
let list_binding_stmt = list_binding.statement(checker.semantic());
let list_binding_value = list_binding_stmt.and_then(|binding_stmt| match binding_stmt {
ast::Stmt::AnnAssign(node) => node.value.as_deref(),
ast::Stmt::Assign(node) => Some(&node.value),
ast::Stmt::AnnAssign(assign) => assign.value.as_deref(),
ast::Stmt::Assign(assign) => Some(&assign.value),
_ => None,
});
@@ -297,7 +304,7 @@ pub(crate) fn manual_list_comprehension(checker: &Checker, for_stmt: &ast::StmtF
// but not necessarily, so this needs to be manually fixed. This does not apply when using an extend.
let binding_has_one_target = list_binding_stmt.is_some_and(|binding_stmt| match binding_stmt {
ast::Stmt::AnnAssign(_) => true,
ast::Stmt::Assign(node) => node.targets.len() == 1,
ast::Stmt::Assign(assign) => assign.targets.len() == 1,
_ => false,
});
@@ -457,8 +464,8 @@ fn convert_to_list_extend(
let binding_stmt = binding.statement(semantic);
let binding_stmt_range = binding_stmt
.and_then(|stmt| match stmt {
ast::Stmt::AnnAssign(node) => Some(node.range),
ast::Stmt::Assign(node) => Some(node.range),
ast::Stmt::AnnAssign(assign) => Some(assign.range),
ast::Stmt::Assign(assign) => Some(assign.range),
_ => None,
})
.ok_or(anyhow!(

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt};
use ruff_python_ast::{PythonVersion, Stmt};
use ruff_python_ast::{self as ast, PythonVersion, Stmt};
use ruff_text_size::Ranged;
use crate::Violation;
@@ -93,11 +93,9 @@ pub(crate) fn try_except_in_loop(checker: &Checker, body: &[Stmt]) {
return;
}
let [Stmt::Try(try_stmt)] = body else {
let [Stmt::Try(ast::StmtTry { handlers, body, .. })] = body else {
return;
};
let handlers = &try_stmt.handlers;
let body = &try_stmt.body;
let Some(handler) = handlers.first() else {
return;

View File

@@ -207,7 +207,7 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
target_id == id
}
// Ex) `foo[0] = bar`
Stmt::Assign(node) => node.targets.iter().any(|target| {
Stmt::Assign(ast::StmtAssign { targets, .. }) => targets.iter().any(|target| {
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
return target_id == id;
@@ -216,16 +216,16 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
false
}),
// Ex) `foo += bar`
Stmt::AugAssign(node) => {
if let Some(ast::ExprName { id: target_id, .. }) = node.target.as_name_expr() {
Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
target_id == id
} else {
false
}
}
// Ex) `foo[0]: int = bar`
Stmt::AnnAssign(node) => {
if let Some(ast::ExprSubscript { value: target, .. }) = node.target.as_subscript_expr() {
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
return target_id == id;
}
@@ -233,7 +233,7 @@ fn match_mutation(stmt: &Stmt, id: &str) -> bool {
false
}
// Ex) `del foo[0]`
Stmt::Delete(node) => node.targets.iter().any(|target| {
Stmt::Delete(ast::StmtDelete { targets, .. }) => targets.iter().any(|target| {
if let Some(ast::ExprSubscript { value: target, .. }) = target.as_subscript_expr() {
if let Some(ast::ExprName { id: target_id, .. }) = target.as_name_expr() {
return target_id == id;

View File

@@ -1,6 +1,6 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::identifier::except;
use ruff_python_ast::{ExceptHandler, Expr, Stmt};
use ruff_python_ast::{self as ast, ExceptHandler, Expr, Stmt};
use crate::Violation;
use crate::checkers::ast::Checker;
@@ -65,7 +65,7 @@ pub(crate) fn bare_except(
if type_.is_none()
&& !body
.iter()
.any(|stmt| matches!(stmt, Stmt::Raise(raise_stmt) if raise_stmt.exc.is_none()))
.any(|stmt| matches!(stmt, Stmt::Raise(ast::StmtRaise { exc: None, .. })))
{
checker.report_diagnostic(BareExcept, except(handler, checker.locator().contents()));
}

View File

@@ -1,5 +1,5 @@
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::token::parenthesized_range;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::{
self as ast, Expr, ExprEllipsisLiteral, ExprLambda, Identifier, Parameter,
ParameterWithDefault, Parameters, Stmt,
@@ -223,7 +223,7 @@ fn function(
..parameter.clone()
})
.collect::<Vec<_>>();
let func = Stmt::FunctionDef(Box::new(ast::StmtFunctionDef {
let func = Stmt::FunctionDef(ast::StmtFunctionDef {
is_async: false,
name: Identifier::new(name.to_string(), TextRange::default()),
parameters: Box::new(Parameters {
@@ -237,13 +237,13 @@ fn function(
type_params: None,
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}));
});
let generated = checker.generator().stmt(&func);
return replace_trailing_ellipsis_with_original_expr(generated, lambda, stmt, checker);
}
}
let function = Stmt::FunctionDef(Box::new(ast::StmtFunctionDef {
let function = Stmt::FunctionDef(ast::StmtFunctionDef {
is_async: false,
name: Identifier::new(name.to_string(), TextRange::default()),
parameters: Box::new(parameters),
@@ -253,7 +253,7 @@ fn function(
type_params: None,
range: TextRange::default(),
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
}));
});
let generated = checker.generator().stmt(&function);
replace_trailing_ellipsis_with_original_expr(generated, lambda, stmt, checker)
@@ -265,19 +265,29 @@ fn replace_trailing_ellipsis_with_original_expr(
stmt: &Stmt,
checker: &Checker,
) -> String {
let original_expr_range =
parenthesized_range((&lambda.body).into(), lambda.into(), checker.tokens())
.unwrap_or(lambda.body.range());
let original_expr_range = parenthesized_range(
(&lambda.body).into(),
lambda.into(),
checker.comment_ranges(),
checker.source(),
)
.unwrap_or(lambda.body.range());
// This prevents the autofix of introducing a syntax error if the lambda's body is an
// expression spanned across multiple lines. To avoid the syntax error we preserve
// the parenthesis around the body.
let original_expr_in_source =
if parenthesized_range(lambda.into(), stmt.into(), checker.tokens()).is_some() {
format!("({})", checker.locator().slice(original_expr_range))
} else {
checker.locator().slice(original_expr_range).to_string()
};
let original_expr_in_source = if parenthesized_range(
lambda.into(),
stmt.into(),
checker.comment_ranges(),
checker.source(),
)
.is_some()
{
format!("({})", checker.locator().slice(original_expr_range))
} else {
checker.locator().slice(original_expr_range).to_string()
};
let placeholder_ellipsis_start = generated.rfind("...").unwrap();
let placeholder_ellipsis_end = placeholder_ellipsis_start + "...".len();

View File

@@ -1,4 +1,4 @@
use ruff_python_ast::token::{Tokens, parenthesized_range};
use ruff_python_ast::parenthesize::parenthesized_range;
use rustc_hash::FxHashMap;
use ruff_macros::{ViolationMetadata, derive_message_formats};
@@ -179,14 +179,15 @@ fn is_redundant_boolean_comparison(op: CmpOp, comparator: &Expr) -> Option<bool>
fn generate_redundant_comparison(
compare: &ast::ExprCompare,
tokens: &Tokens,
comment_ranges: &ruff_python_trivia::CommentRanges,
source: &str,
comparator: &Expr,
kind: bool,
needs_wrap: bool,
) -> String {
let comparator_range = parenthesized_range(comparator.into(), compare.into(), tokens)
.unwrap_or(comparator.range());
let comparator_range =
parenthesized_range(comparator.into(), compare.into(), comment_ranges, source)
.unwrap_or(comparator.range());
let comparator_str = &source[comparator_range];
@@ -378,7 +379,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare)
.copied()
.collect::<Vec<_>>();
let tokens = checker.tokens();
let comment_ranges = checker.comment_ranges();
let source = checker.source();
let content = match (&*compare.ops, &*compare.comparators) {
@@ -386,13 +387,18 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare)
if let Some(kind) = is_redundant_boolean_comparison(*op, &compare.left) {
let needs_wrap = compare.left.range().start() != compare.range().start();
generate_redundant_comparison(
compare, tokens, source, comparator, kind, needs_wrap,
compare,
comment_ranges,
source,
comparator,
kind,
needs_wrap,
)
} else if let Some(kind) = is_redundant_boolean_comparison(*op, comparator) {
let needs_wrap = comparator.range().end() != compare.range().end();
generate_redundant_comparison(
compare,
tokens,
comment_ranges,
source,
&compare.left,
kind,
@@ -404,7 +410,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare)
&ops,
&compare.comparators,
compare.into(),
tokens,
comment_ranges,
source,
)
}
@@ -414,7 +420,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare)
&ops,
&compare.comparators,
compare.into(),
tokens,
comment_ranges,
source,
),
};

View File

@@ -107,7 +107,7 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) {
&[CmpOp::NotIn],
comparators,
unary_op.into(),
checker.tokens(),
checker.comment_ranges(),
checker.source(),
),
unary_op.range(),
@@ -127,7 +127,7 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) {
&[CmpOp::IsNot],
comparators,
unary_op.into(),
checker.tokens(),
checker.comment_ranges(),
checker.source(),
),
unary_op.range(),

Some files were not shown because too many files have changed in this diff Show More