Compare commits

...

34 Commits

Author SHA1 Message Date
Charlie Marsh
2446cd49fa Make CallPath its own struct 2023-04-01 12:06:01 -04:00
Charlie Marsh
5f5e71e81d Make collect_call_path return an Option 2023-04-01 12:05:52 -04:00
Charlie Marsh
d822e08111 Move CallPath into its own module (#3847) 2023-04-01 11:25:04 -04:00
Charlie Marsh
2f90157ce2 Move logging resolver into logging.rs (#3843) 2023-04-01 03:50:44 +00:00
Charlie Marsh
88308ef9cc Move Binding structs out of scope.rs (#3842) 2023-03-31 23:49:48 -04:00
Charlie Marsh
6d80c79bac Combine operations.rs and helpers.rs (#3841) 2023-04-01 03:40:34 +00:00
Charlie Marsh
2fbc620ad3 Move __all__ utilities to all.rs (#3840) 2023-04-01 03:31:15 +00:00
Charlie Marsh
27e40e9b31 Remove helpers.rs dependency on Binding (#3839) 2023-04-01 03:19:45 +00:00
Charlie Marsh
b6276e2d95 Move f-string identification into rule module (#3838) 2023-03-31 23:10:11 -04:00
Charlie Marsh
66d72b1c7b Move keyword checks into is_identifier (#3834) 2023-03-31 16:56:33 -04:00
Jonathan Plasse
968c7df770 Fix is_module_name() and improve perf of is_identifier() (#3795) 2023-03-31 15:15:36 -04:00
Jonathan Plasse
fe38597279 Fix SIM222 and SIM223 false positive (#3832) 2023-03-31 14:50:35 -04:00
Jonathan Plasse
f3f9a9f297 Fix pre-commit CI job exit code (#3833) 2023-03-31 14:47:04 -04:00
Micha Reiser
48d8680e71 Ambiguous unicode, only test unicode characters (#3814) 2023-03-31 18:03:00 +01:00
Charlie Marsh
82584ad101 Extend unncessary-generator-any-all to set comprehensions (#3824) 2023-03-31 16:29:25 +00:00
konstin
13e52b1f76 Use cache in cargo udeps CI (#3809) 2023-03-31 11:07:14 -04:00
Charlie Marsh
dfc872c9a0 Track star imports on Scope directly (#3822) 2023-03-31 15:01:12 +00:00
Charlie Marsh
cf7e1ddd08 Remove some usize references (#3819) 2023-03-30 17:35:42 -04:00
Charlie Marsh
9de1f82658 Avoid unnecessary-comprehension-any-all for async generators (#3823) 2023-03-30 18:43:59 +00:00
Charlie Marsh
54ad9397e5 Flag non-Name expressions in duplicate-isinstance-call (#3817) 2023-03-30 12:19:53 -04:00
Jonathan Plasse
29c8b75fd4 Ignore collapsible-if violations for if False: and if True: (#3732) 2023-03-30 15:52:43 +00:00
Charlie Marsh
0b586d5451 Use panic instead of unreachable for invalid arguments (#3816) 2023-03-30 15:40:53 +00:00
Charlie Marsh
01357f62e5 Add import insertion support to autofix capabilities (#3787) 2023-03-30 15:33:46 +00:00
Micha Reiser
d7113d3995 refactor: StateMachine use match statement (#3811) 2023-03-30 15:55:54 +02:00
Madison Swain-Bowden
a142d71e0b Add Openverse to users of ruff in README (#3806) 2023-03-30 09:14:47 -04:00
Charlie Marsh
f79506f5a4 Move some generic structs out of isort (#3788) 2023-03-30 08:58:01 -04:00
Dhruv Manilawala
44ae3237b8 Additional simple magic return types (#3805) 2023-03-30 08:57:49 -04:00
konstin
f4cda31708 Use crates.io version of pep440_rs (#3812)
* Use crates.io version of pep440_rs

* Update Cargo.lock
2023-03-30 12:47:07 +00:00
Charlie Marsh
4328448a2f Use multi-fix semantics for inplace removal (#3804) 2023-03-30 00:16:43 +00:00
Charlie Marsh
88298759ce Misc. follow-up changes to #3802 (#3803) 2023-03-29 19:18:36 -04:00
Charlie Marsh
3c0e789b19 Improve robustness of argument removal for encode calls (#3802) 2023-03-29 23:07:13 +00:00
Charlie Marsh
8601dcc09b Add import name resolution to Context (#3777) 2023-03-29 21:47:50 +00:00
Charlie Marsh
134fdd1609 Remove star-import handling from sys-exit-alias (#3776) 2023-03-29 21:33:50 +00:00
Charlie Marsh
2e6eddc7bd Improve top-of-file insertions for required imports (#3779) 2023-03-29 21:25:39 +00:00
140 changed files with 3379 additions and 2150 deletions

View File

@@ -183,18 +183,20 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: "Install Rust toolchain"
run: rustup toolchain install nightly
- name: "Install nightly Rust toolchain"
# Only pinned to make caching work, update freely
run: rustup toolchain install nightly-2023-03-30
- uses: Swatinem/rust-cache@v2
- name: "Install cargo-udeps"
uses: taiki-e/install-action@cargo-udeps
- name: "Run cargo-udeps"
run: |
unused_dependencies=$(cargo +nightly udeps > unused.txt && cat unused.txt | cut -d $'\n' -f 2-)
unused_dependencies=$(cargo +nightly-2023-03-30 udeps > unused.txt && cat unused.txt | cut -d $'\n' -f 2-)
if [ -z "$unused_dependencies" ]; then
echo "No unused dependencies found" > $GITHUB_STEP_SUMMARY
exit 0
else
echo "Unused dependencies found" > $GITHUB_STEP_SUMMARY
echo "Found unused dependencies" > $GITHUB_STEP_SUMMARY
echo '```console' >> $GITHUB_STEP_SUMMARY
echo "$unused_dependencies" >> $GITHUB_STEP_SUMMARY
echo '```' >> $GITHUB_STEP_SUMMARY
@@ -225,4 +227,6 @@ jobs:
# Enable color output for pre-commit and remove it for the summary
SKIP=cargo-fmt,clippy,dev-generate-all pre-commit run --all-files --show-diff-on-failure --color=always | \
tee >(sed -E 's/\x1B\[([0-9]{1,2}(;[0-9]{1,2})*)?[mGK]//g' >> $GITHUB_STEP_SUMMARY) >&1
exit_code=${PIPESTATUS[0]}
echo '```' >> $GITHUB_STEP_SUMMARY
exit $exit_code

5
Cargo.lock generated
View File

@@ -1543,8 +1543,9 @@ checksum = "9fa00462b37ead6d11a82c9d568b26682d78e0477dc02d1966c013af80969739"
[[package]]
name = "pep440_rs"
version = "0.2.0"
source = "git+https://github.com/konstin/pep440-rs.git?rev=a8fef4ec47f4c25b070b39cdbe6a0b9847e49941#a8fef4ec47f4c25b070b39cdbe6a0b9847e49941"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5daf676dd9ff1a39faf9c9da9c46f0dbb6211b21a1839a749f5510c24ceca3f"
dependencies = [
"lazy_static",
"regex",

View File

@@ -363,6 +363,7 @@ Ruff is used in a number of major open-source projects, including:
- [nox](https://github.com/wntrblm/nox)
- [Neon](https://github.com/neondatabase/neon)
- [The Algorithms](https://github.com/TheAlgorithms/Python)
- [Openverse](https://github.com/WordPress/openverse)
## License

View File

@@ -46,9 +46,7 @@ path-absolutize = { workspace = true, features = [
"use_unix_paths_on_wasm",
] }
pathdiff = { version = "0.2.1" }
pep440_rs = { git = "https://github.com/konstin/pep440-rs.git", features = [
"serde",
], rev = "a8fef4ec47f4c25b070b39cdbe6a0b9847e49941" }
pep440_rs = { version = "0.3.1", features = ["serde"] }
regex = { workspace = true }
result-like = { version = "0.4.6" }
rustc-hash = { workspace = true }
@@ -66,7 +64,7 @@ textwrap = { workspace = true }
thiserror = { version = "1.0.38" }
toml = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = {version ="0.1.10"}
unicode-width = { version = "0.1.10" }
[dev-dependencies]
insta = { workspace = true, features = ["yaml", "redactions"] }

View File

@@ -1,11 +1,4 @@
# no error
all((x.id for x in bar))
all(x.id for x in bar)
all(x.id for x in bar)
any(x.id for x in bar)
any({x.id for x in bar})
# PIE 802
# PIE802
any([x.id for x in bar])
all([x.id for x in bar])
any( # first comment
@@ -14,3 +7,13 @@ any( # first comment
all( # first comment
[x.id for x in bar], # second comment
) # third comment
any({x.id for x in bar})
# OK
all(x.id for x in bar)
all(x.id for x in bar)
any(x.id for x in bar)
all((x.id for x in bar))
async def f() -> bool:
return all([await use_greeting(greeting) for greeting in await greetings()])

View File

@@ -16,15 +16,26 @@ if isinstance(a, int) or isinstance(b, bool) or isinstance(a, float): # SIM101
if (isinstance(a, int) or isinstance(a, float)) and isinstance(b, bool): # SIM101
pass
if isinstance(a.b, int) or isinstance(a.b, float): # SIM101
pass
if isinstance(a(), int) or isinstance(a(), float): # SIM101
pass
if isinstance(a, int) and isinstance(b, bool) or isinstance(a, float):
pass
if isinstance(a, bool) or isinstance(b, str):
pass
if isinstance(a, int) or isinstance(a.b, float):
pass
def f():
# OK
def isinstance(a, b):
return False
if isinstance(a, int) or isinstance(a, float):
pass

View File

@@ -46,10 +46,10 @@ if a:
if b:
c
while True:
while x > 0:
# SIM102
if True:
if True:
if y > 0:
if z > 0:
"""this
is valid"""
@@ -64,8 +64,8 @@ is valid"""
# SIM102
if True:
if True:
if x > 0:
if y > 0:
"""this
is valid"""
@@ -78,7 +78,7 @@ is valid"""
("so is"
"this for some reason")
while True:
while x > 0:
# SIM102
if node.module:
if node.module == "multiprocessing" or node.module.startswith(
@@ -129,3 +129,15 @@ if a:
print("baz")
else:
print("bar")
# OK
if False:
if a:
pass
# OK
if True:
if a:
pass

View File

@@ -29,3 +29,16 @@ if True or f() or a or g() or b: # SIM222
if a or True or f() or b or g(): # SIM222
pass
if a and f() and b and g() and False: # OK
pass
if a and f() and False and g() and b: # OK
pass
if False and f() and a and g() and b: # OK
pass
if a and False and f() and b and g(): # OK
pass

View File

@@ -16,11 +16,24 @@ if False:
if a and f() and b and g() and False: # OK
pass
if a and f() and False and g() and b: # SIM222
if a and f() and False and g() and b: # SIM223
pass
if False and f() and a and g() and b: # SIM222
if False and f() and a and g() and b: # SIM223
pass
if a and False and f() and b and g(): # SIM222
if a and False and f() and b and g(): # SIM223
pass
if a or f() or b or g() or True: # OK
pass
if a or f() or True or g() or b: # OK
pass
if True or f() or a or g() or b: # OK
pass
if a or True or f() or b or g(): # OK
pass

View File

@@ -18,3 +18,7 @@ if True:
columns=["a"],
axis=1,
)
x.drop(["a"], axis=1, **kwargs, inplace=True)
x.drop(["a"], axis=1, inplace=True, **kwargs)
f(x.drop(["a"], axis=1, inplace=True))

View File

@@ -9,4 +9,8 @@ def main():
quit(1)
sys.exit(2)
def main():
sys = 1
exit(1)
quit(1)

View File

@@ -7,3 +7,10 @@ quit(0)
def main():
exit(1)
quit(1)
def main():
exit = 1
exit(1)
quit(1)

View File

@@ -29,10 +29,13 @@ string = "hello there"
string.encode("utf-8")
bar = "bar"
f"foo{bar}".encode("utf-8") # f"foo{bar}".encode()
f"foo{bar}".encode("utf-8")
encoding = "latin"
"foo".encode(encoding)
f"foo{bar}".encode(encoding)
f"{a=} {b=}".encode(
"utf-8",
)
# `encode` with custom args and kwargs should not be processed.
"foo".encode("utf-8", errors="replace")

View File

@@ -1,5 +1,4 @@
import functools
from functools import lru_cache
@functools.lru_cache(maxsize=None)
@@ -7,11 +6,6 @@ def fixme():
pass
@lru_cache(maxsize=None)
def fixme():
pass
@other_decorator
@functools.lru_cache(maxsize=None)
def fixme():
@@ -29,31 +23,16 @@ def ok():
pass
@lru_cache()
def ok():
pass
@functools.lru_cache(maxsize=64)
def ok():
pass
@lru_cache(maxsize=64)
def ok():
pass
def user_func():
pass
@lru_cache(user_func)
def ok():
pass
@lru_cache(user_func, maxsize=None)
@functools.lru_cache(user_func)
def ok():
pass

View File

@@ -0,0 +1,51 @@
from functools import lru_cache
@lru_cache(maxsize=None)
def fixme():
pass
@other_decorator
@lru_cache(maxsize=None)
def fixme():
pass
@lru_cache(maxsize=None)
@other_decorator
def fixme():
pass
@lru_cache()
def ok():
pass
@lru_cache(maxsize=64)
def ok():
pass
def user_func():
pass
@lru_cache(user_func)
def ok():
pass
@lru_cache(user_func, maxsize=None)
def ok():
pass
def lru_cache(maxsize=None):
pass
@lru_cache(maxsize=None)
def ok():
pass

View File

@@ -7,13 +7,16 @@ use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, S
use rustpython_parser::{lexer, Mode, Tok};
use ruff_diagnostics::Edit;
use ruff_python_ast::context::Context;
use ruff_python_ast::helpers;
use ruff_python_ast::helpers::to_absolute;
use ruff_python_ast::imports::{AnyImport, Import};
use ruff_python_ast::newlines::NewlineWithTrailingNewline;
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use crate::cst::helpers::compose_module_path;
use crate::cst::matchers::match_module;
use crate::importer::Importer;
/// Determine if a body contains only a single statement, taking into account
/// deleted.
@@ -349,7 +352,7 @@ pub fn remove_unused_imports<'a>(
/// For this behavior, set `remove_parentheses` to `true`.
pub fn remove_argument(
locator: &Locator,
stmt_at: Location,
call_at: Location,
expr_at: Location,
expr_end: Location,
args: &[Expr],
@@ -357,7 +360,7 @@ pub fn remove_argument(
remove_parentheses: bool,
) -> Result<Edit> {
// TODO(sbrugman): Preserve trailing comments.
let contents = locator.skip(stmt_at);
let contents = locator.skip(call_at);
let mut fix_start = None;
let mut fix_end = None;
@@ -370,7 +373,7 @@ pub fn remove_argument(
if n_arguments == 1 {
// Case 1: there is only one argument.
let mut count: usize = 0;
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
if matches!(tok, Tok::Lpar) {
if count == 0 {
fix_start = Some(if remove_parentheses {
@@ -402,7 +405,7 @@ pub fn remove_argument(
{
// Case 2: argument or keyword is _not_ the last node.
let mut seen_comma = false;
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
if seen_comma {
if matches!(tok, Tok::NonLogicalNewline) {
// Also delete any non-logical newlines after the comma.
@@ -425,7 +428,7 @@ pub fn remove_argument(
} else {
// Case 3: argument or keyword is the last node, so we have to find the last
// comma in the stmt.
for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
for (start, tok, _) in lexer::lex_located(contents, Mode::Module, call_at).flatten() {
if start == expr_at {
fix_end = Some(expr_end);
break;
@@ -444,6 +447,82 @@ pub fn remove_argument(
}
}
/// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make the
/// symbol available in the current scope along with the bound name of the symbol.
///
/// For example, assuming `module` is `"functools"` and `member` is `"lru_cache"`, this function
/// could return an [`Edit`] to add `import functools` to the top of the file, alongside with the
/// name on which the `lru_cache` symbol would be made available (`"functools.lru_cache"`).
///
/// Attempts to reuse existing imports when possible.
pub fn get_or_import_symbol(
module: &str,
member: &str,
context: &Context,
importer: &Importer,
locator: &Locator,
) -> Result<(Edit, String)> {
if let Some((source, binding)) = context.resolve_qualified_import_name(module, member) {
// If the symbol is already available in the current scope, use it.
//
// We also add a no-nop edit to force conflicts with any other fixes that might try to
// remove the import. Consider:
//
// ```py
// import sys
//
// quit()
// ```
//
// Assume you omit this no-op edit. If you run Ruff with `unused-imports` and
// `sys-exit-alias` over this snippet, it will generate two fixes: (1) remove the unused
// `sys` import; and (2) replace `quit()` with `sys.exit()`, under the assumption that `sys`
// is already imported and available.
//
// By adding this no-op edit, we force the `unused-imports` fix to conflict with the
// `sys-exit-alias` fix, and thus will avoid applying both fixes in the same pass.
let import_edit = Edit::replacement(
locator.slice(source).to_string(),
source.location,
source.end_location.unwrap(),
);
Ok((import_edit, binding))
} else {
if let Some(stmt) = importer.get_import_from(module) {
// Case 1: `from functools import lru_cache` is in scope, and we're trying to reference
// `functools.cache`; thus, we add `cache` to the import, and return `"cache"` as the
// bound name.
if context
.find_binding(member)
.map_or(true, |binding| binding.kind.is_builtin())
{
let import_edit = importer.add_member(stmt, member)?;
Ok((import_edit, member.to_string()))
} else {
bail!(
"Unable to insert `{}` into scope due to name conflict",
member
)
}
} else {
// Case 2: No `functools` import is in scope; thus, we add `import functools`, and
// return `"functools.cache"` as the bound name.
if context
.find_binding(module)
.map_or(true, |binding| binding.kind.is_builtin())
{
let import_edit = importer.add_import(&AnyImport::Import(Import::module(module)));
Ok((import_edit, format!("{module}.{member}")))
} else {
bail!(
"Unable to insert `{}` into scope due to name conflict",
module
)
}
}
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;

View File

@@ -81,24 +81,6 @@ fn apply_fixes<'a>(
(output, fixed)
}
/// Apply a single fix.
pub(crate) fn apply_fix(fix: &Edit, locator: &Locator) -> String {
let mut output = String::with_capacity(locator.len());
// Add all contents from `last_pos` to `fix.location`.
let slice = locator.slice(Range::new(Location::new(1, 0), fix.location));
output.push_str(slice);
// Add the patch itself.
output.push_str(&fix.content);
// Add the remaining content.
let slice = locator.skip(fix.end_location);
output.push_str(slice);
output
}
/// Compare two fixes.
fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
fix1.location()
@@ -119,7 +101,7 @@ mod tests {
use ruff_diagnostics::Edit;
use ruff_python_ast::source_code::Locator;
use crate::autofix::{apply_fix, apply_fixes};
use crate::autofix::apply_fixes;
use crate::rules::pycodestyle::rules::MissingNewlineAtEndOfFile;
fn create_diagnostics(edit: impl IntoIterator<Item = Edit>) -> Vec<Diagnostic> {
@@ -262,31 +244,4 @@ class A:
);
assert_eq!(fixed.values().sum::<usize>(), 1);
}
#[test]
fn apply_single_fix() {
let locator = Locator::new(
r#"
class A(object):
...
"#
.trim(),
);
let contents = apply_fix(
&Edit {
content: String::new(),
location: Location::new(1, 7),
end_location: Location::new(1, 15),
},
&locator,
);
assert_eq!(
contents,
r#"
class A:
...
"#
.trim()
);
}
}

View File

@@ -13,13 +13,15 @@ use rustpython_parser::ast::{
};
use ruff_diagnostics::Diagnostic;
use ruff_python_ast::all::{extract_all_names, AllNamesFlags};
use ruff_python_ast::binding::{
Binding, BindingId, BindingKind, Exceptions, ExecutionContext, Export, FromImportation,
Importation, StarImportation, SubmoduleImportation,
};
use ruff_python_ast::context::Context;
use ruff_python_ast::helpers::{binding_range, extract_handled_exceptions, to_module_path};
use ruff_python_ast::operations::{extract_all_names, AllNamesFlags};
use ruff_python_ast::helpers::{extract_handled_exceptions, to_module_path};
use ruff_python_ast::scope::{
Binding, BindingId, BindingKind, ClassDef, Exceptions, ExecutionContext, Export,
FromImportation, FunctionDef, Importation, Lambda, Scope, ScopeId, ScopeKind, ScopeStack,
StarImportation, SubmoduleImportation,
ClassDef, FunctionDef, Lambda, Scope, ScopeId, ScopeKind, ScopeStack,
};
use ruff_python_ast::source_code::{Indexer, Locator, Stylist};
use ruff_python_ast::types::{Node, Range, RefEquality};
@@ -27,9 +29,7 @@ use ruff_python_ast::typing::{
match_annotated_subscript, parse_type_annotation, Callable, SubscriptKind,
};
use ruff_python_ast::visitor::{walk_excepthandler, walk_pattern, Visitor};
use ruff_python_ast::{
branch_detection, cast, helpers, operations, str, typing, visibility, visitor,
};
use ruff_python_ast::{branch_detection, cast, helpers, str, typing, visibility, visitor};
use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
use ruff_python_stdlib::path::is_python_stub_file;
@@ -38,6 +38,7 @@ use crate::docstrings::definition::{
transition_scope, Definition, DefinitionKind, Docstring, Documentable,
};
use crate::fs::relativize_path;
use crate::importer::Importer;
use crate::registry::{AsRule, Rule};
use crate::rules::{
flake8_2020, flake8_annotations, flake8_bandit, flake8_blind_except, flake8_boolean_trap,
@@ -70,6 +71,7 @@ pub struct Checker<'a> {
pub locator: &'a Locator<'a>,
pub stylist: &'a Stylist<'a>,
pub indexer: &'a Indexer,
pub importer: Importer<'a>,
// Stateful fields.
pub ctx: Context<'a>,
pub deferred: Deferred<'a>,
@@ -92,6 +94,7 @@ impl<'a> Checker<'a> {
locator: &'a Locator,
stylist: &'a Stylist,
indexer: &'a Indexer,
importer: Importer<'a>,
) -> Checker<'a> {
Checker {
settings,
@@ -105,6 +108,7 @@ impl<'a> Checker<'a> {
locator,
stylist,
indexer,
importer,
ctx: Context::new(&settings.typing_modules, path, module_path),
deferred: Deferred::default(),
diagnostics: Vec::default(),
@@ -183,12 +187,24 @@ where
self.ctx.futures_allowed = false;
if !self.ctx.seen_import_boundary
&& !helpers::is_assignment_to_a_dunder(stmt)
&& !operations::in_nested_block(self.ctx.parents.iter().rev().map(Into::into))
&& !helpers::in_nested_block(self.ctx.parents.iter().rev().map(Into::into))
{
self.ctx.seen_import_boundary = true;
}
}
}
// Track each top-level import, to guide import insertions.
if matches!(
&stmt.node,
StmtKind::Import { .. } | StmtKind::ImportFrom { .. }
) {
let scope_index = self.ctx.scope_id();
if scope_index.is_global() && self.ctx.current_stmt_parent().is_none() {
self.importer.visit_import(stmt);
}
}
// Pre-visit.
match &stmt.node {
StmtKind::Global { names } => {
@@ -1097,7 +1113,7 @@ where
stmt,
names,
module.as_ref().map(String::as_str),
level.as_ref(),
*level,
);
}
if self.settings.rules.enabled(Rule::UnnecessaryBuiltinImport) {
@@ -1136,11 +1152,9 @@ where
.rules
.enabled(Rule::PytestIncorrectPytestImport)
{
if let Some(diagnostic) = flake8_pytest_style::rules::import_from(
stmt,
module.as_deref(),
level.as_ref(),
) {
if let Some(diagnostic) =
flake8_pytest_style::rules::import_from(stmt, module.as_deref(), *level)
{
self.diagnostics.push(diagnostic);
}
}
@@ -1180,22 +1194,10 @@ where
));
}
} else if alias.node.name == "*" {
self.add_binding(
"*",
Binding {
kind: BindingKind::StarImportation(StarImportation {
level: *level,
module: module.clone(),
}),
runtime_usage: None,
synthetic_usage: None,
typing_usage: None,
range: Range::from(stmt),
source: Some(*self.ctx.current_stmt()),
context: self.ctx.execution_context(),
exceptions: self.ctx.exceptions(),
},
);
self.ctx.scope_mut().add_star_import(StarImportation {
module: module.as_ref().map(String::as_str),
level: *level,
});
if self
.settings
@@ -1207,7 +1209,7 @@ where
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithNestedImportStarUsage {
name: helpers::format_import_from(
level.as_ref(),
*level,
module.as_deref(),
),
},
@@ -1223,17 +1225,11 @@ where
{
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStar {
name: helpers::format_import_from(
level.as_ref(),
module.as_deref(),
),
name: helpers::format_import_from(*level, module.as_deref()),
},
Range::from(stmt),
));
}
let scope = self.ctx.scope_mut();
scope.import_starred = true;
} else {
if let Some(asname) = &alias.node.asname {
self.check_builtin_shadowing(asname, stmt, false);
@@ -1252,7 +1248,7 @@ where
// and `full_name` would be "foo.bar".
let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
let full_name = helpers::format_import_from_member(
level.as_ref(),
*level,
module.as_deref(),
&alias.node.name,
);
@@ -1283,7 +1279,7 @@ where
flake8_tidy_imports::relative_imports::banned_relative_import(
self,
stmt,
level.as_ref(),
*level,
module.as_deref(),
self.module_path.as_ref(),
&self.settings.flake8_tidy_imports.ban_relative_imports,
@@ -1306,7 +1302,7 @@ where
if self.settings.rules.enabled(Rule::UnconventionalImportAlias) {
let full_name = helpers::format_import_from_member(
level.as_ref(),
*level,
module.as_deref(),
&alias.node.name,
);
@@ -1908,8 +1904,8 @@ where
self.ctx.visible_scope = scope;
// If any global bindings don't already exist in the global scope, add it.
let globals = operations::extract_globals(body);
for (name, stmt) in operations::extract_globals(body) {
let globals = helpers::extract_globals(body);
for (name, stmt) in helpers::extract_globals(body) {
if self
.ctx
.global_scope()
@@ -1971,7 +1967,7 @@ where
self.ctx.visible_scope = scope;
// If any global bindings don't already exist in the global scope, add it.
let globals = operations::extract_globals(body);
let globals = helpers::extract_globals(body);
for (name, stmt) in &globals {
if self
.ctx
@@ -4034,7 +4030,6 @@ impl<'a> Checker<'a> {
BindingKind::Importation(..)
| BindingKind::FromImportation(..)
| BindingKind::SubmoduleImportation(..)
| BindingKind::StarImportation(..)
| BindingKind::FutureImportation
);
if binding.kind.is_loop_var() && existing_is_import {
@@ -4063,7 +4058,16 @@ impl<'a> Checker<'a> {
name: name.to_string(),
line: existing.range.location.row(),
},
binding_range(&binding, self.locator),
matches!(
binding.kind,
BindingKind::ClassDefinition | BindingKind::FunctionDefinition
)
.then(|| {
binding.source.as_ref().map_or(binding.range, |source| {
helpers::identifier_range(source, self.locator)
})
})
.unwrap_or(binding.range),
);
if let Some(parent) = binding.source.as_ref() {
if matches!(parent.node, StmtKind::ImportFrom { .. })
@@ -4241,35 +4245,31 @@ impl<'a> Checker<'a> {
first_iter = false;
in_generator = matches!(scope.kind, ScopeKind::Generator);
import_starred = import_starred || scope.import_starred;
import_starred = import_starred || scope.uses_star_imports();
}
if import_starred {
// F405
if self
.settings
.rules
.enabled(Rule::UndefinedLocalWithImportStarUsage)
{
let mut from_list = vec![];
for scope_index in self.ctx.scope_stack.iter() {
let scope = &self.ctx.scopes[*scope_index];
for binding in scope.binding_ids().map(|index| &self.ctx.bindings[*index]) {
if let BindingKind::StarImportation(StarImportation { level, module }) =
&binding.kind
{
from_list.push(helpers::format_import_from(
level.as_ref(),
module.as_deref(),
));
}
}
}
from_list.sort();
let sources: Vec<String> = self
.ctx
.scopes
.iter()
.flat_map(Scope::star_imports)
.map(|StarImportation { level, module }| {
helpers::format_import_from(*level, *module)
})
.sorted()
.dedup()
.collect();
self.diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: id.to_string(),
sources: from_list,
sources,
},
Range::from(expr),
));
@@ -4401,7 +4401,7 @@ impl<'a> Checker<'a> {
return;
}
if operations::is_unpacking_assignment(parent, expr) {
if helpers::is_unpacking_assignment(parent, expr) {
self.add_binding(
id,
Binding {
@@ -4504,7 +4504,7 @@ impl<'a> Checker<'a> {
let ExprKind::Name { id, .. } = &expr.node else {
return;
};
if operations::on_conditional_branch(&mut self.ctx.parents.iter().rev().map(Into::into)) {
if helpers::on_conditional_branch(&mut self.ctx.parents.iter().rev().map(Into::into)) {
return;
}
@@ -4719,7 +4719,7 @@ impl<'a> Checker<'a> {
);
}
} else {
unreachable!("Expected ExprKind::Lambda");
unreachable!("Expected ExprKind::For | ExprKind::AsyncFor");
}
}
}
@@ -4747,7 +4747,6 @@ impl<'a> Checker<'a> {
}
// Mark anything referenced in `__all__` as used.
let all_bindings: Option<(Vec<BindingId>, Range)> = {
let global_scope = self.ctx.global_scope();
let all_names: Option<(&Vec<String>, Range)> = global_scope
@@ -4821,13 +4820,45 @@ impl<'a> Checker<'a> {
for (index, stack) in self.ctx.dead_scopes.iter().rev() {
let scope = &self.ctx.scopes[*index];
// F822
if index.is_global() {
// F822
if self.settings.rules.enabled(Rule::UndefinedExport) {
if !self.path.ends_with("__init__.py") {
if let Some((names, range)) = &all_names {
diagnostics
.extend(pyflakes::rules::undefined_export(names, range, scope));
}
}
}
// F405
if self
.settings
.rules
.enabled(Rule::UndefinedLocalWithImportStarUsage)
{
if let Some((names, range)) = &all_names {
diagnostics.extend(pyflakes::rules::undefined_export(
names, range, self.path, scope,
));
let sources: Vec<String> = scope
.star_imports()
.map(|StarImportation { level, module }| {
helpers::format_import_from(*level, *module)
})
.sorted()
.dedup()
.collect();
if !sources.is_empty() {
for &name in names {
if !scope.defines(name) {
diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
sources: sources.clone(),
},
*range,
));
}
}
}
}
}
}
@@ -4868,7 +4899,6 @@ impl<'a> Checker<'a> {
BindingKind::Importation(..)
| BindingKind::FromImportation(..)
| BindingKind::SubmoduleImportation(..)
| BindingKind::StarImportation(..)
| BindingKind::FutureImportation
) {
if binding.used() {
@@ -4883,7 +4913,17 @@ impl<'a> Checker<'a> {
name: (*name).to_string(),
line: binding.range.location.row(),
},
binding_range(rebound, self.locator),
matches!(
rebound.kind,
BindingKind::ClassDefinition
| BindingKind::FunctionDefinition
)
.then(|| {
rebound.source.as_ref().map_or(rebound.range, |source| {
helpers::identifier_range(source, self.locator)
})
})
.unwrap_or(rebound.range),
);
if let Some(parent) = &rebound.source {
if matches!(parent.node, StmtKind::ImportFrom { .. })
@@ -4899,41 +4939,6 @@ impl<'a> Checker<'a> {
}
}
if self
.settings
.rules
.enabled(Rule::UndefinedLocalWithImportStarUsage)
{
if scope.import_starred {
if let Some((names, range)) = &all_names {
let mut from_list = vec![];
for binding in scope.binding_ids().map(|index| &self.ctx.bindings[*index]) {
if let BindingKind::StarImportation(StarImportation { level, module }) =
&binding.kind
{
from_list.push(helpers::format_import_from(
level.as_ref(),
module.as_deref(),
));
}
}
from_list.sort();
for &name in names {
if !scope.defines(name) {
diagnostics.push(Diagnostic::new(
pyflakes::rules::UndefinedLocalWithImportStarUsage {
name: name.to_string(),
sources: from_list.clone(),
},
*range,
));
}
}
}
}
}
if enforce_typing_imports {
let runtime_imports: Vec<&Binding> = if self.settings.flake8_type_checking.strict {
vec![]
@@ -5438,6 +5443,7 @@ pub fn check_ast(
locator,
stylist,
indexer,
Importer::new(python_ast, locator, stylist),
);
checker.bind_builtins();

View File

@@ -1,7 +1,7 @@
use anyhow::{bail, Result};
use libcst_native::{
Attribute, Call, Comparison, Dict, Expr, Expression, Import, ImportFrom, Module, SimpleString,
SmallStatement, Statement,
Attribute, Call, Comparison, Dict, Expr, Expression, Import, ImportAlias, ImportFrom,
ImportNames, Module, SimpleString, SmallStatement, Statement,
};
pub fn match_module(module_text: &str) -> Result<Module> {
@@ -54,6 +54,16 @@ pub fn match_import_from<'a, 'b>(module: &'a mut Module<'b>) -> Result<&'a mut I
}
}
pub fn match_aliases<'a, 'b>(
import_from: &'a mut ImportFrom<'b>,
) -> Result<&'a mut Vec<ImportAlias<'b>>> {
if let ImportNames::Aliases(aliases) = &mut import_from.names {
Ok(aliases)
} else {
bail!("Expected ImportNames::Aliases")
}
}
pub fn match_call<'a, 'b>(expression: &'a mut Expression<'b>) -> Result<&'a mut Call<'b>> {
if let Expression::Call(call) = expression {
Ok(call)

359
crates/ruff/src/importer.rs Normal file
View File

@@ -0,0 +1,359 @@
//! Add and modify import statements to make module members available during fix execution.
use anyhow::Result;
use libcst_native::{Codegen, CodegenState, ImportAlias, Name, NameOrAttribute};
use rustc_hash::FxHashMap;
use rustpython_parser::ast::{Location, Stmt, StmtKind, Suite};
use rustpython_parser::{lexer, Mode, Tok};
use ruff_diagnostics::Edit;
use ruff_python_ast::helpers::is_docstring_stmt;
use ruff_python_ast::imports::AnyImport;
use ruff_python_ast::source_code::{Locator, Stylist};
use crate::cst::matchers::{match_aliases, match_import_from, match_module};
pub struct Importer<'a> {
python_ast: &'a Suite,
locator: &'a Locator<'a>,
stylist: &'a Stylist<'a>,
/// A map from module name to top-level `StmtKind::ImportFrom` statements.
import_from_map: FxHashMap<&'a str, &'a Stmt>,
/// The last top-level import statement.
trailing_import: Option<&'a Stmt>,
}
impl<'a> Importer<'a> {
    /// Construct an [`Importer`] over the given module AST, locator, and stylist.
    pub fn new(python_ast: &'a Suite, locator: &'a Locator<'a>, stylist: &'a Stylist<'a>) -> Self {
        Self {
            python_ast,
            locator,
            stylist,
            import_from_map: FxHashMap::default(),
            trailing_import: None,
        }
    }

    /// Visit a top-level import statement, recording it for later lookups.
    ///
    /// Panics if `import` is not a `StmtKind::Import` or `StmtKind::ImportFrom`.
    pub fn visit_import(&mut self, import: &'a Stmt) {
        match &import.node {
            // Top-level `import` statements are never extended, so there's no need to
            // index them.
            StmtKind::Import { .. } => {}
            StmtKind::ImportFrom { module, level, .. } => {
                // Index absolute (level-zero) `from ... import` statements by module name.
                if matches!(level, None | Some(0)) {
                    if let Some(module) = module {
                        self.import_from_map.insert(module.as_str(), import);
                    }
                }
            }
            _ => panic!("Expected StmtKind::Import | StmtKind::ImportFrom"),
        }
        // Remember the most recent top-level import statement.
        self.trailing_import = Some(import);
    }

    /// Add an import statement to import the given module.
    ///
    /// If there are no existing imports, the new import will be added at the top
    /// of the file. Otherwise, it will be added after the most recent top-level
    /// import statement.
    pub fn add_import(&self, import: &AnyImport) -> Edit {
        let required_import = import.to_string();
        let Insertion {
            prefix,
            location,
            suffix,
        } = match self.trailing_import {
            // Insert after the last top-level import.
            Some(stmt) => end_of_statement_insertion(stmt, self.locator, self.stylist),
            // Insert at the top of the file.
            None => top_of_file_insertion(self.python_ast, self.locator, self.stylist),
        };
        Edit::insertion(format!("{prefix}{required_import}{suffix}"), location)
    }

    /// Return the top-level [`Stmt`] that imports the given module using
    /// `StmtKind::ImportFrom`, if it exists.
    pub fn get_import_from(&self, module: &str) -> Option<&Stmt> {
        self.import_from_map.get(module).copied()
    }

    /// Add the given member to an existing `StmtKind::ImportFrom` statement.
    pub fn add_member(&self, stmt: &Stmt, member: &str) -> Result<Edit> {
        // Parse the existing statement's source into a CST.
        let mut tree = match_module(self.locator.slice(stmt))?;
        let import_from = match_import_from(&mut tree)?;
        let aliases = match_aliases(import_from)?;
        // Inherit the trailing-comma style (if any) from the current last alias.
        let comma = aliases.last().and_then(|alias| alias.comma.clone());
        aliases.push(ImportAlias {
            name: NameOrAttribute::N(Box::new(Name {
                value: member,
                lpar: vec![],
                rpar: vec![],
            })),
            asname: None,
            comma,
        });
        // Regenerate source for the modified statement, respecting the file's
        // detected line endings and indentation.
        let mut state = CodegenState {
            default_newline: &self.stylist.line_ending(),
            default_indent: self.stylist.indentation(),
            ..CodegenState::default()
        };
        tree.codegen(&mut state);
        Ok(Edit::replacement(
            state.to_string(),
            stmt.location,
            stmt.end_location.unwrap(),
        ))
    }
}
/// A point at which to insert text, together with the text that must surround
/// the inserted content to splice it into its context.
#[derive(Debug, Clone, PartialEq, Eq)]
struct Insertion {
    /// The content to add before the insertion.
    prefix: &'static str,
    /// The location at which to insert.
    location: Location,
    /// The content to add after the insertion.
    suffix: &'static str,
}
impl Insertion {
    /// Construct an [`Insertion`] from its prefix, insertion point, and suffix.
    fn new(prefix: &'static str, location: Location, suffix: &'static str) -> Self {
        Self { prefix, location, suffix }
    }
}
/// Find the end of the last docstring.
///
/// Returns `None` if the body does not begin with a docstring.
fn match_docstring_end(body: &[Stmt]) -> Option<Location> {
    // Walk the leading run of docstring statements and take the last one's end.
    body.iter()
        .take_while(|stmt| is_docstring_stmt(stmt))
        .last()
        .map(|stmt| stmt.end_location.unwrap())
}
/// Find the location at which an import should be inserted after the given
/// statement, along with a prefix and suffix to use for the insertion.
///
/// If the statement is followed by a semicolon, the insertion point is placed
/// immediately after the semicolon (as an inline statement); otherwise, it's
/// placed at the start of the next line, with a trailing newline suffix.
fn end_of_statement_insertion(stmt: &Stmt, locator: &Locator, stylist: &Stylist) -> Insertion {
    let location = stmt.end_location.unwrap();
    // Lex the source immediately following the statement.
    let mut tokens = lexer::lex_located(locator.skip(location), Mode::Module, location).flatten();
    if let Some((.., Tok::Semi, end)) = tokens.next() {
        // If the first token after the statement is a semicolon, insert after the semicolon as an
        // inline statement;
        Insertion::new(" ", end, ";")
    } else {
        // Otherwise, insert on the next line.
        Insertion::new(
            "",
            Location::new(location.row() + 1, 0),
            stylist.line_ending().as_str(),
        )
    }
}
/// Find the location at which a "top-of-file" import should be inserted,
/// along with a prefix and suffix to use for the insertion.
///
/// For example, given the following code:
///
/// ```python
/// """Hello, world!"""
///
/// import os
/// ```
///
/// The location returned will be the start of the `import os` statement,
/// along with a trailing newline suffix.
fn top_of_file_insertion(body: &[Stmt], locator: &Locator, stylist: &Stylist) -> Insertion {
    // Skip over any docstrings.
    let mut location = if let Some(location) = match_docstring_end(body) {
        // If the first token after the docstring is a semicolon, insert after the semicolon as an
        // inline statement;
        let first_token = lexer::lex_located(locator.skip(location), Mode::Module, location)
            .flatten()
            .next();
        if let Some((.., Tok::Semi, end)) = first_token {
            return Insertion::new(" ", end, ";");
        }
        // Otherwise, advance to the next row.
        Location::new(location.row() + 1, 0)
    } else {
        Location::default()
    };
    // Skip over any comments and empty lines, so the import lands below any
    // shebang or leading comment block.
    for (.., tok, end) in
        lexer::lex_located(locator.skip(location), Mode::Module, location).flatten()
    {
        if matches!(tok, Tok::Comment(..) | Tok::Newline) {
            location = Location::new(end.row() + 1, 0);
        } else {
            break;
        }
    }
    // Tail expression (no `return`): insert on its own line with the file's
    // detected line ending as the suffix.
    Insertion::new("", location, stylist.line_ending().as_str())
}
#[cfg(test)]
mod tests {
    use anyhow::Result;
    use rustpython_parser as parser;
    use rustpython_parser::ast::Location;
    use rustpython_parser::lexer::LexResult;
    use ruff_python_ast::source_code::{LineEnding, Locator, Stylist};
    use crate::importer::{top_of_file_insertion, Insertion};

    /// Parse `contents` and compute its "top-of-file" insertion point.
    fn insert(contents: &str) -> Result<Insertion> {
        let program = parser::parse_program(contents, "<filename>")?;
        let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
        let locator = Locator::new(contents);
        let stylist = Stylist::from_tokens(&tokens, &locator);
        Ok(top_of_file_insertion(&program, &locator, &stylist))
    }

    #[test]
    fn top_of_file_insertions() -> Result<()> {
        // Empty file: insert at the very top, with the default line ending.
        let contents = "";
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(1, 0), LineEnding::default().as_str())
        );

        // Docstring with no trailing newline: insert on the next row.
        let contents = r#"
"""Hello, world!""""#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(2, 0), LineEnding::default().as_str())
        );

        // Docstring with a trailing newline: insert on the next row, using the
        // file's detected line ending.
        let contents = r#"
"""Hello, world!"""
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(2, 0), "\n")
        );

        // Two consecutive docstring-like statements: insert after the last one.
        let contents = r#"
"""Hello, world!"""
"""Hello, world!"""
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(3, 0), "\n")
        );

        // Plain code with no docstring: insert at the very top.
        let contents = r#"
x = 1
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(1, 0), "\n")
        );

        // A shebang is a comment: insert below it.
        let contents = r#"
#!/usr/bin/env python3
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(2, 0), "\n")
        );

        // Comment followed by a docstring: insert below both.
        let contents = r#"
#!/usr/bin/env python3
"""Hello, world!"""
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(3, 0), "\n")
        );

        // Docstring followed by a comment: insert below both.
        let contents = r#"
"""Hello, world!"""
#!/usr/bin/env python3
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(3, 0), "\n")
        );

        // A string expression that is NOT a docstring (it's an operand): insert
        // at the very top.
        let contents = r#"
"""%s""" % "Hello, world!"
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new("", Location::new(1, 0), "\n")
        );

        // Docstring followed by a semicolon: insert inline, after the semicolon.
        let contents = r#"
"""Hello, world!"""; x = 1
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new(" ", Location::new(1, 20), ";")
        );

        // Semicolon-separated statements with a line continuation: still insert
        // inline after the first semicolon.
        let contents = r#"
"""Hello, world!"""; x = 1; y = \
    2
"#
        .trim_start();
        assert_eq!(
            insert(contents)?,
            Insertion::new(" ", Location::new(1, 20), ";")
        );

        Ok(())
    }
}

View File

@@ -32,86 +32,91 @@ pub struct StateMachine {
impl StateMachine {
pub fn consume(&mut self, tok: &Tok) -> bool {
if matches!(
tok,
Tok::NonLogicalNewline | Tok::Newline | Tok::Indent | Tok::Dedent | Tok::Comment(..)
) {
return false;
}
match tok {
Tok::NonLogicalNewline
| Tok::Newline
| Tok::Indent
| Tok::Dedent
| Tok::Comment(..) => false,
if matches!(tok, Tok::String { .. }) {
return if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
true
} else {
false
};
}
if matches!(tok, Tok::Class) {
self.state = State::ExpectClassColon;
self.bracket_count = 0;
return false;
}
if matches!(tok, Tok::Def) {
self.state = State::ExpectFunctionColon;
self.bracket_count = 0;
return false;
}
if matches!(tok, Tok::Colon) {
if self.bracket_count == 0 {
if matches!(self.state, State::ExpectClassColon) {
self.state = State::ExpectClassDocstring;
} else if matches!(self.state, State::ExpectFunctionColon) {
self.state = State::ExpectFunctionDocstring;
Tok::String { .. } => {
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
true
} else {
false
}
}
return false;
}
Tok::Class => {
self.state = State::ExpectClassColon;
self.bracket_count = 0;
if matches!(tok, Tok::Lpar | Tok::Lbrace | Tok::Lsqb) {
self.bracket_count += 1;
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
false
}
return false;
}
if matches!(tok, Tok::Rpar | Tok::Rbrace | Tok::Rsqb) {
self.bracket_count -= 1;
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
Tok::Def => {
self.state = State::ExpectFunctionColon;
self.bracket_count = 0;
false
}
return false;
}
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
return false;
}
Tok::Colon => {
if self.bracket_count == 0 {
if matches!(self.state, State::ExpectClassColon) {
self.state = State::ExpectClassDocstring;
} else if matches!(self.state, State::ExpectFunctionColon) {
self.state = State::ExpectFunctionDocstring;
}
}
false
false
}
Tok::Lpar | Tok::Lbrace | Tok::Lsqb => {
self.bracket_count += 1;
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
}
false
}
Tok::Rpar | Tok::Rbrace | Tok::Rsqb => {
self.bracket_count -= 1;
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
}
false
}
_ => {
if matches!(
self.state,
State::ExpectModuleDocstring
| State::ExpectClassDocstring
| State::ExpectFunctionDocstring
) {
self.state = State::Other;
}
false
}
}
}
}

View File

@@ -19,6 +19,7 @@ mod doc_lines;
mod docstrings;
pub mod flake8_to_ruff;
pub mod fs;
mod importer;
pub mod jupyter;
mod lex;
pub mod linter;

View File

@@ -5,7 +5,8 @@ use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, Operator};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{compose_call_path, SimpleCallArgs};
use ruff_python_ast::call_path::compose_call_path;
use ruff_python_ast::helpers::SimpleCallArgs;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;

View File

@@ -2,8 +2,8 @@ use rustpython_parser::ast::{Expr, ExprKind, Stmt, StmtKind};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers;
use ruff_python_ast::helpers::{find_keyword, is_const_true};
use ruff_python_ast::logging;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -59,7 +59,7 @@ pub fn blind_except(
if body.iter().any(|stmt| {
if let StmtKind::Expr { value } = &stmt.node {
if let ExprKind::Call { func, keywords, .. } = &value.node {
if helpers::is_logger_candidate(&checker.ctx, func) {
if logging::is_logger_candidate(&checker.ctx, func) {
if let ExprKind::Attribute { attr, .. } = &func.node {
if attr == "exception" {
return true;

View File

@@ -3,7 +3,7 @@ use rustpython_parser::ast::{Arguments, Constant, Expr, ExprKind};
use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::collect_call_path;
use ruff_python_ast::call_path::collect_call_path;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -107,8 +107,7 @@ pub fn check_positional_boolean_in_def(
}
if decorator_list.iter().any(|expr| {
let call_path = collect_call_path(expr);
call_path.as_slice() == [name, "setter"]
collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
}) {
return;
}
@@ -151,8 +150,7 @@ pub fn check_boolean_default_value_in_function_definition(
}
if decorator_list.iter().any(|expr| {
let call_path = collect_call_path(expr);
call_path.as_slice() == [name, "setter"]
collect_call_path(expr).map_or(false, |call_path| call_path.as_slice() == [name, "setter"])
}) {
return;
}

View File

@@ -7,9 +7,10 @@ use rustpython_parser::ast::{
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers;
use ruff_python_ast::call_path;
use ruff_python_ast::call_path::CallPath;
use ruff_python_ast::helpers::unparse_expr;
use ruff_python_ast::types::{CallPath, Range};
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::{AsRule, Rule};
@@ -69,8 +70,7 @@ fn duplicate_handler_exceptions<'a>(
let mut duplicates: FxHashSet<CallPath> = FxHashSet::default();
let mut unique_elts: Vec<&Expr> = Vec::default();
for type_ in elts {
let call_path = helpers::collect_call_path(type_);
if !call_path.is_empty() {
if let Some(call_path) = call_path::collect_call_path(type_) {
if seen.contains_key(&call_path) {
duplicates.insert(call_path);
} else {
@@ -124,8 +124,7 @@ pub fn duplicate_exceptions(checker: &mut Checker, handlers: &[Excepthandler]) {
};
match &type_.node {
ExprKind::Attribute { .. } | ExprKind::Name { .. } => {
let call_path = helpers::collect_call_path(type_);
if !call_path.is_empty() {
if let Some(call_path) = call_path::collect_call_path(type_) {
if seen.contains(&call_path) {
duplicates.entry(call_path).or_default().push(type_);
} else {

View File

@@ -3,8 +3,9 @@ use rustpython_parser::ast::{Arguments, Constant, Expr, ExprKind};
use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{compose_call_path, to_call_path};
use ruff_python_ast::types::{CallPath, Range};
use ruff_python_ast::call_path::from_qualified_name;
use ruff_python_ast::call_path::{compose_call_path, CallPath};
use ruff_python_ast::types::Range;
use ruff_python_ast::visitor;
use ruff_python_ast::visitor::Visitor;
@@ -122,7 +123,7 @@ pub fn function_call_argument_default(checker: &mut Checker, arguments: &Argumen
.flake8_bugbear
.extend_immutable_calls
.iter()
.map(|target| to_call_path(target))
.map(|target| from_qualified_name(target))
.collect();
let diagnostics = {
let mut visitor = ArgumentDefaultVisitor {

View File

@@ -5,7 +5,6 @@ use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::unparse_expr;
use ruff_python_ast::types::Range;
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use ruff_python_stdlib::keyword::KWLIST;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -58,7 +57,7 @@ pub fn getattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
if !is_identifier(value) {
return;
}
if KWLIST.contains(&value.as_str()) || is_mangled_private(value.as_str()) {
if is_mangled_private(value.as_str()) {
return;
}

View File

@@ -6,7 +6,6 @@ use ruff_python_ast::helpers::unparse_stmt;
use ruff_python_ast::source_code::Stylist;
use ruff_python_ast::types::Range;
use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private};
use ruff_python_stdlib::keyword::KWLIST;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -69,7 +68,7 @@ pub fn setattr_with_constant(checker: &mut Checker, expr: &Expr, func: &Expr, ar
if !is_identifier(name) {
return;
}
if KWLIST.contains(&name.as_str()) || is_mangled_private(name.as_str()) {
if is_mangled_private(name.as_str()) {
return;
}
// We can only replace a `setattr` call (which is an `Expr`) with an assignment

View File

@@ -2,14 +2,11 @@ use rustpython_parser::ast::{Expr, Stmt};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::format_call_path;
use ruff_python_ast::call_path::{format_call_path, from_unqualified_name, CallPath};
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
use super::types::DebuggerUsingType;
// flake8-debugger
use crate::rules::flake8_debugger::types::DebuggerUsingType;
#[violation]
pub struct Debugger {
@@ -70,9 +67,12 @@ pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<
}
if let Some(module) = module {
let mut call_path = module.split('.').collect::<Vec<_>>();
let mut call_path: CallPath = from_unqualified_name(module);
call_path.push(name);
if DEBUGGERS.iter().any(|target| call_path == **target) {
if DEBUGGERS
.iter()
.any(|target| call_path.as_slice() == *target)
{
return Some(Diagnostic::new(
Debugger {
using_type: DebuggerUsingType::Import(format_call_path(&call_path)),
@@ -81,10 +81,10 @@ pub fn debugger_import(stmt: &Stmt, module: Option<&str>, name: &str) -> Option<
));
}
} else {
let parts = name.split('.').collect::<Vec<_>>();
let parts: CallPath = from_unqualified_name(name);
if DEBUGGERS
.iter()
.any(|call_path| call_path[..call_path.len() - 1] == parts)
.any(|call_path| &call_path[..call_path.len() - 1] == parts.as_slice())
{
return Some(Diagnostic::new(
Debugger {

View File

@@ -1,6 +1,7 @@
use ruff_python_ast::context::Context;
use rustpython_parser::ast::Expr;
use ruff_python_ast::context::Context;
/// Return `true` if a Python class appears to be a Django model, based on its base classes.
pub fn is_model(context: &Context, base: &Expr) -> bool {
context.resolve_call_path(base).map_or(false, |call_path| {

View File

@@ -2,7 +2,8 @@ use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::types::{CallPath, Range};
use ruff_python_ast::call_path::CallPath;
use ruff_python_ast::types::Range;
/// ## What it does
/// Checks that Django's `@receiver` decorator is listed first, prior to

View File

@@ -1,9 +1,10 @@
use std::fmt;
use rustpython_parser::ast::{Expr, ExprKind, Stmt, StmtKind};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::types::Range;
use rustpython_parser::ast::{Expr, ExprKind, Stmt, StmtKind};
use crate::checkers::ast::Checker;

View File

@@ -1,8 +1,8 @@
use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, Location, Operator};
use ruff_diagnostics::{Diagnostic, Edit};
use ruff_python_ast::helpers::{find_keyword, is_logger_candidate, SimpleCallArgs};
use ruff_python_ast::logging::LoggingLevel;
use ruff_python_ast::helpers::{find_keyword, SimpleCallArgs};
use ruff_python_ast::logging;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -128,7 +128,7 @@ fn check_log_record_attr_clash(checker: &mut Checker, extra: &Keyword) {
#[derive(Copy, Clone)]
enum LoggingCallType {
/// Logging call with a level method, e.g., `logging.info`.
LevelCall(LoggingLevel),
LevelCall(logging::LoggingLevel),
/// Logging call with an integer level as an argument, e.g., `logger.log(level, ...)`.
LogCall,
}
@@ -138,14 +138,14 @@ impl LoggingCallType {
if attr == "log" {
Some(LoggingCallType::LogCall)
} else {
LoggingLevel::from_attribute(attr).map(LoggingCallType::LevelCall)
logging::LoggingLevel::from_attribute(attr).map(LoggingCallType::LevelCall)
}
}
}
/// Check logging calls for violations.
pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords: &[Keyword]) {
if !is_logger_candidate(&checker.ctx, func) {
if !logging::is_logger_candidate(&checker.ctx, func) {
return;
}
@@ -173,7 +173,7 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
if checker.settings.rules.enabled(Rule::LoggingWarn)
&& matches!(
logging_call_type,
LoggingCallType::LevelCall(LoggingLevel::Warn)
LoggingCallType::LevelCall(logging::LoggingLevel::Warn)
)
{
let mut diagnostic = Diagnostic::new(LoggingWarn, level_call_range);
@@ -228,14 +228,14 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
if let LoggingCallType::LevelCall(logging_level) = logging_call_type {
match logging_level {
LoggingLevel::Error => {
logging::LoggingLevel::Error => {
if checker.settings.rules.enabled(Rule::LoggingExcInfo) {
checker
.diagnostics
.push(Diagnostic::new(LoggingExcInfo, level_call_range));
}
}
LoggingLevel::Exception => {
logging::LoggingLevel::Exception => {
if checker
.settings
.rules

View File

@@ -1,7 +1,7 @@
use itertools::Either::{Left, Right};
use std::collections::BTreeMap;
use std::iter;
use itertools::Either::{Left, Right};
use log::error;
use rustc_hash::FxHashSet;
use rustpython_parser::ast::{
@@ -12,10 +12,9 @@ use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::helpers::{create_expr, match_trailing_comment, unparse_expr};
use ruff_python_ast::helpers::{any_over_expr, create_expr, match_trailing_comment, unparse_expr};
use ruff_python_ast::types::{Range, RefEquality};
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_python_stdlib::keyword::KWLIST;
use crate::autofix::helpers::delete_stmt;
use crate::checkers::ast::Checker;
@@ -332,6 +331,11 @@ pub fn unnecessary_spread(checker: &mut Checker, keys: &[Option<Expr>], values:
}
}
/// Return `true` if the `Expr` contains an `await` expression.
fn is_async_generator(expr: &Expr) -> bool {
any_over_expr(expr, &|expr| matches!(expr.node, ExprKind::Await { .. }))
}
/// PIE802
pub fn unnecessary_comprehension_any_all(
checker: &mut Checker,
@@ -339,26 +343,26 @@ pub fn unnecessary_comprehension_any_all(
func: &Expr,
args: &[Expr],
) {
if let ExprKind::Name { id, .. } = &func.node {
if (id == "all" || id == "any") && args.len() == 1 {
if !checker.ctx.is_builtin(id) {
return;
}
if let ExprKind::ListComp { .. } = args[0].node {
let mut diagnostic =
Diagnostic::new(UnnecessaryComprehensionAnyAll, Range::from(&args[0]));
if checker.patch(diagnostic.kind.rule()) {
diagnostic.try_set_fix(|| {
fixes::fix_unnecessary_comprehension_any_all(
checker.locator,
checker.stylist,
expr,
)
});
}
checker.diagnostics.push(diagnostic);
}
let ExprKind::Name { id, .. } = &func.node else {
return;
};
if (matches!(id.as_str(), "all" | "any")) && args.len() == 1 {
let (ExprKind::ListComp { elt, .. } | ExprKind::SetComp { elt, .. }) = &args[0].node else {
return;
};
if is_async_generator(elt) {
return;
}
if !checker.ctx.is_builtin(id) {
return;
}
let mut diagnostic = Diagnostic::new(UnnecessaryComprehensionAnyAll, Range::from(&args[0]));
if checker.patch(diagnostic.kind.rule()) {
diagnostic.try_set_fix(|| {
fixes::fix_unnecessary_comprehension_any_all(checker.locator, checker.stylist, expr)
});
}
checker.diagnostics.push(diagnostic);
}
}
@@ -369,7 +373,7 @@ fn is_valid_kwarg_name(key: &Expr) -> bool {
..
} = &key.node
{
is_identifier(value) && !KWLIST.contains(&value.as_str())
is_identifier(value)
} else {
false
}
@@ -512,7 +516,7 @@ pub fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
/// PIE807
pub fn reimplemented_list_builtin(checker: &mut Checker, expr: &Expr) {
let ExprKind::Lambda { args, body } = &expr.node else {
unreachable!("Expected ExprKind::Lambda");
panic!("Expected ExprKind::Lambda");
};
if args.args.is_empty()
&& args.kwonlyargs.is_empty()

View File

@@ -8,19 +8,19 @@ expression: diagnostics
suggestion: Remove unnecessary list comprehension
fixable: true
location:
row: 9
row: 2
column: 4
end_location:
row: 9
row: 2
column: 23
fix:
edits:
- content: any(x.id for x in bar)
location:
row: 9
row: 2
column: 0
end_location:
row: 9
row: 2
column: 24
parent: ~
- kind:
@@ -29,19 +29,19 @@ expression: diagnostics
suggestion: Remove unnecessary list comprehension
fixable: true
location:
row: 10
row: 3
column: 4
end_location:
row: 10
row: 3
column: 23
fix:
edits:
- content: all(x.id for x in bar)
location:
row: 10
row: 3
column: 0
end_location:
row: 10
row: 3
column: 24
parent: ~
- kind:
@@ -50,19 +50,19 @@ expression: diagnostics
suggestion: Remove unnecessary list comprehension
fixable: true
location:
row: 12
row: 5
column: 4
end_location:
row: 12
row: 5
column: 23
fix:
edits:
- content: "any( # first comment\n x.id for x in bar # second comment\n)"
location:
row: 11
row: 4
column: 0
end_location:
row: 13
row: 6
column: 1
parent: ~
- kind:
@@ -71,19 +71,33 @@ expression: diagnostics
suggestion: Remove unnecessary list comprehension
fixable: true
location:
row: 15
row: 8
column: 4
end_location:
row: 15
row: 8
column: 23
fix:
edits:
- content: "all( # first comment\n x.id for x in bar # second comment\n)"
location:
row: 14
row: 7
column: 0
end_location:
row: 16
row: 9
column: 1
parent: ~
- kind:
name: UnnecessaryComprehensionAnyAll
body: Unnecessary list comprehension.
suggestion: Remove unnecessary list comprehension
fixable: true
location:
row: 10
column: 4
end_location:
row: 10
column: 23
fix:
edits: []
parent: ~

View File

@@ -4,7 +4,8 @@ use rustpython_parser::ast::{Arguments, Expr, ExprKind, Keyword, Location, Stmt,
use ruff_diagnostics::{AlwaysAutofixableViolation, Violation};
use ruff_diagnostics::{Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{collect_arg_names, collect_call_path};
use ruff_python_ast::call_path::collect_call_path;
use ruff_python_ast::helpers::collect_arg_names;
use ruff_python_ast::source_code::Locator;
use ruff_python_ast::types::Range;
use ruff_python_ast::visitor;
@@ -15,8 +16,8 @@ use crate::checkers::ast::Checker;
use crate::registry::{AsRule, Rule};
use super::helpers::{
get_mark_decorators, get_mark_name, is_abstractmethod_decorator, is_pytest_fixture,
is_pytest_yield_fixture, keyword_is_literal,
get_mark_decorators, is_abstractmethod_decorator, is_pytest_fixture, is_pytest_yield_fixture,
keyword_is_literal,
};
#[violation]
@@ -211,7 +212,9 @@ where
}
}
ExprKind::Call { func, .. } => {
if collect_call_path(func).as_slice() == ["request", "addfinalizer"] {
if collect_call_path(func).map_or(false, |call_path| {
call_path.as_slice() == ["request", "addfinalizer"]
}) {
self.addfinalizer_call = Some(expr);
};
visitor::walk_expr(self, expr);
@@ -465,20 +468,19 @@ fn check_fixture_addfinalizer(checker: &mut Checker, args: &Arguments, body: &[S
/// PT024, PT025
fn check_fixture_marks(checker: &mut Checker, decorators: &[Expr]) {
for mark in get_mark_decorators(decorators) {
let name = get_mark_name(mark);
for (expr, call_path) in get_mark_decorators(decorators) {
let name = call_path.last().expect("Expected a mark name");
if checker
.settings
.rules
.enabled(Rule::PytestUnnecessaryAsyncioMarkOnFixture)
{
if name == "asyncio" {
if *name == "asyncio" {
let mut diagnostic =
Diagnostic::new(PytestUnnecessaryAsyncioMarkOnFixture, Range::from(mark));
Diagnostic::new(PytestUnnecessaryAsyncioMarkOnFixture, Range::from(expr));
if checker.patch(diagnostic.kind.rule()) {
let start = Location::new(mark.location.row(), 0);
let end = Location::new(mark.end_location.unwrap().row() + 1, 0);
let start = Location::new(expr.location.row(), 0);
let end = Location::new(expr.end_location.unwrap().row() + 1, 0);
diagnostic.set_fix(Edit::deletion(start, end));
}
checker.diagnostics.push(diagnostic);
@@ -490,12 +492,12 @@ fn check_fixture_marks(checker: &mut Checker, decorators: &[Expr]) {
.rules
.enabled(Rule::PytestErroneousUseFixturesOnFixture)
{
if name == "usefixtures" {
if *name == "usefixtures" {
let mut diagnostic =
Diagnostic::new(PytestErroneousUseFixturesOnFixture, Range::from(mark));
Diagnostic::new(PytestErroneousUseFixturesOnFixture, Range::from(expr));
if checker.patch(diagnostic.kind.rule()) {
let start = Location::new(mark.location.row(), 0);
let end = Location::new(mark.end_location.unwrap().row() + 1, 0);
let start = Location::new(expr.location.row(), 0);
let end = Location::new(expr.end_location.unwrap().row() + 1, 0);
diagnostic.set_fix(Edit::deletion(start, end));
}
checker.diagnostics.push(diagnostic);

View File

@@ -1,20 +1,24 @@
use num_traits::identities::Zero;
use ruff_python_ast::call_path::{collect_call_path, CallPath};
use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
use ruff_python_ast::helpers::{collect_call_path, map_callable};
use ruff_python_ast::helpers::map_callable;
use crate::checkers::ast::Checker;
const ITERABLE_INITIALIZERS: &[&str] = &["dict", "frozenset", "list", "tuple", "set"];
pub fn get_mark_decorators(decorators: &[Expr]) -> impl Iterator<Item = &Expr> {
decorators
.iter()
.filter(|decorator| is_pytest_mark(decorator))
}
pub fn get_mark_name(decorator: &Expr) -> &str {
collect_call_path(map_callable(decorator)).last().unwrap()
pub fn get_mark_decorators(decorators: &[Expr]) -> impl Iterator<Item = (&Expr, CallPath)> {
decorators.iter().filter_map(|decorator| {
let Some(call_path) = collect_call_path(map_callable(decorator)) else {
return None;
};
if call_path.len() > 2 && call_path.as_slice()[..2] == ["pytest", "mark"] {
Some((decorator, call_path))
} else {
None
}
})
}
pub fn is_pytest_fail(call: &Expr, checker: &Checker) -> bool {
@@ -39,15 +43,6 @@ pub fn is_pytest_fixture(decorator: &Expr, checker: &Checker) -> bool {
})
}
pub fn is_pytest_mark(decorator: &Expr) -> bool {
let segments = collect_call_path(map_callable(decorator));
if segments.len() > 2 {
segments[0] == "pytest" && segments[1] == "mark"
} else {
false
}
}
pub fn is_pytest_yield_fixture(decorator: &Expr, checker: &Checker) -> bool {
checker
.ctx

View File

@@ -37,11 +37,11 @@ pub fn import(import_from: &Stmt, name: &str, asname: Option<&str>) -> Option<Di
pub fn import_from(
import_from: &Stmt,
module: Option<&str>,
level: Option<&usize>,
level: Option<usize>,
) -> Option<Diagnostic> {
// If level is not zero or module is none, return
if let Some(level) = level {
if *level != 0 {
if level != 0 {
return None;
}
};

View File

@@ -2,12 +2,13 @@ use rustpython_parser::ast::{Expr, ExprKind, Location};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::call_path::CallPath;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::{AsRule, Rule};
use super::helpers::{get_mark_decorators, get_mark_name};
use super::helpers::get_mark_decorators;
#[violation]
pub struct PytestIncorrectMarkParenthesesStyle {
@@ -52,13 +53,14 @@ impl AlwaysAutofixableViolation for PytestUseFixturesWithoutParameters {
fn pytest_mark_parentheses(
checker: &mut Checker,
decorator: &Expr,
call_path: &CallPath,
fix: Edit,
preferred: &str,
actual: &str,
) {
let mut diagnostic = Diagnostic::new(
PytestIncorrectMarkParenthesesStyle {
mark_name: get_mark_name(decorator).to_string(),
mark_name: (*call_path.last().unwrap()).to_string(),
expected_parens: preferred.to_string(),
actual_parens: actual.to_string(),
},
@@ -70,7 +72,7 @@ fn pytest_mark_parentheses(
checker.diagnostics.push(diagnostic);
}
fn check_mark_parentheses(checker: &mut Checker, decorator: &Expr) {
fn check_mark_parentheses(checker: &mut Checker, decorator: &Expr, call_path: &CallPath) {
match &decorator.node {
ExprKind::Call {
func,
@@ -84,20 +86,20 @@ fn check_mark_parentheses(checker: &mut Checker, decorator: &Expr) {
{
let fix =
Edit::deletion(func.end_location.unwrap(), decorator.end_location.unwrap());
pytest_mark_parentheses(checker, decorator, fix, "", "()");
pytest_mark_parentheses(checker, decorator, call_path, fix, "", "()");
}
}
_ => {
if checker.settings.flake8_pytest_style.mark_parentheses {
let fix = Edit::insertion("()".to_string(), decorator.end_location.unwrap());
pytest_mark_parentheses(checker, decorator, fix, "()", "");
pytest_mark_parentheses(checker, decorator, call_path, fix, "()", "");
}
}
}
}
fn check_useless_usefixtures(checker: &mut Checker, decorator: &Expr) {
if get_mark_name(decorator) != "usefixtures" {
fn check_useless_usefixtures(checker: &mut Checker, decorator: &Expr, call_path: &CallPath) {
if *call_path.last().unwrap() != "usefixtures" {
return;
}
@@ -130,12 +132,12 @@ pub fn marks(checker: &mut Checker, decorators: &[Expr]) {
.rules
.enabled(Rule::PytestUseFixturesWithoutParameters);
for mark in get_mark_decorators(decorators) {
for (expr, call_path) in get_mark_decorators(decorators) {
if enforce_parentheses {
check_mark_parentheses(checker, mark);
check_mark_parentheses(checker, expr, &call_path);
}
if enforce_useless_usefixtures {
check_useless_usefixtures(checker, mark);
check_useless_usefixtures(checker, expr, &call_path);
}
}
}

View File

@@ -3,7 +3,8 @@ use rustpython_parser::ast::{Expr, ExprKind, Keyword};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{collect_arg_names, compose_call_path, SimpleCallArgs};
use ruff_python_ast::call_path::compose_call_path;
use ruff_python_ast::helpers::{collect_arg_names, SimpleCallArgs};
use ruff_python_ast::types::Range;
use ruff_python_ast::visitor;
use ruff_python_ast::visitor::Visitor;

View File

@@ -2,7 +2,8 @@ use rustpython_parser::ast::{Expr, ExprKind, Keyword, Stmt, StmtKind, Withitem};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{format_call_path, to_call_path};
use ruff_python_ast::call_path::format_call_path;
use ruff_python_ast::call_path::from_qualified_name;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -156,7 +157,7 @@ fn exception_needs_match(checker: &mut Checker, exception: &Expr) {
.flake8_pytest_style
.raises_extend_require_match_for,
)
.any(|target| call_path == to_call_path(target));
.any(|target| call_path == from_qualified_name(target));
if is_broad_exception {
Some(format_call_path(&call_path))
} else {

View File

@@ -2,7 +2,7 @@ use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::collect_call_path;
use ruff_python_ast::call_path::collect_call_path;
use ruff_python_ast::scope::ScopeKind;
use ruff_python_ast::types::Range;
@@ -73,46 +73,48 @@ pub fn private_member_access(checker: &mut Checker, expr: &Expr) {
if let ExprKind::Call { func, .. } = &value.node {
// Ignore `super()` calls.
let call_path = collect_call_path(func);
if call_path.as_slice() == ["super"] {
return;
if let Some(call_path) = collect_call_path(func) {
if call_path.as_slice() == ["super"] {
return;
}
}
} else {
// Ignore `self` and `cls` accesses.
let call_path = collect_call_path(value);
if call_path.as_slice() == ["self"]
|| call_path.as_slice() == ["cls"]
|| call_path.as_slice() == ["mcs"]
{
return;
}
if let Some(call_path) = collect_call_path(value) {
if call_path.as_slice() == ["self"]
|| call_path.as_slice() == ["cls"]
|| call_path.as_slice() == ["mcs"]
{
return;
}
// Ignore accesses on class members from _within_ the class.
if checker
.ctx
.scopes
.iter()
.rev()
.find_map(|scope| match &scope.kind {
ScopeKind::Class(class_def) => Some(class_def),
_ => None,
})
.map_or(false, |class_def| {
if call_path.as_slice() == [class_def.name] {
checker
.ctx
.find_binding(class_def.name)
.map_or(false, |binding| {
// TODO(charlie): Could the name ever be bound to a _different_
// class here?
binding.kind.is_class_definition()
})
} else {
false
}
})
{
return;
// Ignore accesses on class members from _within_ the class.
if checker
.ctx
.scopes
.iter()
.rev()
.find_map(|scope| match &scope.kind {
ScopeKind::Class(class_def) => Some(class_def),
_ => None,
})
.map_or(false, |class_def| {
if call_path.as_slice() == [class_def.name] {
checker
.ctx
.find_binding(class_def.name)
.map_or(false, |binding| {
// TODO(charlie): Could the name ever be bound to a
// _different_ class here?
binding.kind.is_class_definition()
})
} else {
false
}
})
{
return;
}
}
}

View File

@@ -3,13 +3,15 @@ use std::iter;
use itertools::Either::{Left, Right};
use itertools::Itertools;
use ruff_python_ast::context::Context;
use rustc_hash::FxHashMap;
use rustpython_parser::ast::{
Boolop, Cmpop, Constant, Expr, ExprContext, ExprKind, Location, Unaryop,
};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_diagnostics::{AlwaysAutofixableViolation, AutofixKind, Diagnostic, Edit, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::context::Context;
use ruff_python_ast::helpers::{contains_effect, create_expr, has_comments, unparse_expr};
use ruff_python_ast::types::Range;
@@ -44,19 +46,32 @@ use crate::registry::AsRule;
/// - [Python: "isinstance"](https://docs.python.org/3/library/functions.html#isinstance)
#[violation]
pub struct DuplicateIsinstanceCall {
pub name: String,
pub name: Option<String>,
pub fixable: bool,
}
impl AlwaysAutofixableViolation for DuplicateIsinstanceCall {
impl Violation for DuplicateIsinstanceCall {
const AUTOFIX: AutofixKind = AutofixKind::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
let DuplicateIsinstanceCall { name } = self;
format!("Multiple `isinstance` calls for `{name}`, merge into a single call")
let DuplicateIsinstanceCall { name, .. } = self;
if let Some(name) = name {
format!("Multiple `isinstance` calls for `{name}`, merge into a single call")
} else {
format!("Multiple `isinstance` calls for expression, merge into a single call")
}
}
fn autofix_title(&self) -> String {
let DuplicateIsinstanceCall { name } = self;
format!("Merge `isinstance` calls for `{name}`")
fn autofix_title_formatter(&self) -> Option<fn(&Self) -> String> {
self.fixable
.then_some(|DuplicateIsinstanceCall { name, .. }| {
if let Some(name) = name {
format!("Merge `isinstance` calls for `{name}`")
} else {
format!("Merge `isinstance` calls")
}
})
}
}
@@ -156,9 +171,9 @@ pub fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
return;
};
// Locate duplicate `isinstance` calls, represented as a map from argument name
// Locate duplicate `isinstance` calls, represented as a map from `ComparableExpr`
// to indices of the relevant `Expr` instances in `values`.
let mut duplicates = BTreeMap::new();
let mut duplicates: FxHashMap<ComparableExpr, Vec<usize>> = FxHashMap::default();
for (index, call) in values.iter().enumerate() {
// Verify that this is an `isinstance` call.
let ExprKind::Call { func, args, keywords } = &call.node else {
@@ -180,27 +195,39 @@ pub fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
continue;
}
// Collect the name of the argument.
let ExprKind::Name { id: arg_name, .. } = &args[0].node else {
continue;
};
// Collect the target (e.g., `obj` in `isinstance(obj, int)`).
let target = &args[0];
duplicates
.entry(arg_name.as_str())
.entry(target.into())
.or_insert_with(Vec::new)
.push(index);
}
// Generate a `Diagnostic` for each duplicate.
for (arg_name, indices) in duplicates {
for indices in duplicates.values() {
if indices.len() > 1 {
// Grab the target used in each duplicate `isinstance` call (e.g., `obj` in
// `isinstance(obj, int)`).
let target = if let ExprKind::Call { args, .. } = &values[indices[0]].node {
args.get(0).expect("`isinstance` should have two arguments")
} else {
unreachable!("Indices should only contain `isinstance` calls")
};
let fixable = !contains_effect(&checker.ctx, target);
let mut diagnostic = Diagnostic::new(
DuplicateIsinstanceCall {
name: arg_name.to_string(),
name: if let ExprKind::Name { id, .. } = &target.node {
Some(id.to_string())
} else {
None
},
fixable,
},
Range::from(expr),
);
if checker.patch(diagnostic.kind.rule()) {
// Grab the types used in each duplicate `isinstance` call.
if fixable && checker.patch(diagnostic.kind.rule()) {
// Grab the types used in each duplicate `isinstance` call (e.g., `int` and `str`
// in `isinstance(obj, int) or isinstance(obj, str)`).
let types: Vec<&Expr> = indices
.iter()
.map(|index| &values[*index])
@@ -219,10 +246,7 @@ pub fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
ctx: ExprContext::Load,
})),
args: vec![
create_expr(ExprKind::Name {
id: arg_name.to_string(),
ctx: ExprContext::Load,
}),
target.clone(),
create_expr(ExprKind::Tuple {
// Flatten all the types used across the `isinstance` calls.
elts: types
@@ -479,10 +503,17 @@ pub fn expr_or_not_expr(checker: &mut Checker, expr: &Expr) {
}
}
pub fn is_short_circuit(ctx: &Context, expr: &Expr) -> Option<(Location, Location)> {
pub fn is_short_circuit(
ctx: &Context,
expr: &Expr,
expected_op: &Boolop,
) -> Option<(Location, Location)> {
let ExprKind::BoolOp { op, values, } = &expr.node else {
return None;
};
if op != expected_op {
return None;
}
let short_circuit_value = match op {
Boolop::And => false,
Boolop::Or => true,
@@ -527,7 +558,7 @@ pub fn is_short_circuit(ctx: &Context, expr: &Expr) -> Option<(Location, Locatio
/// SIM222
pub fn expr_or_true(checker: &mut Checker, expr: &Expr) {
let Some((location, end_location)) = is_short_circuit(&checker.ctx, expr) else {
let Some((location, end_location)) = is_short_circuit(&checker.ctx, expr, &Boolop::Or) else {
return;
};
let mut diagnostic = Diagnostic::new(
@@ -549,7 +580,7 @@ pub fn expr_or_true(checker: &mut Checker, expr: &Expr) {
/// SIM223
pub fn expr_and_false(checker: &mut Checker, expr: &Expr) {
let Some((location, end_location)) = is_short_circuit(&checker.ctx, expr) else {
let Some((location, end_location)) = is_short_circuit(&checker.ctx, expr, &Boolop::And) else {
return;
};
let mut diagnostic = Diagnostic::new(

View File

@@ -253,10 +253,22 @@ pub fn nested_if_statements(
return;
}
// Allow `if __name__ == "__main__":` statements.
if is_main_check(test) {
return;
}
// Allow `if True:` and `if False:` statements.
if matches!(
test.node,
ExprKind::Constant {
value: Constant::Bool(..),
..
}
) {
return;
}
// Find the deepest nested if-statement, to inform the range.
let Some((test, first_stmt)) = find_last_nested_if(body) else {
return;

View File

@@ -2,8 +2,8 @@ use rustpython_parser::ast::{Excepthandler, ExcepthandlerKind, Located, Stmt, St
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::call_path::compose_call_path;
use ruff_python_ast::helpers;
use ruff_python_ast::helpers::compose_call_path;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;

View File

@@ -119,7 +119,7 @@ fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Resu
whitespace_before,
whitespace_after,
},
_ => unreachable!("Expected comparison operator"),
_ => panic!("Expected comparison operator"),
};
let mut state = CodegenState {

View File

@@ -128,4 +128,39 @@ expression: diagnostics
row: 16
column: 46
parent: ~
- kind:
name: DuplicateIsinstanceCall
body: "Multiple `isinstance` calls for expression, merge into a single call"
suggestion: "Merge `isinstance` calls"
fixable: true
location:
row: 19
column: 3
end_location:
row: 19
column: 49
fix:
edits:
- content: "isinstance(a.b, (int, float))"
location:
row: 19
column: 3
end_location:
row: 19
column: 49
parent: ~
- kind:
name: DuplicateIsinstanceCall
body: "Multiple `isinstance` calls for expression, merge into a single call"
suggestion: ~
fixable: false
location:
row: 22
column: 3
end_location:
row: 22
column: 49
fix:
edits: []
parent: ~

View File

@@ -110,10 +110,10 @@ expression: diagnostics
column: 4
end_location:
row: 52
column: 16
column: 17
fix:
edits:
- content: " if True and True:\n \"\"\"this\nis valid\"\"\"\n\n \"\"\"the indentation on\n this line is significant\"\"\"\n\n \"this is\" \\\n\"allowed too\"\n\n (\"so is\"\n\"this for some reason\")\n"
- content: " if y > 0 and z > 0:\n \"\"\"this\nis valid\"\"\"\n\n \"\"\"the indentation on\n this line is significant\"\"\"\n\n \"this is\" \\\n\"allowed too\"\n\n (\"so is\"\n\"this for some reason\")\n"
location:
row: 51
column: 0
@@ -131,10 +131,10 @@ expression: diagnostics
column: 0
end_location:
row: 68
column: 12
column: 13
fix:
edits:
- content: "if True and True:\n \"\"\"this\nis valid\"\"\"\n\n \"\"\"the indentation on\n this line is significant\"\"\"\n\n \"this is\" \\\n\"allowed too\"\n\n (\"so is\"\n\"this for some reason\")\n"
- content: "if x > 0 and y > 0:\n \"\"\"this\nis valid\"\"\"\n\n \"\"\"the indentation on\n this line is significant\"\"\"\n\n \"this is\" \\\n\"allowed too\"\n\n (\"so is\"\n\"this for some reason\")\n"
location:
row: 67
column: 0

View File

@@ -5,7 +5,8 @@ use serde::{Deserialize, Serialize};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation, CacheKey};
use ruff_python_ast::types::{CallPath, Range};
use ruff_python_ast::call_path::from_qualified_name;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -102,7 +103,7 @@ pub fn banned_attribute_access(checker: &mut Checker, expr: &Expr) {
.flake8_tidy_imports
.banned_api
.iter()
.find(|(banned_path, ..)| call_path == banned_path.split('.').collect::<CallPath>())
.find(|(banned_path, ..)| call_path == from_qualified_name(banned_path))
}) {
checker.diagnostics.push(Diagnostic::new(
BannedApi {

View File

@@ -8,7 +8,6 @@ use ruff_python_ast::helpers::{create_stmt, from_relative_import, unparse_stmt};
use ruff_python_ast::source_code::Stylist;
use ruff_python_ast::types::Range;
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_python_stdlib::keyword::KWLIST;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -89,19 +88,19 @@ impl Violation for RelativeImports {
fn fix_banned_relative_import(
stmt: &Stmt,
level: Option<&usize>,
level: Option<usize>,
module: Option<&str>,
module_path: Option<&Vec<String>>,
stylist: &Stylist,
) -> Option<Edit> {
// Only fix is the module path is known.
if let Some(mut parts) = module_path.cloned() {
if *level? >= parts.len() {
if level? >= parts.len() {
return None;
}
// Remove relative level from module path.
for _ in 0..*level? {
for _ in 0..level? {
parts.pop();
}
@@ -109,10 +108,7 @@ fn fix_banned_relative_import(
let call_path = from_relative_import(&parts, module);
// Require import to be a valid module:
// https://python.org/dev/peps/pep-0008/#package-and-module-names
if !call_path
.iter()
.all(|part| is_identifier(part) && !KWLIST.contains(part))
{
if !call_path.iter().all(|part| is_identifier(part)) {
return None;
}
call_path.as_slice().join(".")
@@ -121,27 +117,21 @@ fn fix_banned_relative_import(
let call_path = from_relative_import(&parts, &module);
// Require import to be a valid module:
// https://python.org/dev/peps/pep-0008/#package-and-module-names
if !call_path
.iter()
.all(|part| is_identifier(part) && !KWLIST.contains(part))
{
if !call_path.iter().all(|part| is_identifier(part)) {
return None;
}
call_path.as_slice().join(".")
} else {
// Require import to be a valid module:
// https://python.org/dev/peps/pep-0008/#package-and-module-names
if !parts
.iter()
.all(|part| is_identifier(part) && !KWLIST.contains(&part.as_str()))
{
if !parts.iter().all(|part| is_identifier(part)) {
return None;
}
parts.join(".")
};
let StmtKind::ImportFrom { names, .. } = &stmt.node else {
unreachable!("Expected StmtKind::ImportFrom");
panic!("Expected StmtKind::ImportFrom");
};
let content = unparse_stmt(
&create_stmt(StmtKind::ImportFrom {
@@ -166,7 +156,7 @@ fn fix_banned_relative_import(
pub fn banned_relative_import(
checker: &Checker,
stmt: &Stmt,
level: Option<&usize>,
level: Option<usize>,
module: Option<&str>,
module_path: Option<&Vec<String>>,
strictness: &Strictness,
@@ -175,7 +165,7 @@ pub fn banned_relative_import(
Strictness::All => 0,
Strictness::Parents => 1,
};
if level? > &strictness_level {
if level? > strictness_level {
let mut diagnostic = Diagnostic::new(
RelativeImports {
strictness: *strictness,

View File

@@ -1,9 +1,11 @@
use num_traits::Zero;
use rustpython_parser::ast::{Constant, Expr, ExprKind};
use ruff_python_ast::binding::{Binding, BindingKind, ExecutionContext};
use ruff_python_ast::call_path::from_qualified_name;
use ruff_python_ast::context::Context;
use ruff_python_ast::helpers::{map_callable, to_call_path};
use ruff_python_ast::scope::{Binding, BindingKind, ExecutionContext, ScopeKind};
use ruff_python_ast::helpers::map_callable;
use ruff_python_ast::scope::ScopeKind;
/// Return `true` if [`Expr`] is a guard for a type-checking block.
pub fn is_type_checking_block(context: &Context, test: &Expr) -> bool {
@@ -76,7 +78,7 @@ fn runtime_evaluated_base_class(context: &Context, base_classes: &[String]) -> b
if let Some(call_path) = context.resolve_call_path(base) {
if base_classes
.iter()
.any(|base_class| to_call_path(base_class) == call_path)
.any(|base_class| from_qualified_name(base_class) == call_path)
{
return true;
}
@@ -92,7 +94,7 @@ fn runtime_evaluated_decorators(context: &Context, decorators: &[String]) -> boo
if let Some(call_path) = context.resolve_call_path(map_callable(decorator)) {
if decorators
.iter()
.any(|decorator| to_call_path(decorator) == call_path)
.any(|decorator| from_qualified_name(decorator) == call_path)
{
return true;
}

View File

@@ -1,6 +1,6 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::{
use ruff_python_ast::binding::{
Binding, BindingKind, ExecutionContext, FromImportation, Importation, SubmoduleImportation,
};

View File

@@ -2,7 +2,7 @@ use std::path::Path;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::{
use ruff_python_ast::binding::{
Binding, BindingKind, ExecutionContext, FromImportation, Importation, SubmoduleImportation,
};
@@ -183,7 +183,7 @@ pub fn typing_only_runtime_import(
// Categorize the import.
match categorize(
full_name,
Some(&level),
Some(level),
&settings.src,
package,
&settings.isort.known_modules,

View File

@@ -5,9 +5,10 @@ use rustpython_parser::ast::{Arg, Arguments};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::binding::Bindings;
use ruff_python_ast::function_type;
use ruff_python_ast::function_type::FunctionType;
use ruff_python_ast::scope::{Bindings, FunctionDef, Lambda, Scope, ScopeKind};
use ruff_python_ast::scope::{FunctionDef, Lambda, Scope, ScopeKind};
use ruff_python_ast::visibility;
use crate::checkers::ast::Checker;
@@ -317,6 +318,6 @@ pub fn unused_arguments(
vec![]
}
}
_ => unreachable!("Expected ScopeKind::Function | ScopeKind::Lambda"),
_ => panic!("Expected ScopeKind::Function | ScopeKind::Lambda"),
}
}

View File

@@ -106,7 +106,7 @@ pub fn annotate_imports<'a>(
annotated.push(AnnotatedImport::ImportFrom {
module: module.as_deref(),
names: aliases,
level: level.as_ref(),
level: *level,
trailing_comma: if split_on_trailing_comma {
trailing_comma(import, locator)
} else {
@@ -116,7 +116,7 @@ pub fn annotate_imports<'a>(
inline,
});
}
_ => unreachable!("Expected StmtKind::Import | StmtKind::ImportFrom"),
_ => panic!("Expected StmtKind::Import | StmtKind::ImportFrom"),
}
}
annotated

View File

@@ -54,7 +54,7 @@ enum Reason<'a> {
#[allow(clippy::too_many_arguments)]
pub fn categorize(
module_name: &str,
level: Option<&usize>,
level: Option<usize>,
src: &[PathBuf],
package: Option<&Path>,
known_modules: &KnownModules,
@@ -62,7 +62,7 @@ pub fn categorize(
) -> ImportType {
let module_base = module_name.split('.').next().unwrap();
let (import_type, reason) = {
if level.map_or(false, |level| *level > 0) {
if level.map_or(false, |level| level > 0) {
(ImportType::LocalFolder, Reason::NonZeroLevel)
} else if module_base == "__future__" {
(ImportType::Future, Reason::Future)

View File

@@ -1,11 +1,10 @@
use rustpython_parser::ast::{Location, Stmt};
use rustpython_parser::ast::Stmt;
use rustpython_parser::{lexer, Mode, Tok};
use ruff_python_ast::helpers::is_docstring_stmt;
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::source_code::Locator;
use super::types::TrailingComma;
use crate::rules::isort::types::TrailingComma;
/// Return `true` if a `StmtKind::ImportFrom` statement ends with a magic
/// trailing comma.
@@ -83,123 +82,3 @@ pub fn has_comment_break(stmt: &Stmt, locator: &Locator) -> bool {
}
false
}
/// Find the end of the last docstring.
fn match_docstring_end(body: &[Stmt]) -> Option<Location> {
let mut iter = body.iter();
let Some(mut stmt) = iter.next() else {
return None;
};
if !is_docstring_stmt(stmt) {
return None;
}
for next in iter {
if !is_docstring_stmt(next) {
break;
}
stmt = next;
}
Some(stmt.end_location.unwrap())
}
/// Find the end of the first token that isn't a docstring, comment, or
/// whitespace.
pub fn find_splice_location(body: &[Stmt], locator: &Locator) -> Location {
// Find the first AST node that isn't a docstring.
let mut splice = match_docstring_end(body).unwrap_or_default();
// Find the first token that isn't a comment or whitespace.
let contents = locator.skip(splice);
for (.., tok, end) in lexer::lex_located(contents, Mode::Module, splice).flatten() {
if matches!(tok, Tok::Comment(..) | Tok::Newline) {
splice = end;
} else {
break;
}
}
splice
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use rustpython_parser as parser;
use rustpython_parser::ast::Location;
use ruff_python_ast::source_code::Locator;
use super::find_splice_location;
fn splice_contents(contents: &str) -> Result<Location> {
let program = parser::parse_program(contents, "<filename>")?;
let locator = Locator::new(contents);
Ok(find_splice_location(&program, &locator))
}
#[test]
fn splice() -> Result<()> {
let contents = "";
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
let contents = r#"
"""Hello, world!"""
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
let contents = r#"
"""Hello, world!"""
"""Hello, world!"""
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(2, 19));
let contents = r#"
x = 1
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
let contents = r#"
#!/usr/bin/env python3
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 22));
let contents = r#"
#!/usr/bin/env python3
"""Hello, world!"""
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(2, 19));
let contents = r#"
"""Hello, world!"""
#!/usr/bin/env python3
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(2, 22));
let contents = r#"
"""%s""" % "Hello, world!"
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 0));
let contents = r#"
"""Hello, world!"""; x = 1
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
let contents = r#"
"""Hello, world!"""; x = 1; y = \
2
"#
.trim();
assert_eq!(splice_contents(contents)?, Location::new(1, 19));
Ok(())
}
}

View File

@@ -55,7 +55,7 @@ pub enum AnnotatedImport<'a> {
ImportFrom {
module: Option<&'a str>,
names: Vec<AnnotatedAliasData<'a>>,
level: Option<&'a usize>,
level: Option<usize>,
atop: Vec<Comment<'a>>,
inline: Vec<Comment<'a>>,
trailing_comma: TrailingComma,

View File

@@ -1,21 +1,19 @@
use std::fmt;
use log::error;
use rustpython_parser as parser;
use rustpython_parser::ast::{Location, StmtKind, Suite};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::is_docstring_stmt;
use ruff_python_ast::imports::{Alias, AnyImport, Import, ImportFrom};
use ruff_python_ast::source_code::{Locator, Stylist};
use ruff_python_ast::types::Range;
use crate::importer::Importer;
use crate::registry::Rule;
use crate::rules::isort::track::Block;
use crate::settings::{flags, Settings};
use super::super::helpers;
use super::super::track::Block;
/// ## What it does
/// Adds any required imports, as specified by the user, to the top of the
/// file.
@@ -55,59 +53,6 @@ impl AlwaysAutofixableViolation for MissingRequiredImport {
}
}
struct Alias<'a> {
name: &'a str,
as_name: Option<&'a str>,
}
struct ImportFrom<'a> {
module: Option<&'a str>,
name: Alias<'a>,
level: Option<&'a usize>,
}
struct Import<'a> {
name: Alias<'a>,
}
enum AnyImport<'a> {
Import(Import<'a>),
ImportFrom(ImportFrom<'a>),
}
impl fmt::Display for ImportFrom<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "from ")?;
if let Some(level) = self.level {
write!(f, "{}", ".".repeat(*level))?;
}
if let Some(module) = self.module {
write!(f, "{module}")?;
}
write!(f, " import {}", self.name.name)?;
Ok(())
}
}
impl fmt::Display for Import<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "import {}", self.name.name)?;
if let Some(as_name) = self.name.as_name {
write!(f, " as {as_name}")?;
}
Ok(())
}
}
impl fmt::Display for AnyImport<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
AnyImport::Import(import) => write!(f, "{import}"),
AnyImport::ImportFrom(import_from) => write!(f, "{import_from}"),
}
}
}
fn contains(block: &Block, required_import: &AnyImport) -> bool {
block.imports.iter().any(|import| match required_import {
AnyImport::Import(required_import) => {
@@ -130,7 +75,7 @@ fn contains(block: &Block, required_import: &AnyImport) -> bool {
return false;
};
module.as_deref() == required_import.module
&& level.as_ref() == required_import.level
&& *level == required_import.level
&& names.iter().any(|alias| {
alias.node.name == required_import.name.name
&& alias.node.asname.as_deref() == required_import.name.as_name
@@ -163,38 +108,12 @@ fn add_required_import(
}
// Always insert the diagnostic at top-of-file.
let required_import = required_import.to_string();
let mut diagnostic = Diagnostic::new(
MissingRequiredImport(required_import.clone()),
MissingRequiredImport(required_import.to_string()),
Range::new(Location::default(), Location::default()),
);
if autofix.into() && settings.rules.should_fix(Rule::MissingRequiredImport) {
// Determine the location at which the import should be inserted.
let splice = helpers::find_splice_location(python_ast, locator);
// Generate the edit.
let mut contents = String::with_capacity(required_import.len() + 1);
// Newline (LF/CRLF)
let line_sep = stylist.line_ending().as_str();
// If we're inserting beyond the start of the file, we add
// a newline _before_, since the splice represents the _end_ of the last
// irrelevant token (e.g., the end of a comment or the end of
// docstring). This ensures that we properly handle awkward cases like
// docstrings that are followed by semicolons.
if splice > Location::default() {
contents.push_str(line_sep);
}
contents.push_str(&required_import);
// If we're inserting at the start of the file, add a trailing newline instead.
if splice == Location::default() {
contents.push_str(line_sep);
}
// Construct the fix.
diagnostic.set_fix(Edit::insertion(contents, splice));
diagnostic.set_fix(Importer::new(python_ast, locator, stylist).add_import(required_import));
}
Some(diagnostic)
}
@@ -224,8 +143,8 @@ pub fn add_required_imports(
);
return vec![];
}
match &body[0].node {
let stmt = &body[0];
match &stmt.node {
StmtKind::ImportFrom {
module,
names,
@@ -240,7 +159,7 @@ pub fn add_required_imports(
name: name.node.name.as_str(),
as_name: name.node.asname.as_deref(),
},
level: level.as_ref(),
level: *level,
}),
blocks,
python_ast,

View File

@@ -15,13 +15,13 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import annotations"
- content: "from __future__ import annotations\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~
- kind:
name: MissingRequiredImport
@@ -36,12 +36,12 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import generator_stop"
- content: "from __future__ import generator_stop\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~

View File

@@ -15,12 +15,12 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import annotations"
- content: "from __future__ import annotations\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~

View File

@@ -15,12 +15,12 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import annotations"
- content: "from __future__ import annotations\n"
location:
row: 3
column: 3
row: 4
column: 0
end_location:
row: 3
column: 3
row: 4
column: 0
parent: ~

View File

@@ -15,13 +15,13 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import annotations"
- content: "from __future__ import annotations\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~
- kind:
name: MissingRequiredImport
@@ -36,12 +36,12 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nfrom __future__ import generator_stop"
- content: "from __future__ import generator_stop\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~

View File

@@ -15,12 +15,12 @@ expression: diagnostics
column: 0
fix:
edits:
- content: "\nimport os"
- content: "import os\n"
location:
row: 1
column: 19
row: 2
column: 0
end_location:
row: 1
column: 19
row: 2
column: 0
parent: ~

View File

@@ -95,8 +95,8 @@ pub fn cmp_members(
/// Compare two relative import levels.
pub fn cmp_levels(
level1: Option<&usize>,
level2: Option<&usize>,
level1: Option<usize>,
level2: Option<usize>,
relative_imports_order: RelativeImportsOrder,
) -> Ordering {
match (level1, level2) {
@@ -104,8 +104,8 @@ pub fn cmp_levels(
(None, Some(_)) => Ordering::Less,
(Some(_), None) => Ordering::Greater,
(Some(level1), Some(level2)) => match relative_imports_order {
RelativeImportsOrder::ClosestToFurthest => level1.cmp(level2),
RelativeImportsOrder::FurthestToClosest => level2.cmp(level1),
RelativeImportsOrder::ClosestToFurthest => level1.cmp(&level2),
RelativeImportsOrder::FurthestToClosest => level2.cmp(&level1),
},
}
}

View File

@@ -14,7 +14,7 @@ pub enum TrailingComma {
#[derive(Debug, Hash, Ord, PartialOrd, Eq, PartialEq, Clone)]
pub struct ImportFromData<'a> {
pub module: Option<&'a str>,
pub level: Option<&'a usize>,
pub level: Option<usize>,
}
#[derive(Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]

View File

@@ -1,40 +1,37 @@
use rustpython_parser::ast::{Expr, ExprKind, Keyword, Location};
use ruff_diagnostics::Edit;
use ruff_python_ast::helpers;
use ruff_diagnostics::{Edit, Fix};
use ruff_python_ast::source_code::Locator;
use ruff_python_ast::types::Range;
use crate::autofix::apply_fix;
use crate::autofix::helpers::remove_argument;
fn match_name(expr: &Expr) -> Option<&str> {
if let ExprKind::Call { func, .. } = &expr.node {
if let ExprKind::Attribute { value, .. } = &func.node {
if let ExprKind::Name { id, .. } = &value.node {
Some(id)
} else {
None
return Some(id);
}
} else {
None
}
} else {
None
}
None
}
/// Remove the `inplace` argument from a function call and replace it with an
/// assignment.
pub fn fix_inplace_argument(
pub(super) fn convert_inplace_argument_to_assignment(
locator: &Locator,
expr: &Expr,
violation_at: Location,
violation_end: Location,
args: &[Expr],
keywords: &[Keyword],
) -> Option<Edit> {
if let Ok(fix) = remove_argument(
) -> Option<Fix> {
// Add the assignment.
let name = match_name(expr)?;
let insert_assignment = Edit::insertion(format!("{name} = "), expr.location);
// Remove the `inplace` argument.
let remove_argument = remove_argument(
locator,
expr.location,
violation_at,
@@ -42,31 +39,8 @@ pub fn fix_inplace_argument(
args,
keywords,
false,
) {
// Reset the line index.
let fix_me = Edit::deletion(
helpers::to_relative(fix.location, expr.location),
helpers::to_relative(fix.end_location, expr.location),
);
)
.ok()?;
// Apply the deletion step.
// TODO(charlie): Find a way to
let contents = locator.slice(Range::new(expr.location, expr.end_location.unwrap()));
let output = apply_fix(&fix_me, &Locator::new(contents));
// Obtain the name prefix.
let name = match_name(expr)?;
// Apply the assignment.
let new_contents = format!("{name} = {output}");
// Create the new fix.
Some(Edit::replacement(
new_contents,
expr.location,
expr.end_location.unwrap(),
))
} else {
None
}
Some(Fix::from_iter([insert_assignment, remove_argument]))
}

View File

@@ -3,7 +3,7 @@ use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::BindingKind;
use ruff_python_ast::binding::BindingKind;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -82,7 +82,6 @@ pub fn check_attr(checker: &mut Checker, attr: &str, value: &Expr, attr_expr: &E
| BindingKind::FunctionDefinition
| BindingKind::Export(..)
| BindingKind::FutureImportation
| BindingKind::StarImportation(..)
| BindingKind::Importation(..)
| BindingKind::FromImportation(..)
| BindingKind::SubmoduleImportation(..)

View File

@@ -3,7 +3,7 @@ use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::Violation;
use ruff_diagnostics::{Diagnostic, DiagnosticKind};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::{BindingKind, Importation};
use ruff_python_ast::binding::{BindingKind, Importation};
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -99,7 +99,6 @@ pub fn check_call(checker: &mut Checker, func: &Expr) {
| BindingKind::FunctionDefinition
| BindingKind::Export(..)
| BindingKind::FutureImportation
| BindingKind::StarImportation(..)
| BindingKind::Importation(..)
| BindingKind::FromImportation(..)
| BindingKind::SubmoduleImportation(..)

View File

@@ -1,12 +1,12 @@
use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword, StmtKind};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic};
use ruff_diagnostics::{AutofixKind, Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
use crate::rules::pandas_vet::fixes::fix_inplace_argument;
use crate::rules::pandas_vet::fixes::convert_inplace_argument_to_assignment;
/// ## What it does
/// Checks for `inplace=True` usages in `pandas` function and method
@@ -33,16 +33,21 @@ use crate::rules::pandas_vet::fixes::fix_inplace_argument;
/// ## References
/// - [_Why You Should Probably Never Use pandas inplace=True_](https://towardsdatascience.com/why-you-should-probably-never-use-pandas-inplace-true-9f9f211849e4)
#[violation]
pub struct PandasUseOfInplaceArgument;
pub struct PandasUseOfInplaceArgument {
pub fixable: bool,
}
impl Violation for PandasUseOfInplaceArgument {
const AUTOFIX: AutofixKind = AutofixKind::Sometimes;
impl AlwaysAutofixableViolation for PandasUseOfInplaceArgument {
#[derive_message_formats]
fn message(&self) -> String {
format!("`inplace=True` should be avoided; it has inconsistent behavior")
}
fn autofix_title(&self) -> String {
format!("Assign to variable; remove `inplace` arg")
fn autofix_title_formatter(&self) -> Option<fn(&Self) -> String> {
self.fixable
.then_some(|_| format!("Assign to variable; remove `inplace` arg"))
}
}
@@ -53,9 +58,12 @@ pub fn inplace_argument(
args: &[Expr],
keywords: &[Keyword],
) -> Option<Diagnostic> {
for keyword in keywords {
let arg = keyword.node.arg.as_ref()?;
let mut seen_star = false;
for keyword in keywords.iter().rev() {
let Some(arg) = &keyword.node.arg else {
seen_star = true;
continue;
};
if arg == "inplace" {
let is_true_literal = match &keyword.node.value.node {
ExprKind::Constant {
@@ -65,10 +73,18 @@ pub fn inplace_argument(
_ => false,
};
if is_true_literal {
// Avoid applying the fix if:
// 1. The keyword argument is followed by a star argument (we can't be certain that
// the star argument _doesn't_ contain an override).
// 2. The call is part of a larger expression (we're converting an expression to a
// statement, and expressions can't contain statements).
let fixable = !seen_star
&& matches!(checker.ctx.current_stmt().node, StmtKind::Expr { .. })
&& checker.ctx.current_expr_parent().is_none();
let mut diagnostic =
Diagnostic::new(PandasUseOfInplaceArgument, Range::from(keyword));
if checker.patch(diagnostic.kind.rule()) {
if let Some(fix) = fix_inplace_argument(
Diagnostic::new(PandasUseOfInplaceArgument { fixable }, Range::from(keyword));
if fixable && checker.patch(diagnostic.kind.rule()) {
if let Some(fix) = convert_inplace_argument_to_assignment(
checker.locator,
expr,
diagnostic.location,
@@ -81,6 +97,9 @@ pub fn inplace_argument(
}
return Some(diagnostic);
}
// Duplicate keywords is a syntax error, so we can stop here.
break;
}
}
None

View File

@@ -15,13 +15,20 @@ expression: diagnostics
column: 34
fix:
edits:
- content: "x = x.drop([\"a\"], axis=1)"
- content: "x = "
location:
row: 5
column: 0
end_location:
row: 5
column: 35
column: 0
- content: ""
location:
row: 5
column: 20
end_location:
row: 5
column: 34
parent: ~
- kind:
name: PandasUseOfInplaceArgument
@@ -36,13 +43,20 @@ expression: diagnostics
column: 34
fix:
edits:
- content: "x = x.drop([\"a\"], axis=1)"
- content: "x = "
location:
row: 7
column: 0
end_location:
row: 7
column: 35
column: 0
- content: ""
location:
row: 7
column: 20
end_location:
row: 7
column: 34
parent: ~
- kind:
name: PandasUseOfInplaceArgument
@@ -57,13 +71,20 @@ expression: diagnostics
column: 16
fix:
edits:
- content: "x = x.drop(\n columns=[\"a\"],\n axis=1,\n)"
- content: "x = "
location:
row: 9
column: 0
end_location:
row: 13
column: 1
row: 9
column: 0
- content: ""
location:
row: 10
column: 4
end_location:
row: 11
column: 4
parent: ~
- kind:
name: PandasUseOfInplaceArgument
@@ -78,12 +99,75 @@ expression: diagnostics
column: 20
fix:
edits:
- content: "x = x.drop(\n columns=[\"a\"],\n axis=1,\n )"
- content: "x = "
location:
row: 16
column: 4
end_location:
row: 20
column: 5
row: 16
column: 4
- content: ""
location:
row: 17
column: 8
end_location:
row: 18
column: 8
parent: ~
- kind:
name: PandasUseOfInplaceArgument
body: "`inplace=True` should be avoided; it has inconsistent behavior"
suggestion: "Assign to variable; remove `inplace` arg"
fixable: true
location:
row: 22
column: 32
end_location:
row: 22
column: 44
fix:
edits:
- content: "x = "
location:
row: 22
column: 0
end_location:
row: 22
column: 0
- content: ""
location:
row: 22
column: 30
end_location:
row: 22
column: 44
parent: ~
- kind:
name: PandasUseOfInplaceArgument
body: "`inplace=True` should be avoided; it has inconsistent behavior"
suggestion: ~
fixable: false
location:
row: 23
column: 22
end_location:
row: 23
column: 34
fix:
edits: []
parent: ~
- kind:
name: PandasUseOfInplaceArgument
body: "`inplace=True` should be avoided; it has inconsistent behavior"
suggestion: ~
fixable: false
location:
row: 24
column: 24
end_location:
row: 24
column: 36
fix:
edits: []
parent: ~

View File

@@ -41,9 +41,10 @@ mod tests {
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/no_module/test.txt"); "N999_8")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/valid_name/file-with-dashes.py"); "N999_9")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/valid_name/__main__.py"); "N999_10")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/valid_name/0001_initial.py"); "N999_11")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/invalid_name/0001_initial.py"); "N999_11")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/valid_name/__setup__.py"); "N999_12")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/valid_name/file-with-dashes"); "N999_13")]
#[test_case(Rule::InvalidModuleName, Path::new("N999/module/invalid_name/import.py"); "N999_14")]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
let diagnostics = test_path(

View File

@@ -1,13 +1,14 @@
use std::ffi::OsStr;
use std::path::Path;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::types::Range;
use ruff_python_stdlib::identifiers::is_module_name;
use ruff_python_stdlib::identifiers::{is_migration_name, is_module_name};
/// ## What it does
/// Checks for module names that do not follow the `snake_case` naming
/// convention.
/// convention or are otherwise invalid.
///
/// ## Why is this bad?
/// [PEP 8] recommends the use of the `snake_case` naming convention for
@@ -21,6 +22,10 @@ use ruff_python_stdlib::identifiers::is_module_name;
/// > provides a higher level (e.g. more object oriented) interface, the C/C++ module has
/// > a leading underscore (e.g. `_socket`).
///
/// Further, in order for Python modules to be importable, they must be valid
/// identifiers. As such, they cannot start with a digit, or collide with hard
/// keywords, like `import` or `class`.
///
/// ## Example
/// - Instead of `example-module-name` or `example module name`, use `example_module_name`.
/// - Instead of `ExampleModule`, use `example_module`.
@@ -49,18 +54,21 @@ pub fn invalid_module_name(path: &Path, package: Option<&Path>) -> Option<Diagno
}
if let Some(package) = package {
let module_name = if path.file_name().map_or(false, |file_name| {
file_name == "__init__.py"
|| file_name == "__init__.pyi"
|| file_name == "__main__.py"
|| file_name == "__main__.pyi"
}) {
let module_name = if is_module_file(path) {
package.file_name().unwrap().to_string_lossy()
} else {
path.file_stem().unwrap().to_string_lossy()
};
if !is_module_name(&module_name) {
// As a special case, we allow files in `versions` and `migrations` directories to start
// with a digit (e.g., `0001_initial.py`), to support common conventions used by Django
// and other frameworks.
let is_valid_module_name = if is_migration_file(path) {
is_migration_name(&module_name)
} else {
is_module_name(&module_name)
};
if !is_valid_module_name {
return Some(Diagnostic::new(
InvalidModuleName {
name: module_name.to_string(),
@@ -72,3 +80,21 @@ pub fn invalid_module_name(path: &Path, package: Option<&Path>) -> Option<Diagno
None
}
/// Return `true` if a [`Path`] should use the name of its parent directory as its module name.
fn is_module_file(path: &Path) -> bool {
path.file_name().map_or(false, |file_name| {
file_name == "__init__.py"
|| file_name == "__init__.pyi"
|| file_name == "__main__.py"
|| file_name == "__main__.pyi"
})
}
/// Return `true` if a [`Path`] refers to a migration file.
fn is_migration_file(path: &Path) -> bool {
path.parent()
.and_then(Path::file_name)
.and_then(OsStr::to_str)
.map_or(false, |parent| matches!(parent, "versions" | "migrations"))
}

View File

@@ -0,0 +1,19 @@
---
source: crates/ruff/src/rules/pep8_naming/mod.rs
expression: diagnostics
---
- kind:
name: InvalidModuleName
body: "Invalid module name: '0001_initial'"
suggestion: ~
fixable: false
location:
row: 1
column: 0
end_location:
row: 1
column: 0
fix:
edits: []
parent: ~

View File

@@ -0,0 +1,19 @@
---
source: crates/ruff/src/rules/pep8_naming/mod.rs
expression: diagnostics
---
- kind:
name: InvalidModuleName
body: "Invalid module name: 'import'"
suggestion: ~
fixable: false
location:
row: 1
column: 0
end_location:
row: 1
column: 0
fix:
edits: []
parent: ~

View File

@@ -1,6 +0,0 @@
---
source: crates/ruff/src/rules/pep8_naming/mod.rs
expression: diagnostics
---
[]

View File

@@ -22,7 +22,7 @@ impl From<&Cmpop> for EqCmpop {
match cmpop {
Cmpop::Eq => EqCmpop::Eq,
Cmpop::NotEq => EqCmpop::NotEq,
_ => unreachable!("Expected Cmpop::Eq | Cmpop::NotEq"),
_ => panic!("Expected Cmpop::Eq | Cmpop::NotEq"),
}
}
}

View File

@@ -1,7 +1,8 @@
use ruff_python_ast::call_path::from_qualified_name;
use std::collections::BTreeSet;
use ruff_python_ast::cast;
use ruff_python_ast::helpers::{map_callable, to_call_path};
use ruff_python_ast::helpers::map_callable;
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::str::is_implicit_concatenation;
@@ -52,7 +53,7 @@ pub(crate) fn should_ignore_definition(
if let Some(call_path) = checker.ctx.resolve_call_path(map_callable(decorator)) {
if ignore_decorators
.iter()
.any(|decorator| to_call_path(decorator) == call_path)
.any(|decorator| from_qualified_name(decorator) == call_path)
{
return true;
}

View File

@@ -1,8 +1,9 @@
use unicode_width::UnicodeWidthStr;
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::str::leading_quote;
use ruff_python_ast::types::Range;
use unicode_width::UnicodeWidthStr;
use crate::checkers::ast::Checker;
use crate::docstrings::definition::{DefinitionKind, Docstring};

View File

@@ -5,10 +5,10 @@ use once_cell::sync::Lazy;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::call_path::{from_qualified_name, CallPath};
use ruff_python_ast::cast;
use ruff_python_ast::helpers::to_call_path;
use ruff_python_ast::newlines::StrExt;
use ruff_python_ast::types::{CallPath, Range};
use ruff_python_ast::types::Range;
use ruff_python_ast::visibility::{is_property, is_test};
use crate::checkers::ast::Checker;
@@ -33,7 +33,7 @@ pub fn non_imperative_mood(
let property_decorators = property_decorators
.iter()
.map(|decorator| to_call_path(decorator))
.map(|decorator| from_qualified_name(decorator))
.collect::<Vec<CallPath>>();
if is_test(cast::name(parent))

View File

@@ -1,8 +1,9 @@
use rustpython_parser::ast::{Expr, ExprKind};
use rustpython_parser::ast::{Expr, ExprKind, Location};
use rustpython_parser::{lexer, Mode, StringKind, Tok};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::find_useless_f_strings;
use ruff_python_ast::source_code::Locator;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -47,6 +48,45 @@ impl AlwaysAutofixableViolation for FStringMissingPlaceholders {
}
}
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
fn find_useless_f_strings<'a>(
expr: &'a Expr,
locator: &'a Locator,
) -> impl Iterator<Item = (Range, Range)> + 'a {
let contents = locator.slice(expr);
lexer::lex_located(contents, Mode::Module, expr.location)
.flatten()
.filter_map(|(location, tok, end_location)| match tok {
Tok::String {
kind: StringKind::FString | StringKind::RawFString,
..
} => {
let first_char = locator.slice(Range {
location,
end_location: Location::new(location.row(), location.column() + 1),
});
// f"..." => f_position = 0
// fr"..." => f_position = 0
// rf"..." => f_position = 1
let f_position = usize::from(!(first_char == "f" || first_char == "F"));
Some((
Range {
location: Location::new(location.row(), location.column() + f_position),
end_location: Location::new(
location.row(),
location.column() + f_position + 1,
),
},
Range {
location,
end_location,
},
))
}
_ => None,
})
}
fn unescape_f_string(content: &str) -> String {
content.replace("{{", "{").replace("}}", "}")
}

View File

@@ -6,7 +6,6 @@ use rustpython_parser::ast::{Cmpop, Expr};
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers;
use ruff_python_ast::operations::locate_cmpops;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -23,7 +22,7 @@ impl From<&Cmpop> for IsCmpop {
match cmpop {
Cmpop::Is => IsCmpop::Is,
Cmpop::IsNot => IsCmpop::IsNot,
_ => unreachable!("Expected Cmpop::Is | Cmpop::IsNot"),
_ => panic!("Expected Cmpop::Is | Cmpop::IsNot"),
}
}
}
@@ -60,7 +59,7 @@ pub fn invalid_literal_comparison(
comparators: &[Expr],
location: Range,
) {
let located = Lazy::new(|| locate_cmpops(checker.locator.slice(location)));
let located = Lazy::new(|| helpers::locate_cmpops(checker.locator.slice(location)));
let mut left = left;
for (index, (op, right)) in izip!(ops, comparators).enumerate() {
if matches!(op, Cmpop::Is | Cmpop::IsNot)

View File

@@ -1,5 +1,3 @@
use std::path::Path;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::Scope;
@@ -19,14 +17,9 @@ impl Violation for UndefinedExport {
}
/// F822
pub fn undefined_export(
names: &[&str],
range: &Range,
path: &Path,
scope: &Scope,
) -> Vec<Diagnostic> {
pub fn undefined_export(names: &[&str], range: &Range, scope: &Scope) -> Vec<Diagnostic> {
let mut diagnostics = Vec::new();
if !scope.import_starred && !path.ends_with("__init__.py") {
if !scope.uses_star_imports() {
for name in names {
if !scope.defines(name) {
diagnostics.push(Diagnostic::new(

View File

@@ -2,7 +2,8 @@ use std::string::ToString;
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::{Bindings, Scope, ScopeKind};
use ruff_python_ast::binding::Bindings;
use ruff_python_ast::scope::{Scope, ScopeKind};
#[violation]
pub struct UndefinedLocal {

View File

@@ -48,7 +48,7 @@ pub fn yield_outside_function(checker: &mut Checker, expr: &Expr) {
ExprKind::Yield { .. } => DeferralKeyword::Yield,
ExprKind::YieldFrom { .. } => DeferralKeyword::YieldFrom,
ExprKind::Await { .. } => DeferralKeyword::Await,
_ => unreachable!("Expected ExprKind::Yield | ExprKind::YieldFrom | ExprKind::Await"),
_ => panic!("Expected ExprKind::Yield | ExprKind::YieldFrom | ExprKind::Await"),
};
checker.diagnostics.push(Diagnostic::new(
YieldOutsideFunction { keyword },

View File

@@ -2,8 +2,8 @@ use rustpython_parser::ast::{Constant, Expr, ExprKind, Keyword};
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::helpers::{is_logger_candidate, SimpleCallArgs};
use ruff_python_ast::logging::LoggingLevel;
use ruff_python_ast::helpers::SimpleCallArgs;
use ruff_python_ast::logging;
use ruff_python_ast::types::Range;
use crate::checkers::ast::Checker;
@@ -100,12 +100,12 @@ pub fn logging_call(checker: &mut Checker, func: &Expr, args: &[Expr], keywords:
return;
}
if !is_logger_candidate(&checker.ctx, func) {
if !logging::is_logger_candidate(&checker.ctx, func) {
return;
}
if let ExprKind::Attribute { attr, .. } = &func.node {
if LoggingLevel::from_attribute(attr.as_str()).is_some() {
if logging::LoggingLevel::from_attribute(attr.as_str()).is_some() {
let call_args = SimpleCallArgs::new(args, keywords);
if let Some(msg) = call_args.argument("msg", 0) {
if let ExprKind::Constant {

View File

@@ -1,12 +1,10 @@
use rustpython_parser::ast::{Expr, ExprKind};
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Violation};
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::scope::{
BindingKind, FromImportation, Importation, StarImportation, SubmoduleImportation,
};
use ruff_python_ast::types::Range;
use crate::autofix::helpers::get_or_import_symbol;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -28,76 +26,6 @@ impl Violation for SysExitAlias {
Some(|SysExitAlias { name }| format!("Replace `{name}` with `sys.exit()`"))
}
}
/// Return `true` if the `module` was imported using a star import (e.g., `from
/// sys import *`).
fn is_module_star_imported(checker: &Checker, module: &str) -> bool {
checker.ctx.scopes().any(|scope| {
scope.binding_ids().any(|index| {
if let BindingKind::StarImportation(StarImportation { module: name, .. }) =
&checker.ctx.bindings[*index].kind
{
name.as_ref().map_or(false, |name| name == module)
} else {
false
}
})
})
}
/// Return the appropriate `sys.exit` reference based on the current set of
/// imports, or `None` is `sys.exit` hasn't been imported.
fn get_member_import_name_alias(checker: &Checker, module: &str, member: &str) -> Option<String> {
checker.ctx.scopes().find_map(|scope| {
scope
.binding_ids()
.find_map(|index| match &checker.ctx.bindings[*index].kind {
// e.g. module=sys object=exit
// `import sys` -> `sys.exit`
// `import sys as sys2` -> `sys2.exit`
BindingKind::Importation(Importation { name, full_name }) => {
if full_name == &module {
Some(format!("{name}.{member}"))
} else {
None
}
}
// e.g. module=os.path object=join
// `from os.path import join` -> `join`
// `from os.path import join as join2` -> `join2`
BindingKind::FromImportation(FromImportation { name, full_name }) => {
let mut parts = full_name.split('.');
if parts.next() == Some(module)
&& parts.next() == Some(member)
&& parts.next().is_none()
{
Some((*name).to_string())
} else {
None
}
}
// e.g. module=os.path object=join
// `from os.path import *` -> `join`
BindingKind::StarImportation(StarImportation { module: name, .. }) => {
if name.as_ref().map_or(false, |name| name == module) {
Some(member.to_string())
} else {
None
}
}
// e.g. module=os.path object=join
// `import os.path ` -> `os.path.join`
BindingKind::SubmoduleImportation(SubmoduleImportation { full_name, .. }) => {
if full_name == &module {
Some(format!("{full_name}.{member}"))
} else {
None
}
}
// Non-imports.
_ => None,
})
})
}
/// PLR1722
pub fn sys_exit_alias(checker: &mut Checker, func: &Expr) {
@@ -108,9 +36,6 @@ pub fn sys_exit_alias(checker: &mut Checker, func: &Expr) {
if id != name {
continue;
}
if name == "exit" && is_module_star_imported(checker, "sys") {
continue;
}
if !checker.ctx.is_builtin(name) {
continue;
}
@@ -121,13 +46,18 @@ pub fn sys_exit_alias(checker: &mut Checker, func: &Expr) {
Range::from(func),
);
if checker.patch(diagnostic.kind.rule()) {
if let Some(content) = get_member_import_name_alias(checker, "sys", "exit") {
diagnostic.set_fix(Edit::replacement(
content,
func.location,
func.end_location.unwrap(),
));
}
diagnostic.try_set_fix(|| {
let (import_edit, binding) = get_or_import_symbol(
"sys",
"exit",
&checker.ctx,
&checker.importer,
checker.locator,
)?;
let reference_edit =
Edit::replacement(binding, func.location, func.end_location.unwrap());
Ok(Fix::from_iter([import_edit, reference_edit]))
});
}
checker.diagnostics.push(diagnostic);
}

View File

@@ -14,7 +14,21 @@ expression: diagnostics
row: 1
column: 4
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 1
column: 0
end_location:
row: 1
column: 4
parent: ~
- kind:
name: SysExitAlias
@@ -28,7 +42,21 @@ expression: diagnostics
row: 2
column: 4
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 2
column: 0
end_location:
row: 2
column: 4
parent: ~
- kind:
name: SysExitAlias
@@ -42,7 +70,21 @@ expression: diagnostics
row: 6
column: 8
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 6
column: 4
end_location:
row: 6
column: 8
parent: ~
- kind:
name: SysExitAlias
@@ -56,6 +98,20 @@ expression: diagnostics
row: 7
column: 8
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 7
column: 4
end_location:
row: 7
column: 8
parent: ~

View File

@@ -15,6 +15,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: import sys
location:
row: 1
column: 0
end_location:
row: 1
column: 10
- content: sys.exit
location:
row: 3
@@ -36,6 +43,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: import sys
location:
row: 1
column: 0
end_location:
row: 1
column: 10
- content: sys.exit
location:
row: 4
@@ -57,6 +71,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: import sys
location:
row: 1
column: 0
end_location:
row: 1
column: 10
- content: sys.exit
location:
row: 8
@@ -78,6 +99,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: import sys
location:
row: 1
column: 0
end_location:
row: 1
column: 10
- content: sys.exit
location:
row: 9
@@ -86,4 +114,32 @@ expression: diagnostics
row: 9
column: 8
parent: ~
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `exit`"
suggestion: "Replace `exit` with `sys.exit()`"
fixable: true
location:
row: 15
column: 4
end_location:
row: 15
column: 8
fix:
edits: []
parent: ~
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `quit`"
suggestion: "Replace `quit` with `sys.exit()`"
fixable: true
location:
row: 16
column: 4
end_location:
row: 16
column: 8
fix:
edits: []
parent: ~

View File

@@ -15,6 +15,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: import sys as sys2
location:
row: 1
column: 0
end_location:
row: 1
column: 18
- content: sys2.exit
location:
row: 3
@@ -36,6 +43,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: import sys as sys2
location:
row: 1
column: 0
end_location:
row: 1
column: 18
- content: sys2.exit
location:
row: 4
@@ -57,6 +71,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: import sys as sys2
location:
row: 1
column: 0
end_location:
row: 1
column: 18
- content: sys2.exit
location:
row: 8
@@ -78,6 +99,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: import sys as sys2
location:
row: 1
column: 0
end_location:
row: 1
column: 18
- content: sys2.exit
location:
row: 9

View File

@@ -15,6 +15,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: from sys import exit
location:
row: 1
column: 0
end_location:
row: 1
column: 20
- content: exit
location:
row: 4
@@ -36,6 +43,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: from sys import exit
location:
row: 1
column: 0
end_location:
row: 1
column: 20
- content: exit
location:
row: 9
@@ -44,4 +58,18 @@ expression: diagnostics
row: 9
column: 8
parent: ~
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `quit`"
suggestion: "Replace `quit` with `sys.exit()`"
fixable: true
location:
row: 16
column: 4
end_location:
row: 16
column: 8
fix:
edits: []
parent: ~

View File

@@ -15,6 +15,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: from sys import exit as exit2
location:
row: 1
column: 0
end_location:
row: 1
column: 29
- content: exit2
location:
row: 3
@@ -36,6 +43,13 @@ expression: diagnostics
column: 4
fix:
edits:
- content: from sys import exit as exit2
location:
row: 1
column: 0
end_location:
row: 1
column: 29
- content: exit2
location:
row: 4
@@ -57,6 +71,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: from sys import exit as exit2
location:
row: 1
column: 0
end_location:
row: 1
column: 29
- content: exit2
location:
row: 8
@@ -78,6 +99,13 @@ expression: diagnostics
column: 8
fix:
edits:
- content: from sys import exit as exit2
location:
row: 1
column: 0
end_location:
row: 1
column: 29
- content: exit2
location:
row: 9

View File

@@ -2,6 +2,20 @@
source: crates/ruff/src/rules/pylint/mod.rs
expression: diagnostics
---
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `exit`"
suggestion: "Replace `exit` with `sys.exit()`"
fixable: true
location:
row: 3
column: 0
end_location:
row: 3
column: 4
fix:
edits: []
parent: ~
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `quit`"
@@ -14,14 +28,21 @@ expression: diagnostics
row: 4
column: 4
fix:
edits:
- content: exit
location:
row: 4
column: 0
end_location:
row: 4
column: 4
edits: []
parent: ~
- kind:
name: SysExitAlias
body: "Use `sys.exit()` instead of `exit`"
suggestion: "Replace `exit` with `sys.exit()`"
fixable: true
location:
row: 8
column: 4
end_location:
row: 8
column: 8
fix:
edits: []
parent: ~
- kind:
name: SysExitAlias
@@ -35,13 +56,6 @@ expression: diagnostics
row: 9
column: 8
fix:
edits:
- content: exit
location:
row: 9
column: 4
end_location:
row: 9
column: 8
edits: []
parent: ~

View File

@@ -14,7 +14,21 @@ expression: diagnostics
row: 1
column: 4
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 1
column: 0
end_location:
row: 1
column: 4
parent: ~
- kind:
name: SysExitAlias
@@ -28,6 +42,20 @@ expression: diagnostics
row: 2
column: 4
fix:
edits: []
edits:
- content: "import sys\n"
location:
row: 1
column: 0
end_location:
row: 1
column: 0
- content: sys.exit
location:
row: 2
column: 0
end_location:
row: 2
column: 4
parent: ~

View File

@@ -59,7 +59,8 @@ mod tests {
#[test_case(Rule::PrintfStringFormatting, Path::new("UP031_0.py"); "UP031_0")]
#[test_case(Rule::PrintfStringFormatting, Path::new("UP031_1.py"); "UP031_1")]
#[test_case(Rule::FString, Path::new("UP032.py"); "UP032")]
#[test_case(Rule::LRUCacheWithMaxsizeNone, Path::new("UP033.py"); "UP033")]
#[test_case(Rule::LRUCacheWithMaxsizeNone, Path::new("UP033_0.py"); "UP033_0")]
#[test_case(Rule::LRUCacheWithMaxsizeNone, Path::new("UP033_1.py"); "UP033_1")]
#[test_case(Rule::ExtraneousParentheses, Path::new("UP034.py"); "UP034")]
#[test_case(Rule::DeprecatedImport, Path::new("UP035.py"); "UP035")]
#[test_case(Rule::OutdatedVersionBlock, Path::new("UP036_0.py"); "UP036_0")]

View File

@@ -8,7 +8,6 @@ use ruff_python_ast::helpers::{create_expr, create_stmt, unparse_stmt};
use ruff_python_ast::source_code::Stylist;
use ruff_python_ast::types::Range;
use ruff_python_stdlib::identifiers::is_identifier;
use ruff_python_stdlib::keyword::KWLIST;
use crate::checkers::ast::Checker;
use crate::registry::AsRule;
@@ -135,7 +134,7 @@ fn create_properties_from_args(args: &[Expr], defaults: &[Expr]) -> Result<Vec<S
} = &field_name.node else {
bail!("Expected `field_name` to be `Constant::Str`")
};
if !is_identifier(property) || KWLIST.contains(&property.as_str()) {
if !is_identifier(property) {
bail!("Invalid property name: {}", property)
}
Ok(create_property_assignment_stmt(

Some files were not shown because too many files have changed in this diff Show More