Compare commits

..

1 Commits

Author SHA1 Message Date
Andrew Gallant
cc466be6a6 [ty] Fix missing registration of identifiers during semantic index construction
Fixes #572
2025-06-03 10:04:03 -04:00
199 changed files with 8717 additions and 11671 deletions

View File

@@ -1,31 +1,5 @@
# Changelog
## 0.11.13
### Preview features
- \[`airflow`\] Add unsafe fix for module moved cases (`AIR301`,`AIR311`,`AIR312`,`AIR302`) ([#18367](https://github.com/astral-sh/ruff/pull/18367),[#18366](https://github.com/astral-sh/ruff/pull/18366),[#18363](https://github.com/astral-sh/ruff/pull/18363),[#18093](https://github.com/astral-sh/ruff/pull/18093))
- \[`refurb`\] Add coverage of `set` and `frozenset` calls (`FURB171`) ([#18035](https://github.com/astral-sh/ruff/pull/18035))
- \[`refurb`\] Mark `FURB180` fix unsafe when class has bases ([#18149](https://github.com/astral-sh/ruff/pull/18149))
### Bug fixes
- \[`perflint`\] Fix missing parentheses for lambda and ternary conditions (`PERF401`, `PERF403`) ([#18412](https://github.com/astral-sh/ruff/pull/18412))
- \[`pyupgrade`\] Apply `UP035` only on py313+ for `get_type_hints()` ([#18476](https://github.com/astral-sh/ruff/pull/18476))
- \[`pyupgrade`\] Make fix unsafe if it deletes comments (`UP004`,`UP050`) ([#18393](https://github.com/astral-sh/ruff/pull/18393), [#18390](https://github.com/astral-sh/ruff/pull/18390))
### Rule changes
- \[`fastapi`\] Avoid false positive for class dependencies (`FAST003`) ([#18271](https://github.com/astral-sh/ruff/pull/18271))
### Documentation
- Update editor setup docs for Neovim and Vim ([#18324](https://github.com/astral-sh/ruff/pull/18324))
### Other changes
- Support Python 3.14 template strings (t-strings) in formatter and parser ([#17851](https://github.com/astral-sh/ruff/pull/17851))
## 0.11.12
### Preview features

15
Cargo.lock generated
View File

@@ -2501,7 +2501,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.11.13"
version = "0.11.12"
dependencies = [
"anyhow",
"argfile",
@@ -2738,7 +2738,7 @@ dependencies = [
[[package]]
name = "ruff_linter"
version = "0.11.13"
version = "0.11.12"
dependencies = [
"aho-corasick",
"anyhow",
@@ -3074,7 +3074,7 @@ dependencies = [
[[package]]
name = "ruff_wasm"
version = "0.11.13"
version = "0.11.12"
dependencies = [
"console_error_panic_hook",
"console_log",
@@ -3194,7 +3194,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "salsa"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2b5188778e91a5ab50cb7d827148caf7eb2f4630#2b5188778e91a5ab50cb7d827148caf7eb2f4630"
dependencies = [
"boxcar",
"compact_str",
@@ -3218,13 +3218,14 @@ dependencies = [
[[package]]
name = "salsa-macro-rules"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2b5188778e91a5ab50cb7d827148caf7eb2f4630#2b5188778e91a5ab50cb7d827148caf7eb2f4630"
[[package]]
name = "salsa-macros"
version = "0.22.0"
source = "git+https://github.com/carljm/salsa.git?rev=0f6d406f6c309964279baef71588746b8c67b4a3#0f6d406f6c309964279baef71588746b8c67b4a3"
source = "git+https://github.com/salsa-rs/salsa.git?rev=2b5188778e91a5ab50cb7d827148caf7eb2f4630#2b5188778e91a5ab50cb7d827148caf7eb2f4630"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
@@ -3965,7 +3966,6 @@ dependencies = [
"anyhow",
"bitflags 2.9.1",
"camino",
"colored 3.0.0",
"compact_str",
"countme",
"dir-test",
@@ -3978,7 +3978,6 @@ dependencies = [
"ordermap",
"quickcheck",
"quickcheck_macros",
"ruff_annotate_snippets",
"ruff_db",
"ruff_index",
"ruff_macros",

View File

@@ -129,7 +129,7 @@ regex = { version = "1.10.2" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/carljm/salsa.git", rev = "0f6d406f6c309964279baef71588746b8c67b4a3" }
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "2b5188778e91a5ab50cb7d827148caf7eb2f4630" }
schemars = { version = "0.8.16" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }

View File

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
# For a specific version.
curl -LsSf https://astral.sh/ruff/0.11.13/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.11.13/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.11.12/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.11.12/install.ps1 | iex"
```
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.11.13
rev: v0.11.12
hooks:
# Run the linter.
- id: ruff

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.11.13"
version = "0.11.12"
publish = true
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -439,10 +439,7 @@ impl LintCacheData {
let messages = messages
.iter()
// Parse the kebab-case rule name into a `Rule`. This will fail for syntax errors, so
// this also serves to filter them out, but we shouldn't be caching files with syntax
// errors anyway.
.filter_map(|msg| Some((msg.name().parse().ok()?, msg)))
.filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.map(|(rule, msg)| {
// Make sure that all messages use the same source file.
assert_eq!(

View File

@@ -30,7 +30,7 @@ impl<'a> Explanation<'a> {
let (linter, _) = Linter::parse_code(&code).unwrap();
let fix = rule.fixable().to_string();
Self {
name: rule.name().as_str(),
name: rule.as_ref(),
code,
linter: linter.name(),
summary: rule.message_formats()[0],
@@ -44,7 +44,7 @@ impl<'a> Explanation<'a> {
fn format_rule_text(rule: Rule) -> String {
let mut output = String::new();
let _ = write!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
let _ = write!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
output.push('\n');
output.push('\n');

View File

@@ -165,9 +165,9 @@ impl AddAssign for FixMap {
continue;
}
let fixed_in_file = self.0.entry(filename).or_default();
for (rule, name, count) in fixed.iter() {
for (rule, count) in fixed {
if count > 0 {
*fixed_in_file.entry(rule).or_default(name) += count;
*fixed_in_file.entry(rule).or_default() += count;
}
}
}
@@ -305,7 +305,7 @@ pub(crate) fn lint_path(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
}
} else {
@@ -319,7 +319,7 @@ pub(crate) fn lint_path(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
};
@@ -473,7 +473,7 @@ pub(crate) fn lint_stdin(
}
let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
}
} else {
@@ -487,7 +487,7 @@ pub(crate) fn lint_stdin(
ParseSource::None,
);
let transformed = source_kind;
let fixed = FixTable::default();
let fixed = FxHashMap::default();
(result, transformed, fixed)
};

View File

@@ -7,7 +7,6 @@ use bitflags::bitflags;
use colored::Colorize;
use itertools::{Itertools, iterate};
use ruff_linter::codes::NoqaCode;
use ruff_linter::linter::FixTable;
use serde::Serialize;
use ruff_linter::fs::relativize_path;
@@ -81,7 +80,7 @@ impl Printer {
let fixed = diagnostics
.fixed
.values()
.flat_map(FixTable::counts)
.flat_map(std::collections::HashMap::values)
.sum::<usize>();
if self.flags.intersects(Flags::SHOW_VIOLATIONS) {
@@ -303,7 +302,7 @@ impl Printer {
let statistics: Vec<ExpandedStatistics> = diagnostics
.messages
.iter()
.map(|message| (message.noqa_code(), message))
.map(|message| (message.to_noqa_code(), message))
.sorted_by_key(|(code, message)| (*code, message.fixable()))
.fold(
vec![],
@@ -473,13 +472,13 @@ fn show_fix_status(fix_mode: flags::FixMode, fixables: Option<&FixableStatistics
fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
let total = fixed
.values()
.map(|table| table.counts().sum::<usize>())
.map(|table| table.values().sum::<usize>())
.sum::<usize>();
assert!(total > 0);
let num_digits = num_digits(
fixed
*fixed
.values()
.filter_map(|table| table.counts().max())
.filter_map(|table| table.values().max())
.max()
.unwrap(),
);
@@ -499,11 +498,12 @@ fn print_fix_summary(writer: &mut dyn Write, fixed: &FixMap) -> Result<()> {
relativize_path(filename).bold(),
":".cyan()
)?;
for (code, name, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
for (rule, count) in table.iter().sorted_by_key(|(.., count)| Reverse(*count)) {
writeln!(
writer,
" {count:>num_digits$} × {code} ({name})",
code = code.to_string().red().bold(),
" {count:>num_digits$} × {} ({})",
rule.noqa_code().to_string().red().bold(),
rule.as_ref(),
)?;
}
}

View File

@@ -566,7 +566,7 @@ fn venv() -> Result<()> {
----- stderr -----
ruff failed
Cause: Invalid search path settings
Cause: Failed to discover the site-packages directory: Invalid `--python` argument `none`: does not point to a Python executable or a directory on disk
Cause: Failed to discover the site-packages directory: Invalid `--python` argument: `none` could not be canonicalized
");
});

View File

@@ -5436,15 +5436,14 @@ match 2:
print("it's one")
"#
),
@r###"
success: false
exit_code: 1
@r"
success: true
exit_code: 0
----- stdout -----
test.py:2:1: SyntaxError: Cannot use `match` statement on Python 3.9 (syntax was added in Python 3.10)
Found 1 error.
All checks passed!
----- stderr -----
"###
"
);
// syntax error on 3.9 with preview

View File

@@ -1,4 +1,5 @@
use std::fmt::Formatter;
use std::ops::Deref;
use std::sync::Arc;
use ruff_python_ast::ModModule;
@@ -17,7 +18,7 @@ use crate::source::source_text;
/// The query is only cached when the [`source_text()`] hasn't changed. This is because
/// comparing two ASTs is a non-trivial operation and every offset change is directly
/// reflected in the changed AST offsets.
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
/// for determining if a query result is unchanged.
#[salsa::tracked(returns(ref), no_eq)]
pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
@@ -35,10 +36,7 @@ pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
ParsedModule::new(parsed)
}
/// A wrapper around a parsed module.
///
/// This type manages instances of the module AST. A particular instance of the AST
/// is represented with the [`ParsedModuleRef`] type.
/// Cheap cloneable wrapper around the parsed module.
#[derive(Clone)]
pub struct ParsedModule {
inner: Arc<Parsed<ModModule>>,
@@ -51,11 +49,17 @@ impl ParsedModule {
}
}
/// Loads a reference to the parsed module.
pub fn load(&self, _db: &dyn Db) -> ParsedModuleRef {
ParsedModuleRef {
module_ref: self.inner.clone(),
}
/// Consumes `self` and returns the Arc storing the parsed module.
pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
self.inner
}
}
impl Deref for ParsedModule {
type Target = Parsed<ModModule>;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
@@ -73,30 +77,6 @@ impl PartialEq for ParsedModule {
impl Eq for ParsedModule {}
/// Cheap cloneable wrapper around an instance of a module AST.
#[derive(Clone)]
pub struct ParsedModuleRef {
module_ref: Arc<Parsed<ModModule>>,
}
impl ParsedModuleRef {
pub fn as_arc(&self) -> &Arc<Parsed<ModModule>> {
&self.module_ref
}
pub fn into_arc(self) -> Arc<Parsed<ModModule>> {
self.module_ref
}
}
impl std::ops::Deref for ParsedModuleRef {
type Target = Parsed<ModModule>;
fn deref(&self) -> &Self::Target {
&self.module_ref
}
}
#[cfg(test)]
mod tests {
use crate::Db;
@@ -118,7 +98,7 @@ mod tests {
let file = system_path_to_file(&db, path).unwrap();
let parsed = parsed_module(&db, file).load(&db);
let parsed = parsed_module(&db, file);
assert!(parsed.has_valid_syntax());
@@ -134,7 +114,7 @@ mod tests {
let file = system_path_to_file(&db, path).unwrap();
let parsed = parsed_module(&db, file).load(&db);
let parsed = parsed_module(&db, file);
assert!(parsed.has_valid_syntax());
@@ -150,7 +130,7 @@ mod tests {
let virtual_file = db.files().virtual_file(&db, path);
let parsed = parsed_module(&db, virtual_file.file()).load(&db);
let parsed = parsed_module(&db, virtual_file.file());
assert!(parsed.has_valid_syntax());
@@ -166,7 +146,7 @@ mod tests {
let virtual_file = db.files().virtual_file(&db, path);
let parsed = parsed_module(&db, virtual_file.file()).load(&db);
let parsed = parsed_module(&db, virtual_file.file());
assert!(parsed.has_valid_syntax());
@@ -197,7 +177,7 @@ else:
let file = vendored_path_to_file(&db, VendoredPath::new("path.pyi")).unwrap();
let parsed = parsed_module(&db, file).load(&db);
let parsed = parsed_module(&db, file);
assert!(parsed.has_valid_syntax());
}

View File

@@ -171,21 +171,6 @@ pub trait System: Debug {
PatternError,
>;
/// Fetches the environment variable `key` from the current process.
///
/// # Errors
///
/// Returns [`std::env::VarError::NotPresent`] if:
/// - The variable is not set.
/// - The variable's name contains an equal sign or NUL (`'='` or `'\0'`).
///
/// Returns [`std::env::VarError::NotUnicode`] if the variable's value is not valid
/// Unicode.
fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
let _ = name;
Err(std::env::VarError::NotPresent)
}
fn as_any(&self) -> &dyn std::any::Any;
fn as_any_mut(&mut self) -> &mut dyn std::any::Any;

View File

@@ -214,10 +214,6 @@ impl System for OsSystem {
})
})))
}
fn env_var(&self, name: &str) -> std::result::Result<String, std::env::VarError> {
std::env::var(name)
}
}
impl OsSystem {

View File

@@ -29,7 +29,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
if let Some(explanation) = rule.explanation() {
let mut output = String::new();
let _ = writeln!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
let _ = writeln!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
let (linter, _) = Linter::parse_code(&rule.noqa_code().to_string()).unwrap();
if linter.url().is_some() {
@@ -101,7 +101,7 @@ pub(crate) fn main(args: &Args) -> Result<()> {
let filename = PathBuf::from(ROOT_DIR)
.join("docs")
.join("rules")
.join(&*rule.name())
.join(rule.as_ref())
.with_extension("md");
if args.dry_run {

View File

@@ -55,7 +55,7 @@ fn generate_table(table_out: &mut String, rules: impl IntoIterator<Item = Rule>,
FixAvailability::None => format!("<span {SYMBOL_STYLE}></span>"),
};
let rule_name = rule.name();
let rule_name = rule.as_ref();
// If the message ends in a bracketed expression (like: "Use {replacement}"), escape the
// brackets. Otherwise, it'll be interpreted as an HTML attribute via the `attr_list`

View File

@@ -10,7 +10,7 @@ use ruff_python_ast::PythonVersion;
use ty_python_semantic::lint::{LintRegistry, RuleSelection};
use ty_python_semantic::{
Db, Program, ProgramSettings, PythonPath, PythonPlatform, PythonVersionSource,
PythonVersionWithSource, SearchPathSettings, SysPrefixPathOrigin, default_lint_registry,
PythonVersionWithSource, SearchPathSettings, default_lint_registry,
};
static EMPTY_VENDORED: std::sync::LazyLock<VendoredFileSystem> = std::sync::LazyLock::new(|| {
@@ -37,8 +37,7 @@ impl ModuleDb {
) -> Result<Self> {
let mut search_paths = SearchPathSettings::new(src_roots);
if let Some(venv_path) = venv_path {
search_paths.python_path =
PythonPath::sys_prefix(venv_path, SysPrefixPathOrigin::PythonCliFlag);
search_paths.python_path = PythonPath::from_cli_flag(venv_path);
}
let db = Self::default();

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.11.13"
version = "0.11.12"
publish = false
authors = { workspace = true }
edition = { workspace = true }

View File

@@ -266,15 +266,3 @@ def f():
result = list() # this should be replaced with a comprehension
for i in values:
result.append(i + 1) # PERF401
def f():
src = [1]
dst = []
for i in src:
if True if True else False:
dst.append(i)
for i in src:
if lambda: 0:
dst.append(i)

View File

@@ -151,16 +151,3 @@ def foo():
result = {}
for idx, name in indices, fruit:
result[name] = idx # PERF403
def foo():
src = (("x", 1),)
dst = {}
for k, v in src:
if True if True else False:
dst[k] = v
for k, v in src:
if lambda: 0:
dst[k] = v

View File

@@ -110,8 +110,6 @@ from typing_extensions import CapsuleType
# UP035 on py313+ only
from typing_extensions import deprecated
# UP035 on py313+ only
from typing_extensions import get_type_hints
# https://github.com/astral-sh/ruff/issues/15780
from typing_extensions import is_typeddict

View File

@@ -102,6 +102,3 @@ with open("furb129.py") as f:
pass
for line in(f).readlines():
pass
# Test case for issue #17683 (missing space before keyword)
print([line for line in f.readlines()if True])

View File

@@ -65,7 +65,7 @@ use crate::docstrings::extraction::ExtractionTarget;
use crate::importer::{ImportRequest, Importer, ResolutionError};
use crate::noqa::NoqaMapping;
use crate::package::PackageRoot;
use crate::preview::is_undefined_export_in_dunder_init_enabled;
use crate::preview::{is_semantic_errors_enabled, is_undefined_export_in_dunder_init_enabled};
use crate::registry::{AsRule, Rule};
use crate::rules::pyflakes::rules::{
LateFutureImport, ReturnOutsideFunction, YieldOutsideFunction,
@@ -663,7 +663,9 @@ impl SemanticSyntaxContext for Checker<'_> {
| SemanticSyntaxErrorKind::AsyncComprehensionInSyncComprehension(_)
| SemanticSyntaxErrorKind::DuplicateParameter(_)
| SemanticSyntaxErrorKind::NonlocalDeclarationAtModuleLevel => {
self.semantic_errors.borrow_mut().push(error);
if is_semantic_errors_enabled(self.settings) {
self.semantic_errors.borrow_mut().push(error);
}
}
}
}

View File

@@ -12,6 +12,7 @@ use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
};
use crate::preview::is_check_file_level_directives_enabled;
use crate::registry::{AsRule, Rule, RuleSet};
use crate::rule_redirects::get_redirect_target;
use crate::rules::pygrep_hooks;
@@ -111,16 +112,25 @@ pub(crate) fn check_noqa(
&& !exemption.includes(Rule::UnusedNOQA)
&& !per_file_ignores.contains(Rule::UnusedNOQA)
{
let directives = noqa_directives
.lines()
.iter()
.map(|line| (&line.directive, &line.matches, false))
.chain(
file_noqa_directives
.lines()
.iter()
.map(|line| (&line.parsed_file_exemption, &line.matches, true)),
);
let directives: Vec<_> = if is_check_file_level_directives_enabled(settings) {
noqa_directives
.lines()
.iter()
.map(|line| (&line.directive, &line.matches, false))
.chain(
file_noqa_directives
.lines()
.iter()
.map(|line| (&line.parsed_file_exemption, &line.matches, true)),
)
.collect()
} else {
noqa_directives
.lines()
.iter()
.map(|line| (&line.directive, &line.matches, false))
.collect()
};
for (directive, matches, is_file_level) in directives {
match directive {
Directive::All(directive) => {

View File

@@ -4,13 +4,13 @@
/// `--select`. For pylint this is e.g. C0414 and E0118 but also C and E01.
use std::fmt::Formatter;
use strum_macros::EnumIter;
use strum_macros::{AsRefStr, EnumIter};
use crate::registry::Linter;
use crate::rule_selector::is_single_rule_selector;
use crate::rules;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct NoqaCode(&'static str, &'static str);
impl NoqaCode {
@@ -847,7 +847,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Flake8PytestStyle, "026") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestUseFixturesWithoutParameters),
(Flake8PytestStyle, "027") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestUnittestRaisesAssertion),
(Flake8PytestStyle, "028") => (RuleGroup::Preview, rules::flake8_pytest_style::rules::PytestParameterWithDefaultArgument),
(Flake8PytestStyle, "029") => (RuleGroup::Stable, rules::flake8_pytest_style::rules::PytestWarnsWithoutWarning),
(Flake8PytestStyle, "029") => (RuleGroup::Preview, rules::flake8_pytest_style::rules::PytestWarnsWithoutWarning),
(Flake8PytestStyle, "030") => (RuleGroup::Preview, rules::flake8_pytest_style::rules::PytestWarnsTooBroad),
(Flake8PytestStyle, "031") => (RuleGroup::Preview, rules::flake8_pytest_style::rules::PytestWarnsWithMultipleStatements),
@@ -1019,7 +1019,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Ruff, "049") => (RuleGroup::Preview, rules::ruff::rules::DataclassEnum),
(Ruff, "051") => (RuleGroup::Stable, rules::ruff::rules::IfKeyInDictDel),
(Ruff, "052") => (RuleGroup::Preview, rules::ruff::rules::UsedDummyVariable),
(Ruff, "053") => (RuleGroup::Stable, rules::ruff::rules::ClassWithMixedTypeVars),
(Ruff, "053") => (RuleGroup::Preview, rules::ruff::rules::ClassWithMixedTypeVars),
(Ruff, "054") => (RuleGroup::Preview, rules::ruff::rules::IndentedFormFeed),
(Ruff, "055") => (RuleGroup::Preview, rules::ruff::rules::UnnecessaryRegularExpression),
(Ruff, "056") => (RuleGroup::Preview, rules::ruff::rules::FalsyDictGetFallback),
@@ -1129,7 +1129,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
(Refurb, "156") => (RuleGroup::Preview, rules::refurb::rules::HardcodedStringCharset),
(Refurb, "157") => (RuleGroup::Preview, rules::refurb::rules::VerboseDecimalConstructor),
(Refurb, "161") => (RuleGroup::Stable, rules::refurb::rules::BitCount),
(Refurb, "162") => (RuleGroup::Stable, rules::refurb::rules::FromisoformatReplaceZ),
(Refurb, "162") => (RuleGroup::Preview, rules::refurb::rules::FromisoformatReplaceZ),
(Refurb, "163") => (RuleGroup::Stable, rules::refurb::rules::RedundantLogBase),
(Refurb, "164") => (RuleGroup::Preview, rules::refurb::rules::UnnecessaryFromFloat),
(Refurb, "166") => (RuleGroup::Preview, rules::refurb::rules::IntOnSlicedStr),

View File

@@ -1,7 +1,7 @@
use std::collections::BTreeSet;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_hash::{FxHashMap, FxHashSet};
use ruff_diagnostics::{IsolationLevel, SourceMap};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
@@ -59,13 +59,13 @@ fn apply_fixes<'a>(
let mut last_pos: Option<TextSize> = None;
let mut applied: BTreeSet<&Edit> = BTreeSet::default();
let mut isolated: FxHashSet<u32> = FxHashSet::default();
let mut fixed = FixTable::default();
let mut fixed = FxHashMap::default();
let mut source_map = SourceMap::default();
for (code, name, fix) in diagnostics
.filter_map(|msg| msg.noqa_code().map(|code| (code, msg.name(), msg)))
.filter_map(|(code, name, diagnostic)| diagnostic.fix().map(|fix| (code, name, fix)))
.sorted_by(|(_, name1, fix1), (_, name2, fix2)| cmp_fix(name1, name2, fix1, fix2))
for (rule, fix) in diagnostics
.filter_map(|msg| msg.to_rule().map(|rule| (rule, msg)))
.filter_map(|(rule, diagnostic)| diagnostic.fix().map(|fix| (rule, fix)))
.sorted_by(|(rule1, fix1), (rule2, fix2)| cmp_fix(*rule1, *rule2, fix1, fix2))
{
let mut edits = fix
.edits()
@@ -110,7 +110,7 @@ fn apply_fixes<'a>(
}
applied.extend(applied_edits.drain(..));
*fixed.entry(code).or_default(name) += 1;
*fixed.entry(rule).or_default() += 1;
}
// Add the remaining content.
@@ -125,44 +125,34 @@ fn apply_fixes<'a>(
}
/// Compare two fixes.
fn cmp_fix(name1: &str, name2: &str, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
fn cmp_fix(rule1: Rule, rule2: Rule, fix1: &Fix, fix2: &Fix) -> std::cmp::Ordering {
// Always apply `RedefinedWhileUnused` before `UnusedImport`, as the latter can end up fixing
// the former. But we can't apply this just for `RedefinedWhileUnused` and `UnusedImport` because it violates
// `< is transitive: a < b and b < c implies a < c. The same must hold for both == and >.`
// See https://github.com/astral-sh/ruff/issues/12469#issuecomment-2244392085
let redefined_while_unused = Rule::RedefinedWhileUnused.name().as_str();
if (name1, name2) == (redefined_while_unused, redefined_while_unused) {
std::cmp::Ordering::Equal
} else if name1 == redefined_while_unused {
std::cmp::Ordering::Less
} else if name2 == redefined_while_unused {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Equal
match (rule1, rule2) {
(Rule::RedefinedWhileUnused, Rule::RedefinedWhileUnused) => std::cmp::Ordering::Equal,
(Rule::RedefinedWhileUnused, _) => std::cmp::Ordering::Less,
(_, Rule::RedefinedWhileUnused) => std::cmp::Ordering::Greater,
_ => std::cmp::Ordering::Equal,
}
// Apply fixes in order of their start position.
.then_with(|| fix1.min_start().cmp(&fix2.min_start()))
// Break ties in the event of overlapping rules, for some specific combinations.
.then_with(|| {
let rules = (name1, name2);
.then_with(|| match (&rule1, &rule2) {
// Apply `MissingTrailingPeriod` fixes before `NewLineAfterLastParagraph` fixes.
let missing_trailing_period = Rule::MissingTrailingPeriod.name().as_str();
let newline_after_last_paragraph = Rule::NewLineAfterLastParagraph.name().as_str();
let if_else_instead_of_dict_get = Rule::IfElseBlockInsteadOfDictGet.name().as_str();
let if_else_instead_of_if_exp = Rule::IfElseBlockInsteadOfIfExp.name().as_str();
if rules == (missing_trailing_period, newline_after_last_paragraph) {
std::cmp::Ordering::Less
} else if rules == (newline_after_last_paragraph, missing_trailing_period) {
(Rule::MissingTrailingPeriod, Rule::NewLineAfterLastParagraph) => std::cmp::Ordering::Less,
(Rule::NewLineAfterLastParagraph, Rule::MissingTrailingPeriod) => {
std::cmp::Ordering::Greater
}
// Apply `IfElseBlockInsteadOfDictGet` fixes before `IfElseBlockInsteadOfIfExp` fixes.
else if rules == (if_else_instead_of_dict_get, if_else_instead_of_if_exp) {
(Rule::IfElseBlockInsteadOfDictGet, Rule::IfElseBlockInsteadOfIfExp) => {
std::cmp::Ordering::Less
} else if rules == (if_else_instead_of_if_exp, if_else_instead_of_dict_get) {
std::cmp::Ordering::Greater
} else {
std::cmp::Ordering::Equal
}
(Rule::IfElseBlockInsteadOfIfExp, Rule::IfElseBlockInsteadOfDictGet) => {
std::cmp::Ordering::Greater
}
_ => std::cmp::Ordering::Equal,
})
}
@@ -207,7 +197,7 @@ mod tests {
source_map,
} = apply_fixes(diagnostics.iter(), &locator);
assert_eq!(code, "");
assert_eq!(fixes.counts().sum::<usize>(), 0);
assert_eq!(fixes.values().sum::<usize>(), 0);
assert!(source_map.markers().is_empty());
}
@@ -244,7 +234,7 @@ print("hello world")
"#
.trim()
);
assert_eq!(fixes.counts().sum::<usize>(), 1);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
@@ -285,7 +275,7 @@ class A(Bar):
"
.trim(),
);
assert_eq!(fixes.counts().sum::<usize>(), 1);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
@@ -322,7 +312,7 @@ class A:
"
.trim()
);
assert_eq!(fixes.counts().sum::<usize>(), 1);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[
@@ -363,7 +353,7 @@ class A(object):
"
.trim()
);
assert_eq!(fixes.counts().sum::<usize>(), 2);
assert_eq!(fixes.values().sum::<usize>(), 2);
assert_eq!(
source_map.markers(),
&[
@@ -405,7 +395,7 @@ class A:
"
.trim(),
);
assert_eq!(fixes.counts().sum::<usize>(), 1);
assert_eq!(fixes.values().sum::<usize>(), 1);
assert_eq!(
source_map.markers(),
&[

View File

@@ -1,5 +1,4 @@
use std::borrow::Cow;
use std::collections::hash_map::Entry;
use std::path::Path;
use anyhow::{Result, anyhow};
@@ -23,14 +22,13 @@ use crate::checkers::imports::check_imports;
use crate::checkers::noqa::check_noqa;
use crate::checkers::physical_lines::check_physical_lines;
use crate::checkers::tokens::check_tokens;
use crate::codes::NoqaCode;
use crate::directives::Directives;
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
use crate::fix::{FixResult, fix_file};
use crate::message::Message;
use crate::noqa::add_noqa;
use crate::package::PackageRoot;
use crate::preview::is_py314_support_enabled;
use crate::preview::{is_py314_support_enabled, is_unsupported_syntax_enabled};
use crate::registry::{AsRule, Rule, RuleSet};
#[cfg(any(feature = "test-rules", test))]
use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
@@ -86,53 +84,7 @@ impl LinterResult {
}
}
#[derive(Debug, Default, PartialEq)]
struct FixCount {
rule_name: &'static str,
count: usize,
}
/// A mapping from a noqa code to the corresponding lint name and a count of applied fixes.
#[derive(Debug, Default, PartialEq)]
pub struct FixTable(FxHashMap<NoqaCode, FixCount>);
impl FixTable {
pub fn counts(&self) -> impl Iterator<Item = usize> {
self.0.values().map(|fc| fc.count)
}
pub fn entry(&mut self, code: NoqaCode) -> FixTableEntry {
FixTableEntry(self.0.entry(code))
}
pub fn iter(&self) -> impl Iterator<Item = (NoqaCode, &'static str, usize)> {
self.0
.iter()
.map(|(code, FixCount { rule_name, count })| (*code, *rule_name, *count))
}
pub fn keys(&self) -> impl Iterator<Item = NoqaCode> {
self.0.keys().copied()
}
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
pub struct FixTableEntry<'a>(Entry<'a, NoqaCode, FixCount>);
impl<'a> FixTableEntry<'a> {
pub fn or_default(self, rule_name: &'static str) -> &'a mut usize {
&mut (self
.0
.or_insert(FixCount {
rule_name,
count: 0,
})
.count)
}
}
pub type FixTable = FxHashMap<Rule, usize>;
pub struct FixerResult<'a> {
/// The result returned by the linter, after applying any fixes.
@@ -447,7 +399,11 @@ pub fn check_path(
}
}
let syntax_errors = parsed.unsupported_syntax_errors();
let syntax_errors = if is_unsupported_syntax_enabled(settings) {
parsed.unsupported_syntax_errors()
} else {
&[]
};
diagnostics_to_messages(
diagnostics,
@@ -625,7 +581,7 @@ pub fn lint_fix<'a>(
let mut transformed = Cow::Borrowed(source_kind);
// Track the number of fixed errors across iterations.
let mut fixed = FixTable::default();
let mut fixed = FxHashMap::default();
// As an escape hatch, bail after 100 iterations.
let mut iterations = 0;
@@ -694,7 +650,12 @@ pub fn lint_fix<'a>(
// syntax error. Return the original code.
if has_valid_syntax && has_no_syntax_errors {
if let Some(error) = parsed.errors().first() {
report_fix_syntax_error(path, transformed.source_code(), error, fixed.keys());
report_fix_syntax_error(
path,
transformed.source_code(),
error,
fixed.keys().copied(),
);
return Err(anyhow!("Fix introduced a syntax error"));
}
}
@@ -709,8 +670,8 @@ pub fn lint_fix<'a>(
{
if iterations < MAX_ITERATIONS {
// Count the number of fixed errors.
for (rule, name, count) in applied.iter() {
*fixed.entry(rule).or_default(name) += count;
for (rule, count) in applied {
*fixed.entry(rule).or_default() += count;
}
transformed = Cow::Owned(transformed.updated(fixed_contents, &source_map));
@@ -737,10 +698,10 @@ pub fn lint_fix<'a>(
}
}
fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String {
fn collect_rule_codes(rules: impl IntoIterator<Item = Rule>) -> String {
rules
.into_iter()
.map(|rule| rule.to_string())
.map(|rule| rule.noqa_code().to_string())
.sorted_unstable()
.dedup()
.join(", ")
@@ -748,7 +709,7 @@ fn collect_rule_codes(rules: impl IntoIterator<Item = NoqaCode>) -> String {
#[expect(clippy::print_stderr)]
fn report_failed_to_converge_error(path: &Path, transformed: &str, messages: &[Message]) {
let codes = collect_rule_codes(messages.iter().filter_map(Message::noqa_code));
let codes = collect_rule_codes(messages.iter().filter_map(Message::to_rule));
if cfg!(debug_assertions) {
eprintln!(
"{}{} Failed to converge after {} iterations in `{}` with rule codes {}:---\n{}\n---",
@@ -784,7 +745,7 @@ fn report_fix_syntax_error(
path: &Path,
transformed: &str,
error: &ParseError,
rules: impl IntoIterator<Item = NoqaCode>,
rules: impl IntoIterator<Item = Rule>,
) {
let codes = collect_rule_codes(rules);
if cfg!(debug_assertions) {

View File

@@ -33,7 +33,7 @@ impl Emitter for AzureEmitter {
line = location.line,
col = location.column,
code = message
.noqa_code()
.to_noqa_code()
.map_or_else(String::new, |code| format!("code={code};")),
body = message.body(),
)?;

View File

@@ -33,7 +33,7 @@ impl Emitter for GithubEmitter {
writer,
"::error title=Ruff{code},file={file},line={row},col={column},endLine={end_row},endColumn={end_column}::",
code = message
.noqa_code()
.to_noqa_code()
.map_or_else(String::new, |code| format!(" ({code})")),
file = message.filename(),
row = source_location.line,
@@ -50,7 +50,7 @@ impl Emitter for GithubEmitter {
column = location.column,
)?;
if let Some(code) = message.noqa_code() {
if let Some(code) = message.to_noqa_code() {
write!(writer, " {code}")?;
}

View File

@@ -90,7 +90,7 @@ impl Serialize for SerializedMessages<'_> {
}
fingerprints.insert(message_fingerprint);
let (description, check_name) = if let Some(code) = message.noqa_code() {
let (description, check_name) = if let Some(code) = message.to_noqa_code() {
(message.body().to_string(), code.to_string())
} else {
let description = message.body();

View File

@@ -81,8 +81,8 @@ pub(crate) fn message_to_json_value(message: &Message, context: &EmitterContext)
}
json!({
"code": message.noqa_code().map(|code| code.to_string()),
"url": message.to_url(),
"code": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_rule().and_then(|rule| rule.url()),
"message": message.body(),
"fix": fix,
"cell": notebook_cell_index,

View File

@@ -59,7 +59,7 @@ impl Emitter for JunitEmitter {
body = message.body()
));
let mut case = TestCase::new(
if let Some(code) = message.noqa_code() {
if let Some(code) = message.to_noqa_code() {
format!("org.ruff.{code}")
} else {
"org.ruff".to_string()

View File

@@ -224,22 +224,30 @@ impl Message {
self.fix().is_some()
}
/// Returns the [`Rule`] corresponding to the diagnostic message.
pub fn to_rule(&self) -> Option<Rule> {
if self.is_syntax_error() {
None
} else {
Some(self.name().parse().expect("Expected a valid rule name"))
}
}
/// Returns the [`NoqaCode`] corresponding to the diagnostic message.
pub fn noqa_code(&self) -> Option<NoqaCode> {
pub fn to_noqa_code(&self) -> Option<NoqaCode> {
self.noqa_code
}
/// Returns the URL for the rule documentation, if it exists.
pub fn to_url(&self) -> Option<String> {
if self.is_syntax_error() {
None
} else {
Some(format!(
"{}/rules/{}",
env!("CARGO_PKG_HOMEPAGE"),
self.name()
))
}
// TODO(brent) Rule::url calls Rule::explanation, which calls ViolationMetadata::explain,
// which when derived (seems always to be the case?) is always `Some`, so I think it's
// pretty safe to inline the Rule::url implementation here, using `self.name()`:
//
// format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.name())
//
// at least in the case of diagnostics, I guess syntax errors will return None
self.to_rule().and_then(|rule| rule.url())
}
/// Returns the filename for the message.

View File

@@ -26,7 +26,7 @@ impl Emitter for PylintEmitter {
message.compute_start_location().line
};
let body = if let Some(code) = message.noqa_code() {
let body = if let Some(code) = message.to_noqa_code() {
format!("[{code}] {body}", body = message.body())
} else {
message.body().to_string()

View File

@@ -71,7 +71,7 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.noqa_code().map(|code| code.to_string()),
"value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_url(),
},
"suggestions": rdjson_suggestions(fix.edits(), &source_code),
@@ -84,7 +84,7 @@ fn message_to_rdjson_value(message: &Message) -> Value {
"range": rdjson_range(start_location, end_location),
},
"code": {
"value": message.noqa_code().map(|code| code.to_string()),
"value": message.to_noqa_code().map(|code| code.to_string()),
"url": message.to_url(),
},
})

View File

@@ -8,7 +8,7 @@ use serde_json::json;
use ruff_source_file::OneIndexed;
use crate::VERSION;
use crate::codes::NoqaCode;
use crate::codes::Rule;
use crate::fs::normalize_path;
use crate::message::{Emitter, EmitterContext, Message};
use crate::registry::{Linter, RuleNamespace};
@@ -27,7 +27,7 @@ impl Emitter for SarifEmitter {
.map(SarifResult::from_message)
.collect::<Result<Vec<_>>>()?;
let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.code).collect();
let unique_rules: HashSet<_> = results.iter().filter_map(|result| result.rule).collect();
let mut rules: Vec<SarifRule> = unique_rules.into_iter().map(SarifRule::from).collect();
rules.sort_by(|a, b| a.code.cmp(&b.code));
@@ -61,19 +61,13 @@ struct SarifRule<'a> {
url: Option<String>,
}
impl From<NoqaCode> for SarifRule<'_> {
fn from(code: NoqaCode) -> Self {
let code_str = code.to_string();
// This is a manual re-implementation of Rule::from_code, but we also want the Linter. This
// avoids calling Linter::parse_code twice.
let (linter, suffix) = Linter::parse_code(&code_str).unwrap();
let rule = linter
.all_rules()
.find(|rule| rule.noqa_code().suffix() == suffix)
.expect("Expected a valid noqa code corresponding to a rule");
impl From<Rule> for SarifRule<'_> {
fn from(rule: Rule) -> Self {
let code = rule.noqa_code().to_string();
let (linter, _) = Linter::parse_code(&code).unwrap();
Self {
name: rule.into(),
code: code_str,
code,
linter: linter.name(),
summary: rule.message_formats()[0],
explanation: rule.explanation(),
@@ -112,7 +106,7 @@ impl Serialize for SarifRule<'_> {
#[derive(Debug)]
struct SarifResult {
code: Option<NoqaCode>,
rule: Option<Rule>,
level: String,
message: String,
uri: String,
@@ -129,7 +123,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
code: message.noqa_code(),
rule: message.to_rule(),
level: "error".to_string(),
message: message.body().to_string(),
uri: url::Url::from_file_path(&path)
@@ -149,7 +143,7 @@ impl SarifResult {
let end_location = message.compute_end_location();
let path = normalize_path(&*message.filename());
Ok(Self {
code: message.noqa_code(),
rule: message.to_rule(),
level: "error".to_string(),
message: message.body().to_string(),
uri: path.display().to_string(),
@@ -184,7 +178,7 @@ impl Serialize for SarifResult {
}
}
}],
"ruleId": self.code.map(|code| code.to_string()),
"ruleId": self.rule.map(|rule| rule.noqa_code().to_string()),
})
.serialize(serializer)
}

View File

@@ -151,7 +151,7 @@ impl Display for RuleCodeAndBody<'_> {
if let Some(fix) = self.message.fix() {
// Do not display an indicator for inapplicable fixes
if fix.applies(self.unsafe_fixes.required_applicability()) {
if let Some(code) = self.message.noqa_code() {
if let Some(code) = self.message.to_noqa_code() {
write!(f, "{} ", code.to_string().red().bold())?;
}
return write!(
@@ -164,7 +164,7 @@ impl Display for RuleCodeAndBody<'_> {
}
}
if let Some(code) = self.message.noqa_code() {
if let Some(code) = self.message.to_noqa_code() {
write!(
f,
"{code} {body}",
@@ -254,7 +254,7 @@ impl Display for MessageCodeFrame<'_> {
let label = self
.message
.noqa_code()
.to_noqa_code()
.map_or_else(String::new, |code| code.to_string());
let line_start = self.notebook_index.map_or_else(

View File

@@ -12,14 +12,13 @@ use log::warn;
use ruff_python_trivia::{CommentRanges, Cursor, indentation_at_offset};
use ruff_source_file::{LineEnding, LineRanges};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::FxHashSet;
use crate::Edit;
use crate::Locator;
use crate::codes::NoqaCode;
use crate::fs::relativize_path;
use crate::message::Message;
use crate::registry::Rule;
use crate::registry::{Rule, RuleSet};
use crate::rule_redirects::get_redirect_target;
/// Generates an array of edits that matches the length of `messages`.
@@ -781,7 +780,7 @@ fn build_noqa_edits_by_diagnostic(
if let Some(noqa_edit) = generate_noqa_edit(
comment.directive,
comment.line,
FxHashSet::from_iter([comment.code]),
RuleSet::from_rule(comment.rule),
locator,
line_ending,
) {
@@ -817,7 +816,7 @@ fn build_noqa_edits_by_line<'a>(
offset,
matches
.into_iter()
.map(|NoqaComment { code, .. }| code)
.map(|NoqaComment { rule, .. }| rule)
.collect(),
locator,
line_ending,
@@ -830,7 +829,7 @@ fn build_noqa_edits_by_line<'a>(
struct NoqaComment<'a> {
line: TextSize,
code: NoqaCode,
rule: Rule,
directive: Option<&'a Directive<'a>>,
}
@@ -846,11 +845,13 @@ fn find_noqa_comments<'a>(
// Mark any non-ignored diagnostics.
for message in messages {
let Some(code) = message.noqa_code() else {
let Some(rule) = message.to_rule() else {
comments_by_line.push(None);
continue;
};
let code = rule.noqa_code();
match &exemption {
FileExemption::All(_) => {
// If the file is exempted, don't add any noqa directives.
@@ -899,7 +900,7 @@ fn find_noqa_comments<'a>(
if !codes.includes(code) {
comments_by_line.push(Some(NoqaComment {
line: directive_line.start(),
code,
rule,
directive: Some(directive),
}));
}
@@ -911,7 +912,7 @@ fn find_noqa_comments<'a>(
// There's no existing noqa directive that suppresses the diagnostic.
comments_by_line.push(Some(NoqaComment {
line: locator.line_start(noqa_offset),
code,
rule,
directive: None,
}));
}
@@ -921,7 +922,7 @@ fn find_noqa_comments<'a>(
struct NoqaEdit<'a> {
edit_range: TextRange,
noqa_codes: FxHashSet<NoqaCode>,
rules: RuleSet,
codes: Option<&'a Codes<'a>>,
line_ending: LineEnding,
}
@@ -940,15 +941,18 @@ impl NoqaEdit<'_> {
Some(codes) => {
push_codes(
writer,
self.noqa_codes
self.rules
.iter()
.map(ToString::to_string)
.map(|rule| rule.noqa_code().to_string())
.chain(codes.iter().map(ToString::to_string))
.sorted_unstable(),
);
}
None => {
push_codes(writer, self.noqa_codes.iter().map(ToString::to_string));
push_codes(
writer,
self.rules.iter().map(|rule| rule.noqa_code().to_string()),
);
}
}
write!(writer, "{}", self.line_ending.as_str()).unwrap();
@@ -964,7 +968,7 @@ impl Ranged for NoqaEdit<'_> {
fn generate_noqa_edit<'a>(
directive: Option<&'a Directive>,
offset: TextSize,
noqa_codes: FxHashSet<NoqaCode>,
rules: RuleSet,
locator: &Locator,
line_ending: LineEnding,
) -> Option<NoqaEdit<'a>> {
@@ -993,7 +997,7 @@ fn generate_noqa_edit<'a>(
Some(NoqaEdit {
edit_range,
noqa_codes,
rules,
codes,
line_ending,
})

View File

@@ -7,6 +7,17 @@
use crate::settings::LinterSettings;
// https://github.com/astral-sh/ruff/issues/17412
// https://github.com/astral-sh/ruff/issues/11934
pub(crate) const fn is_semantic_errors_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/16429
pub(crate) const fn is_unsupported_syntax_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
pub(crate) const fn is_py314_support_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
@@ -18,11 +29,23 @@ pub(crate) const fn is_full_path_match_source_strategy_enabled(settings: &Linter
// Rule-specific behavior
// https://github.com/astral-sh/ruff/pull/17136
pub(crate) const fn is_shell_injection_only_trusted_input_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/15541
pub(crate) const fn is_suspicious_function_reference_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/7501
pub(crate) const fn is_bool_subtype_of_annotation_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/10759
pub(crate) const fn is_comprehension_with_min_max_sum_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@@ -40,11 +63,21 @@ pub(crate) const fn is_bad_version_info_in_non_stub_enabled(settings: &LinterSet
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/12676
pub(crate) const fn is_fix_future_annotations_in_stub_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/11074
pub(crate) const fn is_only_add_return_none_at_end_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/12796
pub(crate) const fn is_simplify_ternary_to_binary_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/16719
pub(crate) const fn is_fix_manual_dict_comprehension_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@@ -71,6 +104,13 @@ pub(crate) const fn is_unicode_to_unicode_confusables_enabled(settings: &LinterS
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/17078
pub(crate) const fn is_support_slices_in_literal_concatenation_enabled(
settings: &LinterSettings,
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/11370
pub(crate) const fn is_undefined_export_in_dunder_init_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@@ -81,9 +121,16 @@ pub(crate) const fn is_allow_nested_roots_enabled(settings: &LinterSettings) ->
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/18208
pub(crate) const fn is_multiple_with_statements_fix_safe_enabled(
settings: &LinterSettings,
) -> bool {
// https://github.com/astral-sh/ruff/pull/17061
pub(crate) const fn is_check_file_level_directives_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/17644
pub(crate) const fn is_readlines_in_for_fix_safe_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
pub(crate) const fn multiple_with_statements_fix_safe_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@@ -1,7 +1,6 @@
//! Remnant of the registry of all [`Rule`] implementations, now it's reexporting from codes.rs
//! with some helper symbols
use ruff_db::diagnostic::LintName;
use strum_macros::EnumIter;
pub use codes::Rule;
@@ -349,18 +348,9 @@ impl Rule {
/// Return the URL for the rule documentation, if it exists.
pub fn url(&self) -> Option<String> {
self.explanation().is_some().then(|| {
format!(
"{}/rules/{name}",
env!("CARGO_PKG_HOMEPAGE"),
name = self.name()
)
})
}
pub fn name(&self) -> LintName {
let name: &'static str = self.into();
LintName::of(name)
self.explanation()
.is_some()
.then(|| format!("{}/rules/{}", env!("CARGO_PKG_HOMEPAGE"), self.as_ref()))
}
}
@@ -431,7 +421,7 @@ pub mod clap_completion {
fn possible_values(&self) -> Option<Box<dyn Iterator<Item = PossibleValue> + '_>> {
Some(Box::new(Rule::iter().map(|rule| {
let name = rule.noqa_code().to_string();
let help = rule.name().as_str();
let help = rule.as_ref().to_string();
PossibleValue::new(name).help(help)
})))
}
@@ -453,7 +443,7 @@ mod tests {
assert!(
rule.explanation().is_some(),
"Rule {} is missing documentation",
rule.name()
rule.as_ref()
);
}
}
@@ -470,10 +460,10 @@ mod tests {
.collect();
for rule in Rule::iter() {
let rule_name = rule.name();
let rule_name = rule.as_ref();
for pattern in &patterns {
assert!(
!pattern.matches(&rule_name),
!pattern.matches(rule_name),
"{rule_name} does not match naming convention, see CONTRIBUTING.md"
);
}

View File

@@ -302,8 +302,9 @@ impl Display for RuleSet {
} else {
writeln!(f, "[")?;
for rule in self {
let name = rule.as_ref();
let code = rule.noqa_code();
writeln!(f, "\t{name} ({code}),", name = rule.name())?;
writeln!(f, "\t{name} ({code}),")?;
}
write!(f, "]")?;
}

View File

@@ -485,7 +485,8 @@ pub mod clap_completion {
prefix.linter().common_prefix(),
prefix.short_code()
);
return Some(PossibleValue::new(code).help(rule.name().as_str()));
let name: &'static str = rule.into();
return Some(PossibleValue::new(code).help(name));
}
None

View File

@@ -3,6 +3,7 @@ pub(crate) mod rules;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -17,7 +18,7 @@ mod tests {
#[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002_1.py"))]
#[test_case(Rule::FastApiUnusedPathParameter, Path::new("FAST003.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("fastapi").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),
@@ -31,7 +32,7 @@ mod tests {
#[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002_0.py"))]
#[test_case(Rule::FastApiNonAnnotatedDependency, Path::new("FAST002_1.py"))]
fn rules_py38(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}_py38", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}_py38", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("fastapi").join(path).as_path(),
&settings::LinterSettings {

View File

@@ -104,6 +104,7 @@ mod tests {
#[test_case(Rule::SuspiciousURLOpenUsage, Path::new("S310.py"))]
#[test_case(Rule::SuspiciousNonCryptographicRandomUsage, Path::new("S311.py"))]
#[test_case(Rule::SuspiciousTelnetUsage, Path::new("S312.py"))]
#[test_case(Rule::SubprocessWithoutShellEqualsTrue, Path::new("S603.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",

View File

@@ -7,6 +7,7 @@ use ruff_python_semantic::SemanticModel;
use ruff_text_size::Ranged;
use crate::Violation;
use crate::preview::is_shell_injection_only_trusted_input_enabled;
use crate::{
checkers::ast::Checker, registry::Rule, rules::flake8_bandit::helpers::string_literal,
};
@@ -324,7 +325,9 @@ pub(crate) fn shell_injection(checker: &Checker, call: &ast::ExprCall) {
}
// S603
_ => {
if !is_trusted_input(arg) {
if !is_trusted_input(arg)
|| !is_shell_injection_only_trusted_input_enabled(checker.settings)
{
if checker.enabled(Rule::SubprocessWithoutShellEqualsTrue) {
checker.report_diagnostic(
SubprocessWithoutShellEqualsTrue,

View File

@@ -106,6 +106,74 @@ S603.py:21:1: S603 `subprocess` call: check for execution of untrusted input
23 | # Literals are fine, they're trusted.
|
S603.py:24:1: S603 `subprocess` call: check for execution of untrusted input
|
23 | # Literals are fine, they're trusted.
24 | run("true")
| ^^^ S603
25 | Popen(["true"])
26 | Popen("true", shell=False)
|
S603.py:25:1: S603 `subprocess` call: check for execution of untrusted input
|
23 | # Literals are fine, they're trusted.
24 | run("true")
25 | Popen(["true"])
| ^^^^^ S603
26 | Popen("true", shell=False)
27 | call("true", shell=False)
|
S603.py:26:1: S603 `subprocess` call: check for execution of untrusted input
|
24 | run("true")
25 | Popen(["true"])
26 | Popen("true", shell=False)
| ^^^^^ S603
27 | call("true", shell=False)
28 | check_call("true", shell=False)
|
S603.py:27:1: S603 `subprocess` call: check for execution of untrusted input
|
25 | Popen(["true"])
26 | Popen("true", shell=False)
27 | call("true", shell=False)
| ^^^^ S603
28 | check_call("true", shell=False)
29 | check_output("true", shell=False)
|
S603.py:28:1: S603 `subprocess` call: check for execution of untrusted input
|
26 | Popen("true", shell=False)
27 | call("true", shell=False)
28 | check_call("true", shell=False)
| ^^^^^^^^^^ S603
29 | check_output("true", shell=False)
30 | run("true", shell=False)
|
S603.py:29:1: S603 `subprocess` call: check for execution of untrusted input
|
27 | call("true", shell=False)
28 | check_call("true", shell=False)
29 | check_output("true", shell=False)
| ^^^^^^^^^^^^ S603
30 | run("true", shell=False)
|
S603.py:30:1: S603 `subprocess` call: check for execution of untrusted input
|
28 | check_call("true", shell=False)
29 | check_output("true", shell=False)
30 | run("true", shell=False)
| ^^^ S603
31 |
32 | # Not through assignments though.
|
S603.py:34:1: S603 `subprocess` call: check for execution of untrusted input
|
32 | # Not through assignments though.
@@ -116,6 +184,15 @@ S603.py:34:1: S603 `subprocess` call: check for execution of untrusted input
36 | # Instant named expressions are fine.
|
S603.py:37:1: S603 `subprocess` call: check for execution of untrusted input
|
36 | # Instant named expressions are fine.
37 | run(c := "true")
| ^^^ S603
38 |
39 | # But non-instant are not.
|
S603.py:41:1: S603 `subprocess` call: check for execution of untrusted input
|
39 | # But non-instant are not.
@@ -123,3 +200,20 @@ S603.py:41:1: S603 `subprocess` call: check for execution of untrusted input
41 | run(e)
| ^^^ S603
|
S603.py:46:1: S603 `subprocess` call: check for execution of untrusted input
|
44 | # https://github.com/astral-sh/ruff/issues/17798
45 | # Tuple literals are trusted
46 | check_output(("literal", "cmd", "using", "tuple"), text=True)
| ^^^^^^^^^^^^ S603
47 | Popen(("literal", "cmd", "using", "tuple"))
|
S603.py:47:1: S603 `subprocess` call: check for execution of untrusted input
|
45 | # Tuple literals are trusted
46 | check_output(("literal", "cmd", "using", "tuple"), text=True)
47 | Popen(("literal", "cmd", "using", "tuple"))
| ^^^^^ S603
|

View File

@@ -0,0 +1,125 @@
---
source: crates/ruff_linter/src/rules/flake8_bandit/mod.rs
---
S603.py:5:1: S603 `subprocess` call: check for execution of untrusted input
|
3 | # Different Popen wrappers are checked.
4 | a = input()
5 | Popen(a, shell=False)
| ^^^^^ S603
6 | call(a, shell=False)
7 | check_call(a, shell=False)
|
S603.py:6:1: S603 `subprocess` call: check for execution of untrusted input
|
4 | a = input()
5 | Popen(a, shell=False)
6 | call(a, shell=False)
| ^^^^ S603
7 | check_call(a, shell=False)
8 | check_output(a, shell=False)
|
S603.py:7:1: S603 `subprocess` call: check for execution of untrusted input
|
5 | Popen(a, shell=False)
6 | call(a, shell=False)
7 | check_call(a, shell=False)
| ^^^^^^^^^^ S603
8 | check_output(a, shell=False)
9 | run(a, shell=False)
|
S603.py:8:1: S603 `subprocess` call: check for execution of untrusted input
|
6 | call(a, shell=False)
7 | check_call(a, shell=False)
8 | check_output(a, shell=False)
| ^^^^^^^^^^^^ S603
9 | run(a, shell=False)
|
S603.py:9:1: S603 `subprocess` call: check for execution of untrusted input
|
7 | check_call(a, shell=False)
8 | check_output(a, shell=False)
9 | run(a, shell=False)
| ^^^ S603
10 |
11 | # Falsey values are treated as false.
|
S603.py:12:1: S603 `subprocess` call: check for execution of untrusted input
|
11 | # Falsey values are treated as false.
12 | Popen(a, shell=0)
| ^^^^^ S603
13 | Popen(a, shell=[])
14 | Popen(a, shell={})
|
S603.py:13:1: S603 `subprocess` call: check for execution of untrusted input
|
11 | # Falsey values are treated as false.
12 | Popen(a, shell=0)
13 | Popen(a, shell=[])
| ^^^^^ S603
14 | Popen(a, shell={})
15 | Popen(a, shell=None)
|
S603.py:14:1: S603 `subprocess` call: check for execution of untrusted input
|
12 | Popen(a, shell=0)
13 | Popen(a, shell=[])
14 | Popen(a, shell={})
| ^^^^^ S603
15 | Popen(a, shell=None)
|
S603.py:15:1: S603 `subprocess` call: check for execution of untrusted input
|
13 | Popen(a, shell=[])
14 | Popen(a, shell={})
15 | Popen(a, shell=None)
| ^^^^^ S603
16 |
17 | # Unknown values are treated as falsey.
|
S603.py:18:1: S603 `subprocess` call: check for execution of untrusted input
|
17 | # Unknown values are treated as falsey.
18 | Popen(a, shell=True if True else False)
| ^^^^^ S603
19 |
20 | # No value is also caught.
|
S603.py:21:1: S603 `subprocess` call: check for execution of untrusted input
|
20 | # No value is also caught.
21 | Popen(a)
| ^^^^^ S603
22 |
23 | # Literals are fine, they're trusted.
|
S603.py:34:1: S603 `subprocess` call: check for execution of untrusted input
|
32 | # Not through assignments though.
33 | cmd = ["true"]
34 | run(cmd)
| ^^^ S603
35 |
36 | # Instant named expressions are fine.
|
S603.py:41:1: S603 `subprocess` call: check for execution of untrusted input
|
39 | # But non-instant are not.
40 | (e := "echo")
41 | run(e)
| ^^^ S603
|

View File

@@ -12,6 +12,7 @@ mod tests {
use crate::registry::Rule;
use crate::settings::LinterSettings;
use crate::settings::types::PreviewMode;
use crate::test::test_path;
use crate::{assert_messages, settings};
@@ -28,6 +29,24 @@ mod tests {
Ok(())
}
#[test_case(Rule::BooleanTypeHintPositionalArgument, Path::new("FBT.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",
rule_code.noqa_code(),
path.to_string_lossy()
);
let diagnostics = test_path(
Path::new("flake8_boolean_trap").join(path).as_path(),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rule(rule_code)
},
)?;
assert_messages!(snapshot, diagnostics);
Ok(())
}
#[test]
fn extend_allowed_callable() -> Result<()> {
let diagnostics = test_path(

View File

@@ -7,12 +7,12 @@ use ruff_python_semantic::analyze::visibility;
use crate::Violation;
use crate::checkers::ast::Checker;
use crate::preview::is_bool_subtype_of_annotation_enabled;
use crate::rules::flake8_boolean_trap::helpers::is_allowed_func_def;
/// ## What it does
/// Checks for the use of boolean positional arguments in function definitions,
/// as determined by the presence of a type hint containing `bool` as an
/// evident subtype - e.g. `bool`, `bool | int`, `typing.Optional[bool]`, etc.
/// as determined by the presence of a `bool` type hint.
///
/// ## Why is this bad?
/// Calling a function with boolean positional arguments is confusing as the
@@ -30,6 +30,9 @@ use crate::rules::flake8_boolean_trap::helpers::is_allowed_func_def;
/// Dunder methods that define operators are exempt from this rule, as are
/// setters and `@override` definitions.
///
/// In [preview], this rule will also flag annotations that include boolean
/// variants, like `bool | int`.
///
/// ## Example
///
/// ```python
@@ -93,6 +96,8 @@ use crate::rules::flake8_boolean_trap::helpers::is_allowed_func_def;
/// ## References
/// - [Python documentation: Calls](https://docs.python.org/3/reference/expressions.html#calls)
/// - [_How to Avoid “The Boolean Trap”_ by Adam Johnson](https://adamj.eu/tech/2021/07/10/python-type-hints-how-to-avoid-the-boolean-trap/)
///
/// [preview]: https://docs.astral.sh/ruff/preview/
#[derive(ViolationMetadata)]
pub(crate) struct BooleanTypeHintPositionalArgument;
@@ -123,8 +128,14 @@ pub(crate) fn boolean_type_hint_positional_argument(
let Some(annotation) = parameter.annotation() else {
continue;
};
if !match_annotation_to_complex_bool(annotation, checker.semantic()) {
continue;
if is_bool_subtype_of_annotation_enabled(checker.settings) {
if !match_annotation_to_complex_bool(annotation, checker.semantic()) {
continue;
}
} else {
if !match_annotation_to_literal_bool(annotation) {
continue;
}
}
// Allow Boolean type hints in setters.
@@ -150,6 +161,17 @@ pub(crate) fn boolean_type_hint_positional_argument(
}
}
/// Returns `true` if the annotation is a boolean type hint (e.g., `bool`).
fn match_annotation_to_literal_bool(annotation: &Expr) -> bool {
match annotation {
// Ex) `True`
Expr::Name(name) => &name.id == "bool",
// Ex) `"True"`
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "bool",
_ => false,
}
}
/// Returns `true` if the annotation is a boolean type hint (e.g., `bool`), or a type hint that
/// includes boolean as a variant (e.g., `bool | int`).
fn match_annotation_to_complex_bool(annotation: &Expr, semantic: &SemanticModel) -> bool {

View File

@@ -1,5 +1,6 @@
---
source: crates/ruff_linter/src/rules/flake8_boolean_trap/mod.rs
snapshot_kind: text
---
FBT.py:4:5: FBT001 Boolean-typed positional argument in function definition
|
@@ -88,17 +89,3 @@ FBT.py:90:19: FBT001 Boolean-typed positional argument in function definition
| ^^^^^ FBT001
91 | pass
|
FBT.py:100:10: FBT001 Boolean-typed positional argument in function definition
|
100 | def func(x: Union[list, Optional[int | str | float | bool]]):
| ^ FBT001
101 | pass
|
FBT.py:104:10: FBT001 Boolean-typed positional argument in function definition
|
104 | def func(x: bool | str):
| ^ FBT001
105 | pass
|

View File

@@ -0,0 +1,105 @@
---
source: crates/ruff_linter/src/rules/flake8_boolean_trap/mod.rs
snapshot_kind: text
---
FBT.py:4:5: FBT001 Boolean-typed positional argument in function definition
|
2 | posonly_nohint,
3 | posonly_nonboolhint: int,
4 | posonly_boolhint: bool,
| ^^^^^^^^^^^^^^^^ FBT001
5 | posonly_boolstrhint: "bool",
6 | /,
|
FBT.py:5:5: FBT001 Boolean-typed positional argument in function definition
|
3 | posonly_nonboolhint: int,
4 | posonly_boolhint: bool,
5 | posonly_boolstrhint: "bool",
| ^^^^^^^^^^^^^^^^^^^ FBT001
6 | /,
7 | offset,
|
FBT.py:10:5: FBT001 Boolean-typed positional argument in function definition
|
8 | posorkw_nonvalued_nohint,
9 | posorkw_nonvalued_nonboolhint: int,
10 | posorkw_nonvalued_boolhint: bool,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
11 | posorkw_nonvalued_boolstrhint: "bool",
12 | posorkw_boolvalued_nohint=True,
|
FBT.py:11:5: FBT001 Boolean-typed positional argument in function definition
|
9 | posorkw_nonvalued_nonboolhint: int,
10 | posorkw_nonvalued_boolhint: bool,
11 | posorkw_nonvalued_boolstrhint: "bool",
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
12 | posorkw_boolvalued_nohint=True,
13 | posorkw_boolvalued_nonboolhint: int = True,
|
FBT.py:14:5: FBT001 Boolean-typed positional argument in function definition
|
12 | posorkw_boolvalued_nohint=True,
13 | posorkw_boolvalued_nonboolhint: int = True,
14 | posorkw_boolvalued_boolhint: bool = True,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
15 | posorkw_boolvalued_boolstrhint: "bool" = True,
16 | posorkw_nonboolvalued_nohint=1,
|
FBT.py:15:5: FBT001 Boolean-typed positional argument in function definition
|
13 | posorkw_boolvalued_nonboolhint: int = True,
14 | posorkw_boolvalued_boolhint: bool = True,
15 | posorkw_boolvalued_boolstrhint: "bool" = True,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
16 | posorkw_nonboolvalued_nohint=1,
17 | posorkw_nonboolvalued_nonboolhint: int = 2,
|
FBT.py:18:5: FBT001 Boolean-typed positional argument in function definition
|
16 | posorkw_nonboolvalued_nohint=1,
17 | posorkw_nonboolvalued_nonboolhint: int = 2,
18 | posorkw_nonboolvalued_boolhint: bool = 3,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
19 | posorkw_nonboolvalued_boolstrhint: "bool" = 4,
20 | *,
|
FBT.py:19:5: FBT001 Boolean-typed positional argument in function definition
|
17 | posorkw_nonboolvalued_nonboolhint: int = 2,
18 | posorkw_nonboolvalued_boolhint: bool = 3,
19 | posorkw_nonboolvalued_boolstrhint: "bool" = 4,
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FBT001
20 | *,
21 | kwonly_nonvalued_nohint,
|
FBT.py:90:19: FBT001 Boolean-typed positional argument in function definition
|
89 | # FBT001: Boolean positional arg in function definition
90 | def foo(self, value: bool) -> None:
| ^^^^^ FBT001
91 | pass
|
FBT.py:100:10: FBT001 Boolean-typed positional argument in function definition
|
100 | def func(x: Union[list, Optional[int | str | float | bool]]):
| ^ FBT001
101 | pass
|
FBT.py:104:10: FBT001 Boolean-typed positional argument in function definition
|
104 | def func(x: bool | str):
| ^ FBT001
105 | pass
|

View File

@@ -16,7 +16,7 @@ mod tests {
#[test_case(Rule::LineContainsTodo; "T003")]
#[test_case(Rule::LineContainsXxx; "T004")]
fn rules(rule_code: Rule) -> Result<()> {
let snapshot = format!("{}_T00.py", rule_code.name());
let snapshot = format!("{}_T00.py", rule_code.as_ref());
let diagnostics = test_path(
Path::new("flake8_fixme/T00.py"),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -29,7 +29,7 @@ mod tests {
#[test_case(Rule::FormatInGetTextFuncCall, Path::new("INT002.py"))]
#[test_case(Rule::PrintfInGetTextFuncCall, Path::new("INT003.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_gettext").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -11,6 +11,7 @@ mod tests {
use crate::registry::Rule;
use crate::rules::pep8_naming;
use crate::settings::types::PreviewMode;
use crate::test::test_path;
use crate::{assert_messages, settings};
@@ -171,4 +172,22 @@ mod tests {
assert_messages!(snapshot, diagnostics);
Ok(())
}
#[test_case(Rule::FutureAnnotationsInStub, Path::new("PYI044.pyi"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",
rule_code.noqa_code(),
path.to_string_lossy()
);
let diagnostics = test_path(
Path::new("flake8_pyi").join(path).as_path(),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rule(rule_code)
},
)?;
assert_messages!(snapshot, diagnostics);
Ok(())
}
}

View File

@@ -3,7 +3,7 @@ use ruff_python_ast::StmtImportFrom;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use crate::{Fix, FixAvailability, Violation};
use crate::{checkers::ast::Checker, fix};
use crate::{checkers::ast::Checker, fix, preview::is_fix_future_annotations_in_stub_enabled};
/// ## What it does
/// Checks for the presence of the `from __future__ import annotations` import
@@ -55,18 +55,20 @@ pub(crate) fn from_future_import(checker: &Checker, target: &StmtImportFrom) {
let mut diagnostic = checker.report_diagnostic(FutureAnnotationsInStub, *range);
let stmt = checker.semantic().current_statement();
if is_fix_future_annotations_in_stub_enabled(checker.settings) {
let stmt = checker.semantic().current_statement();
diagnostic.try_set_fix(|| {
let edit = fix::edits::remove_unused_imports(
std::iter::once("annotations"),
stmt,
None,
checker.locator(),
checker.stylist(),
checker.indexer(),
)?;
diagnostic.try_set_fix(|| {
let edit = fix::edits::remove_unused_imports(
std::iter::once("annotations"),
stmt,
None,
checker.locator(),
checker.stylist(),
checker.indexer(),
)?;
Ok(Fix::safe_edit(edit))
});
Ok(Fix::safe_edit(edit))
});
}
}

View File

@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
---
PYI044.pyi:2:1: PYI044 [*] `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
PYI044.pyi:2:1: PYI044 `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
|
1 | # Bad import.
2 | from __future__ import annotations # PYI044.
@@ -10,14 +10,7 @@ PYI044.pyi:2:1: PYI044 [*] `from __future__ import annotations` has no effect in
|
= help: Remove `from __future__ import annotations`
Safe fix
1 1 | # Bad import.
2 |-from __future__ import annotations # PYI044.
3 2 | from __future__ import annotations, with_statement # PYI044.
4 3 |
5 4 | # Good imports.
PYI044.pyi:3:1: PYI044 [*] `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
PYI044.pyi:3:1: PYI044 `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
|
1 | # Bad import.
2 | from __future__ import annotations # PYI044.
@@ -27,12 +20,3 @@ PYI044.pyi:3:1: PYI044 [*] `from __future__ import annotations` has no effect in
5 | # Good imports.
|
= help: Remove `from __future__ import annotations`
Safe fix
1 1 | # Bad import.
2 2 | from __future__ import annotations # PYI044.
3 |-from __future__ import annotations, with_statement # PYI044.
3 |+from __future__ import with_statement # PYI044.
4 4 |
5 5 | # Good imports.
6 6 | from __future__ import with_statement

View File

@@ -0,0 +1,38 @@
---
source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs
---
PYI044.pyi:2:1: PYI044 [*] `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
|
1 | # Bad import.
2 | from __future__ import annotations # PYI044.
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI044
3 | from __future__ import annotations, with_statement # PYI044.
|
= help: Remove `from __future__ import annotations`
Safe fix
1 1 | # Bad import.
2 |-from __future__ import annotations # PYI044.
3 2 | from __future__ import annotations, with_statement # PYI044.
4 3 |
5 4 | # Good imports.
PYI044.pyi:3:1: PYI044 [*] `from __future__ import annotations` has no effect in stub files, since type checkers automatically treat stubs as having those semantics
|
1 | # Bad import.
2 | from __future__ import annotations # PYI044.
3 | from __future__ import annotations, with_statement # PYI044.
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI044
4 |
5 | # Good imports.
|
= help: Remove `from __future__ import annotations`
Safe fix
1 1 | # Bad import.
2 2 | from __future__ import annotations # PYI044.
3 |-from __future__ import annotations, with_statement # PYI044.
3 |+from __future__ import with_statement # PYI044.
4 4 |
5 5 | # Good imports.
6 6 | from __future__ import with_statement

View File

@@ -3,6 +3,7 @@ pub(crate) mod rules;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -14,7 +15,7 @@ mod tests {
#[test_case(Rule::UnnecessaryParenOnRaiseException, Path::new("RSE102.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_raise").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -4,6 +4,7 @@ pub mod settings;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use crate::registry::Rule;
@@ -17,7 +18,7 @@ mod tests {
#[test_case(Rule::PrivateMemberAccess, Path::new("SLF001.py"))]
#[test_case(Rule::PrivateMemberAccess, Path::new("SLF001_1.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_self").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -58,6 +58,7 @@ mod tests {
Ok(())
}
#[test_case(Rule::IfElseBlockInsteadOfIfExp, Path::new("SIM108.py"))]
#[test_case(Rule::MultipleWithStatements, Path::new("SIM117.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(

View File

@@ -10,7 +10,7 @@ use super::fix_with;
use crate::Fix;
use crate::checkers::ast::Checker;
use crate::fix::edits::fits;
use crate::preview::is_multiple_with_statements_fix_safe_enabled;
use crate::preview::multiple_with_statements_fix_safe_enabled;
use crate::{FixAvailability, Violation};
/// ## What it does
@@ -195,7 +195,7 @@ pub(crate) fn multiple_with_statements(
checker.settings.tab_size,
)
}) {
if is_multiple_with_statements_fix_safe_enabled(checker.settings) {
if multiple_with_statements_fix_safe_enabled(checker.settings) {
Ok(Some(Fix::safe_edit(edit)))
} else {
Ok(Some(Fix::unsafe_edit(edit)))

View File

@@ -7,14 +7,13 @@ use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::edits::fits;
use crate::preview::is_simplify_ternary_to_binary_enabled;
use crate::{Edit, Fix, FixAvailability, Violation};
/// ## What it does
/// Check for `if`-`else`-blocks that can be replaced with a ternary
/// or binary operator.
///
/// The lint is suppressed if the suggested replacement would exceed
/// the maximum line length configured in [pycodestyle.max-line-length].
/// Check for `if`-`else`-blocks that can be replaced with a ternary operator.
/// Moreover, in [preview], check if these ternary expressions can be
/// further simplified to binary expressions.
///
/// ## Why is this bad?
/// `if`-`else`-blocks that assign a value to a variable in both branches can
@@ -34,7 +33,7 @@ use crate::{Edit, Fix, FixAvailability, Violation};
/// bar = x if foo else y
/// ```
///
/// Or:
/// Or, in [preview]:
///
/// ```python
/// if cond:
@@ -58,8 +57,8 @@ use crate::{Edit, Fix, FixAvailability, Violation};
/// ## References
/// - [Python documentation: Conditional expressions](https://docs.python.org/3/reference/expressions.html#conditional-expressions)
///
/// [preview]: https://docs.astral.sh/ruff/preview/
/// [code coverage]: https://github.com/nedbat/coveragepy/issues/509
/// [pycodestyle.max-line-length]: https://docs.astral.sh/ruff/settings/#lint_pycodestyle_max-line-length
#[derive(ViolationMetadata)]
pub(crate) struct IfElseBlockInsteadOfIfExp {
/// The ternary or binary expression to replace the `if`-`else`-block.
@@ -184,12 +183,16 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
//
// The match statement below implements the following
// logic:
// - If `test == body_value`, replace with `target_var = test or else_value`
// - If `test == not body_value`, replace with `target_var = body_value and else_value`
// - If `not test == body_value`, replace with `target_var = body_value and else_value`
// - If `test == body_value` and preview enabled, replace with `target_var = test or else_value`
// - If `test == not body_value` and preview enabled, replace with `target_var = body_value and else_value`
// - If `not test == body_value` and preview enabled, replace with `target_var = body_value and else_value`
// - Otherwise, replace with `target_var = body_value if test else else_value`
let (contents, assignment_kind) = match (test, body_value) {
(test_node, body_node)
let (contents, assignment_kind) = match (
is_simplify_ternary_to_binary_enabled(checker.settings),
test,
body_value,
) {
(true, test_node, body_node)
if ComparableExpr::from(test_node) == ComparableExpr::from(body_node)
&& !contains_effect(test_node, |id| checker.semantic().has_builtin_binding(id)) =>
{
@@ -197,7 +200,7 @@ pub(crate) fn if_else_block_instead_of_if_exp(checker: &Checker, stmt_if: &ast::
let binary = assignment_binary_or(target_var, body_value, else_value);
(checker.generator().stmt(&binary), AssignmentKind::Binary)
}
(test_node, body_node)
(true, test_node, body_node)
if (test_node.as_unary_op_expr().is_some_and(|op_expr| {
op_expr.op.is_not()
&& ComparableExpr::from(&op_expr.operand) == ComparableExpr::from(body_node)

View File

@@ -118,7 +118,7 @@ SIM108.py:117:1: SIM108 Use ternary operator `x = 3 if True else 5` instead of `
|
= help: Replace `if`-`else`-block with `x = 3 if True else 5`
SIM108.py:141:1: SIM108 [*] Use binary operator `z = cond or other_cond` instead of `if`-`else`-block
SIM108.py:141:1: SIM108 [*] Use ternary operator `z = cond if cond else other_cond` instead of `if`-`else`-block
|
139 | # SIM108 - should suggest
140 | # z = cond or other_cond
@@ -130,7 +130,7 @@ SIM108.py:141:1: SIM108 [*] Use binary operator `z = cond or other_cond` instead
145 |
146 | # SIM108 - should suggest
|
= help: Replace `if`-`else`-block with `z = cond or other_cond`
= help: Replace `if`-`else`-block with `z = cond if cond else other_cond`
Unsafe fix
138 138 |
@@ -140,12 +140,12 @@ SIM108.py:141:1: SIM108 [*] Use binary operator `z = cond or other_cond` instead
142 |- z = cond
143 |-else:
144 |- z = other_cond
141 |+z = cond or other_cond
141 |+z = cond if cond else other_cond
145 142 |
146 143 | # SIM108 - should suggest
147 144 | # z = cond and other_cond
SIM108.py:148:1: SIM108 [*] Use binary operator `z = cond and other_cond` instead of `if`-`else`-block
SIM108.py:148:1: SIM108 [*] Use ternary operator `z = cond if not cond else other_cond` instead of `if`-`else`-block
|
146 | # SIM108 - should suggest
147 | # z = cond and other_cond
@@ -157,7 +157,7 @@ SIM108.py:148:1: SIM108 [*] Use binary operator `z = cond and other_cond` instea
152 |
153 | # SIM108 - should suggest
|
= help: Replace `if`-`else`-block with `z = cond and other_cond`
= help: Replace `if`-`else`-block with `z = cond if not cond else other_cond`
Unsafe fix
145 145 |
@@ -167,12 +167,12 @@ SIM108.py:148:1: SIM108 [*] Use binary operator `z = cond and other_cond` instea
149 |- z = cond
150 |-else:
151 |- z = other_cond
148 |+z = cond and other_cond
148 |+z = cond if not cond else other_cond
152 149 |
153 150 | # SIM108 - should suggest
154 151 | # z = not cond and other_cond
SIM108.py:155:1: SIM108 [*] Use binary operator `z = not cond and other_cond` instead of `if`-`else`-block
SIM108.py:155:1: SIM108 [*] Use ternary operator `z = not cond if cond else other_cond` instead of `if`-`else`-block
|
153 | # SIM108 - should suggest
154 | # z = not cond and other_cond
@@ -184,7 +184,7 @@ SIM108.py:155:1: SIM108 [*] Use binary operator `z = not cond and other_cond` in
159 |
160 | # SIM108 does not suggest
|
= help: Replace `if`-`else`-block with `z = not cond and other_cond`
= help: Replace `if`-`else`-block with `z = not cond if cond else other_cond`
Unsafe fix
152 152 |
@@ -194,7 +194,7 @@ SIM108.py:155:1: SIM108 [*] Use binary operator `z = not cond and other_cond` in
156 |- z = not cond
157 |-else:
158 |- z = other_cond
155 |+z = not cond and other_cond
155 |+z = not cond if cond else other_cond
159 156 |
160 157 | # SIM108 does not suggest
161 158 | # a binary option in these cases,

View File

@@ -0,0 +1,382 @@
---
source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs
---
SIM108.py:2:1: SIM108 [*] Use ternary operator `b = c if a else d` instead of `if`-`else`-block
|
1 | # SIM108
2 | / if a:
3 | | b = c
4 | | else:
5 | | b = d
| |_________^ SIM108
6 |
7 | # OK
|
= help: Replace `if`-`else`-block with `b = c if a else d`
Unsafe fix
1 1 | # SIM108
2 |-if a:
3 |- b = c
4 |-else:
5 |- b = d
2 |+b = c if a else d
6 3 |
7 4 | # OK
8 5 | b = c if a else d
SIM108.py:30:5: SIM108 [*] Use ternary operator `b = 1 if a else 2` instead of `if`-`else`-block
|
28 | pass
29 | else:
30 | / if a:
31 | | b = 1
32 | | else:
33 | | b = 2
| |_____________^ SIM108
|
= help: Replace `if`-`else`-block with `b = 1 if a else 2`
Unsafe fix
27 27 | if True:
28 28 | pass
29 29 | else:
30 |- if a:
31 |- b = 1
32 |- else:
33 |- b = 2
30 |+ b = 1 if a else 2
34 31 |
35 32 |
36 33 | import sys
SIM108.py:58:1: SIM108 Use ternary operator `abc = x if x > 0 else -x` instead of `if`-`else`-block
|
57 | # SIM108 (without fix due to comments)
58 | / if x > 0:
59 | | # test test
60 | | abc = x
61 | | else:
62 | | # test test test
63 | | abc = -x
| |____________^ SIM108
|
= help: Replace `if`-`else`-block with `abc = x if x > 0 else -x`
SIM108.py:82:1: SIM108 [*] Use ternary operator `b = "cccccccccccccccccccccccccccccccccß" if a else "ddddddddddddddddddddddddddddddddd💣"` instead of `if`-`else`-block
|
81 | # SIM108
82 | / if a:
83 | | b = "cccccccccccccccccccccccccccccccccß"
84 | | else:
85 | | b = "ddddddddddddddddddddddddddddddddd💣"
| |_____________________________________________^ SIM108
|
= help: Replace `if`-`else`-block with `b = "cccccccccccccccccccccccccccccccccß" if a else "ddddddddddddddddddddddddddddddddd💣"`
Unsafe fix
79 79 |
80 80 |
81 81 | # SIM108
82 |-if a:
83 |- b = "cccccccccccccccccccccccccccccccccß"
84 |-else:
85 |- b = "ddddddddddddddddddddddddddddddddd💣"
82 |+b = "cccccccccccccccccccccccccccccccccß" if a else "ddddddddddddddddddddddddddddddddd💣"
86 83 |
87 84 |
88 85 | # OK (too long)
SIM108.py:105:1: SIM108 Use ternary operator `exitcode = 0 if True else 1` instead of `if`-`else`-block
|
104 | # SIM108 (without fix due to trailing comment)
105 | / if True:
106 | | exitcode = 0
107 | | else:
108 | | exitcode = 1 # Trailing comment
| |________________^ SIM108
|
= help: Replace `if`-`else`-block with `exitcode = 0 if True else 1`
SIM108.py:112:1: SIM108 Use ternary operator `x = 3 if True else 5` instead of `if`-`else`-block
|
111 | # SIM108
112 | / if True: x = 3 # Foo
113 | | else: x = 5
| |___________^ SIM108
|
= help: Replace `if`-`else`-block with `x = 3 if True else 5`
SIM108.py:117:1: SIM108 Use ternary operator `x = 3 if True else 5` instead of `if`-`else`-block
|
116 | # SIM108
117 | / if True: # Foo
118 | | x = 3
119 | | else:
120 | | x = 5
| |_________^ SIM108
|
= help: Replace `if`-`else`-block with `x = 3 if True else 5`
SIM108.py:141:1: SIM108 [*] Use binary operator `z = cond or other_cond` instead of `if`-`else`-block
|
139 | # SIM108 - should suggest
140 | # z = cond or other_cond
141 | / if cond:
142 | | z = cond
143 | | else:
144 | | z = other_cond
| |__________________^ SIM108
145 |
146 | # SIM108 - should suggest
|
= help: Replace `if`-`else`-block with `z = cond or other_cond`
Unsafe fix
138 138 |
139 139 | # SIM108 - should suggest
140 140 | # z = cond or other_cond
141 |-if cond:
142 |- z = cond
143 |-else:
144 |- z = other_cond
141 |+z = cond or other_cond
145 142 |
146 143 | # SIM108 - should suggest
147 144 | # z = cond and other_cond
SIM108.py:148:1: SIM108 [*] Use binary operator `z = cond and other_cond` instead of `if`-`else`-block
|
146 | # SIM108 - should suggest
147 | # z = cond and other_cond
148 | / if not cond:
149 | | z = cond
150 | | else:
151 | | z = other_cond
| |__________________^ SIM108
152 |
153 | # SIM108 - should suggest
|
= help: Replace `if`-`else`-block with `z = cond and other_cond`
Unsafe fix
145 145 |
146 146 | # SIM108 - should suggest
147 147 | # z = cond and other_cond
148 |-if not cond:
149 |- z = cond
150 |-else:
151 |- z = other_cond
148 |+z = cond and other_cond
152 149 |
153 150 | # SIM108 - should suggest
154 151 | # z = not cond and other_cond
SIM108.py:155:1: SIM108 [*] Use binary operator `z = not cond and other_cond` instead of `if`-`else`-block
|
153 | # SIM108 - should suggest
154 | # z = not cond and other_cond
155 | / if cond:
156 | | z = not cond
157 | | else:
158 | | z = other_cond
| |__________________^ SIM108
159 |
160 | # SIM108 does not suggest
|
= help: Replace `if`-`else`-block with `z = not cond and other_cond`
Unsafe fix
152 152 |
153 153 | # SIM108 - should suggest
154 154 | # z = not cond and other_cond
155 |-if cond:
156 |- z = not cond
157 |-else:
158 |- z = other_cond
155 |+z = not cond and other_cond
159 156 |
160 157 | # SIM108 does not suggest
161 158 | # a binary option in these cases,
SIM108.py:167:1: SIM108 [*] Use ternary operator `z = 1 if True else other` instead of `if`-`else`-block
|
165 | # (Of course, these specific expressions
166 | # should be simplified for other reasons...)
167 | / if True:
168 | | z = 1
169 | | else:
170 | | z = other
| |_____________^ SIM108
171 |
172 | if False:
|
= help: Replace `if`-`else`-block with `z = 1 if True else other`
Unsafe fix
164 164 | # so, e.g. `True == 1`.
165 165 | # (Of course, these specific expressions
166 166 | # should be simplified for other reasons...)
167 |-if True:
168 |- z = 1
169 |-else:
170 |- z = other
167 |+z = 1 if True else other
171 168 |
172 169 | if False:
173 170 | z = 1
SIM108.py:172:1: SIM108 [*] Use ternary operator `z = 1 if False else other` instead of `if`-`else`-block
|
170 | z = other
171 |
172 | / if False:
173 | | z = 1
174 | | else:
175 | | z = other
| |_____________^ SIM108
176 |
177 | if 1:
|
= help: Replace `if`-`else`-block with `z = 1 if False else other`
Unsafe fix
169 169 | else:
170 170 | z = other
171 171 |
172 |-if False:
173 |- z = 1
174 |-else:
175 |- z = other
172 |+z = 1 if False else other
176 173 |
177 174 | if 1:
178 175 | z = True
SIM108.py:177:1: SIM108 [*] Use ternary operator `z = True if 1 else other` instead of `if`-`else`-block
|
175 | z = other
176 |
177 | / if 1:
178 | | z = True
179 | | else:
180 | | z = other
| |_____________^ SIM108
181 |
182 | # SIM108 does not suggest a binary option in this
|
= help: Replace `if`-`else`-block with `z = True if 1 else other`
Unsafe fix
174 174 | else:
175 175 | z = other
176 176 |
177 |-if 1:
178 |- z = True
179 |-else:
180 |- z = other
177 |+z = True if 1 else other
181 178 |
182 179 | # SIM108 does not suggest a binary option in this
183 180 | # case, since we'd be reducing the number of calls
SIM108.py:185:1: SIM108 [*] Use ternary operator `z = foo() if foo() else other` instead of `if`-`else`-block
|
183 | # case, since we'd be reducing the number of calls
184 | # from Two to one.
185 | / if foo():
186 | | z = foo()
187 | | else:
188 | | z = other
| |_____________^ SIM108
189 |
190 | # SIM108 does not suggest a binary option in this
|
= help: Replace `if`-`else`-block with `z = foo() if foo() else other`
Unsafe fix
182 182 | # SIM108 does not suggest a binary option in this
183 183 | # case, since we'd be reducing the number of calls
184 184 | # from Two to one.
185 |-if foo():
186 |- z = foo()
187 |-else:
188 |- z = other
185 |+z = foo() if foo() else other
189 186 |
190 187 | # SIM108 does not suggest a binary option in this
191 188 | # case, since we'd be reducing the number of calls
SIM108.py:193:1: SIM108 [*] Use ternary operator `z = not foo() if foo() else other` instead of `if`-`else`-block
|
191 | # case, since we'd be reducing the number of calls
192 | # from Two to one.
193 | / if foo():
194 | | z = not foo()
195 | | else:
196 | | z = other
| |_____________^ SIM108
|
= help: Replace `if`-`else`-block with `z = not foo() if foo() else other`
Unsafe fix
190 190 | # SIM108 does not suggest a binary option in this
191 191 | # case, since we'd be reducing the number of calls
192 192 | # from Two to one.
193 |-if foo():
194 |- z = not foo()
195 |-else:
196 |- z = other
193 |+z = not foo() if foo() else other
197 194 |
198 195 |
199 196 | # These two cases double as tests for f-string quote preservation. The first
SIM108.py:202:1: SIM108 [*] Use ternary operator `var = "str" if cond else f"{first}-{second}"` instead of `if`-`else`-block
|
200 | # f-string should preserve its double quotes, and the second should preserve
201 | # single quotes
202 | / if cond:
203 | | var = "str"
204 | | else:
205 | | var = f"{first}-{second}"
| |_____________________________^ SIM108
206 |
207 | if cond:
|
= help: Replace `if`-`else`-block with `var = "str" if cond else f"{first}-{second}"`
Unsafe fix
199 199 | # These two cases double as tests for f-string quote preservation. The first
200 200 | # f-string should preserve its double quotes, and the second should preserve
201 201 | # single quotes
202 |-if cond:
203 |- var = "str"
204 |-else:
205 |- var = f"{first}-{second}"
202 |+var = "str" if cond else f"{first}-{second}"
206 203 |
207 204 | if cond:
208 205 | var = "str"
SIM108.py:207:1: SIM108 [*] Use ternary operator `var = "str" if cond else f'{first}-{second}'` instead of `if`-`else`-block
|
205 | var = f"{first}-{second}"
206 |
207 | / if cond:
208 | | var = "str"
209 | | else:
210 | | var = f'{first}-{second}'
| |_____________________________^ SIM108
|
= help: Replace `if`-`else`-block with `var = "str" if cond else f'{first}-{second}'`
Unsafe fix
204 204 | else:
205 205 | var = f"{first}-{second}"
206 206 |
207 |-if cond:
208 |- var = "str"
209 |-else:
210 |- var = f'{first}-{second}'
207 |+var = "str" if cond else f'{first}-{second}'

View File

@@ -2,6 +2,7 @@ pub(crate) mod rules;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -19,7 +20,7 @@ mod tests {
#[test_case(Rule::InvalidTodoCapitalization, Path::new("TD006.py"))]
#[test_case(Rule::MissingSpaceAfterTodoColon, Path::new("TD007.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_todos").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -6,6 +6,7 @@ pub mod settings;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -54,7 +55,7 @@ mod tests {
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("typing_modules_1.py"))]
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("typing_modules_2.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),
@@ -69,7 +70,7 @@ mod tests {
#[test_case(Rule::QuotedTypeAlias, Path::new("TC008.py"))]
#[test_case(Rule::QuotedTypeAlias, Path::new("TC008_typing_execution_context.py"))]
fn type_alias_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings::for_rules(vec![
@@ -83,7 +84,11 @@ mod tests {
#[test_case(Rule::QuotedTypeAlias, Path::new("TC008_union_syntax_pre_py310.py"))]
fn type_alias_rules_pre_py310(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("pre_py310_{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!(
"pre_py310_{}_{}",
rule_code.as_ref(),
path.to_string_lossy()
);
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -102,7 +107,7 @@ mod tests {
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote3.py"))]
#[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("quote3.py"))]
fn quote(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("quote_{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("quote_{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -121,7 +126,7 @@ mod tests {
#[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("init_var.py"))]
#[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("kw_only.py"))]
fn strict(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("strict_{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("strict_{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -165,7 +170,7 @@ mod tests {
Path::new("exempt_type_checking_3.py")
)]
fn exempt_type_checking(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -202,7 +207,7 @@ mod tests {
Path::new("runtime_evaluated_base_classes_5.py")
)]
fn runtime_evaluated_base_classes(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -233,7 +238,7 @@ mod tests {
Path::new("runtime_evaluated_decorators_3.py")
)]
fn runtime_evaluated_decorators(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -259,7 +264,7 @@ mod tests {
Path::new("module/undefined.py")
)]
fn base_class_same_file(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {
@@ -277,7 +282,7 @@ mod tests {
#[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("module/app.py"))]
#[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("module/routes.py"))]
fn decorator_same_file(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("flake8_type_checking").join(path).as_path(),
&settings::LinterSettings {

View File

@@ -4,6 +4,7 @@ pub(crate) mod rules;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -21,7 +22,7 @@ mod tests {
#[test_case(Rule::Numpy2Deprecation, Path::new("NPY201_2.py"))]
#[test_case(Rule::Numpy2Deprecation, Path::new("NPY201_3.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("numpy").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -346,11 +346,10 @@ fn convert_to_dict_comprehension(
// since if the assignment expression appears
// internally (e.g. as an operand in a boolean
// operation) then it will already be parenthesized.
match test {
Expr::Named(_) | Expr::If(_) | Expr::Lambda(_) => {
format!(" if ({})", locator.slice(test.range()))
}
_ => format!(" if {}", locator.slice(test.range())),
if test.is_named_expr() {
format!(" if ({})", locator.slice(test.range()))
} else {
format!(" if {}", locator.slice(test.range()))
}
}
None => String::new(),

View File

@@ -358,7 +358,7 @@ fn convert_to_list_extend(
fix_type: ComprehensionType,
binding: &Binding,
for_stmt: &ast::StmtFor,
if_test: Option<&Expr>,
if_test: Option<&ast::Expr>,
to_append: &Expr,
checker: &Checker,
) -> Result<Fix> {
@@ -374,11 +374,10 @@ fn convert_to_list_extend(
// since if the assignment expression appears
// internally (e.g. as an operand in a boolean
// operation) then it will already be parenthesized.
match test {
Expr::Named(_) | Expr::If(_) | Expr::Lambda(_) => {
format!(" if ({})", locator.slice(test.range()))
}
_ => format!(" if {}", locator.slice(test.range())),
if test.is_named_expr() {
format!(" if ({})", locator.slice(test.range()))
} else {
format!(" if {}", locator.slice(test.range()))
}
}
None => String::new(),

View File

@@ -219,27 +219,5 @@ PERF401.py:268:9: PERF401 Use a list comprehension to create a transformed list
267 | for i in values:
268 | result.append(i + 1) # PERF401
| ^^^^^^^^^^^^^^^^^^^^ PERF401
269 |
270 | def f():
|
= help: Replace for loop with list comprehension
PERF401.py:276:13: PERF401 Use a list comprehension to create a transformed list
|
274 | for i in src:
275 | if True if True else False:
276 | dst.append(i)
| ^^^^^^^^^^^^^ PERF401
277 |
278 | for i in src:
|
= help: Replace for loop with list comprehension
PERF401.py:280:13: PERF401 Use `list.extend` to create a transformed list
|
278 | for i in src:
279 | if lambda: 0:
280 | dst.append(i)
| ^^^^^^^^^^^^^ PERF401
|
= help: Replace for loop with list.extend

View File

@@ -128,23 +128,3 @@ PERF403.py:153:9: PERF403 Use a dictionary comprehension instead of a for-loop
| ^^^^^^^^^^^^^^^^^^ PERF403
|
= help: Replace for loop with dict comprehension
PERF403.py:162:13: PERF403 Use a dictionary comprehension instead of a for-loop
|
160 | for k, v in src:
161 | if True if True else False:
162 | dst[k] = v
| ^^^^^^^^^^ PERF403
163 |
164 | for k, v in src:
|
= help: Replace for loop with dict comprehension
PERF403.py:166:13: PERF403 Use a dictionary comprehension instead of a for-loop
|
164 | for k, v in src:
165 | if lambda: 0:
166 | dst[k] = v
| ^^^^^^^^^^ PERF403
|
= help: Replace for loop with dict comprehension

View File

@@ -517,8 +517,6 @@ PERF401.py:268:9: PERF401 [*] Use a list comprehension to create a transformed l
267 | for i in values:
268 | result.append(i + 1) # PERF401
| ^^^^^^^^^^^^^^^^^^^^ PERF401
269 |
270 | def f():
|
= help: Replace for loop with list comprehension
@@ -531,49 +529,3 @@ PERF401.py:268:9: PERF401 [*] Use a list comprehension to create a transformed l
268 |- result.append(i + 1) # PERF401
266 |+ # this should be replaced with a comprehension
267 |+ result = [i + 1 for i in values] # PERF401
269 268 |
270 269 | def f():
271 270 | src = [1]
PERF401.py:276:13: PERF401 [*] Use a list comprehension to create a transformed list
|
274 | for i in src:
275 | if True if True else False:
276 | dst.append(i)
| ^^^^^^^^^^^^^ PERF401
277 |
278 | for i in src:
|
= help: Replace for loop with list comprehension
Unsafe fix
269 269 |
270 270 | def f():
271 271 | src = [1]
272 |- dst = []
273 272 |
274 |- for i in src:
275 |- if True if True else False:
276 |- dst.append(i)
273 |+ dst = [i for i in src if (True if True else False)]
277 274 |
278 275 | for i in src:
279 276 | if lambda: 0:
PERF401.py:280:13: PERF401 [*] Use `list.extend` to create a transformed list
|
278 | for i in src:
279 | if lambda: 0:
280 | dst.append(i)
| ^^^^^^^^^^^^^ PERF401
|
= help: Replace for loop with list.extend
Unsafe fix
275 275 | if True if True else False:
276 276 | dst.append(i)
277 277 |
278 |- for i in src:
279 |- if lambda: 0:
280 |- dst.append(i)
278 |+ dst.extend(i for i in src if (lambda: 0))

View File

@@ -305,49 +305,3 @@ PERF403.py:153:9: PERF403 [*] Use a dictionary comprehension instead of a for-lo
152 |- for idx, name in indices, fruit:
153 |- result[name] = idx # PERF403
151 |+ result = {name: idx for idx, name in (indices, fruit)} # PERF403
154 152 |
155 153 |
156 154 | def foo():
PERF403.py:162:13: PERF403 [*] Use a dictionary comprehension instead of a for-loop
|
160 | for k, v in src:
161 | if True if True else False:
162 | dst[k] = v
| ^^^^^^^^^^ PERF403
163 |
164 | for k, v in src:
|
= help: Replace for loop with dict comprehension
Unsafe fix
155 155 |
156 156 | def foo():
157 157 | src = (("x", 1),)
158 |- dst = {}
159 158 |
160 |- for k, v in src:
161 |- if True if True else False:
162 |- dst[k] = v
159 |+ dst = {k: v for k, v in src if (True if True else False)}
163 160 |
164 161 | for k, v in src:
165 162 | if lambda: 0:
PERF403.py:166:13: PERF403 [*] Use `dict.update` instead of a for-loop
|
164 | for k, v in src:
165 | if lambda: 0:
166 | dst[k] = v
| ^^^^^^^^^^ PERF403
|
= help: Replace for loop with `dict.update`
Unsafe fix
161 161 | if True if True else False:
162 162 | dst[k] = v
163 163 |
164 |- for k, v in src:
165 |- if lambda: 0:
166 |- dst[k] = v
164 |+ dst.update({k: v for k, v in src if (lambda: 0)})

View File

@@ -4,6 +4,7 @@ pub mod settings;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -19,7 +20,7 @@ mod tests {
#[test_case(Rule::DocstringMissingException, Path::new("DOC501.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("pydoclint").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),
@@ -35,7 +36,7 @@ mod tests {
#[test_case(Rule::DocstringMissingException, Path::new("DOC501_google.py"))]
#[test_case(Rule::DocstringExtraneousException, Path::new("DOC502_google.py"))]
fn rules_google_style(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("pydoclint").join(path).as_path(),
&settings::LinterSettings {
@@ -57,7 +58,7 @@ mod tests {
#[test_case(Rule::DocstringMissingException, Path::new("DOC501_numpy.py"))]
#[test_case(Rule::DocstringExtraneousException, Path::new("DOC502_numpy.py"))]
fn rules_numpy_style(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("pydoclint").join(path).as_path(),
&settings::LinterSettings {
@@ -78,7 +79,7 @@ mod tests {
fn rules_google_style_ignore_one_line(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"{}_{}_ignore_one_line",
rule_code.name(),
rule_code.as_ref(),
path.to_string_lossy()
);
let diagnostics = test_path(

View File

@@ -776,10 +776,8 @@ mod tests {
messages.sort_by_key(Ranged::start);
let actual = messages
.iter()
.filter(|msg| !msg.is_syntax_error())
.map(Message::name)
.filter_map(Message::to_rule)
.collect::<Vec<_>>();
let expected: Vec<_> = expected.iter().map(|rule| rule.name().as_str()).collect();
assert_eq!(actual, expected);
}

View File

@@ -271,7 +271,7 @@ const TYPING_TO_RE_39: &[&str] = &["Match", "Pattern"];
const TYPING_RE_TO_RE_39: &[&str] = &["Match", "Pattern"];
// Members of `typing_extensions` that were moved to `typing`.
const TYPING_EXTENSIONS_TO_TYPING_39: &[&str] = &["Annotated"];
const TYPING_EXTENSIONS_TO_TYPING_39: &[&str] = &["Annotated", "get_type_hints"];
// Members of `typing` that were moved _and_ renamed (and thus cannot be
// automatically fixed).
@@ -373,9 +373,6 @@ const TYPING_EXTENSIONS_TO_TYPING_313: &[&str] = &[
"NoDefault",
"ReadOnly",
"TypeIs",
// Introduced in Python 3.5,
// but typing_extensions backports features from py313:
"get_type_hints",
// Introduced in Python 3.6,
// but typing_extensions backports features from py313:
"ContextManager",

View File

@@ -1,7 +1,6 @@
use crate::checkers::ast::Checker;
use crate::fix::edits::{Parentheses, remove_argument};
use crate::{Fix, FixAvailability, Violation};
use ruff_diagnostics::Applicability;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::StmtClassDef;
use ruff_text_size::Ranged;
@@ -64,21 +63,13 @@ pub(crate) fn useless_class_metaclass_type(checker: &Checker, class_def: &StmtCl
);
diagnostic.try_set_fix(|| {
let edit = remove_argument(
remove_argument(
keyword,
arguments,
Parentheses::Remove,
checker.locator().contents(),
)?;
let range = edit.range();
let applicability = if checker.comment_ranges().intersects(range) {
Applicability::Unsafe
} else {
Applicability::Safe
};
Ok(Fix::applicable_edit(edit, applicability))
)
.map(Fix::safe_edit)
});
}
}

View File

@@ -1,4 +1,3 @@
use ruff_diagnostics::Applicability;
use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast as ast;
use ruff_text_size::Ranged;
@@ -62,23 +61,14 @@ pub(crate) fn useless_object_inheritance(checker: &Checker, class_def: &ast::Stm
},
base.range(),
);
diagnostic.try_set_fix(|| {
let edit = remove_argument(
remove_argument(
base,
arguments,
Parentheses::Remove,
checker.locator().contents(),
)?;
let range = edit.range();
let applicability = if checker.comment_ranges().intersects(range) {
Applicability::Unsafe
} else {
Applicability::Safe
};
Ok(Fix::applicable_edit(edit, applicability))
)
.map(Fix::safe_edit)
});
}
}

View File

@@ -51,7 +51,7 @@ UP004.py:16:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
12 12 | ...
13 13 |
14 14 |
@@ -75,7 +75,7 @@ UP004.py:24:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
19 19 | ...
20 20 |
21 21 |
@@ -99,7 +99,7 @@ UP004.py:31:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
26 26 | ...
27 27 |
28 28 |
@@ -122,7 +122,7 @@ UP004.py:37:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
33 33 | ...
34 34 |
35 35 |
@@ -146,7 +146,7 @@ UP004.py:45:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
40 40 | ...
41 41 |
42 42 |
@@ -171,7 +171,7 @@ UP004.py:53:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
48 48 | ...
49 49 |
50 50 |
@@ -196,7 +196,7 @@ UP004.py:61:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
56 56 | ...
57 57 |
58 58 |
@@ -221,7 +221,7 @@ UP004.py:69:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
64 64 | ...
65 65 |
66 66 |
@@ -320,7 +320,7 @@ UP004.py:98:5: UP004 [*] Class `B` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
95 95 |
96 96 |
97 97 | class B(
@@ -381,7 +381,7 @@ UP004.py:125:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
121 121 | ...
122 122 |
123 123 |
@@ -403,7 +403,7 @@ UP004.py:131:5: UP004 [*] Class `A` inherits from `object`
|
= help: Remove `object` inheritance
Unsafe fix
Safe fix
127 127 | ...
128 128 |
129 129 |

View File

@@ -1179,8 +1179,6 @@ UP035.py:111:1: UP035 [*] Import from `warnings` instead: `deprecated`
110 | # UP035 on py313+ only
111 | from typing_extensions import deprecated
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP035
112 |
113 | # UP035 on py313+ only
|
= help: Import from `warnings`
@@ -1191,25 +1189,5 @@ UP035.py:111:1: UP035 [*] Import from `warnings` instead: `deprecated`
111 |-from typing_extensions import deprecated
111 |+from warnings import deprecated
112 112 |
113 113 | # UP035 on py313+ only
114 114 | from typing_extensions import get_type_hints
UP035.py:114:1: UP035 [*] Import from `typing` instead: `get_type_hints`
|
113 | # UP035 on py313+ only
114 | from typing_extensions import get_type_hints
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ UP035
115 |
116 | # https://github.com/astral-sh/ruff/issues/15780
|
= help: Import from `typing`
Safe fix
111 111 | from typing_extensions import deprecated
112 112 |
113 113 | # UP035 on py313+ only
114 |-from typing_extensions import get_type_hints
114 |+from typing import get_type_hints
115 115 |
116 116 | # https://github.com/astral-sh/ruff/issues/15780
117 117 | from typing_extensions import is_typeddict
113 113 |
114 114 | # https://github.com/astral-sh/ruff/issues/15780

View File

@@ -51,7 +51,7 @@ UP050.py:16:5: UP050 [*] Class `A` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
12 12 | ...
13 13 |
14 14 |
@@ -75,7 +75,7 @@ UP050.py:24:5: UP050 [*] Class `A` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
19 19 | ...
20 20 |
21 21 |
@@ -98,7 +98,7 @@ UP050.py:30:5: UP050 [*] Class `A` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
26 26 | ...
27 27 |
28 28 |
@@ -122,7 +122,7 @@ UP050.py:38:5: UP050 [*] Class `A` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
33 33 | ...
34 34 |
35 35 |
@@ -185,7 +185,7 @@ UP050.py:58:5: UP050 [*] Class `B` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
54 54 |
55 55 | class B(
56 56 | A,
@@ -205,7 +205,7 @@ UP050.py:69:5: UP050 [*] Class `A` uses `metaclass=type`, which is redundant
|
= help: Remove `metaclass=type`
Unsafe fix
Safe fix
65 65 | ...
66 66 |
67 67 |

View File

@@ -62,6 +62,24 @@ mod tests {
Ok(())
}
#[test_case(Rule::ReadlinesInFor, Path::new("FURB129.py"))]
fn preview(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",
rule_code.noqa_code(),
path.to_string_lossy()
);
let diagnostics = test_path(
Path::new("refurb").join(path).as_path(),
&settings::LinterSettings {
preview: settings::types::PreviewMode::Enabled,
..settings::LinterSettings::for_rule(rule_code)
},
)?;
assert_messages!(snapshot, diagnostics);
Ok(())
}
#[test]
fn write_whole_file_python_39() -> Result<()> {
let diagnostics = test_path(

View File

@@ -6,7 +6,7 @@ use ruff_python_semantic::analyze::typing::is_io_base_expr;
use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::fix::edits::pad_end;
use crate::preview::is_readlines_in_for_fix_safe_enabled;
use crate::{AlwaysFixableViolation, Edit, Fix};
/// ## What it does
@@ -85,25 +85,21 @@ fn readlines_in_iter(checker: &Checker, iter_expr: &Expr) {
return;
}
}
let deletion_range = if let Some(parenthesized_range) = parenthesized_range(
let edit = if let Some(parenthesized_range) = parenthesized_range(
expr_attr.value.as_ref().into(),
expr_attr.into(),
checker.comment_ranges(),
checker.source(),
) {
expr_call.range().add_start(parenthesized_range.len())
Edit::range_deletion(expr_call.range().add_start(parenthesized_range.len()))
} else {
expr_call.range().add_start(expr_attr.value.range().len())
};
let padded = pad_end(String::new(), deletion_range.end(), checker.locator());
let edit = if padded.is_empty() {
Edit::range_deletion(deletion_range)
} else {
Edit::range_replacement(padded, deletion_range)
Edit::range_deletion(expr_call.range().add_start(expr_attr.value.range().len()))
};
let mut diagnostic = checker.report_diagnostic(ReadlinesInFor, expr_call.range());
diagnostic.set_fix(Fix::safe_edit(edit));
diagnostic.set_fix(if is_readlines_in_for_fix_safe_enabled(checker.settings) {
Fix::safe_edit(edit)
} else {
Fix::unsafe_edit(edit)
});
}

View File

@@ -12,7 +12,7 @@ FURB129.py:7:18: FURB129 [*] Instead of calling `readlines()`, iterate over file
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
4 4 |
5 5 | # Errors
6 6 | with open("FURB129.py") as f:
@@ -33,7 +33,7 @@ FURB129.py:9:35: FURB129 [*] Instead of calling `readlines()`, iterate over file
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
6 6 | with open("FURB129.py") as f:
7 7 | for _line in f.readlines():
8 8 | pass
@@ -53,7 +53,7 @@ FURB129.py:10:35: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
7 7 | for _line in f.readlines():
8 8 | pass
9 9 | a = [line.lower() for line in f.readlines()]
@@ -74,7 +74,7 @@ FURB129.py:11:49: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
8 8 | pass
9 9 | a = [line.lower() for line in f.readlines()]
10 10 | b = {line.upper() for line in f.readlines()}
@@ -93,7 +93,7 @@ FURB129.py:14:18: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
11 11 | c = {line.lower(): line.upper() for line in f.readlines()}
12 12 |
13 13 | with Path("FURB129.py").open() as f:
@@ -113,7 +113,7 @@ FURB129.py:17:14: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
14 14 | for _line in f.readlines():
15 15 | pass
16 16 |
@@ -133,7 +133,7 @@ FURB129.py:20:14: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
17 17 | for _line in open("FURB129.py").readlines():
18 18 | pass
19 19 |
@@ -154,7 +154,7 @@ FURB129.py:26:18: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
23 23 |
24 24 | def func():
25 25 | f = Path("FURB129.py").open()
@@ -173,7 +173,7 @@ FURB129.py:32:18: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
29 29 |
30 30 |
31 31 | def func(f: io.BytesIO):
@@ -194,7 +194,7 @@ FURB129.py:38:22: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
35 35 |
36 36 | def func():
37 37 | with (open("FURB129.py") as f, foo as bar):
@@ -213,7 +213,7 @@ FURB129.py:47:17: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
44 44 | import builtins
45 45 |
46 46 | with builtins.open("FURB129.py") as f:
@@ -232,7 +232,7 @@ FURB129.py:54:17: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
51 51 | from builtins import open as o
52 52 |
53 53 | with o("FURB129.py") as f:
@@ -252,7 +252,7 @@ FURB129.py:93:17: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
90 90 |
91 91 | # https://github.com/astral-sh/ruff/issues/18231
92 92 | with open("furb129.py") as f:
@@ -270,7 +270,7 @@ FURB129.py:97:23: FURB129 [*] Instead of calling `readlines()`, iterate over fil
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
94 94 | pass
95 95 |
96 96 | with open("furb129.py") as f:
@@ -290,7 +290,7 @@ FURB129.py:101:17: FURB129 [*] Instead of calling `readlines()`, iterate over fi
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
98 98 |
99 99 |
100 100 | with open("furb129.py") as f:
@@ -310,27 +310,10 @@ FURB129.py:103:16: FURB129 [*] Instead of calling `readlines()`, iterate over fi
|
= help: Remove `readlines()`
Safe fix
Unsafe fix
100 100 | with open("furb129.py") as f:
101 101 | for line in (((f))).readlines():
102 102 | pass
103 |- for line in(f).readlines():
103 |+ for line in(f):
104 104 | pass
105 105 |
106 106 | # Test case for issue #17683 (missing space before keyword)
FURB129.py:107:29: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
106 | # Test case for issue #17683 (missing space before keyword)
107 | print([line for line in f.readlines()if True])
| ^^^^^^^^^^^^^ FURB129
|
= help: Remove `readlines()`
Safe fix
104 104 | pass
105 105 |
106 106 | # Test case for issue #17683 (missing space before keyword)
107 |- print([line for line in f.readlines()if True])
107 |+ print([line for line in f if True])

View File

@@ -0,0 +1,319 @@
---
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB129.py:7:18: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
5 | # Errors
6 | with open("FURB129.py") as f:
7 | for _line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
8 | pass
9 | a = [line.lower() for line in f.readlines()]
|
= help: Remove `readlines()`
Safe fix
4 4 |
5 5 | # Errors
6 6 | with open("FURB129.py") as f:
7 |- for _line in f.readlines():
7 |+ for _line in f:
8 8 | pass
9 9 | a = [line.lower() for line in f.readlines()]
10 10 | b = {line.upper() for line in f.readlines()}
FURB129.py:9:35: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
7 | for _line in f.readlines():
8 | pass
9 | a = [line.lower() for line in f.readlines()]
| ^^^^^^^^^^^^^ FURB129
10 | b = {line.upper() for line in f.readlines()}
11 | c = {line.lower(): line.upper() for line in f.readlines()}
|
= help: Remove `readlines()`
Safe fix
6 6 | with open("FURB129.py") as f:
7 7 | for _line in f.readlines():
8 8 | pass
9 |- a = [line.lower() for line in f.readlines()]
9 |+ a = [line.lower() for line in f]
10 10 | b = {line.upper() for line in f.readlines()}
11 11 | c = {line.lower(): line.upper() for line in f.readlines()}
12 12 |
FURB129.py:10:35: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
8 | pass
9 | a = [line.lower() for line in f.readlines()]
10 | b = {line.upper() for line in f.readlines()}
| ^^^^^^^^^^^^^ FURB129
11 | c = {line.lower(): line.upper() for line in f.readlines()}
|
= help: Remove `readlines()`
Safe fix
7 7 | for _line in f.readlines():
8 8 | pass
9 9 | a = [line.lower() for line in f.readlines()]
10 |- b = {line.upper() for line in f.readlines()}
10 |+ b = {line.upper() for line in f}
11 11 | c = {line.lower(): line.upper() for line in f.readlines()}
12 12 |
13 13 | with Path("FURB129.py").open() as f:
FURB129.py:11:49: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
9 | a = [line.lower() for line in f.readlines()]
10 | b = {line.upper() for line in f.readlines()}
11 | c = {line.lower(): line.upper() for line in f.readlines()}
| ^^^^^^^^^^^^^ FURB129
12 |
13 | with Path("FURB129.py").open() as f:
|
= help: Remove `readlines()`
Safe fix
8 8 | pass
9 9 | a = [line.lower() for line in f.readlines()]
10 10 | b = {line.upper() for line in f.readlines()}
11 |- c = {line.lower(): line.upper() for line in f.readlines()}
11 |+ c = {line.lower(): line.upper() for line in f}
12 12 |
13 13 | with Path("FURB129.py").open() as f:
14 14 | for _line in f.readlines():
FURB129.py:14:18: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
13 | with Path("FURB129.py").open() as f:
14 | for _line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
15 | pass
|
= help: Remove `readlines()`
Safe fix
11 11 | c = {line.lower(): line.upper() for line in f.readlines()}
12 12 |
13 13 | with Path("FURB129.py").open() as f:
14 |- for _line in f.readlines():
14 |+ for _line in f:
15 15 | pass
16 16 |
17 17 | for _line in open("FURB129.py").readlines():
FURB129.py:17:14: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
15 | pass
16 |
17 | for _line in open("FURB129.py").readlines():
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB129
18 | pass
|
= help: Remove `readlines()`
Safe fix
14 14 | for _line in f.readlines():
15 15 | pass
16 16 |
17 |-for _line in open("FURB129.py").readlines():
17 |+for _line in open("FURB129.py"):
18 18 | pass
19 19 |
20 20 | for _line in Path("FURB129.py").open().readlines():
FURB129.py:20:14: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
18 | pass
19 |
20 | for _line in Path("FURB129.py").open().readlines():
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB129
21 | pass
|
= help: Remove `readlines()`
Safe fix
17 17 | for _line in open("FURB129.py").readlines():
18 18 | pass
19 19 |
20 |-for _line in Path("FURB129.py").open().readlines():
20 |+for _line in Path("FURB129.py").open():
21 21 | pass
22 22 |
23 23 |
FURB129.py:26:18: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
24 | def func():
25 | f = Path("FURB129.py").open()
26 | for _line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
27 | pass
28 | f.close()
|
= help: Remove `readlines()`
Safe fix
23 23 |
24 24 | def func():
25 25 | f = Path("FURB129.py").open()
26 |- for _line in f.readlines():
26 |+ for _line in f:
27 27 | pass
28 28 | f.close()
29 29 |
FURB129.py:32:18: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
31 | def func(f: io.BytesIO):
32 | for _line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
33 | pass
|
= help: Remove `readlines()`
Safe fix
29 29 |
30 30 |
31 31 | def func(f: io.BytesIO):
32 |- for _line in f.readlines():
32 |+ for _line in f:
33 33 | pass
34 34 |
35 35 |
FURB129.py:38:22: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
36 | def func():
37 | with (open("FURB129.py") as f, foo as bar):
38 | for _line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
39 | pass
40 | for _line in bar.readlines():
|
= help: Remove `readlines()`
Safe fix
35 35 |
36 36 | def func():
37 37 | with (open("FURB129.py") as f, foo as bar):
38 |- for _line in f.readlines():
38 |+ for _line in f:
39 39 | pass
40 40 | for _line in bar.readlines():
41 41 | pass
FURB129.py:47:17: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
46 | with builtins.open("FURB129.py") as f:
47 | for line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
48 | pass
|
= help: Remove `readlines()`
Safe fix
44 44 | import builtins
45 45 |
46 46 | with builtins.open("FURB129.py") as f:
47 |- for line in f.readlines():
47 |+ for line in f:
48 48 | pass
49 49 |
50 50 |
FURB129.py:54:17: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
53 | with o("FURB129.py") as f:
54 | for line in f.readlines():
| ^^^^^^^^^^^^^ FURB129
55 | pass
|
= help: Remove `readlines()`
Safe fix
51 51 | from builtins import open as o
52 52 |
53 53 | with o("FURB129.py") as f:
54 |- for line in f.readlines():
54 |+ for line in f:
55 55 | pass
56 56 |
57 57 |
FURB129.py:93:17: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
91 | # https://github.com/astral-sh/ruff/issues/18231
92 | with open("furb129.py") as f:
93 | for line in (f).readlines():
| ^^^^^^^^^^^^^^^ FURB129
94 | pass
|
= help: Remove `readlines()`
Safe fix
90 90 |
91 91 | # https://github.com/astral-sh/ruff/issues/18231
92 92 | with open("furb129.py") as f:
93 |- for line in (f).readlines():
93 |+ for line in (f):
94 94 | pass
95 95 |
96 96 | with open("furb129.py") as f:
FURB129.py:97:23: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
96 | with open("furb129.py") as f:
97 | [line for line in (f).readlines()]
| ^^^^^^^^^^^^^^^ FURB129
|
= help: Remove `readlines()`
Safe fix
94 94 | pass
95 95 |
96 96 | with open("furb129.py") as f:
97 |- [line for line in (f).readlines()]
97 |+ [line for line in (f)]
98 98 |
99 99 |
100 100 | with open("furb129.py") as f:
FURB129.py:101:17: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
100 | with open("furb129.py") as f:
101 | for line in (((f))).readlines():
| ^^^^^^^^^^^^^^^^^^^ FURB129
102 | pass
103 | for line in(f).readlines():
|
= help: Remove `readlines()`
Safe fix
98 98 |
99 99 |
100 100 | with open("furb129.py") as f:
101 |- for line in (((f))).readlines():
101 |+ for line in (((f))):
102 102 | pass
103 103 | for line in(f).readlines():
104 104 | pass
FURB129.py:103:16: FURB129 [*] Instead of calling `readlines()`, iterate over file object directly
|
101 | for line in (((f))).readlines():
102 | pass
103 | for line in(f).readlines():
| ^^^^^^^^^^^^^^^ FURB129
104 | pass
|
= help: Remove `readlines()`
Safe fix
100 100 | with open("furb129.py") as f:
101 101 | for line in (((f))).readlines():
102 102 | pass
103 |- for line in(f).readlines():
103 |+ for line in(f):
104 104 | pass

View File

@@ -24,7 +24,6 @@ mod tests {
use crate::{assert_messages, settings};
#[test_case(Rule::CollectionLiteralConcatenation, Path::new("RUF005.py"))]
#[test_case(Rule::CollectionLiteralConcatenation, Path::new("RUF005_slices.py"))]
#[test_case(Rule::AsyncioDanglingTask, Path::new("RUF006.py"))]
#[test_case(Rule::ZipInsteadOfPairwise, Path::new("RUF007.py"))]
#[test_case(Rule::MutableDataclassDefault, Path::new("RUF008.py"))]
@@ -95,7 +94,6 @@ mod tests {
#[test_case(Rule::MapIntVersionParsing, Path::new("RUF048_1.py"))]
#[test_case(Rule::IfKeyInDictDel, Path::new("RUF051.py"))]
#[test_case(Rule::UsedDummyVariable, Path::new("RUF052.py"))]
#[test_case(Rule::ClassWithMixedTypeVars, Path::new("RUF053.py"))]
#[test_case(Rule::FalsyDictGetFallback, Path::new("RUF056.py"))]
#[test_case(Rule::UnusedUnpackedVariable, Path::new("RUF059_0.py"))]
#[test_case(Rule::UnusedUnpackedVariable, Path::new("RUF059_1.py"))]
@@ -324,7 +322,10 @@ mod tests {
fn ruff_noqa_filedirective_unused() -> Result<()> {
let diagnostics = test_path(
Path::new("ruff/RUF100_6.py"),
&settings::LinterSettings::for_rules(vec![Rule::UnusedNOQA]),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rules(vec![Rule::UnusedNOQA])
},
)?;
assert_messages!(diagnostics);
Ok(())
@@ -334,12 +335,15 @@ mod tests {
fn ruff_noqa_filedirective_unused_last_of_many() -> Result<()> {
let diagnostics = test_path(
Path::new("ruff/RUF100_7.py"),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedNOQA,
Rule::FStringMissingPlaceholders,
Rule::LineTooLong,
Rule::UnusedVariable,
]),
&settings::LinterSettings {
preview: PreviewMode::Enabled,
..settings::LinterSettings::for_rules(vec![
Rule::UnusedNOQA,
Rule::FStringMissingPlaceholders,
Rule::LineTooLong,
Rule::UnusedVariable,
])
},
)?;
assert_messages!(diagnostics);
Ok(())
@@ -476,8 +480,10 @@ mod tests {
#[test_case(Rule::DataclassEnum, Path::new("RUF049.py"))]
#[test_case(Rule::StarmapZip, Path::new("RUF058_0.py"))]
#[test_case(Rule::StarmapZip, Path::new("RUF058_1.py"))]
#[test_case(Rule::ClassWithMixedTypeVars, Path::new("RUF053.py"))]
#[test_case(Rule::IndentedFormFeed, Path::new("RUF054.py"))]
#[test_case(Rule::ImplicitClassVarInDataclass, Path::new("RUF045.py"))]
#[test_case(Rule::CollectionLiteralConcatenation, Path::new("RUF005_slices.py"))]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",

View File

@@ -4,6 +4,7 @@ use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::snippet::SourceCodeSnippet;
use crate::preview::is_support_slices_in_literal_concatenation_enabled;
use crate::{Edit, Fix, FixAvailability, Violation};
/// ## What it does
@@ -95,7 +96,7 @@ enum Type {
}
/// Recursively merge all the tuples and lists in the expression.
fn concatenate_expressions(expr: &Expr) -> Option<(Expr, Type)> {
fn concatenate_expressions(expr: &Expr, should_support_slices: bool) -> Option<(Expr, Type)> {
let Expr::BinOp(ast::ExprBinOp {
left,
op: Operator::Add,
@@ -107,18 +108,22 @@ fn concatenate_expressions(expr: &Expr) -> Option<(Expr, Type)> {
};
let new_left = match left.as_ref() {
Expr::BinOp(ast::ExprBinOp { .. }) => match concatenate_expressions(left) {
Some((new_left, _)) => new_left,
None => *left.clone(),
},
Expr::BinOp(ast::ExprBinOp { .. }) => {
match concatenate_expressions(left, should_support_slices) {
Some((new_left, _)) => new_left,
None => *left.clone(),
}
}
_ => *left.clone(),
};
let new_right = match right.as_ref() {
Expr::BinOp(ast::ExprBinOp { .. }) => match concatenate_expressions(right) {
Some((new_right, _)) => new_right,
None => *right.clone(),
},
Expr::BinOp(ast::ExprBinOp { .. }) => {
match concatenate_expressions(right, should_support_slices) {
Some((new_right, _)) => new_right,
None => *right.clone(),
}
}
_ => *right.clone(),
};
@@ -146,7 +151,9 @@ fn concatenate_expressions(expr: &Expr) -> Option<(Expr, Type)> {
make_splat_elts(splat_element, other_elements, splat_at_left)
}
// Subscripts are also considered safe-ish to splat if the indexer is a slice.
Expr::Subscript(ast::ExprSubscript { slice, .. }) if matches!(&**slice, Expr::Slice(_)) => {
Expr::Subscript(ast::ExprSubscript { slice, .. })
if should_support_slices && matches!(&**slice, Expr::Slice(_)) =>
{
make_splat_elts(splat_element, other_elements, splat_at_left)
}
// If the splat element is itself a list/tuple, insert them in the other list/tuple.
@@ -191,7 +198,10 @@ pub(crate) fn collection_literal_concatenation(checker: &Checker, expr: &Expr) {
return;
}
let Some((new_expr, type_)) = concatenate_expressions(expr) else {
let should_support_slices =
is_support_slices_in_literal_concatenation_enabled(checker.settings);
let Some((new_expr, type_)) = concatenate_expressions(expr, should_support_slices) else {
return;
};

View File

@@ -7,7 +7,6 @@ use ruff_python_semantic::analyze::function_type::is_stub;
use crate::Violation;
use crate::checkers::ast::Checker;
use crate::rules::fastapi::rules::is_fastapi_route;
/// ## What it does

View File

@@ -4,6 +4,7 @@ pub(crate) mod rules;
#[cfg(test)]
mod tests {
use std::convert::AsRef;
use std::path::Path;
use anyhow::Result;
@@ -24,7 +25,7 @@ mod tests {
#[test_case(Rule::ErrorInsteadOfException, Path::new("TRY400.py"))]
#[test_case(Rule::VerboseLogMessage, Path::new("TRY401.py"))]
fn rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!("{}_{}", rule_code.name(), path.to_string_lossy());
let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy());
let diagnostics = test_path(
Path::new("tryceratops").join(path).as_path(),
&settings::LinterSettings::for_rule(rule_code),

View File

@@ -20,7 +20,6 @@ use ruff_python_parser::{ParseError, ParseOptions};
use ruff_python_trivia::textwrap::dedent;
use ruff_source_file::SourceFileBuilder;
use crate::codes::Rule;
use crate::fix::{FixResult, fix_file};
use crate::linter::check_path;
use crate::message::{Emitter, EmitterContext, Message, TextEmitter};
@@ -234,9 +233,8 @@ Source with applied fixes:
let messages = messages
.into_iter()
.filter_map(|msg| Some((msg.noqa_code()?, msg)))
.map(|(code, mut diagnostic)| {
let rule = Rule::from_code(&code.to_string()).unwrap();
.filter_map(|msg| Some((msg.to_rule()?, msg)))
.map(|(rule, mut diagnostic)| {
let fixable = diagnostic.fix().is_some_and(|fix| {
matches!(
fix.applicability(),

View File

@@ -174,7 +174,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
output.extend(quote! {
impl #linter {
pub(crate) fn rules(&self) -> ::std::vec::IntoIter<Rule> {
pub fn rules(&self) -> ::std::vec::IntoIter<Rule> {
match self { #prefix_into_iter_match_arms }
}
}
@@ -182,7 +182,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
}
output.extend(quote! {
impl RuleCodePrefix {
pub(crate) fn parse(linter: &Linter, code: &str) -> Result<Self, crate::registry::FromCodeError> {
pub fn parse(linter: &Linter, code: &str) -> Result<Self, crate::registry::FromCodeError> {
use std::str::FromStr;
Ok(match linter {
@@ -190,7 +190,7 @@ pub(crate) fn map_codes(func: &ItemFn) -> syn::Result<TokenStream> {
})
}
pub(crate) fn rules(&self) -> ::std::vec::IntoIter<Rule> {
pub fn rules(&self) -> ::std::vec::IntoIter<Rule> {
match self {
#(RuleCodePrefix::#linter_idents(prefix) => prefix.clone().rules(),)*
}
@@ -319,7 +319,7 @@ See also https://github.com/astral-sh/ruff/issues/2186.
matches!(self.group(), RuleGroup::Preview)
}
pub(crate) fn is_stable(&self) -> bool {
pub fn is_stable(&self) -> bool {
matches!(self.group(), RuleGroup::Stable)
}
@@ -371,7 +371,7 @@ fn generate_iter_impl(
quote! {
impl Linter {
/// Rules not in the preview.
pub(crate) fn rules(self: &Linter) -> ::std::vec::IntoIter<Rule> {
pub fn rules(self: &Linter) -> ::std::vec::IntoIter<Rule> {
match self {
#linter_rules_match_arms
}
@@ -385,7 +385,7 @@ fn generate_iter_impl(
}
impl RuleCodePrefix {
pub(crate) fn iter() -> impl Iterator<Item = RuleCodePrefix> {
pub fn iter() -> impl Iterator<Item = RuleCodePrefix> {
use strum::IntoEnumIterator;
let mut prefixes = Vec::new();
@@ -436,6 +436,7 @@ fn register_rules<'a>(input: impl Iterator<Item = &'a Rule>) -> TokenStream {
PartialOrd,
Ord,
::ruff_macros::CacheKey,
AsRefStr,
::strum_macros::IntoStaticStr,
::strum_macros::EnumString,
::serde::Serialize,

View File

@@ -2,7 +2,6 @@
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # NoQA: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # type: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # ty: ignore This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pyright: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pylint: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa This shouldn't break

View File

@@ -165,7 +165,7 @@ where
pub fn formatted_file(db: &dyn Db, file: File) -> Result<Option<String>, FormatModuleError> {
let options = db.format_options(file);
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
let parsed = parsed_module(db.upcast(), file);
if let Some(first) = parsed.errors().first() {
return Err(FormatModuleError::ParseError(first.clone()));
@@ -174,7 +174,7 @@ pub fn formatted_file(db: &dyn Db, file: File) -> Result<Option<String>, FormatM
let comment_ranges = CommentRanges::from(parsed.tokens());
let source = source_text(db.upcast(), file);
let formatted = format_node(&parsed, &comment_ranges, &source, options)?;
let formatted = format_node(parsed, &comment_ranges, &source, options)?;
let printed = formatted.print()?;
if printed.as_code() == &*source {

View File

@@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/trailing_comments.py
snapshot_kind: text
---
## Input
```python
@@ -8,7 +9,6 @@ input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/trailing_c
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # NoQA: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # type: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # ty: ignore This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pyright: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pylint: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa This shouldn't break
@@ -44,7 +44,6 @@ i = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # NoQA: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # type: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # ty: ignore This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pyright: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # pylint: This shouldn't break
i = ("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",) # noqa This shouldn't break

View File

@@ -26,5 +26,5 @@ pub fn is_pragma_comment(comment: &str) -> bool {
// Case-sensitive match against a variety of pragmas that _do_ require a trailing colon.
trimmed
.split_once(':')
.is_some_and(|(maybe_pragma, _)| matches!(maybe_pragma, "isort" | "type" | "pyright" | "pylint" | "flake8" | "ruff" | "ty"))
.is_some_and(|(maybe_pragma, _)| matches!(maybe_pragma, "isort" | "type" | "pyright" | "pylint" | "flake8" | "ruff"))
}

View File

@@ -12,6 +12,7 @@ use crate::{
use ruff_diagnostics::{Applicability, Edit, Fix};
use ruff_linter::{
Locator,
codes::Rule,
directives::{Flags, extract_directives},
generate_noqa_edits,
linter::check_path,
@@ -165,17 +166,26 @@ pub(crate) fn check(
messages
.into_iter()
.zip(noqa_edits)
.filter_map(|(message, noqa_edit)| {
if message.is_syntax_error() && !show_syntax_errors {
None
} else {
Some(to_lsp_diagnostic(
&message,
noqa_edit,
&source_kind,
locator.to_index(),
encoding,
))
.filter_map(|(message, noqa_edit)| match message.to_rule() {
Some(rule) => Some(to_lsp_diagnostic(
rule,
&message,
noqa_edit,
&source_kind,
locator.to_index(),
encoding,
)),
None => {
if show_syntax_errors {
Some(syntax_error_to_lsp_diagnostic(
&message,
&source_kind,
locator.to_index(),
encoding,
))
} else {
None
}
}
});
@@ -231,6 +241,7 @@ pub(crate) fn fixes_for_diagnostics(
/// Generates an LSP diagnostic with an associated cell index for the diagnostic to go in.
/// If the source kind is a text document, the cell index will always be `0`.
fn to_lsp_diagnostic(
rule: Rule,
diagnostic: &Message,
noqa_edit: Option<Edit>,
source_kind: &SourceKind,
@@ -242,13 +253,11 @@ fn to_lsp_diagnostic(
let body = diagnostic.body().to_string();
let fix = diagnostic.fix();
let suggestion = diagnostic.suggestion();
let code = diagnostic.noqa_code();
let fix = fix.and_then(|fix| fix.applies(Applicability::Unsafe).then_some(fix));
let data = (fix.is_some() || noqa_edit.is_some())
.then(|| {
let code = code?.to_string();
let edits = fix
.into_iter()
.flat_map(Fix::edits)
@@ -265,12 +274,14 @@ fn to_lsp_diagnostic(
title: suggestion.unwrap_or(name).to_string(),
noqa_edit,
edits,
code,
code: rule.noqa_code().to_string(),
})
.ok()
})
.flatten();
let code = rule.noqa_code().to_string();
let range: lsp_types::Range;
let cell: usize;
@@ -286,25 +297,14 @@ fn to_lsp_diagnostic(
range = diagnostic_range.to_range(source_kind.source_code(), index, encoding);
}
let (severity, tags, code) = if let Some(code) = code {
let code = code.to_string();
(
Some(severity(&code)),
tags(&code),
Some(lsp_types::NumberOrString::String(code)),
)
} else {
(None, None, None)
};
(
cell,
lsp_types::Diagnostic {
range,
severity,
tags,
code,
code_description: diagnostic.to_url().and_then(|url| {
severity: Some(severity(&code)),
tags: tags(&code),
code: Some(lsp_types::NumberOrString::String(code)),
code_description: rule.url().and_then(|url| {
Some(lsp_types::CodeDescription {
href: lsp_types::Url::parse(&url).ok()?,
})
@@ -317,6 +317,45 @@ fn to_lsp_diagnostic(
)
}
/// Converts a syntax-error [`Message`] into an LSP diagnostic, returning the
/// notebook cell index the diagnostic belongs to (always `0` for plain text
/// documents) together with the diagnostic itself.
///
/// The caller routes messages with no associated rule here (see the
/// `message.to_rule()` match in `check`), so the resulting diagnostic carries
/// no code, tags, fix data, or code description — only an `ERROR` severity and
/// the error's message body.
fn syntax_error_to_lsp_diagnostic(
syntax_error: &Message,
source_kind: &SourceKind,
index: &LineIndex,
encoding: PositionEncoding,
) -> (usize, lsp_types::Diagnostic) {
let range: lsp_types::Range;
let cell: usize;
// Notebooks need the error's text range translated into a (cell, range)
// pair via the notebook index; plain documents always report cell 0.
if let Some(notebook_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
NotebookRange { cell, range } = syntax_error.range().to_notebook_range(
source_kind.source_code(),
index,
notebook_index,
encoding,
);
} else {
// usize::default() == 0: the conventional cell index for non-notebook sources.
cell = usize::default();
range = syntax_error
.range()
.to_range(source_kind.source_code(), index, encoding);
}
(
cell,
lsp_types::Diagnostic {
range,
// Syntax errors are always surfaced at ERROR severity, unlike rule
// diagnostics whose severity is derived from the rule code.
severity: Some(lsp_types::DiagnosticSeverity::ERROR),
tags: None,
code: None,
code_description: None,
source: Some(DIAGNOSTIC_NAME.into()),
message: syntax_error.body().to_string(),
related_information: None,
data: None,
},
)
}
fn diagnostic_edit_range(
range: TextRange,
source_kind: &SourceKind,

View File

@@ -85,7 +85,7 @@ pub(crate) fn hover(
fn format_rule_text(rule: Rule) -> String {
let mut output = String::new();
let _ = write!(&mut output, "# {} ({})", rule.name(), rule.noqa_code());
let _ = write!(&mut output, "# {} ({})", rule.as_ref(), rule.noqa_code());
output.push('\n');
output.push('\n');

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_wasm"
version = "0.11.13"
version = "0.11.12"
publish = false
authors = { workspace = true }
edition = { workspace = true }

Some files were not shown because too many files have changed in this diff Show More