Compare commits

...

7 Commits

Author SHA1 Message Date
Charlie Marsh
59615486d8 Bump version to 0.0.134 2022-11-21 16:15:23 -05:00
Andri Bergsson
ce116a80ad Automatically remove redundant open modes #640 (#843) 2022-11-21 16:06:41 -05:00
Anders Kaseorg
731fba9006 Ignore clippy::unreadable-literal (pedantic) for CONFUSABLES (#864) 2022-11-21 16:00:05 -05:00
Anders Kaseorg
9bcf194fdc Ignore clippy::match-same-arms (pedantic) in a few places (#863) 2022-11-21 15:59:58 -05:00
Anders Kaseorg
58949b564e Fix clippy::trivially-copy-pass-by-ref (pedantic) (#862) 2022-11-21 15:59:51 -05:00
Anders Kaseorg
6b9e57fb78 Fix clippy::sort-unstable (pedantic) (#861) 2022-11-21 15:59:41 -05:00
Anders Kaseorg
cb119401a7 Fix clippy::inefficient-to-string (pedantic) (#860) 2022-11-21 15:59:35 -05:00
37 changed files with 706 additions and 73 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.0.133
rev: v0.0.134
hooks:
- id: ruff

6
Cargo.lock generated
View File

@@ -670,7 +670,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8-to-ruff"
version = "0.0.133-dev.0"
version = "0.0.134-dev.0"
dependencies = [
"anyhow",
"clap 4.0.22",
@@ -1768,7 +1768,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.133"
version = "0.0.134"
dependencies = [
"annotate-snippets 0.9.1",
"anyhow",
@@ -1818,7 +1818,7 @@ dependencies = [
[[package]]
name = "ruff_dev"
version = "0.0.133"
version = "0.0.134"
dependencies = [
"anyhow",
"clap 4.0.22",

View File

@@ -6,7 +6,7 @@ members = [
[package]
name = "ruff"
version = "0.0.133"
version = "0.0.134"
edition = "2021"
[lib]

View File

@@ -471,6 +471,7 @@ For more, see [pyupgrade](https://pypi.org/project/pyupgrade/3.2.0/) on PyPI.
| U012 | UnnecessaryEncodeUTF8 | Unnecessary call to `encode` as UTF-8 | 🛠 |
| U013 | ConvertTypedDictFunctionalToClass | Convert `...` from `TypedDict` functional to class syntax | 🛠 |
| U014 | ConvertNamedTupleFunctionalToClass | Convert `...` from `NamedTuple` functional to class syntax | 🛠 |
| U015 | RedundantOpenModes | Unnecessary open mode parameters | 🛠 |
### pep8-naming
@@ -826,7 +827,7 @@ including:
- [`flake8-boolean-trap`](https://pypi.org/project/flake8-boolean-trap/)
- [`mccabe`](https://pypi.org/project/mccabe/)
- [`isort`](https://pypi.org/project/isort/)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/33)
- [`pyupgrade`](https://pypi.org/project/pyupgrade/) (16/33)
- [`autoflake`](https://pypi.org/project/autoflake/) (1/7)
Beyond rule-set parity, Ruff suffers from the following limitations vis-à-vis Flake8:
@@ -858,7 +859,7 @@ Today, Ruff can be used to replace Flake8 when used with any of the following pl
- [`mccabe`](https://pypi.org/project/mccabe/)
Ruff can also replace [`isort`](https://pypi.org/project/isort/), [`yesqa`](https://github.com/asottile/yesqa),
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (15/33).
and a subset of the rules implemented in [`pyupgrade`](https://pypi.org/project/pyupgrade/) (16/33).
If you're looking to use Ruff, but rely on an unsupported Flake8 plugin, feel free to file an Issue.

View File

@@ -771,7 +771,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
[[package]]
name = "flake8_to_ruff"
version = "0.0.133"
version = "0.0.134"
dependencies = [
"anyhow",
"clap",
@@ -1975,7 +1975,7 @@ dependencies = [
[[package]]
name = "ruff"
version = "0.0.133"
version = "0.0.134"
dependencies = [
"anyhow",
"bincode",

View File

@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.0.133-dev.0"
version = "0.0.134-dev.0"
edition = "2021"
[lib]

38
resources/test/fixtures/U015.py vendored Normal file
View File

@@ -0,0 +1,38 @@
open("foo", "U")
open("foo", "Ur")
open("foo", "Ub")
open("foo", "rUb")
open("foo", "r")
open("foo", "rt")
open("f", "r", encoding="UTF-8")
open("f", "wt")
with open("foo", "U") as f:
pass
with open("foo", "Ur") as f:
pass
with open("foo", "Ub") as f:
pass
with open("foo", "rUb") as f:
pass
with open("foo", "r") as f:
pass
with open("foo", "rt") as f:
pass
with open("foo", "r", encoding="UTF-8") as f:
pass
with open("foo", "wt") as f:
pass
open(f("a", "b", "c"), "U")
open(f("a", "b", "c"), "Ub")
with open(f("a", "b", "c"), "U") as f:
pass
with open(f("a", "b", "c"), "Ub") as f:
pass
with open("foo", "U") as fa, open("bar", "U") as fb:
pass
with open("foo", "Ub") as fa, open("bar", "Ub") as fb:
pass

View File

@@ -1,6 +1,6 @@
[package]
name = "ruff_dev"
version = "0.0.133"
version = "0.0.134"
edition = "2021"
[dependencies]

View File

@@ -77,6 +77,7 @@ pub fn main(cli: &Cli) -> Result<()> {
.arg_ref_self()
.ret(Type::new("Vec<CheckCode>"))
.vis("pub")
.line("#[allow(clippy::match_same_arms)]")
.line("match self {");
for (prefix, codes) in &prefix_to_codes {
gen = gen.line(format!(
@@ -96,6 +97,7 @@ pub fn main(cli: &Cli) -> Result<()> {
.arg_ref_self()
.ret(Type::new("PrefixSpecificity"))
.vis("pub")
.line("#[allow(clippy::match_same_arms)]")
.line("match self {");
for prefix in prefix_to_codes.keys() {
let num_numeric = prefix.chars().filter(|char| char.is_numeric()).count();

View File

@@ -253,7 +253,7 @@ pub fn to_module_and_member(target: &str) -> (&str, &str) {
/// Convert a location within a file (relative to `base`) to an absolute
/// position.
pub fn to_absolute(relative: &Location, base: &Location) -> Location {
pub fn to_absolute(relative: Location, base: Location) -> Location {
if relative.row() == 1 {
Location::new(
relative.row() + base.row() - 1,

View File

@@ -94,7 +94,7 @@ fn apply_fixes<'a>(
}
// Add the remaining content.
let slice = locator.slice_source_code_at(&last_pos);
let slice = locator.slice_source_code_at(last_pos);
output.append(&slice);
Cow::from(output.finish())

View File

@@ -109,7 +109,7 @@ pub fn init() -> Result<()> {
Ok(())
}
fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
fn write_sync(key: u64, value: &[u8]) -> Result<(), std::io::Error> {
fs::write(
Path::new(cache_dir())
.join(content_dir())
@@ -118,7 +118,7 @@ fn write_sync(key: &u64, value: &[u8]) -> Result<(), std::io::Error> {
)
}
fn read_sync(key: &u64) -> Result<Vec<u8>, std::io::Error> {
fn read_sync(key: u64) -> Result<Vec<u8>, std::io::Error> {
fs::read(
Path::new(cache_dir())
.join(content_dir())
@@ -138,7 +138,7 @@ pub fn get(
return None;
};
if let Ok(encoded) = read_sync(&cache_key(path, settings, autofix)) {
if let Ok(encoded) = read_sync(cache_key(path, settings, autofix)) {
match bincode::deserialize::<CheckResult>(&encoded[..]) {
Ok(CheckResult {
metadata: CacheMetadata { mtime },
@@ -174,7 +174,7 @@ pub fn set(
messages,
};
if let Err(e) = write_sync(
&cache_key(path, settings, autofix),
cache_key(path, settings, autofix),
&bincode::serialize(&check_result).unwrap(),
) {
error!("Failed to write to cache: {e:?}")

View File

@@ -1516,6 +1516,10 @@ where
pyupgrade::plugins::type_of_primitive(self, expr, func, args);
}
if self.settings.enabled.contains(&CheckCode::U015) {
pyupgrade::plugins::redundant_open_modes(self, expr);
}
// flake8-boolean-trap
if self.settings.enabled.contains(&CheckCode::FBT003) {
flake8_boolean_trap::plugins::check_boolean_positional_value_in_function_call(
@@ -2594,7 +2598,7 @@ impl<'a> Checker<'a> {
if !scope.import_starred && !self.path.ends_with("__init__.py") {
if let Some(all_binding) = all_binding {
if let Some(names) = &all_names {
for name in names {
for &name in names {
if !scope.values.contains_key(name) {
checks.push(Check::new(
CheckKind::UndefinedExport(name.to_string()),
@@ -2622,7 +2626,7 @@ impl<'a> Checker<'a> {
}
from_list.sort();
for name in names {
for &name in names {
if !scope.values.contains_key(name) {
checks.push(Check::new(
CheckKind::ImportStarUsage(

View File

@@ -245,7 +245,7 @@ pub fn check_lines(
}
}
ignored.sort();
ignored.sort_unstable();
for index in ignored.iter().rev() {
checks.swap_remove(*index);
}

View File

@@ -26,7 +26,7 @@ pub fn check_tokens(
let enforce_invalid_escape_sequence = settings.enabled.contains(&CheckCode::W605);
let mut state_machine: StateMachine = Default::default();
for (start, tok, end) in tokens.iter().flatten() {
for &(start, ref tok, end) in tokens.iter().flatten() {
let is_docstring = if enforce_ambiguous_unicode_character || enforce_quotes {
state_machine.consume(tok)
} else {

View File

@@ -171,6 +171,7 @@ pub enum CheckCode {
U012,
U013,
U014,
U015,
// pydocstyle
D100,
D101,
@@ -504,6 +505,7 @@ pub enum CheckKind {
UnnecessaryEncodeUTF8,
ConvertTypedDictFunctionalToClass(String),
ConvertNamedTupleFunctionalToClass(String),
RedundantOpenModes,
// pydocstyle
BlankLineAfterLastSection(String),
BlankLineAfterSection(String),
@@ -779,6 +781,7 @@ impl CheckCode {
CheckCode::U012 => CheckKind::UnnecessaryEncodeUTF8,
CheckCode::U013 => CheckKind::ConvertTypedDictFunctionalToClass("...".to_string()),
CheckCode::U014 => CheckKind::ConvertNamedTupleFunctionalToClass("...".to_string()),
CheckCode::U015 => CheckKind::RedundantOpenModes,
// pydocstyle
CheckCode::D100 => CheckKind::PublicModule,
CheckCode::D101 => CheckKind::PublicClass,
@@ -881,6 +884,7 @@ impl CheckCode {
}
pub fn category(&self) -> CheckCategory {
#[allow(clippy::match_same_arms)]
match self {
CheckCode::E402 => CheckCategory::Pycodestyle,
CheckCode::E501 => CheckCategory::Pycodestyle,
@@ -1012,6 +1016,7 @@ impl CheckCode {
CheckCode::U012 => CheckCategory::Pyupgrade,
CheckCode::U013 => CheckCategory::Pyupgrade,
CheckCode::U014 => CheckCategory::Pyupgrade,
CheckCode::U015 => CheckCategory::Pyupgrade,
CheckCode::D100 => CheckCategory::Pydocstyle,
CheckCode::D101 => CheckCategory::Pydocstyle,
CheckCode::D102 => CheckCategory::Pydocstyle,
@@ -1237,6 +1242,7 @@ impl CheckKind {
CheckKind::UnnecessaryEncodeUTF8 => &CheckCode::U012,
CheckKind::ConvertTypedDictFunctionalToClass(_) => &CheckCode::U013,
CheckKind::ConvertNamedTupleFunctionalToClass(_) => &CheckCode::U014,
CheckKind::RedundantOpenModes => &CheckCode::U015,
// pydocstyle
CheckKind::BlankLineAfterLastSection(_) => &CheckCode::D413,
CheckKind::BlankLineAfterSection(_) => &CheckCode::D410,
@@ -1786,6 +1792,7 @@ impl CheckKind {
"Unnecessary parameters to `functools.lru_cache`".to_string()
}
CheckKind::UnnecessaryEncodeUTF8 => "Unnecessary call to `encode` as UTF-8".to_string(),
CheckKind::RedundantOpenModes => "Unnecessary open mode parameters".to_string(),
CheckKind::ConvertTypedDictFunctionalToClass(name) => {
format!("Convert `{name}` from `TypedDict` functional to class syntax")
}
@@ -2103,6 +2110,7 @@ impl CheckKind {
| CheckKind::UnnecessaryComprehension(..)
| CheckKind::UnnecessaryEncodeUTF8
| CheckKind::UnnecessaryFutureImport(..)
| CheckKind::RedundantOpenModes
| CheckKind::UnnecessaryGeneratorDict
| CheckKind::UnnecessaryGeneratorList
| CheckKind::UnnecessaryGeneratorSet

View File

@@ -301,6 +301,7 @@ pub enum CheckCodePrefix {
U012,
U013,
U014,
U015,
W,
W2,
W29,
@@ -337,6 +338,7 @@ pub enum PrefixSpecificity {
impl CheckCodePrefix {
pub fn codes(&self) -> Vec<CheckCode> {
#[allow(clippy::match_same_arms)]
match self {
CheckCodePrefix::A => vec![CheckCode::A001, CheckCode::A002, CheckCode::A003],
CheckCodePrefix::A0 => vec![CheckCode::A001, CheckCode::A002, CheckCode::A003],
@@ -1124,6 +1126,7 @@ impl CheckCodePrefix {
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
CheckCode::U015,
],
CheckCodePrefix::U0 => vec![
CheckCode::U001,
@@ -1139,6 +1142,7 @@ impl CheckCodePrefix {
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
CheckCode::U015,
],
CheckCodePrefix::U00 => vec![
CheckCode::U001,
@@ -1164,12 +1168,14 @@ impl CheckCodePrefix {
CheckCode::U012,
CheckCode::U013,
CheckCode::U014,
CheckCode::U015,
],
CheckCodePrefix::U010 => vec![CheckCode::U010],
CheckCodePrefix::U011 => vec![CheckCode::U011],
CheckCodePrefix::U012 => vec![CheckCode::U012],
CheckCodePrefix::U013 => vec![CheckCode::U013],
CheckCodePrefix::U014 => vec![CheckCode::U014],
CheckCodePrefix::U015 => vec![CheckCode::U015],
CheckCodePrefix::W => vec![CheckCode::W292, CheckCode::W605],
CheckCodePrefix::W2 => vec![CheckCode::W292],
CheckCodePrefix::W29 => vec![CheckCode::W292],
@@ -1221,6 +1227,7 @@ impl CheckCodePrefix {
impl CheckCodePrefix {
pub fn specificity(&self) -> PrefixSpecificity {
#[allow(clippy::match_same_arms)]
match self {
CheckCodePrefix::A => PrefixSpecificity::Category,
CheckCodePrefix::A0 => PrefixSpecificity::Hundreds,
@@ -1516,6 +1523,7 @@ impl CheckCodePrefix {
CheckCodePrefix::U012 => PrefixSpecificity::Explicit,
CheckCodePrefix::U013 => PrefixSpecificity::Explicit,
CheckCodePrefix::U014 => PrefixSpecificity::Explicit,
CheckCodePrefix::U015 => PrefixSpecificity::Explicit,
CheckCodePrefix::W => PrefixSpecificity::Category,
CheckCodePrefix::W2 => PrefixSpecificity::Hundreds,
CheckCodePrefix::W29 => PrefixSpecificity::Tens,

View File

@@ -38,7 +38,7 @@ pub struct Directives {
pub fn extract_directives(
lxr: &[LexResult],
locator: &SourceCodeLocator,
flags: &Flags,
flags: Flags,
) -> Directives {
Directives {
noqa_line_for: if flags.contains(Flags::NOQA) {
@@ -75,13 +75,13 @@ pub fn extract_noqa_line_for(lxr: &[LexResult]) -> IntMap<usize, usize> {
/// Extract a set of lines over which to disable isort.
pub fn extract_isort_exclusions(lxr: &[LexResult], locator: &SourceCodeLocator) -> IntSet<usize> {
let mut exclusions: IntSet<usize> = IntSet::default();
let mut off: Option<&Location> = None;
for (start, tok, end) in lxr.iter().flatten() {
let mut off: Option<Location> = None;
for &(start, ref tok, end) in lxr.iter().flatten() {
// TODO(charlie): Modify RustPython to include the comment text in the token.
if matches!(tok, Tok::Comment) {
let comment_text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
});
if off.is_some() {
if comment_text == "# isort: on" {

View File

@@ -26,7 +26,7 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
if matches!(tok, Tok::Lpar) {
if count == 0 {
location = Some(helpers::to_absolute(&start, &range.location));
location = Some(helpers::to_absolute(start, range.location));
}
count += 1;
}
@@ -34,7 +34,7 @@ fn match_tuple_range<T>(located: &Located<T>, locator: &SourceCodeLocator) -> Re
if matches!(tok, Tok::Rpar) {
count -= 1;
if count == 0 {
end_location = Some(helpers::to_absolute(&end, &range.location));
end_location = Some(helpers::to_absolute(end, range.location));
break;
}
}

View File

@@ -42,14 +42,14 @@ fn good_docstring(quote: &Quote) -> &str {
pub fn quotes(
locator: &SourceCodeLocator,
start: &Location,
end: &Location,
start: Location,
end: Location,
is_docstring: bool,
settings: &Settings,
) -> Option<Check> {
let text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
});
// Remove any prefixes (e.g., remove `u` from `u"foo"`).
@@ -77,8 +77,8 @@ pub fn quotes(
return Some(Check::new(
CheckKind::BadQuotesDocstring(settings.docstring_quotes.clone()),
Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
},
));
} else if is_multiline {
@@ -95,8 +95,8 @@ pub fn quotes(
return Some(Check::new(
CheckKind::BadQuotesMultilineString(settings.multiline_quotes.clone()),
Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
},
));
} else {
@@ -113,8 +113,8 @@ pub fn quotes(
return Some(Check::new(
CheckKind::AvoidQuoteEscape,
Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
},
));
}
@@ -126,8 +126,8 @@ pub fn quotes(
return Some(Check::new(
CheckKind::BadQuotesInlineString(settings.inline_quotes.clone()),
Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
},
));
}

View File

@@ -22,8 +22,8 @@ pub fn collect_comments<'a>(range: &Range, locator: &'a SourceCodeLocator) -> Ve
.flatten()
.filter_map(|(start, tok, end)| {
if matches!(tok, Tok::Comment) {
let start = helpers::to_absolute(&start, &range.location);
let end = helpers::to_absolute(&end, &range.location);
let start = helpers::to_absolute(start, range.location);
let end = helpers::to_absolute(end, range.location);
Some(Comment {
value: locator.slice_source_code_range(&Range {
location: start,

View File

@@ -38,7 +38,7 @@ pub fn format_import_from(
import_from: &ImportFromData,
comments: &CommentSet,
aliases: &[(AliasData, CommentSet)],
line_length: &usize,
line_length: usize,
is_first: bool,
) -> String {
// We can only inline if: (1) none of the aliases have atop comments, and (3)
@@ -54,7 +54,7 @@ pub fn format_import_from(
{
let (single_line, import_length) =
format_single_line(import_from, comments, aliases, is_first);
if import_length <= *line_length {
if import_length <= line_length {
return single_line;
}
}

View File

@@ -400,7 +400,7 @@ fn sort_imports(block: ImportBlock) -> OrderedImportBlock {
pub fn format_imports(
block: &[&Stmt],
comments: Vec<Comment>,
line_length: &usize,
line_length: usize,
src: &[PathBuf],
known_first_party: &BTreeSet<String>,
known_third_party: &BTreeSet<String>,

View File

@@ -55,7 +55,7 @@ pub fn check_imports(
let expected = format_imports(
&body,
comments,
&(settings.line_length - indentation.len()),
settings.line_length - indentation.len(),
&settings.src,
&settings.isort.known_first_party,
&settings.isort.known_third_party,

View File

@@ -89,7 +89,7 @@ pub fn check(path: &Path, contents: &str, autofix: bool) -> Result<Vec<Check>> {
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(&settings),
directives::Flags::from_settings(&settings),
);
// Generate checks.

View File

@@ -151,7 +151,7 @@ pub fn lint_stdin(
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
directives::Flags::from_settings(settings),
);
// Generate checks.
@@ -215,7 +215,7 @@ pub fn lint_path(
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
directives::Flags::from_settings(settings),
);
// Generate checks.
@@ -269,7 +269,7 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
directives::Flags::from_settings(settings),
);
// Generate checks.
@@ -310,7 +310,7 @@ pub fn test_path(path: &Path, settings: &Settings, autofix: &fixer::Mode) -> Res
let directives = directives::extract_directives(
&tokens,
&locator,
&directives::Flags::from_settings(settings),
directives::Flags::from_settings(settings),
);
check_path(
path,
@@ -518,6 +518,7 @@ mod tests {
#[test_case(CheckCode::U012, Path::new("U012.py"); "U012")]
#[test_case(CheckCode::U013, Path::new("U013.py"); "U013")]
#[test_case(CheckCode::U014, Path::new("U014.py"); "U014")]
#[test_case(CheckCode::U015, Path::new("U015.py"); "U015")]
#[test_case(CheckCode::W292, Path::new("W292_0.py"); "W292_0")]
#[test_case(CheckCode::W292, Path::new("W292_1.py"); "W292_1")]
#[test_case(CheckCode::W292, Path::new("W292_2.py"); "W292_2")]

View File

@@ -103,14 +103,14 @@ fn extract_quote(text: &str) -> &str {
/// W605
pub fn invalid_escape_sequence(
locator: &SourceCodeLocator,
start: &Location,
end: &Location,
start: Location,
end: Location,
) -> Vec<Check> {
let mut checks = vec![];
let text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
});
// Determine whether the string is single- or triple-quoted.

View File

@@ -63,13 +63,13 @@ pub fn unused_variables(scope: &Scope, dummy_variable_rgx: &Regex) -> Vec<Check>
return checks;
}
for (name, binding) in scope.values.iter() {
for (&name, binding) in scope.values.iter() {
if binding.used.is_none()
&& matches!(binding.kind, BindingKind::Assignment)
&& !dummy_variable_rgx.is_match(name)
&& name != &"__tracebackhide__"
&& name != &"__traceback_info__"
&& name != &"__traceback_supplement__"
&& name != "__tracebackhide__"
&& name != "__traceback_info__"
&& name != "__traceback_supplement__"
{
checks.push(Check::new(
CheckKind::UnusedVariable(name.to_string()),

View File

@@ -13,7 +13,7 @@ use crate::source_code_locator::SourceCodeLocator;
/// Generate a fix to remove a base from a ClassDef statement.
pub fn remove_class_def_base(
locator: &SourceCodeLocator,
stmt_at: &Location,
stmt_at: Location,
expr_at: Location,
bases: &[Expr],
keywords: &[Keyword],
@@ -28,7 +28,7 @@ pub fn remove_class_def_base(
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
if matches!(tok, Tok::Lpar) {
if count == 0 {
fix_start = Some(helpers::to_absolute(&start, stmt_at));
fix_start = Some(helpers::to_absolute(start, stmt_at));
}
count += 1;
}
@@ -36,7 +36,7 @@ pub fn remove_class_def_base(
if matches!(tok, Tok::Rpar) {
count -= 1;
if count == 0 {
fix_end = Some(helpers::to_absolute(&end, stmt_at));
fix_end = Some(helpers::to_absolute(end, stmt_at));
break;
}
}
@@ -59,7 +59,7 @@ pub fn remove_class_def_base(
let mut fix_end: Option<Location> = None;
let mut seen_comma = false;
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
let start = helpers::to_absolute(&start, stmt_at);
let start = helpers::to_absolute(start, stmt_at);
if seen_comma {
if matches!(tok, Tok::Newline) {
fix_end = Some(end);
@@ -86,8 +86,8 @@ pub fn remove_class_def_base(
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
for (start, tok, end) in lexer::make_tokenizer(&contents).flatten() {
let start = helpers::to_absolute(&start, stmt_at);
let end = helpers::to_absolute(&end, stmt_at);
let start = helpers::to_absolute(start, stmt_at);
let end = helpers::to_absolute(end, stmt_at);
if start == expr_at {
fix_end = Some(end);
break;

View File

@@ -30,7 +30,7 @@ static DEPRECATED_ALIASES: Lazy<FxHashMap<&'static str, &'static str>> = Lazy::n
/// U005
pub fn deprecated_unittest_alias(checker: &mut Checker, expr: &Expr) {
if let ExprKind::Attribute { value, attr, .. } = &expr.node {
if let Some(target) = DEPRECATED_ALIASES.get(attr.as_str()) {
if let Some(&target) = DEPRECATED_ALIASES.get(attr.as_str()) {
if let ExprKind::Name { id, .. } = &value.node {
if id == "self" {
let mut check = Check::new(

View File

@@ -1,6 +1,7 @@
pub use convert_named_tuple_functional_to_class::convert_named_tuple_functional_to_class;
pub use convert_typed_dict_functional_to_class::convert_typed_dict_functional_to_class;
pub use deprecated_unittest_alias::deprecated_unittest_alias;
pub use redundant_open_modes::redundant_open_modes;
pub use super_call_with_parameters::super_call_with_parameters;
pub use type_of_primitive::type_of_primitive;
pub use unnecessary_encode_utf8::unnecessary_encode_utf8;
@@ -14,6 +15,7 @@ pub use useless_object_inheritance::useless_object_inheritance;
mod convert_named_tuple_functional_to_class;
mod convert_typed_dict_functional_to_class;
mod deprecated_unittest_alias;
mod redundant_open_modes;
mod super_call_with_parameters;
mod type_of_primitive;
mod unnecessary_encode_utf8;

View File

@@ -0,0 +1,155 @@
use std::str::FromStr;
use anyhow::{anyhow, Result};
use log::error;
use rustpython_ast::{Constant, Expr, ExprKind, Located, Location};
use rustpython_parser::lexer;
use rustpython_parser::token::Tok;
use crate::ast::helpers::{self, match_name_or_attr};
use crate::ast::types::Range;
use crate::autofix::Fix;
use crate::check_ast::Checker;
use crate::checks::{Check, CheckCode, CheckKind};
use crate::source_code_locator::SourceCodeLocator;
// Name of the builtin whose call sites are inspected for redundant modes.
const OPEN_FUNC_NAME: &str = "open";

// The redundant `open` modes this rule recognizes: each either spells out the
// default text-read behavior (`r`, `rt`) or uses the removed/deprecated
// universal-newlines flag `U` (alone or combined with `r`/`b`).
enum OpenMode {
    U,
    Ur,
    Ub,
    RUb,
    R,
    Rt,
    Wt,
}
impl FromStr for OpenMode {
    type Err = anyhow::Error;

    /// Parse a raw mode string (e.g. `"rUb"`) into its [`OpenMode`] variant;
    /// any mode not in the redundant set is reported as an error.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let mode = match string {
            "U" => OpenMode::U,
            "Ur" => OpenMode::Ur,
            "Ub" => OpenMode::Ub,
            "rUb" => OpenMode::RUb,
            "r" => OpenMode::R,
            "rt" => OpenMode::Rt,
            "wt" => OpenMode::Wt,
            _ => return Err(anyhow!("Unknown open mode: {}", string)),
        };
        Ok(mode)
    }
}
impl OpenMode {
    /// The replacement mode literal (quotes included) to substitute for this
    /// redundant mode, or `None` when the mode argument should be deleted
    /// outright (it only restates the default).
    fn replacement_value(&self) -> Option<String> {
        match self {
            // Equivalent to the default mode: remove the argument entirely.
            OpenMode::U | OpenMode::Ur | OpenMode::R | OpenMode::Rt => None,
            // Binary reads keep the `b`, drop the redundant flags.
            OpenMode::Ub | OpenMode::RUb => Some(String::from("\"rb\"")),
            // Text is the default; `wt` collapses to `w`.
            OpenMode::Wt => Some(String::from("\"w\"")),
        }
    }
}
/// If `expr` is a call to the builtin `open`, return its positional "mode"
/// argument (the second positional argument), if present.
fn match_open(expr: &Expr) -> Option<&Expr> {
    match &expr.node {
        // TODO(andberger): Verify that "open" is still bound to the built-in function.
        ExprKind::Call { func, args, .. } if match_name_or_attr(func, OPEN_FUNC_NAME) => {
            // The open mode is the second positional argument.
            args.get(1)
        }
        _ => None,
    }
}
/// Build the U015 check for `expr`, attaching an autofix when `patch` is set:
/// either a replacement of the mode argument (when `replacement_value` is
/// `Some`) or a deletion of the argument and its preceding comma.
fn create_check(
    expr: &Expr,
    mode_param: &Expr,
    replacement_value: Option<String>,
    locator: &SourceCodeLocator,
    patch: bool,
) -> Check {
    // The check spans the entire `open(...)` call; the fix (if any) only
    // touches the mode argument.
    let mut check = Check::new(CheckKind::RedundantOpenModes, Range::from_located(expr));
    if patch {
        match replacement_value {
            // A replacement mode exists: overwrite the mode argument in place.
            Some(content) => check.amend(Fix::replacement(
                content,
                mode_param.location,
                mode_param.end_location.unwrap(),
            )),
            // No replacement: remove the argument entirely. Failure to build
            // the fix is logged rather than aborting the check.
            None => match create_remove_param_fix(locator, expr, mode_param) {
                Ok(fix) => check.amend(fix),
                Err(e) => error!("Failed to remove parameter: {}", e),
            },
        }
    }
    check
}
fn create_remove_param_fix(
locator: &SourceCodeLocator,
expr: &Expr,
mode_param: &Expr,
) -> Result<Fix> {
let content = locator.slice_source_code_range(&Range {
location: expr.location,
end_location: expr.end_location.unwrap(),
});
// Find the last comma before mode_param
// and delete that comma as well as mode_param.
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
for (start, tok, end) in lexer::make_tokenizer(&content).flatten() {
let start = helpers::to_absolute(start, expr.location);
let end = helpers::to_absolute(end, expr.location);
if start == mode_param.location {
fix_end = Some(end);
break;
}
if matches!(tok, Tok::Comma) {
fix_start = Some(start);
}
}
match (fix_start, fix_end) {
(Some(start), Some(end)) => Ok(Fix::deletion(start, end)),
_ => Err(anyhow::anyhow!(
"Failed to locate start and end parentheses."
)),
}
}
/// U015
pub fn redundant_open_modes(checker: &mut Checker, expr: &Expr) {
    // TODO(andberger): Add "mode" keyword argument handling to handle invocations
    // on the following formats:
    // - `open("foo", mode="U")`
    // - `open(name="foo", mode="U")`
    // - `open(mode="U", name="foo")`
    let mode_param = match match_open(expr) {
        Some(param) => param,
        // Not an `open(...)` call with a positional mode: nothing to do.
        None => return,
    };
    // Only string-literal modes can be analyzed statically.
    if let ExprKind::Constant {
        value: Constant::Str(mode_param_value),
        ..
    } = &mode_param.node
    {
        // Modes outside the redundant set parse to Err and are ignored.
        if let Ok(mode) = OpenMode::from_str(mode_param_value.as_str()) {
            checker.add_check(create_check(
                expr,
                mode_param,
                mode.replacement_value(),
                checker.locator,
                checker.patch(&CheckCode::U015),
            ));
        }
    }
}

View File

@@ -7,7 +7,7 @@ use crate::checks::{Check, CheckKind};
/// U006
pub fn use_pep585_annotation(checker: &mut Checker, expr: &Expr, id: &str) {
let replacement = checker.import_aliases.get(id).unwrap_or(&id);
let replacement = *checker.import_aliases.get(id).unwrap_or(&id);
let mut check = Check::new(
CheckKind::UsePEP585Annotation(replacement.to_string()),
Range::from_located(expr),

View File

@@ -17,7 +17,7 @@ pub fn useless_object_inheritance(
if checker.patch(check.kind.code()) {
if let Some(fix) = pyupgrade::fixes::remove_class_def_base(
checker.locator,
&stmt.location,
stmt.location,
check.location,
bases,
keywords,

View File

@@ -10,6 +10,7 @@ use crate::{Check, Settings};
/// See: https://github.com/microsoft/vscode/blob/095ddabc52b82498ee7f718a34f9dd11d59099a8/src/vs/base/common/strings.ts#L1094
static CONFUSABLES: Lazy<FxHashMap<u32, u32>> = Lazy::new(|| {
#[allow(clippy::unreadable_literal)]
FxHashMap::from_iter([
(8232, 32),
(8233, 32),
@@ -1603,8 +1604,8 @@ pub enum Context {
pub fn ambiguous_unicode_character(
locator: &SourceCodeLocator,
start: &Location,
end: &Location,
start: Location,
end: Location,
context: Context,
settings: &Settings,
autofix: &fixer::Mode,
@@ -1612,8 +1613,8 @@ pub fn ambiguous_unicode_character(
let mut checks = vec![];
let text = locator.slice_source_code_range(&Range {
location: *start,
end_location: *end,
location: start,
end_location: end,
});
let mut col_offset = 0;

View File

@@ -0,0 +1,413 @@
---
source: src/linter.rs
expression: checks
---
- kind: RedundantOpenModes
location:
row: 1
column: 0
end_location:
row: 1
column: 16
fix:
patch:
content: ""
location:
row: 1
column: 10
end_location:
row: 1
column: 15
applied: false
- kind: RedundantOpenModes
location:
row: 2
column: 0
end_location:
row: 2
column: 17
fix:
patch:
content: ""
location:
row: 2
column: 10
end_location:
row: 2
column: 16
applied: false
- kind: RedundantOpenModes
location:
row: 3
column: 0
end_location:
row: 3
column: 17
fix:
patch:
content: "\"rb\""
location:
row: 3
column: 12
end_location:
row: 3
column: 16
applied: false
- kind: RedundantOpenModes
location:
row: 4
column: 0
end_location:
row: 4
column: 18
fix:
patch:
content: "\"rb\""
location:
row: 4
column: 12
end_location:
row: 4
column: 17
applied: false
- kind: RedundantOpenModes
location:
row: 5
column: 0
end_location:
row: 5
column: 16
fix:
patch:
content: ""
location:
row: 5
column: 10
end_location:
row: 5
column: 15
applied: false
- kind: RedundantOpenModes
location:
row: 6
column: 0
end_location:
row: 6
column: 17
fix:
patch:
content: ""
location:
row: 6
column: 10
end_location:
row: 6
column: 16
applied: false
- kind: RedundantOpenModes
location:
row: 7
column: 0
end_location:
row: 7
column: 32
fix:
patch:
content: ""
location:
row: 7
column: 8
end_location:
row: 7
column: 13
applied: false
- kind: RedundantOpenModes
location:
row: 8
column: 0
end_location:
row: 8
column: 15
fix:
patch:
content: "\"w\""
location:
row: 8
column: 10
end_location:
row: 8
column: 14
applied: false
- kind: RedundantOpenModes
location:
row: 10
column: 5
end_location:
row: 10
column: 21
fix:
patch:
content: ""
location:
row: 10
column: 15
end_location:
row: 10
column: 20
applied: false
- kind: RedundantOpenModes
location:
row: 12
column: 5
end_location:
row: 12
column: 22
fix:
patch:
content: ""
location:
row: 12
column: 15
end_location:
row: 12
column: 21
applied: false
- kind: RedundantOpenModes
location:
row: 14
column: 5
end_location:
row: 14
column: 22
fix:
patch:
content: "\"rb\""
location:
row: 14
column: 17
end_location:
row: 14
column: 21
applied: false
- kind: RedundantOpenModes
location:
row: 16
column: 5
end_location:
row: 16
column: 23
fix:
patch:
content: "\"rb\""
location:
row: 16
column: 17
end_location:
row: 16
column: 22
applied: false
- kind: RedundantOpenModes
location:
row: 18
column: 5
end_location:
row: 18
column: 21
fix:
patch:
content: ""
location:
row: 18
column: 15
end_location:
row: 18
column: 20
applied: false
- kind: RedundantOpenModes
location:
row: 20
column: 5
end_location:
row: 20
column: 22
fix:
patch:
content: ""
location:
row: 20
column: 15
end_location:
row: 20
column: 21
applied: false
- kind: RedundantOpenModes
location:
row: 22
column: 5
end_location:
row: 22
column: 39
fix:
patch:
content: ""
location:
row: 22
column: 15
end_location:
row: 22
column: 20
applied: false
- kind: RedundantOpenModes
location:
row: 24
column: 5
end_location:
row: 24
column: 22
fix:
patch:
content: "\"w\""
location:
row: 24
column: 17
end_location:
row: 24
column: 21
applied: false
- kind: RedundantOpenModes
location:
row: 27
column: 0
end_location:
row: 27
column: 27
fix:
patch:
content: ""
location:
row: 27
column: 21
end_location:
row: 27
column: 26
applied: false
- kind: RedundantOpenModes
location:
row: 28
column: 0
end_location:
row: 28
column: 28
fix:
patch:
content: "\"rb\""
location:
row: 28
column: 23
end_location:
row: 28
column: 27
applied: false
- kind: RedundantOpenModes
location:
row: 30
column: 5
end_location:
row: 30
column: 32
fix:
patch:
content: ""
location:
row: 30
column: 26
end_location:
row: 30
column: 31
applied: false
- kind: RedundantOpenModes
location:
row: 32
column: 5
end_location:
row: 32
column: 33
fix:
patch:
content: "\"rb\""
location:
row: 32
column: 28
end_location:
row: 32
column: 32
applied: false
- kind: RedundantOpenModes
location:
row: 35
column: 5
end_location:
row: 35
column: 21
fix:
patch:
content: ""
location:
row: 35
column: 15
end_location:
row: 35
column: 20
applied: false
- kind: RedundantOpenModes
location:
row: 35
column: 29
end_location:
row: 35
column: 45
fix:
patch:
content: ""
location:
row: 35
column: 39
end_location:
row: 35
column: 44
applied: false
- kind: RedundantOpenModes
location:
row: 37
column: 5
end_location:
row: 37
column: 22
fix:
patch:
content: "\"rb\""
location:
row: 37
column: 17
end_location:
row: 37
column: 21
applied: false
- kind: RedundantOpenModes
location:
row: 37
column: 30
end_location:
row: 37
column: 47
fix:
patch:
content: "\"rb\""
location:
row: 37
column: 42
end_location:
row: 37
column: 46
applied: false

View File

@@ -25,7 +25,7 @@ impl<'a> SourceCodeLocator<'a> {
self.rope.get_or_init(|| Rope::from_str(self.contents))
}
pub fn slice_source_code_at(&self, location: &Location) -> Cow<'_, str> {
pub fn slice_source_code_at(&self, location: Location) -> Cow<'_, str> {
let rope = self.get_or_init_rope();
let offset = rope.line_to_char(location.row() - 1) + location.column();
Cow::from(rope.slice(offset..))