Compare commits

3 Commits: preview-bi...charlie/ex

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 4e23ee0e1d |  |
|  | 7a349660e4 |  |
|  | a9d13e6bc9 |  |

@@ -1,8 +0,0 @@
[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
failure-output = "immediate-final"
# Do not cancel the test run on the first failure.
fail-fast = false

status-level = "skip"
23 changes: .github/workflows/ci.yaml (vendored)
@@ -111,23 +111,13 @@ jobs:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install mold"
        uses: rui314/setup-mold@v1
      - name: "Install cargo nextest"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest
      - name: "Install cargo insta"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-insta
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests"
        shell: bash
        env:
          NEXTEST_PROFILE: "ci"
        run: cargo insta test --all-features --unreferenced reject --test-runner nextest

        run: cargo insta test --all --all-features --unreferenced reject
      # Check for broken links in the documentation.
      - run: cargo doc --all --no-deps
        env:
@@ -148,16 +138,15 @@ jobs:
      - uses: actions/checkout@v4
      - name: "Install Rust toolchain"
        run: rustup show
      - name: "Install cargo nextest"
      - name: "Install cargo insta"
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest
          tool: cargo-insta
      - uses: Swatinem/rust-cache@v2
      - name: "Run tests"
        shell: bash
        run: |
          cargo nextest run --all-features --profile ci
          cargo test --all-features --doc
        # We can't reject unreferenced snapshots on windows because flake8_executable can't run on windows
        run: cargo insta test --all --exclude ruff_dev --all-features

  cargo-test-wasm:
    name: "cargo test (wasm)"
@@ -418,7 +407,7 @@ jobs:
      - uses: actions/setup-python@v5
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        uses: webfactory/ssh-agent@v0.8.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
2 changes: .github/workflows/docs.yaml (vendored)
@@ -23,7 +23,7 @@ jobs:
      - uses: actions/setup-python@v5
      - name: "Add SSH key"
        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
        uses: webfactory/ssh-agent@v0.9.0
        uses: webfactory/ssh-agent@v0.8.0
        with:
          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
      - name: "Install Rust toolchain"
@@ -26,10 +26,6 @@ Welcome! We're happy to have you here. Thank you in advance for your contributio
- [`cargo dev`](#cargo-dev)
- [Subsystems](#subsystems)
  - [Compilation Pipeline](#compilation-pipeline)
  - [Import Categorization](#import-categorization)
    - [Project root](#project-root)
    - [Package root](#package-root)
    - [Import categorization](#import-categorization-1)

## The Basics
@@ -67,7 +63,7 @@ You'll also need [Insta](https://insta.rs/docs/) to update snapshot tests:
cargo install cargo-insta
```

And you'll need pre-commit to run some validation checks:
and pre-commit to run some validation checks:

```shell
pipx install pre-commit # or `pip install pre-commit` if you have a virtualenv
@@ -80,16 +76,6 @@ when making a commit:
pre-commit install
```

We recommend [nextest](https://nexte.st/) to run Ruff's test suite (via `cargo nextest run`),
though it's not strictly necessary:

```shell
cargo install cargo-nextest --locked
```

Throughout this guide, any usages of `cargo test` can be replaced with `cargo nextest run`,
if you choose to install `nextest`.
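As a point of reference for the snapshot tests mentioned above, an Insta snapshot test is just an ordinary Rust test. The sketch below is hypothetical (it is not taken from the Ruff codebase) and assumes `insta` is available as a dev-dependency:

```rust
// Hypothetical snapshot test, shown only to illustrate the workflow.
#[test]
fn summary_output() {
    let summary = format!("Checked {} files, found {} errors.", 10, 2);
    // On the first run, `cargo insta test` (or `cargo test` followed by
    // `cargo insta review`) records this value as a snapshot; later runs
    // fail if the output no longer matches the stored snapshot.
    insta::assert_snapshot!(summary);
}
```

Either runner executes tests like this one, which is why the guide treats `cargo test` and `cargo nextest run` as interchangeable.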
### Development

After cloning the repository, run Ruff locally from the repository root with:
@@ -387,11 +373,6 @@ We have several ways of benchmarking and profiling Ruff:
- Microbenchmarks which run the linter or the formatter on individual files. These run on pull requests.
- Profiling the linter on either the microbenchmarks or entire projects

> \[!NOTE\]
> When running benchmarks, ensure that your CPU is otherwise idle (e.g., close any background
> applications, like web browsers). You may also want to switch your CPU to a "performance"
> mode, if it exists, especially when benchmarking short-lived processes.

### CPython Benchmark

First, clone [CPython](https://github.com/python/cpython). It's a large and diverse Python codebase,
16 changes: Cargo.lock (generated)
@@ -273,9 +273,9 @@ dependencies = [

[[package]]
name = "chrono"
version = "0.4.34"
version = "0.4.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b"
checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb"
dependencies = [
 "android-tzdata",
 "iana-time-zone",
@@ -1037,9 +1037,9 @@ dependencies = [

[[package]]
name = "indicatif"
version = "0.17.8"
version = "0.17.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25"
dependencies = [
 "console",
 "instant",
@@ -2944,18 +2944,18 @@ dependencies = [

[[package]]
name = "thiserror"
version = "1.0.57"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b"
checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad"
dependencies = [
 "thiserror-impl",
]

[[package]]
name = "thiserror-impl"
version = "1.0.57"
version = "1.0.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81"
checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471"
dependencies = [
 "proc-macro2",
 "quote",
@@ -21,7 +21,7 @@ bincode = { version = "1.3.3" }
bitflags = { version = "2.4.1" }
bstr = { version = "1.9.0" }
cachedir = { version = "0.3.1" }
chrono = { version = "0.4.34", default-features = false, features = ["clock"] }
chrono = { version = "0.4.33", default-features = false, features = ["clock"] }
clap = { version = "4.4.18", features = ["derive"] }
clap_complete_command = { version = "0.5.1" }
clearscreen = { version = "2.0.0" }
@@ -44,7 +44,7 @@ hexf-parse = { version ="0.2.1"}
ignore = { version = "0.4.22" }
imara-diff ={ version = "0.1.5"}
imperative = { version = "1.0.4" }
indicatif ={ version = "0.17.8"}
indicatif ={ version = "0.17.7"}
indoc ={ version = "2.0.4"}
insta = { version = "1.34.0", feature = ["filters", "glob"] }
insta-cmd = { version = "0.4.0" }
@@ -92,7 +92,7 @@ strum_macros = { version = "0.25.3" }
syn = { version = "2.0.40" }
tempfile = { version ="3.9.0"}
test-case = { version = "3.3.1" }
thiserror = { version = "1.0.57" }
thiserror = { version = "1.0.51" }
tikv-jemallocator = { version ="0.5.0"}
toml = { version = "0.8.9" }
tracing = { version = "0.1.40" }
@@ -48,7 +48,6 @@ serde = { workspace = true }
serde_json = { workspace = true }
shellexpand = { workspace = true }
strum = { workspace = true, features = [] }
tempfile = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true, features = ["log"] }
@@ -1,7 +1,7 @@
use std::fmt::Debug;
use std::fs::{self, File};
use std::hash::Hasher;
use std::io::{self, BufReader, Write};
use std::io::{self, BufReader, BufWriter, Write};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Mutex;
@@ -15,7 +15,6 @@ use rayon::iter::ParallelIterator;
use rayon::iter::{IntoParallelIterator, ParallelBridge};
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use tempfile::NamedTempFile;

use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_diagnostics::{DiagnosticKind, Fix};
@@ -166,29 +165,15 @@ impl Cache {
            return Ok(());
        }

        // Write the cache to a temporary file first and then rename it for an "atomic" write.
        // Protects against data loss if the process is killed during the write and races between different ruff
        // processes, resulting in a corrupted cache file. https://github.com/astral-sh/ruff/issues/8147#issuecomment-1943345964
        let mut temp_file =
            NamedTempFile::new_in(self.path.parent().expect("Write path must have a parent"))
                .context("Failed to create temporary file")?;

        // Serialize to in-memory buffer because hyperfine benchmark showed that it's faster than
        // using a `BufWriter` and our cache files are small enough that streaming isn't necessary.
        let serialized =
            bincode::serialize(&self.package).context("Failed to serialize cache data")?;
        temp_file
            .write_all(&serialized)
            .context("Failed to write serialized cache to temporary file.")?;

        temp_file.persist(&self.path).with_context(|| {
        let file = File::create(&self.path)
            .with_context(|| format!("Failed to create cache file '{}'", self.path.display()))?;
        let writer = BufWriter::new(file);
        bincode::serialize_into(writer, &self.package).with_context(|| {
            format!(
                "Failed to rename temporary cache file to {}",
                "Failed to serialise cache to file '{}'",
                self.path.display()
            )
        })?;

        Ok(())
        })
    }

    /// Applies the pending changes without storing the cache to disk.
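Stepping outside the diff for a moment: the comments above describe the write-then-rename strategy that replaces the old direct `File::create` path. A minimal standalone sketch of that pattern, assuming the `tempfile` crate and with error handling reduced to `std::io` (the function name and signature are illustrative, not Ruff's actual API):

```rust
use std::io::{self, Write};
use std::path::Path;

use tempfile::NamedTempFile;

/// Illustrative helper (not part of Ruff): write already-serialized bytes to a
/// temporary file in the target's own directory, then rename it into place so
/// a crash or a concurrent process never observes a half-written file.
fn write_atomically(path: &Path, data: &[u8]) -> io::Result<()> {
    let dir = path.parent().expect("target path must have a parent");
    let mut temp = NamedTempFile::new_in(dir)?;
    temp.write_all(data)?;
    // `persist` renames the temporary file over `path`.
    temp.persist(path).map_err(|err| err.error)?;
    Ok(())
}
```

The rename is only atomic when the temporary file lives on the same filesystem as the target, which is why the temp file is created next to the cache file rather than in the system temp directory.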
@@ -25,15 +25,6 @@ import cycles. They also increase the cognitive load of reading the code.
If an import statement is used to check for the availability or existence
of a module, consider using `importlib.util.find_spec` instead.

If an import statement is used to re-export a symbol as part of a module's
public interface, consider using a "redundant" import alias, which
instructs Ruff (and other tools) to respect the re-export, and avoid
marking it as unused, as in:

```python
from module import member as member
```

## Example
```python
import numpy as np # unused import
@@ -60,12 +51,11 @@ else:
```

## Options
- `lint.ignore-init-module-imports`
- `lint.pyflakes.extend-generics`

## References
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
- [Typing documentation: interface conventions](https://typing.readthedocs.io/en/latest/source/libraries.html#library-interface-public-and-private-symbols)

----- stderr -----
@@ -13,7 +13,6 @@ license = { workspace = true }

[lib]
bench = false
doctest = false

[[bench]]
name = "linter"
@@ -11,7 +11,6 @@ repository = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[dependencies]
ruff_text_size = { path = "../ruff_text_size" }
@@ -1,8 +1,7 @@
use std::cell::Cell;
use std::num::NonZeroU8;

use crate::format_element::PrintMode;
use crate::{GroupId, TextSize};
use std::cell::Cell;
use std::num::NonZeroU8;

/// A Tag marking the start and end of some content to which some special formatting should be applied.
///
@@ -100,10 +99,6 @@ pub enum Tag {
}

impl Tag {
    pub const fn align(count: NonZeroU8) -> Tag {
        Tag::StartAlign(Align(count))
    }

    /// Returns `true` if `self` is any start tag.
    pub const fn is_start(&self) -> bool {
        matches!(
@@ -11,7 +11,6 @@ repository = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[dependencies]
ruff_macros = { path = "../ruff_macros" }
@@ -11,25 +11,13 @@ class _UnusedTypedDict2(typing.TypedDict):
|
||||
|
||||
|
||||
class _UsedTypedDict(TypedDict):
|
||||
foo: bytes
|
||||
foo: bytes
|
||||
|
||||
|
||||
class _CustomClass(_UsedTypedDict):
|
||||
bar: list[int]
|
||||
|
||||
|
||||
_UnusedTypedDict3 = TypedDict("_UnusedTypedDict3", {"foo": int})
|
||||
_UsedTypedDict3 = TypedDict("_UsedTypedDict3", {"bar": bytes})
|
||||
|
||||
|
||||
def uses_UsedTypedDict3(arg: _UsedTypedDict3) -> None: ...
|
||||
|
||||
|
||||
# In `.py` files, we don't flag unused definitions in class scopes (unlike in `.pyi`
|
||||
# files).
|
||||
class _CustomClass3:
|
||||
class _UnusedTypeDict4(TypedDict):
|
||||
pass
|
||||
|
||||
def method(self) -> None:
|
||||
_CustomClass3._UnusedTypeDict4()
|
||||
|
||||
@@ -35,13 +35,3 @@ _UnusedTypedDict3 = TypedDict("_UnusedTypedDict3", {"foo": int})
|
||||
_UsedTypedDict3 = TypedDict("_UsedTypedDict3", {"bar": bytes})
|
||||
|
||||
def uses_UsedTypedDict3(arg: _UsedTypedDict3) -> None: ...
|
||||
|
||||
|
||||
# In `.pyi` files, we flag unused definitions in class scopes as well as in the global
|
||||
# scope (unlike in `.py` files).
|
||||
class _CustomClass3:
|
||||
class _UnusedTypeDict4(TypedDict):
|
||||
pass
|
||||
|
||||
def method(self) -> None:
|
||||
_CustomClass3._UnusedTypeDict4()
|
||||
|
||||
@@ -36,47 +36,35 @@ for i in list( # Comment
|
||||
): # PERF101
|
||||
pass
|
||||
|
||||
for i in list(foo_dict): # OK
|
||||
for i in list(foo_dict): # Ok
|
||||
pass
|
||||
|
||||
for i in list(1): # OK
|
||||
for i in list(1): # Ok
|
||||
pass
|
||||
|
||||
for i in list(foo_int): # OK
|
||||
for i in list(foo_int): # Ok
|
||||
pass
|
||||
|
||||
|
||||
import itertools
|
||||
|
||||
for i in itertools.product(foo_int): # OK
|
||||
for i in itertools.product(foo_int): # Ok
|
||||
pass
|
||||
|
||||
for i in list(foo_list): # OK
|
||||
for i in list(foo_list): # Ok
|
||||
foo_list.append(i + 1)
|
||||
|
||||
for i in list(foo_list): # PERF101
|
||||
# Make sure we match the correct list
|
||||
other_list.append(i + 1)
|
||||
|
||||
for i in list(foo_tuple): # OK
|
||||
for i in list(foo_tuple): # Ok
|
||||
foo_tuple.append(i + 1)
|
||||
|
||||
for i in list(foo_set): # OK
|
||||
for i in list(foo_set): # Ok
|
||||
foo_set.append(i + 1)
|
||||
|
||||
x, y, nested_tuple = (1, 2, (3, 4, 5))
|
||||
|
||||
for i in list(nested_tuple): # PERF101
|
||||
pass
|
||||
|
||||
for i in list(foo_list): # OK
|
||||
if True:
|
||||
foo_list.append(i + 1)
|
||||
|
||||
for i in list(foo_list): # OK
|
||||
if True:
|
||||
foo_list[i] = i + 1
|
||||
|
||||
for i in list(foo_list): # OK
|
||||
if True:
|
||||
del foo_list[i + 1]
|
||||
|
||||
@@ -4,12 +4,7 @@
|
||||
1 in (
|
||||
1, 2, 3
|
||||
)
|
||||
fruits = ["cherry", "grapes"]
|
||||
"cherry" in fruits
|
||||
_ = {key: value for key, value in {"a": 1, "b": 2}.items() if key in ("a", "b")}
|
||||
|
||||
# OK
|
||||
fruits in [[1, 2, 3], [4, 5, 6]]
|
||||
fruits in [1, 2, 3]
|
||||
1 in [[1, 2, 3], [4, 5, 6]]
|
||||
_ = {key: value for key, value in {"a": 1, "b": 2}.items() if key in (["a", "b"], ["c", "d"])}
|
||||
fruits = ["cherry", "grapes"]
|
||||
"cherry" in fruits
|
||||
|
||||
@@ -35,15 +35,6 @@ if argc != 0: # correct
|
||||
if argc != 1: # correct
|
||||
pass
|
||||
|
||||
if argc != -1.0: # correct
|
||||
pass
|
||||
|
||||
if argc != 0.0: # correct
|
||||
pass
|
||||
|
||||
if argc != 1.0: # correct
|
||||
pass
|
||||
|
||||
if argc != 2: # [magic-value-comparison]
|
||||
pass
|
||||
|
||||
@@ -53,12 +44,6 @@ if argc != -2: # [magic-value-comparison]
|
||||
if argc != +2: # [magic-value-comparison]
|
||||
pass
|
||||
|
||||
if argc != -2.0: # [magic-value-comparison]
|
||||
pass
|
||||
|
||||
if argc != +2.0: # [magic-value-comparison]
|
||||
pass
|
||||
|
||||
if __name__ == "__main__": # correct
|
||||
pass
|
||||
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
import codecs
|
||||
import io
|
||||
from pathlib import Path
|
||||
|
||||
# Errors
|
||||
with open("FURB129.py") as f:
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
a = [line.lower() for line in f.readlines()]
|
||||
b = {line.upper() for line in f.readlines()}
|
||||
c = {line.lower(): line.upper() for line in f.readlines()}
|
||||
|
||||
with Path("FURB129.py").open() as f:
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
|
||||
for _line in open("FURB129.py").readlines():
|
||||
pass
|
||||
|
||||
for _line in Path("FURB129.py").open().readlines():
|
||||
pass
|
||||
|
||||
|
||||
def func():
|
||||
f = Path("FURB129.py").open()
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
f.close()
|
||||
|
||||
|
||||
def func(f: io.BytesIO):
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
|
||||
|
||||
def func():
|
||||
with (open("FURB129.py") as f, foo as bar):
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
for _line in bar.readlines():
|
||||
pass
|
||||
|
||||
|
||||
# False positives
|
||||
def func(f):
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
|
||||
|
||||
def func(f: codecs.StreamReader):
|
||||
for _line in f.readlines():
|
||||
pass
|
||||
|
||||
|
||||
def func():
|
||||
class A:
|
||||
def readlines(self) -> list[str]:
|
||||
return ["a", "b", "c"]
|
||||
|
||||
return A()
|
||||
|
||||
|
||||
for _line in func().readlines():
|
||||
pass
|
||||
|
||||
# OK
|
||||
for _line in ["a", "b", "c"]:
|
||||
pass
|
||||
with open("FURB129.py") as f:
|
||||
for _line in f:
|
||||
pass
|
||||
for _line in f.readlines(10):
|
||||
pass
|
||||
for _not_line in f.readline():
|
||||
pass
|
||||
@@ -162,26 +162,3 @@ async def f(x: bool):
|
||||
T = asyncio.create_task(asyncio.sleep(1))
|
||||
else:
|
||||
T = None
|
||||
|
||||
|
||||
# Error
|
||||
def f():
|
||||
loop = asyncio.new_event_loop()
|
||||
loop.create_task(main()) # Error
|
||||
|
||||
# Error
|
||||
def f():
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(main()) # Error
|
||||
|
||||
# OK
|
||||
def f():
|
||||
global task
|
||||
loop = asyncio.new_event_loop()
|
||||
task = loop.create_task(main()) # Error
|
||||
|
||||
# OK
|
||||
def f():
|
||||
global task
|
||||
loop = asyncio.get_event_loop()
|
||||
task = loop.create_task(main()) # Error
|
||||
|
||||
@@ -2,14 +2,11 @@ use ruff_python_ast::Comprehension;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{flake8_simplify, refurb};
|
||||
use crate::rules::flake8_simplify;
|
||||
|
||||
/// Run lint rules over a [`Comprehension`] syntax nodes.
|
||||
pub(crate) fn comprehension(comprehension: &Comprehension, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::InDictKeys) {
|
||||
flake8_simplify::rules::key_in_dict_comprehension(checker, comprehension);
|
||||
}
|
||||
if checker.enabled(Rule::ReadlinesInFor) {
|
||||
refurb::rules::readlines_in_comprehension(checker, comprehension);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -281,21 +281,17 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
|
||||
if checker.source_type.is_stub()
|
||||
|| matches!(scope.kind, ScopeKind::Module | ScopeKind::Function(_))
|
||||
{
|
||||
if checker.enabled(Rule::UnusedPrivateTypeVar) {
|
||||
flake8_pyi::rules::unused_private_type_var(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateProtocol) {
|
||||
flake8_pyi::rules::unused_private_protocol(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateTypeAlias) {
|
||||
flake8_pyi::rules::unused_private_type_alias(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateTypedDict) {
|
||||
flake8_pyi::rules::unused_private_typed_dict(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateTypeVar) {
|
||||
flake8_pyi::rules::unused_private_type_var(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateProtocol) {
|
||||
flake8_pyi::rules::unused_private_protocol(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateTypeAlias) {
|
||||
flake8_pyi::rules::unused_private_type_alias(checker, scope, &mut diagnostics);
|
||||
}
|
||||
if checker.enabled(Rule::UnusedPrivateTypedDict) {
|
||||
flake8_pyi::rules::unused_private_typed_dict(checker, scope, &mut diagnostics);
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::AsyncioDanglingTask) {
|
||||
|
||||
@@ -343,15 +343,16 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
Expr::Call(
|
||||
call @ ast::ExprCall {
|
||||
func,
|
||||
arguments:
|
||||
Arguments {
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
},
|
||||
arguments,
|
||||
range: _,
|
||||
},
|
||||
) => {
|
||||
let Arguments {
|
||||
args,
|
||||
keywords,
|
||||
range: _,
|
||||
} = &**arguments;
|
||||
|
||||
if checker.any_enabled(&[
|
||||
// pylint
|
||||
Rule::BadStringFormatCharacter,
|
||||
|
||||
@@ -1317,9 +1317,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
if checker.enabled(Rule::UnnecessaryDictIndexLookup) {
|
||||
pylint::rules::unnecessary_dict_index_lookup(checker, for_stmt);
|
||||
}
|
||||
if checker.enabled(Rule::ReadlinesInFor) {
|
||||
refurb::rules::readlines_in_for(checker, for_stmt);
|
||||
}
|
||||
if !is_async {
|
||||
if checker.enabled(Rule::ReimplementedBuiltin) {
|
||||
flake8_simplify::rules::convert_for_loop_to_any_all(checker, stmt);
|
||||
|
||||
@@ -40,7 +40,7 @@ use ruff_diagnostics::{Diagnostic, IsolationLevel};
|
||||
use ruff_notebook::{CellOffsets, NotebookIndex};
|
||||
use ruff_python_ast::all::{extract_all_names, DunderAllFlags};
|
||||
use ruff_python_ast::helpers::{
|
||||
collect_import_from_member, extract_handled_exceptions, is_docstring_stmt, to_module_path,
|
||||
collect_import_from_member, extract_handled_exceptions, to_module_path,
|
||||
};
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
@@ -71,38 +71,6 @@ mod analyze;
|
||||
mod annotation;
|
||||
mod deferred;
|
||||
|
||||
/// State representing whether a docstring is expected or not for the next statement.
|
||||
#[derive(Default, Debug, Copy, Clone, PartialEq)]
|
||||
enum DocstringState {
|
||||
/// The next statement is expected to be a docstring, but not necessarily so.
|
||||
///
|
||||
/// For example, in the following code:
|
||||
///
|
||||
/// ```python
|
||||
/// class Foo:
|
||||
/// pass
|
||||
///
|
||||
///
|
||||
/// def bar(x, y):
|
||||
/// """Docstring."""
|
||||
/// return x + y
|
||||
/// ```
|
||||
///
|
||||
/// For `Foo`, the state is expected when the checker is visiting the class
|
||||
/// body but isn't going to be present. While, for `bar` function, the docstring
|
||||
/// is expected and present.
|
||||
#[default]
|
||||
Expected,
|
||||
Other,
|
||||
}
|
||||
|
||||
impl DocstringState {
|
||||
/// Returns `true` if the next statement is expected to be a docstring.
|
||||
const fn is_expected(self) -> bool {
|
||||
matches!(self, DocstringState::Expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Checker<'a> {
|
||||
/// The [`Path`] to the file under analysis.
|
||||
path: &'a Path,
|
||||
@@ -146,8 +114,6 @@ pub(crate) struct Checker<'a> {
|
||||
pub(crate) flake8_bugbear_seen: Vec<TextRange>,
|
||||
/// The end offset of the last visited statement.
|
||||
last_stmt_end: TextSize,
|
||||
/// A state describing if a docstring is expected or not.
|
||||
docstring_state: DocstringState,
|
||||
}
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
@@ -187,7 +153,6 @@ impl<'a> Checker<'a> {
|
||||
cell_offsets,
|
||||
notebook_index,
|
||||
last_stmt_end: TextSize::default(),
|
||||
docstring_state: DocstringState::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -340,16 +305,19 @@ where
|
||||
self.semantic.flags -= SemanticModelFlags::IMPORT_BOUNDARY;
|
||||
}
|
||||
|
||||
// Track whether we've seen module docstrings, non-imports, etc.
|
||||
// Track whether we've seen docstrings, non-imports, etc.
|
||||
match stmt {
|
||||
Stmt::Expr(ast::StmtExpr { value, .. })
|
||||
if !self.semantic.seen_module_docstring_boundary()
|
||||
if !self
|
||||
.semantic
|
||||
.flags
|
||||
.intersects(SemanticModelFlags::MODULE_DOCSTRING)
|
||||
&& value.is_string_literal_expr() =>
|
||||
{
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING;
|
||||
}
|
||||
Stmt::ImportFrom(ast::StmtImportFrom { module, names, .. }) => {
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING;
|
||||
|
||||
// Allow __future__ imports until we see a non-__future__ import.
|
||||
if let Some("__future__") = module.as_deref() {
|
||||
@@ -364,11 +332,11 @@ where
|
||||
}
|
||||
}
|
||||
Stmt::Import(_) => {
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING;
|
||||
self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
|
||||
}
|
||||
_ => {
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING_BOUNDARY;
|
||||
self.semantic.flags |= SemanticModelFlags::MODULE_DOCSTRING;
|
||||
self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
|
||||
if !(self.semantic.seen_import_boundary()
|
||||
|| helpers::is_assignment_to_a_dunder(stmt)
|
||||
@@ -385,16 +353,6 @@ where
|
||||
// the node.
|
||||
let flags_snapshot = self.semantic.flags;
|
||||
|
||||
// Update the semantic model if it is in a docstring. This should be done after the
|
||||
// flags snapshot to ensure that it gets reset once the statement is analyzed.
|
||||
if self.docstring_state.is_expected() {
|
||||
if is_docstring_stmt(stmt) {
|
||||
self.semantic.flags |= SemanticModelFlags::DOCSTRING;
|
||||
}
|
||||
// Reset the state irrespective of whether the statement is a docstring or not.
|
||||
self.docstring_state = DocstringState::Other;
|
||||
}
|
||||
|
||||
// Step 1: Binding
|
||||
match stmt {
|
||||
Stmt::AugAssign(ast::StmtAugAssign {
|
||||
@@ -696,8 +654,6 @@ where
|
||||
self.semantic.set_globals(globals);
|
||||
}
|
||||
|
||||
// Set the docstring state before visiting the class body.
|
||||
self.docstring_state = DocstringState::Expected;
|
||||
self.visit_body(body);
|
||||
}
|
||||
Stmt::TypeAlias(ast::StmtTypeAlias {
|
||||
@@ -948,7 +904,7 @@ where
|
||||
range: _,
|
||||
}) => {
|
||||
if let Expr::Name(ast::ExprName { id, ctx, range: _ }) = func.as_ref() {
|
||||
if id == "locals" && ctx.is_load() {
|
||||
if &**id == "locals" && ctx.is_load() {
|
||||
let scope = self.semantic.current_scope_mut();
|
||||
scope.set_uses_locals();
|
||||
}
|
||||
@@ -1117,7 +1073,7 @@ where
|
||||
range: _,
|
||||
} = keyword;
|
||||
if let Some(id) = arg {
|
||||
if id.as_str() == "bound" {
|
||||
if &**id == "bound" {
|
||||
self.visit_type_definition(value);
|
||||
} else {
|
||||
self.visit_non_type_definition(value);
|
||||
@@ -1161,7 +1117,7 @@ where
|
||||
match (arg.as_ref(), value) {
|
||||
// Ex) NamedTuple("a", **{"a": int})
|
||||
(None, Expr::Dict(ast::ExprDict { keys, values, .. })) => {
|
||||
for (key, value) in keys.iter().zip(values) {
|
||||
for (key, value) in keys.iter().zip(values.iter()) {
|
||||
if let Some(key) = key.as_ref() {
|
||||
self.visit_non_type_definition(key);
|
||||
self.visit_type_definition(value);
|
||||
@@ -1197,7 +1153,7 @@ where
|
||||
for key in keys.iter().flatten() {
|
||||
self.visit_non_type_definition(key);
|
||||
}
|
||||
for value in values {
|
||||
for value in values.iter() {
|
||||
self.visit_type_definition(value);
|
||||
}
|
||||
} else {
|
||||
@@ -1332,16 +1288,6 @@ where
|
||||
self.semantic.flags |= SemanticModelFlags::F_STRING;
|
||||
visitor::walk_expr(self, expr);
|
||||
}
|
||||
Expr::NamedExpr(ast::ExprNamedExpr {
|
||||
target,
|
||||
value,
|
||||
range: _,
|
||||
}) => {
|
||||
self.visit_expr(value);
|
||||
|
||||
self.semantic.flags |= SemanticModelFlags::NAMED_EXPRESSION_ASSIGNMENT;
|
||||
self.visit_expr(target);
|
||||
}
|
||||
_ => visitor::walk_expr(self, expr),
|
||||
}
|
||||
|
||||
@@ -1558,8 +1504,6 @@ impl<'a> Checker<'a> {
|
||||
unreachable!("Generator expression must contain at least one generator");
|
||||
};
|
||||
|
||||
let flags = self.semantic.flags;
|
||||
|
||||
// Generators are compiled as nested functions. (This may change with PEP 709.)
|
||||
// As such, the `iter` of the first generator is evaluated in the outer scope, while all
|
||||
// subsequent nodes are evaluated in the inner scope.
|
||||
@@ -1589,22 +1533,14 @@ impl<'a> Checker<'a> {
|
||||
// `x` is local to `foo`, and the `T` in `y=T` skips the class scope when resolving.
|
||||
self.visit_expr(&generator.iter);
|
||||
self.semantic.push_scope(ScopeKind::Generator);
|
||||
|
||||
self.semantic.flags = flags | SemanticModelFlags::COMPREHENSION_ASSIGNMENT;
|
||||
self.visit_expr(&generator.target);
|
||||
self.semantic.flags = flags;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_boolean_test(expr);
|
||||
}
|
||||
|
||||
for generator in iterator {
|
||||
self.visit_expr(&generator.iter);
|
||||
|
||||
self.semantic.flags = flags | SemanticModelFlags::COMPREHENSION_ASSIGNMENT;
|
||||
self.visit_expr(&generator.target);
|
||||
self.semantic.flags = flags;
|
||||
|
||||
for expr in &generator.ifs {
|
||||
self.visit_boolean_test(expr);
|
||||
}
|
||||
@@ -1803,21 +1739,11 @@ impl<'a> Checker<'a> {
|
||||
return;
|
||||
}
|
||||
|
||||
// A binding within a `for` must be a loop variable, as in:
|
||||
// ```python
|
||||
// for x in range(10):
|
||||
// ...
|
||||
// ```
|
||||
if parent.is_for_stmt() {
|
||||
self.add_binding(id, expr.range(), BindingKind::LoopVar, flags);
|
||||
return;
|
||||
}
|
||||
|
||||
// A binding within a `with` must be an item, as in:
|
||||
// ```python
|
||||
// with open("file.txt") as fp:
|
||||
// ...
|
||||
// ```
|
||||
if parent.is_with_stmt() {
|
||||
self.add_binding(id, expr.range(), BindingKind::WithItemVar, flags);
|
||||
return;
|
||||
@@ -1829,21 +1755,21 @@ impl<'a> Checker<'a> {
|
||||
&& match parent {
|
||||
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
|
||||
if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
|
||||
id == "__all__"
|
||||
&**id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
Stmt::AugAssign(ast::StmtAugAssign { target, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
id == "__all__"
|
||||
&**id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
|
||||
id == "__all__"
|
||||
&**id == "__all__"
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -1873,26 +1799,17 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
// If the expression is the left-hand side of a walrus operator, then it's a named
|
||||
// expression assignment, as in:
|
||||
// ```python
|
||||
// if (x := 10) > 5:
|
||||
// ...
|
||||
// ```
|
||||
if self.semantic.in_named_expression_assignment() {
|
||||
// expression assignment.
|
||||
if self
|
||||
.semantic
|
||||
.current_expressions()
|
||||
.filter_map(Expr::as_named_expr_expr)
|
||||
.any(|parent| parent.target.as_ref() == expr)
|
||||
{
|
||||
self.add_binding(id, expr.range(), BindingKind::NamedExprAssignment, flags);
|
||||
return;
|
||||
}
|
||||
|
||||
// If the expression is part of a comprehension target, then it's a comprehension variable
|
||||
// assignment, as in:
|
||||
// ```python
|
||||
// [x for x in range(10)]
|
||||
// ```
|
||||
if self.semantic.in_comprehension_assignment() {
|
||||
self.add_binding(id, expr.range(), BindingKind::ComprehensionVar, flags);
|
||||
return;
|
||||
}
|
||||
|
||||
self.add_binding(id, expr.range(), BindingKind::Assignment, flags);
|
||||
}
|
||||
|
||||
@@ -2008,8 +1925,6 @@ impl<'a> Checker<'a> {
|
||||
};
|
||||
|
||||
self.visit_parameters(parameters);
|
||||
// Set the docstring state before visiting the function body.
|
||||
self.docstring_state = DocstringState::Expected;
|
||||
self.visit_body(body);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ fn extract_import_map(path: &Path, package: Option<&Path>, blocks: &[&Block]) ->
|
||||
}) => {
|
||||
let level = level.unwrap_or_default() as usize;
|
||||
let module = if let Some(module) = module {
|
||||
let module: &String = module.as_ref();
|
||||
let module: &str = module.as_str();
|
||||
if level == 0 {
|
||||
Cow::Borrowed(module)
|
||||
} else {
|
||||
|
||||
@@ -1025,7 +1025,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
#[allow(deprecated)]
|
||||
(Refurb, "113") => (RuleGroup::Nursery, rules::refurb::rules::RepeatedAppend),
|
||||
(Refurb, "118") => (RuleGroup::Preview, rules::refurb::rules::ReimplementedOperator),
|
||||
(Refurb, "129") => (RuleGroup::Preview, rules::refurb::rules::ReadlinesInFor),
|
||||
#[allow(deprecated)]
|
||||
(Refurb, "131") => (RuleGroup::Nursery, rules::refurb::rules::DeleteFullSlice),
|
||||
#[allow(deprecated)]
|
||||
|
||||
@@ -248,7 +248,6 @@ impl Renamer {
|
||||
| BindingKind::Assignment
|
||||
| BindingKind::BoundException
|
||||
| BindingKind::LoopVar
|
||||
| BindingKind::ComprehensionVar
|
||||
| BindingKind::WithItemVar
|
||||
| BindingKind::Global
|
||||
| BindingKind::Nonlocal(_)
|
||||
|
||||
@@ -81,7 +81,7 @@ pub(crate) fn variable_name_task_id(
|
||||
let ast::ExprStringLiteral { value: task_id, .. } = keyword.value.as_string_literal_expr()?;
|
||||
|
||||
// If the target name is the same as the task_id, no violation.
|
||||
if task_id == id {
|
||||
if task_id == &**id {
|
||||
return None;
|
||||
}
|
||||
|
||||
|
||||
@@ -137,7 +137,7 @@ impl AutoPythonType {
|
||||
)
|
||||
.ok()?;
|
||||
let expr = Expr::Name(ast::ExprName {
|
||||
id: binding,
|
||||
id: binding.into_boxed_str(),
|
||||
range: TextRange::default(),
|
||||
ctx: ExprContext::Load,
|
||||
});
|
||||
|
||||
@@ -118,7 +118,8 @@ fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
|
||||
|
||||
let binding = semantic.binding(binding_id);
|
||||
|
||||
let Some(Expr::Call(call)) = analyze::typing::find_binding_value(binding, semantic) else {
|
||||
let Some(Expr::Call(call)) = analyze::typing::find_binding_value(&name.id, binding, semantic)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
|
||||
@@ -52,7 +52,7 @@ impl Violation for HardcodedPasswordString {
|
||||
fn password_target(target: &Expr) -> Option<&str> {
|
||||
let target_name = match target {
|
||||
// variable = "s3cr3t"
|
||||
Expr::Name(ast::ExprName { id, .. }) => id.as_str(),
|
||||
Expr::Name(ast::ExprName { id, .. }) => &**id,
|
||||
// d["password"] = "s3cr3t"
|
||||
Expr::Subscript(ast::ExprSubscript { slice, .. }) => match slice.as_ref() {
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(),
|
||||
|
||||
@@ -69,7 +69,7 @@ pub(crate) fn jinja2_autoescape_false(checker: &mut Checker, call: &ast::ExprCal
|
||||
Expr::BooleanLiteral(ast::ExprBooleanLiteral { value: true, .. }) => (),
|
||||
Expr::Call(ast::ExprCall { func, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() {
|
||||
if id != "select_autoescape" {
|
||||
if &**id != "select_autoescape" {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
Jinja2AutoescapeFalse { value: true },
|
||||
keyword.range(),
|
||||
|
||||
@@ -64,7 +64,7 @@ pub(crate) fn ssl_with_bad_defaults(checker: &mut Checker, function_def: &StmtFu
|
||||
if let Some(default) = ¶m.default {
|
||||
match default.as_ref() {
|
||||
Expr::Name(ast::ExprName { id, range, .. }) => {
|
||||
if is_insecure_protocol(id.as_str()) {
|
||||
if is_insecure_protocol(id) {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
SslWithBadDefaults {
|
||||
protocol: id.to_string(),
|
||||
|
||||
@@ -83,7 +83,7 @@ pub(crate) fn blind_except(
|
||||
return;
|
||||
};
|
||||
|
||||
if !matches!(id.as_str(), "BaseException" | "Exception") {
|
||||
if !matches!(&**id, "BaseException" | "Exception") {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -96,7 +96,7 @@ pub(crate) fn blind_except(
|
||||
if let Stmt::Raise(ast::StmtRaise { exc, .. }) = stmt {
|
||||
if let Some(exc) = exc {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = exc.as_ref() {
|
||||
name.is_some_and(|name| id == name)
|
||||
name.is_some_and(|name| &**id == name)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
||||
@@ -54,7 +54,7 @@ pub(super) fn is_allowed_func_def(name: &str) -> bool {
|
||||
pub(super) fn allow_boolean_trap(call: &ast::ExprCall) -> bool {
|
||||
let func_name = match call.func.as_ref() {
|
||||
Expr::Attribute(ast::ExprAttribute { attr, .. }) => attr.as_str(),
|
||||
Expr::Name(ast::ExprName { id, .. }) => id.as_str(),
|
||||
Expr::Name(ast::ExprName { id, .. }) => &**id,
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
|
||||
@@ -164,7 +164,7 @@ pub(crate) fn boolean_type_hint_positional_argument(
|
||||
fn match_annotation_to_literal_bool(annotation: &Expr) -> bool {
|
||||
match annotation {
|
||||
// Ex) `True`
|
||||
Expr::Name(name) => &name.id == "bool",
|
||||
Expr::Name(name) => &*name.id == "bool",
|
||||
// Ex) `"True"`
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "bool",
|
||||
_ => false,
|
||||
@@ -176,7 +176,7 @@ fn match_annotation_to_literal_bool(annotation: &Expr) -> bool {
|
||||
fn match_annotation_to_complex_bool(annotation: &Expr, semantic: &SemanticModel) -> bool {
|
||||
match annotation {
|
||||
// Ex) `bool`
|
||||
Expr::Name(name) => &name.id == "bool",
|
||||
Expr::Name(name) => &*name.id == "bool",
|
||||
// Ex) `"bool"`
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "bool",
|
||||
// Ex) `bool | int`
|
||||
|
||||
@@ -57,7 +57,7 @@ fn assertion_error(msg: Option<&Expr>) -> Stmt {
|
||||
ctx: ExprContext::Load,
|
||||
range: TextRange::default(),
|
||||
})),
|
||||
arguments: Arguments {
|
||||
arguments: Box::new(Arguments {
|
||||
args: if let Some(msg) = msg {
|
||||
Box::from([msg.clone()])
|
||||
} else {
|
||||
@@ -65,7 +65,7 @@ fn assertion_error(msg: Option<&Expr>) -> Stmt {
|
||||
},
|
||||
keywords: Box::from([]),
|
||||
range: TextRange::default(),
|
||||
},
|
||||
}),
|
||||
range: TextRange::default(),
|
||||
}))),
|
||||
cause: None,
|
||||
|
||||
@@ -63,7 +63,7 @@ pub(crate) fn assignment_to_os_environ(checker: &mut Checker, targets: &[Expr])
|
||||
let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else {
|
||||
return;
|
||||
};
|
||||
if id != "os" {
|
||||
if &**id != "os" {
|
||||
return;
|
||||
}
|
||||
checker
|
||||
|
||||
@@ -85,7 +85,7 @@ pub(crate) fn cached_instance_method(checker: &mut Checker, decorator_list: &[De
|
||||
// TODO(charlie): This should take into account `classmethod-decorators` and
|
||||
// `staticmethod-decorators`.
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = &decorator.expression {
|
||||
if id == "classmethod" || id == "staticmethod" {
|
||||
if &**id == "classmethod" || &**id == "staticmethod" {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -131,7 +131,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
|
||||
}) => {
|
||||
match func.as_ref() {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
if matches!(id.as_str(), "filter" | "reduce" | "map") {
|
||||
if matches!(&**id, "filter" | "reduce" | "map") {
|
||||
for arg in arguments.args.iter() {
|
||||
if arg.is_lambda_expr() {
|
||||
self.safe_functions.push(arg);
|
||||
@@ -142,7 +142,7 @@ impl<'a> Visitor<'a> for SuspiciousVariablesVisitor<'a> {
|
||||
Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
|
||||
if attr == "reduce" {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
|
||||
if id == "functools" {
|
||||
if &**id == "functools" {
|
||||
for arg in arguments.args.iter() {
|
||||
if arg.is_lambda_expr() {
|
||||
self.safe_functions.push(arg);
|
||||
@@ -209,7 +209,7 @@ impl<'a> Visitor<'a> for NamesFromAssignmentsVisitor<'a> {
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
match expr {
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
self.names.push(id.as_str());
|
||||
self.names.push(id);
|
||||
}
|
||||
Expr::Starred(ast::ExprStarred { value, .. }) => {
|
||||
self.visit_expr(value);
|
||||
@@ -303,7 +303,7 @@ pub(crate) fn function_uses_loop_variable(checker: &mut Checker, node: &Node) {
|
||||
// If a variable was used in a function or lambda body, and assigned in the
|
||||
// loop, flag it.
|
||||
for name in suspicious_variables {
|
||||
if reassigned_in_loop.contains(&name.id.as_str()) {
|
||||
if reassigned_in_loop.contains(&&*name.id) {
|
||||
if !checker.flake8_bugbear_seen.contains(&name.range()) {
|
||||
checker.flake8_bugbear_seen.push(name.range());
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
|
||||
@@ -57,7 +57,7 @@ pub(crate) fn getattr_with_constant(
|
||||
let Expr::Name(ast::ExprName { id, .. }) = func else {
|
||||
return;
|
||||
};
|
||||
if id != "getattr" {
|
||||
if &**id != "getattr" {
|
||||
return;
|
||||
}
|
||||
let [obj, arg] = args else {
|
||||
|
||||
@@ -87,7 +87,7 @@ pub(crate) fn raise_without_from_inside_except(
|
||||
if let Some(name) = name {
|
||||
if exc
|
||||
.as_name_expr()
|
||||
.is_some_and(|ast::ExprName { id, .. }| name == id)
|
||||
.is_some_and(|ast::ExprName { id, .. }| &**id == name)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -83,7 +83,7 @@ impl<'a> GroupNameFinder<'a> {
|
||||
|
||||
fn name_matches(&self, expr: &Expr) -> bool {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = expr {
|
||||
id == self.group_name
|
||||
&**id == self.group_name
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
||||
@@ -71,7 +71,7 @@ pub(crate) fn setattr_with_constant(
|
||||
let Expr::Name(ast::ExprName { id, .. }) = func else {
|
||||
return;
|
||||
};
|
||||
if id != "setattr" {
|
||||
if &**id != "setattr" {
|
||||
return;
|
||||
}
|
||||
let [obj, name, value] = args else {
|
||||
|
||||
@@ -73,7 +73,7 @@ pub(crate) fn static_key_dict_comprehension(checker: &mut Checker, dict_comp: &a
|
||||
fn is_constant(key: &Expr, names: &FxHashMap<&str, &ast::ExprName>) -> bool {
|
||||
match key {
|
||||
Expr::Tuple(ast::ExprTuple { elts, .. }) => elts.iter().all(|elt| is_constant(elt, names)),
|
||||
Expr::Name(ast::ExprName { id, .. }) => !names.contains_key(id.as_str()),
|
||||
Expr::Name(ast::ExprName { id, .. }) => !names.contains_key(&**id),
|
||||
Expr::Attribute(ast::ExprAttribute { value, .. }) => is_constant(value, names),
|
||||
Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => {
|
||||
is_constant(value, names) && is_constant(slice, names)
|
||||
|
||||
@@ -54,7 +54,7 @@ pub(crate) fn unintentional_type_annotation(
|
||||
}
|
||||
Expr::Attribute(ast::ExprAttribute { value, .. }) => {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() {
|
||||
if id != "self" {
|
||||
if &**id != "self" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(UnintentionalTypeAnnotation, stmt.range()));
|
||||
|
||||
@@ -61,7 +61,7 @@ pub(crate) fn unreliable_callable_check(
|
||||
let Expr::Name(ast::ExprName { id, .. }) = func else {
|
||||
return;
|
||||
};
|
||||
if !matches!(id.as_str(), "hasattr" | "getattr") {
|
||||
if !matches!(&**id, "hasattr" | "getattr") {
|
||||
return;
|
||||
}
|
||||
let [obj, attr, ..] = args else {
|
||||
@@ -75,7 +75,7 @@ pub(crate) fn unreliable_callable_check(
|
||||
}
|
||||
|
||||
let mut diagnostic = Diagnostic::new(UnreliableCallableCheck, expr.range());
|
||||
if id == "hasattr" {
|
||||
if &**id == "hasattr" {
|
||||
if checker.semantic().is_builtin("callable") {
|
||||
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
|
||||
format!("callable({})", checker.locator().slice(obj)),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Applicability, Diagnostic, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr};
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -53,7 +53,7 @@ impl AlwaysFixableViolation for ZipWithoutExplicitStrict {
|
||||
/// B905
|
||||
pub(crate) fn zip_without_explicit_strict(checker: &mut Checker, call: &ast::ExprCall) {
|
||||
if let Expr::Name(ast::ExprName { id, .. }) = call.func.as_ref() {
|
||||
if id == "zip"
|
||||
if &**id == "zip"
|
||||
&& checker.semantic().is_builtin("zip")
|
||||
&& call.arguments.find_keyword("strict").is_none()
|
||||
&& !call
|
||||
@@ -91,9 +91,7 @@ pub(crate) fn zip_without_explicit_strict(checker: &mut Checker, call: &ast::Exp
|
||||
/// `itertools.cycle` or similar).
|
||||
fn is_infinite_iterator(arg: &Expr, semantic: &SemanticModel) -> bool {
|
||||
let Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
arguments: Arguments { args, keywords, .. },
|
||||
..
|
||||
func, arguments, ..
|
||||
}) = &arg
|
||||
else {
|
||||
return false;
|
||||
@@ -104,17 +102,17 @@ fn is_infinite_iterator(arg: &Expr, semantic: &SemanticModel) -> bool {
|
||||
["itertools", "cycle" | "count"] => true,
|
||||
["itertools", "repeat"] => {
|
||||
// Ex) `itertools.repeat(1)`
|
||||
if keywords.is_empty() && args.len() == 1 {
|
||||
if arguments.keywords.is_empty() && arguments.args.len() == 1 {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Ex) `itertools.repeat(1, None)`
|
||||
if args.len() == 2 && args[1].is_none_literal_expr() {
|
||||
if arguments.args.len() == 2 && arguments.args[1].is_none_literal_expr() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Ex) `iterools.repeat(1, times=None)`
|
||||
for keyword in keywords.iter() {
|
||||
for keyword in arguments.keywords.iter() {
|
||||
if keyword.arg.as_ref().is_some_and(|name| name == "times") {
|
||||
if keyword.value.is_none_literal_expr() {
|
||||
return true;
|
||||
|
||||
@@ -13,7 +13,7 @@ pub(super) fn exactly_one_argument_with_matching_function<'a>(
|
||||
return None;
|
||||
}
|
||||
let func = func.as_name_expr()?;
|
||||
if func.id != name {
|
||||
if &*func.id != name {
|
||||
return None;
|
||||
}
|
||||
Some(arg)
|
||||
@@ -24,7 +24,7 @@ pub(super) fn first_argument_with_matching_function<'a>(
|
||||
func: &Expr,
|
||||
args: &'a [Expr],
|
||||
) -> Option<&'a Expr> {
|
||||
if func.as_name_expr().is_some_and(|func| func.id == name) {
|
||||
if func.as_name_expr().is_some_and(|func| &*func.id == name) {
|
||||
args.first()
|
||||
} else {
|
||||
None
|
||||
|
||||
@@ -66,7 +66,7 @@ pub(crate) fn unnecessary_call_around_sorted(
|
||||
let Some(outer) = func.as_name_expr() else {
|
||||
return;
|
||||
};
|
||||
if !matches!(outer.id.as_str(), "list" | "reversed") {
|
||||
if !matches!(&*outer.id, "list" | "reversed") {
|
||||
return;
|
||||
}
|
||||
let Some(arg) = args.first() else {
|
||||
@@ -78,7 +78,7 @@ pub(crate) fn unnecessary_call_around_sorted(
|
||||
let Some(inner) = func.as_name_expr() else {
|
||||
return;
|
||||
};
|
||||
if inner.id != "sorted" {
|
||||
if &*inner.id != "sorted" {
|
||||
return;
|
||||
}
|
||||
if !checker.semantic().is_builtin(&inner.id) || !checker.semantic().is_builtin(&outer.id) {
|
||||
@@ -93,7 +93,7 @@ pub(crate) fn unnecessary_call_around_sorted(
|
||||
diagnostic.try_set_fix(|| {
|
||||
Ok(Fix::applicable_edit(
|
||||
fixes::fix_unnecessary_call_around_sorted(expr, checker.locator(), checker.stylist())?,
|
||||
if outer.id == "reversed" {
|
||||
if &*outer.id == "reversed" {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
|
||||
@@ -68,7 +68,7 @@ pub(crate) fn unnecessary_collection_call(
|
||||
let Some(func) = call.func.as_name_expr() else {
|
||||
return;
|
||||
};
|
||||
let collection = match func.id.as_str() {
|
||||
let collection = match &*func.id {
|
||||
"dict"
|
||||
if call.arguments.keywords.is_empty()
|
||||
|| (!settings.allow_dict_calls_with_keyword_arguments
|
||||
@@ -87,7 +87,7 @@ pub(crate) fn unnecessary_collection_call(
|
||||
}
|
||||
_ => return,
|
||||
};
|
||||
if !checker.semantic().is_builtin(func.id.as_str()) {
|
||||
if !checker.semantic().is_builtin(&func.id) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -75,7 +75,7 @@ pub(crate) fn unnecessary_comprehension_any_all(
|
||||
let Expr::Name(ast::ExprName { id, .. }) = func else {
|
||||
return;
|
||||
};
|
||||
if !matches!(id.as_str(), "all" | "any") {
|
||||
if !matches!(&**id, "all" | "any") {
|
||||
return;
|
||||
}
|
||||
let [arg] = args else {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::comparable::ComparableKeyword;
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, Keyword};
|
||||
use ruff_python_ast::{self as ast, Expr, Keyword};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -77,21 +77,14 @@ pub(crate) fn unnecessary_double_cast_or_process(
|
||||
let Some(outer) = func.as_name_expr() else {
|
||||
return;
|
||||
};
|
||||
if !matches!(
|
||||
outer.id.as_str(),
|
||||
"list" | "tuple" | "set" | "reversed" | "sorted"
|
||||
) {
|
||||
if !matches!(&*outer.id, "list" | "tuple" | "set" | "reversed" | "sorted") {
|
||||
return;
|
||||
}
|
||||
let Some(arg) = args.first() else {
|
||||
return;
|
||||
};
|
||||
let Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
arguments: Arguments {
|
||||
keywords: inner_kw, ..
|
||||
},
|
||||
..
|
||||
func, arguments, ..
|
||||
}) = arg
|
||||
else {
|
||||
return;
|
||||
@@ -105,11 +98,11 @@ pub(crate) fn unnecessary_double_cast_or_process(
|
||||
|
||||
// Avoid collapsing nested `sorted` calls with non-identical keyword arguments
|
||||
// (i.e., `key`, `reverse`).
|
||||
if inner.id == "sorted" && outer.id == "sorted" {
|
||||
if inner_kw.len() != outer_kw.len() {
|
||||
if &*inner.id == "sorted" && &*outer.id == "sorted" {
|
||||
if arguments.keywords.len() != outer_kw.len() {
|
||||
return;
|
||||
}
|
||||
if !inner_kw.iter().all(|inner| {
|
||||
if !arguments.keywords.iter().all(|inner| {
|
||||
outer_kw
|
||||
.iter()
|
||||
.any(|outer| ComparableKeyword::from(inner) == ComparableKeyword::from(outer))
|
||||
@@ -122,7 +115,7 @@ pub(crate) fn unnecessary_double_cast_or_process(
|
||||
// Ex) `list(tuple(...))`
|
||||
// Ex) `set(set(...))`
|
||||
if matches!(
|
||||
(outer.id.as_str(), inner.id.as_str()),
|
||||
(&*outer.id, &*inner.id),
|
||||
("set" | "sorted", "list" | "tuple" | "reversed" | "sorted")
|
||||
| ("set", "set")
|
||||
| ("list" | "tuple", "list" | "tuple")
|
||||
|
||||
@@ -5,7 +5,7 @@ use ruff_diagnostics::{FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::visitor;
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{self as ast, Arguments, Expr, ExprContext, Parameters, Stmt};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprContext, Parameters, Stmt};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -77,7 +77,7 @@ pub(crate) fn unnecessary_map(
|
||||
return;
|
||||
};
|
||||
|
||||
let object_type = match func.id.as_str() {
|
||||
let object_type = match &*func.id {
|
||||
"map" => ObjectType::Generator,
|
||||
"list" => ObjectType::List,
|
||||
"set" => ObjectType::Set,
|
||||
@@ -95,7 +95,7 @@ pub(crate) fn unnecessary_map(
|
||||
if parent
|
||||
.and_then(Expr::as_call_expr)
|
||||
.and_then(|call| call.func.as_name_expr())
|
||||
.is_some_and(|name| matches!(name.id.as_str(), "list" | "set" | "dict"))
|
||||
.is_some_and(|name| matches!(&*name.id, "list" | "set" | "dict"))
|
||||
{
|
||||
return;
|
||||
}
|
||||
@@ -125,23 +125,22 @@ pub(crate) fn unnecessary_map(
|
||||
ObjectType::List | ObjectType::Set => {
|
||||
// Only flag, e.g., `list(map(lambda x: x + 1, iterable))`.
|
||||
let [Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
arguments: Arguments { args, keywords, .. },
|
||||
..
|
||||
func, arguments, ..
|
||||
})] = args
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
if args.len() != 2 {
|
||||
if arguments.args.len() != 2 {
|
||||
return;
|
||||
}
|
||||
|
||||
if !keywords.is_empty() {
|
||||
if !arguments.keywords.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(argument) = helpers::first_argument_with_matching_function("map", func, args)
|
||||
let Some(argument) =
|
||||
helpers::first_argument_with_matching_function("map", func, &arguments.args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
@@ -170,23 +169,22 @@ pub(crate) fn unnecessary_map(
|
||||
ObjectType::Dict => {
|
||||
// Only flag, e.g., `dict(map(lambda v: (v, v ** 2), values))`.
|
||||
let [Expr::Call(ast::ExprCall {
|
||||
func,
|
||||
arguments: Arguments { args, keywords, .. },
|
||||
..
|
||||
func, arguments, ..
|
||||
})] = args
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
if args.len() != 2 {
|
||||
if arguments.args.len() != 2 {
|
||||
return;
|
||||
}
|
||||
|
||||
if !keywords.is_empty() {
|
||||
if !arguments.keywords.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(argument) = helpers::first_argument_with_matching_function("map", func, args)
|
||||
let Some(argument) =
|
||||
helpers::first_argument_with_matching_function("map", func, &arguments.args)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -47,7 +47,7 @@ pub(crate) fn unnecessary_subscript_reversal(checker: &mut Checker, call: &ast::
|
||||
let Some(func) = call.func.as_name_expr() else {
|
||||
return;
|
||||
};
|
||||
if !matches!(func.id.as_str(), "reversed" | "set" | "sorted") {
|
||||
if !matches!(&*func.id, "reversed" | "set" | "sorted") {
|
||||
return;
|
||||
}
|
||||
if !checker.semantic().is_builtin(&func.id) {
|
||||
|
||||
@@ -72,7 +72,7 @@ pub(crate) fn all_with_model_form(checker: &mut Checker, class_def: &ast::StmtCl
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
continue;
|
||||
};
|
||||
if id != "fields" {
|
||||
if &**id != "fields" {
|
||||
continue;
|
||||
}
|
||||
match value.as_ref() {
|
||||
|
||||
@@ -70,7 +70,7 @@ pub(crate) fn exclude_with_model_form(checker: &mut Checker, class_def: &ast::St
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
continue;
|
||||
};
|
||||
if id == "exclude" {
|
||||
if &**id == "exclude" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(DjangoExcludeWithModelForm, target.range()));
|
||||
|
||||
@@ -106,7 +106,7 @@ fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool {
|
||||
let Expr::Name(ast::ExprName { id, .. }) = target else {
|
||||
continue;
|
||||
};
|
||||
if id != "abstract" {
|
||||
if &**id != "abstract" {
|
||||
continue;
|
||||
}
|
||||
if !is_const_true(value) {
|
||||
|
||||
@@ -165,7 +165,7 @@ fn get_element_type(element: &Stmt, semantic: &SemanticModel) -> Option<ContentT
|
||||
let Expr::Name(ast::ExprName { id, .. }) = expr else {
|
||||
return None;
|
||||
};
|
||||
if id == "objects" {
|
||||
if &**id == "objects" {
|
||||
Some(ContentType::ManagerDeclaration)
|
||||
} else {
|
||||
None
|
||||
|
||||
@@ -1,4 +1,4 @@
use ruff_python_ast::{self as ast, Arguments, Expr, Stmt};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_source_file::Locator;
use ruff_text_size::Ranged;

@@ -175,12 +175,8 @@ impl Violation for DotFormatInException {

/// EM101, EM102, EM103
pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr) {
if let Expr::Call(ast::ExprCall {
arguments: Arguments { args, .. },
..
}) = exc
{
if let Some(first) = args.first() {
if let Expr::Call(ast::ExprCall { arguments, .. }) = exc {
if let Some(first) = arguments.args.first() {
match first {
// Check for string literals.
Expr::StringLiteral(ast::ExprStringLiteral { value: string, .. }) => {

@@ -7,7 +7,7 @@ pub mod settings;
/// Returns true if the [`Expr`] is an internationalization function call.
pub(crate) fn is_gettext_func_call(func: &Expr, functions_names: &[String]) -> bool {
if let Expr::Name(ast::ExprName { id, .. }) = func {
functions_names.contains(id)
functions_names.iter().any(|name| name == &**id)
} else {
false
}

@@ -1,5 +1,5 @@
use ruff_diagnostics::{Diagnostic, Edit, Fix};
use ruff_python_ast::{self as ast, Arguments, Expr, Keyword, Operator};
use ruff_python_ast::{self as ast, Expr, Keyword, Operator};
use ruff_python_semantic::analyze::logging;
use ruff_python_stdlib::logging::LoggingLevel;
use ruff_text_size::Ranged;
@@ -90,7 +90,7 @@ fn check_msg(checker: &mut Checker, msg: &Expr) {
fn check_log_record_attr_clash(checker: &mut Checker, extra: &Keyword) {
match &extra.value {
Expr::Dict(ast::ExprDict { keys, .. }) => {
for key in keys {
for key in keys.iter() {
if let Some(key) = &key {
if let Expr::StringLiteral(ast::ExprStringLiteral { value: attr, .. }) = key {
if is_reserved_attr(attr.to_str()) {
@@ -104,16 +104,14 @@ fn check_log_record_attr_clash(checker: &mut Checker, extra: &Keyword) {
}
}
Expr::Call(ast::ExprCall {
func,
arguments: Arguments { keywords, .. },
..
func, arguments, ..
}) => {
if checker
.semantic()
.resolve_call_path(func)
.is_some_and(|call_path| matches!(call_path.as_slice(), ["", "dict"]))
{
for keyword in keywords.iter() {
for keyword in arguments.keywords.iter() {
if let Some(attr) = &keyword.arg {
if is_reserved_attr(attr) {
checker.diagnostics.push(Diagnostic::new(

@@ -93,7 +93,7 @@ pub(crate) fn duplicate_class_field_definition(checker: &mut Checker, body: &[St
_ => continue,
}

if !seen_targets.insert(target.id.as_str()) {
if !seen_targets.insert(&*target.id) {
let mut diagnostic = Diagnostic::new(
DuplicateClassFieldDefinition {
name: target.id.to_string(),

@@ -81,23 +81,18 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
for (index, call) in values.iter().enumerate() {
let Expr::Call(ast::ExprCall {
func,
arguments:
Arguments {
args,
keywords,
range: _,
},
arguments,
range: _,
}) = &call
else {
continue;
};

if !keywords.is_empty() {
if !arguments.keywords.is_empty() {
continue;
}

let [arg] = &**args else {
let [arg] = &*arguments.args else {
continue;
};

@@ -120,7 +115,7 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
}

duplicates
.entry((attr.as_str(), arg_name.as_str()))
.entry((&**attr, &**arg_name))
.or_insert_with(Vec::new)
.push(index);
}
@@ -140,12 +135,7 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
.map(|expr| {
let Expr::Call(ast::ExprCall {
func: _,
arguments:
Arguments {
args,
keywords: _,
range: _,
},
arguments,
range: _,
}) = expr
else {
@@ -154,7 +144,9 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
format!("Indices should only contain `{attr_name}` calls")
)
};
args.first()
arguments
.args
.first()
.unwrap_or_else(|| panic!("`{attr_name}` should have one argument"))
})
.collect();
@@ -187,11 +179,11 @@ pub(crate) fn multiple_starts_ends_with(checker: &mut Checker, expr: &Expr) {
});
let node3 = Expr::Call(ast::ExprCall {
func: Box::new(node2),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([node]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
});
let call = node3;
@@ -229,7 +221,7 @@ fn is_bound_to_tuple(arg: &Expr, semantic: &SemanticModel) -> bool {
return false;
};

let Some(binding_id) = semantic.lookup_symbol(id.as_str()) else {
let Some(binding_id) = semantic.lookup_symbol(id) else {
return false;
};

@@ -70,7 +70,7 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCal
};

// Ex) `foo(**{**bar})`
if matches!(keys.as_slice(), [None]) {
if matches!(&**keys, [None]) {
let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range());

diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(
@@ -151,7 +151,7 @@ fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> {
duplicates.insert(name.as_str());
}
} else if let Expr::Dict(ast::ExprDict { keys, .. }) = &keyword.value {
for key in keys {
for key in keys.iter() {
if let Some(name) = key.as_ref().and_then(as_kwarg) {
if !seen.insert(name) {
duplicates.insert(name);

@@ -47,7 +47,7 @@ pub(crate) fn unnecessary_range_start(checker: &mut Checker, call: &ast::ExprCal
let Expr::Name(ast::ExprName { id, .. }) = call.func.as_ref() else {
return;
};
if id != "range" {
if &**id != "range" {
return;
};
if !checker.semantic().is_builtin("range") {

@@ -142,7 +142,7 @@ fn class_method(

// Don't error if the first argument is annotated with typing.Type[T].
// These are edge cases, and it's hard to give good error messages for them.
if value.id != "type" {
if &*value.id != "type" {
return false;
};

@@ -264,7 +264,7 @@ fn is_name(expr: &Expr, name: &str) -> bool {
let Expr::Name(ast::ExprName { id, .. }) = expr else {
return false;
};
id.as_str() == name
&**id == name
}

/// Return `true` if the given expression resolves to `typing.Self`.

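The `is_name` hunk just above is the clearest instance of the second recurring rewrite in this diff: `id.as_str() == name` becomes `&**id == name`. Here is a minimal sketch of that comparison, assuming the identifier is now stored as a `Box<str>` (an assumption drawn from the hunks, not from ruff's actual type definitions).

```rust
// Hypothetical stand-in: an AST name whose `id` is a `Box<str>`.
struct ExprName {
    id: Box<str>,
}

fn is_name(expr: &ExprName, name: &str) -> bool {
    // With match ergonomics, `id` binds as `&Box<str>`, so `&**id` peels the
    // reference and the box down to a plain `&str` for the comparison.
    let ExprName { id } = expr;
    &**id == name
}

fn main() {
    let expr = ExprName { id: "range".into() };
    assert!(is_name(&expr, "range"));
    assert!(!is_name(&expr, "zip"));
}
```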
@@ -132,7 +132,7 @@ impl fmt::Display for ExprType {
/// `str`, `bytes`, or `complex`).
fn match_builtin_type(expr: &Expr, semantic: &SemanticModel) -> Option<ExprType> {
let name = expr.as_name_expr()?;
let result = match name.id.as_str() {
let result = match &*name.id {
"int" => ExprType::Int,
"bool" => ExprType::Bool,
"str" => ExprType::Str,
@@ -141,7 +141,7 @@ fn match_builtin_type(expr: &Expr, semantic: &SemanticModel) -> Option<ExprType>
"complex" => ExprType::Complex,
_ => return None,
};
if !semantic.is_builtin(name.id.as_str()) {
if !semantic.is_builtin(&name.id) {
return None;
}
Some(result)

@@ -307,7 +307,7 @@ fn is_valid_default_value_with_annotation(
}) => {
return allow_container
&& keys.len() <= 10
&& keys.iter().zip(values).all(|(k, v)| {
&& keys.iter().zip(values.iter()).all(|(k, v)| {
k.as_ref().is_some_and(|k| {
is_valid_default_value_with_annotation(k, false, locator, semantic)
}) && is_valid_default_value_with_annotation(v, false, locator, semantic)
@@ -450,7 +450,7 @@ fn is_type_var_like_call(expr: &Expr, semantic: &SemanticModel) -> bool {
/// `__all__`).
fn is_special_assignment(target: &Expr, semantic: &SemanticModel) -> bool {
if let Expr::Name(ast::ExprName { id, .. }) = target {
match id.as_str() {
match &**id {
"__all__" => semantic.current_scope().kind.is_module(),
"__match_args__" | "__slots__" => semantic.current_scope().kind.is_class(),
_ => false,

@@ -122,7 +122,7 @@ pub(crate) fn unnecessary_type_union<'a>(checker: &mut Checker, union: &'a Expr)
.into_iter()
.map(|type_member| {
Expr::Name(ast::ExprName {
id: type_member,
id: type_member.into_boxed_str(),
ctx: ExprContext::Load,
range: TextRange::default(),
})

@@ -47,7 +47,7 @@ pub(crate) fn unsupported_method_call_on_all(checker: &mut Checker, func: &Expr)
let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else {
return;
};
if id.as_str() != "__all__" {
if &**id != "__all__" {
return;
}
if !is_unsupported_method(attr.as_str()) {

@@ -15,11 +15,13 @@ PYI049.py:9:7: PYI049 Private TypedDict `_UnusedTypedDict2` is never used
10 | bar: int
|

PYI049.py:21:1: PYI049 Private TypedDict `_UnusedTypedDict3` is never used
PYI049.py:20:1: PYI049 Private TypedDict `_UnusedTypedDict3` is never used
|
21 | _UnusedTypedDict3 = TypedDict("_UnusedTypedDict3", {"foo": int})
18 | bar: list[int]
19 |
20 | _UnusedTypedDict3 = TypedDict("_UnusedTypedDict3", {"foo": int})
| ^^^^^^^^^^^^^^^^^ PYI049
22 | _UsedTypedDict3 = TypedDict("_UsedTypedDict3", {"bar": bytes})
21 | _UsedTypedDict3 = TypedDict("_UsedTypedDict3", {"bar": bytes})
|

@@ -24,13 +24,4 @@ PYI049.pyi:34:1: PYI049 Private TypedDict `_UnusedTypedDict3` is never used
35 | _UsedTypedDict3 = TypedDict("_UsedTypedDict3", {"bar": bytes})
|

PYI049.pyi:43:11: PYI049 Private TypedDict `_UnusedTypeDict4` is never used
|
41 | # scope (unlike in `.py` files).
42 | class _CustomClass3:
43 | class _UnusedTypeDict4(TypedDict):
| ^^^^^^^^^^^^^^^^ PYI049
44 | pass
|

@@ -242,7 +242,7 @@ where
match expr {
Expr::Name(ast::ExprName { id, .. }) => {
if let Some(current_assert) = self.current_assert {
if id.as_str() == self.exception_name {
if &**id == self.exception_name {
self.errors.push(Diagnostic::new(
PytestAssertInExcept {
name: id.to_string(),
@@ -419,7 +419,7 @@ fn to_pytest_raises_args<'a>(
if kwarg
.arg
.as_ref()
.is_some_and(|id| id.as_str() == "expected_exception") =>
.is_some_and(|id| &**id == "expected_exception") =>
{
Cow::Borrowed(checker.locator().slice(kwarg.value.range()))
}
@@ -452,11 +452,11 @@ fn to_pytest_raises_args<'a>(
if kwarg1
.arg
.as_ref()
.is_some_and(|id| id.as_str() == "expected_exception")
.is_some_and(|id| &**id == "expected_exception")
&& kwarg2
.arg
.as_ref()
.is_some_and(|id| id.as_str() == "expected_regex") =>
.is_some_and(|id| &**id == "expected_regex") =>
{
Cow::Owned(format!(
"{}, match={}",
@@ -469,11 +469,11 @@ fn to_pytest_raises_args<'a>(
if kwarg1
.arg
.as_ref()
.is_some_and(|id| id.as_str() == "expected_regex")
.is_some_and(|id| &**id == "expected_regex")
&& kwarg2
.arg
.as_ref()
.is_some_and(|id| id.as_str() == "expected_exception") =>
.is_some_and(|id| &**id == "expected_exception") =>
{
Cow::Owned(format!(
"{}, match={}",

@@ -1,4 +1,4 @@
use ruff_python_ast::{self as ast, Arguments, Decorator, Expr};
use ruff_python_ast::{self as ast, Decorator, Expr};

use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
@@ -137,18 +137,10 @@ fn check_mark_parentheses(checker: &mut Checker, decorator: &Decorator, marker:
match &decorator.expression {
Expr::Call(ast::ExprCall {
func,
arguments:
Arguments {
args,
keywords,
range: _,
},
arguments,
range: _,
}) => {
if !checker.settings.flake8_pytest_style.mark_parentheses
&& args.is_empty()
&& keywords.is_empty()
{
if !checker.settings.flake8_pytest_style.mark_parentheses && arguments.is_empty() {
let fix = Fix::safe_edit(Edit::deletion(func.end(), decorator.end()));
pytest_mark_parentheses(checker, decorator, marker, fix, "", "()");
}
@@ -171,11 +163,8 @@ fn check_useless_usefixtures(checker: &mut Checker, decorator: &Decorator, marke
// @pytest.mark.usefixtures
Expr::Attribute(..) => {}
// @pytest.mark.usefixtures(...)
Expr::Call(ast::ExprCall {
arguments: Arguments { args, keywords, .. },
..
}) => {
if !args.is_empty() || !keywords.is_empty() {
Expr::Call(ast::ExprCall { arguments, .. }) => {
if !arguments.is_empty() {
return;
}
}

@@ -7,7 +7,7 @@ use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::AstNode;
use ruff_python_ast::{self as ast, Arguments, Decorator, Expr, ExprContext};
use ruff_python_ast::{self as ast, Decorator, Expr, ExprContext};
use ruff_python_codegen::Generator;
use ruff_python_trivia::CommentRanges;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
@@ -632,23 +632,19 @@ fn handle_value_rows(
pub(crate) fn parametrize(checker: &mut Checker, decorators: &[Decorator]) {
for decorator in decorators {
if is_pytest_parametrize(decorator, checker.semantic()) {
if let Expr::Call(ast::ExprCall {
arguments: Arguments { args, .. },
..
}) = &decorator.expression
{
if let Expr::Call(ast::ExprCall { arguments, .. }) = &decorator.expression {
if checker.enabled(Rule::PytestParametrizeNamesWrongType) {
if let [names, ..] = &**args {
if let [names, ..] = &*arguments.args {
check_names(checker, decorator, names);
}
}
if checker.enabled(Rule::PytestParametrizeValuesWrongType) {
if let [names, values, ..] = &**args {
if let [names, values, ..] = &*arguments.args {
check_values(checker, names, values);
}
}
if checker.enabled(Rule::PytestDuplicateParametrizeTestCases) {
if let [_, values, ..] = &**args {
if let [_, values, ..] = &*arguments.args {
check_duplicates(checker, values);
}
}

@@ -389,11 +389,11 @@ impl UnittestAssert {
};
let node1 = ast::ExprCall {
func: Box::new(node.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([(**obj).clone(), (**cls).clone()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let isinstance = node1.into();
@@ -433,11 +433,11 @@ impl UnittestAssert {
};
let node2 = ast::ExprCall {
func: Box::new(node1.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([(**regex).clone(), (**text).clone()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let re_search = node2.into();

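The `UnittestAssert` hunks above show the construction side of the same change: everywhere a synthetic call is built for a fix, the `Arguments` literal is now wrapped in `Box::new(..)`. A compilable sketch of that shape follows, again with stand-in types rather than ruff's real `ast::ExprCall`.

```rust
// Stand-in types; `TextRange` is a unit placeholder for the real text range.
#[derive(Debug, Default, Clone, Copy)]
struct TextRange;

#[derive(Debug)]
struct Arguments {
    args: Box<[&'static str]>,
    keywords: Box<[&'static str]>,
    range: TextRange,
}

#[derive(Debug)]
struct ExprCall {
    func: Box<&'static str>,
    arguments: Box<Arguments>,
    range: TextRange,
}

fn main() {
    // Before the change this read `arguments: Arguments { .. }`; the only
    // difference at construction sites is the extra `Box::new(..)`.
    let node = ExprCall {
        func: Box::new("bool"),
        arguments: Box::new(Arguments {
            args: Box::from(["x"]),
            keywords: Box::from([]),
            range: TextRange::default(),
        }),
        range: TextRange::default(),
    };
    assert_eq!(node.arguments.args.len(), 1);
    println!("{node:?}");
}
```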
@@ -564,7 +564,7 @@ fn unnecessary_assign(checker: &mut Checker, stack: &Stack) {
continue;
}

if stack.non_locals.contains(assigned_id.as_str()) {
if stack.non_locals.contains(&**assigned_id) {
continue;
}

@@ -77,7 +77,8 @@ pub(crate) fn private_member_access(checker: &mut Checker, expr: &Expr) {
.settings
.flake8_self
.ignore_names
.contains(attr.as_ref())
.iter()
.any(|name| name == attr.as_str())
{
return;
}

@@ -303,27 +303,22 @@ fn isinstance_target<'a>(call: &'a Expr, semantic: &'a SemanticModel) -> Option<
// Verify that this is an `isinstance` call.
let Expr::Call(ast::ExprCall {
func,
arguments:
Arguments {
args,
keywords,
range: _,
},
arguments,
range: _,
}) = &call
else {
return None;
};
if args.len() != 2 {
if !arguments.keywords.is_empty() {
return None;
}
if !keywords.is_empty() {
if arguments.args.len() != 2 {
return None;
}
let Expr::Name(ast::ExprName { id: func_name, .. }) = func.as_ref() else {
return None;
};
if func_name != "isinstance" {
if &**func_name != "isinstance" {
return None;
}
if !semantic.is_builtin("isinstance") {
@@ -331,7 +326,7 @@ fn isinstance_target<'a>(call: &'a Expr, semantic: &'a SemanticModel) -> Option<
}

// Collect the target (e.g., `obj` in `isinstance(obj, int)`).
Some(&args[0])
Some(&arguments.args[0])
}

/// SIM101
@@ -374,12 +369,10 @@ pub(crate) fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
if indices.len() > 1 {
// Grab the target used in each duplicate `isinstance` call (e.g., `obj` in
// `isinstance(obj, int)`).
let target = if let Expr::Call(ast::ExprCall {
arguments: Arguments { args, .. },
..
}) = &values[indices[0]]
{
args.first()
let target = if let Expr::Call(ast::ExprCall { arguments, .. }) = &values[indices[0]] {
arguments
.args
.first()
.expect("`isinstance` should have two arguments")
} else {
unreachable!("Indices should only contain `isinstance` calls")
@@ -401,14 +394,13 @@ pub(crate) fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
.iter()
.map(|index| &values[*index])
.map(|expr| {
let Expr::Call(ast::ExprCall {
arguments: Arguments { args, .. },
..
}) = expr
else {
let Expr::Call(ast::ExprCall { arguments, .. }) = expr else {
unreachable!("Indices should only contain `isinstance` calls")
};
args.get(1).expect("`isinstance` should have two arguments")
arguments
.args
.get(1)
.expect("`isinstance` should have two arguments")
})
.collect();

@@ -436,11 +428,11 @@ pub(crate) fn duplicate_isinstance_call(checker: &mut Checker, expr: &Expr) {
};
let node2 = ast::ExprCall {
func: Box::new(node1.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([target.clone(), node.into()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let call = node2.into();

@@ -1,4 +1,4 @@
use ruff_python_ast::{self as ast, Arguments, Expr};
use ruff_python_ast::{self as ast, Expr};
use ruff_text_size::Ranged;

use crate::fix::snippet::SourceCodeSnippet;
@@ -134,14 +134,12 @@ pub(crate) fn use_capital_environment_variables(checker: &mut Checker, expr: &Ex

// Ex) `os.environ.get('foo')`, `os.getenv('foo')`
let Expr::Call(ast::ExprCall {
func,
arguments: Arguments { args, .. },
..
func, arguments, ..
}) = expr
else {
return;
};
let Some(arg) = args.first() else {
let Some(arg) = arguments.args.first() else {
return;
};
let Expr::StringLiteral(ast::ExprStringLiteral { value: env_var, .. }) = arg else {
@@ -193,7 +191,7 @@ fn check_os_environ_subscript(checker: &mut Checker, expr: &Expr) {
let Expr::Name(ast::ExprName { id, .. }) = attr_value.as_ref() else {
return;
};
if id != "os" || attr != "environ" {
if &**id != "os" || attr != "environ" {
return;
}
let Expr::StringLiteral(ast::ExprStringLiteral { value: env_var, .. }) = slice.as_ref() else {
@@ -233,13 +231,13 @@ fn check_os_environ_subscript(checker: &mut Checker, expr: &Expr) {
pub(crate) fn dict_get_with_none_default(checker: &mut Checker, expr: &Expr) {
let Expr::Call(ast::ExprCall {
func,
arguments: Arguments { args, keywords, .. },
arguments,
range: _,
}) = expr
else {
return;
};
if !keywords.is_empty() {
if !arguments.keywords.is_empty() {
return;
}
let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() else {
@@ -248,13 +246,13 @@ pub(crate) fn dict_get_with_none_default(checker: &mut Checker, expr: &Expr) {
if attr != "get" {
return;
}
let Some(key) = args.first() else {
let Some(key) = arguments.args.first() else {
return;
};
if !(key.is_literal_expr() || key.is_name_expr()) {
return;
}
let Some(default) = args.get(1) else {
let Some(default) = arguments.args.get(1) else {
return;
};
if !default.is_none_literal_expr() {

@@ -184,11 +184,11 @@ pub(crate) fn if_expr_with_true_false(
}
.into(),
),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([test.clone()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
}
.into(),

@@ -278,11 +278,11 @@ pub(crate) fn double_negation(checker: &mut Checker, expr: &Expr, op: UnaryOp, o
};
let node1 = ast::ExprCall {
func: Box::new(node.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([*operand.clone()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement(

@@ -252,7 +252,7 @@ fn is_main_check(expr: &Expr) -> bool {
}) = expr
{
if let Expr::Name(ast::ExprName { id, .. }) = left.as_ref() {
if id == "__name__" {
if &**id == "__name__" {
if let [Expr::StringLiteral(ast::ExprStringLiteral { value, .. })] = &**comparators
{
if value == "__main__" {

@@ -76,7 +76,8 @@ pub(crate) fn enumerate_for_loop(checker: &mut Checker, for_stmt: &ast::StmtFor)
}

// Ensure that the index variable was initialized to 0.
let Some(value) = typing::find_binding_value(binding, checker.semantic()) else {
let Some(value) = typing::find_binding_value(&index.id, binding, checker.semantic())
else {
continue;
};
if !matches!(

@@ -175,11 +175,11 @@ pub(crate) fn if_else_block_instead_of_dict_get(checker: &mut Checker, stmt_if:
};
let node3 = ast::ExprCall {
func: Box::new(node2.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([node1, node]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let node4 = expected_var.clone();
@@ -275,11 +275,11 @@ pub(crate) fn if_exp_instead_of_dict_get(
};
let fixed_node = ast::ExprCall {
func: Box::new(dict_get_node.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([dict_key_node, default_value_node]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};

@@ -3,7 +3,7 @@ use ruff_diagnostics::{Applicability, Edit};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::parenthesize::parenthesized_range;
use ruff_python_ast::AnyNodeRef;
use ruff_python_ast::{self as ast, Arguments, CmpOp, Comprehension, Expr};
use ruff_python_ast::{self as ast, CmpOp, Comprehension, Expr};
use ruff_python_semantic::analyze::typing;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::{Ranged, TextRange};
@@ -67,20 +67,21 @@ fn key_in_dict(
) {
let Expr::Call(ast::ExprCall {
func,
arguments: Arguments { args, keywords, .. },
arguments,
range: _,
}) = &right
else {
return;
};
if !(args.is_empty() && keywords.is_empty()) {

if !arguments.is_empty() {
return;
}

let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func.as_ref() else {
return;
};
if attr != "keys" {
if &**attr != "keys" {
return;
}

@@ -89,10 +90,7 @@ fn key_in_dict(
// def __contains__(self, key: object) -> bool:
// return key in self.keys()
// ```
if value
.as_name_expr()
.is_some_and(|name| matches!(name.id.as_str(), "self"))
{
if value.as_name_expr().is_some_and(|name| &*name.id == "self") {
return;
}

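Several hunks above, including the `key_in_dict` one, collapse `args.is_empty() && keywords.is_empty()` into a single `arguments.is_empty()` call. The sketch below shows what such a helper amounts to on a stand-in type; the real method on ruff's `Arguments` is inferred from the diff, not reproduced here.

```rust
// Stand-in argument container; only the helper's semantics matter here.
struct Arguments {
    args: Vec<i32>,
    keywords: Vec<(String, i32)>,
}

impl Arguments {
    // A call has "no arguments" only when both the positional and the keyword
    // lists are empty.
    fn is_empty(&self) -> bool {
        self.args.is_empty() && self.keywords.is_empty()
    }
}

fn main() {
    let bare = Arguments { args: vec![], keywords: vec![] };
    let with_args = Arguments { args: vec![1], keywords: vec![] };
    assert!(bare.is_empty());
    assert!(!with_args.is_empty());
}
```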
@@ -160,11 +160,11 @@ pub(crate) fn needless_bool(checker: &mut Checker, stmt_if: &ast::StmtIf) {
};
let value_node = ast::ExprCall {
func: Box::new(func_node.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([if_test.clone()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let return_node = ast::StmtReturn {

@@ -121,7 +121,7 @@ fn is_open(checker: &mut Checker, func: &Expr) -> bool {
}
// open(...)
Expr::Name(ast::ExprName { id, .. }) => {
id.as_str() == "open" && checker.semantic().is_builtin("open")
&**id == "open" && checker.semantic().is_builtin("open")
}
_ => false,
}

@@ -390,11 +390,11 @@ fn return_stmt(id: &str, test: &Expr, target: &Expr, iter: &Expr, generator: Gen
};
let node2 = ast::ExprCall {
func: Box::new(node1.into()),
arguments: Arguments {
arguments: Box::new(Arguments {
args: Box::from([node.into()]),
keywords: Box::from([]),
range: TextRange::default(),
},
}),
range: TextRange::default(),
};
let node3 = ast::StmtReturn {

@@ -1,6 +1,6 @@
use ast::{ExprAttribute, ExprName, Identifier};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Arguments, Expr, ExprCall};
use ruff_python_ast::{self as ast, Expr, ExprCall};
use ruff_text_size::Ranged;

use crate::{checkers::ast::Checker, fix::snippet::SourceCodeSnippet};
@@ -61,21 +61,19 @@ impl AlwaysFixableViolation for ZipDictKeysAndValues {
/// SIM911
pub(crate) fn zip_dict_keys_and_values(checker: &mut Checker, expr: &ExprCall) {
let ExprCall {
func,
arguments: Arguments { args, keywords, .. },
..
func, arguments, ..
} = expr;
match &keywords[..] {
match &*arguments.keywords {
[] => {}
[ast::Keyword {
arg: Some(name), ..
}] if name.as_str() == "strict" => {}
_ => return,
};
if matches!(func.as_ref(), Expr::Name(ExprName { id, .. }) if id != "zip") {
if matches!(func.as_ref(), Expr::Name(ExprName { id, .. }) if &**id != "zip") {
return;
}
let [arg1, arg2] = &args[..] else {
let [arg1, arg2] = &*arguments.args else {
return;
};
let Some((var1, attr1)) = get_var_attr(arg1) else {

@@ -7,7 +7,7 @@ pub(super) fn has_slots(body: &[Stmt]) -> bool {
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
for target in targets {
if let Expr::Name(ast::ExprName { id, .. }) = target {
if id.as_str() == "__slots__" {
if &**id == "__slots__" {
return true;
}
}
@@ -15,7 +15,7 @@ pub(super) fn has_slots(body: &[Stmt]) -> bool {
}
Stmt::AnnAssign(ast::StmtAnnAssign { target, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
if id.as_str() == "__slots__" {
if &**id == "__slots__" {
return true;
}
}

@@ -13,11 +13,11 @@ fn is_empty_stmt(stmt: &Stmt) -> bool {
if let Some(exc) = exc {
match exc.as_ref() {
Expr::Name(ast::ExprName { id, .. }) => {
return id == "NotImplementedError" || id == "NotImplemented";
return &**id == "NotImplementedError" || &**id == "NotImplemented";
}
Expr::Call(ast::ExprCall { func, .. }) => {
if let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() {
return id == "NotImplementedError" || id == "NotImplemented";
return &**id == "NotImplementedError" || &**id == "NotImplemented";
}
}
_ => {}

@@ -1,4 +1,4 @@
use ruff_python_ast::{self as ast, Arguments, ConversionFlag, Expr};
use ruff_python_ast::{self as ast, ConversionFlag, Expr};
use ruff_text_size::TextRange;

/// Wrap an expression in a [`ast::FStringElement::Expression`] with no special formatting.
@@ -26,14 +26,9 @@ fn is_simple_call(expr: &Expr) -> bool {
match expr {
Expr::Call(ast::ExprCall {
func,
arguments:
Arguments {
args,
keywords,
range: _,
},
arguments,
range: _,
}) => args.is_empty() && keywords.is_empty() && is_simple_callee(func),
}) => arguments.is_empty() && is_simple_callee(func),
_ => false,
}
}

@@ -3,7 +3,7 @@ use itertools::Itertools;
use crate::fix::edits::pad;
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Arguments, Expr};
use ruff_python_ast::{self as ast, Expr};
use ruff_text_size::{Ranged, TextRange};

use crate::checkers::ast::Checker;
@@ -103,20 +103,16 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {

/// FLY002
pub(crate) fn static_join_to_fstring(checker: &mut Checker, expr: &Expr, joiner: &str) {
let Expr::Call(ast::ExprCall {
arguments: Arguments { args, keywords, .. },
..
}) = expr
else {
let Expr::Call(ast::ExprCall { arguments, .. }) = expr else {
return;
};

// If there are kwargs or more than one argument, this is some non-standard
// string join call.
if !keywords.is_empty() {
if !arguments.keywords.is_empty() {
return;
}
let [arg] = &**args else {
let [arg] = &*arguments.args else {
return;
};

@@ -419,20 +419,23 @@ mod tests {
Ok(())
}

#[test_case(Path::new("line_ending_crlf.py"))]
#[test_case(Path::new("line_ending_lf.py"))]
fn source_code_style(path: &Path) -> Result<()> {
let snapshot = format!("{}", path.to_string_lossy());
let diagnostics = test_path(
Path::new("isort").join(path).as_path(),
&LinterSettings {
src: vec![test_resource_path("fixtures/isort")],
..LinterSettings::for_rule(Rule::UnsortedImports)
},
)?;
crate::assert_messages!(snapshot, diagnostics);
Ok(())
}
// Test currently disabled as line endings are automatically converted to
// platform-appropriate ones in CI/CD #[test_case(Path::new("
// line_ending_crlf.py"))] #[test_case(Path::new("line_ending_lf.py"))]
// fn source_code_style(path: &Path) -> Result<()> {
// let snapshot = format!("{}", path.to_string_lossy());
// let diagnostics = test_path(
// Path::new("isort")
// .join(path)
// .as_path(),
// &LinterSettings {
// src: vec![test_resource_path("fixtures/isort")],
// ..LinterSettings::for_rule(Rule::UnsortedImports)
// },
// )?;
// crate::assert_messages!(snapshot, diagnostics);
// Ok(())
// }

#[test_case(Path::new("separate_local_folder_imports.py"))]
fn known_local_folder(path: &Path) -> Result<()> {

@@ -1,23 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
line_ending_crlf.py:1:1: I001 [*] Import block is un-sorted or un-formatted
|
1 | / from long_module_name import member_one, member_two, member_three, member_four, member_five
2 | |
| |_^ I001
|
= help: Organize imports

ℹ Safe fix
1 |-from long_module_name import member_one, member_two, member_three, member_four, member_five
1 |+from long_module_name import (
2 |+ member_five,
3 |+ member_four,
4 |+ member_one,
5 |+ member_three,
6 |+ member_two,
7 |+)
2 8 |

@@ -1,23 +0,0 @@
---
source: crates/ruff_linter/src/rules/isort/mod.rs
---
line_ending_lf.py:1:1: I001 [*] Import block is un-sorted or un-formatted
|
1 | / from long_module_name import member_one, member_two, member_three, member_four, member_five
2 | |
| |_^ I001
|
= help: Organize imports

ℹ Safe fix
1 |-from long_module_name import member_one, member_two, member_three, member_four, member_five
1 |+from long_module_name import (
2 |+ member_five,
3 |+ member_four,
4 |+ member_one,
5 |+ member_three,
6 |+ member_two,
7 |+)
2 8 |

Some files were not shown because too many files have changed in this diff.