Compare commits
1 commit
pythonplus...zanie/docs

| Author | SHA1 | Date |
|---|---|---|
|  | 4faa122f54 |  |
2 .github/ISSUE_TEMPLATE.md (vendored)
@@ -3,8 +3,6 @@ Thank you for taking the time to report an issue! We're glad to have you involve

If you're filing a bug report, please consider including the following information:

* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
  e.g. "RUF001", "unused variable", "Jupyter notebook"
* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
1 .github/workflows/docs.yaml (vendored)
@@ -9,6 +9,7 @@ on:
        type: string
  release:
    types: [published]
  pull_request:

jobs:
  mkdocs:
1 Cargo.lock (generated)
@@ -2345,7 +2345,6 @@ dependencies = [
"itertools 0.12.1",
"lexical-parse-float",
"rand",
"ruff_python_ast",
"unic-ucd-category",
]
26 README.md
@@ -129,7 +129,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
To run Ruff as a linter, try any of the following:

```shell
ruff check # Lint all files in the current directory (and any subdirectories).
ruff check . # Lint all files in the current directory (and any subdirectories).
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
ruff check path/to/code/to/file.py # Lint `file.py`.
@@ -139,7 +139,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con
Or, to run Ruff as a formatter:

```shell
ruff format # Format all files in the current directory (and any subdirectories).
ruff format . # Format all files in the current directory (and any subdirectories).
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
ruff format path/to/code/to/file.py # Format `file.py`.
@@ -183,9 +183,10 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
for a complete list of all configuration options).

If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
If left unspecified, Ruff's default configuration is equivalent to:

```toml
[tool.ruff]
# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
@@ -223,7 +224,7 @@ indent-width = 4
# Assume Python 3.8
target-version = "py38"

[lint]
[tool.ruff.lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
select = ["E4", "E7", "E9", "F"]
ignore = []
@@ -235,7 +236,7 @@ unfixable = []
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[format]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"

@@ -249,20 +250,11 @@ skip-magic-trailing-comma = false
line-ending = "auto"
```

Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
example, `[lint]` should be replaced with `[tool.ruff.lint]`.

Some configuration options can be provided via dedicated command-line arguments, such as those
related to rule enablement and disablement, file discovery, and logging level:
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, and logging level:

```shell
ruff check --select F401 --select F403 --quiet
```

The remaining configuration options can be provided through a catch-all `--config` argument:

```shell
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
ruff check path/to/code/ --select F401 --select F403 --quiet
```

See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
@@ -497,11 +497,7 @@ pub struct FormatCommand {
}

#[derive(Clone, Debug, clap::Parser)]
pub struct ServerCommand {
    /// Enable preview mode; required for regular operation
    #[arg(long)]
    pub(crate) preview: bool,
}
pub struct ServerCommand;

#[derive(Debug, Clone, Copy, clap::ValueEnum)]
pub enum HelpFormat {
@@ -9,11 +9,7 @@ use tracing_subscriber::{
};
use tracing_tree::time::Uptime;

pub(crate) fn run_server(preview: bool, log_level: LogLevel) -> Result<ExitStatus> {
    if !preview {
        tracing::error!("--preview needs to be provided as a command line argument while the server is still unstable.\nFor example: `ruff server --preview`");
        return Ok(ExitStatus::Error);
    }
pub(crate) fn run_server(log_level: LogLevel) -> Result<ExitStatus> {
    let trace_level = if log_level == LogLevel::Verbose {
        Level::TRACE
    } else {
@@ -206,8 +206,8 @@ fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitS

#[allow(clippy::needless_pass_by_value)] // TODO: remove once we start taking arguments from here
fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
    let ServerCommand { preview } = args;
    commands::server::run_server(preview, log_level)
    let ServerCommand {} = args;
    commands::server::run_server(log_level)
}

pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
@@ -118,8 +118,6 @@ impl Printer {
        } else if remaining > 0 {
            let s = if remaining == 1 { "" } else { "s" };
            writeln!(writer, "Found {remaining} error{s}.")?;
        } else if remaining == 0 {
            writeln!(writer, "All checks passed!")?;
        }

        if let Some(fixables) = fixables {
@@ -23,7 +23,7 @@ fn default_options() {
        .arg("-")
        .pass_stdin(r#"
def foo(arg1, arg2,):
    print('Shouldn\'t change quotes')
    print('Should\'t change quotes')


if condition:
@@ -38,7 +38,7 @@ if condition:
    arg1,
    arg2,
):
    print("Shouldn't change quotes")
    print("Should't change quotes")


if condition:
@@ -101,7 +101,6 @@ fn stdin_success() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -223,7 +222,6 @@ fn stdin_source_type_pyi() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -592,7 +590,6 @@ fn stdin_fix_when_no_issues_should_still_print_contents() {
|
||||
print(sys.version)
|
||||
|
||||
----- stderr -----
|
||||
All checks passed!
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -1026,7 +1023,6 @@ fn preview_disabled_direct() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF911` has no effect because preview is not enabled.
|
||||
@@ -1043,7 +1039,6 @@ fn preview_disabled_prefix_empty() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF91` has no effect because preview is not enabled.
|
||||
@@ -1060,7 +1055,6 @@ fn preview_disabled_does_not_warn_for_empty_ignore_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1076,7 +1070,6 @@ fn preview_disabled_does_not_warn_for_empty_fixable_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1182,7 +1175,6 @@ fn removed_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1213,7 +1205,6 @@ fn redirect_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1316,7 +1307,6 @@ fn deprecated_indirect_preview_enabled() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1393,7 +1383,6 @@ fn unreadable_dir() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Encountered error: Permission denied (os error 13)
|
||||
@@ -1908,7 +1897,6 @@ def log(x, base) -> float:
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
|
||||
@@ -496,7 +496,6 @@ ignore = ["D203", "D212"]
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: No Python files found under the given path(s)
|
||||
@@ -834,7 +833,6 @@ fn complex_config_setting_overridden_via_cli() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
@@ -34,11 +34,6 @@ marking it as unused, as in:
from module import member as member
```

## Fix safety

When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
These fixes are considered unsafe because they can change the public interface.

## Example
```python
import numpy as np # unused import
@@ -201,7 +201,7 @@ linter.allowed_confusables = []
linter.builtins = []
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
linter.external = []
linter.ignore_init_module_imports = true
linter.ignore_init_module_imports = false
linter.logger_objects = []
linter.namespace_packages = []
linter.src = [
@@ -241,22 +241,7 @@ linter.flake8_gettext.functions_names = [
ngettext,
]
linter.flake8_implicit_str_concat.allow_multiline = true
linter.flake8_import_conventions.aliases = {
altair = alt,
holoviews = hv,
matplotlib = mpl,
matplotlib.pyplot = plt,
networkx = nx,
numpy = np,
pandas = pd,
panel = pn,
plotly.express = px,
polars = pl,
pyarrow = pa,
seaborn = sns,
tensorflow = tf,
tkinter = tk,
}
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
linter.flake8_import_conventions.banned_aliases = {}
linter.flake8_import_conventions.banned_from = []
linter.flake8_pytest_style.fixture_parentheses = true
@@ -37,7 +37,7 @@ pub trait Buffer {
    #[doc(hidden)]
    fn elements(&self) -> &[FormatElement];

    /// Glue for usage of the [`write!`] macro with implementers of this trait.
    /// Glue for usage of the [`write!`] macro with implementors of this trait.
    ///
    /// This method should generally not be invoked manually, but rather through the [`write!`] macro itself.
    ///
@@ -18,7 +18,3 @@ func("0.0.0.0")
def my_func():
    x = "0.0.0.0"
    print(x)


# Implicit string concatenation
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"

@@ -18,13 +18,6 @@ with open("/dev/shm/unit/test", "w") as f:
with open("/foo/bar", "w") as f:
    f.write("def")

# Implicit string concatenation
with open("/tmp/" "abc", "w") as f:
    f.write("def")

with open("/tmp/abc" f"/tmp/abc", "w") as f:
    f.write("def")

# Using `tempfile` module should be ok
import tempfile
from tempfile import TemporaryDirectory
@@ -64,5 +64,3 @@ def not_warnings_dot_deprecated(
    "Not warnings.deprecated, so this one *should* lead to PYI053 in a stub!" # Error: PYI053
)
def not_a_deprecated_function() -> None: ...

fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
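For context on the PYI053 fixture change above: the rule flags string and bytes literals longer than 50 characters in stub files. A rough, hypothetical stub excerpt (the names are invented for illustration, not part of this diff):

```python
# example.pyi (hypothetical stub file)
SHORT_CONSTANT: str = "well under fifty characters"  # OK: 50 characters or fewer
LONG_CONSTANT: str = (
    "this literal is deliberately written to be well over fifty characters long"  # PYI053
)
```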
@@ -10,7 +10,7 @@ async def func():
|
||||
|
||||
trio.sleep(0) # TRIO115
|
||||
foo = 0
|
||||
trio.sleep(foo) # OK
|
||||
trio.sleep(foo) # TRIO115
|
||||
trio.sleep(1) # OK
|
||||
time.sleep(0) # OK
|
||||
|
||||
@@ -20,26 +20,26 @@ async def func():
|
||||
trio.sleep(bar)
|
||||
|
||||
x, y = 0, 2000
|
||||
trio.sleep(x) # OK
|
||||
trio.sleep(x) # TRIO115
|
||||
trio.sleep(y) # OK
|
||||
|
||||
(a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
trio.sleep(c) # OK
|
||||
trio.sleep(c) # TRIO115
|
||||
trio.sleep(d) # OK
|
||||
trio.sleep(e) # OK
|
||||
trio.sleep(e) # TRIO115
|
||||
|
||||
m_x, m_y = 0
|
||||
trio.sleep(m_y) # OK
|
||||
trio.sleep(m_x) # OK
|
||||
|
||||
m_a = m_b = 0
|
||||
trio.sleep(m_a) # OK
|
||||
trio.sleep(m_b) # OK
|
||||
trio.sleep(m_a) # TRIO115
|
||||
trio.sleep(m_b) # TRIO115
|
||||
|
||||
m_c = (m_d, m_e) = (0, 0)
|
||||
trio.sleep(m_c) # OK
|
||||
trio.sleep(m_d) # OK
|
||||
trio.sleep(m_e) # OK
|
||||
trio.sleep(m_d) # TRIO115
|
||||
trio.sleep(m_e) # TRIO115
|
||||
|
||||
|
||||
def func():
|
||||
@@ -63,16 +63,4 @@ def func():
|
||||
import trio
|
||||
|
||||
if (walrus := 0) == 0:
|
||||
trio.sleep(walrus) # OK
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
async def main() -> None:
|
||||
sleep = 0
|
||||
for _ in range(2):
|
||||
await trio.sleep(sleep) # OK
|
||||
sleep = 10
|
||||
|
||||
trio.run(main)
|
||||
trio.sleep(walrus) # TRIO115
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
"""Test where the error is after the module's docstring."""
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,4 +0,0 @@
|
||||
"Test where the first line is a comment, " + "and the rule violation follows it."
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,5 +0,0 @@
|
||||
def fn1():
|
||||
pass
|
||||
|
||||
def fn2():
|
||||
pass
|
||||
@@ -1,4 +0,0 @@
|
||||
print("Test where the first line is a statement, and the rule violation follows it.")
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,6 +0,0 @@
|
||||
# Test where the first line is a comment, and the rule violation follows it.
|
||||
|
||||
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,6 +0,0 @@
|
||||
"""Test where the error is after the module's docstring."""
|
||||
|
||||
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,6 +0,0 @@
|
||||
"Test where the first line is a comment, " + "and the rule violation follows it."
|
||||
|
||||
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -1,6 +0,0 @@
|
||||
print("Test where the first line is a statement, and the rule violation follows it.")
|
||||
|
||||
|
||||
|
||||
def fn():
|
||||
pass
|
||||
@@ -10,7 +10,7 @@ def f1():
|
||||
# Here's a standalone comment that's over the limit.
|
||||
|
||||
x = 2
|
||||
# Another standalone that is preceded by a newline and indent token and is over the limit.
|
||||
# Another standalone that is preceded by a newline and indent toke and is over the limit.
|
||||
|
||||
print("Here's a string that's over the limit, but it's not a docstring.")
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ def f1():
|
||||
# Here's a standalone comment that's over theß9💣2ℝ.
|
||||
|
||||
x = 2
|
||||
# Another standalone that is preceded by a newline and indent token and is over theß9💣2ℝ.
|
||||
# Another standalone that is preceded by a newline and indent toke and is over theß9💣2ℝ.
|
||||
|
||||
print("Here's a string that's over theß9💣2ℝ, but it's not a ß9💣2ℝing.")
|
||||
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
"""Regression test for: https://github.com/astral-sh/ruff/issues/10384"""
|
||||
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
|
||||
datetime(1, 2, 3)
|
||||
@@ -1,37 +0,0 @@
|
||||
# These testcases should raise errors
|
||||
|
||||
class Float:
|
||||
def __bool__(self):
|
||||
return 3.05 # [invalid-bool-return]
|
||||
|
||||
class Int:
|
||||
def __bool__(self):
|
||||
return 0 # [invalid-bool-return]
|
||||
|
||||
|
||||
class Str:
|
||||
def __bool__(self):
|
||||
x = "ruff"
|
||||
return x # [invalid-bool-return]
|
||||
|
||||
# TODO: Once Ruff has better type checking
|
||||
def return_int():
|
||||
return 3
|
||||
|
||||
class ComplexReturn:
|
||||
def __bool__(self):
|
||||
return return_int() # [invalid-bool-return]
|
||||
|
||||
|
||||
|
||||
# These testcases should NOT raise errors
|
||||
|
||||
class Bool:
|
||||
def __bool__(self):
|
||||
return True
|
||||
|
||||
|
||||
class Bool2:
|
||||
def __bool__(self):
|
||||
x = True
|
||||
return x
|
||||
@@ -1,36 +1,28 @@
|
||||
# These testcases should raise errors
|
||||
class Str:
|
||||
def __str__(self):
|
||||
return 1
|
||||
|
||||
class Float:
|
||||
def __str__(self):
|
||||
return 3.05
|
||||
|
||||
|
||||
class Int:
|
||||
def __str__(self):
|
||||
return 1
|
||||
|
||||
class Int2:
|
||||
def __str__(self):
|
||||
return 0
|
||||
|
||||
|
||||
class Bool:
|
||||
def __str__(self):
|
||||
return False
|
||||
|
||||
# TODO: Once Ruff has better type checking
|
||||
|
||||
class Str2:
|
||||
def __str__(self):
|
||||
x = "ruff"
|
||||
return x
|
||||
|
||||
# TODO fixme once Ruff has better type checking
|
||||
def return_int():
|
||||
return 3
|
||||
|
||||
class ComplexReturn:
|
||||
def __str__(self):
|
||||
return return_int()
|
||||
|
||||
# These testcases should NOT raise errors
|
||||
|
||||
class Str:
|
||||
def __str__(self):
|
||||
return "ruff"
|
||||
|
||||
class Str2:
|
||||
def __str__(self):
|
||||
x = "ruff"
|
||||
return x
|
||||
return return_int()
|
||||
@@ -1,8 +1,8 @@
|
||||
# Test case 1: Useless exception statement
|
||||
from abc import ABC, abstractmethod
|
||||
from contextlib import suppress
|
||||
|
||||
|
||||
# Test case 1: Useless exception statement
|
||||
def func():
|
||||
AssertionError("This is an assertion error") # PLW0133
|
||||
|
||||
@@ -66,11 +66,6 @@ def func():
|
||||
x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
|
||||
|
||||
|
||||
# Test case 11: Useless warning statement
|
||||
def func():
|
||||
UserWarning("This is an assertion error") # PLW0133
|
||||
|
||||
|
||||
# Non-violation test cases: PLW0133
|
||||
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ if (
|
||||
and some_third_reasonably_long_condition
|
||||
or some_fourth_reasonably_long_condition
|
||||
and some_fifth_reasonably_long_condition
|
||||
# a comment
|
||||
# a commment
|
||||
and some_sixth_reasonably_long_condition
|
||||
and some_seventh_reasonably_long_condition
|
||||
# another comment
|
||||
|
||||
@@ -48,7 +48,7 @@ __all__ = [
|
||||
# we implement an "isort-style sort":
|
||||
# SCEAMING_CASE constants first,
|
||||
# then CamelCase classes,
|
||||
# then anything that's lowercase_snake_case.
|
||||
# then anything thats lowercase_snake_case.
|
||||
# This (which is currently alphabetically sorted)
|
||||
# should get reordered accordingly:
|
||||
__all__ = [
|
||||
|
||||
@@ -53,6 +53,3 @@ class Labware:
|
||||
|
||||
|
||||
assert getattr(Labware(), "µL") == 1.5
|
||||
|
||||
# Implicit string concatenation
|
||||
x = "𝐁ad" f"𝐁ad string"
|
||||
|
||||
@@ -259,29 +259,23 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
||||
diagnostic.set_parent(range.start());
|
||||
}
|
||||
|
||||
// Remove the import if the binding and the shadowed binding are both imports,
|
||||
// and both point to the same qualified name.
|
||||
if let Some(shadowed_import) = shadowed.as_any_import() {
|
||||
if let Some(import) = binding.as_any_import() {
|
||||
if shadowed_import.qualified_name() == import.qualified_name() {
|
||||
if let Some(source) = binding.source {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let statement = checker.semantic().statement(source);
|
||||
let parent = checker.semantic().parent_statement(source);
|
||||
let edit = fix::edits::remove_unused_imports(
|
||||
std::iter::once(import.member_name().as_ref()),
|
||||
statement,
|
||||
parent,
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(source),
|
||||
)))
|
||||
});
|
||||
}
|
||||
}
|
||||
if let Some(import) = binding.as_any_import() {
|
||||
if let Some(source) = binding.source {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let statement = checker.semantic().statement(source);
|
||||
let parent = checker.semantic().parent_statement(source);
|
||||
let edit = fix::edits::remove_unused_imports(
|
||||
std::iter::once(import.member_name().as_ref()),
|
||||
statement,
|
||||
parent,
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(source),
|
||||
)))
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -91,9 +91,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                    checker.diagnostics.push(diagnostic);
                }
            }
            if checker.enabled(Rule::InvalidBoolReturnType) {
                pylint::rules::invalid_bool_return(checker, name, body);
            }
            if checker.enabled(Rule::InvalidStrReturnType) {
                pylint::rules::invalid_str_return(checker, name, body);
            }
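The statement-analysis hunk above wires the preview `invalid-bool-return` check in next to the existing `invalid-str-return` check. A small sketch of what it reports, taken from the deleted `__bool__` fixture earlier in this diff:

```python
class Int:
    def __bool__(self):
        return 0  # invalid-bool-return: __bool__ should return a bool


class Bool:
    def __bool__(self):
        return True  # OK
```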
@@ -44,10 +44,10 @@ use ruff_python_ast::helpers::{
|
||||
};
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::name::QualifiedName;
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor};
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_f_string_element, walk_pattern, Visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor, PySourceType};
|
||||
use ruff_python_codegen::{Generator, Stylist};
|
||||
use ruff_python_codegen::{Generator, Quote, Stylist};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
use ruff_python_semantic::analyze::{imports, typing, visibility};
|
||||
@@ -228,11 +228,16 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
// Find the quote character used to start the containing f-string.
|
||||
let ast::ExprFString { value, .. } = self
|
||||
.semantic
|
||||
.current_expressions()
|
||||
.find_map(|expr| expr.as_f_string_expr())?;
|
||||
Some(value.iter().next()?.quote_style().opposite())
|
||||
let expr = self.semantic.current_expression()?;
|
||||
let string_range = self.indexer.fstring_ranges().innermost(expr.start())?;
|
||||
let trailing_quote = trailing_quote(self.locator.slice(string_range))?;
|
||||
|
||||
// Invert the quote character, if it's a single quote.
|
||||
match trailing_quote {
|
||||
"'" => Some(Quote::Double),
|
||||
"\"" => Some(Quote::Single),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the [`SourceRow`] for the given offset.
|
||||
@@ -1407,7 +1412,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
analyze::string_like(string_literal.into(), self);
|
||||
}
|
||||
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
|
||||
Expr::FString(f_string) => analyze::string_like(f_string.into(), self),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -1574,6 +1578,16 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
.push((bound, self.semantic.snapshot()));
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'a ast::FStringElement) {
|
||||
// Step 2: Traversal
|
||||
walk_f_string_element(self, f_string_element);
|
||||
|
||||
// Step 4: Analysis
|
||||
if let Some(literal) = f_string_element.as_literal() {
|
||||
analyze::string_like(literal.into(), self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
|
||||
@@ -240,7 +240,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
        (Pylint, "E0237") => (RuleGroup::Stable, rules::pylint::rules::NonSlotAssignment),
        (Pylint, "E0241") => (RuleGroup::Stable, rules::pylint::rules::DuplicateBases),
        (Pylint, "E0302") => (RuleGroup::Stable, rules::pylint::rules::UnexpectedSpecialMethodSignature),
        (Pylint, "E0304") => (RuleGroup::Preview, rules::pylint::rules::InvalidBoolReturnType),
        (Pylint, "E0307") => (RuleGroup::Stable, rules::pylint::rules::InvalidStrReturnType),
        (Pylint, "E0604") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllObject),
        (Pylint, "E0605") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllFormat),
@@ -294,7 +294,7 @@ impl Violation for MissingReturnTypePrivateFunction {
///
/// Note that type checkers often allow you to omit the return type annotation for
/// `__init__` methods, as long as at least one argument has a type annotation. To
/// opt in to this behavior, use the `mypy-init-return` setting in your `pyproject.toml`
/// opt-in to this behavior, use the `mypy-init-return` setting in your `pyproject.toml`
/// or `ruff.toml` file:
///
/// ```toml
@@ -38,37 +38,17 @@ impl Violation for HardcodedBindAllInterfaces {
|
||||
|
||||
/// S104
|
||||
pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: StringLike) {
|
||||
match string {
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => {
|
||||
if value == "0.0.0.0" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
|
||||
}
|
||||
let is_bind_all_interface = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "0.0.0.0",
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => {
|
||||
&**value == "0.0.0.0"
|
||||
}
|
||||
StringLike::FString(ast::ExprFString { value, .. }) => {
|
||||
for part in value {
|
||||
match part {
|
||||
ast::FStringPart::Literal(literal) => {
|
||||
if &**literal == "0.0.0.0" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, literal.range()));
|
||||
}
|
||||
}
|
||||
ast::FStringPart::FString(f_string) => {
|
||||
for literal in f_string.literals() {
|
||||
if &**literal == "0.0.0.0" {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HardcodedBindAllInterfaces,
|
||||
literal.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StringLike::Bytes(_) => (),
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
|
||||
if is_bind_all_interface {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
|
||||
}
|
||||
}
|
||||
|
||||
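The `hardcoded_bind_all_interfaces` hunk above folds the f-string handling into a single `is_bind_all_interface` check, so literal segments of f-strings are compared against `"0.0.0.0"` alongside plain string literals. A minimal, self-contained Python sketch of the kind of code this targets (the `func` and `expr` names are illustrative, mirroring the S104 fixture earlier in this diff):

```python
def func(host: str) -> None:
    print(f"binding to {host}")


expr = "interface"

func("0.0.0.0")  # plain string literal: S104, possible binding to all interfaces

# Implicit concatenation with an f-string: the literal parts of the f-string are
# checked as well, so each "0.0.0.0" segment can be reported separately.
host = "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
```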
@@ -1,5 +1,5 @@
|
||||
use ruff_python_ast::{self as ast, Expr, StringLike};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -53,29 +53,12 @@ impl Violation for HardcodedTempFile {
|
||||
|
||||
/// S108
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike) {
|
||||
match string {
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => {
|
||||
check(checker, value.to_str(), string.range());
|
||||
}
|
||||
StringLike::FString(ast::ExprFString { value, .. }) => {
|
||||
for part in value {
|
||||
match part {
|
||||
ast::FStringPart::Literal(literal) => {
|
||||
check(checker, literal, literal.range());
|
||||
}
|
||||
ast::FStringPart::FString(f_string) => {
|
||||
for literal in f_string.literals() {
|
||||
check(checker, literal, literal.range());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StringLike::Bytes(_) => (),
|
||||
}
|
||||
}
|
||||
let value = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value,
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
|
||||
fn check(checker: &mut Checker, value: &str, range: TextRange) {
|
||||
if !checker
|
||||
.settings
|
||||
.flake8_bandit
|
||||
@@ -102,6 +85,6 @@ fn check(checker: &mut Checker, value: &str, range: TextRange) {
|
||||
HardcodedTempFile {
|
||||
string: value.to_string(),
|
||||
},
|
||||
range,
|
||||
string.range(),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
///
/// ## Why is this bad?
/// `logging.config.listen` starts a server that listens for logging
/// configuration requests. This is insecure, as parts of the configuration are
/// configuration requests. This is insecure as parts of the configuration are
/// passed to the built-in `eval` function, which can be used to execute
/// arbitrary code.
///

@@ -222,7 +222,7 @@ impl Violation for StartProcessWithNoShell {
///
/// ## Why is this bad?
/// Starting a process with a partial executable path can allow attackers to
/// execute an arbitrary executable by adjusting the `PATH` environment variable.
/// execute arbitrary executable by adjusting the `PATH` environment variable.
/// Consider using a full path to the executable instead.
///
/// ## Example

@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
/// Checks for uses of policies disabling SSH verification in Paramiko.
///
/// ## Why is this bad?
/// By default, Paramiko checks the identity of the remote host when establishing
/// By default, Paramiko checks the identity of remote host when establishing
/// an SSH connection. Disabling the verification might lead to the client
/// connecting to a malicious host, without the client knowing.
///

@@ -59,7 +59,7 @@ impl Violation for SuspiciousPickleUsage {
/// Checks for calls to `marshal` functions.
///
/// ## Why is this bad?
/// Deserializing untrusted data with `marshal` is insecure, as it can allow for
/// Deserializing untrusted data with `marshal` is insecure as it can allow for
/// the creation of arbitrary objects, which can then be used to achieve
/// arbitrary code execution and otherwise unexpected behavior.
///
@@ -68,7 +68,7 @@ impl Violation for SuspiciousPickleUsage {
///
/// If you must deserialize untrusted data with `marshal`, consider signing the
/// data with a secret key and verifying the signature before deserializing the
/// payload. This will prevent an attacker from injecting arbitrary objects
/// payload, This will prevent an attacker from injecting arbitrary objects
/// into the serialized data.
///
/// ## Example
@@ -353,7 +353,7 @@ impl Violation for SuspiciousMarkSafeUsage {
/// behavior.
///
/// To mitigate this risk, audit all uses of URL open functions and ensure that
/// only permitted schemes are used (e.g., allowing `http:` and `https:`, and
/// only permitted schemes are used (e.g., allowing `http:` and `https:` and
/// disallowing `file:` and `ftp:`).
///
/// ## Example
@@ -395,7 +395,7 @@ impl Violation for SuspiciousURLOpenUsage {
/// Checks for uses of cryptographically weak pseudo-random number generators.
///
/// ## Why is this bad?
/// Cryptographically weak pseudo-random number generators are insecure, as they
/// Cryptographically weak pseudo-random number generators are insecure as they
/// are easily predictable. This can allow an attacker to guess the generated
/// numbers and compromise the security of the system.
///

@@ -245,7 +245,7 @@ impl Violation for SuspiciousLxmlImport {
/// Checks for imports of the `xmlrpc` module.
///
/// ## Why is this bad?
/// XMLRPC is a particularly dangerous XML module, as it is also concerned with
/// XMLRPC is a particularly dangerous XML module as it is also concerned with
/// communicating data over a network. Use the `defused.xmlrpc.monkey_patch()`
/// function to monkey-patch the `xmlrpclib` module and mitigate remote XML
/// attacks.
@@ -42,23 +42,4 @@ S104.py:19:9: S104 Possible binding to all interfaces
|
||||
20 | print(x)
|
||||
|
|
||||
|
||||
S104.py:24:1: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:24:13: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:24:26: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
@@ -37,28 +37,4 @@ S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/de
|
||||
15 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:22:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
21 | # Implicit string concatenation
|
||||
22 | with open("/tmp/" "abc", "w") as f:
|
||||
| ^^^^^^^^^^^^^ S108
|
||||
23 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:24: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
@@ -45,28 +45,4 @@ S108.py:18:11: S108 Probable insecure usage of temporary file or directory: "/fo
|
||||
19 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:22:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
21 | # Implicit string concatenation
|
||||
22 | with open("/tmp/" "abc", "w") as f:
|
||||
| ^^^^^^^^^^^^^ S108
|
||||
23 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:24: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
@@ -13,7 +13,7 @@ use crate::rules::flake8_comprehensions::settings::Settings;
/// rewritten as empty literals.
///
/// ## Why is this bad?
/// It's unnecessary to call, e.g., `dict()` as opposed to using an empty
/// It's unnecessary to call e.g., `dict()` as opposed to using an empty
/// literal (`{}`). The former is slower because the name `dict` must be
/// looked up in the global scope in case it has been rebound.
///

@@ -10,14 +10,14 @@ use crate::checkers::ast::Checker;
use super::helpers;

/// ## What it does
/// Checks that a `__str__` method is defined in Django models.
/// Checks that `__str__` method is defined in Django models.
///
/// ## Why is this bad?
/// Django models should define a `__str__` method to return a string representation
/// Django models should define `__str__` method to return a string representation
/// of the model instance, as Django calls this method to display the object in
/// the Django Admin and elsewhere.
///
/// Models without a `__str__` method will display a non-meaningful representation
/// Models without `__str__` method will display a non-meaningful representation
/// of the object in the Django Admin.
///
/// ## Example
@@ -11,7 +11,7 @@ mod tests {
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::flake8_import_conventions::settings::{default_aliases, BannedAliases};
|
||||
use crate::rules::flake8_import_conventions::settings::default_aliases;
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::test::test_path;
|
||||
|
||||
@@ -57,20 +57,17 @@ mod tests {
|
||||
banned_aliases: FxHashMap::from_iter([
|
||||
(
|
||||
"typing".to_string(),
|
||||
BannedAliases::from_iter(["t".to_string(), "ty".to_string()]),
|
||||
vec!["t".to_string(), "ty".to_string()],
|
||||
),
|
||||
(
|
||||
"numpy".to_string(),
|
||||
BannedAliases::from_iter(["nmp".to_string(), "npy".to_string()]),
|
||||
vec!["nmp".to_string(), "npy".to_string()],
|
||||
),
|
||||
(
|
||||
"tensorflow.keras.backend".to_string(),
|
||||
BannedAliases::from_iter(["K".to_string()]),
|
||||
),
|
||||
(
|
||||
"torch.nn.functional".to_string(),
|
||||
BannedAliases::from_iter(["F".to_string()]),
|
||||
vec!["K".to_string()],
|
||||
),
|
||||
("torch.nn.functional".to_string(), vec!["F".to_string()]),
|
||||
]),
|
||||
banned_from: FxHashSet::default(),
|
||||
},
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
use ruff_python_ast::Stmt;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::rules::flake8_import_conventions::settings::BannedAliases;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for imports that use non-standard naming conventions, like
|
||||
/// `import tensorflow.keras.backend as K`.
|
||||
@@ -51,7 +49,7 @@ pub(crate) fn banned_import_alias(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
asname: &str,
|
||||
banned_conventions: &FxHashMap<String, BannedAliases>,
|
||||
banned_conventions: &FxHashMap<String, Vec<String>>,
|
||||
) -> Option<Diagnostic> {
|
||||
if let Some(banned_aliases) = banned_conventions.get(name) {
|
||||
if banned_aliases
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
//! Settings for import conventions.
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("altair", "alt"),
|
||||
@@ -26,41 +23,10 @@ const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("pyarrow", "pa"),
|
||||
];
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct BannedAliases(Vec<String>);
|
||||
|
||||
impl Display for BannedAliases {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "[")?;
|
||||
for (i, alias) in self.0.iter().enumerate() {
|
||||
if i > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
write!(f, "{alias}")?;
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
}
|
||||
|
||||
impl BannedAliases {
|
||||
/// Returns an iterator over the banned aliases.
|
||||
pub fn iter(&self) -> impl Iterator<Item = &str> {
|
||||
self.0.iter().map(String::as_str)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<String> for BannedAliases {
|
||||
fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> Self {
|
||||
Self(iter.into_iter().collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub aliases: FxHashMap<String, String>,
|
||||
pub banned_aliases: FxHashMap<String, BannedAliases>,
|
||||
pub banned_aliases: FxHashMap<String, Vec<String>>,
|
||||
pub banned_from: FxHashSet<String>,
|
||||
}
|
||||
|
||||
@@ -87,9 +53,9 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.flake8_import_conventions",
|
||||
fields = [
|
||||
self.aliases | map,
|
||||
self.banned_aliases | map,
|
||||
self.banned_from | set,
|
||||
self.aliases | debug,
|
||||
self.banned_aliases | debug,
|
||||
self.banned_from | array,
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -21,7 +21,7 @@ use crate::checkers::ast::Checker;
|
||||
/// ## Why is this bad?
|
||||
/// The `startswith` and `endswith` methods accept tuples of prefixes or
|
||||
/// suffixes respectively. Passing a tuple of prefixes or suffixes is more
|
||||
/// efficient and readable than calling the method multiple times.
|
||||
/// more efficient and readable than calling the method multiple times.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
@@ -57,9 +57,11 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
|
||||
}
|
||||
|
||||
let length = match string {
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
StringLike::Bytes(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
StringLike::FString(node) => count_f_string_chars(node),
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
StringLike::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => {
|
||||
value.chars().count()
|
||||
}
|
||||
};
|
||||
if length <= 50 {
|
||||
return;
|
||||
@@ -73,26 +75,6 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
/// Count the number of visible characters in an f-string. This accounts for
|
||||
/// implicitly concatenated f-strings as well.
|
||||
fn count_f_string_chars(f_string: &ast::ExprFString) -> usize {
|
||||
f_string
|
||||
.value
|
||||
.iter()
|
||||
.map(|part| match part {
|
||||
ast::FStringPart::Literal(string) => string.chars().count(),
|
||||
ast::FStringPart::FString(f_string) => f_string
|
||||
.elements
|
||||
.iter()
|
||||
.map(|element| match element {
|
||||
ast::FStringElement::Literal(string) => string.chars().count(),
|
||||
ast::FStringElement::Expression(expr) => expr.range().len().to_usize(),
|
||||
})
|
||||
.sum(),
|
||||
})
|
||||
.sum()
|
||||
}
|
||||
|
||||
fn is_warnings_dot_deprecated(expr: Option<&ast::Expr>, semantic: &SemanticModel) -> bool {
|
||||
// Does `expr` represent a call to `warnings.deprecated` or `typing_extensions.deprecated`?
|
||||
let Some(expr) = expr else {
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
|
||||
/// Checks for the presence of multiple literal types in a union.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Literal types accept multiple arguments, and it is clearer to specify them
|
||||
/// Literal types accept multiple arguments and it is clearer to specify them
|
||||
/// as a single literal.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -105,12 +105,12 @@ PYI053.pyi:34:14: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
36 36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 37 |
|
||||
|
||||
PYI053.pyi:38:13: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
PYI053.pyi:38:15: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
|
|
||||
36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 |
|
||||
38 | fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
39 |
|
||||
40 | class Demo:
|
||||
|
|
||||
@@ -121,7 +121,7 @@ PYI053.pyi:38:13: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
36 36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 37 |
|
||||
38 |-fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
38 |+fbar: str = ... # Error: PYI053
|
||||
38 |+fbar: str = f"..." # Error: PYI053
|
||||
39 39 |
|
||||
40 40 | class Demo:
|
||||
41 41 | """Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
@@ -144,20 +144,5 @@ PYI053.pyi:64:5: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
64 |+ ... # Error: PYI053
|
||||
65 65 | )
|
||||
66 66 | def not_a_deprecated_function() -> None: ...
|
||||
67 67 |
|
||||
|
||||
PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
|
|
||||
66 | def not_a_deprecated_function() -> None: ...
|
||||
67 |
|
||||
68 | fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
|
|
||||
= help: Replace with `...`
|
||||
|
||||
ℹ Safe fix
|
||||
65 65 | )
|
||||
66 66 | def not_a_deprecated_function() -> None: ...
|
||||
67 67 |
|
||||
68 |-fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
|
||||
68 |+fbaz: str = ... # Error: PYI053
|
||||
|
||||
@@ -103,9 +103,9 @@ impl Violation for PytestParametrizeNamesWrongType {
/// of values.
///
/// The style for the list of values rows can be configured via the
/// [`lint.flake8-pytest-style.parametrize-values-type`] setting, while the
/// the [`lint.flake8-pytest-style.parametrize-values-type`] setting, while the
/// style for each row of values can be configured via the
/// [`lint.flake8-pytest-style.parametrize-values-row-type`] setting.
/// the [`lint.flake8-pytest-style.parametrize-values-row-type`] setting.
///
/// For example, [`lint.flake8-pytest-style.parametrize-values-type`] will lead to
/// the following expectations:
@@ -22,11 +22,11 @@ impl Default for Quote {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ruff_python_ast::str::Quote> for Quote {
|
||||
fn from(value: ruff_python_ast::str::Quote) -> Self {
|
||||
impl From<ruff_python_ast::str::QuoteStyle> for Quote {
|
||||
fn from(value: ruff_python_ast::str::QuoteStyle) -> Self {
|
||||
match value {
|
||||
ruff_python_ast::str::Quote::Double => Self::Double,
|
||||
ruff_python_ast::str::Quote::Single => Self::Single,
|
||||
ruff_python_ast::str::QuoteStyle::Double => Self::Double,
|
||||
ruff_python_ast::str::QuoteStyle::Single => Self::Single,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,7 +151,7 @@ impl AlwaysFixableViolation for ImplicitReturn {
/// assigned variable.
///
/// ## Why is this bad?
/// The variable assignment is not necessary, as the value can be returned
/// The variable assignment is not necessary as the value can be returned
/// directly.
///
/// ## Example

@@ -61,7 +61,7 @@ impl Violation for IfExprWithTrueFalse {
/// condition.
///
/// ## Why is this bad?
/// `if` expressions that evaluate to `False` for a truthy condition and `True`
/// `if` expressions that evaluate to `False` for a truthy condition an `True`
/// for a falsey condition can be replaced with `not` operators, which are more
/// concise and readable.
///
@@ -13,14 +13,15 @@ use crate::rules::flake8_tidy_imports::matchers::NameMatchPolicy;
///
/// ## Why is this bad?
/// Projects may want to ensure that specific modules or module members are
/// not imported or accessed.
/// not be imported or accessed.
///
/// Security or other company policies may be a reason to impose
/// restrictions on importing external Python libraries. In some cases,
/// projects may adopt conventions around the use of certain modules or
/// module members that are not enforceable by the language itself.
///
/// This rule enforces certain import conventions project-wide automatically.
/// This rule enforces certain import conventions project-wide in an
/// automatic way.
///
/// ## Options
/// - `lint.flake8-tidy-imports.banned-api`
@@ -13,12 +13,6 @@ pub struct ApiBan {
|
||||
pub msg: String,
|
||||
}
|
||||
|
||||
impl Display for ApiBan {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.msg)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, Default)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
@@ -53,7 +47,7 @@ impl Display for Settings {
|
||||
namespace = "linter.flake8_tidy_imports",
|
||||
fields = [
|
||||
self.ban_relative_imports,
|
||||
self.banned_api | map,
|
||||
self.banned_api | debug,
|
||||
self.banned_module_level_imports | array,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprCall, Int, Number};
|
||||
use ruff_python_semantic::analyze::typing::find_assigned_value;
|
||||
use ruff_python_semantic::Modules;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -73,6 +74,20 @@ pub(crate) fn zero_sleep_call(checker: &mut Checker, call: &ExprCall) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
let Some(value) = find_assigned_value(id, checker.semantic()) else {
|
||||
return;
|
||||
};
|
||||
if !matches!(
|
||||
value,
|
||||
Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: Number::Int(Int::ZERO),
|
||||
..
|
||||
})
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => return,
|
||||
}
|
||||
|
||||
|
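The `zero_sleep_call` hunk above starts resolving names through `find_assigned_value`, so `trio.sleep(x)` can be reported when `x` was assigned zero elsewhere. A runnable sketch mirroring the TRIO115 fixture changes in this diff:

```python
import trio


async def main() -> None:
    foo = 0
    await trio.sleep(foo)  # TRIO115: equivalent to trio.sleep(0); replace with trio.lowlevel.checkpoint()
    await trio.sleep(1)    # OK: non-zero duration


trio.run(main)
```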
||||
@@ -29,7 +29,7 @@ TRIO115.py:11:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
11 | trio.sleep(0) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
12 | foo = 0
|
||||
13 | trio.sleep(foo) # OK
|
||||
13 | trio.sleep(foo) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
@@ -40,9 +40,30 @@ TRIO115.py:11:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
11 |- trio.sleep(0) # TRIO115
|
||||
11 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
12 12 | foo = 0
|
||||
13 13 | trio.sleep(foo) # OK
|
||||
13 13 | trio.sleep(foo) # TRIO115
|
||||
14 14 | trio.sleep(1) # OK
|
||||
|
||||
TRIO115.py:13:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
11 | trio.sleep(0) # TRIO115
|
||||
12 | foo = 0
|
||||
13 | trio.sleep(foo) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
14 | trio.sleep(1) # OK
|
||||
15 | time.sleep(0) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 |
|
||||
11 11 | trio.sleep(0) # TRIO115
|
||||
12 12 | foo = 0
|
||||
13 |- trio.sleep(foo) # TRIO115
|
||||
13 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
14 14 | trio.sleep(1) # OK
|
||||
15 15 | time.sleep(0) # OK
|
||||
16 16 |
|
||||
|
||||
TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
15 | time.sleep(0) # OK
|
||||
@@ -64,6 +85,145 @@ TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
19 19 | bar = "bar"
|
||||
20 20 | trio.sleep(bar)
|
||||
|
||||
TRIO115.py:23:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
22 | x, y = 0, 2000
|
||||
23 | trio.sleep(x) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
24 | trio.sleep(y) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | trio.sleep(bar)
|
||||
21 21 |
|
||||
22 22 | x, y = 0, 2000
|
||||
23 |- trio.sleep(x) # TRIO115
|
||||
23 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
|
||||
TRIO115.py:27:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 |- trio.sleep(c) # TRIO115
|
||||
27 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 29 | trio.sleep(e) # TRIO115
|
||||
30 30 |
|
||||
|
||||
TRIO115.py:29:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
30 |
|
||||
31 | m_x, m_y = 0
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 27 | trio.sleep(c) # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 |- trio.sleep(e) # TRIO115
|
||||
29 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
30 30 |
|
||||
31 31 | m_x, m_y = 0
|
||||
32 32 | trio.sleep(m_y) # OK
|
||||
|
||||
TRIO115.py:36:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
33 33 | trio.sleep(m_x) # OK
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 |- trio.sleep(m_a) # TRIO115
|
||||
36 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
37 37 | trio.sleep(m_b) # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
||||
TRIO115.py:37:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
38 |
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 36 | trio.sleep(m_a) # TRIO115
|
||||
37 |- trio.sleep(m_b) # TRIO115
|
||||
37 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
|
||||
TRIO115.py:41:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 |- trio.sleep(m_d) # TRIO115
|
||||
41 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
42 42 | trio.sleep(m_e) # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
|
||||
TRIO115.py:42:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 41 | trio.sleep(m_d) # TRIO115
|
||||
42 |- trio.sleep(m_e) # TRIO115
|
||||
42 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
45 45 | def func():
|
||||
|
||||
TRIO115.py:48:14: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
46 | import trio
|
||||
@@ -132,3 +292,20 @@ TRIO115.py:59:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | def func():
|
||||
|
||||
TRIO115.py:66:9: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
65 | if (walrus := 0) == 0:
|
||||
66 | trio.sleep(walrus) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
63 63 | import trio
|
||||
64 64 |
|
||||
65 65 | if (walrus := 0) == 0:
|
||||
66 |- trio.sleep(walrus) # TRIO115
|
||||
66 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
|
||||
|
||||
|
||||
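A hedged illustration of what the TRIO115 fixes above amount to in user code — a minimal sketch, assuming the `trio` package is installed (the `main` coroutine is just a placeholder):

```python
import trio


async def main() -> None:
    await trio.sleep(5)               # a real delay: fine as written
    await trio.lowlevel.checkpoint()  # preferred over `await trio.sleep(0)`


trio.run(main)
```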
@@ -16,7 +16,7 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
///
|
||||
/// | | `glob` | `Path.glob` |
|
||||
/// |-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
/// | Hidden files | Excludes hidden files by default. From Python 3.11 onwards, the `include_hidden` keyword can be used to include hidden directories. | Includes hidden files by default. |
|
||||
/// | Hidden files | Excludes hidden files by default. From Python 3.11 onwards, the `include_hidden` keyword can used to include hidden directories. | Includes hidden files by default. |
|
||||
/// | Iterator | `iglob` returns an iterator. Under the hood, `glob` simply converts the iterator to a list. | `Path.glob` returns an iterator. |
|
||||
/// | Working directory | `glob` takes a `root_dir` keyword to set the current working directory. | `Path.rglob` can be used to return the relative path. |
|
||||
/// | Globstar (`**`) | `glob` requires the `recursive` flag to be set to `True` for the `**` pattern to match any files and zero or more directories, subdirectories, and symbolic links. | The `**` pattern in `Path.glob` means "this directory and all subdirectories, recursively". In other words, it enables recursive globbing. |
|
||||
|
||||
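A minimal sketch of the `glob` vs. `pathlib` differences tabulated above, using only the standard library:

```python
import glob
from pathlib import Path

# glob.glob: `**` only recurses when recursive=True, and hidden files are
# skipped by default (Python 3.11+ adds an `include_hidden` keyword).
via_glob = glob.glob("**/*.py", recursive=True)

# Path.glob: `**` recurses by default, and hidden files are included.
via_pathlib = [str(p) for p in Path(".").glob("**/*.py")]

print(len(via_glob), len(via_pathlib))
```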
@@ -278,7 +278,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustc_hash::FxHashMap;
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_text_size::Ranged;
|
||||
@@ -495,7 +495,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_to_top: FxHashSet::from_iter([
|
||||
force_to_top: BTreeSet::from([
|
||||
"z".to_string(),
|
||||
"lib1".to_string(),
|
||||
"lib3".to_string(),
|
||||
@@ -575,10 +575,9 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_single_line: true,
|
||||
single_line_exclusions: FxHashSet::from_iter([
|
||||
"os".to_string(),
|
||||
"logging.handlers".to_string(),
|
||||
]),
|
||||
single_line_exclusions: vec!["os".to_string(), "logging.handlers".to_string()]
|
||||
.into_iter()
|
||||
.collect::<BTreeSet<_>>(),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -637,7 +636,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
classes: FxHashSet::from_iter([
|
||||
classes: BTreeSet::from([
|
||||
"SVC".to_string(),
|
||||
"SELU".to_string(),
|
||||
"N_CLASS".to_string(),
|
||||
@@ -665,7 +664,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
constants: FxHashSet::from_iter([
|
||||
constants: BTreeSet::from([
|
||||
"Const".to_string(),
|
||||
"constant".to_string(),
|
||||
"First".to_string(),
|
||||
@@ -695,7 +694,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
variables: FxHashSet::from_iter([
|
||||
variables: BTreeSet::from([
|
||||
"VAR".to_string(),
|
||||
"Variable".to_string(),
|
||||
"MyVar".to_string(),
|
||||
@@ -722,7 +721,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_sort_within_sections: true,
|
||||
force_to_top: FxHashSet::from_iter(["z".to_string()]),
|
||||
force_to_top: BTreeSet::from(["z".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -772,7 +771,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from_iter([
|
||||
required_imports: BTreeSet::from([
|
||||
"from __future__ import annotations".to_string()
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -802,7 +801,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from_iter([
|
||||
required_imports: BTreeSet::from([
|
||||
"from __future__ import annotations as _annotations".to_string(),
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -825,7 +824,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from_iter([
|
||||
required_imports: BTreeSet::from([
|
||||
"from __future__ import annotations".to_string(),
|
||||
"from __future__ import generator_stop".to_string(),
|
||||
]),
|
||||
@@ -849,7 +848,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from_iter(["from __future__ import annotations, \
|
||||
required_imports: BTreeSet::from(["from __future__ import annotations, \
|
||||
generator_stop"
|
||||
.to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -872,7 +871,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from_iter(["import os".to_string()]),
|
||||
required_imports: BTreeSet::from(["import os".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::MissingRequiredImport)
|
||||
@@ -1003,7 +1002,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
no_lines_before: BTreeSet::from([
|
||||
ImportSection::Known(ImportType::Future),
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::ThirdParty),
|
||||
@@ -1031,7 +1030,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
no_lines_before: BTreeSet::from([
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::LocalFolder),
|
||||
]),
|
||||
|
||||
@@ -5,13 +5,12 @@ use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use crate::display_settings;
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
use crate::rules::isort::categorize::KnownModules;
|
||||
use crate::rules::isort::ImportType;
|
||||
|
||||
@@ -53,17 +52,17 @@ pub struct Settings {
|
||||
pub force_sort_within_sections: bool,
|
||||
pub case_sensitive: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub force_to_top: FxHashSet<String>,
|
||||
pub force_to_top: BTreeSet<String>,
|
||||
pub known_modules: KnownModules,
|
||||
pub detect_same_package: bool,
|
||||
pub order_by_type: bool,
|
||||
pub relative_imports_order: RelativeImportsOrder,
|
||||
pub single_line_exclusions: FxHashSet<String>,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub classes: FxHashSet<String>,
|
||||
pub constants: FxHashSet<String>,
|
||||
pub variables: FxHashSet<String>,
|
||||
pub no_lines_before: FxHashSet<ImportSection>,
|
||||
pub classes: BTreeSet<String>,
|
||||
pub constants: BTreeSet<String>,
|
||||
pub variables: BTreeSet<String>,
|
||||
pub no_lines_before: BTreeSet<ImportSection>,
|
||||
pub lines_after_imports: isize,
|
||||
pub lines_between_types: usize,
|
||||
pub forced_separate: Vec<String>,
|
||||
@@ -78,23 +77,23 @@ pub struct Settings {
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
required_imports: BTreeSet::default(),
|
||||
required_imports: BTreeSet::new(),
|
||||
combine_as_imports: false,
|
||||
force_single_line: false,
|
||||
force_sort_within_sections: false,
|
||||
detect_same_package: true,
|
||||
case_sensitive: false,
|
||||
force_wrap_aliases: false,
|
||||
force_to_top: FxHashSet::default(),
|
||||
force_to_top: BTreeSet::new(),
|
||||
known_modules: KnownModules::default(),
|
||||
order_by_type: true,
|
||||
relative_imports_order: RelativeImportsOrder::default(),
|
||||
single_line_exclusions: FxHashSet::default(),
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
split_on_trailing_comma: true,
|
||||
classes: FxHashSet::default(),
|
||||
constants: FxHashSet::default(),
|
||||
variables: FxHashSet::default(),
|
||||
no_lines_before: FxHashSet::default(),
|
||||
classes: BTreeSet::new(),
|
||||
constants: BTreeSet::new(),
|
||||
variables: BTreeSet::new(),
|
||||
no_lines_before: BTreeSet::new(),
|
||||
lines_after_imports: -1,
|
||||
lines_between_types: 0,
|
||||
forced_separate: Vec::new(),
|
||||
@@ -114,23 +113,23 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.isort",
|
||||
fields = [
|
||||
self.required_imports | set,
|
||||
self.required_imports | array,
|
||||
self.combine_as_imports,
|
||||
self.force_single_line,
|
||||
self.force_sort_within_sections,
|
||||
self.detect_same_package,
|
||||
self.case_sensitive,
|
||||
self.force_wrap_aliases,
|
||||
self.force_to_top | set,
|
||||
self.force_to_top | array,
|
||||
self.known_modules,
|
||||
self.order_by_type,
|
||||
self.relative_imports_order,
|
||||
self.single_line_exclusions | set,
|
||||
self.single_line_exclusions | array,
|
||||
self.split_on_trailing_comma,
|
||||
self.classes | set,
|
||||
self.constants | set,
|
||||
self.variables | set,
|
||||
self.no_lines_before | set,
|
||||
self.classes | array,
|
||||
self.constants | array,
|
||||
self.variables | array,
|
||||
self.no_lines_before | array,
|
||||
self.lines_after_imports,
|
||||
self.lines_between_types,
|
||||
self.forced_separate | array,
|
||||
@@ -156,7 +155,7 @@ pub enum SettingsError {
|
||||
InvalidUserDefinedSection(glob::PatternError),
|
||||
}
|
||||
|
||||
impl Display for SettingsError {
|
||||
impl fmt::Display for SettingsError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
SettingsError::InvalidKnownThirdParty(err) => {
|
||||
|
||||
@@ -15,7 +15,7 @@ use crate::checkers::ast::Checker;
|
||||
/// primarily for historic reasons, and have been a cause of
|
||||
/// frequent confusion for newcomers.
|
||||
///
|
||||
/// These aliases were deprecated in 1.20, and removed in 1.24.
|
||||
/// These aliases were been deprecated in 1.20, and removed in 1.24.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
|
||||
@@ -10,7 +10,7 @@ use crate::rules::pandas_vet::helpers::{test_expression, Resolution};
|
||||
/// Checks for uses of `.values` on Pandas Series and Index objects.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `.values` attribute is ambiguous as its return type is unclear. As
|
||||
/// The `.values` attribute is ambiguous as it's return type is unclear. As
|
||||
/// such, it is no longer recommended by the Pandas documentation.
|
||||
///
|
||||
/// Instead, use `.to_numpy()` to return a NumPy array, or `.array` to return a
|
||||
|
||||
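A small sketch of the `.values` guidance in the docstring above, assuming `pandas` (and therefore NumPy) is installed:

```python
import pandas as pd

s = pd.Series([1, 2, 3])

arr = s.to_numpy()  # preferred: unambiguously a NumPy array
ext = s.array       # preferred: the backing ExtensionArray
old = s.values      # discouraged: the return type depends on the dtype

print(type(arr), type(ext), type(old))
```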
@@ -21,7 +21,7 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
/// > all-lowercase names, although the use of underscores is discouraged.
|
||||
/// >
|
||||
/// > When an extension module written in C or C++ has an accompanying Python module that
|
||||
/// > provides a higher level (e.g. more object-oriented) interface, the C/C++ module has
|
||||
/// > provides a higher level (e.g. more object oriented) interface, the C/C++ module has
|
||||
/// > a leading underscore (e.g. `_socket`).
|
||||
///
|
||||
/// Further, in order for Python modules to be importable, they must be valid
|
||||
|
||||
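The importability point in the docstring above can be checked directly; a tiny sketch with made-up module names:

```python
# A module can only be used in a plain `import` statement if its name is a
# valid Python identifier.
print("my_module".isidentifier())  # True:  `import my_module` parses
print("my-module".isidentifier())  # False: a file named my-module.py needs importlib
```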
@@ -171,24 +171,6 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E302_first_line_docstring.py"))]
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E302_first_line_expression.py"))]
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E302_first_line_function.py"))]
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E302_first_line_statement.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E303_first_line_comment.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E303_first_line_docstring.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E303_first_line_expression.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E303_first_line_statement.py"))]
|
||||
fn blank_lines_first_line(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
Path::new("pycodestyle").join(path).as_path(),
|
||||
&settings::LinterSettings::for_rule(rule_code),
|
||||
)?;
|
||||
assert_messages!(snapshot, diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::BlankLineBetweenMethods, Path::new("E30.py"))]
|
||||
#[test_case(Rule::BlankLinesTopLevel, Path::new("E30.py"))]
|
||||
#[test_case(Rule::TooManyBlankLines, Path::new("E30.py"))]
|
||||
|
||||
@@ -241,7 +241,7 @@ impl AlwaysFixableViolation for BlankLineAfterDecorator {
|
||||
/// Checks for missing blank lines after the end of function or class.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// PEP 8 recommends using blank lines as follows:
|
||||
/// PEP 8 recommends using blank lines as following:
|
||||
/// - Two blank lines are expected between functions and classes
|
||||
/// - One blank line is expected between methods of a class.
|
||||
///
|
||||
@@ -292,7 +292,7 @@ impl AlwaysFixableViolation for BlankLinesAfterFunctionOrClass {
|
||||
/// Checks for 1 blank line between nested function or class definitions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// PEP 8 recommends using blank lines as follows:
|
||||
/// PEP 8 recommends using blank lines as following:
|
||||
/// - Two blank lines are expected between functions and classes
|
||||
/// - One blank line is expected between methods of a class.
|
||||
///
|
||||
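A short sketch of the blank-line convention the two docstrings above describe — two blank lines around top-level definitions, one between methods:

```python
class Greeter:
    def hello(self) -> str:
        return "hello"

    def goodbye(self) -> str:  # one blank line separates methods
        return "goodbye"


def main() -> None:  # two blank lines precede a top-level definition
    print(Greeter().hello())


if __name__ == "__main__":
    main()
```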
@@ -696,7 +696,9 @@ impl<'a> BlankLinesChecker<'a> {
|
||||
state.class_status.update(&logical_line);
|
||||
state.fn_status.update(&logical_line);
|
||||
|
||||
self.check_line(&logical_line, &state, prev_indent_length, diagnostics);
|
||||
if state.is_not_first_logical_line {
|
||||
self.check_line(&logical_line, &state, prev_indent_length, diagnostics);
|
||||
}
|
||||
|
||||
match logical_line.kind {
|
||||
LogicalLineKind::Class => {
|
||||
@@ -816,8 +818,6 @@ impl<'a> BlankLinesChecker<'a> {
|
||||
&& line.kind.is_class_function_or_decorator()
|
||||
// Blank lines in stub files are used to group definitions. Don't enforce blank lines.
|
||||
&& !self.source_type.is_stub()
|
||||
// Do not expect blank lines before the first logical line.
|
||||
&& state.is_not_first_logical_line
|
||||
{
|
||||
// E302
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
|
||||
@@ -9,9 +9,8 @@ use ruff_source_file::Locator;
|
||||
/// Checks for files missing a new line at the end of the file.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Trailing blank lines in a file are superfluous.
|
||||
///
|
||||
/// However, the last line of the file should end with a newline.
|
||||
/// Trailing blank lines are superfluous.
|
||||
/// However the last line should end with a new line.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E302_first_line_docstring.py:3:1: E302 [*] Expected 2 blank lines, found 1
|
||||
|
|
||||
1 | """Test where the error is after the module's docstring."""
|
||||
2 |
|
||||
3 | def fn():
|
||||
| ^^^ E302
|
||||
4 | pass
|
||||
|
|
||||
= help: Add missing blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | """Test where the error is after the module's docstring."""
|
||||
2 2 |
|
||||
3 |+
|
||||
3 4 | def fn():
|
||||
4 5 | pass
|
||||
@@ -1,19 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E302_first_line_expression.py:3:1: E302 [*] Expected 2 blank lines, found 1
|
||||
|
|
||||
1 | "Test where the first line is a comment, " + "and the rule violation follows it."
|
||||
2 |
|
||||
3 | def fn():
|
||||
| ^^^ E302
|
||||
4 | pass
|
||||
|
|
||||
= help: Add missing blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | "Test where the first line is a comment, " + "and the rule violation follows it."
|
||||
2 2 |
|
||||
3 |+
|
||||
3 4 | def fn():
|
||||
4 5 | pass
|
||||
@@ -1,20 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E302_first_line_function.py:4:1: E302 [*] Expected 2 blank lines, found 1
|
||||
|
|
||||
2 | pass
|
||||
3 |
|
||||
4 | def fn2():
|
||||
| ^^^ E302
|
||||
5 | pass
|
||||
|
|
||||
= help: Add missing blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | def fn1():
|
||||
2 2 | pass
|
||||
3 3 |
|
||||
4 |+
|
||||
4 5 | def fn2():
|
||||
5 6 | pass
|
||||
@@ -1,19 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E302_first_line_statement.py:3:1: E302 [*] Expected 2 blank lines, found 1
|
||||
|
|
||||
1 | print("Test where the first line is a statement, and the rule violation follows it.")
|
||||
2 |
|
||||
3 | def fn():
|
||||
| ^^^ E302
|
||||
4 | pass
|
||||
|
|
||||
= help: Add missing blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | print("Test where the first line is a statement, and the rule violation follows it.")
|
||||
2 2 |
|
||||
3 |+
|
||||
3 4 | def fn():
|
||||
4 5 | pass
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E303_first_line_comment.py:5:1: E303 [*] Too many blank lines (3)
|
||||
|
|
||||
5 | def fn():
|
||||
| ^^^ E303
|
||||
6 | pass
|
||||
|
|
||||
= help: Remove extraneous blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | # Test where the first line is a comment, and the rule violation follows it.
|
||||
2 2 |
|
||||
3 3 |
|
||||
4 |-
|
||||
5 4 | def fn():
|
||||
6 5 | pass
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E303_first_line_docstring.py:5:1: E303 [*] Too many blank lines (3)
|
||||
|
|
||||
5 | def fn():
|
||||
| ^^^ E303
|
||||
6 | pass
|
||||
|
|
||||
= help: Remove extraneous blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | """Test where the error is after the module's docstring."""
|
||||
2 2 |
|
||||
3 3 |
|
||||
4 |-
|
||||
5 4 | def fn():
|
||||
6 5 | pass
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E303_first_line_expression.py:5:1: E303 [*] Too many blank lines (3)
|
||||
|
|
||||
5 | def fn():
|
||||
| ^^^ E303
|
||||
6 | pass
|
||||
|
|
||||
= help: Remove extraneous blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | "Test where the first line is a comment, " + "and the rule violation follows it."
|
||||
2 2 |
|
||||
3 3 |
|
||||
4 |-
|
||||
5 4 | def fn():
|
||||
6 5 | pass
|
||||
@@ -1,18 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs
|
||||
---
|
||||
E303_first_line_statement.py:5:1: E303 [*] Too many blank lines (3)
|
||||
|
|
||||
5 | def fn():
|
||||
| ^^^ E303
|
||||
6 | pass
|
||||
|
|
||||
= help: Remove extraneous blank line(s)
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | print("Test where the first line is a statement, and the rule violation follows it.")
|
||||
2 2 |
|
||||
3 3 |
|
||||
4 |-
|
||||
5 4 | def fn():
|
||||
6 5 | pass
|
||||
@@ -27,11 +27,11 @@ W505.py:10:51: W505 Doc line too long (56 > 50)
|
||||
12 | x = 2
|
||||
|
|
||||
|
||||
W505.py:13:51: W505 Doc line too long (94 > 50)
|
||||
W505.py:13:51: W505 Doc line too long (93 > 50)
|
||||
|
|
||||
12 | x = 2
|
||||
13 | # Another standalone that is preceded by a newline and indent token and is over the limit.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
13 | # Another standalone that is preceded by a newline and indent toke and is over the limit.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
14 |
|
||||
15 | print("Here's a string that's over the limit, but it's not a docstring.")
|
||||
|
|
||||
@@ -58,3 +58,5 @@ W505.py:31:51: W505 Doc line too long (85 > 50)
|
||||
31 | It's over the limit on this line, which isn't the first line in the docstring."""
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -27,11 +27,11 @@ W505_utf_8.py:10:51: W505 Doc line too long (56 > 50)
|
||||
12 | x = 2
|
||||
|
|
||||
|
||||
W505_utf_8.py:13:51: W505 Doc line too long (94 > 50)
|
||||
W505_utf_8.py:13:51: W505 Doc line too long (93 > 50)
|
||||
|
|
||||
12 | x = 2
|
||||
13 | # Another standalone that is preceded by a newline and indent token and is over theß9💣2ℝ.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
13 | # Another standalone that is preceded by a newline and indent toke and is over theß9💣2ℝ.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
14 |
|
||||
15 | print("Here's a string that's over theß9💣2ℝ, but it's not a ß9💣2ℝing.")
|
||||
|
|
||||
@@ -58,3 +58,5 @@ W505_utf_8.py:31:50: W505 Doc line too long (85 > 50)
|
||||
31 | It's over theß9💣2ℝ on this line, which isn't the first line in the ß9💣2ℝing."""
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@ impl AlwaysFixableViolation for OneBlankLineBeforeClass {
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// [PEP 257] recommends the use of a blank line to separate a class's
|
||||
/// docstring from its methods.
|
||||
/// docstring its methods.
|
||||
///
|
||||
/// This rule may not apply to all projects; its applicability is a matter of
|
||||
/// convention. By default, this rule is enabled when using the `google`
|
||||
|
||||
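A minimal sketch of the PEP 257 layout referred to above, with a blank line separating the class docstring from the first method:

```python
class Account:
    """A toy class used only to illustrate the docstring/method separation."""

    def balance(self) -> int:
        return 0
```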
@@ -368,7 +368,7 @@ impl Violation for UndocumentedPublicPackage {
|
||||
/// ## Why is this bad?
|
||||
/// Magic methods (methods with names that start and end with double
|
||||
/// underscores) are used to implement operator overloading and other special
|
||||
/// behavior. Such methods should be documented via docstrings to
|
||||
/// behavior. Such methods should should be documented via docstrings to
|
||||
/// outline their behavior.
|
||||
///
|
||||
/// Generally, magic method docstrings should describe the method's behavior,
|
||||
|
||||
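For illustration, a documented magic method of the kind described above — a minimal sketch with a made-up `Money` class:

```python
class Money:
    """A toy value object."""

    def __init__(self, cents: int) -> None:
        self.cents = cents

    def __add__(self, other: "Money") -> "Money":
        """Return a new `Money` holding the sum of both amounts."""
        return Money(self.cents + other.cents)


assert (Money(150) + Money(50)).cents == 200
```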
@@ -1,6 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_python_codegen::Quote;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
@@ -97,8 +97,8 @@ impl fmt::Display for Settings {
|
||||
namespace = "linter.pydocstyle",
|
||||
fields = [
|
||||
self.convention | optional,
|
||||
self.ignore_decorators | set,
|
||||
self.property_decorators | set
|
||||
self.ignore_decorators | debug,
|
||||
self.property_decorators | debug
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -124,7 +124,6 @@ mod tests {
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_25.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_26.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_27.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_28.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_0.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_1.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_2.py"))]
|
||||
@@ -223,19 +222,6 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn init_unused_import_opt_in_to_fix() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes/__init__.py"),
|
||||
&LinterSettings {
|
||||
ignore_init_module_imports: false,
|
||||
..LinterSettings::for_rules(vec![Rule::UnusedImport])
|
||||
},
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_builtins() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::borrow::Cow;
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Applicability, Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_semantic::{AnyImport, Exceptions, Imported, NodeId, Scope};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
@@ -37,11 +37,6 @@ enum UnusedImportContext {
|
||||
/// from module import member as member
|
||||
/// ```
|
||||
///
|
||||
/// ## Fix safety
|
||||
///
|
||||
/// When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
|
||||
/// These fixes are considered unsafe because they can change the public interface.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import numpy as np # unused import
|
||||
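The diagnostic above suggests `__all__` or a redundant alias for imports that are intentionally re-exported; a small sketch using stdlib modules so it runs anywhere (in practice this pattern lives in a package's `__init__.py`):

```python
from os import path as path  # redundant alias: marks `path` as an intentional re-export
from sys import version

__all__ = ["path", "version"]  # names listed in __all__ are also treated as re-exports
```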
@@ -95,7 +90,7 @@ impl Violation for UnusedImport {
|
||||
}
|
||||
Some(UnusedImportContext::Init) => {
|
||||
format!(
|
||||
"`{name}` imported but unused; consider removing, adding to `__all__`, or using a redundant alias"
|
||||
"`{name}` imported but unused; consider adding to `__all__` or using a redundant alias"
|
||||
)
|
||||
}
|
||||
None => format!("`{name}` imported but unused"),
|
||||
@@ -159,8 +154,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
}
|
||||
}
|
||||
|
||||
let in_init = checker.path().ends_with("__init__.py");
|
||||
let fix_init = !checker.settings.ignore_init_module_imports;
|
||||
let in_init =
|
||||
checker.settings.ignore_init_module_imports && checker.path().ends_with("__init__.py");
|
||||
|
||||
// Generate a diagnostic for every import, but share a fix across all imports within the same
|
||||
// statement (excluding those that are ignored).
|
||||
@@ -169,8 +164,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
exceptions.intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
|
||||
let multiple = imports.len() > 1;
|
||||
|
||||
let fix = if (!in_init || fix_init) && !in_except_handler {
|
||||
fix_imports(checker, node_id, &imports, in_init).ok()
|
||||
let fix = if !in_init && !in_except_handler {
|
||||
fix_imports(checker, node_id, &imports).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -248,12 +243,7 @@ impl Ranged for ImportBinding<'_> {
|
||||
}
|
||||
|
||||
/// Generate a [`Fix`] to remove unused imports from a statement.
|
||||
fn fix_imports(
|
||||
checker: &Checker,
|
||||
node_id: NodeId,
|
||||
imports: &[ImportBinding],
|
||||
in_init: bool,
|
||||
) -> Result<Fix> {
|
||||
fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> {
|
||||
let statement = checker.semantic().statement(node_id);
|
||||
let parent = checker.semantic().parent_statement(node_id);
|
||||
|
||||
@@ -271,15 +261,7 @@ fn fix_imports(
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
// It's unsafe to remove things from `__init__.py` because it can break public interfaces
|
||||
let applicability = if in_init {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
};
|
||||
Ok(
|
||||
Fix::applicable_edit(edit, applicability).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)),
|
||||
)
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)))
|
||||
}
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_1.py:1:25: F811 Redefinition of unused `FU` from line 1
|
||||
F811_1.py:1:25: F811 [*] Redefinition of unused `FU` from line 1
|
||||
|
|
||||
1 | import fu as FU, bar as FU
|
||||
| ^^ F811
|
||||
|
|
||||
= help: Remove definition: `FU`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-import fu as FU, bar as FU
|
||||
1 |+import fu as FU
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_12.py:6:20: F811 Redefinition of unused `mixer` from line 2
|
||||
F811_12.py:6:20: F811 [*] Redefinition of unused `mixer` from line 2
|
||||
|
|
||||
4 | pass
|
||||
5 | else:
|
||||
@@ -10,3 +10,13 @@ F811_12.py:6:20: F811 Redefinition of unused `mixer` from line 2
|
||||
7 | mixer(123)
|
||||
|
|
||||
= help: Remove definition: `mixer`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | except ImportError:
|
||||
4 4 | pass
|
||||
5 5 | else:
|
||||
6 |- from bb import mixer
|
||||
6 |+ pass
|
||||
7 7 | mixer(123)
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_2.py:1:34: F811 Redefinition of unused `FU` from line 1
|
||||
F811_2.py:1:34: F811 [*] Redefinition of unused `FU` from line 1
|
||||
|
|
||||
1 | from moo import fu as FU, bar as FU
|
||||
| ^^ F811
|
||||
|
|
||||
= help: Remove definition: `FU`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-from moo import fu as FU, bar as FU
|
||||
1 |+from moo import fu as FU
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_23.py:4:15: F811 Redefinition of unused `foo` from line 3
|
||||
F811_23.py:4:15: F811 [*] Redefinition of unused `foo` from line 3
|
||||
|
|
||||
3 | import foo as foo
|
||||
4 | import bar as foo
|
||||
| ^^^ F811
|
||||
|
|
||||
= help: Remove definition: `foo`
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | """Test that shadowing an explicit re-export produces a warning."""
|
||||
2 2 |
|
||||
3 3 | import foo as foo
|
||||
4 |-import bar as foo
|
||||
|
||||
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_28.py:4:22: F811 Redefinition of unused `datetime` from line 3
|
||||
|
|
||||
3 | import datetime
|
||||
4 | from datetime import datetime
|
||||
| ^^^^^^^^ F811
|
||||
5 |
|
||||
6 | datetime(1, 2, 3)
|
||||
|
|
||||
= help: Remove definition: `datetime`
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:1:8: F401 `os` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
__init__.py:1:8: F401 [*] `os` imported but unused
|
||||
|
|
||||
1 | import os
|
||||
| ^^ F401
|
||||
@@ -9,3 +9,9 @@ __init__.py:1:8: F401 `os` imported but unused; consider removing, adding to `__
|
||||
3 | print(__path__)
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-import os
|
||||
2 1 |
|
||||
3 2 | print(__path__)
|
||||
4 3 |
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:1:8: F401 [*] `os` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
1 | import os
|
||||
| ^^ F401
|
||||
2 |
|
||||
3 | print(__path__)
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |-import os
|
||||
2 1 |
|
||||
3 2 | print(__path__)
|
||||
4 3 |
|
||||
@@ -71,7 +71,6 @@ mod tests {
|
||||
#[test_case(Rule::ImportSelf, Path::new("import_self/module.py"))]
|
||||
#[test_case(Rule::InvalidAllFormat, Path::new("invalid_all_format.py"))]
|
||||
#[test_case(Rule::InvalidAllObject, Path::new("invalid_all_object.py"))]
|
||||
#[test_case(Rule::InvalidBoolReturnType, Path::new("invalid_return_type_bool.py"))]
|
||||
#[test_case(Rule::InvalidStrReturnType, Path::new("invalid_return_type_str.py"))]
|
||||
#[test_case(Rule::DuplicateBases, Path::new("duplicate_bases.py"))]
|
||||
#[test_case(Rule::InvalidCharacterBackspace, Path::new("invalid_characters.py"))]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.