Compare commits
38 Commits
zanie/docs
...
dhruv/curr
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
756e7b6c82 | ||
|
|
4f5604fc83 | ||
|
|
4381629e13 | ||
|
|
b09e5f40df | ||
|
|
1ec7259116 | ||
|
|
d41ecfe351 | ||
|
|
156f7994a7 | ||
|
|
559832ba4d | ||
|
|
98f6dcbb91 | ||
|
|
94cc5f2e13 | ||
|
|
cdcbb04686 | ||
|
|
70aa19e9af | ||
|
|
aac2023999 | ||
|
|
2b00e81c22 | ||
|
|
b65e3fb335 | ||
|
|
814438777c | ||
|
|
f6467216dc | ||
|
|
bf67f129dd | ||
|
|
3ee670440c | ||
|
|
0de3f2f92d | ||
|
|
f4a8ab8756 | ||
|
|
5f40371ffc | ||
|
|
f7802ad5de | ||
|
|
e832327a56 | ||
|
|
324390607c | ||
|
|
4db5c29f19 | ||
|
|
e9d3f71c90 | ||
|
|
7b3ee2daff | ||
|
|
c2e15f38ee | ||
|
|
d59433b12e | ||
|
|
2bf1882398 | ||
|
|
c269c1a706 | ||
|
|
32d6f84e3d | ||
|
|
93d582d734 | ||
|
|
05b406080a | ||
|
|
3ed707f245 | ||
|
|
c56fb6e15a | ||
|
|
dbf82233b8 |
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -3,6 +3,8 @@ Thank you for taking the time to report an issue! We're glad to have you involve
|
||||
|
||||
If you're filing a bug report, please consider including the following information:
|
||||
|
||||
* List of keywords you searched for before creating this issue. Write them down here so that others can find this issue more easily and help provide feedback.
|
||||
e.g. "RUF001", "unused variable", "Jupyter notebook"
|
||||
* A minimal code snippet that reproduces the bug.
|
||||
* The command you invoked (e.g., `ruff /path/to/file.py --fix`), ideally including the `--isolated` flag.
|
||||
* The current Ruff settings (any relevant sections from your `pyproject.toml`).
|
||||
|
||||
1
.github/workflows/docs.yaml
vendored
1
.github/workflows/docs.yaml
vendored
@@ -9,7 +9,6 @@ on:
|
||||
type: string
|
||||
release:
|
||||
types: [published]
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
mkdocs:
|
||||
|
||||
4
Cargo.lock
generated
4
Cargo.lock
generated
@@ -2345,6 +2345,7 @@ dependencies = [
|
||||
"itertools 0.12.1",
|
||||
"lexical-parse-float",
|
||||
"rand",
|
||||
"ruff_python_ast",
|
||||
"unic-ucd-category",
|
||||
]
|
||||
|
||||
@@ -2352,9 +2353,11 @@ dependencies = [
|
||||
name = "ruff_python_parser"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.2",
|
||||
"anyhow",
|
||||
"bitflags 2.4.2",
|
||||
"bstr",
|
||||
"drop_bomb",
|
||||
"insta",
|
||||
"is-macro",
|
||||
"itertools 0.12.1",
|
||||
@@ -2362,6 +2365,7 @@ dependencies = [
|
||||
"lalrpop-util",
|
||||
"memchr",
|
||||
"ruff_python_ast",
|
||||
"ruff_source_file",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
"static_assertions",
|
||||
|
||||
26
README.md
26
README.md
@@ -129,7 +129,7 @@ and with [a variety of other package managers](https://docs.astral.sh/ruff/insta
|
||||
To run Ruff as a linter, try any of the following:
|
||||
|
||||
```shell
|
||||
ruff check . # Lint all files in the current directory (and any subdirectories).
|
||||
ruff check # Lint all files in the current directory (and any subdirectories).
|
||||
ruff check path/to/code/ # Lint all files in `/path/to/code` (and any subdirectories).
|
||||
ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`.
|
||||
ruff check path/to/code/to/file.py # Lint `file.py`.
|
||||
@@ -139,7 +139,7 @@ ruff check @arguments.txt # Lint using an input file, treating its con
|
||||
Or, to run Ruff as a formatter:
|
||||
|
||||
```shell
|
||||
ruff format . # Format all files in the current directory (and any subdirectories).
|
||||
ruff format # Format all files in the current directory (and any subdirectories).
|
||||
ruff format path/to/code/ # Format all files in `/path/to/code` (and any subdirectories).
|
||||
ruff format path/to/code/*.py # Format all `.py` files in `/path/to/code`.
|
||||
ruff format path/to/code/to/file.py # Format `file.py`.
|
||||
@@ -183,10 +183,9 @@ Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml`
|
||||
[_Configuration_](https://docs.astral.sh/ruff/configuration/), or [_Settings_](https://docs.astral.sh/ruff/settings/)
|
||||
for a complete list of all configuration options).
|
||||
|
||||
If left unspecified, Ruff's default configuration is equivalent to:
|
||||
If left unspecified, Ruff's default configuration is equivalent to the following `ruff.toml` file:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Exclude a variety of commonly ignored directories.
|
||||
exclude = [
|
||||
".bzr",
|
||||
@@ -224,7 +223,7 @@ indent-width = 4
|
||||
# Assume Python 3.8
|
||||
target-version = "py38"
|
||||
|
||||
[tool.ruff.lint]
|
||||
[lint]
|
||||
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
|
||||
select = ["E4", "E7", "E9", "F"]
|
||||
ignore = []
|
||||
@@ -236,7 +235,7 @@ unfixable = []
|
||||
# Allow unused variables when underscore-prefixed.
|
||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
|
||||
[tool.ruff.format]
|
||||
[format]
|
||||
# Like Black, use double quotes for strings.
|
||||
quote-style = "double"
|
||||
|
||||
@@ -250,11 +249,20 @@ skip-magic-trailing-comma = false
|
||||
line-ending = "auto"
|
||||
```
|
||||
|
||||
Some configuration options can be provided via the command-line, such as those related to
|
||||
rule enablement and disablement, file discovery, and logging level:
|
||||
Note that, in a `pyproject.toml`, each section header should be prefixed with `tool.ruff`. For
|
||||
example, `[lint]` should be replaced with `[tool.ruff.lint]`.
|
||||
|
||||
Some configuration options can be provided via dedicated command-line arguments, such as those
|
||||
related to rule enablement and disablement, file discovery, and logging level:
|
||||
|
||||
```shell
|
||||
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||
ruff check --select F401 --select F403 --quiet
|
||||
```
|
||||
|
||||
The remaining configuration options can be provided through a catch-all `--config` argument:
|
||||
|
||||
```shell
|
||||
ruff check --config "lint.per-file-ignores = {'some_file.py' = ['F841']}"
|
||||
```
|
||||
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` and `ruff help format`
|
||||
|
||||
@@ -497,7 +497,11 @@ pub struct FormatCommand {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, clap::Parser)]
|
||||
pub struct ServerCommand;
|
||||
pub struct ServerCommand {
|
||||
/// Enable preview mode; required for regular operation
|
||||
#[arg(long)]
|
||||
pub(crate) preview: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, clap::ValueEnum)]
|
||||
pub enum HelpFormat {
|
||||
|
||||
@@ -9,7 +9,11 @@ use tracing_subscriber::{
|
||||
};
|
||||
use tracing_tree::time::Uptime;
|
||||
|
||||
pub(crate) fn run_server(log_level: LogLevel) -> Result<ExitStatus> {
|
||||
pub(crate) fn run_server(preview: bool, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
if !preview {
|
||||
tracing::error!("--preview needs to be provided as a command line argument while the server is still unstable.\nFor example: `ruff server --preview`");
|
||||
return Ok(ExitStatus::Error);
|
||||
}
|
||||
let trace_level = if log_level == LogLevel::Verbose {
|
||||
Level::TRACE
|
||||
} else {
|
||||
|
||||
@@ -206,8 +206,8 @@ fn format(args: FormatCommand, global_options: GlobalConfigArgs) -> Result<ExitS
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)] // TODO: remove once we start taking arguments from here
|
||||
fn server(args: ServerCommand, log_level: LogLevel) -> Result<ExitStatus> {
|
||||
let ServerCommand {} = args;
|
||||
commands::server::run_server(log_level)
|
||||
let ServerCommand { preview } = args;
|
||||
commands::server::run_server(preview, log_level)
|
||||
}
|
||||
|
||||
pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<ExitStatus> {
|
||||
|
||||
@@ -118,6 +118,8 @@ impl Printer {
|
||||
} else if remaining > 0 {
|
||||
let s = if remaining == 1 { "" } else { "s" };
|
||||
writeln!(writer, "Found {remaining} error{s}.")?;
|
||||
} else if remaining == 0 {
|
||||
writeln!(writer, "All checks passed!")?;
|
||||
}
|
||||
|
||||
if let Some(fixables) = fixables {
|
||||
|
||||
@@ -23,7 +23,7 @@ fn default_options() {
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def foo(arg1, arg2,):
|
||||
print('Should\'t change quotes')
|
||||
print('Shouldn\'t change quotes')
|
||||
|
||||
|
||||
if condition:
|
||||
@@ -38,7 +38,7 @@ if condition:
|
||||
arg1,
|
||||
arg2,
|
||||
):
|
||||
print("Should't change quotes")
|
||||
print("Shouldn't change quotes")
|
||||
|
||||
|
||||
if condition:
|
||||
@@ -523,7 +523,7 @@ from module import =
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Failed to parse main.py:2:20: Unexpected token '='
|
||||
error: Failed to parse main.py:2:20: Unexpected token =
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -101,6 +101,7 @@ fn stdin_success() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -222,6 +223,7 @@ fn stdin_source_type_pyi() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -590,6 +592,7 @@ fn stdin_fix_when_no_issues_should_still_print_contents() {
|
||||
print(sys.version)
|
||||
|
||||
----- stderr -----
|
||||
All checks passed!
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -728,11 +731,11 @@ fn stdin_parse_error() {
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:17: E999 SyntaxError: Unexpected token '='
|
||||
-:1:17: E999 SyntaxError: Unexpected token =
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
error: Failed to parse at 1:17: Unexpected token '='
|
||||
error: Failed to parse at 1:17: Unexpected token =
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -1023,6 +1026,7 @@ fn preview_disabled_direct() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF911` has no effect because preview is not enabled.
|
||||
@@ -1039,6 +1043,7 @@ fn preview_disabled_prefix_empty() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Selection `RUF91` has no effect because preview is not enabled.
|
||||
@@ -1055,6 +1060,7 @@ fn preview_disabled_does_not_warn_for_empty_ignore_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1070,6 +1076,7 @@ fn preview_disabled_does_not_warn_for_empty_fixable_selections() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1175,6 +1182,7 @@ fn removed_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1205,6 +1213,7 @@ fn redirect_indirect() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1307,6 +1316,7 @@ fn deprecated_indirect_preview_enabled() {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
@@ -1383,6 +1393,7 @@ fn unreadable_dir() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: Encountered error: Permission denied (os error 13)
|
||||
@@ -1897,6 +1908,7 @@ def log(x, base) -> float:
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###
|
||||
|
||||
@@ -496,6 +496,7 @@ ignore = ["D203", "D212"]
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
warning: No Python files found under the given path(s)
|
||||
@@ -833,6 +834,7 @@ fn complex_config_setting_overridden_via_cli() -> Result<()> {
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
@@ -34,6 +34,11 @@ marking it as unused, as in:
|
||||
from module import member as member
|
||||
```
|
||||
|
||||
## Fix safety
|
||||
|
||||
When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
|
||||
These fixes are considered unsafe because they can change the public interface.
|
||||
|
||||
## Example
|
||||
```python
|
||||
import numpy as np # unused import
|
||||
|
||||
@@ -52,6 +52,7 @@ file_resolver.exclude = [
|
||||
file_resolver.extend_exclude = [
|
||||
"crates/ruff_linter/resources/",
|
||||
"crates/ruff_python_formatter/resources/",
|
||||
"crates/ruff_python_parser/resources/",
|
||||
]
|
||||
file_resolver.force_exclude = false
|
||||
file_resolver.include = [
|
||||
@@ -201,7 +202,7 @@ linter.allowed_confusables = []
|
||||
linter.builtins = []
|
||||
linter.dummy_variable_rgx = ^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$
|
||||
linter.external = []
|
||||
linter.ignore_init_module_imports = false
|
||||
linter.ignore_init_module_imports = true
|
||||
linter.logger_objects = []
|
||||
linter.namespace_packages = []
|
||||
linter.src = [
|
||||
@@ -241,7 +242,22 @@ linter.flake8_gettext.functions_names = [
|
||||
ngettext,
|
||||
]
|
||||
linter.flake8_implicit_str_concat.allow_multiline = true
|
||||
linter.flake8_import_conventions.aliases = {"matplotlib": "mpl", "matplotlib.pyplot": "plt", "pandas": "pd", "seaborn": "sns", "tensorflow": "tf", "networkx": "nx", "plotly.express": "px", "polars": "pl", "numpy": "np", "panel": "pn", "pyarrow": "pa", "altair": "alt", "tkinter": "tk", "holoviews": "hv"}
|
||||
linter.flake8_import_conventions.aliases = {
|
||||
altair = alt,
|
||||
holoviews = hv,
|
||||
matplotlib = mpl,
|
||||
matplotlib.pyplot = plt,
|
||||
networkx = nx,
|
||||
numpy = np,
|
||||
pandas = pd,
|
||||
panel = pn,
|
||||
plotly.express = px,
|
||||
polars = pl,
|
||||
pyarrow = pa,
|
||||
seaborn = sns,
|
||||
tensorflow = tf,
|
||||
tkinter = tk,
|
||||
}
|
||||
linter.flake8_import_conventions.banned_aliases = {}
|
||||
linter.flake8_import_conventions.banned_from = []
|
||||
linter.flake8_pytest_style.fixture_parentheses = true
|
||||
|
||||
@@ -7,7 +7,7 @@ use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||
use ruff_python_formatter::{format_module_ast, PreviewMode, PyFormatOptions};
|
||||
use ruff_python_index::CommentRangesBuilder;
|
||||
use ruff_python_parser::lexer::lex;
|
||||
use ruff_python_parser::{allocate_tokens_vec, parse_tokens, Mode};
|
||||
use ruff_python_parser::{allocate_tokens_vec, parse_tokens, set_new_parser, Mode};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -42,6 +42,8 @@ fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
|
||||
}
|
||||
|
||||
fn benchmark_formatter(criterion: &mut Criterion) {
|
||||
set_new_parser(true);
|
||||
|
||||
let mut group = criterion.benchmark_group("formatter");
|
||||
let test_cases = create_test_cases().unwrap();
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ use ruff_benchmark::criterion::{
|
||||
criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
|
||||
};
|
||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||
use ruff_python_parser::{lexer, Mode};
|
||||
use ruff_python_parser::{lexer, set_new_parser, Mode};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -37,6 +37,8 @@ fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
|
||||
}
|
||||
|
||||
fn benchmark_lexer(criterion: &mut Criterion<WallTime>) {
|
||||
set_new_parser(true);
|
||||
|
||||
let test_cases = create_test_cases().unwrap();
|
||||
let mut group = criterion.benchmark_group("lexer");
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ use ruff_linter::settings::{flags, LinterSettings};
|
||||
use ruff_linter::source_kind::SourceKind;
|
||||
use ruff_linter::{registry::Rule, RuleSelector};
|
||||
use ruff_python_ast::PySourceType;
|
||||
use ruff_python_parser::{lexer, parse_program_tokens, Mode};
|
||||
use ruff_python_parser::{lexer, parse_program_tokens, set_new_parser, Mode};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -45,6 +45,8 @@ fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
|
||||
}
|
||||
|
||||
fn benchmark_linter(mut group: BenchmarkGroup, settings: &LinterSettings) {
|
||||
set_new_parser(true);
|
||||
|
||||
let test_cases = create_test_cases().unwrap();
|
||||
|
||||
for case in test_cases {
|
||||
|
||||
@@ -4,7 +4,7 @@ use ruff_benchmark::criterion::{
|
||||
use ruff_benchmark::{TestCase, TestFile, TestFileDownloadError};
|
||||
use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_parser::parse_suite;
|
||||
use ruff_python_parser::{parse_suite, set_new_parser};
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
#[global_allocator]
|
||||
@@ -50,6 +50,8 @@ impl<'a> StatementVisitor<'a> for CountVisitor {
|
||||
}
|
||||
|
||||
fn benchmark_parser(criterion: &mut Criterion<WallTime>) {
|
||||
set_new_parser(true);
|
||||
|
||||
let test_cases = create_test_cases().unwrap();
|
||||
let mut group = criterion.benchmark_group("parser");
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ pub trait Buffer {
|
||||
#[doc(hidden)]
|
||||
fn elements(&self) -> &[FormatElement];
|
||||
|
||||
/// Glue for usage of the [`write!`] macro with implementors of this trait.
|
||||
/// Glue for usage of the [`write!`] macro with implementers of this trait.
|
||||
///
|
||||
/// This method should generally not be invoked manually, but rather through the [`write!`] macro itself.
|
||||
///
|
||||
|
||||
@@ -18,3 +18,7 @@ func("0.0.0.0")
|
||||
def my_func():
|
||||
x = "0.0.0.0"
|
||||
print(x)
|
||||
|
||||
|
||||
# Implicit string concatenation
|
||||
"0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
|
||||
@@ -18,6 +18,13 @@ with open("/dev/shm/unit/test", "w") as f:
|
||||
with open("/foo/bar", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
# Implicit string concatenation
|
||||
with open("/tmp/" "abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
# Using `tempfile` module should be ok
|
||||
import tempfile
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
@@ -64,3 +64,5 @@ def not_warnings_dot_deprecated(
|
||||
"Not warnings.deprecated, so this one *should* lead to PYI053 in a stub!" # Error: PYI053
|
||||
)
|
||||
def not_a_deprecated_function() -> None: ...
|
||||
|
||||
fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
|
||||
|
||||
@@ -10,7 +10,7 @@ async def func():
|
||||
|
||||
trio.sleep(0) # TRIO115
|
||||
foo = 0
|
||||
trio.sleep(foo) # TRIO115
|
||||
trio.sleep(foo) # OK
|
||||
trio.sleep(1) # OK
|
||||
time.sleep(0) # OK
|
||||
|
||||
@@ -20,26 +20,26 @@ async def func():
|
||||
trio.sleep(bar)
|
||||
|
||||
x, y = 0, 2000
|
||||
trio.sleep(x) # TRIO115
|
||||
trio.sleep(x) # OK
|
||||
trio.sleep(y) # OK
|
||||
|
||||
(a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
trio.sleep(c) # TRIO115
|
||||
trio.sleep(c) # OK
|
||||
trio.sleep(d) # OK
|
||||
trio.sleep(e) # TRIO115
|
||||
trio.sleep(e) # OK
|
||||
|
||||
m_x, m_y = 0
|
||||
trio.sleep(m_y) # OK
|
||||
trio.sleep(m_x) # OK
|
||||
|
||||
m_a = m_b = 0
|
||||
trio.sleep(m_a) # TRIO115
|
||||
trio.sleep(m_b) # TRIO115
|
||||
trio.sleep(m_a) # OK
|
||||
trio.sleep(m_b) # OK
|
||||
|
||||
m_c = (m_d, m_e) = (0, 0)
|
||||
trio.sleep(m_c) # OK
|
||||
trio.sleep(m_d) # TRIO115
|
||||
trio.sleep(m_e) # TRIO115
|
||||
trio.sleep(m_d) # OK
|
||||
trio.sleep(m_e) # OK
|
||||
|
||||
|
||||
def func():
|
||||
@@ -63,4 +63,16 @@ def func():
|
||||
import trio
|
||||
|
||||
if (walrus := 0) == 0:
|
||||
trio.sleep(walrus) # TRIO115
|
||||
trio.sleep(walrus) # OK
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
async def main() -> None:
|
||||
sleep = 0
|
||||
for _ in range(2):
|
||||
await trio.sleep(sleep) # OK
|
||||
sleep = 10
|
||||
|
||||
trio.run(main)
|
||||
|
||||
@@ -10,7 +10,7 @@ def f1():
|
||||
# Here's a standalone comment that's over the limit.
|
||||
|
||||
x = 2
|
||||
# Another standalone that is preceded by a newline and indent toke and is over the limit.
|
||||
# Another standalone that is preceded by a newline and indent token and is over the limit.
|
||||
|
||||
print("Here's a string that's over the limit, but it's not a docstring.")
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ def f1():
|
||||
# Here's a standalone comment that's over theß9💣2ℝ.
|
||||
|
||||
x = 2
|
||||
# Another standalone that is preceded by a newline and indent toke and is over theß9💣2ℝ.
|
||||
# Another standalone that is preceded by a newline and indent token and is over theß9💣2ℝ.
|
||||
|
||||
print("Here's a string that's over theß9💣2ℝ, but it's not a ß9💣2ℝing.")
|
||||
|
||||
|
||||
6
crates/ruff_linter/resources/test/fixtures/pyflakes/F811_28.py
vendored
Normal file
6
crates/ruff_linter/resources/test/fixtures/pyflakes/F811_28.py
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Regression test for: https://github.com/astral-sh/ruff/issues/10384"""
|
||||
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
|
||||
datetime(1, 2, 3)
|
||||
37
crates/ruff_linter/resources/test/fixtures/pylint/invalid_return_type_bool.py
vendored
Normal file
37
crates/ruff_linter/resources/test/fixtures/pylint/invalid_return_type_bool.py
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
# These testcases should raise errors
|
||||
|
||||
class Float:
|
||||
def __bool__(self):
|
||||
return 3.05 # [invalid-bool-return]
|
||||
|
||||
class Int:
|
||||
def __bool__(self):
|
||||
return 0 # [invalid-bool-return]
|
||||
|
||||
|
||||
class Str:
|
||||
def __bool__(self):
|
||||
x = "ruff"
|
||||
return x # [invalid-bool-return]
|
||||
|
||||
# TODO: Once Ruff has better type checking
|
||||
def return_int():
|
||||
return 3
|
||||
|
||||
class ComplexReturn:
|
||||
def __bool__(self):
|
||||
return return_int() # [invalid-bool-return]
|
||||
|
||||
|
||||
|
||||
# These testcases should NOT raise errors
|
||||
|
||||
class Bool:
|
||||
def __bool__(self):
|
||||
return True
|
||||
|
||||
|
||||
class Bool2:
|
||||
def __bool__(self):
|
||||
x = True
|
||||
return x
|
||||
@@ -1,28 +1,36 @@
|
||||
class Str:
|
||||
def __str__(self):
|
||||
return 1
|
||||
# These testcases should raise errors
|
||||
|
||||
class Float:
|
||||
def __str__(self):
|
||||
return 3.05
|
||||
|
||||
|
||||
class Int:
|
||||
def __str__(self):
|
||||
return 1
|
||||
|
||||
class Int2:
|
||||
def __str__(self):
|
||||
return 0
|
||||
|
||||
|
||||
class Bool:
|
||||
def __str__(self):
|
||||
return False
|
||||
|
||||
class Str2:
|
||||
def __str__(self):
|
||||
x = "ruff"
|
||||
return x
|
||||
|
||||
# TODO fixme once Ruff has better type checking
|
||||
|
||||
# TODO: Once Ruff has better type checking
|
||||
def return_int():
|
||||
return 3
|
||||
|
||||
class ComplexReturn:
|
||||
def __str__(self):
|
||||
return return_int()
|
||||
return return_int()
|
||||
|
||||
# These testcases should NOT raise errors
|
||||
|
||||
class Str:
|
||||
def __str__(self):
|
||||
return "ruff"
|
||||
|
||||
class Str2:
|
||||
def __str__(self):
|
||||
x = "ruff"
|
||||
return x
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# Test case 1: Useless exception statement
|
||||
from abc import ABC, abstractmethod
|
||||
from contextlib import suppress
|
||||
|
||||
|
||||
# Test case 1: Useless exception statement
|
||||
def func():
|
||||
AssertionError("This is an assertion error") # PLW0133
|
||||
|
||||
@@ -66,6 +66,11 @@ def func():
|
||||
x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
|
||||
|
||||
|
||||
# Test case 11: Useless warning statement
|
||||
def func():
|
||||
UserWarning("This is an assertion error") # PLW0133
|
||||
|
||||
|
||||
# Non-violation test cases: PLW0133
|
||||
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ if (
|
||||
and some_third_reasonably_long_condition
|
||||
or some_fourth_reasonably_long_condition
|
||||
and some_fifth_reasonably_long_condition
|
||||
# a commment
|
||||
# a comment
|
||||
and some_sixth_reasonably_long_condition
|
||||
and some_seventh_reasonably_long_condition
|
||||
# another comment
|
||||
|
||||
@@ -48,7 +48,7 @@ __all__ = [
|
||||
# we implement an "isort-style sort":
|
||||
# SCEAMING_CASE constants first,
|
||||
# then CamelCase classes,
|
||||
# then anything thats lowercase_snake_case.
|
||||
# then anything that's lowercase_snake_case.
|
||||
# This (which is currently alphabetically sorted)
|
||||
# should get reordered accordingly:
|
||||
__all__ = [
|
||||
|
||||
@@ -53,3 +53,6 @@ class Labware:
|
||||
|
||||
|
||||
assert getattr(Labware(), "µL") == 1.5
|
||||
|
||||
# Implicit string concatenation
|
||||
x = "𝐁ad" f"𝐁ad string"
|
||||
|
||||
@@ -259,23 +259,29 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
||||
diagnostic.set_parent(range.start());
|
||||
}
|
||||
|
||||
if let Some(import) = binding.as_any_import() {
|
||||
if let Some(source) = binding.source {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let statement = checker.semantic().statement(source);
|
||||
let parent = checker.semantic().parent_statement(source);
|
||||
let edit = fix::edits::remove_unused_imports(
|
||||
std::iter::once(import.member_name().as_ref()),
|
||||
statement,
|
||||
parent,
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(source),
|
||||
)))
|
||||
});
|
||||
// Remove the import if the binding and the shadowed binding are both imports,
|
||||
// and both point to the same qualified name.
|
||||
if let Some(shadowed_import) = shadowed.as_any_import() {
|
||||
if let Some(import) = binding.as_any_import() {
|
||||
if shadowed_import.qualified_name() == import.qualified_name() {
|
||||
if let Some(source) = binding.source {
|
||||
diagnostic.try_set_fix(|| {
|
||||
let statement = checker.semantic().statement(source);
|
||||
let parent = checker.semantic().parent_statement(source);
|
||||
let edit = fix::edits::remove_unused_imports(
|
||||
std::iter::once(import.member_name().as_ref()),
|
||||
statement,
|
||||
parent,
|
||||
checker.locator(),
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(source),
|
||||
)))
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -254,7 +254,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprContext::Del => {}
|
||||
_ => {}
|
||||
}
|
||||
if checker.enabled(Rule::SixPY3) {
|
||||
flake8_2020::rules::name_or_attribute(checker, expr);
|
||||
|
||||
@@ -91,6 +91,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::InvalidBoolReturnType) {
|
||||
pylint::rules::invalid_bool_return(checker, name, body);
|
||||
}
|
||||
if checker.enabled(Rule::InvalidStrReturnType) {
|
||||
pylint::rules::invalid_str_return(checker, name, body);
|
||||
}
|
||||
|
||||
@@ -44,10 +44,10 @@ use ruff_python_ast::helpers::{
|
||||
};
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::name::QualifiedName;
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_f_string_element, walk_pattern, Visitor};
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor, PySourceType};
|
||||
use ruff_python_codegen::{Generator, Quote, Stylist};
|
||||
use ruff_python_codegen::{Generator, Stylist};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
use ruff_python_semantic::analyze::{imports, typing, visibility};
|
||||
@@ -228,16 +228,11 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
// Find the quote character used to start the containing f-string.
|
||||
let expr = self.semantic.current_expression()?;
|
||||
let string_range = self.indexer.fstring_ranges().innermost(expr.start())?;
|
||||
let trailing_quote = trailing_quote(self.locator.slice(string_range))?;
|
||||
|
||||
// Invert the quote character, if it's a single quote.
|
||||
match trailing_quote {
|
||||
"'" => Some(Quote::Double),
|
||||
"\"" => Some(Quote::Single),
|
||||
_ => None,
|
||||
}
|
||||
let ast::ExprFString { value, .. } = self
|
||||
.semantic
|
||||
.current_expressions()
|
||||
.find_map(|expr| expr.as_f_string_expr())?;
|
||||
Some(value.iter().next()?.quote_style().opposite())
|
||||
}
|
||||
|
||||
/// Returns the [`SourceRow`] for the given offset.
|
||||
@@ -991,6 +986,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
ExprContext::Load => self.handle_node_load(expr),
|
||||
ExprContext::Store => self.handle_node_store(id, expr),
|
||||
ExprContext::Del => self.handle_node_delete(expr),
|
||||
ExprContext::Invalid => {}
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
@@ -1412,6 +1408,7 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
analyze::string_like(string_literal.into(), self);
|
||||
}
|
||||
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
|
||||
Expr::FString(f_string) => analyze::string_like(f_string.into(), self),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -1578,16 +1575,6 @@ impl<'a> Visitor<'a> for Checker<'a> {
|
||||
.push((bound, self.semantic.snapshot()));
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'a ast::FStringElement) {
|
||||
// Step 2: Traversal
|
||||
walk_f_string_element(self, f_string_element);
|
||||
|
||||
// Step 4: Analysis
|
||||
if let Some(literal) = f_string_element.as_literal() {
|
||||
analyze::string_like(literal.into(), self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
|
||||
@@ -240,6 +240,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
||||
(Pylint, "E0237") => (RuleGroup::Stable, rules::pylint::rules::NonSlotAssignment),
|
||||
(Pylint, "E0241") => (RuleGroup::Stable, rules::pylint::rules::DuplicateBases),
|
||||
(Pylint, "E0302") => (RuleGroup::Stable, rules::pylint::rules::UnexpectedSpecialMethodSignature),
|
||||
(Pylint, "E0304") => (RuleGroup::Preview, rules::pylint::rules::InvalidBoolReturnType),
|
||||
(Pylint, "E0307") => (RuleGroup::Stable, rules::pylint::rules::InvalidStrReturnType),
|
||||
(Pylint, "E0604") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllObject),
|
||||
(Pylint, "E0605") => (RuleGroup::Stable, rules::pylint::rules::InvalidAllFormat),
|
||||
|
||||
@@ -194,7 +194,7 @@ impl DisplayParseError {
|
||||
// Translate the byte offset to a location in the originating source.
|
||||
let location =
|
||||
if let Some(jupyter_index) = source_kind.as_ipy_notebook().map(Notebook::index) {
|
||||
let source_location = source_code.source_location(error.offset);
|
||||
let source_location = source_code.source_location(error.location.start());
|
||||
|
||||
ErrorLocation::Cell(
|
||||
jupyter_index
|
||||
@@ -208,7 +208,7 @@ impl DisplayParseError {
|
||||
},
|
||||
)
|
||||
} else {
|
||||
ErrorLocation::File(source_code.source_location(error.offset))
|
||||
ErrorLocation::File(source_code.source_location(error.location.start()))
|
||||
};
|
||||
|
||||
Self {
|
||||
@@ -275,27 +275,7 @@ impl<'a> DisplayParseErrorType<'a> {
|
||||
|
||||
impl Display for DisplayParseErrorType<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self.0 {
|
||||
ParseErrorType::Eof => write!(f, "Expected token but reached end of file."),
|
||||
ParseErrorType::ExtraToken(ref tok) => write!(
|
||||
f,
|
||||
"Got extraneous token: {tok}",
|
||||
tok = TruncateAtNewline(&tok)
|
||||
),
|
||||
ParseErrorType::InvalidToken => write!(f, "Got invalid token"),
|
||||
ParseErrorType::UnrecognizedToken(ref tok, ref expected) => {
|
||||
if let Some(expected) = expected.as_ref() {
|
||||
write!(
|
||||
f,
|
||||
"Expected '{expected}', but got {tok}",
|
||||
tok = TruncateAtNewline(&tok)
|
||||
)
|
||||
} else {
|
||||
write!(f, "Unexpected token {tok}", tok = TruncateAtNewline(&tok))
|
||||
}
|
||||
}
|
||||
ParseErrorType::Lexical(ref error) => write!(f, "{error}"),
|
||||
}
|
||||
write!(f, "{}", TruncateAtNewline(&self.0))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -294,7 +294,7 @@ impl Violation for MissingReturnTypePrivateFunction {
|
||||
///
|
||||
/// Note that type checkers often allow you to omit the return type annotation for
|
||||
/// `__init__` methods, as long as at least one argument has a type annotation. To
|
||||
/// opt-in to this behavior, use the `mypy-init-return` setting in your `pyproject.toml`
|
||||
/// opt in to this behavior, use the `mypy-init-return` setting in your `pyproject.toml`
|
||||
/// or `ruff.toml` file:
|
||||
///
|
||||
/// ```toml
|
||||
|
||||
@@ -38,17 +38,37 @@ impl Violation for HardcodedBindAllInterfaces {
|
||||
|
||||
/// S104
|
||||
pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: StringLike) {
|
||||
let is_bind_all_interface = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "0.0.0.0",
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => {
|
||||
&**value == "0.0.0.0"
|
||||
match string {
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => {
|
||||
if value == "0.0.0.0" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
|
||||
}
|
||||
}
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
StringLike::FString(ast::ExprFString { value, .. }) => {
|
||||
for part in value {
|
||||
match part {
|
||||
ast::FStringPart::Literal(literal) => {
|
||||
if &**literal == "0.0.0.0" {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, literal.range()));
|
||||
}
|
||||
}
|
||||
ast::FStringPart::FString(f_string) => {
|
||||
for literal in f_string.literals() {
|
||||
if &**literal == "0.0.0.0" {
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HardcodedBindAllInterfaces,
|
||||
literal.range(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StringLike::Bytes(_) => (),
|
||||
};
|
||||
|
||||
if is_bind_all_interface {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use ruff_python_ast::{self as ast, Expr, StringLike};
|
||||
use ruff_text_size::Ranged;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
@@ -53,12 +53,29 @@ impl Violation for HardcodedTempFile {
|
||||
|
||||
/// S108
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike) {
|
||||
let value = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value,
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
match string {
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => {
|
||||
check(checker, value.to_str(), string.range());
|
||||
}
|
||||
StringLike::FString(ast::ExprFString { value, .. }) => {
|
||||
for part in value {
|
||||
match part {
|
||||
ast::FStringPart::Literal(literal) => {
|
||||
check(checker, literal, literal.range());
|
||||
}
|
||||
ast::FStringPart::FString(f_string) => {
|
||||
for literal in f_string.literals() {
|
||||
check(checker, literal, literal.range());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StringLike::Bytes(_) => (),
|
||||
}
|
||||
}
|
||||
|
||||
fn check(checker: &mut Checker, value: &str, range: TextRange) {
|
||||
if !checker
|
||||
.settings
|
||||
.flake8_bandit
|
||||
@@ -85,6 +102,6 @@ pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike)
|
||||
HardcodedTempFile {
|
||||
string: value.to_string(),
|
||||
},
|
||||
string.range(),
|
||||
range,
|
||||
));
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `logging.config.listen` starts a server that listens for logging
|
||||
/// configuration requests. This is insecure as parts of the configuration are
|
||||
/// configuration requests. This is insecure, as parts of the configuration are
|
||||
/// passed to the built-in `eval` function, which can be used to execute
|
||||
/// arbitrary code.
|
||||
///
|
||||
|
||||
@@ -222,7 +222,7 @@ impl Violation for StartProcessWithNoShell {
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Starting a process with a partial executable path can allow attackers to
|
||||
/// execute arbitrary executable by adjusting the `PATH` environment variable.
|
||||
/// execute an arbitrary executable by adjusting the `PATH` environment variable.
|
||||
/// Consider using a full path to the executable instead.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
|
||||
/// Checks for uses of policies disabling SSH verification in Paramiko.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// By default, Paramiko checks the identity of remote host when establishing
|
||||
/// By default, Paramiko checks the identity of the remote host when establishing
|
||||
/// an SSH connection. Disabling the verification might lead to the client
|
||||
/// connecting to a malicious host, without the client knowing.
|
||||
///
|
||||
|
||||
@@ -59,7 +59,7 @@ impl Violation for SuspiciousPickleUsage {
|
||||
/// Checks for calls to `marshal` functions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Deserializing untrusted data with `marshal` is insecure as it can allow for
|
||||
/// Deserializing untrusted data with `marshal` is insecure, as it can allow for
|
||||
/// the creation of arbitrary objects, which can then be used to achieve
|
||||
/// arbitrary code execution and otherwise unexpected behavior.
|
||||
///
|
||||
@@ -68,7 +68,7 @@ impl Violation for SuspiciousPickleUsage {
|
||||
///
|
||||
/// If you must deserialize untrusted data with `marshal`, consider signing the
|
||||
/// data with a secret key and verifying the signature before deserializing the
|
||||
/// payload, This will prevent an attacker from injecting arbitrary objects
|
||||
/// payload. This will prevent an attacker from injecting arbitrary objects
|
||||
/// into the serialized data.
|
||||
///
|
||||
/// ## Example
|
||||
@@ -353,7 +353,7 @@ impl Violation for SuspiciousMarkSafeUsage {
|
||||
/// behavior.
|
||||
///
|
||||
/// To mitigate this risk, audit all uses of URL open functions and ensure that
|
||||
/// only permitted schemes are used (e.g., allowing `http:` and `https:` and
|
||||
/// only permitted schemes are used (e.g., allowing `http:` and `https:`, and
|
||||
/// disallowing `file:` and `ftp:`).
|
||||
///
|
||||
/// ## Example
|
||||
@@ -395,7 +395,7 @@ impl Violation for SuspiciousURLOpenUsage {
|
||||
/// Checks for uses of cryptographically weak pseudo-random number generators.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Cryptographically weak pseudo-random number generators are insecure as they
|
||||
/// Cryptographically weak pseudo-random number generators are insecure, as they
|
||||
/// are easily predictable. This can allow an attacker to guess the generated
|
||||
/// numbers and compromise the security of the system.
|
||||
///
|
||||
|
||||
@@ -245,7 +245,7 @@ impl Violation for SuspiciousLxmlImport {
|
||||
/// Checks for imports of the `xmlrpc` module.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// XMLRPC is a particularly dangerous XML module as it is also concerned with
|
||||
/// XMLRPC is a particularly dangerous XML module, as it is also concerned with
|
||||
/// communicating data over a network. Use the `defused.xmlrpc.monkey_patch()`
|
||||
/// function to monkey-patch the `xmlrpclib` module and mitigate remote XML
|
||||
/// attacks.
|
||||
|
||||
@@ -42,4 +42,23 @@ S104.py:19:9: S104 Possible binding to all interfaces
|
||||
20 | print(x)
|
||||
|
|
||||
|
||||
S104.py:24:1: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:24:13: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:24:26: S104 Possible binding to all interfaces
|
||||
|
|
||||
23 | # Implicit string concatenation
|
||||
24 | "0.0.0.0" f"0.0.0.0{expr}0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
@@ -37,4 +37,28 @@ S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/de
|
||||
15 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:22:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
21 | # Implicit string concatenation
|
||||
22 | with open("/tmp/" "abc", "w") as f:
|
||||
| ^^^^^^^^^^^^^ S108
|
||||
23 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:24: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
@@ -45,4 +45,28 @@ S108.py:18:11: S108 Probable insecure usage of temporary file or directory: "/fo
|
||||
19 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:22:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
21 | # Implicit string concatenation
|
||||
22 | with open("/tmp/" "abc", "w") as f:
|
||||
| ^^^^^^^^^^^^^ S108
|
||||
23 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:11: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:25:24: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
23 | f.write("def")
|
||||
24 |
|
||||
25 | with open("/tmp/abc" f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
26 | f.write("def")
|
||||
|
|
||||
|
||||
@@ -67,7 +67,7 @@ impl<'a> Visitor<'a> for LoadedNamesVisitor<'a> {
|
||||
Expr::Name(name) => match &name.ctx {
|
||||
ExprContext::Load => self.loaded.push(name),
|
||||
ExprContext::Store => self.stored.push(name),
|
||||
ExprContext::Del => {}
|
||||
_ => {}
|
||||
},
|
||||
_ => visitor::walk_expr(self, expr),
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ use crate::rules::flake8_comprehensions::settings::Settings;
|
||||
/// rewritten as empty literals.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// It's unnecessary to call e.g., `dict()` as opposed to using an empty
|
||||
/// It's unnecessary to call, e.g., `dict()` as opposed to using an empty
|
||||
/// literal (`{}`). The former is slower because the name `dict` must be
|
||||
/// looked up in the global scope in case it has been rebound.
|
||||
///
|
||||
|
||||
@@ -10,14 +10,14 @@ use crate::checkers::ast::Checker;
|
||||
use super::helpers;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks that `__str__` method is defined in Django models.
|
||||
/// Checks that a `__str__` method is defined in Django models.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Django models should define `__str__` method to return a string representation
|
||||
/// Django models should define a `__str__` method to return a string representation
|
||||
/// of the model instance, as Django calls this method to display the object in
|
||||
/// the Django Admin and elsewhere.
|
||||
///
|
||||
/// Models without `__str__` method will display a non-meaningful representation
|
||||
/// Models without a `__str__` method will display a non-meaningful representation
|
||||
/// of the object in the Django Admin.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -11,7 +11,7 @@ mod tests {
|
||||
|
||||
use crate::assert_messages;
|
||||
use crate::registry::Rule;
|
||||
use crate::rules::flake8_import_conventions::settings::default_aliases;
|
||||
use crate::rules::flake8_import_conventions::settings::{default_aliases, BannedAliases};
|
||||
use crate::settings::LinterSettings;
|
||||
use crate::test::test_path;
|
||||
|
||||
@@ -57,17 +57,20 @@ mod tests {
|
||||
banned_aliases: FxHashMap::from_iter([
|
||||
(
|
||||
"typing".to_string(),
|
||||
vec!["t".to_string(), "ty".to_string()],
|
||||
BannedAliases::from_iter(["t".to_string(), "ty".to_string()]),
|
||||
),
|
||||
(
|
||||
"numpy".to_string(),
|
||||
vec!["nmp".to_string(), "npy".to_string()],
|
||||
BannedAliases::from_iter(["nmp".to_string(), "npy".to_string()]),
|
||||
),
|
||||
(
|
||||
"tensorflow.keras.backend".to_string(),
|
||||
vec!["K".to_string()],
|
||||
BannedAliases::from_iter(["K".to_string()]),
|
||||
),
|
||||
(
|
||||
"torch.nn.functional".to_string(),
|
||||
BannedAliases::from_iter(["F".to_string()]),
|
||||
),
|
||||
("torch.nn.functional".to_string(), vec!["F".to_string()]),
|
||||
]),
|
||||
banned_from: FxHashSet::default(),
|
||||
},
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use ruff_python_ast::Stmt;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::rules::flake8_import_conventions::settings::BannedAliases;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for imports that use non-standard naming conventions, like
|
||||
/// `import tensorflow.keras.backend as K`.
|
||||
@@ -49,7 +51,7 @@ pub(crate) fn banned_import_alias(
|
||||
stmt: &Stmt,
|
||||
name: &str,
|
||||
asname: &str,
|
||||
banned_conventions: &FxHashMap<String, Vec<String>>,
|
||||
banned_conventions: &FxHashMap<String, BannedAliases>,
|
||||
) -> Option<Diagnostic> {
|
||||
if let Some(banned_aliases) = banned_conventions.get(name) {
|
||||
if banned_aliases
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
//! Settings for import conventions.
|
||||
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use crate::display_settings;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
|
||||
const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("altair", "alt"),
|
||||
("matplotlib", "mpl"),
|
||||
@@ -23,10 +26,41 @@ const CONVENTIONAL_ALIASES: &[(&str, &str)] = &[
|
||||
("pyarrow", "pa"),
|
||||
];
|
||||
|
||||
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct BannedAliases(Vec<String>);
|
||||
|
||||
impl Display for BannedAliases {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "[")?;
|
||||
for (i, alias) in self.0.iter().enumerate() {
|
||||
if i > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
write!(f, "{alias}")?;
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
}
|
||||
|
||||
impl BannedAliases {
|
||||
/// Returns an iterator over the banned aliases.
|
||||
pub fn iter(&self) -> impl Iterator<Item = &str> {
|
||||
self.0.iter().map(String::as_str)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<String> for BannedAliases {
|
||||
fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> Self {
|
||||
Self(iter.into_iter().collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, CacheKey)]
|
||||
pub struct Settings {
|
||||
pub aliases: FxHashMap<String, String>,
|
||||
pub banned_aliases: FxHashMap<String, Vec<String>>,
|
||||
pub banned_aliases: FxHashMap<String, BannedAliases>,
|
||||
pub banned_from: FxHashSet<String>,
|
||||
}
|
||||
|
||||
@@ -53,9 +87,9 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.flake8_import_conventions",
|
||||
fields = [
|
||||
self.aliases | debug,
|
||||
self.banned_aliases | debug,
|
||||
self.banned_from | array,
|
||||
self.aliases | map,
|
||||
self.banned_aliases | map,
|
||||
self.banned_from | set,
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -21,7 +21,7 @@ use crate::checkers::ast::Checker;
|
||||
/// ## Why is this bad?
|
||||
/// The `startswith` and `endswith` methods accept tuples of prefixes or
|
||||
/// suffixes respectively. Passing a tuple of prefixes or suffixes is more
|
||||
/// more efficient and readable than calling the method multiple times.
|
||||
/// efficient and readable than calling the method multiple times.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
@@ -57,11 +57,9 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
|
||||
}
|
||||
|
||||
let length = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
StringLike::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => {
|
||||
value.chars().count()
|
||||
}
|
||||
StringLike::String(ast::ExprStringLiteral { value, .. }) => value.chars().count(),
|
||||
StringLike::Bytes(ast::ExprBytesLiteral { value, .. }) => value.len(),
|
||||
StringLike::FString(node) => count_f_string_chars(node),
|
||||
};
|
||||
if length <= 50 {
|
||||
return;
|
||||
@@ -75,6 +73,26 @@ pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
/// Count the number of visible characters in an f-string. This accounts for
|
||||
/// implicitly concatenated f-strings as well.
|
||||
fn count_f_string_chars(f_string: &ast::ExprFString) -> usize {
|
||||
f_string
|
||||
.value
|
||||
.iter()
|
||||
.map(|part| match part {
|
||||
ast::FStringPart::Literal(string) => string.chars().count(),
|
||||
ast::FStringPart::FString(f_string) => f_string
|
||||
.elements
|
||||
.iter()
|
||||
.map(|element| match element {
|
||||
ast::FStringElement::Literal(string) => string.chars().count(),
|
||||
ast::FStringElement::Expression(expr) => expr.range().len().to_usize(),
|
||||
})
|
||||
.sum(),
|
||||
})
|
||||
.sum()
|
||||
}
|
||||
|
||||
fn is_warnings_dot_deprecated(expr: Option<&ast::Expr>, semantic: &SemanticModel) -> bool {
|
||||
// Does `expr` represent a call to `warnings.deprecated` or `typing_extensions.deprecated`?
|
||||
let Some(expr) = expr else {
|
||||
|
||||
@@ -11,7 +11,7 @@ use crate::checkers::ast::Checker;
|
||||
/// Checks for the presence of multiple literal types in a union.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Literal types accept multiple arguments and it is clearer to specify them
|
||||
/// Literal types accept multiple arguments, and it is clearer to specify them
|
||||
/// as a single literal.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -105,12 +105,12 @@ PYI053.pyi:34:14: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
36 36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 37 |
|
||||
|
||||
PYI053.pyi:38:15: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
PYI053.pyi:38:13: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
|
|
||||
36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 |
|
||||
38 | fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
39 |
|
||||
40 | class Demo:
|
||||
|
|
||||
@@ -121,7 +121,7 @@ PYI053.pyi:38:15: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
36 36 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
37 37 |
|
||||
38 |-fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
38 |+fbar: str = f"..." # Error: PYI053
|
||||
38 |+fbar: str = ... # Error: PYI053
|
||||
39 39 |
|
||||
40 40 | class Demo:
|
||||
41 41 | """Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
@@ -144,5 +144,20 @@ PYI053.pyi:64:5: PYI053 [*] String and bytes literals longer than 50 characters
|
||||
64 |+ ... # Error: PYI053
|
||||
65 65 | )
|
||||
66 66 | def not_a_deprecated_function() -> None: ...
|
||||
67 67 |
|
||||
|
||||
PYI053.pyi:68:13: PYI053 [*] String and bytes literals longer than 50 characters are not permitted
|
||||
|
|
||||
66 | def not_a_deprecated_function() -> None: ...
|
||||
67 |
|
||||
68 | fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053
|
||||
|
|
||||
= help: Replace with `...`
|
||||
|
||||
ℹ Safe fix
|
||||
65 65 | )
|
||||
66 66 | def not_a_deprecated_function() -> None: ...
|
||||
67 67 |
|
||||
68 |-fbaz: str = f"51 character {foo} stringgggggggggggggggggggggggggg" # Error: PYI053
|
||||
68 |+fbaz: str = ... # Error: PYI053
|
||||
|
||||
@@ -103,9 +103,9 @@ impl Violation for PytestParametrizeNamesWrongType {
|
||||
/// of values.
|
||||
///
|
||||
/// The style for the list of values rows can be configured via the
|
||||
/// the [`lint.flake8-pytest-style.parametrize-values-type`] setting, while the
|
||||
/// [`lint.flake8-pytest-style.parametrize-values-type`] setting, while the
|
||||
/// style for each row of values can be configured via the
|
||||
/// the [`lint.flake8-pytest-style.parametrize-values-row-type`] setting.
|
||||
/// [`lint.flake8-pytest-style.parametrize-values-row-type`] setting.
|
||||
///
|
||||
/// For example, [`lint.flake8-pytest-style.parametrize-values-type`] will lead to
|
||||
/// the following expectations:
|
||||
|
||||
@@ -22,11 +22,11 @@ impl Default for Quote {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ruff_python_ast::str::QuoteStyle> for Quote {
|
||||
fn from(value: ruff_python_ast::str::QuoteStyle) -> Self {
|
||||
impl From<ruff_python_ast::str::Quote> for Quote {
|
||||
fn from(value: ruff_python_ast::str::Quote) -> Self {
|
||||
match value {
|
||||
ruff_python_ast::str::QuoteStyle::Double => Self::Double,
|
||||
ruff_python_ast::str::QuoteStyle::Single => Self::Single,
|
||||
ruff_python_ast::str::Quote::Double => Self::Double,
|
||||
ruff_python_ast::str::Quote::Single => Self::Single,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,7 +151,7 @@ impl AlwaysFixableViolation for ImplicitReturn {
|
||||
/// assigned variable.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The variable assignment is not necessary as the value can be returned
|
||||
/// The variable assignment is not necessary, as the value can be returned
|
||||
/// directly.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -61,7 +61,7 @@ impl Violation for IfExprWithTrueFalse {
|
||||
/// condition.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// `if` expressions that evaluate to `False` for a truthy condition an `True`
|
||||
/// `if` expressions that evaluate to `False` for a truthy condition and `True`
|
||||
/// for a falsey condition can be replaced with `not` operators, which are more
|
||||
/// concise and readable.
|
||||
///
|
||||
|
||||
@@ -13,15 +13,14 @@ use crate::rules::flake8_tidy_imports::matchers::NameMatchPolicy;
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Projects may want to ensure that specific modules or module members are
|
||||
/// not be imported or accessed.
|
||||
/// not imported or accessed.
|
||||
///
|
||||
/// Security or other company policies may be a reason to impose
|
||||
/// restrictions on importing external Python libraries. In some cases,
|
||||
/// projects may adopt conventions around the use of certain modules or
|
||||
/// module members that are not enforceable by the language itself.
|
||||
///
|
||||
/// This rule enforces certain import conventions project-wide in an
|
||||
/// automatic way.
|
||||
/// This rule enforces certain import conventions project-wide automatically.
|
||||
///
|
||||
/// ## Options
|
||||
/// - `lint.flake8-tidy-imports.banned-api`
|
||||
|
||||
@@ -13,6 +13,12 @@ pub struct ApiBan {
|
||||
pub msg: String,
|
||||
}
|
||||
|
||||
impl Display for ApiBan {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.msg)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey, Default)]
|
||||
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
@@ -47,7 +53,7 @@ impl Display for Settings {
|
||||
namespace = "linter.flake8_tidy_imports",
|
||||
fields = [
|
||||
self.ban_relative_imports,
|
||||
self.banned_api | debug,
|
||||
self.banned_api | map,
|
||||
self.banned_module_level_imports | array,
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr, ExprCall, Int, Number};
|
||||
use ruff_python_semantic::analyze::typing::find_assigned_value;
|
||||
use ruff_python_semantic::Modules;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
@@ -74,20 +73,6 @@ pub(crate) fn zero_sleep_call(checker: &mut Checker, call: &ExprCall) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
Expr::Name(ast::ExprName { id, .. }) => {
|
||||
let Some(value) = find_assigned_value(id, checker.semantic()) else {
|
||||
return;
|
||||
};
|
||||
if !matches!(
|
||||
value,
|
||||
Expr::NumberLiteral(ast::ExprNumberLiteral {
|
||||
value: Number::Int(Int::ZERO),
|
||||
..
|
||||
})
|
||||
) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
_ => return,
|
||||
}
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ TRIO115.py:11:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
11 | trio.sleep(0) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
12 | foo = 0
|
||||
13 | trio.sleep(foo) # TRIO115
|
||||
13 | trio.sleep(foo) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
@@ -40,30 +40,9 @@ TRIO115.py:11:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
11 |- trio.sleep(0) # TRIO115
|
||||
11 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
12 12 | foo = 0
|
||||
13 13 | trio.sleep(foo) # TRIO115
|
||||
13 13 | trio.sleep(foo) # OK
|
||||
14 14 | trio.sleep(1) # OK
|
||||
|
||||
TRIO115.py:13:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
11 | trio.sleep(0) # TRIO115
|
||||
12 | foo = 0
|
||||
13 | trio.sleep(foo) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
14 | trio.sleep(1) # OK
|
||||
15 | time.sleep(0) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
10 10 |
|
||||
11 11 | trio.sleep(0) # TRIO115
|
||||
12 12 | foo = 0
|
||||
13 |- trio.sleep(foo) # TRIO115
|
||||
13 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
14 14 | trio.sleep(1) # OK
|
||||
15 15 | time.sleep(0) # OK
|
||||
16 16 |
|
||||
|
||||
TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
15 | time.sleep(0) # OK
|
||||
@@ -85,145 +64,6 @@ TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s
|
||||
19 19 | bar = "bar"
|
||||
20 20 | trio.sleep(bar)
|
||||
|
||||
TRIO115.py:23:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
22 | x, y = 0, 2000
|
||||
23 | trio.sleep(x) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
24 | trio.sleep(y) # OK
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
20 20 | trio.sleep(bar)
|
||||
21 21 |
|
||||
22 22 | x, y = 0, 2000
|
||||
23 |- trio.sleep(x) # TRIO115
|
||||
23 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
|
||||
TRIO115.py:27:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
24 24 | trio.sleep(y) # OK
|
||||
25 25 |
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 |- trio.sleep(c) # TRIO115
|
||||
27 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 29 | trio.sleep(e) # TRIO115
|
||||
30 30 |
|
||||
|
||||
TRIO115.py:29:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
27 | trio.sleep(c) # TRIO115
|
||||
28 | trio.sleep(d) # OK
|
||||
29 | trio.sleep(e) # TRIO115
|
||||
| ^^^^^^^^^^^^^ TRIO115
|
||||
30 |
|
||||
31 | m_x, m_y = 0
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
27 27 | trio.sleep(c) # TRIO115
|
||||
28 28 | trio.sleep(d) # OK
|
||||
29 |- trio.sleep(e) # TRIO115
|
||||
29 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
30 30 |
|
||||
31 31 | m_x, m_y = 0
|
||||
32 32 | trio.sleep(m_y) # OK
|
||||
|
||||
TRIO115.py:36:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
33 33 | trio.sleep(m_x) # OK
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 |- trio.sleep(m_a) # TRIO115
|
||||
36 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
37 37 | trio.sleep(m_b) # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
||||
TRIO115.py:37:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
35 | m_a = m_b = 0
|
||||
36 | trio.sleep(m_a) # TRIO115
|
||||
37 | trio.sleep(m_b) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
38 |
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
34 34 |
|
||||
35 35 | m_a = m_b = 0
|
||||
36 36 | trio.sleep(m_a) # TRIO115
|
||||
37 |- trio.sleep(m_b) # TRIO115
|
||||
37 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
|
||||
TRIO115.py:41:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
38 38 |
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 |- trio.sleep(m_d) # TRIO115
|
||||
41 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
42 42 | trio.sleep(m_e) # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
|
||||
TRIO115.py:42:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
40 | trio.sleep(m_c) # OK
|
||||
41 | trio.sleep(m_d) # TRIO115
|
||||
42 | trio.sleep(m_e) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
39 39 | m_c = (m_d, m_e) = (0, 0)
|
||||
40 40 | trio.sleep(m_c) # OK
|
||||
41 41 | trio.sleep(m_d) # TRIO115
|
||||
42 |- trio.sleep(m_e) # TRIO115
|
||||
42 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
43 43 |
|
||||
44 44 |
|
||||
45 45 | def func():
|
||||
|
||||
TRIO115.py:48:14: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
46 | import trio
|
||||
@@ -292,20 +132,3 @@ TRIO115.py:59:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.
|
||||
60 60 |
|
||||
61 61 |
|
||||
62 62 | def func():
|
||||
|
||||
TRIO115.py:66:9: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)`
|
||||
|
|
||||
65 | if (walrus := 0) == 0:
|
||||
66 | trio.sleep(walrus) # TRIO115
|
||||
| ^^^^^^^^^^^^^^^^^^ TRIO115
|
||||
|
|
||||
= help: Replace with `trio.lowlevel.checkpoint()`
|
||||
|
||||
ℹ Safe fix
|
||||
63 63 | import trio
|
||||
64 64 |
|
||||
65 65 | if (walrus := 0) == 0:
|
||||
66 |- trio.sleep(walrus) # TRIO115
|
||||
66 |+ trio.lowlevel.checkpoint() # TRIO115
|
||||
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
///
|
||||
/// | | `glob` | `Path.glob` |
|
||||
/// |-------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
/// | Hidden files | Excludes hidden files by default. From Python 3.11 onwards, the `include_hidden` keyword can used to include hidden directories. | Includes hidden files by default. |
|
||||
/// | Hidden files | Excludes hidden files by default. From Python 3.11 onwards, the `include_hidden` keyword can be used to include hidden directories. | Includes hidden files by default. |
|
||||
/// | Iterator | `iglob` returns an iterator. Under the hood, `glob` simply converts the iterator to a list. | `Path.glob` returns an iterator. |
|
||||
/// | Working directory | `glob` takes a `root_dir` keyword to set the current working directory. | `Path.rglob` can be used to return the relative path. |
|
||||
/// | Globstar (`**`) | `glob` requires the `recursive` flag to be set to `True` for the `**` pattern to match any files and zero or more directories, subdirectories, and symbolic links. | The `**` pattern in `Path.glob` means "this directory and all subdirectories, recursively". In other words, it enables recursive globbing. |
|
||||
|
||||
@@ -278,7 +278,7 @@ mod tests {
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use test_case::test_case;
|
||||
|
||||
use ruff_text_size::Ranged;
|
||||
@@ -495,7 +495,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_to_top: BTreeSet::from([
|
||||
force_to_top: FxHashSet::from_iter([
|
||||
"z".to_string(),
|
||||
"lib1".to_string(),
|
||||
"lib3".to_string(),
|
||||
@@ -575,9 +575,10 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_single_line: true,
|
||||
single_line_exclusions: vec!["os".to_string(), "logging.handlers".to_string()]
|
||||
.into_iter()
|
||||
.collect::<BTreeSet<_>>(),
|
||||
single_line_exclusions: FxHashSet::from_iter([
|
||||
"os".to_string(),
|
||||
"logging.handlers".to_string(),
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -636,7 +637,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
classes: BTreeSet::from([
|
||||
classes: FxHashSet::from_iter([
|
||||
"SVC".to_string(),
|
||||
"SELU".to_string(),
|
||||
"N_CLASS".to_string(),
|
||||
@@ -664,7 +665,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
constants: BTreeSet::from([
|
||||
constants: FxHashSet::from_iter([
|
||||
"Const".to_string(),
|
||||
"constant".to_string(),
|
||||
"First".to_string(),
|
||||
@@ -694,7 +695,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
order_by_type: true,
|
||||
variables: BTreeSet::from([
|
||||
variables: FxHashSet::from_iter([
|
||||
"VAR".to_string(),
|
||||
"Variable".to_string(),
|
||||
"MyVar".to_string(),
|
||||
@@ -721,7 +722,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
force_sort_within_sections: true,
|
||||
force_to_top: BTreeSet::from(["z".to_string()]),
|
||||
force_to_top: FxHashSet::from_iter(["z".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
@@ -771,7 +772,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations".to_string()
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -801,7 +802,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations as _annotations".to_string(),
|
||||
]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -824,7 +825,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from([
|
||||
required_imports: BTreeSet::from_iter([
|
||||
"from __future__ import annotations".to_string(),
|
||||
"from __future__ import generator_stop".to_string(),
|
||||
]),
|
||||
@@ -848,7 +849,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from(["from __future__ import annotations, \
|
||||
required_imports: BTreeSet::from_iter(["from __future__ import annotations, \
|
||||
generator_stop"
|
||||
.to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
@@ -871,7 +872,7 @@ mod tests {
|
||||
&LinterSettings {
|
||||
src: vec![test_resource_path("fixtures/isort")],
|
||||
isort: super::settings::Settings {
|
||||
required_imports: BTreeSet::from(["import os".to_string()]),
|
||||
required_imports: BTreeSet::from_iter(["import os".to_string()]),
|
||||
..super::settings::Settings::default()
|
||||
},
|
||||
..LinterSettings::for_rule(Rule::MissingRequiredImport)
|
||||
@@ -1002,7 +1003,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: BTreeSet::from([
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
ImportSection::Known(ImportType::Future),
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::ThirdParty),
|
||||
@@ -1030,7 +1031,7 @@ mod tests {
|
||||
Path::new("isort").join(path).as_path(),
|
||||
&LinterSettings {
|
||||
isort: super::settings::Settings {
|
||||
no_lines_before: BTreeSet::from([
|
||||
no_lines_before: FxHashSet::from_iter([
|
||||
ImportSection::Known(ImportType::StandardLibrary),
|
||||
ImportSection::Known(ImportType::LocalFolder),
|
||||
]),
|
||||
|
||||
@@ -5,12 +5,13 @@ use std::error::Error;
|
||||
use std::fmt;
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use rustc_hash::FxHashSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
use crate::display_settings;
|
||||
use ruff_macros::CacheKey;
|
||||
|
||||
use crate::display_settings;
|
||||
use crate::rules::isort::categorize::KnownModules;
|
||||
use crate::rules::isort::ImportType;
|
||||
|
||||
@@ -52,17 +53,17 @@ pub struct Settings {
|
||||
pub force_sort_within_sections: bool,
|
||||
pub case_sensitive: bool,
|
||||
pub force_wrap_aliases: bool,
|
||||
pub force_to_top: BTreeSet<String>,
|
||||
pub force_to_top: FxHashSet<String>,
|
||||
pub known_modules: KnownModules,
|
||||
pub detect_same_package: bool,
|
||||
pub order_by_type: bool,
|
||||
pub relative_imports_order: RelativeImportsOrder,
|
||||
pub single_line_exclusions: BTreeSet<String>,
|
||||
pub single_line_exclusions: FxHashSet<String>,
|
||||
pub split_on_trailing_comma: bool,
|
||||
pub classes: BTreeSet<String>,
|
||||
pub constants: BTreeSet<String>,
|
||||
pub variables: BTreeSet<String>,
|
||||
pub no_lines_before: BTreeSet<ImportSection>,
|
||||
pub classes: FxHashSet<String>,
|
||||
pub constants: FxHashSet<String>,
|
||||
pub variables: FxHashSet<String>,
|
||||
pub no_lines_before: FxHashSet<ImportSection>,
|
||||
pub lines_after_imports: isize,
|
||||
pub lines_between_types: usize,
|
||||
pub forced_separate: Vec<String>,
|
||||
@@ -77,23 +78,23 @@ pub struct Settings {
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
required_imports: BTreeSet::new(),
|
||||
required_imports: BTreeSet::default(),
|
||||
combine_as_imports: false,
|
||||
force_single_line: false,
|
||||
force_sort_within_sections: false,
|
||||
detect_same_package: true,
|
||||
case_sensitive: false,
|
||||
force_wrap_aliases: false,
|
||||
force_to_top: BTreeSet::new(),
|
||||
force_to_top: FxHashSet::default(),
|
||||
known_modules: KnownModules::default(),
|
||||
order_by_type: true,
|
||||
relative_imports_order: RelativeImportsOrder::default(),
|
||||
single_line_exclusions: BTreeSet::new(),
|
||||
single_line_exclusions: FxHashSet::default(),
|
||||
split_on_trailing_comma: true,
|
||||
classes: BTreeSet::new(),
|
||||
constants: BTreeSet::new(),
|
||||
variables: BTreeSet::new(),
|
||||
no_lines_before: BTreeSet::new(),
|
||||
classes: FxHashSet::default(),
|
||||
constants: FxHashSet::default(),
|
||||
variables: FxHashSet::default(),
|
||||
no_lines_before: FxHashSet::default(),
|
||||
lines_after_imports: -1,
|
||||
lines_between_types: 0,
|
||||
forced_separate: Vec::new(),
|
||||
@@ -113,23 +114,23 @@ impl Display for Settings {
|
||||
formatter = f,
|
||||
namespace = "linter.isort",
|
||||
fields = [
|
||||
self.required_imports | array,
|
||||
self.required_imports | set,
|
||||
self.combine_as_imports,
|
||||
self.force_single_line,
|
||||
self.force_sort_within_sections,
|
||||
self.detect_same_package,
|
||||
self.case_sensitive,
|
||||
self.force_wrap_aliases,
|
||||
self.force_to_top | array,
|
||||
self.force_to_top | set,
|
||||
self.known_modules,
|
||||
self.order_by_type,
|
||||
self.relative_imports_order,
|
||||
self.single_line_exclusions | array,
|
||||
self.single_line_exclusions | set,
|
||||
self.split_on_trailing_comma,
|
||||
self.classes | array,
|
||||
self.constants | array,
|
||||
self.variables | array,
|
||||
self.no_lines_before | array,
|
||||
self.classes | set,
|
||||
self.constants | set,
|
||||
self.variables | set,
|
||||
self.no_lines_before | set,
|
||||
self.lines_after_imports,
|
||||
self.lines_between_types,
|
||||
self.forced_separate | array,
|
||||
@@ -155,7 +156,7 @@ pub enum SettingsError {
|
||||
InvalidUserDefinedSection(glob::PatternError),
|
||||
}
|
||||
|
||||
impl fmt::Display for SettingsError {
|
||||
impl Display for SettingsError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
SettingsError::InvalidKnownThirdParty(err) => {
|
||||
|
||||
@@ -14,5 +14,3 @@ bom_unsorted.py:1:1: I001 [*] Import block is un-sorted or un-formatted
|
||||
2 |-import bar
|
||||
1 |+import bar
|
||||
2 |+import foo
|
||||
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ use crate::checkers::ast::Checker;
|
||||
/// primarily for historic reasons, and have been a cause of
|
||||
/// frequent confusion for newcomers.
|
||||
///
|
||||
/// These aliases were been deprecated in 1.20, and removed in 1.24.
|
||||
/// These aliases were deprecated in 1.20, and removed in 1.24.
|
||||
///
|
||||
/// ## Examples
|
||||
/// ```python
|
||||
|
||||
@@ -10,7 +10,7 @@ use crate::rules::pandas_vet::helpers::{test_expression, Resolution};
|
||||
/// Checks for uses of `.values` on Pandas Series and Index objects.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `.values` attribute is ambiguous as it's return type is unclear. As
|
||||
/// The `.values` attribute is ambiguous as its return type is unclear. As
|
||||
/// such, it is no longer recommended by the Pandas documentation.
|
||||
///
|
||||
/// Instead, use `.to_numpy()` to return a NumPy array, or `.array` to return a
|
||||
|
||||
@@ -21,7 +21,7 @@ use crate::rules::pep8_naming::settings::IgnoreNames;
|
||||
/// > all-lowercase names, although the use of underscores is discouraged.
|
||||
/// >
|
||||
/// > When an extension module written in C or C++ has an accompanying Python module that
|
||||
/// > provides a higher level (e.g. more object oriented) interface, the C/C++ module has
|
||||
/// > provides a higher level (e.g. more object-oriented) interface, the C/C++ module has
|
||||
/// > a leading underscore (e.g. `_socket`).
|
||||
///
|
||||
/// Further, in order for Python modules to be importable, they must be valid
|
||||
|
||||
@@ -241,7 +241,7 @@ impl AlwaysFixableViolation for BlankLineAfterDecorator {
|
||||
/// Checks for missing blank lines after the end of function or class.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// PEP 8 recommends using blank lines as following:
|
||||
/// PEP 8 recommends using blank lines as follows:
|
||||
/// - Two blank lines are expected between functions and classes
|
||||
/// - One blank line is expected between methods of a class.
|
||||
///
|
||||
@@ -292,7 +292,7 @@ impl AlwaysFixableViolation for BlankLinesAfterFunctionOrClass {
|
||||
/// Checks for 1 blank line between nested function or class definitions.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// PEP 8 recommends using blank lines as following:
|
||||
/// PEP 8 recommends using blank lines as follows:
|
||||
/// - Two blank lines are expected between functions and classes
|
||||
/// - One blank line is expected between methods of a class.
|
||||
///
|
||||
|
||||
@@ -81,7 +81,7 @@ pub(crate) fn syntax_error(
|
||||
parse_error: &ParseError,
|
||||
locator: &Locator,
|
||||
) {
|
||||
let rest = locator.after(parse_error.offset);
|
||||
let rest = locator.after(parse_error.location.start());
|
||||
|
||||
// Try to create a non-empty range so that the diagnostic can print a caret at the
|
||||
// right position. This requires that we retrieve the next character, if any, and take its length
|
||||
@@ -95,6 +95,6 @@ pub(crate) fn syntax_error(
|
||||
SyntaxError {
|
||||
message: format!("{}", DisplayParseErrorType::new(&parse_error.error)),
|
||||
},
|
||||
TextRange::at(parse_error.offset, len),
|
||||
TextRange::at(parse_error.location.start(), len),
|
||||
));
|
||||
}
|
||||
|
||||
@@ -9,8 +9,9 @@ use ruff_source_file::Locator;
|
||||
/// Checks for files missing a new line at the end of the file.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Trailing blank lines are superfluous.
|
||||
/// However the last line should end with a new line.
|
||||
/// Trailing blank lines in a file are superfluous.
|
||||
///
|
||||
/// However, the last line of the file should end with a newline.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
@@ -8,5 +8,3 @@ E999.py:3:1: E999 SyntaxError: unindent does not match any outer indentation lev
|
||||
| ^ E999
|
||||
4 |
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -27,11 +27,11 @@ W505.py:10:51: W505 Doc line too long (56 > 50)
|
||||
12 | x = 2
|
||||
|
|
||||
|
||||
W505.py:13:51: W505 Doc line too long (93 > 50)
|
||||
W505.py:13:51: W505 Doc line too long (94 > 50)
|
||||
|
|
||||
12 | x = 2
|
||||
13 | # Another standalone that is preceded by a newline and indent toke and is over the limit.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
13 | # Another standalone that is preceded by a newline and indent token and is over the limit.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
14 |
|
||||
15 | print("Here's a string that's over the limit, but it's not a docstring.")
|
||||
|
|
||||
@@ -58,5 +58,3 @@ W505.py:31:51: W505 Doc line too long (85 > 50)
|
||||
31 | It's over the limit on this line, which isn't the first line in the docstring."""
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -27,11 +27,11 @@ W505_utf_8.py:10:51: W505 Doc line too long (56 > 50)
|
||||
12 | x = 2
|
||||
|
|
||||
|
||||
W505_utf_8.py:13:51: W505 Doc line too long (93 > 50)
|
||||
W505_utf_8.py:13:51: W505 Doc line too long (94 > 50)
|
||||
|
|
||||
12 | x = 2
|
||||
13 | # Another standalone that is preceded by a newline and indent toke and is over theß9💣2ℝ.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
13 | # Another standalone that is preceded by a newline and indent token and is over theß9💣2ℝ.
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
14 |
|
||||
15 | print("Here's a string that's over theß9💣2ℝ, but it's not a ß9💣2ℝing.")
|
||||
|
|
||||
@@ -58,5 +58,3 @@ W505_utf_8.py:31:50: W505 Doc line too long (85 > 50)
|
||||
31 | It's over theß9💣2ℝ on this line, which isn't the first line in the ß9💣2ℝing."""
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ W505
|
||||
|
|
||||
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@ impl AlwaysFixableViolation for OneBlankLineBeforeClass {
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// [PEP 257] recommends the use of a blank line to separate a class's
|
||||
/// docstring its methods.
|
||||
/// docstring from its methods.
|
||||
///
|
||||
/// This rule may not apply to all projects; its applicability is a matter of
|
||||
/// convention. By default, this rule is enabled when using the `google`
|
||||
|
||||
@@ -368,7 +368,7 @@ impl Violation for UndocumentedPublicPackage {
|
||||
/// ## Why is this bad?
|
||||
/// Magic methods (methods with names that start and end with double
|
||||
/// underscores) are used to implement operator overloading and other special
|
||||
/// behavior. Such methods should should be documented via docstrings to
|
||||
/// behavior. Such methods should be documented via docstrings to
|
||||
/// outline their behavior.
|
||||
///
|
||||
/// Generally, magic method docstrings should describe the method's behavior,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_codegen::Quote;
|
||||
use ruff_python_ast::str::Quote;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
@@ -97,8 +97,8 @@ impl fmt::Display for Settings {
|
||||
namespace = "linter.pydocstyle",
|
||||
fields = [
|
||||
self.convention | optional,
|
||||
self.ignore_decorators | debug,
|
||||
self.property_decorators | debug
|
||||
self.ignore_decorators | set,
|
||||
self.property_decorators | set
|
||||
]
|
||||
}
|
||||
Ok(())
|
||||
|
||||
@@ -124,6 +124,7 @@ mod tests {
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_25.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_26.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_27.py"))]
|
||||
#[test_case(Rule::RedefinedWhileUnused, Path::new("F811_28.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_0.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_1.py"))]
|
||||
#[test_case(Rule::UndefinedName, Path::new("F821_2.py"))]
|
||||
@@ -222,6 +223,19 @@ mod tests {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn init_unused_import_opt_in_to_fix() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
Path::new("pyflakes/__init__.py"),
|
||||
&LinterSettings {
|
||||
ignore_init_module_imports: false,
|
||||
..LinterSettings::for_rules(vec![Rule::UnusedImport])
|
||||
},
|
||||
)?;
|
||||
assert_messages!(diagnostics);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn default_builtins() -> Result<()> {
|
||||
let diagnostics = test_path(
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::borrow::Cow;
|
||||
use anyhow::Result;
|
||||
use rustc_hash::FxHashMap;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_diagnostics::{Applicability, Diagnostic, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_semantic::{AnyImport, Exceptions, Imported, NodeId, Scope};
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
@@ -37,6 +37,11 @@ enum UnusedImportContext {
|
||||
/// from module import member as member
|
||||
/// ```
|
||||
///
|
||||
/// ## Fix safety
|
||||
///
|
||||
/// When `ignore_init_module_imports` is disabled, fixes can remove for unused imports in `__init__` files.
|
||||
/// These fixes are considered unsafe because they can change the public interface.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// import numpy as np # unused import
|
||||
@@ -90,7 +95,7 @@ impl Violation for UnusedImport {
|
||||
}
|
||||
Some(UnusedImportContext::Init) => {
|
||||
format!(
|
||||
"`{name}` imported but unused; consider adding to `__all__` or using a redundant alias"
|
||||
"`{name}` imported but unused; consider removing, adding to `__all__`, or using a redundant alias"
|
||||
)
|
||||
}
|
||||
None => format!("`{name}` imported but unused"),
|
||||
@@ -154,8 +159,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
}
|
||||
}
|
||||
|
||||
let in_init =
|
||||
checker.settings.ignore_init_module_imports && checker.path().ends_with("__init__.py");
|
||||
let in_init = checker.path().ends_with("__init__.py");
|
||||
let fix_init = !checker.settings.ignore_init_module_imports;
|
||||
|
||||
// Generate a diagnostic for every import, but share a fix across all imports within the same
|
||||
// statement (excluding those that are ignored).
|
||||
@@ -164,8 +169,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||
exceptions.intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
|
||||
let multiple = imports.len() > 1;
|
||||
|
||||
let fix = if !in_init && !in_except_handler {
|
||||
fix_imports(checker, node_id, &imports).ok()
|
||||
let fix = if (!in_init || fix_init) && !in_except_handler {
|
||||
fix_imports(checker, node_id, &imports, in_init).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
@@ -243,7 +248,12 @@ impl Ranged for ImportBinding<'_> {
|
||||
}
|
||||
|
||||
/// Generate a [`Fix`] to remove unused imports from a statement.
|
||||
fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> {
|
||||
fn fix_imports(
|
||||
checker: &Checker,
|
||||
node_id: NodeId,
|
||||
imports: &[ImportBinding],
|
||||
in_init: bool,
|
||||
) -> Result<Fix> {
|
||||
let statement = checker.semantic().statement(node_id);
|
||||
let parent = checker.semantic().parent_statement(node_id);
|
||||
|
||||
@@ -261,7 +271,15 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) ->
|
||||
checker.stylist(),
|
||||
checker.indexer(),
|
||||
)?;
|
||||
Ok(Fix::safe_edit(edit).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)))
|
||||
// It's unsafe to remove things from `__init__.py` because it can break public interfaces
|
||||
let applicability = if in_init {
|
||||
Applicability::Unsafe
|
||||
} else {
|
||||
Applicability::Safe
|
||||
};
|
||||
Ok(
|
||||
Fix::applicable_edit(edit, applicability).isolate(Checker::isolation(
|
||||
checker.semantic().parent_statement_id(node_id),
|
||||
)),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,15 +1,9 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_1.py:1:25: F811 [*] Redefinition of unused `FU` from line 1
|
||||
F811_1.py:1:25: F811 Redefinition of unused `FU` from line 1
|
||||
|
|
||||
1 | import fu as FU, bar as FU
|
||||
| ^^ F811
|
||||
|
|
||||
= help: Remove definition: `FU`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-import fu as FU, bar as FU
|
||||
1 |+import fu as FU
|
||||
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_12.py:6:20: F811 [*] Redefinition of unused `mixer` from line 2
|
||||
F811_12.py:6:20: F811 Redefinition of unused `mixer` from line 2
|
||||
|
|
||||
4 | pass
|
||||
5 | else:
|
||||
@@ -10,13 +10,3 @@ F811_12.py:6:20: F811 [*] Redefinition of unused `mixer` from line 2
|
||||
7 | mixer(123)
|
||||
|
|
||||
= help: Remove definition: `mixer`
|
||||
|
||||
ℹ Safe fix
|
||||
3 3 | except ImportError:
|
||||
4 4 | pass
|
||||
5 5 | else:
|
||||
6 |- from bb import mixer
|
||||
6 |+ pass
|
||||
7 7 | mixer(123)
|
||||
|
||||
|
||||
|
||||
@@ -1,15 +1,9 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_2.py:1:34: F811 [*] Redefinition of unused `FU` from line 1
|
||||
F811_2.py:1:34: F811 Redefinition of unused `FU` from line 1
|
||||
|
|
||||
1 | from moo import fu as FU, bar as FU
|
||||
| ^^ F811
|
||||
|
|
||||
= help: Remove definition: `FU`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-from moo import fu as FU, bar as FU
|
||||
1 |+from moo import fu as FU
|
||||
|
||||
|
||||
|
||||
@@ -1,18 +1,10 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_23.py:4:15: F811 [*] Redefinition of unused `foo` from line 3
|
||||
F811_23.py:4:15: F811 Redefinition of unused `foo` from line 3
|
||||
|
|
||||
3 | import foo as foo
|
||||
4 | import bar as foo
|
||||
| ^^^ F811
|
||||
|
|
||||
= help: Remove definition: `foo`
|
||||
|
||||
ℹ Safe fix
|
||||
1 1 | """Test that shadowing an explicit re-export produces a warning."""
|
||||
2 2 |
|
||||
3 3 | import foo as foo
|
||||
4 |-import bar as foo
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
F811_28.py:4:22: F811 Redefinition of unused `datetime` from line 3
|
||||
|
|
||||
3 | import datetime
|
||||
4 | from datetime import datetime
|
||||
| ^^^^^^^^ F811
|
||||
5 |
|
||||
6 | datetime(1, 2, 3)
|
||||
|
|
||||
= help: Remove definition: `datetime`
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:1:8: F401 [*] `os` imported but unused
|
||||
__init__.py:1:8: F401 `os` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
1 | import os
|
||||
| ^^ F401
|
||||
@@ -9,9 +9,3 @@ __init__.py:1:8: F401 [*] `os` imported but unused
|
||||
3 | print(__path__)
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Safe fix
|
||||
1 |-import os
|
||||
2 1 |
|
||||
3 2 | print(__path__)
|
||||
4 3 |
|
||||
|
||||
@@ -0,0 +1,17 @@
|
||||
---
|
||||
source: crates/ruff_linter/src/rules/pyflakes/mod.rs
|
||||
---
|
||||
__init__.py:1:8: F401 [*] `os` imported but unused; consider removing, adding to `__all__`, or using a redundant alias
|
||||
|
|
||||
1 | import os
|
||||
| ^^ F401
|
||||
2 |
|
||||
3 | print(__path__)
|
||||
|
|
||||
= help: Remove unused import: `os`
|
||||
|
||||
ℹ Unsafe fix
|
||||
1 |-import os
|
||||
2 1 |
|
||||
3 2 | print(__path__)
|
||||
4 3 |
|
||||
@@ -71,6 +71,7 @@ mod tests {
|
||||
#[test_case(Rule::ImportSelf, Path::new("import_self/module.py"))]
|
||||
#[test_case(Rule::InvalidAllFormat, Path::new("invalid_all_format.py"))]
|
||||
#[test_case(Rule::InvalidAllObject, Path::new("invalid_all_object.py"))]
|
||||
#[test_case(Rule::InvalidBoolReturnType, Path::new("invalid_return_type_bool.py"))]
|
||||
#[test_case(Rule::InvalidStrReturnType, Path::new("invalid_return_type_str.py"))]
|
||||
#[test_case(Rule::DuplicateBases, Path::new("duplicate_bases.py"))]
|
||||
#[test_case(Rule::InvalidCharacterBackspace, Path::new("invalid_characters.py"))]
|
||||
|
||||
@@ -13,8 +13,8 @@ use crate::checkers::ast::Checker;
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// An empty string is falsy, so it is unnecessary to compare it to `""`. If
|
||||
/// the value can be something else Python considers falsy, such as `None` or
|
||||
/// `0` or another empty container, then the code is not equivalent.
|
||||
/// the value can be something else Python considers falsy, such as `None`,
|
||||
/// `0`, or another empty container, then the code is not equivalent.
|
||||
///
|
||||
/// ## Known problems
|
||||
/// High false positive rate, as the check is context-insensitive and does not
|
||||
|
||||
@@ -0,0 +1,78 @@
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::helpers::ReturnStatementVisitor;
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::Stmt;
|
||||
use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `__bool__` implementations that return a type other than `bool`.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// The `__bool__` method should return a `bool` object. Returning a different
|
||||
/// type may cause unexpected behavior.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// class Foo:
|
||||
/// def __bool__(self):
|
||||
/// return 2
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// class Foo:
|
||||
/// def __bool__(self):
|
||||
/// return True
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: The `__bool__` method](https://docs.python.org/3/reference/datamodel.html#object.__bool__)
|
||||
#[violation]
|
||||
pub struct InvalidBoolReturnType;
|
||||
|
||||
impl Violation for InvalidBoolReturnType {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!("`__bool__` does not return `bool`")
|
||||
}
|
||||
}
|
||||
|
||||
/// E0307
|
||||
pub(crate) fn invalid_bool_return(checker: &mut Checker, name: &str, body: &[Stmt]) {
|
||||
if name != "__bool__" {
|
||||
return;
|
||||
}
|
||||
|
||||
if !checker.semantic().current_scope().kind.is_class() {
|
||||
return;
|
||||
}
|
||||
|
||||
let returns = {
|
||||
let mut visitor = ReturnStatementVisitor::default();
|
||||
visitor.visit_body(body);
|
||||
visitor.returns
|
||||
};
|
||||
|
||||
for stmt in returns {
|
||||
if let Some(value) = stmt.value.as_deref() {
|
||||
if !matches!(
|
||||
ResolvedPythonType::from(value),
|
||||
ResolvedPythonType::Unknown
|
||||
| ResolvedPythonType::Atom(PythonType::Number(NumberLike::Bool))
|
||||
) {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(InvalidBoolReturnType, value.range()));
|
||||
}
|
||||
} else {
|
||||
// Disallow implicit `None`.
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(InvalidBoolReturnType, stmt.range()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,23 @@ use crate::checkers::ast::Checker;
|
||||
/// ## Why is this bad?
|
||||
/// The `__str__` method should return a `str` object. Returning a different
|
||||
/// type may cause unexpected behavior.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// class Foo:
|
||||
/// def __str__(self):
|
||||
/// return True
|
||||
/// ```
|
||||
///
|
||||
/// Use instead:
|
||||
/// ```python
|
||||
/// class Foo:
|
||||
/// def __str__(self):
|
||||
/// return "Foo"
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// - [Python documentation: The `__str__` method](https://docs.python.org/3/reference/datamodel.html#object.__str__)
|
||||
#[violation]
|
||||
pub struct InvalidStrReturnType;
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ pub(crate) use import_private_name::*;
|
||||
pub(crate) use import_self::*;
|
||||
pub(crate) use invalid_all_format::*;
|
||||
pub(crate) use invalid_all_object::*;
|
||||
pub(crate) use invalid_bool_return::*;
|
||||
pub(crate) use invalid_envvar_default::*;
|
||||
pub(crate) use invalid_envvar_value::*;
|
||||
pub(crate) use invalid_str_return::*;
|
||||
@@ -113,6 +114,7 @@ mod import_private_name;
|
||||
mod import_self;
|
||||
mod invalid_all_format;
|
||||
mod invalid_all_object;
|
||||
mod invalid_bool_return;
|
||||
mod invalid_envvar_default;
|
||||
mod invalid_envvar_value;
|
||||
mod invalid_str_return;
|
||||
|
||||
@@ -6,8 +6,8 @@ use ruff_macros::{derive_message_formats, violation};
|
||||
/// that redefine function parameters.
|
||||
///
|
||||
/// ## Why is this bad?
|
||||
/// Redefined variables can cause unexpected behavior because of overridden function parameters.
/// If nested functions are declared, an inner function's body can override an outer function's parameters.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
@@ -29,7 +29,7 @@ use crate::checkers::ast::Checker;
|
||||
/// into the remainder of the enclosing loop.
|
||||
///
|
||||
/// While this mistake is easy to spot in small examples, it can be hidden
|
||||
/// in larger blocks of code, where the definition and redefinition of the
|
||||
/// variable may not be visible at the same time.
|
||||
///
|
||||
/// ## Example
|
||||
|
||||
@@ -2,6 +2,7 @@ use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_stdlib::builtins;
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -14,6 +15,10 @@ use crate::checkers::ast::Checker;
|
||||
/// `ValueError("...")` on its own will have no effect (unlike
|
||||
/// `raise ValueError("...")`) and is likely a mistake.
|
||||
///
|
||||
/// ## Known problems
|
||||
/// This rule only detects built-in exceptions, like `ValueError`, and does
|
||||
/// not catch user-defined exceptions.
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
/// ValueError("...")
|
||||
@@ -60,38 +65,8 @@ pub(crate) fn useless_exception_statement(checker: &mut Checker, expr: &ast::Stm
|
||||
}
|
||||
|
||||
/// Returns `true` if the given expression is a builtin exception.
|
||||
///
|
||||
/// See: <https://docs.python.org/3/library/exceptions.html#exception-hierarchy>
|
||||
fn is_builtin_exception(expr: &Expr, semantic: &SemanticModel) -> bool {
|
||||
return semantic
|
||||
semantic
|
||||
.resolve_qualified_name(expr)
|
||||
.is_some_and(|qualified_name| {
|
||||
matches!(
|
||||
qualified_name.segments(),
|
||||
[
|
||||
"",
|
||||
"SystemExit"
|
||||
| "Exception"
|
||||
| "ArithmeticError"
|
||||
| "AssertionError"
|
||||
| "AttributeError"
|
||||
| "BufferError"
|
||||
| "EOFError"
|
||||
| "ImportError"
|
||||
| "LookupError"
|
||||
| "IndexError"
|
||||
| "KeyError"
|
||||
| "MemoryError"
|
||||
| "NameError"
|
||||
| "ReferenceError"
|
||||
| "RuntimeError"
|
||||
| "NotImplementedError"
|
||||
| "StopIteration"
|
||||
| "SyntaxError"
|
||||
| "SystemError"
|
||||
| "TypeError"
|
||||
| "ValueError"
|
||||
]
|
||||
)
|
||||
});
|
||||
.is_some_and(|qualified_name| matches!(qualified_name.segments(), ["", name] if builtins::is_exception(name)))
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user