Compare commits
118 Commits
v0.0.251
...
github-292
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6e97c9438a | ||
|
|
187104e396 | ||
|
|
ea86edf12e | ||
|
|
2558384817 | ||
|
|
3ed539d50e | ||
|
|
4a70a4c323 | ||
|
|
310f13c7db | ||
|
|
2168404fc2 | ||
|
|
a032b66c2e | ||
|
|
af5f7dbd83 | ||
|
|
8066607ea3 | ||
|
|
0ed9fccce9 | ||
|
|
074a343a63 | ||
|
|
c7e09b54b0 | ||
|
|
67d1f74587 | ||
|
|
1c79dff3bd | ||
|
|
f5f09b489b | ||
|
|
061495a9eb | ||
|
|
470e1c1754 | ||
|
|
16be691712 | ||
|
|
270015865b | ||
|
|
ccfa9d5b20 | ||
|
|
2261e194a0 | ||
|
|
cd6413ca09 | ||
|
|
c65585e14a | ||
|
|
d2a6ed7be6 | ||
|
|
16e2dae0c2 | ||
|
|
e8ba9c9e21 | ||
|
|
d285f5c90a | ||
|
|
386ca7c9a1 | ||
|
|
40c5abf16e | ||
|
|
7e7aec7d74 | ||
|
|
4b5538f74e | ||
|
|
36d134fd41 | ||
|
|
0b7d6b9097 | ||
|
|
994e2e0903 | ||
|
|
bc79f540e4 | ||
|
|
3a78b59314 | ||
|
|
5f83851329 | ||
|
|
484ce7b8fc | ||
|
|
1c75071136 | ||
|
|
51bca19c1d | ||
|
|
a8a312e862 | ||
|
|
33c31cda27 | ||
|
|
cd9fbeb560 | ||
|
|
84e96cdcd9 | ||
|
|
248590224a | ||
|
|
bbc55cdb04 | ||
|
|
2792439eac | ||
|
|
0694aee1b6 | ||
|
|
a17b5c134a | ||
|
|
42f61535b5 | ||
|
|
1c01b3c934 | ||
|
|
39b9a1637f | ||
|
|
2c692e3acf | ||
|
|
24add5f56c | ||
|
|
0b7736ad79 | ||
|
|
eef85067c8 | ||
|
|
da98fab4ae | ||
|
|
0f37a98d91 | ||
|
|
f38624824d | ||
|
|
159422071e | ||
|
|
6eaacf96be | ||
|
|
eb15371453 | ||
|
|
198b301baf | ||
|
|
c8c575dd43 | ||
|
|
6e54cd8233 | ||
|
|
a688a237d7 | ||
|
|
bda2a0007a | ||
|
|
32d165b7ad | ||
|
|
ac79bf4ee9 | ||
|
|
376eab3a53 | ||
|
|
08be7bd285 | ||
|
|
f5241451d8 | ||
|
|
c9fe0708cb | ||
|
|
09f8c487ea | ||
|
|
1e7233a8eb | ||
|
|
f967f344fc | ||
|
|
095f005bf4 | ||
|
|
0f04aa2a5f | ||
|
|
ad7ba77fff | ||
|
|
77d43795f8 | ||
|
|
4357f2be0f | ||
|
|
e5c1f95545 | ||
|
|
227ff62a4e | ||
|
|
d8e4902516 | ||
|
|
e66739884f | ||
|
|
5fd827545b | ||
|
|
c1ddcb8a60 | ||
|
|
48a317d5f6 | ||
|
|
74e18b6cff | ||
|
|
21d02cd51f | ||
|
|
049e77b939 | ||
|
|
b9bfb81e36 | ||
|
|
2d4fae45d9 | ||
|
|
726adb7efc | ||
|
|
dbdfdeb0e1 | ||
|
|
1c41789c2a | ||
|
|
2f9de335db | ||
|
|
ba61bb6a6c | ||
|
|
17ab71ff75 | ||
|
|
4ad4e3e091 | ||
|
|
6ced5122e4 | ||
|
|
7d55b417f7 | ||
|
|
f0e0efc46f | ||
|
|
1efa2e07ad | ||
|
|
df3932f750 | ||
|
|
817d0b4902 | ||
|
|
ffd8e958fc | ||
|
|
ed33b75bad | ||
|
|
262e768fd3 | ||
|
|
bc3a9ce003 | ||
|
|
48005d87f8 | ||
|
|
e37e9c2ca3 | ||
|
|
8fde63b323 | ||
|
|
97338e4cd6 | ||
|
|
9645790a8b | ||
|
|
18800c6884 |
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -29,7 +29,7 @@ jobs:
|
||||
- run: ./target/debug/ruff_dev generate-all
|
||||
- run: git diff --quiet README.md || echo "::error file=README.md::This file is outdated. Run 'cargo dev generate-all'."
|
||||
- run: git diff --quiet ruff.schema.json || echo "::error file=ruff.schema.json::This file is outdated. Run 'cargo dev generate-all'."
|
||||
- run: git diff --exit-code -- README.md ruff.schema.json
|
||||
- run: git diff --exit-code -- README.md ruff.schema.json docs
|
||||
|
||||
cargo-fmt:
|
||||
name: "cargo fmt"
|
||||
|
||||
@@ -5,6 +5,14 @@ repos:
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/executablebooks/mdformat
|
||||
rev: 0.7.16
|
||||
hooks:
|
||||
- id: mdformat
|
||||
additional_dependencies:
|
||||
- mdformat-black
|
||||
- black==23.1.0 # Must be the latest version of Black
|
||||
|
||||
- repo: https://github.com/igorshubovych/markdownlint-cli
|
||||
rev: v0.33.0
|
||||
hooks:
|
||||
@@ -13,7 +21,6 @@ repos:
|
||||
- --disable
|
||||
- MD013 # line-length
|
||||
- MD033 # no-inline-html
|
||||
- MD041 # first-line-h1
|
||||
- --
|
||||
|
||||
- repo: local
|
||||
@@ -46,5 +53,16 @@ repos:
|
||||
pass_filenames: false
|
||||
exclude: target
|
||||
|
||||
# Black
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
exclude: |
|
||||
(?x)^(
|
||||
crates/ruff/resources/.*|
|
||||
crates/ruff_python_formatter/resources/.*
|
||||
)$
|
||||
|
||||
ci:
|
||||
skip: [cargo-fmt, clippy, dev-generate-all]
|
||||
|
||||
@@ -20,8 +20,7 @@ the intention of adding a stable public API in the future.
|
||||
### `select`, `extend-select`, `ignore`, and `extend-ignore` have new semantics ([#2312](https://github.com/charliermarsh/ruff/pull/2312))
|
||||
|
||||
Previously, the interplay between `select` and its related options could lead to unexpected
|
||||
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore
|
||||
ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
|
||||
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
|
||||
examples.)
|
||||
|
||||
When Ruff determines the enabled rule set, it has to reconcile `select` and `ignore` from a variety
|
||||
@@ -74,7 +73,7 @@ ruff rule E402 --format json # Works! (And preferred.)
|
||||
This change is largely backwards compatible -- most users should experience
|
||||
no change in behavior. However, please note the following exceptions:
|
||||
|
||||
* Subcommands will now fail when invoked with unsupported arguments, instead
|
||||
- Subcommands will now fail when invoked with unsupported arguments, instead
|
||||
of silently ignoring them. For example, the following will now fail:
|
||||
|
||||
```console
|
||||
@@ -83,16 +82,16 @@ no change in behavior. However, please note the following exceptions:
|
||||
|
||||
(the `clean` command doesn't support `--respect-gitignore`.)
|
||||
|
||||
* The semantics of `ruff <arg>` have changed slightly when `<arg>` is a valid subcommand.
|
||||
- The semantics of `ruff <arg>` have changed slightly when `<arg>` is a valid subcommand.
|
||||
For example, prior to this release, running `ruff rule` would run `ruff` over a file or
|
||||
directory called `rule`. Now, `ruff rule` would invoke the `rule` subcommand. This should
|
||||
only impact projects with files or directories named `rule`, `check`, `explain`, `clean`,
|
||||
or `generate-shell-completion`.
|
||||
|
||||
* Scripts that invoke ruff should supply `--` before any positional arguments.
|
||||
- Scripts that invoke ruff should supply `--` before any positional arguments.
|
||||
(The semantics of `ruff -- <arg>` have not changed.)
|
||||
|
||||
* `--explain` previously treated `--format grouped` as a synonym for `--format text`.
|
||||
- `--explain` previously treated `--format grouped` as a synonym for `--format text`.
|
||||
This is no longer supported; instead, use `--format text`.
|
||||
|
||||
## 0.0.226
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
* [Our Pledge](#our-pledge)
|
||||
* [Our Standards](#our-standards)
|
||||
* [Enforcement Responsibilities](#enforcement-responsibilities)
|
||||
* [Scope](#scope)
|
||||
* [Enforcement](#enforcement)
|
||||
* [Enforcement Guidelines](#enforcement-guidelines)
|
||||
* [1. Correction](#1-correction)
|
||||
* [2. Warning](#2-warning)
|
||||
* [3. Temporary Ban](#3-temporary-ban)
|
||||
* [4. Permanent Ban](#4-permanent-ban)
|
||||
* [Attribution](#attribution)
|
||||
- [Our Pledge](#our-pledge)
|
||||
- [Our Standards](#our-standards)
|
||||
- [Enforcement Responsibilities](#enforcement-responsibilities)
|
||||
- [Scope](#scope)
|
||||
- [Enforcement](#enforcement)
|
||||
- [Enforcement Guidelines](#enforcement-guidelines)
|
||||
- [1. Correction](#1-correction)
|
||||
- [2. Warning](#2-warning)
|
||||
- [3. Temporary Ban](#3-temporary-ban)
|
||||
- [4. Permanent Ban](#4-permanent-ban)
|
||||
- [Attribution](#attribution)
|
||||
|
||||
## Our Pledge
|
||||
|
||||
@@ -29,23 +29,23 @@ diverse, inclusive, and healthy community.
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
- Demonstrating empathy and kindness toward other people
|
||||
- Being respectful of differing opinions, viewpoints, and experiences
|
||||
- Giving and gracefully accepting constructive feedback
|
||||
- Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
- Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
- The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
- Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
- Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
@@ -132,7 +132,7 @@ version 2.0, available [here](https://www.contributor-covenant.org/version/2/0/c
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the [FAQ](https://www.contributor-covenant.org/faq).
|
||||
Translations are available [here](https://www.contributor-covenant.org/translations).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
@@ -2,16 +2,16 @@
|
||||
|
||||
Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.
|
||||
|
||||
* [The Basics](#the-basics)
|
||||
* [Prerequisites](#prerequisites)
|
||||
* [Development](#development)
|
||||
* [Project Structure](#project-structure)
|
||||
* [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
* [Rule naming convention](#rule-naming-convention)
|
||||
* [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
* [MkDocs](#mkdocs)
|
||||
* [Release Process](#release-process)
|
||||
* [Benchmarks](#benchmarks)
|
||||
- [The Basics](#the-basics)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Development](#development)
|
||||
- [Project Structure](#project-structure)
|
||||
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
|
||||
- [Rule naming convention](#rule-naming-convention)
|
||||
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
|
||||
- [MkDocs](#mkdocs)
|
||||
- [Release Process](#release-process)
|
||||
- [Benchmarks](#benchmarks)
|
||||
|
||||
## The Basics
|
||||
|
||||
@@ -56,9 +56,9 @@ Prior to opening a pull request, ensure that your code has been auto-formatted,
|
||||
and that it passes both the lint and test validation checks:
|
||||
|
||||
```shell
|
||||
cargo fmt --all # Auto-formatting...
|
||||
cargo fmt # Auto-formatting...
|
||||
cargo clippy --fix --workspace --all-targets --all-features # Linting...
|
||||
cargo test --all # Testing...
|
||||
cargo test # Testing...
|
||||
```
|
||||
|
||||
These checks will run on GitHub Actions when you open your Pull Request, but running them locally
|
||||
@@ -91,27 +91,27 @@ The vast majority of the code, including all lint rules, lives in the `ruff` cra
|
||||
|
||||
At time of writing, the repository includes the following crates:
|
||||
|
||||
* `crates/ruff`: library crate containing all lint rules and the core logic for running them.
|
||||
* `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
|
||||
* `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g., `cargo dev generate-all`).
|
||||
* `crates/ruff_macros`: library crate containing macros used by Ruff.
|
||||
* `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of standard library modules by versionb).
|
||||
* `crates/flake8_to_ruff`: binary crate for generating Ruff configuration from Flake8 configuration.
|
||||
- `crates/ruff`: library crate containing all lint rules and the core logic for running them.
|
||||
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
|
||||
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g., `cargo dev generate-all`).
|
||||
- `crates/ruff_macros`: library crate containing macros used by Ruff.
|
||||
- `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of standard library modules by versionb).
|
||||
- `crates/flake8_to_ruff`: binary crate for generating Ruff configuration from Flake8 configuration.
|
||||
|
||||
### Example: Adding a new lint rule
|
||||
|
||||
At a high level, the steps involved in adding a new lint rule are as follows:
|
||||
|
||||
1. Determine a name for the new rule as per our [rule naming convention](#rule-naming-convention).
|
||||
2. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`).
|
||||
3. In that file, define a violation struct. You can grep for `define_violation!` to see examples.
|
||||
4. Map the violation struct to a rule code in `crates/ruff/src/registry.rs` (e.g., `E402`).
|
||||
5. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast.rs` (for AST-based
|
||||
1. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`).
|
||||
1. In that file, define a violation struct. You can grep for `define_violation!` to see examples.
|
||||
1. Map the violation struct to a rule code in `crates/ruff/src/registry.rs` (e.g., `E402`).
|
||||
1. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast.rs` (for AST-based
|
||||
checks), `crates/ruff/src/checkers/tokens.rs` (for token-based checks), `crates/ruff/src/checkers/lines.rs`
|
||||
(for text-based checks), or `crates/ruff/src/checkers/filesystem.rs` (for filesystem-based
|
||||
checks).
|
||||
6. Add a test fixture.
|
||||
7. Update the generated files (documentation and generated code).
|
||||
1. Add a test fixture.
|
||||
1. Update the generated files (documentation and generated code).
|
||||
|
||||
To define the violation, start by creating a dedicated file for your rule under the appropriate
|
||||
rule linter (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
|
||||
@@ -135,7 +135,7 @@ Run `cargo dev generate-all` to generate the code for your new fixture. Then run
|
||||
locally with (e.g.) `cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
|
||||
|
||||
Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
|
||||
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test --all`.
|
||||
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test`.
|
||||
Your test will fail, but you'll be prompted to follow-up with `cargo insta review`. Accept the
|
||||
generated snapshot, then commit the snapshot file alongside the rest of your changes.
|
||||
|
||||
@@ -147,11 +147,14 @@ The rule name should make sense when read as "allow _rule-name_" or "allow _rule
|
||||
|
||||
This implies that rule names:
|
||||
|
||||
* should state the bad thing being checked for
|
||||
- should state the bad thing being checked for
|
||||
|
||||
* should not contain instructions on what you what you should use instead
|
||||
- should not contain instructions on what you what you should use instead
|
||||
(these belong in the rule documentation and the `autofix_title` for rules that have autofix)
|
||||
|
||||
When re-implementing rules from other linters, this convention is given more importance than
|
||||
preserving the original rule name.
|
||||
|
||||
### Example: Adding a new configuration option
|
||||
|
||||
Ruff's user-facing settings live in a few different places.
|
||||
@@ -188,13 +191,13 @@ To preview any changes to the documentation locally:
|
||||
pip install -r docs/requirements.txt
|
||||
```
|
||||
|
||||
2. Generate the MkDocs site with:
|
||||
1. Generate the MkDocs site with:
|
||||
|
||||
```shell
|
||||
python scripts/generate_mkdocs.py
|
||||
```
|
||||
|
||||
3. Run the development server with:
|
||||
1. Run the development server with:
|
||||
|
||||
```shell
|
||||
mkdocs serve
|
||||
|
||||
204
Cargo.lock
generated
204
Cargo.lock
generated
@@ -2,6 +2,12 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "Inflector"
|
||||
version = "0.11.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
@@ -86,7 +92,7 @@ version = "2.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9834fcc22e0874394a010230586367d4a3e9f11b560f469262678547e1d2575e"
|
||||
dependencies = [
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"doc-comment",
|
||||
"predicates",
|
||||
"predicates-core",
|
||||
@@ -175,9 +181,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832"
|
||||
checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"once_cell",
|
||||
@@ -298,9 +304,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.1.1"
|
||||
version = "4.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d6540eedc41f8a5a76cf3d8d458057dcdf817be4158a55b5f861f7a5483de75"
|
||||
checksum = "bd125be87bf4c255ebc50de0b7f4d2a6201e8ac3dc86e39c0ad081dc5e7236fe"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
]
|
||||
@@ -318,9 +324,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete_fig"
|
||||
version = "4.1.0"
|
||||
version = "4.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf0c76d8fcf782a1102ccfcd10ca8246e7fdd609c1cd6deddbb96cb638e9bb5c"
|
||||
checksum = "63a06158a72dbb088f864887b4409fd22600ba379f3cee3040f842234cc5c2d0"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
"clap_complete",
|
||||
@@ -550,9 +556,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8"
|
||||
checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cxxbridge-flags",
|
||||
@@ -562,9 +568,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx-build"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38"
|
||||
checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"codespan-reporting",
|
||||
@@ -577,15 +583,26 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-flags"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03"
|
||||
checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-macro"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263"
|
||||
checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derivative"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -753,7 +770,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.251"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
@@ -840,7 +857,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"fnv",
|
||||
"log",
|
||||
"regex",
|
||||
@@ -990,9 +1007,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "insta"
|
||||
version = "1.26.0"
|
||||
version = "1.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff"
|
||||
checksum = "fea5b3894afe466b4bcf0388630fc15e11938a6074af0cd637c825ba2ec8a099"
|
||||
dependencies = [
|
||||
"console",
|
||||
"lazy_static",
|
||||
@@ -1024,10 +1041,23 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.3"
|
||||
name = "is-macro"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef"
|
||||
checksum = "8a7d079e129b77477a49c5c4f1cfe9ce6c2c909ef52520693e8e811a714c7b20"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"io-lifetimes",
|
||||
@@ -1035,15 +1065,6 @@ dependencies = [
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is_executable"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa9acdc6d67b75e626ad644734e8bc6df893d9cd2a834129065d3dd6158ea9c8"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.10.5"
|
||||
@@ -1171,7 +1192,7 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"chic",
|
||||
"itertools",
|
||||
@@ -1186,7 +1207,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -1292,14 +1313,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
|
||||
checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
"windows-sys 0.42.0",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1428,18 +1449,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num_enum"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d829733185c1ca374f17e52b762f24f535ec625d2cc1f070e34c8a9068f341b"
|
||||
checksum = "3e0072973714303aa6e3631c7e8e777970cf4bdd25dc4932e41031027b8bcc4e"
|
||||
dependencies = [
|
||||
"num_enum_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_enum_derive"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2be1598bf1c313dcdd12092e3f1920f463462525a21b7b4e11b4168353d0123e"
|
||||
checksum = "0629cbd6b897944899b1f10496d9c4a7ac5878d45fd61bc22e9e79bfbbc29597"
|
||||
dependencies = [
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
@@ -1449,9 +1470,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.17.0"
|
||||
version = "1.17.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
|
||||
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
|
||||
|
||||
[[package]]
|
||||
name = "oorandom"
|
||||
@@ -1686,6 +1707,17 @@ dependencies = [
|
||||
"plotters-backend",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pmutil"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.17"
|
||||
@@ -1910,6 +1942,28 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccc7ce6435c33898517a30e85578cd204cbb696875efb93dec19a2d31294f810"
|
||||
dependencies = [
|
||||
"result-like-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like-derive"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fabf0a2e54f711c68c50d49f648a1a8a37adcb57353f518ac4df374f0788f42"
|
||||
dependencies = [
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn-ext",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
version = "0.16.20"
|
||||
@@ -1927,7 +1981,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.251"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bisection",
|
||||
@@ -1939,6 +1993,7 @@ dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"criterion",
|
||||
"derivative",
|
||||
"dirs",
|
||||
"fern",
|
||||
"getrandom",
|
||||
@@ -1947,7 +2002,7 @@ dependencies = [
|
||||
"ignore",
|
||||
"imperative",
|
||||
"insta",
|
||||
"is_executable",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
"js-sys",
|
||||
"libcst",
|
||||
@@ -1959,8 +2014,10 @@ dependencies = [
|
||||
"once_cell",
|
||||
"path-absolutize",
|
||||
"regex",
|
||||
"result-like",
|
||||
"ruff_macros",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
@@ -1983,7 +2040,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.0.251"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -2012,6 +2069,7 @@ dependencies = [
|
||||
"rustc-hash",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"similar",
|
||||
"strum",
|
||||
"textwrap",
|
||||
@@ -2082,15 +2140,42 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
"insta",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
"once_cell",
|
||||
"ruff_formatter",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"ruff_testing_macros",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
"similar",
|
||||
"test-case",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_rustpython"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"once_cell",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_testing_macros"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_text_size"
|
||||
version = "0.0.0"
|
||||
@@ -2146,7 +2231,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-ast"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"rustpython-compiler-core",
|
||||
@@ -2155,7 +2240,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-common"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"ascii",
|
||||
"bitflags",
|
||||
@@ -2180,7 +2265,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-compiler-core"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bitflags",
|
||||
@@ -2197,7 +2282,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-parser"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=ddf497623ae56d21aa4166ff1c0725a7db67e955#ddf497623ae56d21aa4166ff1c0725a7db67e955"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"anyhow",
|
||||
@@ -2468,15 +2553,24 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.107"
|
||||
version = "1.0.108"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
|
||||
checksum = "d56e159d99e6c2b93995d171050271edb50ecc5288fbc7cc17de8fdce4e58c14"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn-ext"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b86cb2b68c5b3c078cac02588bc23f3c04bb828c5d3aedd17980876ec6a7be6"
|
||||
dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.3.0"
|
||||
@@ -2854,9 +2948,11 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
||||
|
||||
[[package]]
|
||||
name = "unicode_names2"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "029df4cc8238cefc911704ff8fa210853a0f3bce2694d8f51181dd41ee0f3301"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/youknowone/unicode_names2.git?tag=v0.6.0+character-alias#4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde"
|
||||
dependencies = [
|
||||
"phf",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
|
||||
@@ -3,18 +3,19 @@ members = ["crates/*"]
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
rust-version = "1.65.0"
|
||||
rust-version = "1.67.0"
|
||||
|
||||
[workspace.dependencies]
|
||||
anyhow = { version = "1.0.66" }
|
||||
clap = { version = "4.0.1", features = ["derive"] }
|
||||
itertools = { version = "0.10.5" }
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a8725d161fe8b3ed73a6758b21e177" }
|
||||
is-macro = { version = "0.2.2" }
|
||||
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
|
||||
once_cell = { version = "1.16.0" }
|
||||
regex = { version = "1.6.0" }
|
||||
rustc-hash = { version = "1.1.0" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "ddf497623ae56d21aa4166ff1c0725a7db67e955" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "ddf497623ae56d21aa4166ff1c0725a7db67e955" }
|
||||
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
|
||||
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
|
||||
schemars = { version = "0.8.11" }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_json = { version = "1.0.87" }
|
||||
|
||||
608
README.md
608
README.md
@@ -24,17 +24,17 @@ An extremely fast Python linter, written in Rust.
|
||||
<i>Linting the CPython codebase from scratch.</i>
|
||||
</p>
|
||||
|
||||
* ⚡️ 10-100x faster than existing linters
|
||||
* 🐍 Installable via `pip`
|
||||
* 🛠️ `pyproject.toml` support
|
||||
* 🤝 Python 3.11 compatibility
|
||||
* 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
* 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
* 📏 Over [400 built-in rules](https://beta.ruff.rs/docs/rules/) (and growing)
|
||||
* ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
|
||||
* 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
|
||||
* ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
|
||||
* 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](#pyprojecttoml-discovery)
|
||||
- ⚡️ 10-100x faster than existing linters
|
||||
- 🐍 Installable via `pip`
|
||||
- 🛠️ `pyproject.toml` support
|
||||
- 🤝 Python 3.11 compatibility
|
||||
- 📦 Built-in caching, to avoid re-analyzing unchanged files
|
||||
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
|
||||
- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
|
||||
- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
|
||||
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
|
||||
- ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
|
||||
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)
|
||||
|
||||
Ruff aims to be orders of magnitude faster than alternative tools while integrating more
|
||||
functionality behind a single, common interface.
|
||||
@@ -47,11 +47,11 @@ all while executing tens or hundreds of times faster than any individual tool.
|
||||
|
||||
Ruff is extremely actively developed and used in major open-source projects like:
|
||||
|
||||
* [pandas](https://github.com/pandas-dev/pandas)
|
||||
* [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
* [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
* [Apache Airflow](https://github.com/apache/airflow)
|
||||
* [SciPy](https://github.com/scipy/scipy)
|
||||
- [pandas](https://github.com/pandas-dev/pandas)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
|
||||
...and many more.
|
||||
|
||||
@@ -97,21 +97,19 @@ developer of [Zulip](https://github.com/zulip/zulip):
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
1. [Installation and Usage](#installation-and-usage)
|
||||
2. [Configuration](#configuration)
|
||||
3. [Supported Rules](#supported-rules)
|
||||
4. [Contributing](#contributing)
|
||||
5. [Support](#support)
|
||||
6. [Acknowledgements](#acknowledgements)
|
||||
7. [Who's Using Ruff?](#whos-using-ruff)
|
||||
8. [License](#license)
|
||||
1. [Getting Started](#getting-started)
|
||||
1. [Configuration](#configuration)
|
||||
1. [Rules](#rules)
|
||||
1. [Contributing](#contributing)
|
||||
1. [Support](#support)
|
||||
1. [Acknowledgements](#acknowledgements)
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Installation and Usage
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
<!-- Begin section: Installation and Usage -->
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
@@ -120,31 +118,8 @@ Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
pip install ruff
|
||||
```
|
||||
|
||||
For **macOS Homebrew** and **Linuxbrew** users, Ruff is also available as [`ruff`](https://formulae.brew.sh/formula/ruff) on Homebrew:
|
||||
|
||||
```shell
|
||||
brew install ruff
|
||||
```
|
||||
|
||||
For **Conda** users, Ruff is also available as [`ruff`](https://anaconda.org/conda-forge/ruff) on `conda-forge`:
|
||||
|
||||
```shell
|
||||
conda install -c conda-forge ruff
|
||||
```
|
||||
|
||||
For **Arch Linux** users, Ruff is also available as [`ruff`](https://archlinux.org/packages/community/x86_64/ruff/) on the official repositories:
|
||||
|
||||
```shell
|
||||
pacman -S ruff
|
||||
```
|
||||
|
||||
For **Alpine** users, Ruff is also available as [`ruff`](https://pkgs.alpinelinux.org/package/edge/testing/x86_64/ruff) on the testing repositories:
|
||||
|
||||
```shell
|
||||
apk add ruff
|
||||
```
|
||||
|
||||
[](https://repology.org/project/ruff-python-linter/versions)
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
and with [a variety of other package managers](https://beta.ruff.rs/docs/installation/).
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -157,51 +132,30 @@ ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
|
||||
ruff check path/to/code/to/file.py # Lint `file.py`
|
||||
```
|
||||
|
||||
You can run Ruff in `--watch` mode to automatically re-run on-change:
|
||||
|
||||
```shell
|
||||
ruff check path/to/code/ --watch
|
||||
```
|
||||
|
||||
Ruff also works with [pre-commit](https://pre-commit.com):
|
||||
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:
|
||||
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.251'
|
||||
rev: 'v0.0.253'
|
||||
hooks:
|
||||
- id: ruff
|
||||
```
|
||||
|
||||
Or, to enable autofix:
|
||||
Ruff can also be used as a [VS Code extension](https://github.com/charliermarsh/ruff-vscode) or
|
||||
alongside any other editor through the [Ruff LSP](https://github.com/charliermarsh/ruff-lsp).
|
||||
|
||||
```yaml
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: 'v0.0.251'
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
```
|
||||
### Configuration
|
||||
|
||||
<!-- End section: Installation and Usage -->
|
||||
|
||||
## Configuration
|
||||
|
||||
<!-- Begin section: Configuration -->
|
||||
|
||||
Ruff can be configured via a `pyproject.toml` file, a `ruff.toml` file, or through the command line.
|
||||
|
||||
For a complete enumeration of the available configuration options, see the
|
||||
[documentation](https://beta.ruff.rs/docs/settings/).
|
||||
|
||||
### Configure via `pyproject.toml`
|
||||
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
|
||||
[_Configuration_](https://beta.ruff.rs/docs/configuration/), or [_Settings_](https://beta.ruff.rs/docs/settings/)
|
||||
for a complete list of all configuration options).
|
||||
|
||||
If left unspecified, the default configuration is equivalent to:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Enable Pyflakes `E` and `F` codes by default.
|
||||
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
|
||||
select = ["E", "F"]
|
||||
ignore = []
|
||||
|
||||
@@ -232,7 +186,6 @@ exclude = [
|
||||
"node_modules",
|
||||
"venv",
|
||||
]
|
||||
per-file-ignores = {}
|
||||
|
||||
# Same as Black.
|
||||
line-length = 88
|
||||
@@ -248,428 +201,42 @@ target-version = "py310"
|
||||
max-complexity = 10
|
||||
```
|
||||
|
||||
As an example, the following would configure Ruff to: (1) enforce flake8-bugbear rules, in addition
|
||||
to the defaults; (2) avoid enforcing line-length violations (`E501`); (3) avoid attempting to fix
|
||||
flake8-bugbear (`B`) violations; and (4) ignore import-at-top-of-file violations (`E402`) in
|
||||
`__init__.py` files:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Enable flake8-bugbear (`B`) rules.
|
||||
select = ["E", "F", "B"]
|
||||
|
||||
# Never enforce `E501` (line length violations).
|
||||
ignore = ["E501"]
|
||||
|
||||
# Avoid trying to fix flake8-bugbear (`B`) violations.
|
||||
unfixable = ["B"]
|
||||
|
||||
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
|
||||
[tool.ruff.per-file-ignores]
|
||||
"__init__.py" = ["E402"]
|
||||
"path/to/file.py" = ["E402"]
|
||||
```
|
||||
|
||||
Plugin configurations should be expressed as subsections, e.g.:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
# Add "Q" to the list of enabled codes.
|
||||
select = ["E", "F", "Q"]
|
||||
|
||||
[tool.ruff.flake8-quotes]
|
||||
docstring-quotes = "double"
|
||||
```
|
||||
|
||||
Ruff mirrors Flake8's rule code system, in which each rule code consists of a one-to-three letter
|
||||
prefix, followed by three digits (e.g., `F401`). The prefix indicates the "source" of the rule
|
||||
(e.g., `F` for Pyflakes, `E` for pycodestyle, `ANN` for flake8-annotations). The set of enabled
|
||||
rules is determined by the `select` and `ignore` options, which support both the full code (e.g.,
|
||||
`F401`) and the prefix (e.g., `F`).
|
||||
|
||||
As a special-case, Ruff also supports the `ALL` code, which enables all rules. Note that some of the
|
||||
pydocstyle rules conflict (e.g., `D203` and `D211`) as they represent alternative docstring
|
||||
formats. Enabling `ALL` without further configuration may result in suboptimal behavior, especially
|
||||
for the pydocstyle plugin.
|
||||
|
||||
If you're wondering how to configure Ruff, here are some **recommended guidelines**:
|
||||
|
||||
* Prefer `select` and `ignore` over `extend-select` and `extend-ignore`, to make your rule set
|
||||
explicit.
|
||||
* Use `ALL` with discretion. Enabling `ALL` will implicitly enable new rules whenever you upgrade.
|
||||
* Start with a small set of rules (`select = ["E", "F"]`) and add a category at-a-time. For example,
|
||||
you might consider expanding to `select = ["E", "F", "B"]` to enable the popular flake8-bugbear
|
||||
extension.
|
||||
* By default, Ruff's autofix is aggressive. If you find that it's too aggressive for your liking,
|
||||
consider turning off autofix for specific rules or categories (see: [FAQ](https://beta.ruff.rs/docs/faq/#ruff-tried-to-fix-something-but-it-broke-my-code-what-should-i-do)).
|
||||
|
||||
### Configure via `ruff.toml`
|
||||
|
||||
As an alternative to `pyproject.toml`, Ruff will also respect a `ruff.toml` file, which implements
|
||||
an equivalent schema (though the `[tool.ruff]` hierarchy can be omitted). For example, the
|
||||
`pyproject.toml` described above would be represented via the following `ruff.toml`:
|
||||
|
||||
```toml
|
||||
# Enable flake8-bugbear (`B`) rules.
|
||||
select = ["E", "F", "B"]
|
||||
|
||||
# Never enforce `E501` (line length violations).
|
||||
ignore = ["E501"]
|
||||
|
||||
# Avoid trying to fix flake8-bugbear (`B`) violations.
|
||||
unfixable = ["B"]
|
||||
|
||||
# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
|
||||
[per-file-ignores]
|
||||
"__init__.py" = ["E402"]
|
||||
"path/to/file.py" = ["E402"]
|
||||
```
|
||||
|
||||
For a full list of configurable options, see the [list of all options](https://beta.ruff.rs/docs/settings/).
|
||||
|
||||
### Command-line interface
|
||||
|
||||
Some configuration settings can be provided via the command-line, such as those related to
|
||||
Some configuration options can be provided via the command-line, such as those related to
|
||||
rule enablement and disablement, file discovery, logging level, and more:
|
||||
|
||||
```shell
|
||||
ruff check path/to/code/ --select F401 --select F403 --quiet
|
||||
```
|
||||
|
||||
See `ruff help` for more on Ruff's top-level commands:
|
||||
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
|
||||
linting command.
|
||||
|
||||
<!-- Begin auto-generated command help. -->
|
||||
|
||||
```text
|
||||
Ruff: An extremely fast Python linter.
|
||||
|
||||
Usage: ruff [OPTIONS] <COMMAND>
|
||||
|
||||
Commands:
|
||||
check Run Ruff on the given files or directories (default)
|
||||
rule Explain a rule
|
||||
config List or describe the available configuration options
|
||||
linter List all supported upstream linters
|
||||
clean Clear any caches in the current directory and any subdirectories
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
|
||||
Options:
|
||||
-h, --help Print help
|
||||
-V, --version Print version
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
|
||||
For help with a specific command, see: `ruff help <command>`.
|
||||
```
|
||||
|
||||
<!-- End auto-generated command help. -->
|
||||
|
||||
Or `ruff help check` for more on the linting command:
|
||||
|
||||
<!-- Begin auto-generated subcommand help. -->
|
||||
|
||||
```text
|
||||
Run Ruff on the given files or directories (default)
|
||||
|
||||
Usage: ruff check [OPTIONS] [FILES]...
|
||||
|
||||
Arguments:
|
||||
[FILES]... List of files or directories to check
|
||||
|
||||
Options:
|
||||
--fix
|
||||
Attempt to automatically fix lint violations
|
||||
--show-source
|
||||
Show violations with source code
|
||||
--show-fixes
|
||||
Show an enumeration of all autofixed lint violations
|
||||
--diff
|
||||
Avoid writing any fixed files back; instead, output a diff for each changed file to stdout
|
||||
-w, --watch
|
||||
Run in watch mode by re-running whenever files change
|
||||
--fix-only
|
||||
Fix any fixable lint violations, but don't report on leftover violations. Implies `--fix`
|
||||
--format <FORMAT>
|
||||
Output serialization format for violations [env: RUFF_FORMAT=] [possible values: text, json, junit, grouped, github, gitlab, pylint]
|
||||
--target-version <TARGET_VERSION>
|
||||
The minimum Python version that should be supported
|
||||
--config <CONFIG>
|
||||
Path to the `pyproject.toml` or `ruff.toml` file to use for configuration
|
||||
--statistics
|
||||
Show counts for every rule with at least one violation
|
||||
--add-noqa
|
||||
Enable automatic additions of `noqa` directives to failing lines
|
||||
--show-files
|
||||
See the files Ruff will be run against with the current settings
|
||||
--show-settings
|
||||
See the settings Ruff will use to lint a given Python file
|
||||
-h, --help
|
||||
Print help
|
||||
|
||||
Rule selection:
|
||||
--select <RULE_CODE>
|
||||
Comma-separated list of rule codes to enable (or ALL, to enable all rules)
|
||||
--ignore <RULE_CODE>
|
||||
Comma-separated list of rule codes to disable
|
||||
--extend-select <RULE_CODE>
|
||||
Like --select, but adds additional rule codes on top of the selected ones
|
||||
--per-file-ignores <PER_FILE_IGNORES>
|
||||
List of mappings from file pattern to code to exclude
|
||||
--fixable <RULE_CODE>
|
||||
List of rule codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
--unfixable <RULE_CODE>
|
||||
List of rule codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
|
||||
File selection:
|
||||
--exclude <FILE_PATTERN> List of paths, used to omit files and/or directories from analysis
|
||||
--extend-exclude <FILE_PATTERN> Like --exclude, but adds additional files and directories on top of those already excluded
|
||||
--respect-gitignore Respect file exclusions via `.gitignore` and other standard ignore files
|
||||
--force-exclude Enforce exclusions, even for paths passed to Ruff directly on the command-line
|
||||
|
||||
Miscellaneous:
|
||||
-n, --no-cache
|
||||
Disable cache reads
|
||||
--isolated
|
||||
Ignore all configuration files
|
||||
--cache-dir <CACHE_DIR>
|
||||
Path to the cache directory [env: RUFF_CACHE_DIR=]
|
||||
--stdin-filename <STDIN_FILENAME>
|
||||
The name of the file when passing it through stdin
|
||||
-e, --exit-zero
|
||||
Exit with status code "0", even upon detecting lint violations
|
||||
--exit-non-zero-on-fix
|
||||
Exit with a non-zero status code if any files were modified via autofix, even if no lint violations remain
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
```
|
||||
|
||||
<!-- End auto-generated subcommand help. -->
|
||||
|
||||
### `pyproject.toml` discovery
|
||||
|
||||
Similar to [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff supports hierarchical configuration, such that the "closest" `pyproject.toml` file in the
|
||||
directory hierarchy is used for every individual file, with all paths in the `pyproject.toml` file
|
||||
(e.g., `exclude` globs, `src` paths) being resolved relative to the directory containing the
|
||||
`pyproject.toml` file.
|
||||
|
||||
There are a few exceptions to these rules:
|
||||
|
||||
1. In locating the "closest" `pyproject.toml` file for a given path, Ruff ignores any
|
||||
`pyproject.toml` files that lack a `[tool.ruff]` section.
|
||||
2. If a configuration file is passed directly via `--config`, those settings are used across all
|
||||
files. Any relative paths in that configuration file (like `exclude` globs or `src` paths) are
|
||||
resolved relative to the _current working directory_.
|
||||
3. If no `pyproject.toml` file is found in the filesystem hierarchy, Ruff will fall back to using
|
||||
a default configuration. If a user-specific configuration file exists
|
||||
at `${config_dir}/ruff/pyproject.toml`, that file will be used instead of the default
|
||||
configuration, with `${config_dir}` being determined via the [`dirs`](https://docs.rs/dirs/4.0.0/dirs/fn.config_dir.html)
|
||||
crate, and all relative paths being again resolved relative to the _current working directory_.
|
||||
4. Any `pyproject.toml`-supported settings that are provided on the command-line (e.g., via
|
||||
`--select`) will override the settings in _every_ resolved configuration file.
|
||||
|
||||
Unlike [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff does not merge settings across configuration files; instead, the "closest" configuration file
|
||||
is used, and any parent configuration files are ignored. In lieu of this implicit cascade, Ruff
|
||||
supports an [`extend`](https://beta.ruff.rs/docs/settings#extend) field, which allows you to inherit the settings from another
|
||||
`pyproject.toml` file, like so:
|
||||
|
||||
```toml
|
||||
# Extend the `pyproject.toml` file in the parent directory.
|
||||
extend = "../pyproject.toml"
|
||||
# But use a different line length.
|
||||
line-length = 100
|
||||
```
|
||||
|
||||
All of the above rules apply equivalently to `ruff.toml` files. If Ruff detects both a `ruff.toml`
|
||||
and `pyproject.toml` file, it will defer to the `ruff.toml`.
|
||||
|
||||
### Python file discovery
|
||||
|
||||
When passed a path on the command-line, Ruff will automatically discover all Python files in that
|
||||
path, taking into account the [`exclude`](https://beta.ruff.rs/docs/settings#exclude) and
|
||||
[`extend-exclude`](https://beta.ruff.rs/docs/settings#extend-exclude) settings in each directory's
|
||||
`pyproject.toml` file.
|
||||
|
||||
By default, Ruff will also skip any files that are omitted via `.ignore`, `.gitignore`,
|
||||
`.git/info/exclude`, and global `gitignore` files (see: [`respect-gitignore`](https://beta.ruff.rs/docs/settings#respect-gitignore)).
|
||||
|
||||
Files that are passed to `ruff` directly are always linted, regardless of the above criteria.
|
||||
For example, `ruff check /path/to/excluded/file.py` will always lint `file.py`.
|
||||
|
||||
### Rule resolution
|
||||
|
||||
The set of enabled rules is controlled via the [`select`](https://beta.ruff.rs/docs/settings#select)
|
||||
and [`ignore`](https://beta.ruff.rs/docs/settings#ignore) settings, along with the
|
||||
[`extend-select`](https://beta.ruff.rs/docs/settings#extend-select) and
|
||||
[`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore) modifiers.
|
||||
|
||||
To resolve the enabled rule set, Ruff may need to reconcile `select` and `ignore` from a variety
|
||||
of sources, including the current `pyproject.toml`, any inherited `pyproject.toml` files, and the
|
||||
CLI (e.g., `--select`).
|
||||
|
||||
In those scenarios, Ruff uses the "highest-priority" `select` as the basis for the rule set, and
|
||||
then applies any `extend-select`, `ignore`, and `extend-ignore` adjustments. CLI options are given
|
||||
higher priority than `pyproject.toml` options, and the current `pyproject.toml` file is given higher
|
||||
priority than any inherited `pyproject.toml` files.
|
||||
|
||||
For example, given the following `pyproject.toml` file:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
select = ["E", "F"]
|
||||
ignore = ["F401"]
|
||||
```
|
||||
|
||||
Running `ruff check --select F401` would result in Ruff enforcing `F401`, and no other rules.
|
||||
|
||||
Running `ruff check --extend-select B` would result in Ruff enforcing the `E`, `F`, and `B` rules, with
|
||||
the exception of `F401`.
|
||||
|
||||
### Suppressing errors
|
||||
|
||||
To omit a lint rule entirely, add it to the "ignore" list via [`ignore`](https://beta.ruff.rs/docs/settings#ignore)
|
||||
or [`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore), either on the command-line
|
||||
or in your `pyproject.toml` file.
|
||||
|
||||
To ignore a violation inline, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
|
||||
To ignore an individual violation, add `# noqa: {code}` to the end of the line, like so:
|
||||
|
||||
```python
|
||||
# Ignore F841.
|
||||
x = 1 # noqa: F841
|
||||
|
||||
# Ignore E741 and F841.
|
||||
i = 1 # noqa: E741, F841
|
||||
|
||||
# Ignore _all_ violations.
|
||||
x = 1 # noqa
|
||||
```
|
||||
|
||||
Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
|
||||
will apply to the entire string, like so:
|
||||
|
||||
```python
|
||||
"""Lorem ipsum dolor sit amet.
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
|
||||
""" # noqa: E501
|
||||
```
|
||||
|
||||
To ignore all violations across an entire file, add `# ruff: noqa` to any line in the file, like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa
|
||||
```
|
||||
|
||||
To ignore a specific rule across an entire file, add `# ruff: noqa: {code}` to any line in the file,
|
||||
like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa: F841
|
||||
```
|
||||
|
||||
Or see the [`per-file-ignores`](https://beta.ruff.rs/docs/settings#per-file-ignores) configuration
|
||||
setting, which enables the same functionality via a `pyproject.toml` file.
|
||||
|
||||
Note that Ruff will also respect Flake8's `# flake8: noqa` directive, and will treat it as
|
||||
equivalent to `# ruff: noqa`.
|
||||
|
||||
#### Automatic error suppression
|
||||
|
||||
Ruff supports several workflows to aid in `noqa` management.
|
||||
|
||||
First, Ruff provides a special rule code, `RUF100`, to enforce that your `noqa` directives are
|
||||
"valid", in that the violations they _say_ they ignore are actually being triggered on that line (and
|
||||
thus suppressed). You can run `ruff check /path/to/file.py --extend-select RUF100` to flag unused `noqa`
|
||||
directives.
|
||||
|
||||
Second, Ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
|
||||
You can run `ruff check /path/to/file.py --extend-select RUF100 --fix` to automatically remove unused
|
||||
`noqa` directives.
|
||||
|
||||
Third, Ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
|
||||
migrating a new codebase to Ruff. You can run `ruff check /path/to/file.py --add-noqa` to automatically
|
||||
add `noqa` directives to all failing lines, with the appropriate rule codes.
|
||||
|
||||
#### Action comments
|
||||
|
||||
Ruff respects `isort`'s [action comments](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
(`# isort: skip_file`, `# isort: on`, `# isort: off`, `# isort: skip`, and `# isort: split`), which
|
||||
enable selectively enabling and disabling import sorting for blocks of code and other inline
|
||||
configuration.
|
||||
|
||||
See the [`isort` documentation](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
for more.
|
||||
|
||||
### Exit codes
|
||||
|
||||
By default, Ruff exits with the following status codes:
|
||||
|
||||
* `0` if no violations were found, or if all present violations were fixed automatically.
|
||||
* `1` if violations were found.
|
||||
* `2` if Ruff terminates abnormally due to invalid configuration, invalid CLI options, or an internal error.
|
||||
|
||||
This convention mirrors that of tools like ESLint, Prettier, and RuboCop.
|
||||
|
||||
Ruff supports two command-line flags that alter its exit code behavior:
|
||||
|
||||
* `--exit-zero` will cause Ruff to exit with a status code of `0` even if violations were found.
|
||||
Note that Ruff will still exit with a status code of `2` if it terminates abnormally.
|
||||
* `--exit-non-zero-on-fix` will cause Ruff to exit with a status code of `1` if violations were
|
||||
found, _even if_ all such violations were fixed automatically. Note that the use of
|
||||
`--exit-non-zero-on-fix` can result in a non-zero exit code even if no violations remain after
|
||||
autofixing.
|
||||
|
||||
### Autocompletion
|
||||
|
||||
Ruff supports autocompletion for most shells. A shell-specific completion script can be generated
|
||||
by `ruff generate-shell-completion <SHELL>`, where `<SHELL>` is one of `bash`, `elvish`, `fig`, `fish`,
|
||||
`powershell`, or `zsh`.
|
||||
|
||||
The exact steps required to enable autocompletion will vary by shell. For example instructions,
|
||||
see the [Poetry](https://python-poetry.org/docs/#enable-tab-completion-for-bash-fish-or-zsh) or
|
||||
[ripgrep](https://github.com/BurntSushi/ripgrep/blob/master/FAQ.md#complete) documentation.
|
||||
|
||||
As an example: to enable autocompletion for Zsh, run
|
||||
`ruff generate-shell-completion zsh > ~/.zfunc/_ruff`. Then add the following lines to your
|
||||
`~/.zshrc` file, if they're not already present:
|
||||
|
||||
```zsh
|
||||
fpath+=~/.zfunc
|
||||
autoload -Uz compinit && compinit
|
||||
```
|
||||
|
||||
<!-- End section: Configuration -->
|
||||
|
||||
## Supported Rules
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
Ruff supports over 400 lint rules, many of which are inspired by popular tools like Flake8, isort,
|
||||
pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
**Ruff supports over 500 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
Rust as a first-party feature.
|
||||
|
||||
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
|
||||
and a [subset](https://beta.ruff.rs/docs/rules/#error-e) of the `E` category, omitting those
|
||||
stylistic rules made obsolete by the use of an autoformatter, like [Black](https://github.com/psf/black).
|
||||
stylistic rules made obsolete by the use of an autoformatter, like
|
||||
[Black](https://github.com/psf/black).
|
||||
|
||||
If you're just getting started with Ruff, **the default rule set is a great place to start**: it
|
||||
catches a wide variety of common errors (like unused imports) with zero configuration.
|
||||
|
||||
<!-- End section: Rules -->
|
||||
|
||||
For a complete enumeration, see the [list of rules](https://beta.ruff.rs/docs/rules/) in the
|
||||
Ruff documentation.
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md). You
|
||||
can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
[**contributing guidelines**](https://beta.ruff.rs/docs/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Support
|
||||
|
||||
@@ -678,8 +245,6 @@ or feel free to [**open a new one**](https://github.com/charliermarsh/ruff/issue
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
<!-- Begin section: Acknowledgements -->
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
@@ -702,46 +267,45 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/c
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
<!-- End section: Acknowledgements -->
|
||||
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used in a number of major open-source projects, including:
|
||||
|
||||
* [pandas](https://github.com/pandas-dev/pandas)
|
||||
* [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
* [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
* [Diffusers (Hugging Face)](https://github.com/huggingface/diffusers)
|
||||
* [Apache Airflow](https://github.com/apache/airflow)
|
||||
* [SciPy](https://github.com/scipy/scipy)
|
||||
* [Zulip](https://github.com/zulip/zulip)
|
||||
* [Bokeh](https://github.com/bokeh/bokeh)
|
||||
* [Pydantic](https://github.com/pydantic/pydantic)
|
||||
* [Dagster](https://github.com/dagster-io/dagster)
|
||||
* [Dagger](https://github.com/dagger/dagger)
|
||||
* [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
* [Hatch](https://github.com/pypa/hatch)
|
||||
* [PDM](https://github.com/pdm-project/pdm)
|
||||
* [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
* [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
* [ONNX](https://github.com/onnx/onnx)
|
||||
* [Polars](https://github.com/pola-rs/polars)
|
||||
* [Ibis](https://github.com/ibis-project/ibis)
|
||||
* [Synapse (Matrix)](https://github.com/matrix-org/synapse)
|
||||
* [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
|
||||
* [Dispatch (Netflix)](https://github.com/Netflix/dispatch)
|
||||
* [Saleor](https://github.com/saleor/saleor)
|
||||
* [Pynecone](https://github.com/pynecone-io/pynecone)
|
||||
* [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
* [Home Assistant](https://github.com/home-assistant/core)
|
||||
* [Pylint](https://github.com/PyCQA/pylint)
|
||||
* [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
* [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
|
||||
* [build (PyPA)](https://github.com/pypa/build)
|
||||
* [Babel](https://github.com/python-babel/babel)
|
||||
* [featuretools](https://github.com/alteryx/featuretools)
|
||||
* [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
* [ZenML](https://github.com/zenml-io/zenml)
|
||||
- [pandas](https://github.com/pandas-dev/pandas)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
- [Diffusers (Hugging Face)](https://github.com/huggingface/diffusers)
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
- [Zulip](https://github.com/zulip/zulip)
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
- [Hatch](https://github.com/pypa/hatch)
|
||||
- [PDM](https://github.com/pdm-project/pdm)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
- [Polars](https://github.com/pola-rs/polars)
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [Synapse (Matrix)](https://github.com/matrix-org/synapse)
|
||||
- [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
|
||||
- [Dispatch (Netflix)](https://github.com/Netflix/dispatch)
|
||||
- [Saleor](https://github.com/saleor/saleor)
|
||||
- [Pynecone](https://github.com/pynecone-io/pynecone)
|
||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
- [Home Assistant](https://github.com/home-assistant/core)
|
||||
- [Pylint](https://github.com/PyCQA/pylint)
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
|
||||
- [build (PyPA)](https://github.com/pypa/build)
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- [featuretools](https://github.com/alteryx/featuretools)
|
||||
- [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
- [ZenML](https://github.com/zenml-io/zenml)
|
||||
- [delta-rs](https://github.com/delta-io/delta-rs)
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.251"
|
||||
version = "0.0.253"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
|
||||
|
||||
@@ -84,7 +84,7 @@ flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||
1. Ruff only supports a subset of the Flake configuration options. `flake8-to-ruff` will warn on and
|
||||
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||
configuration options that don't exist in Flake8.)
|
||||
2. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||
codes from unsupported plugins. (See the [Ruff README](https://github.com/charliermarsh/ruff#user-content-how-does-ruff-compare-to-flake8)
|
||||
for the complete list of supported plugins.)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.251"
|
||||
version = "0.0.253"
|
||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
@@ -16,6 +16,10 @@ crate-type = ["cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
ruff_rustpython = { path = "../ruff_rustpython" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bisection = { version = "0.1.0" }
|
||||
bitflags = { version = "1.3.2" }
|
||||
@@ -23,12 +27,14 @@ cfg-if = { version = "1.0.0" }
|
||||
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
|
||||
clap = { workspace = true, features = ["derive", "env", "string"] }
|
||||
colored = { version = "2.0.0" }
|
||||
derivative = { version = "2.2.0" }
|
||||
dirs = { version = "4.0.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
glob = { version = "0.3.0" }
|
||||
globset = { version = "0.4.9" }
|
||||
ignore = { version = "0.4.18" }
|
||||
imperative = { version = "1.0.3" }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
libcst = { workspace = true }
|
||||
log = { version = "0.4.17" }
|
||||
@@ -39,8 +45,7 @@ num-traits = "0.2.15"
|
||||
once_cell = { workspace = true }
|
||||
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
|
||||
regex = { workspace = true }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
result-like = "0.4.6"
|
||||
rustc-hash = { workspace = true }
|
||||
rustpython-common = { workspace = true }
|
||||
rustpython-parser = { workspace = true }
|
||||
@@ -57,7 +62,6 @@ titlecase = { version = "2.2.1" }
|
||||
toml = { workspace = true }
|
||||
|
||||
# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
|
||||
# For (future) wasm-pack support
|
||||
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]
|
||||
getrandom = { version = "0.2.7", features = ["js"] }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
@@ -66,9 +70,6 @@ serde-wasm-bindgen = { version = "0.4" }
|
||||
js-sys = { version = "0.3.60" }
|
||||
wasm-bindgen = { version = "0.2.83" }
|
||||
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies]
|
||||
is_executable = "1.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { version = "1.19.0", features = ["yaml", "redactions"] }
|
||||
test-case = { version = "2.2.2" }
|
||||
|
||||
@@ -107,3 +107,7 @@ class Foo:
|
||||
# ANN101
|
||||
def foo(self, /, a: int, b: int) -> int:
|
||||
pass
|
||||
|
||||
|
||||
# OK
|
||||
def f(*args: *tuple[int]) -> None: ...
|
||||
|
||||
@@ -19,6 +19,8 @@ token = "s3cr3t"
|
||||
secrete = "s3cr3t"
|
||||
safe = password = "s3cr3t"
|
||||
password = safe = "s3cr3t"
|
||||
PASSWORD = "s3cr3t"
|
||||
PassWord = "s3cr3t"
|
||||
|
||||
d["password"] = "s3cr3t"
|
||||
d["pass"] = "s3cr3t"
|
||||
@@ -61,3 +63,15 @@ if token == "3\t4":
|
||||
|
||||
if token == "5\r6":
|
||||
pass
|
||||
|
||||
|
||||
# These should not be flagged
|
||||
passed_msg = "You have passed!"
|
||||
compassion = "Please don't match!"
|
||||
impassable = "You shall not pass!"
|
||||
passwords = ""
|
||||
PASSWORDS = ""
|
||||
passphrases = ""
|
||||
PassPhrases = ""
|
||||
tokens = ""
|
||||
secrets = ""
|
||||
|
||||
@@ -59,6 +59,10 @@ getattr(someobj, attrname, False)
|
||||
mylist.index(True)
|
||||
int(True)
|
||||
str(int(False))
|
||||
cfg.get("hello", True)
|
||||
cfg.getint("hello", True)
|
||||
cfg.getfloat("hello", True)
|
||||
cfg.getboolean("hello", True)
|
||||
|
||||
|
||||
class Registry:
|
||||
|
||||
@@ -626,3 +626,8 @@ result = function(
|
||||
bar,
|
||||
**{'ham': spam}
|
||||
)
|
||||
|
||||
# Make sure the COM812 and UP034 rules don't autofix simultaneously and cause a syntax error.
|
||||
the_first_one = next(
|
||||
(i for i in range(10) if i // 2 == 0) # COM812 fix should include the final bracket
|
||||
)
|
||||
|
||||
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
from django.shortcuts import render
|
||||
|
||||
|
||||
def test_view1(request):
|
||||
return render(request, "index.html", locals())
|
||||
|
||||
|
||||
def test_view2(request):
|
||||
return render(request, "index.html", context=locals())
|
||||
|
||||
|
||||
def test_view3(request):
|
||||
return render(request, "index.html")
|
||||
|
||||
|
||||
def test_view4(request):
|
||||
return render(request, "index.html", {})
|
||||
|
||||
|
||||
def test_view5(request):
|
||||
return render(request, "index.html", context={})
|
||||
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
exclude = ["bar"]
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = b"__all__"
|
||||
|
||||
|
||||
class TestModelForm3(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
# no error
|
||||
all((x.id for x in bar))
|
||||
all(x.id for x in bar)
|
||||
all(x.id for x in bar)
|
||||
any(x.id for x in bar)
|
||||
any({x.id for x in bar})
|
||||
|
||||
# PIE 802
|
||||
any([x.id for x in bar])
|
||||
all([x.id for x in bar])
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import sys
|
||||
from sys import version_info as python_version
|
||||
|
||||
if sys.version_info < (3, 9): ... # OK
|
||||
|
||||
if sys.version_info >= (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # OK
|
||||
|
||||
if sys.version_info <= (3, 10): ... # OK
|
||||
|
||||
if sys.version_info > (3, 10): ... # OK
|
||||
|
||||
if python_version > (3, 10): ... # OK
|
||||
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.pyi
vendored
Normal file
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.pyi
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import sys
|
||||
from sys import version_info as python_version
|
||||
|
||||
if sys.version_info < (3, 9): ... # OK
|
||||
|
||||
if sys.version_info >= (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info <= (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info <= (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info > (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if python_version > (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
pass # OK, since we're not in a stub file
|
||||
|
||||
|
||||
class Bar:
|
||||
... # OK
|
||||
|
||||
|
||||
class Foo:
|
||||
pass # OK, since we're not in a stub file
|
||||
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
|
||||
class Bar: ... # OK
|
||||
|
||||
class Foo:
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK
|
||||
|
||||
|
||||
def buzz():
|
||||
print("buzz") # OK, not in stub file
|
||||
|
||||
|
||||
def foo2():
|
||||
123 # OK, not in a stub file
|
||||
|
||||
|
||||
def bizz():
|
||||
x = 123 # OK, not in a stub file
|
||||
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
"""foo""" # OK, strings are handled by another rule
|
||||
|
||||
def buzz():
|
||||
print("buzz") # ERROR PYI010
|
||||
|
||||
def foo2():
|
||||
123 # ERROR PYI010
|
||||
|
||||
def bizz():
|
||||
x = 123 # ERROR PYI010
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x: str = "x") -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f13(
|
||||
x: list[str] = [
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
] # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x: tuple[str, ...] = (
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
) # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x: set[str] = {
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
} # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x: frozenset[bytes] = frozenset({b"foo", b"bar", b"baz"})) -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f17(
|
||||
x: str = "foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x: str = b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x: object = "foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x: int = 5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x: complex = 3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x: complex = -42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f11(*, x: str = "x") -> None: ... # OK
|
||||
def f13(
|
||||
x: list[
|
||||
str
|
||||
] = [ # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x: tuple[
|
||||
str, ...
|
||||
] = ( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x: set[
|
||||
str
|
||||
] = { # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(
|
||||
x: frozenset[
|
||||
bytes
|
||||
] = frozenset( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
{b"foo", b"bar", b"baz"}
|
||||
)
|
||||
) -> None: ...
|
||||
def f17(
|
||||
x: str = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ "bar",
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x: str = b"foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ b"bar",
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x: object = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4,
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x: int = 5
|
||||
+ 5, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x: complex = 3j
|
||||
- 3j, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x: complex = -42.5j # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4.3j,
|
||||
) -> None: ...
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x="x") -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f13(
|
||||
x=[ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x=( # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x={ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f17(
|
||||
x="foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x="foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x=5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x=3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # Error PYI014
|
||||
) -> None: ...
|
||||
def f11(*, x="x") -> None: ... # OK
|
||||
def f13(
|
||||
x=[ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x=( # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x={ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None: ... # Error PYI014
|
||||
def f17(
|
||||
x="foo" + "bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x="foo" + 4, # Error PYI014
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x=5 + 5, # Error PYI014
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x=3j - 3j, # Error PYI014
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # Error PYI014
|
||||
) -> None: ...
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""foo""" # OK, not in stub
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
class Bar:
|
||||
"""bar""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
def foo():
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
class Bar:
|
||||
"""bar""" # ERROR PYI021
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
@@ -23,7 +23,7 @@ def test_error():
|
||||
"""
|
||||
|
||||
# recursive case
|
||||
assert not (a or not (b or c)) # note that we only reduce once here
|
||||
assert not (a or not (b or c))
|
||||
assert not (a or not (b and c))
|
||||
|
||||
# detected, but no autofix for messages
|
||||
|
||||
@@ -3,6 +3,8 @@ import os
|
||||
import posix
|
||||
from posix import abort
|
||||
import sys as std_sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
import _thread
|
||||
import _winapi
|
||||
|
||||
@@ -211,6 +213,18 @@ def noreturn_sys_exit():
|
||||
std_sys.exit(0)
|
||||
|
||||
|
||||
def noreturn_typing_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing.assert_never(0)
|
||||
|
||||
|
||||
def noreturn_typing_extensions_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing_extensions.assert_never(0)
|
||||
|
||||
|
||||
def noreturn__thread_exit():
|
||||
if x > 0:
|
||||
return 1
|
||||
@@ -275,3 +289,7 @@ def x(y):
|
||||
return 1
|
||||
case 1:
|
||||
print() # error
|
||||
|
||||
|
||||
def foo(baz: str) -> str:
|
||||
return baz
|
||||
|
||||
@@ -74,3 +74,13 @@ elif b == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if func_name == "create":
|
||||
return "A"
|
||||
elif func_name == "modify":
|
||||
return "M"
|
||||
elif func_name == "remove":
|
||||
return "D"
|
||||
elif func_name == "move":
|
||||
return "MV"
|
||||
|
||||
@@ -11,6 +11,7 @@ YODA > age # SIM300
|
||||
YODA >= age # SIM300
|
||||
JediOrder.YODA == age # SIM300
|
||||
0 < (number - 100) # SIM300
|
||||
SomeClass().settings.SOME_CONSTANT_VALUE > (60 * 60) # SIM300
|
||||
|
||||
# OK
|
||||
compare == "yoda"
|
||||
|
||||
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
from foo import ( # Comment on `foo`
|
||||
Member as Alias, # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bar import ( # Comment on `bar`
|
||||
Member, # Comment on `Member`
|
||||
)
|
||||
|
||||
from baz import ( # Comment on `baz`
|
||||
Member as Alias # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bop import ( # Comment on `bop`
|
||||
Member # Comment on `Member`
|
||||
)
|
||||
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
from __future__ import annotations
|
||||
from typing import Any
|
||||
from . import my_local_folder_object
|
||||
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
50
crates/ruff/resources/test/fixtures/pycodestyle/E25.py
vendored
Normal file
50
crates/ruff/resources/test/fixtures/pycodestyle/E25.py
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
#: E251 E251
|
||||
def foo(bar = False):
|
||||
'''Test function with an error in declaration'''
|
||||
pass
|
||||
#: E251
|
||||
foo(bar= True)
|
||||
#: E251
|
||||
foo(bar =True)
|
||||
#: E251 E251
|
||||
foo(bar = True)
|
||||
#: E251
|
||||
y = bar(root= "sdasd")
|
||||
#: E251:2:29
|
||||
parser.add_argument('--long-option',
|
||||
default=
|
||||
"/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
#: E251:1:45
|
||||
parser.add_argument('--long-option', default
|
||||
="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
#: E251:3:8 E251:3:10
|
||||
foo(True,
|
||||
baz=(1, 2),
|
||||
biz = 'foo'
|
||||
)
|
||||
#: Okay
|
||||
foo(bar=(1 == 1))
|
||||
foo(bar=(1 != 1))
|
||||
foo(bar=(1 >= 1))
|
||||
foo(bar=(1 <= 1))
|
||||
(options, args) = parser.parse_args()
|
||||
d[type(None)] = _deepcopy_atomic
|
||||
|
||||
# Annotated Function Definitions
|
||||
#: Okay
|
||||
def munge(input: AnyStr, sep: AnyStr = None, limit=1000,
|
||||
extra: Union[str, dict] = None) -> AnyStr:
|
||||
pass
|
||||
#: Okay
|
||||
async def add(a: int = 0, b: int = 0) -> int:
|
||||
return a + b
|
||||
# Previously E251 four times
|
||||
#: E271:1:6
|
||||
async def add(a: int = 0, b: int = 0) -> int:
|
||||
return a + b
|
||||
#: E252:1:15 E252:1:16 E252:1:27 E252:1:36
|
||||
def add(a: int=0, b: int =0, c: int= 0) -> int:
|
||||
return a + b + c
|
||||
#: Okay
|
||||
def add(a: int = _default(name='f')):
|
||||
return a
|
||||
@@ -57,3 +57,6 @@ class C: ...; ...
|
||||
#: E701:2:12
|
||||
match *0, 1, *2:
|
||||
case 0,: y = 0
|
||||
#:
|
||||
class Foo:
|
||||
match: Optional[Match] = None
|
||||
|
||||
145
crates/ruff/resources/test/fixtures/pycodestyle/W19.py
vendored
Normal file
145
crates/ruff/resources/test/fixtures/pycodestyle/W19.py
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
#: W191
|
||||
if False:
|
||||
print # indented with 1 tab
|
||||
#:
|
||||
|
||||
|
||||
#: W191
|
||||
y = x == 2 \
|
||||
or x == 3
|
||||
#: E101 W191 W504
|
||||
if (
|
||||
x == (
|
||||
3
|
||||
) or
|
||||
y == 4):
|
||||
pass
|
||||
#: E101 W191
|
||||
if x == 2 \
|
||||
or y > 1 \
|
||||
or x == 3:
|
||||
pass
|
||||
#: E101 W191
|
||||
if x == 2 \
|
||||
or y > 1 \
|
||||
or x == 3:
|
||||
pass
|
||||
#:
|
||||
|
||||
#: E101 W191 W504
|
||||
if (foo == bar and
|
||||
baz == bop):
|
||||
pass
|
||||
#: E101 W191 W504
|
||||
if (
|
||||
foo == bar and
|
||||
baz == bop
|
||||
):
|
||||
pass
|
||||
#:
|
||||
|
||||
#: E101 E101 W191 W191
|
||||
if start[1] > end_col and not (
|
||||
over_indent == 4 and indent_next):
|
||||
return (0, "E121 continuation line over-"
|
||||
"indented for visual indent")
|
||||
#:
|
||||
|
||||
#: E101 W191
|
||||
|
||||
|
||||
def long_function_name(
|
||||
var_one, var_two, var_three,
|
||||
var_four):
|
||||
print(var_one)
|
||||
#: E101 W191 W504
|
||||
if ((row < 0 or self.moduleCount <= row or
|
||||
col < 0 or self.moduleCount <= col)):
|
||||
raise Exception("%s,%s - %s" % (row, col, self.moduleCount))
|
||||
#: E101 E101 E101 E101 W191 W191 W191 W191 W191 W191
|
||||
if bar:
|
||||
return (
|
||||
start, 'E121 lines starting with a '
|
||||
'closing bracket should be indented '
|
||||
"to match that of the opening "
|
||||
"bracket's line"
|
||||
)
|
||||
#
|
||||
#: E101 W191 W504
|
||||
# you want vertical alignment, so use a parens
|
||||
if ((foo.bar("baz") and
|
||||
foo.bar("bop")
|
||||
)):
|
||||
print "yes"
|
||||
#: E101 W191 W504
|
||||
# also ok, but starting to look like LISP
|
||||
if ((foo.bar("baz") and
|
||||
foo.bar("bop"))):
|
||||
print "yes"
|
||||
#: E101 W191 W504
|
||||
if (a == 2 or
|
||||
b == "abc def ghi"
|
||||
"jkl mno"):
|
||||
return True
|
||||
#: E101 W191 W504
|
||||
if (a == 2 or
|
||||
b == """abc def ghi
|
||||
jkl mno"""):
|
||||
return True
|
||||
#: W191:2:1 W191:3:1 E101:3:2
|
||||
if length > options.max_line_length:
|
||||
return options.max_line_length, \
|
||||
"E501 line too long (%d characters)" % length
|
||||
|
||||
|
||||
#
|
||||
#: E101 W191 W191 W504
|
||||
if os.path.exists(os.path.join(path, PEP8_BIN)):
|
||||
cmd = ([os.path.join(path, PEP8_BIN)] +
|
||||
self._pep8_options(targetfile))
|
||||
#: W191
|
||||
'''
|
||||
multiline string with tab in it'''
|
||||
#: E101 W191
|
||||
'''multiline string
|
||||
with tabs
|
||||
and spaces
|
||||
'''
|
||||
#: Okay
|
||||
'''sometimes, you just need to go nuts in a multiline string
|
||||
and allow all sorts of crap
|
||||
like mixed tabs and spaces
|
||||
|
||||
or trailing whitespace
|
||||
or long long long long long long long long long long long long long long long long long lines
|
||||
''' # nopep8
|
||||
#: Okay
|
||||
'''this one
|
||||
will get no warning
|
||||
even though the noqa comment is not immediately after the string
|
||||
''' + foo # noqa
|
||||
#
|
||||
#: E101 W191
|
||||
if foo is None and bar is "bop" and \
|
||||
blah == 'yeah':
|
||||
blah = 'yeahnah'
|
||||
|
||||
|
||||
#
|
||||
#: W191 W191 W191
|
||||
if True:
|
||||
foo(
|
||||
1,
|
||||
2)
|
||||
#: W191 W191 W191 W191 W191
|
||||
def test_keys(self):
|
||||
"""areas.json - All regions are accounted for."""
|
||||
expected = set([
|
||||
u'Norrbotten',
|
||||
u'V\xe4sterbotten',
|
||||
])
|
||||
#: W191
|
||||
x = [
|
||||
'abc'
|
||||
]
|
||||
#:
|
||||
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
#: Okay
|
||||
# 情
|
||||
#: W291:1:6
|
||||
print
|
||||
#: W293:2:1
|
||||
class Foo(object):
|
||||
|
||||
bang = 12
|
||||
#: W291:2:35
|
||||
'''multiline
|
||||
string with trailing whitespace'''
|
||||
#: W291 W292 noeol
|
||||
x = 1
|
||||
#: W191 W292 noeol
|
||||
if False:
|
||||
pass # indented with tabs
|
||||
#: W292:1:36 noeol
|
||||
# This line doesn't have a linefeed
|
||||
#: W292:1:5 E225:1:2 noeol
|
||||
1+ 1
|
||||
#: W292:1:27 E261:1:12 noeol
|
||||
import this # no line feed
|
||||
#: W292:3:22 noeol
|
||||
class Test(object):
|
||||
def __repr__(self):
|
||||
return 'test'
|
||||
@@ -16,21 +16,17 @@ if False == None: # E711, E712 (fix)
|
||||
if None == False: # E711, E712 (fix)
|
||||
pass
|
||||
|
||||
###
|
||||
# Unfixable errors
|
||||
###
|
||||
if "abc" == None: # E711
|
||||
pass
|
||||
if None == "abc": # E711
|
||||
pass
|
||||
if "abc" == False: # E712
|
||||
pass
|
||||
if False == "abc": # E712
|
||||
pass
|
||||
|
||||
###
|
||||
# Non-errors
|
||||
###
|
||||
if "abc" == None:
|
||||
pass
|
||||
if None == "abc":
|
||||
pass
|
||||
if "abc" == False:
|
||||
pass
|
||||
if False == "abc":
|
||||
pass
|
||||
if "def" == "abc":
|
||||
pass
|
||||
if False is None:
|
||||
|
||||
10
crates/ruff/resources/test/fixtures/pyflakes/F401_10.py
vendored
Normal file
10
crates/ruff/resources/test/fixtures/pyflakes/F401_10.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Test: imports within `ModuleNotFoundError` handlers."""
|
||||
|
||||
|
||||
def check_orjson():
|
||||
try:
|
||||
import orjson
|
||||
|
||||
return True
|
||||
except ModuleNotFoundError:
|
||||
return False
|
||||
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
try:
|
||||
pass
|
||||
except ExceptionGroup:
|
||||
pass
|
||||
@@ -20,7 +20,27 @@ def f(provided: int) -> int:
|
||||
match provided:
|
||||
case True:
|
||||
return captured # F821
|
||||
case [captured, *_]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case captured:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [captured, *_]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [*captured]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case {**captured}:
|
||||
return captured
|
||||
|
||||
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
Test for else-if-used
|
||||
"""
|
||||
|
||||
|
||||
def ok0():
|
||||
"""Should not trigger on elif"""
|
||||
if 1:
|
||||
pass
|
||||
elif 2:
|
||||
pass
|
||||
|
||||
|
||||
def ok1():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
print()
|
||||
if 1:
|
||||
pass
|
||||
|
||||
|
||||
def ok2():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 1:
|
||||
pass
|
||||
print()
|
||||
|
||||
|
||||
def not_ok0():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
|
||||
|
||||
def not_ok1():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
# Adapted from:
|
||||
# https://github.com/PyCQA/pylint/blob/b70d2abd7fabe9bfd735a30b593b9cd5eaa36194/tests/functional/g/globals.py
|
||||
|
||||
CONSTANT = 1
|
||||
|
||||
|
||||
def FUNC():
|
||||
pass
|
||||
|
||||
|
||||
class CLASS:
|
||||
pass
|
||||
|
||||
|
||||
def fix_constant(value):
|
||||
"""All this is ok, but try not to use `global` ;)"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
CONSTANT = value
|
||||
|
||||
|
||||
def global_with_import():
|
||||
"""Should only warn for global-statement when using `Import` node"""
|
||||
global sys # [global-statement]
|
||||
import sys
|
||||
|
||||
|
||||
def global_with_import_from():
|
||||
"""Should only warn for global-statement when using `ImportFrom` node"""
|
||||
global namedtuple # [global-statement]
|
||||
from collections import namedtuple
|
||||
|
||||
|
||||
def global_del():
|
||||
"""Deleting the global name prevents `global-variable-not-assigned`"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
del CONSTANT
|
||||
|
||||
|
||||
def global_operator_assign():
|
||||
"""Operator assigns should only throw a global statement error"""
|
||||
global CONSTANT # [global-statement]
|
||||
print(CONSTANT)
|
||||
CONSTANT += 1
|
||||
|
||||
|
||||
def global_function_assign():
|
||||
"""Function assigns should only throw a global statement error"""
|
||||
global CONSTANT # [global-statement]
|
||||
|
||||
def CONSTANT():
|
||||
pass
|
||||
|
||||
CONSTANT()
|
||||
|
||||
|
||||
def override_func():
|
||||
"""Overriding a function should only throw a global statement error"""
|
||||
global FUNC # [global-statement]
|
||||
|
||||
def FUNC():
|
||||
pass
|
||||
|
||||
FUNC()
|
||||
|
||||
|
||||
def override_class():
|
||||
"""Overriding a class should only throw a global statement error"""
|
||||
global CLASS # [global-statement]
|
||||
|
||||
class CLASS:
|
||||
pass
|
||||
|
||||
CLASS()
|
||||
24
crates/ruff/resources/test/fixtures/pylint/logging_too_few_args.py
vendored
Normal file
24
crates/ruff/resources/test/fixtures/pylint/logging_too_few_args.py
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
import logging
|
||||
|
||||
logging.warning("Hello %s %s", "World!") # [logging-too-few-args]
|
||||
|
||||
# do not handle calls with kwargs (like pylint)
|
||||
logging.warning("Hello %s", "World!", "again", something="else")
|
||||
|
||||
logging.warning("Hello %s", "World!")
|
||||
|
||||
# do not handle calls without any args
|
||||
logging.info("100% dynamic")
|
||||
|
||||
# do not handle calls with *args
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)
|
||||
|
||||
# do not handle calls with **kwargs
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)
|
||||
|
||||
# do not handle keyword arguments
|
||||
logging.error("%(objects)d modifications: %(modifications)d errors: %(errors)d")
|
||||
|
||||
import warning
|
||||
|
||||
warning.warning("Hello %s %s", "World!")
|
||||
20
crates/ruff/resources/test/fixtures/pylint/logging_too_many_args.py
vendored
Normal file
20
crates/ruff/resources/test/fixtures/pylint/logging_too_many_args.py
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import logging
|
||||
|
||||
logging.warning("Hello %s", "World!", "again") # [logging-too-many-args]
|
||||
|
||||
logging.warning("Hello %s", "World!", "again", something="else")
|
||||
|
||||
logging.warning("Hello %s", "World!")
|
||||
|
||||
# do not handle calls with *args
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)
|
||||
|
||||
# do not handle calls with **kwargs
|
||||
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)
|
||||
|
||||
# do not handle keyword arguments
|
||||
logging.error("%(objects)d modifications: %(modifications)d errors: %(errors)d", {"objects": 1, "modifications": 1, "errors": 1})
|
||||
|
||||
import warning
|
||||
|
||||
warning.warning("Hello %s", "World!", "again")
|
||||
155
crates/ruff/resources/test/fixtures/pylint/redefined_loop_name.py
vendored
Normal file
155
crates/ruff/resources/test/fixtures/pylint/redefined_loop_name.py
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
# For -> for, variable reused
|
||||
for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# With -> for, variable reused
|
||||
with None as i:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> with, variable reused
|
||||
for i in []:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# With -> with, variable reused
|
||||
with None as i:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# For -> for, different variable
|
||||
for i in []:
|
||||
for j in []: # ok
|
||||
pass
|
||||
|
||||
# With -> with, different variable
|
||||
with None as i:
|
||||
with None as j: # ok
|
||||
pass
|
||||
|
||||
# For -> for -> for, doubly nested variable reuse
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for -> for -> for, doubly nested variable reuse x2
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> assignment
|
||||
for i in []:
|
||||
i = 5 # error
|
||||
|
||||
# For -> augmented assignment
|
||||
for i in []:
|
||||
i += 5 # error
|
||||
|
||||
# For -> annotated assignment
|
||||
for i in []:
|
||||
i: int = 5 # error
|
||||
|
||||
# Async for -> for, variable reused
|
||||
async for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> async for, variable reused
|
||||
for i in []:
|
||||
async for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks tuple
|
||||
for i, j in enumerate([]):
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, inner loop unpacks tuple
|
||||
for i in []:
|
||||
for i, j in enumerate([]): # error
|
||||
pass
|
||||
|
||||
# For -> for, both loops unpack tuple
|
||||
for (i, (j, k)) in []:
|
||||
for i, j in enumerate([]): # two errors
|
||||
pass
|
||||
|
||||
# For else -> for, variable reused in else
|
||||
for i in []:
|
||||
pass
|
||||
else:
|
||||
for i in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, ignore dummy variables
|
||||
for _ in []:
|
||||
for _ in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks with asterisk
|
||||
for i, *j in []:
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> function definition
|
||||
for i in []:
|
||||
def f():
|
||||
i = 2 # no error
|
||||
|
||||
# For -> class definition
|
||||
for i in []:
|
||||
class A:
|
||||
i = 2 # no error
|
||||
|
||||
# For -> function definition -> for -> assignment
|
||||
for i in []:
|
||||
def f():
|
||||
for i in []: # no error
|
||||
i = 2 # error
|
||||
|
||||
# For -> class definition -> for -> for
|
||||
for i in []:
|
||||
class A:
|
||||
for i in []: # no error
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> use in assignment target without actual assignment; subscript
|
||||
for i in []:
|
||||
a[i] = 2 # no error
|
||||
i[a] = 2 # no error
|
||||
|
||||
# For -> use in assignment target without actual assignment; attribute
|
||||
for i in []:
|
||||
a.i = 2 # no error
|
||||
i.a = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a[0] in []:
|
||||
a[0] = 2 # error
|
||||
a[1] = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a['i'] in []:
|
||||
a['i'] = 2 # error
|
||||
a['j'] = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment
|
||||
for a.i in []:
|
||||
a.i = 2 # error
|
||||
a.j = 2 # no error
|
||||
|
||||
# For target with double nested attribute -> assignment
|
||||
for a.i.j in []:
|
||||
a.i.j = 2 # error
|
||||
a.j.i = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment with different spacing
|
||||
for a.i in []:
|
||||
a. i = 2 # error
|
||||
for a. i in []:
|
||||
a.i = 2 # error
|
||||
@@ -124,3 +124,14 @@ def test_break_in_with():
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def test_break_in_match():
|
||||
"""no false positive for break in match"""
|
||||
for name in ["demo"]:
|
||||
match name:
|
||||
case "demo":
|
||||
break
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -28,3 +28,10 @@ def f(x: IList[str]) -> None:
|
||||
|
||||
def f(x: "List[str]") -> None:
|
||||
...
|
||||
|
||||
|
||||
list = "abc"
|
||||
|
||||
|
||||
def f(x: List[str]) -> None:
|
||||
...
|
||||
|
||||
@@ -41,3 +41,8 @@ def f(x: Union["str", int]) -> None:
|
||||
|
||||
def f(x: Union[("str", "int"), float]) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f() -> None:
|
||||
x: Optional[str]
|
||||
x = Optional[str]
|
||||
|
||||
@@ -2,36 +2,42 @@ from typing import TypedDict, NotRequired, Literal
|
||||
import typing
|
||||
|
||||
# dict literal
|
||||
MyType1 = TypedDict("MyType1", {"a": int, "b": str})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str})
|
||||
|
||||
# dict call
|
||||
MyType2 = TypedDict("MyType2", dict(a=int, b=str))
|
||||
MyType = TypedDict("MyType", dict(a=int, b=str))
|
||||
|
||||
# kwargs
|
||||
MyType3 = TypedDict("MyType3", a=int, b=str)
|
||||
MyType = TypedDict("MyType", a=int, b=str)
|
||||
|
||||
# Empty TypedDict
|
||||
MyType4 = TypedDict("MyType4")
|
||||
MyType = TypedDict("MyType")
|
||||
|
||||
# Literal values
|
||||
MyType5 = TypedDict("MyType5", {"a": "hello"})
|
||||
MyType6 = TypedDict("MyType6", a="hello")
|
||||
MyType = TypedDict("MyType", {"a": "hello"})
|
||||
MyType = TypedDict("MyType", a="hello")
|
||||
|
||||
# NotRequired
|
||||
MyType7 = TypedDict("MyType7", {"a": NotRequired[dict]})
|
||||
MyType = TypedDict("MyType", {"a": NotRequired[dict]})
|
||||
|
||||
# total
|
||||
MyType8 = TypedDict("MyType8", {"x": int, "y": int}, total=False)
|
||||
|
||||
# invalid identifiers
|
||||
MyType9 = TypedDict("MyType9", {"in": int, "x-y": int})
|
||||
MyType = TypedDict("MyType", {"x": int, "y": int}, total=False)
|
||||
|
||||
# using Literal type
|
||||
MyType10 = TypedDict("MyType10", {"key": Literal["value"]})
|
||||
MyType = TypedDict("MyType", {"key": Literal["value"]})
|
||||
|
||||
# using namespace TypedDict
|
||||
MyType11 = typing.TypedDict("MyType11", {"key": int})
|
||||
MyType = typing.TypedDict("MyType", {"key": int})
|
||||
|
||||
# unpacking
|
||||
# invalid identifiers (OK)
|
||||
MyType = TypedDict("MyType", {"in": int, "x-y": int})
|
||||
|
||||
# unpacking (OK)
|
||||
c = {"c": float}
|
||||
MyType12 = TypedDict("MyType1", {"a": int, "b": str, **c})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str, **c})
|
||||
|
||||
# Empty dict literal
|
||||
MyType = TypedDict("MyType", {})
|
||||
|
||||
# Empty dict call
|
||||
MyType = TypedDict("MyType", dict())
|
||||
|
||||
@@ -2,21 +2,30 @@ from typing import NamedTuple
|
||||
import typing
|
||||
|
||||
# with complex annotations
|
||||
NT1 = NamedTuple("NT1", [("a", int), ("b", tuple[str, ...])])
|
||||
MyType = NamedTuple("MyType", [("a", int), ("b", tuple[str, ...])])
|
||||
|
||||
# with default values as list
|
||||
NT2 = NamedTuple(
|
||||
"NT2",
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str), ("c", list[bool])],
|
||||
defaults=["foo", [True]],
|
||||
)
|
||||
|
||||
# with namespace
|
||||
NT3 = typing.NamedTuple("NT3", [("a", int), ("b", str)])
|
||||
MyType = typing.NamedTuple("MyType", [("a", int), ("b", str)])
|
||||
|
||||
# with too many default values
|
||||
NT4 = NamedTuple(
|
||||
"NT4",
|
||||
# too many default values (OK)
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str)],
|
||||
defaults=[1, "bar", "baz"],
|
||||
)
|
||||
|
||||
# invalid identifiers (OK)
|
||||
MyType = NamedTuple("MyType", [("x-y", int), ("b", tuple[str, ...])])
|
||||
|
||||
# no fields
|
||||
MyType = typing.NamedTuple("MyType")
|
||||
|
||||
# empty fields
|
||||
MyType = typing.NamedTuple("MyType", [])
|
||||
|
||||
7
crates/ruff/resources/test/fixtures/pyupgrade/UP038.py
vendored
Normal file
7
crates/ruff/resources/test/fixtures/pyupgrade/UP038.py
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
isinstance(1, (int, float)) # UP038
|
||||
issubclass("yes", (int, float, str)) # UP038
|
||||
|
||||
isinstance(1, int) # OK
|
||||
issubclass("yes", int) # OK
|
||||
isinstance(1, int | float) # OK
|
||||
issubclass("yes", int | str) # OK
|
||||
@@ -1,15 +0,0 @@
|
||||
def f(*args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
a = (1, 2)
|
||||
b = (3, 4)
|
||||
c = (5, 6)
|
||||
d = (7, 8)
|
||||
|
||||
f(a, b)
|
||||
f(a, kw=b)
|
||||
f(*a, kw=b)
|
||||
f(kw=a, *b)
|
||||
f(kw=a, *b, *c)
|
||||
f(*a, kw=b, *c, kw1=d)
|
||||
@@ -30,3 +30,11 @@ def good():
|
||||
raise e # This is verbose violation, shouldn't trigger no cause
|
||||
except Exception:
|
||||
raise # Just re-raising don't need 'from'
|
||||
|
||||
|
||||
def good():
|
||||
try:
|
||||
from mod import f
|
||||
except ImportError:
|
||||
def f():
|
||||
raise MyException() # Raising within a new scope is fine
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::path::Path;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use once_cell::sync::Lazy;
|
||||
@@ -9,9 +10,7 @@ use rustpython_parser::ast::{
|
||||
Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword, KeywordData,
|
||||
Located, Location, MatchCase, Pattern, PatternKind, Stmt, StmtKind,
|
||||
};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::token::StringKind;
|
||||
use rustpython_parser::{lexer, Mode, StringKind, Tok};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
use crate::ast::types::{Binding, BindingKind, CallPath, Range};
|
||||
@@ -577,33 +576,32 @@ pub fn has_non_none_keyword(keywords: &[Keyword], keyword: &str) -> bool {
|
||||
})
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
pub struct Exceptions: u32 {
|
||||
const NAME_ERROR = 0b0000_0001;
|
||||
const MODULE_NOT_FOUND_ERROR = 0b0000_0010;
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the names of all handled exceptions.
|
||||
pub fn extract_handler_names(handlers: &[Excepthandler]) -> Vec<CallPath> {
|
||||
// TODO(charlie): Use `resolve_call_path` to avoid false positives for
|
||||
// overridden builtins.
|
||||
let mut handler_names = vec![];
|
||||
pub fn extract_handled_exceptions(handlers: &[Excepthandler]) -> Vec<&Expr> {
|
||||
let mut handled_exceptions = Vec::new();
|
||||
for handler in handlers {
|
||||
match &handler.node {
|
||||
ExcepthandlerKind::ExceptHandler { type_, .. } => {
|
||||
if let Some(type_) = type_ {
|
||||
if let ExprKind::Tuple { elts, .. } = &type_.node {
|
||||
for type_ in elts {
|
||||
let call_path = collect_call_path(type_);
|
||||
if !call_path.is_empty() {
|
||||
handler_names.push(call_path);
|
||||
}
|
||||
handled_exceptions.push(type_);
|
||||
}
|
||||
} else {
|
||||
let call_path = collect_call_path(type_);
|
||||
if !call_path.is_empty() {
|
||||
handler_names.push(call_path);
|
||||
}
|
||||
handled_exceptions.push(type_);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
handler_names
|
||||
handled_exceptions
|
||||
}
|
||||
|
||||
/// Return the set of all bound argument names.
|
||||
@@ -655,7 +653,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
|
||||
|
||||
/// Returns `true` if a [`Range`] includes at least one comment.
|
||||
pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
|
||||
for tok in lexer::make_tokenizer(locator.slice(&range)) {
|
||||
for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
|
||||
match tok {
|
||||
Ok((_, tok, _)) => {
|
||||
if matches!(tok, Tok::Comment(..)) {
|
||||
@@ -767,7 +765,7 @@ pub fn from_relative_import<'a>(module: &'a [String], name: &'a str) -> CallPath
|
||||
call_path
|
||||
}
|
||||
|
||||
/// A [`Visitor`] that collects all return statements in a function or method.
|
||||
/// A [`Visitor`] that collects all `return` statements in a function or method.
|
||||
#[derive(Default)]
|
||||
pub struct ReturnStatementVisitor<'a> {
|
||||
pub returns: Vec<Option<&'a Expr>>,
|
||||
@@ -788,6 +786,48 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// A [`Visitor`] that collects all `raise` statements in a function or method.
|
||||
#[derive(Default)]
|
||||
pub struct RaiseStatementVisitor<'a> {
|
||||
pub raises: Vec<(Range, Option<&'a Expr>, Option<&'a Expr>)>,
|
||||
}
|
||||
|
||||
impl<'a, 'b> Visitor<'b> for RaiseStatementVisitor<'b>
|
||||
where
|
||||
'b: 'a,
|
||||
{
|
||||
fn visit_stmt(&mut self, stmt: &'b Stmt) {
|
||||
match &stmt.node {
|
||||
StmtKind::Raise { exc, cause } => {
|
||||
self.raises
|
||||
.push((Range::from_located(stmt), exc.as_deref(), cause.as_deref()));
|
||||
}
|
||||
StmtKind::ClassDef { .. }
|
||||
| StmtKind::FunctionDef { .. }
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
| StmtKind::Try { .. }
|
||||
| StmtKind::TryStar { .. } => {}
|
||||
StmtKind::If { body, orelse, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
visitor::walk_body(self, orelse);
|
||||
}
|
||||
StmtKind::While { body, .. }
|
||||
| StmtKind::With { body, .. }
|
||||
| StmtKind::AsyncWith { body, .. }
|
||||
| StmtKind::For { body, .. }
|
||||
| StmtKind::AsyncFor { body, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
}
|
||||
StmtKind::Match { cases, .. } => {
|
||||
for case in cases {
|
||||
visitor::walk_body(self, &case.body);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a location within a file (relative to `base`) to an absolute
|
||||
/// position.
|
||||
pub fn to_absolute(relative: Location, base: Location) -> Location {
|
||||
@@ -870,7 +910,7 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
|
||||
let mut fix_start = None;
|
||||
let mut fix_end = None;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, start).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, start).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(start);
|
||||
@@ -902,7 +942,8 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
) {
|
||||
let contents = locator.slice(&Range::from_located(stmt));
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt.location).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
|
||||
{
|
||||
if matches!(tok, Tok::Name { .. }) {
|
||||
return Range::new(start, end);
|
||||
}
|
||||
@@ -928,16 +969,18 @@ pub fn binding_range(binding: &Binding, locator: &Locator) -> Range {
|
||||
}
|
||||
|
||||
// Return the ranges of `Name` tokens within a specified node.
|
||||
pub fn find_names<T>(located: &Located<T>, locator: &Locator) -> Vec<Range> {
|
||||
pub fn find_names<'a, T, U>(
|
||||
located: &'a Located<T, U>,
|
||||
locator: &'a Locator,
|
||||
) -> impl Iterator<Item = Range> + 'a {
|
||||
let contents = locator.slice(&Range::from_located(located));
|
||||
lexer::make_tokenizer_located(contents, located.location)
|
||||
lexer::lex_located(contents, Mode::Module, located.location)
|
||||
.flatten()
|
||||
.filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
|
||||
.map(|(start, _, end)| Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Return the `Range` of `name` in `Excepthandler`.
|
||||
@@ -949,7 +992,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
|
||||
(Some(_), Some(type_)) => {
|
||||
let type_end_location = type_.end_location.unwrap();
|
||||
let contents = locator.slice(&Range::new(type_end_location, body[0].location));
|
||||
let range = lexer::make_tokenizer_located(contents, type_end_location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, type_end_location)
|
||||
.flatten()
|
||||
.tuple_windows()
|
||||
.find(|(tok, next_tok)| {
|
||||
@@ -976,7 +1019,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
location: handler.location,
|
||||
end_location: end,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, handler.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, handler.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -990,7 +1033,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
|
||||
pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
|
||||
let contents = locator.slice(&Range::from_located(expr));
|
||||
lexer::make_tokenizer_located(contents, expr.location)
|
||||
lexer::lex_located(contents, Mode::Module, expr.location)
|
||||
.flatten()
|
||||
.filter_map(|(location, tok, end_location)| match tok {
|
||||
Tok::String {
|
||||
@@ -1044,7 +1087,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
.expect("Expected orelse to be non-empty")
|
||||
.location,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, body_end)
|
||||
let range = lexer::lex_located(contents, Mode::Module, body_end)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1060,7 +1103,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
|
||||
pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
|
||||
let contents = locator.slice(&range);
|
||||
let range = lexer::make_tokenizer_located(contents, range.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, range.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Colon))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1090,7 +1133,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
_ => return None,
|
||||
};
|
||||
let contents = locator.slice(&Range::new(start, end));
|
||||
let range = lexer::make_tokenizer_located(contents, start)
|
||||
let range = lexer::lex_located(contents, Mode::Module, start)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1206,8 +1249,8 @@ pub fn is_logger_candidate(func: &Expr) -> bool {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::ast::helpers::{
|
||||
elif_else_range, else_range, first_colon_range, identifier_range, match_trailing_content,
|
||||
|
||||
24
crates/ruff/src/ast/logging.rs
Normal file
24
crates/ruff/src/ast/logging.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
pub enum LoggingLevel {
|
||||
Debug,
|
||||
Critical,
|
||||
Error,
|
||||
Exception,
|
||||
Info,
|
||||
Warn,
|
||||
Warning,
|
||||
}
|
||||
|
||||
impl LoggingLevel {
|
||||
pub fn from_str(level: &str) -> Option<Self> {
|
||||
match level {
|
||||
"debug" => Some(LoggingLevel::Debug),
|
||||
"critical" => Some(LoggingLevel::Critical),
|
||||
"error" => Some(LoggingLevel::Error),
|
||||
"exception" => Some(LoggingLevel::Exception),
|
||||
"info" => Some(LoggingLevel::Info),
|
||||
"warn" => Some(LoggingLevel::Warn),
|
||||
"warning" => Some(LoggingLevel::Warning),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ pub mod comparable;
|
||||
pub mod function_type;
|
||||
pub mod hashable;
|
||||
pub mod helpers;
|
||||
pub mod logging;
|
||||
pub mod operations;
|
||||
pub mod relocate;
|
||||
pub mod types;
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use bitflags::bitflags;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers::any_over_expr;
|
||||
use crate::ast::types::{BindingKind, Scope};
|
||||
@@ -205,6 +204,7 @@ pub fn in_nested_block<'a>(mut parents: impl Iterator<Item = &'a Stmt>) -> bool
|
||||
| StmtKind::TryStar { .. }
|
||||
| StmtKind::If { .. }
|
||||
| StmtKind::With { .. }
|
||||
| StmtKind::Match { .. }
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -283,7 +283,7 @@ pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
|
||||
/// `CPython` doesn't either. This method iterates over the token stream and
|
||||
/// re-identifies [`Cmpop`] nodes, annotating them with valid ranges.
|
||||
pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
|
||||
let mut tok_iter = lexer::make_tokenizer(contents).flatten().peekable();
|
||||
let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
|
||||
let mut ops: Vec<LocatedCmpop> = vec![];
|
||||
let mut count: usize = 0;
|
||||
loop {
|
||||
|
||||
@@ -29,7 +29,7 @@ impl Range {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_located<T>(located: &Located<T>) -> Self {
|
||||
pub fn from_located<T, U>(located: &Located<T, U>) -> Self {
|
||||
Range::new(located.location, located.end_location.unwrap())
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ impl<'a> Scope<'a> {
|
||||
// StarImportation
|
||||
// FutureImportation
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, is_macro::Is)]
|
||||
pub enum BindingKind<'a> {
|
||||
Annotation,
|
||||
Argument,
|
||||
|
||||
@@ -4,8 +4,7 @@ use libcst_native::{
|
||||
Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
|
||||
};
|
||||
use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers;
|
||||
use crate::ast::helpers::to_absolute;
|
||||
@@ -371,7 +370,7 @@ pub fn remove_argument(
|
||||
if n_arguments == 1 {
|
||||
// Case 1: there is only one argument.
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(if remove_parentheses {
|
||||
@@ -403,7 +402,7 @@ pub fn remove_argument(
|
||||
{
|
||||
// Case 2: argument or keyword is _not_ the last node.
|
||||
let mut seen_comma = false;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if seen_comma {
|
||||
if matches!(tok, Tok::NonLogicalNewline) {
|
||||
// Also delete any non-logical newlines after the comma.
|
||||
@@ -426,7 +425,7 @@ pub fn remove_argument(
|
||||
} else {
|
||||
// Case 3: argument or keyword is the last node, so we have to find the last
|
||||
// comma in the stmt.
|
||||
for (start, tok, _) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if start == expr_at {
|
||||
fix_end = Some(expr_end);
|
||||
break;
|
||||
@@ -448,8 +447,8 @@ pub fn remove_argument(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
|
||||
use crate::source_code::Locator;
|
||||
|
||||
@@ -6,21 +6,21 @@ use std::path::Path;
|
||||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use nohash_hasher::IntMap;
|
||||
use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
|
||||
use ruff_python::typing::TYPING_EXTENSIONS;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_common::cformat::{CFormatError, CFormatErrorType};
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::{
|
||||
Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
|
||||
ExprKind, KeywordData, Located, Location, Operator, Pattern, PatternKind, Stmt, StmtKind,
|
||||
Suite,
|
||||
};
|
||||
use rustpython_parser::parser;
|
||||
use smallvec::smallvec;
|
||||
|
||||
use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
|
||||
use ruff_python::typing::TYPING_EXTENSIONS;
|
||||
|
||||
use crate::ast::helpers::{
|
||||
binding_range, collect_call_path, extract_handler_names, from_relative_import, to_module_path,
|
||||
binding_range, collect_call_path, extract_handled_exceptions, from_relative_import,
|
||||
to_module_path, Exceptions,
|
||||
};
|
||||
use crate::ast::operations::{extract_all_names, AllNamesFlags};
|
||||
use crate::ast::relocate::relocate_expr;
|
||||
@@ -110,7 +110,7 @@ pub struct Checker<'a> {
|
||||
pub(crate) seen_import_boundary: bool,
|
||||
futures_allowed: bool,
|
||||
annotations_future_enabled: bool,
|
||||
except_handlers: Vec<Vec<Vec<&'a str>>>,
|
||||
handled_exceptions: Vec<Exceptions>,
|
||||
// Check-specific state.
|
||||
pub(crate) flake8_bugbear_seen: Vec<&'a Expr>,
|
||||
}
|
||||
@@ -179,7 +179,7 @@ impl<'a> Checker<'a> {
|
||||
seen_import_boundary: false,
|
||||
futures_allowed: true,
|
||||
annotations_future_enabled: is_interface_definition,
|
||||
except_handlers: vec![],
|
||||
handled_exceptions: vec![],
|
||||
// Check-specific state.
|
||||
flake8_bugbear_seen: vec![],
|
||||
}
|
||||
@@ -188,7 +188,7 @@ impl<'a> Checker<'a> {
|
||||
/// Return `true` if a patch should be generated under the given autofix
|
||||
/// `Mode`.
|
||||
pub fn patch(&self, code: &Rule) -> bool {
|
||||
matches!(self.autofix, flags::Autofix::Enabled) && self.settings.rules.should_fix(code)
|
||||
self.autofix.into() && self.settings.rules.should_fix(code)
|
||||
}
|
||||
|
||||
/// Return `true` if the `Expr` is a reference to `typing.${target}`.
|
||||
@@ -230,11 +230,18 @@ impl<'a> Checker<'a> {
|
||||
|
||||
/// Return `true` if `member` is bound as a builtin.
|
||||
pub fn is_builtin(&self, member: &str) -> bool {
|
||||
self.find_binding(member).map_or(false, |binding| {
|
||||
matches!(binding.kind, BindingKind::Builtin)
|
||||
})
|
||||
self.find_binding(member)
|
||||
.map_or(false, |binding| binding.kind.is_builtin())
|
||||
}
|
||||
|
||||
/// Resolves the call path, e.g. if you have a file
|
||||
///
|
||||
/// ```python
|
||||
/// from sys import version_info as python_version
|
||||
/// print(python_version)
|
||||
/// ```
|
||||
///
|
||||
/// then `python_version` from the print statement will resolve to `sys.version_info`.
|
||||
pub fn resolve_call_path<'b>(&'a self, value: &'b Expr) -> Option<CallPath<'a>>
|
||||
where
|
||||
'b: 'a,
|
||||
@@ -296,7 +303,7 @@ impl<'a> Checker<'a> {
|
||||
// members from the fix that will eventually be excluded by a `noqa`.
|
||||
// Unfortunately, we _do_ want to register a `Diagnostic` for each
|
||||
// eventually-ignored import, so that our `noqa` counts are accurate.
|
||||
if matches!(self.noqa, flags::Noqa::Disabled) {
|
||||
if !self.noqa.to_bool() {
|
||||
return false;
|
||||
}
|
||||
noqa::rule_is_ignored(code, lineno, self.noqa_line_for, self.locator)
|
||||
@@ -342,7 +349,7 @@ where
|
||||
match &stmt.node {
|
||||
StmtKind::Global { names } => {
|
||||
let scope_index = *self.scope_stack.last().expect("No current scope found");
|
||||
let ranges = helpers::find_names(stmt, self.locator);
|
||||
let ranges: Vec<Range> = helpers::find_names(stmt, self.locator).collect();
|
||||
if scope_index != GLOBAL_SCOPE_INDEX {
|
||||
// Add the binding to the current scope.
|
||||
let context = self.execution_context();
|
||||
@@ -372,7 +379,7 @@ where
|
||||
}
|
||||
StmtKind::Nonlocal { names } => {
|
||||
let scope_index = *self.scope_stack.last().expect("No current scope found");
|
||||
let ranges = helpers::find_names(stmt, self.locator);
|
||||
let ranges: Vec<Range> = helpers::find_names(stmt, self.locator).collect();
|
||||
if scope_index != GLOBAL_SCOPE_INDEX {
|
||||
let context = self.execution_context();
|
||||
let scope = &mut self.scopes[scope_index];
|
||||
@@ -538,6 +545,15 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(self, body);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::NonEmptyStubBody) {
|
||||
flake8_pyi::rules::non_empty_stub_body(self, body);
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::DunderFunctionName) {
|
||||
if let Some(diagnostic) = pep8_naming::rules::dunder_function_name(
|
||||
self.current_scope(),
|
||||
@@ -549,6 +565,10 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
pylint::rules::global_statement(self, name);
|
||||
}
|
||||
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
@@ -805,6 +825,21 @@ where
|
||||
self, body,
|
||||
));
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::ExcludeWithModelForm) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::exclude_with_model_form(self, bases, body)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AllWithModelForm) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::all_with_model_form(self, bases, body)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::ModelWithoutDunderStr) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::model_without_dunder_str(self, bases, body, stmt)
|
||||
@@ -812,6 +847,9 @@ where
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
pylint::rules::global_statement(self, name);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::UselessObjectInheritance) {
|
||||
pyupgrade::rules::useless_object_inheritance(self, stmt, name, bases, keywords);
|
||||
}
|
||||
@@ -866,6 +904,11 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::PassStatementStubBody) {
|
||||
flake8_pyi::rules::pass_statement_stub_body(self, body);
|
||||
}
|
||||
}
|
||||
|
||||
if self
|
||||
.settings
|
||||
@@ -910,6 +953,17 @@ where
|
||||
{
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt);
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
if let Some(asname) = name.node.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
pylint::rules::global_statement(self, &name.node.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::RewriteCElementTree) {
|
||||
pyupgrade::rules::replace_c_element_tree(self, stmt);
|
||||
}
|
||||
@@ -917,6 +971,13 @@ where
|
||||
pyupgrade::rules::rewrite_mock_import(self, stmt);
|
||||
}
|
||||
|
||||
// If a module is imported within a `ModuleNotFoundError` body, treat that as a
|
||||
// synthetic usage.
|
||||
let is_handled = self
|
||||
.handled_exceptions
|
||||
.iter()
|
||||
.any(|exceptions| exceptions.contains(Exceptions::MODULE_NOT_FOUND_ERROR));
|
||||
|
||||
for alias in names {
|
||||
if alias.node.name == "__future__" {
|
||||
let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
|
||||
@@ -959,7 +1020,18 @@ where
|
||||
Binding {
|
||||
kind: BindingKind::SubmoduleImportation(name, full_name),
|
||||
runtime_usage: None,
|
||||
synthetic_usage: None,
|
||||
synthetic_usage: if is_handled {
|
||||
Some((
|
||||
self.scopes[*(self
|
||||
.scope_stack
|
||||
.last()
|
||||
.expect("No current scope found"))]
|
||||
.id,
|
||||
Range::from_located(alias),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
typing_usage: None,
|
||||
range: Range::from_located(alias),
|
||||
source: Some(self.current_stmt().clone()),
|
||||
@@ -967,6 +1039,14 @@ where
|
||||
},
|
||||
);
|
||||
} else {
|
||||
// Treat explicit re-export as usage (e.g., `from .applications
|
||||
// import FastAPI as FastAPI`).
|
||||
let is_explicit_reexport = alias
|
||||
.node
|
||||
.asname
|
||||
.as_ref()
|
||||
.map_or(false, |asname| asname == &alias.node.name);
|
||||
|
||||
// Given `import foo`, `name` and `full_name` would both be `foo`.
|
||||
// Given `import foo as bar`, `name` would be `bar` and `full_name` would
|
||||
// be `foo`.
|
||||
@@ -977,14 +1057,7 @@ where
|
||||
Binding {
|
||||
kind: BindingKind::Importation(name, full_name),
|
||||
runtime_usage: None,
|
||||
// Treat explicit re-export as usage (e.g., `import applications
|
||||
// as applications`).
|
||||
synthetic_usage: if alias
|
||||
.node
|
||||
.asname
|
||||
.as_ref()
|
||||
.map_or(false, |asname| asname == &alias.node.name)
|
||||
{
|
||||
synthetic_usage: if is_handled || is_explicit_reexport {
|
||||
Some((
|
||||
self.scopes[*(self
|
||||
.scope_stack
|
||||
@@ -1167,6 +1240,16 @@ where
|
||||
pycodestyle::rules::module_import_not_at_top_of_file(self, stmt);
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for name in names.iter() {
|
||||
if let Some(asname) = name.node.asname.as_ref() {
|
||||
pylint::rules::global_statement(self, asname);
|
||||
} else {
|
||||
pylint::rules::global_statement(self, &name.node.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::UnnecessaryFutureImport)
|
||||
&& self.settings.target_version >= PythonVersion::Py37
|
||||
{
|
||||
@@ -1230,6 +1313,13 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
// If a module is imported within a `ModuleNotFoundError` body, treat that as a
|
||||
// synthetic usage.
|
||||
let is_handled = self
|
||||
.handled_exceptions
|
||||
.iter()
|
||||
.any(|exceptions| exceptions.contains(Exceptions::MODULE_NOT_FOUND_ERROR));
|
||||
|
||||
for alias in names {
|
||||
if let Some("__future__") = module.as_deref() {
|
||||
let name = alias.node.asname.as_ref().unwrap_or(&alias.node.name);
|
||||
@@ -1276,7 +1366,18 @@ where
|
||||
Binding {
|
||||
kind: BindingKind::StarImportation(*level, module.clone()),
|
||||
runtime_usage: None,
|
||||
synthetic_usage: None,
|
||||
synthetic_usage: if is_handled {
|
||||
Some((
|
||||
self.scopes[*(self
|
||||
.scope_stack
|
||||
.last()
|
||||
.expect("No current scope found"))]
|
||||
.id,
|
||||
Range::from_located(alias),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
typing_usage: None,
|
||||
range: Range::from_located(stmt),
|
||||
source: Some(self.current_stmt().clone()),
|
||||
@@ -1320,6 +1421,14 @@ where
|
||||
self.check_builtin_shadowing(asname, stmt, false);
|
||||
}
|
||||
|
||||
// Treat explicit re-export as usage (e.g., `from .applications
|
||||
// import FastAPI as FastAPI`).
|
||||
let is_explicit_reexport = alias
|
||||
.node
|
||||
.asname
|
||||
.as_ref()
|
||||
.map_or(false, |asname| asname == &alias.node.name);
|
||||
|
||||
// Given `from foo import bar`, `name` would be "bar" and `full_name` would
|
||||
// be "foo.bar". Given `from foo import bar as baz`, `name` would be "baz"
|
||||
// and `full_name` would be "foo.bar".
|
||||
@@ -1329,33 +1438,25 @@ where
|
||||
module.as_deref(),
|
||||
&alias.node.name,
|
||||
);
|
||||
let range = Range::from_located(alias);
|
||||
self.add_binding(
|
||||
name,
|
||||
Binding {
|
||||
kind: BindingKind::FromImportation(name, full_name),
|
||||
runtime_usage: None,
|
||||
// Treat explicit re-export as usage (e.g., `from .applications
|
||||
// import FastAPI as FastAPI`).
|
||||
synthetic_usage: if alias
|
||||
.node
|
||||
.asname
|
||||
.as_ref()
|
||||
.map_or(false, |asname| asname == &alias.node.name)
|
||||
{
|
||||
synthetic_usage: if is_handled || is_explicit_reexport {
|
||||
Some((
|
||||
self.scopes[*(self
|
||||
.scope_stack
|
||||
.last()
|
||||
.expect("No current scope found"))]
|
||||
.id,
|
||||
range,
|
||||
Range::from_located(alias),
|
||||
))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
typing_usage: None,
|
||||
range,
|
||||
range: Range::from_located(alias),
|
||||
source: Some(self.current_stmt().clone()),
|
||||
context: self.execution_context(),
|
||||
},
|
||||
@@ -1549,6 +1650,12 @@ where
|
||||
}
|
||||
StmtKind::AugAssign { target, .. } => {
|
||||
self.handle_node_load(target);
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::If { test, body, orelse } => {
|
||||
if self.settings.rules.enabled(&Rule::IfTuple) {
|
||||
@@ -1572,10 +1679,17 @@ where
|
||||
);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::NeedlessBool) {
|
||||
flake8_simplify::rules::return_bool_condition_directly(self, stmt);
|
||||
flake8_simplify::rules::needless_bool(self, stmt);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::ManualDictLookup) {
|
||||
flake8_simplify::rules::manual_dict_lookup(self, stmt, test, body, orelse);
|
||||
flake8_simplify::rules::manual_dict_lookup(
|
||||
self,
|
||||
stmt,
|
||||
test,
|
||||
body,
|
||||
orelse,
|
||||
self.current_stmt_parent().map(std::convert::Into::into),
|
||||
);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::UseTernaryOperator) {
|
||||
flake8_simplify::rules::use_ternary_operator(
|
||||
@@ -1606,6 +1720,13 @@ where
|
||||
if self.settings.rules.enabled(&Rule::OutdatedVersionBlock) {
|
||||
pyupgrade::rules::outdated_version_block(self, stmt, test, body, orelse);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::CollapsibleElseIf) {
|
||||
if let Some(diagnostic) =
|
||||
pylint::rules::collapsible_else_if(orelse, self.locator)
|
||||
{
|
||||
self.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::Assert { test, msg } => {
|
||||
if self.settings.rules.enabled(&Rule::AssertTuple) {
|
||||
@@ -1651,6 +1772,9 @@ where
|
||||
self.current_stmt_parent().map(Into::into),
|
||||
);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::RedefinedLoopName) {
|
||||
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
|
||||
}
|
||||
}
|
||||
StmtKind::While { body, orelse, .. } => {
|
||||
if self.settings.rules.enabled(&Rule::FunctionUsesLoopVariable) {
|
||||
@@ -1695,6 +1819,9 @@ where
|
||||
if self.settings.rules.enabled(&Rule::UselessElseOnLoop) {
|
||||
pylint::rules::useless_else_on_loop(self, stmt, body, orelse);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::RedefinedLoopName) {
|
||||
pylint::rules::redefined_loop_name(self, &Node::Stmt(stmt));
|
||||
}
|
||||
if matches!(stmt.node, StmtKind::For { .. }) {
|
||||
if self.settings.rules.enabled(&Rule::ReimplementedBuiltin) {
|
||||
flake8_simplify::rules::convert_for_loop_to_any_all(
|
||||
@@ -1806,6 +1933,14 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::UselessMetaclassType) {
|
||||
pyupgrade::rules::useless_metaclass_type(self, stmt, value, targets);
|
||||
}
|
||||
@@ -1856,7 +1991,15 @@ where
|
||||
);
|
||||
}
|
||||
}
|
||||
StmtKind::Delete { .. } => {}
|
||||
StmtKind::Delete { targets } => {
|
||||
if self.settings.rules.enabled(&Rule::GlobalStatement) {
|
||||
for target in targets.iter() {
|
||||
if let ExprKind::Name { id, .. } = &target.node {
|
||||
pylint::rules::global_statement(self, id);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
StmtKind::Expr { value, .. } => {
|
||||
if self.settings.rules.enabled(&Rule::UselessComparison) {
|
||||
flake8_bugbear::rules::useless_comparison(self, value);
|
||||
@@ -1923,9 +2066,7 @@ where
|
||||
if self.scopes[GLOBAL_SCOPE_INDEX]
|
||||
.bindings
|
||||
.get(name)
|
||||
.map_or(true, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
})
|
||||
.map_or(true, |index| self.bindings[*index].kind.is_annotation())
|
||||
{
|
||||
let index = self.bindings.len();
|
||||
self.bindings.push(Binding {
|
||||
@@ -1987,9 +2128,7 @@ where
|
||||
if self.scopes[GLOBAL_SCOPE_INDEX]
|
||||
.bindings
|
||||
.get(name)
|
||||
.map_or(true, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
})
|
||||
.map_or(true, |index| self.bindings[*index].kind.is_annotation())
|
||||
{
|
||||
let index = self.bindings.len();
|
||||
self.bindings.push(Binding {
|
||||
@@ -2027,17 +2166,23 @@ where
|
||||
orelse,
|
||||
finalbody,
|
||||
} => {
|
||||
// TODO(charlie): The use of `smallvec` here leads to a lifetime issue.
|
||||
let handler_names = extract_handler_names(handlers)
|
||||
.into_iter()
|
||||
.map(|call_path| call_path.to_vec())
|
||||
.collect();
|
||||
self.except_handlers.push(handler_names);
|
||||
let mut handled_exceptions = Exceptions::empty();
|
||||
for type_ in extract_handled_exceptions(handlers) {
|
||||
if let Some(call_path) = self.resolve_call_path(type_) {
|
||||
if call_path.as_slice() == ["", "NameError"] {
|
||||
handled_exceptions |= Exceptions::NAME_ERROR;
|
||||
} else if call_path.as_slice() == ["", "ModuleNotFoundError"] {
|
||||
handled_exceptions |= Exceptions::MODULE_NOT_FOUND_ERROR;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.handled_exceptions.push(handled_exceptions);
|
||||
if self.settings.rules.enabled(&Rule::JumpStatementInFinally) {
|
||||
flake8_bugbear::rules::jump_statement_in_finally(self, finalbody);
|
||||
}
|
||||
self.visit_body(body);
|
||||
self.except_handlers.pop();
|
||||
self.handled_exceptions.pop();
|
||||
|
||||
self.in_exception_handler = true;
|
||||
for excepthandler in handlers {
|
||||
@@ -2054,8 +2199,8 @@ where
|
||||
value,
|
||||
..
|
||||
} => {
|
||||
// If we're in a class or module scope, then the annotation needs to be available
|
||||
// at runtime.
|
||||
// If we're in a class or module scope, then the annotation needs to be
|
||||
// available at runtime.
|
||||
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
|
||||
if !self.annotations_future_enabled
|
||||
&& matches!(
|
||||
@@ -2172,8 +2317,9 @@ where
|
||||
// Pre-visit.
|
||||
match &expr.node {
|
||||
ExprKind::Subscript { value, slice, .. } => {
|
||||
// Ex) Optional[...]
|
||||
if !self.in_deferred_string_type_definition
|
||||
// Ex) Optional[...], Union[...]
|
||||
if self.in_type_definition
|
||||
&& !self.in_deferred_string_type_definition
|
||||
&& !self.settings.pyupgrade.keep_runtime_typing
|
||||
&& self.settings.rules.enabled(&Rule::TypingUnion)
|
||||
&& (self.settings.target_version >= PythonVersion::Py310
|
||||
@@ -2438,6 +2584,9 @@ where
|
||||
if self.settings.rules.enabled(&Rule::OSErrorAlias) {
|
||||
pyupgrade::rules::os_error_alias(self, &expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::IsinstanceWithTuple) {
|
||||
pyupgrade::rules::use_pep604_isinstance(self, expr, func, args);
|
||||
}
|
||||
|
||||
// flake8-print
|
||||
if self.settings.rules.enabled(&Rule::PrintFound)
|
||||
@@ -2485,6 +2634,13 @@ where
|
||||
if self.settings.rules.enabled(&Rule::UnnecessaryDictKwargs) {
|
||||
flake8_pie::rules::no_unnecessary_dict_kwargs(self, expr, keywords);
|
||||
}
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
.enabled(&Rule::UnnecessaryComprehensionAnyAll)
|
||||
{
|
||||
flake8_pie::rules::unnecessary_comprehension_any_all(self, expr, func, args);
|
||||
}
|
||||
|
||||
// flake8-bandit
|
||||
if self.settings.rules.enabled(&Rule::ExecBuiltin) {
|
||||
@@ -2845,18 +3001,6 @@ where
|
||||
flake8_pytest_style::rules::fail_call(self, func, args, keywords);
|
||||
}
|
||||
|
||||
// ruff
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
.enabled(&Rule::KeywordArgumentBeforeStarArgument)
|
||||
{
|
||||
self.diagnostics
|
||||
.extend(ruff::rules::keyword_argument_before_star_argument(
|
||||
args, keywords,
|
||||
));
|
||||
}
|
||||
|
||||
// flake8-simplify
|
||||
if self
|
||||
.settings
|
||||
@@ -2912,6 +3056,18 @@ where
|
||||
{
|
||||
flake8_logging_format::rules::logging_call(self, func, args, keywords);
|
||||
}
|
||||
|
||||
// pylint logging checker
|
||||
if self.settings.rules.enabled(&Rule::LoggingTooFewArgs)
|
||||
|| self.settings.rules.enabled(&Rule::LoggingTooManyArgs)
|
||||
{
|
||||
pylint::rules::logging_call(self, func, args, keywords);
|
||||
}
|
||||
|
||||
// flake8-django
|
||||
if self.settings.rules.enabled(&Rule::LocalsInRenderFunction) {
|
||||
flake8_django::rules::locals_in_render_function(self, func, args, keywords);
|
||||
}
|
||||
}
|
||||
ExprKind::Dict { keys, values } => {
|
||||
if self
|
||||
@@ -3278,6 +3434,16 @@ where
|
||||
comparators,
|
||||
);
|
||||
}
|
||||
|
||||
if self.settings.rules.enabled(&Rule::BadVersionInfoComparison) {
|
||||
flake8_pyi::rules::bad_version_info_comparison(
|
||||
self,
|
||||
expr,
|
||||
left,
|
||||
ops,
|
||||
comparators,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
ExprKind::Constant {
|
||||
@@ -3405,17 +3571,17 @@ where
|
||||
if self.settings.rules.enabled(&Rule::CompareWithTuple) {
|
||||
flake8_simplify::rules::compare_with_tuple(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AAndNotA) {
|
||||
flake8_simplify::rules::a_and_not_a(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprAndNotExpr) {
|
||||
flake8_simplify::rules::expr_and_not_expr(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AOrNotA) {
|
||||
flake8_simplify::rules::a_or_not_a(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprOrNotExpr) {
|
||||
flake8_simplify::rules::expr_or_not_expr(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::OrTrue) {
|
||||
flake8_simplify::rules::or_true(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprOrTrue) {
|
||||
flake8_simplify::rules::expr_or_true(self, expr);
|
||||
}
|
||||
if self.settings.rules.enabled(&Rule::AndFalse) {
|
||||
flake8_simplify::rules::and_false(self, expr);
|
||||
if self.settings.rules.enabled(&Rule::ExprAndFalse) {
|
||||
flake8_simplify::rules::expr_and_false(self, expr);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -3844,6 +4010,10 @@ where
|
||||
fn visit_pattern(&mut self, pattern: &'b Pattern) {
|
||||
if let PatternKind::MatchAs {
|
||||
name: Some(name), ..
|
||||
}
|
||||
| PatternKind::MatchStar { name: Some(name) }
|
||||
| PatternKind::MatchMapping {
|
||||
rest: Some(name), ..
|
||||
} = &pattern.node
|
||||
{
|
||||
self.add_binding(
|
||||
@@ -3897,6 +4067,21 @@ where
|
||||
flake8_bugbear::rules::function_call_argument_default(self, arguments);
|
||||
}
|
||||
|
||||
if self.is_interface_definition {
|
||||
if self
|
||||
.settings
|
||||
.rules
|
||||
.enabled(&Rule::TypedArgumentSimpleDefaults)
|
||||
{
|
||||
flake8_pyi::rules::typed_argument_simple_defaults(self, arguments);
|
||||
}
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::ArgumentSimpleDefaults) {
|
||||
flake8_pyi::rules::argument_simple_defaults(self, arguments);
|
||||
}
|
||||
}
|
||||
|
||||
// Bind, but intentionally avoid walking default expressions, as we handle them
|
||||
// upstream.
|
||||
for arg in &arguments.posonlyargs {
|
||||
@@ -4113,7 +4298,7 @@ impl<'a> Checker<'a> {
|
||||
let existing_binding_index = self.scopes[*scope_index].bindings.get(&name).unwrap();
|
||||
let existing = &self.bindings[*existing_binding_index];
|
||||
let in_current_scope = stack_index == 0;
|
||||
if !matches!(existing.kind, BindingKind::Builtin)
|
||||
if !existing.kind.is_builtin()
|
||||
&& existing.source.as_ref().map_or(true, |left| {
|
||||
binding.source.as_ref().map_or(true, |right| {
|
||||
!branch_detection::different_forks(
|
||||
@@ -4133,7 +4318,7 @@ impl<'a> Checker<'a> {
|
||||
| BindingKind::StarImportation(..)
|
||||
| BindingKind::FutureImportation
|
||||
);
|
||||
if matches!(binding.kind, BindingKind::LoopVar) && existing_is_import {
|
||||
if binding.kind.is_loop_var() && existing_is_import {
|
||||
if self.settings.rules.enabled(&Rule::ImportShadowedByLoopVar) {
|
||||
self.diagnostics.push(Diagnostic::new(
|
||||
pyflakes::rules::ImportShadowedByLoopVar {
|
||||
@@ -4147,7 +4332,7 @@ impl<'a> Checker<'a> {
|
||||
if !existing.used()
|
||||
&& binding.redefines(existing)
|
||||
&& (!self.settings.dummy_variable_rgx.is_match(name) || existing_is_import)
|
||||
&& !(matches!(existing.kind, BindingKind::FunctionDefinition)
|
||||
&& !(existing.kind.is_function_definition()
|
||||
&& visibility::is_overload(
|
||||
self,
|
||||
cast::decorator_list(existing.source.as_ref().unwrap()),
|
||||
@@ -4182,36 +4367,29 @@ impl<'a> Checker<'a> {
|
||||
|
||||
let scope = self.current_scope();
|
||||
let binding = if let Some(index) = scope.bindings.get(&name) {
|
||||
if matches!(self.bindings[*index].kind, BindingKind::Builtin) {
|
||||
// Avoid overriding builtins.
|
||||
binding
|
||||
} else if matches!(self.bindings[*index].kind, BindingKind::Global) {
|
||||
// If the original binding was a global, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also a global.
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
kind: BindingKind::Global,
|
||||
..binding
|
||||
let existing = &self.bindings[*index];
|
||||
match &existing.kind {
|
||||
BindingKind::Builtin => {
|
||||
// Avoid overriding builtins.
|
||||
binding
|
||||
}
|
||||
} else if matches!(self.bindings[*index].kind, BindingKind::Nonlocal) {
|
||||
// If the original binding was a nonlocal, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also a nonlocal.
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
kind: BindingKind::Nonlocal,
|
||||
..binding
|
||||
kind @ (BindingKind::Global | BindingKind::Nonlocal) => {
|
||||
// If the original binding was a global or nonlocal, and the new binding conflicts within
|
||||
// the current scope, then the new binding is also as the same.
|
||||
Binding {
|
||||
runtime_usage: existing.runtime_usage,
|
||||
synthetic_usage: existing.synthetic_usage,
|
||||
typing_usage: existing.typing_usage,
|
||||
kind: kind.clone(),
|
||||
..binding
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Binding {
|
||||
runtime_usage: self.bindings[*index].runtime_usage,
|
||||
synthetic_usage: self.bindings[*index].synthetic_usage,
|
||||
typing_usage: self.bindings[*index].typing_usage,
|
||||
_ => Binding {
|
||||
runtime_usage: existing.runtime_usage,
|
||||
synthetic_usage: existing.synthetic_usage,
|
||||
typing_usage: existing.typing_usage,
|
||||
..binding
|
||||
}
|
||||
},
|
||||
}
|
||||
} else {
|
||||
binding
|
||||
@@ -4220,7 +4398,7 @@ impl<'a> Checker<'a> {
|
||||
// Don't treat annotations as assignments if there is an existing value
|
||||
// in scope.
|
||||
let scope = &mut self.scopes[*(self.scope_stack.last().expect("No current scope found"))];
|
||||
if !(matches!(binding.kind, BindingKind::Annotation) && scope.bindings.contains_key(name)) {
|
||||
if !(binding.kind.is_annotation() && scope.bindings.contains_key(name)) {
|
||||
if let Some(rebound_index) = scope.bindings.insert(name, binding_index) {
|
||||
scope
|
||||
.rebounds
|
||||
@@ -4259,7 +4437,7 @@ impl<'a> Checker<'a> {
|
||||
let context = self.execution_context();
|
||||
self.bindings[*index].mark_used(scope_id, Range::from_located(expr), context);
|
||||
|
||||
if matches!(self.bindings[*index].kind, BindingKind::Annotation)
|
||||
if self.bindings[*index].kind.is_annotation()
|
||||
&& !self.in_deferred_string_type_definition
|
||||
&& !self.in_deferred_type_definition
|
||||
{
|
||||
@@ -4362,13 +4540,12 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
|
||||
// Avoid flagging if NameError is handled.
|
||||
if let Some(handler_names) = self.except_handlers.last() {
|
||||
if handler_names
|
||||
.iter()
|
||||
.any(|call_path| call_path.as_slice() == ["NameError"])
|
||||
{
|
||||
return;
|
||||
}
|
||||
if self
|
||||
.handled_exceptions
|
||||
.iter()
|
||||
.any(|handler_names| handler_names.contains(Exceptions::NAME_ERROR))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
self.diagnostics.push(Diagnostic::new(
|
||||
@@ -4407,9 +4584,7 @@ impl<'a> Checker<'a> {
|
||||
.current_scope()
|
||||
.bindings
|
||||
.get(id)
|
||||
.map_or(false, |index| {
|
||||
matches!(self.bindings[*index].kind, BindingKind::Global)
|
||||
})
|
||||
.map_or(false, |index| self.bindings[*index].kind.is_global())
|
||||
{
|
||||
pep8_naming::rules::non_lowercase_variable_in_function(self, expr, parent, id);
|
||||
}
|
||||
@@ -4912,7 +5087,7 @@ impl<'a> Checker<'a> {
|
||||
{
|
||||
for (name, index) in &scope.bindings {
|
||||
let binding = &self.bindings[*index];
|
||||
if matches!(binding.kind, BindingKind::Global) {
|
||||
if binding.kind.is_global() {
|
||||
if let Some(stmt) = &binding.source {
|
||||
if matches!(stmt.node, StmtKind::Global { .. }) {
|
||||
diagnostics.push(Diagnostic::new(
|
||||
@@ -5336,9 +5511,22 @@ impl<'a> Checker<'a> {
|
||||
}
|
||||
overloaded_name = flake8_annotations::helpers::overloaded_name(self, &definition);
|
||||
}
|
||||
if self.is_interface_definition {
|
||||
if self.settings.rules.enabled(&Rule::DocstringInStub) {
|
||||
flake8_pyi::rules::docstring_in_stubs(self, definition.docstring);
|
||||
}
|
||||
}
|
||||
|
||||
// pydocstyle
|
||||
if enforce_docstrings {
|
||||
if pydocstyle::helpers::should_ignore_definition(
|
||||
self,
|
||||
&definition,
|
||||
&self.settings.pydocstyle.ignore_decorators,
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if definition.docstring.is_none() {
|
||||
pydocstyle::rules::not_missing(self, &definition, &visibility);
|
||||
continue;
|
||||
|
||||
@@ -7,7 +7,8 @@ use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::rules::pycodestyle::logical_lines::{iter_logical_lines, TokenFlags};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
extraneous_whitespace, indentation, space_around_operator, whitespace_around_keywords,
|
||||
extraneous_whitespace, indentation, missing_whitespace_after_keyword, space_around_operator,
|
||||
whitespace_around_keywords, whitespace_around_named_parameter_equals,
|
||||
whitespace_before_comment,
|
||||
};
|
||||
use crate::settings::Settings;
|
||||
@@ -106,6 +107,18 @@ pub fn check_logical_lines(
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (location, kind) in missing_whitespace_after_keyword(&line.tokens) {
|
||||
if settings.rules.enabled(kind.rule()) {
|
||||
diagnostics.push(Diagnostic {
|
||||
kind,
|
||||
location,
|
||||
end_location: location,
|
||||
fix: None,
|
||||
parent: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if line.flags.contains(TokenFlags::COMMENT) {
|
||||
for (range, kind) in whitespace_before_comment(&line.tokens, locator) {
|
||||
@@ -120,6 +133,21 @@ pub fn check_logical_lines(
|
||||
}
|
||||
}
|
||||
}
|
||||
if line.flags.contains(TokenFlags::OPERATOR) {
|
||||
for (location, kind) in
|
||||
whitespace_around_named_parameter_equals(&line.tokens, &line.text)
|
||||
{
|
||||
if settings.rules.enabled(kind.rule()) {
|
||||
diagnostics.push(Diagnostic {
|
||||
kind,
|
||||
location,
|
||||
end_location: location,
|
||||
fix: None,
|
||||
parent: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (index, kind) in indentation(
|
||||
&line,
|
||||
@@ -152,8 +180,8 @@ pub fn check_logical_lines(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use crate::checkers::logical_lines::iter_logical_lines;
|
||||
use crate::source_code::Locator;
|
||||
@@ -164,7 +192,7 @@ mod tests {
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -185,7 +213,7 @@ x = [
|
||||
]
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -199,7 +227,7 @@ z = x + 1"#;
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let contents = "x = 'abc'";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -212,7 +240,7 @@ z = x + 1"#;
|
||||
def f():
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -227,7 +255,7 @@ def f():
|
||||
# Comment goes here.
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
|
||||
@@ -148,9 +148,7 @@ pub fn check_noqa(
|
||||
UnusedNOQA { codes: None },
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
Location::new(row + 1, lines[row].chars().count()),
|
||||
@@ -217,9 +215,7 @@ pub fn check_noqa(
|
||||
},
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
if valid_codes.is_empty() {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
|
||||
@@ -8,7 +8,8 @@ use crate::rules::flake8_executable::rules::{
|
||||
shebang_missing, shebang_newline, shebang_not_executable, shebang_python, shebang_whitespace,
|
||||
};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
doc_line_too_long, line_too_long, mixed_spaces_and_tabs, no_newline_at_end_of_file,
|
||||
doc_line_too_long, indentation_contains_tabs, line_too_long, mixed_spaces_and_tabs,
|
||||
no_newline_at_end_of_file, trailing_whitespace,
|
||||
};
|
||||
use crate::rules::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
|
||||
use crate::rules::pylint;
|
||||
@@ -41,11 +42,15 @@ pub fn check_physical_lines(
|
||||
let enforce_unnecessary_coding_comment = settings.rules.enabled(&Rule::UTF8EncodingDeclaration);
|
||||
let enforce_mixed_spaces_and_tabs = settings.rules.enabled(&Rule::MixedSpacesAndTabs);
|
||||
let enforce_bidirectional_unicode = settings.rules.enabled(&Rule::BidirectionalUnicode);
|
||||
let enforce_trailing_whitespace = settings.rules.enabled(&Rule::TrailingWhitespace);
|
||||
let enforce_blank_line_contains_whitespace =
|
||||
settings.rules.enabled(&Rule::BlankLineContainsWhitespace);
|
||||
let enforce_indentation_contains_tabs = settings.rules.enabled(&Rule::IndentationContainsTabs);
|
||||
|
||||
let fix_unnecessary_coding_comment = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
let fix_unnecessary_coding_comment =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
|
||||
let mut commented_lines_iter = commented_lines.iter().peekable();
|
||||
let mut doc_lines_iter = doc_lines.iter().peekable();
|
||||
@@ -139,14 +144,25 @@ pub fn check_physical_lines(
|
||||
if enforce_bidirectional_unicode {
|
||||
diagnostics.extend(pylint::rules::bidirectional_unicode(index, line));
|
||||
}
|
||||
|
||||
if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
|
||||
if let Some(diagnostic) = trailing_whitespace(index, line, settings, autofix) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
|
||||
if enforce_indentation_contains_tabs {
|
||||
if let Some(diagnostic) = indentation_contains_tabs(index, line) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if enforce_no_newline_at_end_of_file {
|
||||
if let Some(diagnostic) = no_newline_at_end_of_file(
|
||||
stylist,
|
||||
contents,
|
||||
matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
|
||||
autofix.into() && settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
|
||||
) {
|
||||
diagnostics.push(diagnostic);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
//! Lint rules based on token traversal.
|
||||
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::lex::docstring_detection::StateMachine;
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
@@ -107,8 +108,7 @@ pub fn check_tokens(
|
||||
locator,
|
||||
*start,
|
||||
*end,
|
||||
matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::InvalidEscapeSequence),
|
||||
autofix.into() && settings.rules.should_fix(&Rule::InvalidEscapeSequence),
|
||||
));
|
||||
}
|
||||
}
|
||||
@@ -147,7 +147,7 @@ pub fn check_tokens(
|
||||
// COM812, COM818, COM819
|
||||
if enforce_trailing_comma {
|
||||
diagnostics.extend(
|
||||
flake8_commas::rules::trailing_commas(tokens, settings, autofix)
|
||||
flake8_commas::rules::trailing_commas(tokens, locator, settings, autofix)
|
||||
.into_iter()
|
||||
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
|
||||
);
|
||||
|
||||
@@ -37,6 +37,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E224") => Rule::TabAfterOperator,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E251") => Rule::UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E252") => Rule::MissingWhitespaceAroundParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E261") => Rule::TooFewSpacesBeforeInlineComment,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E262") => Rule::NoSpaceAfterInlineComment,
|
||||
@@ -52,6 +56,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pycodestyle, "E273") => Rule::TabAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E274") => Rule::TabBeforeKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
(Pycodestyle, "E275") => Rule::MissingWhitespaceAfterKeyword,
|
||||
(Pycodestyle, "E401") => Rule::MultipleImportsOnOneLine,
|
||||
(Pycodestyle, "E402") => Rule::ModuleImportNotAtTopOfFile,
|
||||
(Pycodestyle, "E501") => Rule::LineTooLong,
|
||||
@@ -72,7 +78,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pycodestyle, "E999") => Rule::SyntaxError,
|
||||
|
||||
// pycodestyle warnings
|
||||
(Pycodestyle, "W191") => Rule::IndentationContainsTabs,
|
||||
(Pycodestyle, "W291") => Rule::TrailingWhitespace,
|
||||
(Pycodestyle, "W292") => Rule::NoNewLineAtEndOfFile,
|
||||
(Pycodestyle, "W293") => Rule::BlankLineContainsWhitespace,
|
||||
(Pycodestyle, "W505") => Rule::DocLineTooLong,
|
||||
(Pycodestyle, "W605") => Rule::InvalidEscapeSequence,
|
||||
|
||||
@@ -126,6 +135,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pylint, "E0101") => Rule::ReturnInInit,
|
||||
(Pylint, "E0604") => Rule::InvalidAllObject,
|
||||
(Pylint, "E0605") => Rule::InvalidAllFormat,
|
||||
(Pylint, "E1205") => Rule::LoggingTooManyArgs,
|
||||
(Pylint, "E1206") => Rule::LoggingTooFewArgs,
|
||||
(Pylint, "E1307") => Rule::BadStringFormatType,
|
||||
(Pylint, "E2502") => Rule::BidirectionalUnicode,
|
||||
(Pylint, "E1310") => Rule::BadStrStripCall,
|
||||
@@ -139,13 +150,16 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pylint, "R0133") => Rule::ComparisonOfConstant,
|
||||
(Pylint, "R1701") => Rule::ConsiderMergingIsinstance,
|
||||
(Pylint, "R1722") => Rule::ConsiderUsingSysExit,
|
||||
(Pylint, "R5501") => Rule::CollapsibleElseIf,
|
||||
(Pylint, "R2004") => Rule::MagicValueComparison,
|
||||
(Pylint, "W0120") => Rule::UselessElseOnLoop,
|
||||
(Pylint, "W0602") => Rule::GlobalVariableNotAssigned,
|
||||
(Pylint, "W0603") => Rule::GlobalStatement,
|
||||
(Pylint, "R0911") => Rule::TooManyReturnStatements,
|
||||
(Pylint, "R0913") => Rule::TooManyArguments,
|
||||
(Pylint, "R0912") => Rule::TooManyBranches,
|
||||
(Pylint, "R0915") => Rule::TooManyStatements,
|
||||
(Pylint, "W2901") => Rule::RedefinedLoopName,
|
||||
|
||||
// flake8-builtins
|
||||
(Flake8Builtins, "001") => Rule::BuiltinVariableShadowing,
|
||||
@@ -287,10 +301,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Flake8Simplify, "210") => Rule::IfExprWithTrueFalse,
|
||||
(Flake8Simplify, "211") => Rule::IfExprWithFalseTrue,
|
||||
(Flake8Simplify, "212") => Rule::IfExprWithTwistedArms,
|
||||
(Flake8Simplify, "220") => Rule::AAndNotA,
|
||||
(Flake8Simplify, "221") => Rule::AOrNotA,
|
||||
(Flake8Simplify, "222") => Rule::OrTrue,
|
||||
(Flake8Simplify, "223") => Rule::AndFalse,
|
||||
(Flake8Simplify, "220") => Rule::ExprAndNotExpr,
|
||||
(Flake8Simplify, "221") => Rule::ExprOrNotExpr,
|
||||
(Flake8Simplify, "222") => Rule::ExprOrTrue,
|
||||
(Flake8Simplify, "223") => Rule::ExprAndFalse,
|
||||
(Flake8Simplify, "300") => Rule::YodaConditions,
|
||||
(Flake8Simplify, "401") => Rule::DictGetWithDefault,
|
||||
|
||||
@@ -330,6 +344,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Pyupgrade, "035") => Rule::ImportReplacements,
|
||||
(Pyupgrade, "036") => Rule::OutdatedVersionBlock,
|
||||
(Pyupgrade, "037") => Rule::QuotedAnnotation,
|
||||
(Pyupgrade, "038") => Rule::IsinstanceWithTuple,
|
||||
|
||||
// pydocstyle
|
||||
(Pydocstyle, "100") => Rule::PublicModule,
|
||||
@@ -478,8 +493,14 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
|
||||
// flake8-pyi
|
||||
(Flake8Pyi, "001") => Rule::PrefixTypeParams,
|
||||
(Flake8Pyi, "006") => Rule::BadVersionInfoComparison,
|
||||
(Flake8Pyi, "007") => Rule::UnrecognizedPlatformCheck,
|
||||
(Flake8Pyi, "008") => Rule::UnrecognizedPlatformName,
|
||||
(Flake8Pyi, "009") => Rule::PassStatementStubBody,
|
||||
(Flake8Pyi, "010") => Rule::NonEmptyStubBody,
|
||||
(Flake8Pyi, "011") => Rule::TypedArgumentSimpleDefaults,
|
||||
(Flake8Pyi, "014") => Rule::ArgumentSimpleDefaults,
|
||||
(Flake8Pyi, "021") => Rule::DocstringInStub,
|
||||
|
||||
// flake8-pytest-style
|
||||
(Flake8PytestStyle, "001") => Rule::IncorrectFixtureParenthesesStyle,
|
||||
@@ -513,6 +534,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Flake8Pie, "794") => Rule::DupeClassFieldDefinitions,
|
||||
(Flake8Pie, "796") => Rule::PreferUniqueEnums,
|
||||
(Flake8Pie, "800") => Rule::UnnecessarySpread,
|
||||
(Flake8Pie, "802") => Rule::UnnecessaryComprehensionAnyAll,
|
||||
(Flake8Pie, "804") => Rule::UnnecessaryDictKwargs,
|
||||
(Flake8Pie, "807") => Rule::PreferListBuiltin,
|
||||
(Flake8Pie, "810") => Rule::SingleStartsEndsWith,
|
||||
@@ -601,13 +623,15 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
|
||||
(Ruff, "001") => Rule::AmbiguousUnicodeCharacterString,
|
||||
(Ruff, "002") => Rule::AmbiguousUnicodeCharacterDocstring,
|
||||
(Ruff, "003") => Rule::AmbiguousUnicodeCharacterComment,
|
||||
(Ruff, "004") => Rule::KeywordArgumentBeforeStarArgument,
|
||||
(Ruff, "005") => Rule::UnpackInsteadOfConcatenatingToCollectionLiteral,
|
||||
(Ruff, "006") => Rule::AsyncioDanglingTask,
|
||||
(Ruff, "100") => Rule::UnusedNOQA,
|
||||
|
||||
// flake8-django
|
||||
(Flake8Django, "001") => Rule::NullableModelStringField,
|
||||
(Flake8Django, "003") => Rule::LocalsInRenderFunction,
|
||||
(Flake8Django, "006") => Rule::ExcludeWithModelForm,
|
||||
(Flake8Django, "007") => Rule::AllWithModelForm,
|
||||
(Flake8Django, "008") => Rule::ModelWithoutDunderStr,
|
||||
(Flake8Django, "013") => Rule::NonLeadingReceiverDecorator,
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
use bitflags::bitflags;
|
||||
use nohash_hasher::{IntMap, IntSet};
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::registry::LintSource;
|
||||
use crate::settings::Settings;
|
||||
|
||||
bitflags! {
|
||||
@@ -20,7 +20,7 @@ impl Flags {
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports))
|
||||
.any(|rule_code| rule_code.lint_source().is_imports())
|
||||
{
|
||||
Self::NOQA | Self::ISORT
|
||||
} else {
|
||||
@@ -150,56 +150,61 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use nohash_hasher::{IntMap, IntSet};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use crate::directives::{extract_isort_directives, extract_noqa_line_for};
|
||||
|
||||
#[test]
|
||||
fn noqa_extraction() {
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = 2
|
||||
z = x + 1
|
||||
",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
|
||||
y = 2
|
||||
z = x + 1
|
||||
",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = '''abc
|
||||
def
|
||||
ghi
|
||||
'''
|
||||
y = 2
|
||||
z = x + 1",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -207,13 +212,14 @@ z = x + 1",
|
||||
IntMap::from_iter([(1, 4), (2, 4), (3, 4)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = '''abc
|
||||
def
|
||||
ghi
|
||||
'''
|
||||
z = 2",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -221,12 +227,13 @@ z = 2",
|
||||
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
"x = 1
|
||||
y = '''abc
|
||||
def
|
||||
ghi
|
||||
'''",
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -234,17 +241,19 @@ ghi
|
||||
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"x = \
|
||||
1"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(extract_noqa_line_for(&lxr), IntMap::from_iter([(1, 2)]));
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"from foo import \
|
||||
bar as baz, \
|
||||
qux as quux"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -252,7 +261,7 @@ ghi
|
||||
IntMap::from_iter([(1, 3), (2, 3)])
|
||||
);
|
||||
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(
|
||||
let lxr: Vec<LexResult> = lexer::lex(
|
||||
r#"
|
||||
# Foo
|
||||
from foo import \
|
||||
@@ -262,6 +271,7 @@ x = \
|
||||
1
|
||||
y = \
|
||||
2"#,
|
||||
Mode::Module,
|
||||
)
|
||||
.collect();
|
||||
assert_eq!(
|
||||
@@ -275,7 +285,7 @@ y = \
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
|
||||
let contents = "# isort: off
|
||||
@@ -283,7 +293,7 @@ x = 1
|
||||
y = 2
|
||||
# isort: on
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4])
|
||||
@@ -296,7 +306,7 @@ y = 2
|
||||
# isort: on
|
||||
z = x + 1
|
||||
# isort: on";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4, 5])
|
||||
@@ -306,7 +316,7 @@ z = x + 1
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(
|
||||
extract_isort_directives(&lxr).exclusions,
|
||||
IntSet::from_iter([2, 3, 4])
|
||||
@@ -316,7 +326,7 @@ z = x + 1";
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
|
||||
let contents = "# isort: off
|
||||
@@ -325,7 +335,7 @@ x = 1
|
||||
y = 2
|
||||
# isort: skip_file
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
|
||||
}
|
||||
|
||||
@@ -334,20 +344,20 @@ z = x + 1";
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, Vec::<usize>::new());
|
||||
|
||||
let contents = "x = 1
|
||||
y = 2
|
||||
# isort: split
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, vec![3]);
|
||||
|
||||
let contents = "x = 1
|
||||
y = 2 # isort: split
|
||||
z = x + 1";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
assert_eq!(extract_isort_directives(&lxr).splits, vec![2]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,62 @@
|
||||
//! Doc line extraction. In this context, a doc line is a line consisting of a
|
||||
//! standalone comment or a constant string statement.
|
||||
|
||||
use std::iter::FusedIterator;
|
||||
|
||||
use rustpython_parser::ast::{Constant, ExprKind, Stmt, StmtKind, Suite};
|
||||
use rustpython_parser::lexer::{LexResult, Tok};
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
use crate::ast::visitor;
|
||||
use crate::ast::visitor::Visitor;
|
||||
|
||||
/// Extract doc lines (standalone comments) from a token sequence.
|
||||
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> Vec<usize> {
|
||||
let mut doc_lines: Vec<usize> = Vec::default();
|
||||
let mut prev: Option<usize> = None;
|
||||
for (start, tok, end) in lxr.iter().flatten() {
|
||||
if matches!(tok, Tok::Indent | Tok::Dedent | Tok::Newline) {
|
||||
continue;
|
||||
}
|
||||
if matches!(tok, Tok::Comment(..)) {
|
||||
if let Some(prev) = prev {
|
||||
if start.row() > prev {
|
||||
doc_lines.push(start.row());
|
||||
}
|
||||
} else {
|
||||
doc_lines.push(start.row());
|
||||
}
|
||||
}
|
||||
prev = Some(end.row());
|
||||
}
|
||||
doc_lines
|
||||
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
|
||||
DocLines::new(lxr)
|
||||
}
|
||||
|
||||
pub struct DocLines<'a> {
|
||||
inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
|
||||
prev: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'a> DocLines<'a> {
|
||||
fn new(lxr: &'a [LexResult]) -> Self {
|
||||
Self {
|
||||
inner: lxr.iter().flatten(),
|
||||
prev: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for DocLines<'_> {
|
||||
type Item = usize;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
let (start, tok, end) = self.inner.next()?;
|
||||
|
||||
match tok {
|
||||
Tok::Indent | Tok::Dedent | Tok::Newline => continue,
|
||||
Tok::Comment(..) => {
|
||||
if let Some(prev) = self.prev {
|
||||
if start.row() > prev {
|
||||
break Some(start.row());
|
||||
}
|
||||
} else {
|
||||
break Some(start.row());
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.prev = Some(end.row());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FusedIterator for DocLines<'_> {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct StringLinesVisitor {
|
||||
string_lines: Vec<usize>,
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
/// See: <https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals>
|
||||
|
||||
pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
|
||||
"u\"\"\"", "u'''", "r\"\"\"", "r'''", "U\"\"\"", "U'''", "R\"\"\"", "R'''", "\"\"\"", "'''",
|
||||
];
|
||||
|
||||
pub const SINGLE_QUOTE_PREFIXES: &[&str] = &[
|
||||
"u\"", "u'", "r\"", "r'", "u\"", "u'", "r\"", "r'", "U\"", "U'", "R\"", "R'", "\"", "'",
|
||||
];
|
||||
|
||||
pub const TRIPLE_QUOTE_SUFFIXES: &[&str] = &["\"\"\"", "'''"];
|
||||
|
||||
pub const SINGLE_QUOTE_SUFFIXES: &[&str] = &["\"", "'"];
|
||||
@@ -1,4 +1,3 @@
|
||||
pub mod constants;
|
||||
pub mod definition;
|
||||
pub mod extraction;
|
||||
pub mod google;
|
||||
|
||||
@@ -576,6 +576,7 @@ mod tests {
|
||||
let expected = Pyproject::new(Options {
|
||||
pydocstyle: Some(pydocstyle::settings::Options {
|
||||
convention: Some(Convention::Numpy),
|
||||
ignore_decorators: None,
|
||||
}),
|
||||
..default_options([Linter::Pydocstyle.into()])
|
||||
});
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use std::fs::File;
|
||||
use std::io::{BufReader, Read};
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
@@ -88,12 +86,3 @@ pub fn relativize_path(path: impl AsRef<Path>) -> String {
|
||||
}
|
||||
format!("{}", path.display())
|
||||
}
|
||||
|
||||
/// Read a file's contents from disk.
|
||||
pub fn read_file<P: AsRef<Path>>(path: P) -> Result<String> {
|
||||
let file = File::open(path)?;
|
||||
let mut buf_reader = BufReader::new(file);
|
||||
let mut contents = String::new();
|
||||
buf_reader.read_to_string(&mut contents)?;
|
||||
Ok(contents)
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
//!
|
||||
//! TODO(charlie): Consolidate with the existing AST-based docstring extraction.
|
||||
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::Tok;
|
||||
|
||||
#[derive(Default)]
|
||||
enum State {
|
||||
|
||||
@@ -33,10 +33,8 @@ pub mod resolver;
|
||||
mod rule_redirects;
|
||||
mod rule_selector;
|
||||
mod rules;
|
||||
mod rustpython_helpers;
|
||||
pub mod settings;
|
||||
pub mod source_code;
|
||||
mod vendor;
|
||||
mod violation;
|
||||
mod visibility;
|
||||
|
||||
|
||||
@@ -14,7 +14,6 @@ use crate::rules::{
|
||||
flake8_quotes, flake8_self, flake8_tidy_imports, flake8_type_checking, flake8_unused_arguments,
|
||||
isort, mccabe, pep8_naming, pycodestyle, pydocstyle, pylint, pyupgrade,
|
||||
};
|
||||
use crate::rustpython_helpers::tokenize;
|
||||
use crate::settings::configuration::Configuration;
|
||||
use crate::settings::options::Options;
|
||||
use crate::settings::{defaults, flags, Settings};
|
||||
@@ -175,7 +174,7 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
Settings::from_configuration(configuration, Path::new(".")).map_err(|e| e.to_string())?;
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(contents);
|
||||
@@ -202,8 +201,8 @@ pub fn check(contents: &str, options: JsValue) -> Result<JsValue, JsValue> {
|
||||
&indexer,
|
||||
&directives,
|
||||
&settings,
|
||||
flags::Autofix::Enabled,
|
||||
flags::Noqa::Enabled,
|
||||
flags::Autofix::Enabled,
|
||||
);
|
||||
|
||||
let messages: Vec<ExpandedMessage> = diagnostics
|
||||
|
||||
@@ -5,8 +5,8 @@ use anyhow::{anyhow, Result};
|
||||
use colored::Colorize;
|
||||
use log::error;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::error::ParseError;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::ParseError;
|
||||
|
||||
use crate::autofix::fix_file;
|
||||
use crate::checkers::ast::check_ast;
|
||||
@@ -20,11 +20,11 @@ use crate::directives::Directives;
|
||||
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
|
||||
use crate::message::{Message, Source};
|
||||
use crate::noqa::{add_noqa, rule_is_ignored};
|
||||
use crate::registry::{Diagnostic, LintSource, Rule};
|
||||
use crate::registry::{Diagnostic, Rule};
|
||||
use crate::rules::pycodestyle;
|
||||
use crate::settings::{flags, Settings};
|
||||
use crate::source_code::{Indexer, Locator, Stylist};
|
||||
use crate::{directives, fs, rustpython_helpers};
|
||||
use crate::{directives, fs};
|
||||
|
||||
const CARGO_PKG_NAME: &str = env!("CARGO_PKG_NAME");
|
||||
const CARGO_PKG_REPOSITORY: &str = env!("CARGO_PKG_REPOSITORY");
|
||||
@@ -62,8 +62,8 @@ pub fn check_path(
|
||||
indexer: &Indexer,
|
||||
directives: &Directives,
|
||||
settings: &Settings,
|
||||
autofix: flags::Autofix,
|
||||
noqa: flags::Noqa,
|
||||
autofix: flags::Autofix,
|
||||
) -> LinterResult<Vec<Diagnostic>> {
|
||||
// Aggregate all diagnostics.
|
||||
let mut diagnostics = vec![];
|
||||
@@ -81,7 +81,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Tokens))
|
||||
.any(|rule_code| rule_code.lint_source().is_tokens())
|
||||
{
|
||||
diagnostics.extend(check_tokens(locator, &tokens, settings, autofix));
|
||||
}
|
||||
@@ -90,7 +90,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Filesystem))
|
||||
.any(|rule_code| rule_code.lint_source().is_filesystem())
|
||||
{
|
||||
diagnostics.extend(check_file_path(path, package, settings));
|
||||
}
|
||||
@@ -99,7 +99,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::LogicalLines))
|
||||
.any(|rule_code| rule_code.lint_source().is_logical_lines())
|
||||
{
|
||||
diagnostics.extend(check_logical_lines(&tokens, locator, stylist, settings));
|
||||
}
|
||||
@@ -108,14 +108,14 @@ pub fn check_path(
|
||||
let use_ast = settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Ast));
|
||||
.any(|rule_code| rule_code.lint_source().is_ast());
|
||||
let use_imports = !directives.isort.skip_file
|
||||
&& settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports));
|
||||
.any(|rule_code| rule_code.lint_source().is_imports());
|
||||
if use_ast || use_imports || use_doc_lines {
|
||||
match rustpython_helpers::parse_program_tokens(tokens, &path.to_string_lossy()) {
|
||||
match ruff_rustpython::parse_program_tokens(tokens, &path.to_string_lossy()) {
|
||||
Ok(python_ast) => {
|
||||
if use_ast {
|
||||
diagnostics.extend(check_ast(
|
||||
@@ -177,7 +177,7 @@ pub fn check_path(
|
||||
if settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::PhysicalLines))
|
||||
.any(|rule_code| rule_code.lint_source().is_physical_lines())
|
||||
{
|
||||
diagnostics.extend(check_physical_lines(
|
||||
path,
|
||||
@@ -199,11 +199,11 @@ pub fn check_path(
|
||||
};
|
||||
|
||||
// Enforce `noqa` directives.
|
||||
if (matches!(noqa, flags::Noqa::Enabled) && !diagnostics.is_empty())
|
||||
if (noqa.into() && !diagnostics.is_empty())
|
||||
|| settings
|
||||
.rules
|
||||
.iter_enabled()
|
||||
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::NoQa))
|
||||
.any(|rule_code| rule_code.lint_source().is_noqa())
|
||||
{
|
||||
check_noqa(
|
||||
&mut diagnostics,
|
||||
@@ -223,10 +223,10 @@ const MAX_ITERATIONS: usize = 100;
|
||||
/// Add any missing `# noqa` pragmas to the source code at the given `Path`.
|
||||
pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings) -> Result<usize> {
|
||||
// Read the file from disk.
|
||||
let contents = fs::read_file(path)?;
|
||||
let contents = std::fs::read_to_string(path)?;
|
||||
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&contents);
|
||||
@@ -255,8 +255,8 @@ pub fn add_noqa_to_path(path: &Path, package: Option<&Path>, settings: &Settings
|
||||
&indexer,
|
||||
&directives,
|
||||
settings,
|
||||
flags::Autofix::Disabled,
|
||||
flags::Noqa::Disabled,
|
||||
flags::Autofix::Disabled,
|
||||
);
|
||||
|
||||
// Log any parse errors.
|
||||
@@ -287,10 +287,11 @@ pub fn lint_only(
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
settings: &Settings,
|
||||
noqa: flags::Noqa,
|
||||
autofix: flags::Autofix,
|
||||
) -> LinterResult<Vec<Message>> {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(contents);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(contents);
|
||||
@@ -316,8 +317,8 @@ pub fn lint_only(
|
||||
&indexer,
|
||||
&directives,
|
||||
settings,
|
||||
noqa,
|
||||
autofix,
|
||||
flags::Noqa::Enabled,
|
||||
);
|
||||
|
||||
// Convert from diagnostics to messages.
|
||||
@@ -331,7 +332,9 @@ pub fn lint_only(
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source)
|
||||
let lineno = diagnostic.location.row();
|
||||
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source, noqa_row)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
@@ -343,6 +346,7 @@ pub fn lint_fix<'a>(
|
||||
contents: &'a str,
|
||||
path: &Path,
|
||||
package: Option<&Path>,
|
||||
noqa: flags::Noqa,
|
||||
settings: &Settings,
|
||||
) -> Result<(LinterResult<Vec<Message>>, Cow<'a, str>, FixTable)> {
|
||||
let mut transformed = Cow::Borrowed(contents);
|
||||
@@ -359,7 +363,7 @@ pub fn lint_fix<'a>(
|
||||
// Continuously autofix until the source code stabilizes.
|
||||
loop {
|
||||
// Tokenize once.
|
||||
let tokens: Vec<LexResult> = rustpython_helpers::tokenize(&transformed);
|
||||
let tokens: Vec<LexResult> = ruff_rustpython::tokenize(&transformed);
|
||||
|
||||
// Map row and column locations to byte slices (lazily).
|
||||
let locator = Locator::new(&transformed);
|
||||
@@ -385,8 +389,8 @@ pub fn lint_fix<'a>(
|
||||
&indexer,
|
||||
&directives,
|
||||
settings,
|
||||
noqa,
|
||||
flags::Autofix::Enabled,
|
||||
flags::Noqa::Enabled,
|
||||
);
|
||||
|
||||
if iterations == 0 {
|
||||
@@ -469,7 +473,14 @@ This indicates a bug in `{}`. If you could open an issue at:
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Message::from_diagnostic(diagnostic, path_lossy.to_string(), source)
|
||||
let lineno = diagnostic.location.row();
|
||||
let noqa_row = *directives.noqa_line_for.get(&lineno).unwrap_or(&lineno);
|
||||
Message::from_diagnostic(
|
||||
diagnostic,
|
||||
path_lossy.to_string(),
|
||||
source,
|
||||
noqa_row,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
|
||||
@@ -16,6 +16,7 @@ pub struct Message {
|
||||
pub fix: Option<Fix>,
|
||||
pub filename: String,
|
||||
pub source: Option<Source>,
|
||||
pub noqa_row: usize,
|
||||
}
|
||||
|
||||
impl Message {
|
||||
@@ -23,6 +24,7 @@ impl Message {
|
||||
diagnostic: Diagnostic,
|
||||
filename: String,
|
||||
source: Option<Source>,
|
||||
noqa_row: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
kind: diagnostic.kind,
|
||||
@@ -34,6 +36,7 @@ impl Message {
|
||||
fix: diagnostic.fix,
|
||||
filename,
|
||||
source,
|
||||
noqa_row,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -53,10 +53,16 @@ ruff_macros::register_rules!(
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MultipleSpacesAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MultipleSpacesBeforeKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::TabAfterKeyword,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::UnexpectedSpacesAroundKeywordParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::MissingWhitespaceAroundParameterEquals,
|
||||
#[cfg(feature = "logical_lines")]
|
||||
rules::pycodestyle::rules::TabBeforeKeyword,
|
||||
rules::pycodestyle::rules::MultipleImportsOnOneLine,
|
||||
rules::pycodestyle::rules::ModuleImportNotAtTopOfFile,
|
||||
@@ -77,7 +83,10 @@ ruff_macros::register_rules!(
|
||||
rules::pycodestyle::rules::IOError,
|
||||
rules::pycodestyle::rules::SyntaxError,
|
||||
// pycodestyle warnings
|
||||
rules::pycodestyle::rules::IndentationContainsTabs,
|
||||
rules::pycodestyle::rules::TrailingWhitespace,
|
||||
rules::pycodestyle::rules::NoNewLineAtEndOfFile,
|
||||
rules::pycodestyle::rules::BlankLineContainsWhitespace,
|
||||
rules::pycodestyle::rules::DocLineTooLong,
|
||||
rules::pycodestyle::rules::InvalidEscapeSequence,
|
||||
// pyflakes
|
||||
@@ -131,6 +140,7 @@ ruff_macros::register_rules!(
|
||||
rules::pylint::rules::BadStringFormatType,
|
||||
rules::pylint::rules::BidirectionalUnicode,
|
||||
rules::pylint::rules::BadStrStripCall,
|
||||
rules::pylint::rules::CollapsibleElseIf,
|
||||
rules::pylint::rules::UselessImportAlias,
|
||||
rules::pylint::rules::UnnecessaryDirectLambdaCall,
|
||||
rules::pylint::rules::NonlocalWithoutBinding,
|
||||
@@ -144,11 +154,15 @@ ruff_macros::register_rules!(
|
||||
rules::pylint::rules::ConsiderUsingSysExit,
|
||||
rules::pylint::rules::MagicValueComparison,
|
||||
rules::pylint::rules::UselessElseOnLoop,
|
||||
rules::pylint::rules::GlobalStatement,
|
||||
rules::pylint::rules::GlobalVariableNotAssigned,
|
||||
rules::pylint::rules::TooManyReturnStatements,
|
||||
rules::pylint::rules::TooManyArguments,
|
||||
rules::pylint::rules::TooManyBranches,
|
||||
rules::pylint::rules::TooManyStatements,
|
||||
rules::pylint::rules::RedefinedLoopName,
|
||||
rules::pylint::rules::LoggingTooFewArgs,
|
||||
rules::pylint::rules::LoggingTooManyArgs,
|
||||
// flake8-builtins
|
||||
rules::flake8_builtins::rules::BuiltinVariableShadowing,
|
||||
rules::flake8_builtins::rules::BuiltinArgumentShadowing,
|
||||
@@ -275,10 +289,10 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_simplify::rules::IfExprWithTrueFalse,
|
||||
rules::flake8_simplify::rules::IfExprWithFalseTrue,
|
||||
rules::flake8_simplify::rules::IfExprWithTwistedArms,
|
||||
rules::flake8_simplify::rules::AAndNotA,
|
||||
rules::flake8_simplify::rules::AOrNotA,
|
||||
rules::flake8_simplify::rules::OrTrue,
|
||||
rules::flake8_simplify::rules::AndFalse,
|
||||
rules::flake8_simplify::rules::ExprAndNotExpr,
|
||||
rules::flake8_simplify::rules::ExprOrNotExpr,
|
||||
rules::flake8_simplify::rules::ExprOrTrue,
|
||||
rules::flake8_simplify::rules::ExprAndFalse,
|
||||
rules::flake8_simplify::rules::YodaConditions,
|
||||
rules::flake8_simplify::rules::DictGetWithDefault,
|
||||
// pyupgrade
|
||||
@@ -317,6 +331,7 @@ ruff_macros::register_rules!(
|
||||
rules::pyupgrade::rules::ImportReplacements,
|
||||
rules::pyupgrade::rules::OutdatedVersionBlock,
|
||||
rules::pyupgrade::rules::QuotedAnnotation,
|
||||
rules::pyupgrade::rules::IsinstanceWithTuple,
|
||||
// pydocstyle
|
||||
rules::pydocstyle::rules::PublicModule,
|
||||
rules::pydocstyle::rules::PublicClass,
|
||||
@@ -452,8 +467,14 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_errmsg::rules::DotFormatInException,
|
||||
// flake8-pyi
|
||||
rules::flake8_pyi::rules::PrefixTypeParams,
|
||||
rules::flake8_pyi::rules::BadVersionInfoComparison,
|
||||
rules::flake8_pyi::rules::UnrecognizedPlatformCheck,
|
||||
rules::flake8_pyi::rules::UnrecognizedPlatformName,
|
||||
rules::flake8_pyi::rules::PassStatementStubBody,
|
||||
rules::flake8_pyi::rules::NonEmptyStubBody,
|
||||
rules::flake8_pyi::rules::DocstringInStub,
|
||||
rules::flake8_pyi::rules::TypedArgumentSimpleDefaults,
|
||||
rules::flake8_pyi::rules::ArgumentSimpleDefaults,
|
||||
// flake8-pytest-style
|
||||
rules::flake8_pytest_style::rules::IncorrectFixtureParenthesesStyle,
|
||||
rules::flake8_pytest_style::rules::FixturePositionalArgs,
|
||||
@@ -488,6 +509,7 @@ ruff_macros::register_rules!(
|
||||
rules::flake8_pie::rules::UnnecessaryDictKwargs,
|
||||
rules::flake8_pie::rules::PreferListBuiltin,
|
||||
rules::flake8_pie::rules::SingleStartsEndsWith,
|
||||
rules::flake8_pie::rules::UnnecessaryComprehensionAnyAll,
|
||||
// flake8-commas
|
||||
rules::flake8_commas::rules::TrailingCommaMissing,
|
||||
rules::flake8_commas::rules::TrailingCommaOnBareTupleProhibited,
|
||||
@@ -562,12 +584,14 @@ ruff_macros::register_rules!(
|
||||
rules::ruff::rules::AmbiguousUnicodeCharacterString,
|
||||
rules::ruff::rules::AmbiguousUnicodeCharacterDocstring,
|
||||
rules::ruff::rules::AmbiguousUnicodeCharacterComment,
|
||||
rules::ruff::rules::KeywordArgumentBeforeStarArgument,
|
||||
rules::ruff::rules::UnpackInsteadOfConcatenatingToCollectionLiteral,
|
||||
rules::ruff::rules::AsyncioDanglingTask,
|
||||
rules::ruff::rules::UnusedNOQA,
|
||||
// flake8-django
|
||||
rules::flake8_django::rules::NullableModelStringField,
|
||||
rules::flake8_django::rules::LocalsInRenderFunction,
|
||||
rules::flake8_django::rules::ExcludeWithModelForm,
|
||||
rules::flake8_django::rules::AllWithModelForm,
|
||||
rules::flake8_django::rules::ModelWithoutDunderStr,
|
||||
rules::flake8_django::rules::NonLeadingReceiverDecorator,
|
||||
);
|
||||
@@ -753,6 +777,7 @@ impl Linter {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(is_macro::Is)]
|
||||
pub enum LintSource {
|
||||
Ast,
|
||||
Io,
|
||||
@@ -760,7 +785,7 @@ pub enum LintSource {
|
||||
LogicalLines,
|
||||
Tokens,
|
||||
Imports,
|
||||
NoQa,
|
||||
Noqa,
|
||||
Filesystem,
|
||||
}
|
||||
|
||||
@@ -769,7 +794,7 @@ impl Rule {
|
||||
/// physical lines).
|
||||
pub const fn lint_source(&self) -> &'static LintSource {
|
||||
match self {
|
||||
Rule::UnusedNOQA => &LintSource::NoQa,
|
||||
Rule::UnusedNOQA => &LintSource::Noqa,
|
||||
Rule::BlanketNOQA
|
||||
| Rule::BlanketTypeIgnore
|
||||
| Rule::DocLineTooLong
|
||||
@@ -782,7 +807,10 @@ impl Rule {
|
||||
| Rule::ShebangNewline
|
||||
| Rule::BidirectionalUnicode
|
||||
| Rule::ShebangPython
|
||||
| Rule::ShebangWhitespace => &LintSource::PhysicalLines,
|
||||
| Rule::ShebangWhitespace
|
||||
| Rule::TrailingWhitespace
|
||||
| Rule::IndentationContainsTabs
|
||||
| Rule::BlankLineContainsWhitespace => &LintSource::PhysicalLines,
|
||||
Rule::AmbiguousUnicodeCharacterComment
|
||||
| Rule::AmbiguousUnicodeCharacterDocstring
|
||||
| Rule::AmbiguousUnicodeCharacterString
|
||||
@@ -812,6 +840,7 @@ impl Rule {
|
||||
| Rule::MultipleSpacesAfterOperator
|
||||
| Rule::MultipleSpacesBeforeKeyword
|
||||
| Rule::MultipleSpacesBeforeOperator
|
||||
| Rule::MissingWhitespaceAfterKeyword
|
||||
| Rule::NoIndentedBlock
|
||||
| Rule::NoIndentedBlockComment
|
||||
| Rule::NoSpaceAfterBlockComment
|
||||
@@ -826,6 +855,8 @@ impl Rule {
|
||||
| Rule::UnexpectedIndentationComment
|
||||
| Rule::WhitespaceAfterOpenBracket
|
||||
| Rule::WhitespaceBeforeCloseBracket
|
||||
| Rule::UnexpectedSpacesAroundKeywordParameterEquals
|
||||
| Rule::MissingWhitespaceAroundParameterEquals
|
||||
| Rule::WhitespaceBeforePunctuation => &LintSource::LogicalLines,
|
||||
_ => &LintSource::Ast,
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
//! Discover Python files, and their corresponding `Settings`, from the
|
||||
//! Discover Python files, and their corresponding [`Settings`], from the
|
||||
//! filesystem.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
@@ -19,7 +19,7 @@ use crate::settings::{pyproject, AllSettings, Settings};
|
||||
|
||||
/// The strategy used to discover the relevant `pyproject.toml` file for each
|
||||
/// Python file.
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, is_macro::Is)]
|
||||
pub enum PyprojectDiscovery {
|
||||
/// Use a fixed `pyproject.toml` file for all Python files (i.e., one
|
||||
/// provided on the command-line).
|
||||
@@ -30,7 +30,7 @@ pub enum PyprojectDiscovery {
|
||||
}
|
||||
|
||||
impl PyprojectDiscovery {
|
||||
fn top_level_settings(&self) -> &AllSettings {
|
||||
pub fn top_level_settings(&self) -> &AllSettings {
|
||||
match self {
|
||||
PyprojectDiscovery::Fixed(settings) => settings,
|
||||
PyprojectDiscovery::Hierarchical(settings) => settings,
|
||||
@@ -65,12 +65,12 @@ pub struct Resolver {
|
||||
}
|
||||
|
||||
impl Resolver {
|
||||
/// Add a resolved `Settings` under a given `PathBuf` scope.
|
||||
/// Add a resolved [`Settings`] under a given [`PathBuf`] scope.
|
||||
pub fn add(&mut self, path: PathBuf, settings: AllSettings) {
|
||||
self.settings.insert(path, settings);
|
||||
}
|
||||
|
||||
/// Return the appropriate `AllSettings` for a given `Path`.
|
||||
/// Return the appropriate [`AllSettings`] for a given [`Path`].
|
||||
pub fn resolve_all<'a>(
|
||||
&'a self,
|
||||
path: &Path,
|
||||
@@ -82,13 +82,7 @@ impl Resolver {
|
||||
.settings
|
||||
.iter()
|
||||
.rev()
|
||||
.find_map(|(root, settings)| {
|
||||
if path.starts_with(root) {
|
||||
Some(settings)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.find_map(|(root, settings)| path.starts_with(root).then_some(settings))
|
||||
.unwrap_or(default),
|
||||
}
|
||||
}
|
||||
@@ -97,7 +91,7 @@ impl Resolver {
|
||||
&self.resolve_all(path, strategy).lib
|
||||
}
|
||||
|
||||
/// Return an iterator over the resolved `Settings` in this `Resolver`.
|
||||
/// Return an iterator over the resolved [`Settings`] in this [`Resolver`].
|
||||
pub fn iter(&self) -> impl Iterator<Item = &AllSettings> {
|
||||
self.settings.values()
|
||||
}
|
||||
@@ -112,8 +106,8 @@ impl ConfigProcessor for &NoOpProcessor {
|
||||
fn process_config(&self, _config: &mut Configuration) {}
|
||||
}
|
||||
|
||||
/// Recursively resolve a `Configuration` from a `pyproject.toml` file at the
|
||||
/// specified `Path`.
|
||||
/// Recursively resolve a [`Configuration`] from a `pyproject.toml` file at the
|
||||
/// specified [`Path`].
|
||||
// TODO(charlie): This whole system could do with some caching. Right now, if a
|
||||
// configuration file extends another in the same path, we'll re-parse the same
|
||||
// file at least twice (possibly more than twice, since we'll also parse it when
|
||||
@@ -161,26 +155,26 @@ pub fn resolve_configuration(
|
||||
Ok(configuration)
|
||||
}
|
||||
|
||||
/// Extract the project root (scope) and `Settings` from a given
|
||||
/// Extract the project root (scope) and [`Settings`] from a given
|
||||
/// `pyproject.toml`.
|
||||
pub fn resolve_scoped_settings(
|
||||
pyproject: &Path,
|
||||
relativity: &Relativity,
|
||||
processor: impl ConfigProcessor,
|
||||
) -> Result<(PathBuf, AllSettings)> {
|
||||
let project_root = relativity.resolve(pyproject);
|
||||
let configuration = resolve_configuration(pyproject, relativity, processor)?;
|
||||
let project_root = relativity.resolve(pyproject);
|
||||
let settings = AllSettings::from_configuration(configuration, &project_root)?;
|
||||
Ok((project_root, settings))
|
||||
}
|
||||
|
||||
/// Extract the `Settings` from a given `pyproject.toml`.
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml`.
|
||||
pub fn resolve_settings(pyproject: &Path, relativity: &Relativity) -> Result<AllSettings> {
|
||||
let (_project_root, settings) = resolve_scoped_settings(pyproject, relativity, &NoOpProcessor)?;
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
/// Extract the `Settings` from a given `pyproject.toml` and process the
|
||||
/// Extract the [`Settings`] from a given `pyproject.toml` and process the
|
||||
/// configuration with the given [`ConfigProcessor`].
|
||||
pub fn resolve_settings_with_processor(
|
||||
pyproject: &Path,
|
||||
@@ -197,18 +191,18 @@ fn match_exclusion(file_path: &str, file_basename: &str, exclusion: &globset::Gl
|
||||
exclusion.is_match(file_path) || exclusion.is_match(file_basename)
|
||||
}
|
||||
|
||||
/// Return `true` if the `Path` appears to be that of a Python file.
|
||||
/// Return `true` if the [`Path`] appears to be that of a Python file.
|
||||
fn is_python_path(path: &Path) -> bool {
|
||||
path.extension()
|
||||
.map_or(false, |ext| ext == "py" || ext == "pyi")
|
||||
}
|
||||
|
||||
/// Return `true` if the `Path` appears to be that of a Python interface definition file (`.pyi`).
|
||||
/// Return `true` if the [`Path`] appears to be that of a Python interface definition file (`.pyi`).
|
||||
pub fn is_interface_definition_path(path: &Path) -> bool {
|
||||
path.extension().map_or(false, |ext| ext == "pyi")
|
||||
}
|
||||
|
||||
/// Return `true` if the `Entry` appears to be that of a Python file.
|
||||
/// Return `true` if the [`DirEntry`] appears to be that of a Python file.
|
||||
pub fn is_python_entry(entry: &DirEntry) -> bool {
|
||||
is_python_path(entry.path())
|
||||
&& !entry
|
||||
@@ -228,7 +222,7 @@ pub fn python_files_in_path(
|
||||
// Search for `pyproject.toml` files in all parent directories.
|
||||
let mut resolver = Resolver::default();
|
||||
let mut seen = FxHashSet::default();
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
for path in &paths {
|
||||
for ancestor in path.ancestors() {
|
||||
if seen.insert(ancestor) {
|
||||
@@ -277,7 +271,7 @@ pub fn python_files_in_path(
|
||||
Box::new(|result| {
|
||||
// Search for the `pyproject.toml` file in this directory, before we visit any
|
||||
// of its contents.
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
if let Ok(entry) = &result {
|
||||
if entry
|
||||
.file_type()
|
||||
@@ -357,7 +351,7 @@ pub fn python_files_in_path(
|
||||
Ok((files.into_inner().unwrap(), resolver.into_inner().unwrap()))
|
||||
}
|
||||
|
||||
/// Return `true` if the Python file at `Path` is _not_ excluded.
|
||||
/// Return `true` if the Python file at [`Path`] is _not_ excluded.
|
||||
pub fn python_file_at_path(
|
||||
path: &Path,
|
||||
pyproject_strategy: &PyprojectDiscovery,
|
||||
@@ -372,7 +366,7 @@ pub fn python_file_at_path(
|
||||
|
||||
// Search for `pyproject.toml` files in all parent directories.
|
||||
let mut resolver = Resolver::default();
|
||||
if matches!(pyproject_strategy, PyprojectDiscovery::Hierarchical(..)) {
|
||||
if pyproject_strategy.is_hierarchical() {
|
||||
for ancestor in path.ancestors() {
|
||||
if let Some(pyproject) = settings_toml(ancestor)? {
|
||||
let (root, settings) =
|
||||
@@ -386,7 +380,7 @@ pub fn python_file_at_path(
|
||||
Ok(!is_file_excluded(&path, &resolver, pyproject_strategy))
|
||||
}
|
||||
|
||||
/// Return `true` if the given top-level `Path` should be excluded.
|
||||
/// Return `true` if the given top-level [`Path`] should be excluded.
|
||||
fn is_file_excluded(
|
||||
path: &Path,
|
||||
resolver: &Resolver,
|
||||
|
||||
@@ -92,5 +92,7 @@ static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
|
||||
// TODO(charlie): Remove by 2023-04-01.
|
||||
("TYP", "TCH"),
|
||||
("TYP001", "TCH001"),
|
||||
// TODO(charlie): Remove by 2023-06-01.
|
||||
("RUF004", "B026"),
|
||||
])
|
||||
});
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
/// See: [eradicate.py](https://github.com/myint/eradicate/blob/98f199940979c94447a461d50d27862b118b282d/eradicate.py)
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_parser as parser;
|
||||
|
||||
static ALLOWLIST_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(
|
||||
@@ -77,7 +78,7 @@ pub fn comment_contains_code(line: &str, task_tags: &[String]) -> bool {
|
||||
}
|
||||
|
||||
// Finally, compile the source code.
|
||||
rustpython_parser::parser::parse_program(&line, "<filename>").is_ok()
|
||||
parser::parse_program(&line, "<filename>").is_ok()
|
||||
}
|
||||
|
||||
/// Returns `true` if a line is probably part of some multiline code.
|
||||
|
||||
@@ -60,9 +60,7 @@ pub fn commented_out_code(
|
||||
// Verify that the comment is on its own line, and that it contains code.
|
||||
if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {
|
||||
let mut diagnostic = Diagnostic::new(CommentedOutCode, Range::new(start, end));
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::CommentedOutCode)
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(&Rule::CommentedOutCode) {
|
||||
diagnostic.amend(Fix::deletion(location, end_location));
|
||||
}
|
||||
Some(diagnostic)
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use anyhow::{bail, Result};
|
||||
use rustpython_parser::ast::Stmt;
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::fix::Fix;
|
||||
@@ -16,7 +15,7 @@ pub fn add_return_none_annotation(locator: &Locator, stmt: &Stmt) -> Result<Fix>
|
||||
let mut seen_lpar = false;
|
||||
let mut seen_rpar = false;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, ..) in lexer::make_tokenizer_located(contents, range.location).flatten() {
|
||||
for (start, tok, ..) in lexer::lex_located(contents, Mode::Module, range.location).flatten() {
|
||||
if seen_lpar && seen_rpar {
|
||||
if matches!(tok, Tok::Colon) {
|
||||
return Ok(Fix::insertion(" -> None".to_string(), start));
|
||||
|
||||
@@ -398,9 +398,9 @@ define_violation!(
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// * [PEP 484](https://www.python.org/dev/peps/pep-0484/#the-any-type)
|
||||
/// * [`typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any)
|
||||
/// * [Mypy: The Any type](https://mypy.readthedocs.io/en/stable/kinds_of_types.html#the-any-type)
|
||||
/// - [PEP 484](https://www.python.org/dev/peps/pep-0484/#the-any-type)
|
||||
/// - [`typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any)
|
||||
/// - [Mypy: The Any type](https://mypy.readthedocs.io/en/stable/kinds_of_types.html#the-any-type)
|
||||
pub struct AnyType {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
@@ -37,8 +37,8 @@ pub struct Options {
|
||||
/// Whether to suppress `ANN200`-level violations for functions that meet
|
||||
/// either of the following criteria:
|
||||
///
|
||||
/// * Contain no `return` statement.
|
||||
/// * Explicit `return` statement(s) all return `None` (explicitly or
|
||||
/// - Contain no `return` statement.
|
||||
/// - Explicit `return` statement(s) all return `None` (explicitly or
|
||||
/// implicitly).
|
||||
pub suppress_none_returning: Option<bool>,
|
||||
#[option(
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use rustpython_parser::ast::{Constant, Expr, ExprKind};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
const PASSWORD_NAMES: [&str; 7] = [
|
||||
"password", "pass", "passwd", "pwd", "secret", "token", "secrete",
|
||||
];
|
||||
static PASSWORD_CANDIDATE_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
Regex::new(r"(^|_)(?i)(pas+wo?r?d|pass(phrase)?|pwd|token|secrete?)($|_)").unwrap()
|
||||
});
|
||||
|
||||
pub fn string_literal(expr: &Expr) -> Option<&str> {
|
||||
match &expr.node {
|
||||
@@ -16,11 +18,8 @@ pub fn string_literal(expr: &Expr) -> Option<&str> {
|
||||
}
|
||||
}
|
||||
|
||||
// Maybe use regex for this?
|
||||
pub fn matches_password_name(string: &str) -> bool {
|
||||
PASSWORD_NAMES
|
||||
.iter()
|
||||
.any(|name| string.to_lowercase().contains(name))
|
||||
PASSWORD_CANDIDATE_REGEX.is_match(string)
|
||||
}
|
||||
|
||||
pub fn is_untyped_exception(type_: Option<&Expr>, checker: &Checker) -> bool {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use rustpython_parser::ast::Stmt;
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::ast::{Located, StmtKind};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
@@ -16,7 +17,7 @@ impl Violation for Assert {
|
||||
}
|
||||
|
||||
/// S101
|
||||
pub fn assert_used(stmt: &Located<StmtKind>) -> Diagnostic {
|
||||
pub fn assert_used(stmt: &Stmt) -> Diagnostic {
|
||||
Diagnostic::new(
|
||||
Assert,
|
||||
Range::new(stmt.location, stmt.location.with_col_offset("assert".len())),
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use rustpython_parser::ast::{ArgData, Arguments, Expr, Located};
|
||||
use rustpython_parser::ast::{Arg, Arguments, Expr};
|
||||
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violation::Violation;
|
||||
|
||||
use super::super::helpers::{matches_password_name, string_literal};
|
||||
|
||||
define_violation!(
|
||||
pub struct HardcodedPasswordDefault {
|
||||
pub string: String,
|
||||
@@ -19,7 +21,7 @@ impl Violation for HardcodedPasswordDefault {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_password_kwarg(arg: &Located<ArgData>, default: &Expr) -> Option<Diagnostic> {
|
||||
fn check_password_kwarg(arg: &Arg, default: &Expr) -> Option<Diagnostic> {
|
||||
let string = string_literal(default).filter(|string| !string.is_empty())?;
|
||||
let kwarg_name = &arg.node.arg;
|
||||
if !matches_password_name(kwarg_name) {
|
||||
|
||||
@@ -33,8 +33,8 @@ define_violation!(
|
||||
/// ```
|
||||
///
|
||||
/// ## References
|
||||
/// * [B608: Test for SQL injection](https://bandit.readthedocs.io/en/latest/plugins/b608_hardcoded_sql_expressions.html)
|
||||
/// * [psycopg3: Server-side binding](https://www.psycopg.org/psycopg3/docs/basic/from_pg2.html#server-side-binding)
|
||||
/// - [B608: Test for SQL injection](https://bandit.readthedocs.io/en/latest/plugins/b608_hardcoded_sql_expressions.html)
|
||||
/// - [psycopg3: Server-side binding](https://www.psycopg.org/psycopg3/docs/basic/from_pg2.html#server-side-binding)
|
||||
pub struct HardcodedSQLExpression;
|
||||
);
|
||||
impl Violation for HardcodedSQLExpression {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
source: src/rules/flake8_bandit/mod.rs
|
||||
source: crates/ruff/src/rules/flake8_bandit/mod.rs
|
||||
expression: diagnostics
|
||||
---
|
||||
- kind:
|
||||
@@ -105,46 +105,46 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 23
|
||||
column: 16
|
||||
end_location:
|
||||
row: 23
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 24
|
||||
column: 12
|
||||
end_location:
|
||||
row: 24
|
||||
column: 20
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 25
|
||||
column: 14
|
||||
end_location:
|
||||
row: 25
|
||||
column: 22
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 26
|
||||
row: 22
|
||||
column: 11
|
||||
end_location:
|
||||
row: 26
|
||||
row: 22
|
||||
column: 19
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 23
|
||||
column: 11
|
||||
end_location:
|
||||
row: 23
|
||||
column: 19
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 25
|
||||
column: 16
|
||||
end_location:
|
||||
row: 25
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 26
|
||||
column: 12
|
||||
end_location:
|
||||
row: 26
|
||||
column: 20
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
@@ -161,9 +161,31 @@ expression: diagnostics
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 28
|
||||
column: 13
|
||||
column: 11
|
||||
end_location:
|
||||
row: 28
|
||||
column: 19
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 29
|
||||
column: 14
|
||||
end_location:
|
||||
row: 29
|
||||
column: 22
|
||||
fix: ~
|
||||
parent: ~
|
||||
- kind:
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 30
|
||||
column: 13
|
||||
end_location:
|
||||
row: 30
|
||||
column: 21
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -171,10 +193,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 29
|
||||
row: 31
|
||||
column: 15
|
||||
end_location:
|
||||
row: 29
|
||||
row: 31
|
||||
column: 23
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -182,10 +204,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 30
|
||||
row: 32
|
||||
column: 23
|
||||
end_location:
|
||||
row: 30
|
||||
row: 32
|
||||
column: 31
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -193,10 +215,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 31
|
||||
row: 33
|
||||
column: 23
|
||||
end_location:
|
||||
row: 31
|
||||
row: 33
|
||||
column: 31
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -204,10 +226,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 35
|
||||
row: 37
|
||||
column: 15
|
||||
end_location:
|
||||
row: 35
|
||||
row: 37
|
||||
column: 23
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -215,10 +237,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 39
|
||||
row: 41
|
||||
column: 19
|
||||
end_location:
|
||||
row: 39
|
||||
row: 41
|
||||
column: 27
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -226,10 +248,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 40
|
||||
row: 42
|
||||
column: 16
|
||||
end_location:
|
||||
row: 40
|
||||
row: 42
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -237,10 +259,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 41
|
||||
row: 43
|
||||
column: 17
|
||||
end_location:
|
||||
row: 41
|
||||
row: 43
|
||||
column: 25
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -248,10 +270,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 42
|
||||
row: 44
|
||||
column: 14
|
||||
end_location:
|
||||
row: 42
|
||||
row: 44
|
||||
column: 22
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -259,10 +281,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 43
|
||||
row: 45
|
||||
column: 17
|
||||
end_location:
|
||||
row: 43
|
||||
row: 45
|
||||
column: 25
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -270,10 +292,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 44
|
||||
row: 46
|
||||
column: 16
|
||||
end_location:
|
||||
row: 44
|
||||
row: 46
|
||||
column: 24
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -281,10 +303,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 45
|
||||
row: 47
|
||||
column: 18
|
||||
end_location:
|
||||
row: 45
|
||||
row: 47
|
||||
column: 26
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -292,10 +314,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 47
|
||||
row: 49
|
||||
column: 12
|
||||
end_location:
|
||||
row: 47
|
||||
row: 49
|
||||
column: 20
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -303,10 +325,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 48
|
||||
row: 50
|
||||
column: 9
|
||||
end_location:
|
||||
row: 48
|
||||
row: 50
|
||||
column: 17
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -314,10 +336,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 49
|
||||
row: 51
|
||||
column: 10
|
||||
end_location:
|
||||
row: 49
|
||||
row: 51
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -325,10 +347,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 50
|
||||
row: 52
|
||||
column: 7
|
||||
end_location:
|
||||
row: 50
|
||||
row: 52
|
||||
column: 15
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -336,10 +358,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 51
|
||||
row: 53
|
||||
column: 10
|
||||
end_location:
|
||||
row: 51
|
||||
row: 53
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -347,10 +369,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 52
|
||||
row: 54
|
||||
column: 9
|
||||
end_location:
|
||||
row: 52
|
||||
row: 54
|
||||
column: 17
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -358,10 +380,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 53
|
||||
row: 55
|
||||
column: 11
|
||||
end_location:
|
||||
row: 53
|
||||
row: 55
|
||||
column: 19
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -369,10 +391,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: s3cr3t
|
||||
location:
|
||||
row: 54
|
||||
row: 56
|
||||
column: 20
|
||||
end_location:
|
||||
row: 54
|
||||
row: 56
|
||||
column: 28
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -380,10 +402,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: "1\n2"
|
||||
location:
|
||||
row: 56
|
||||
row: 58
|
||||
column: 12
|
||||
end_location:
|
||||
row: 56
|
||||
row: 58
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -391,10 +413,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: "3\t4"
|
||||
location:
|
||||
row: 59
|
||||
row: 61
|
||||
column: 12
|
||||
end_location:
|
||||
row: 59
|
||||
row: 61
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
@@ -402,10 +424,10 @@ expression: diagnostics
|
||||
HardcodedPasswordString:
|
||||
string: "5\r6"
|
||||
location:
|
||||
row: 62
|
||||
row: 64
|
||||
column: 12
|
||||
end_location:
|
||||
row: 62
|
||||
row: 64
|
||||
column: 18
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -56,6 +56,9 @@ const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
|
||||
"bytes",
|
||||
"int",
|
||||
"float",
|
||||
"getint",
|
||||
"getfloat",
|
||||
"getboolean",
|
||||
];
|
||||
|
||||
const FUNC_DEF_NAME_ALLOWLIST: &[&str] = &["__setitem__"];
|
||||
|
||||
@@ -85,10 +85,10 @@ expression: diagnostics
|
||||
- kind:
|
||||
BooleanPositionalArgInFunctionDefinition: ~
|
||||
location:
|
||||
row: 77
|
||||
row: 81
|
||||
column: 18
|
||||
end_location:
|
||||
row: 77
|
||||
row: 81
|
||||
column: 29
|
||||
fix: ~
|
||||
parent: ~
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use ruff_python::string::is_lower;
|
||||
use rustpython_parser::ast::{ExprKind, Stmt, StmtKind};
|
||||
use rustpython_parser::ast::{ExprKind, Stmt};
|
||||
|
||||
use crate::ast::types::Range;
|
||||
use ruff_macros::{define_violation, derive_message_formats};
|
||||
use ruff_python::str::is_lower;
|
||||
|
||||
use crate::ast::helpers::RaiseStatementVisitor;
|
||||
use crate::ast::visitor;
|
||||
use crate::ast::visitor::Visitor;
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violation::Violation;
|
||||
@@ -16,62 +16,32 @@ impl Violation for RaiseWithoutFromInsideExcept {
|
||||
#[derive_message_formats]
|
||||
fn message(&self) -> String {
|
||||
format!(
|
||||
"Within an except clause, raise exceptions with `raise ... from err` or `raise ... \
|
||||
"Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... \
|
||||
from None` to distinguish them from errors in exception handling"
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
struct RaiseVisitor {
|
||||
diagnostics: Vec<Diagnostic>,
|
||||
}
|
||||
/// B904
|
||||
pub fn raise_without_from_inside_except(checker: &mut Checker, body: &[Stmt]) {
|
||||
let raises = {
|
||||
let mut visitor = RaiseStatementVisitor::default();
|
||||
visitor::walk_body(&mut visitor, body);
|
||||
visitor.raises
|
||||
};
|
||||
|
||||
impl<'a> Visitor<'a> for RaiseVisitor {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
match &stmt.node {
|
||||
StmtKind::Raise {
|
||||
exc: Some(exc),
|
||||
cause: None,
|
||||
} => match &exc.node {
|
||||
ExprKind::Name { id, .. } if is_lower(id) => {}
|
||||
_ => {
|
||||
self.diagnostics.push(Diagnostic::new(
|
||||
RaiseWithoutFromInsideExcept,
|
||||
Range::from_located(stmt),
|
||||
));
|
||||
}
|
||||
},
|
||||
StmtKind::ClassDef { .. }
|
||||
| StmtKind::FunctionDef { .. }
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
| StmtKind::Try { .. }
|
||||
| StmtKind::TryStar { .. } => {}
|
||||
StmtKind::If { body, orelse, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
visitor::walk_body(self, orelse);
|
||||
}
|
||||
StmtKind::While { body, .. }
|
||||
| StmtKind::With { body, .. }
|
||||
| StmtKind::AsyncWith { body, .. }
|
||||
| StmtKind::For { body, .. }
|
||||
| StmtKind::AsyncFor { body, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
}
|
||||
StmtKind::Match { cases, .. } => {
|
||||
for case in cases {
|
||||
visitor::walk_body(self, &case.body);
|
||||
for (range, exc, cause) in raises {
|
||||
if cause.is_none() {
|
||||
if let Some(exc) = exc {
|
||||
match &exc.node {
|
||||
ExprKind::Name { id, .. } if is_lower(id) => {}
|
||||
_ => {
|
||||
checker
|
||||
.diagnostics
|
||||
.push(Diagnostic::new(RaiseWithoutFromInsideExcept, range));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// B904
|
||||
pub fn raise_without_from_inside_except(checker: &mut Checker, body: &[Stmt]) {
|
||||
let mut visitor = RaiseVisitor {
|
||||
diagnostics: vec![],
|
||||
};
|
||||
visitor::walk_body(&mut visitor, body);
|
||||
checker.diagnostics.extend(visitor.diagnostics);
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Expr, ExprKind, Stmt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::ast::types::{BindingKind, Range, RefEquality};
|
||||
use crate::ast::types::{Range, RefEquality};
|
||||
use crate::ast::visitor::Visitor;
|
||||
use crate::ast::{helpers, visitor};
|
||||
use crate::checkers::ast::Checker;
|
||||
@@ -31,7 +31,7 @@ use crate::fix::Fix;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::violation::{AutofixKind, Availability, Violation};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, result_like::BoolLike)]
|
||||
pub enum Certainty {
|
||||
Certain,
|
||||
Uncertain,
|
||||
@@ -59,7 +59,7 @@ impl Violation for UnusedLoopControlVariable {
|
||||
let UnusedLoopControlVariable {
|
||||
name, certainty, ..
|
||||
} = self;
|
||||
if matches!(certainty, Certainty::Certain) {
|
||||
if certainty.to_bool() {
|
||||
format!("Loop control variable `{name}` not used within loop body")
|
||||
} else {
|
||||
format!("Loop control variable `{name}` may not be used within loop body")
|
||||
@@ -70,7 +70,7 @@ impl Violation for UnusedLoopControlVariable {
|
||||
let UnusedLoopControlVariable {
|
||||
certainty, rename, ..
|
||||
} = self;
|
||||
if matches!(certainty, Certainty::Certain) && rename.is_some() {
|
||||
if certainty.to_bool() && rename.is_some() {
|
||||
Some(|UnusedLoopControlVariable { name, rename, .. }| {
|
||||
let rename = rename.as_ref().unwrap();
|
||||
format!("Rename unused `{name}` to `{rename}`")
|
||||
@@ -140,25 +140,17 @@ pub fn unused_loop_control_variable(
|
||||
}
|
||||
|
||||
// Avoid fixing any variables that _may_ be used, but undetectably so.
|
||||
let certainty = if helpers::uses_magic_variable_access(checker, body) {
|
||||
Certainty::Uncertain
|
||||
} else {
|
||||
Certainty::Certain
|
||||
};
|
||||
let certainty = Certainty::from(!helpers::uses_magic_variable_access(checker, body));
|
||||
|
||||
// Attempt to rename the variable by prepending an underscore, but avoid
|
||||
// applying the fix if doing so wouldn't actually cause us to ignore the
|
||||
// violation in the next pass.
|
||||
let rename = format!("_{name}");
|
||||
let rename = if checker
|
||||
let rename = checker
|
||||
.settings
|
||||
.dummy_variable_rgx
|
||||
.is_match(rename.as_str())
|
||||
{
|
||||
Some(rename)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
.then_some(rename);
|
||||
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
UnusedLoopControlVariable {
|
||||
@@ -169,7 +161,7 @@ pub fn unused_loop_control_variable(
|
||||
Range::from_located(expr),
|
||||
);
|
||||
if let Some(rename) = rename {
|
||||
if matches!(certainty, Certainty::Certain) && checker.patch(diagnostic.kind.rule()) {
|
||||
if certainty.into() && checker.patch(diagnostic.kind.rule()) {
|
||||
// Find the `BindingKind::LoopVar` corresponding to the name.
|
||||
let scope = checker.current_scope();
|
||||
let binding = scope
|
||||
@@ -185,7 +177,7 @@ pub fn unused_loop_control_variable(
|
||||
.and_then(|source| (source == &RefEquality(stmt)).then_some(binding))
|
||||
});
|
||||
if let Some(binding) = binding {
|
||||
if matches!(binding.kind, BindingKind::LoopVar) {
|
||||
if binding.kind.is_loop_var() {
|
||||
if !binding.used() {
|
||||
diagnostic.amend(Fix::replacement(
|
||||
rename,
|
||||
|
||||
@@ -23,7 +23,7 @@ define_violation!(
|
||||
///
|
||||
/// ## Options
|
||||
///
|
||||
/// * `flake8-builtins.builtins-ignorelist`
|
||||
/// - `flake8-builtins.builtins-ignorelist`
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
@@ -45,7 +45,7 @@ define_violation!(
|
||||
/// return result
|
||||
/// ```
|
||||
///
|
||||
/// * [Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
|
||||
/// - [_Why is it a bad idea to name a variable `id` in Python?_](https://stackoverflow.com/questions/77552/id-is-a-bad-variable-name-in-python)
|
||||
pub struct BuiltinVariableShadowing {
|
||||
pub name: String,
|
||||
}
|
||||
@@ -73,7 +73,7 @@ define_violation!(
|
||||
///
|
||||
/// ## Options
|
||||
///
|
||||
/// * `flake8-builtins.builtins-ignorelist`
|
||||
/// - `flake8-builtins.builtins-ignorelist`
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
@@ -128,7 +128,7 @@ define_violation!(
|
||||
///
|
||||
/// ## Options
|
||||
///
|
||||
/// * `flake8-builtins.builtins-ignorelist`
|
||||
/// - `flake8-builtins.builtins-ignorelist`
|
||||
///
|
||||
/// ## Example
|
||||
/// ```python
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user