Compare commits
v0.0.248...github-292 (150 commits)

SHA1:
6e97c9438a 187104e396 ea86edf12e 2558384817 3ed539d50e 4a70a4c323 310f13c7db 2168404fc2 a032b66c2e af5f7dbd83
8066607ea3 0ed9fccce9 074a343a63 c7e09b54b0 67d1f74587 1c79dff3bd f5f09b489b 061495a9eb 470e1c1754 16be691712
270015865b ccfa9d5b20 2261e194a0 cd6413ca09 c65585e14a d2a6ed7be6 16e2dae0c2 e8ba9c9e21 d285f5c90a 386ca7c9a1
40c5abf16e 7e7aec7d74 4b5538f74e 36d134fd41 0b7d6b9097 994e2e0903 bc79f540e4 3a78b59314 5f83851329 484ce7b8fc
1c75071136 51bca19c1d a8a312e862 33c31cda27 cd9fbeb560 84e96cdcd9 248590224a bbc55cdb04 2792439eac 0694aee1b6
a17b5c134a 42f61535b5 1c01b3c934 39b9a1637f 2c692e3acf 24add5f56c 0b7736ad79 eef85067c8 da98fab4ae 0f37a98d91
f38624824d 159422071e 6eaacf96be eb15371453 198b301baf c8c575dd43 6e54cd8233 a688a237d7 bda2a0007a 32d165b7ad
ac79bf4ee9 376eab3a53 08be7bd285 f5241451d8 c9fe0708cb 09f8c487ea 1e7233a8eb f967f344fc 095f005bf4 0f04aa2a5f
ad7ba77fff 77d43795f8 4357f2be0f e5c1f95545 227ff62a4e d8e4902516 e66739884f 5fd827545b c1ddcb8a60 48a317d5f6
74e18b6cff 21d02cd51f 049e77b939 b9bfb81e36 2d4fae45d9 726adb7efc dbdfdeb0e1 1c41789c2a 2f9de335db ba61bb6a6c
17ab71ff75 4ad4e3e091 6ced5122e4 7d55b417f7 f0e0efc46f 1efa2e07ad df3932f750 817d0b4902 ffd8e958fc ed33b75bad
262e768fd3 bc3a9ce003 48005d87f8 e37e9c2ca3 8fde63b323 97338e4cd6 9645790a8b 18800c6884 fd638a2e54 fa1459d56e
d93c5811ea 06e426f509 6eb014b3b2 d9fd78d907 37df07d2e0 d5c65b5f1b cdc4e86158 50ec6d3b0f a6eb60cdd5 90c04b9cff
b701cca779 ce8953442d 7d4e513a82 35f7f7b66d 6e02405bd6 b657468346 f72ed255e5 7e9dea0027 9545958ad8 41faa335d1
4cfa350112 41f163fc8d d21dd994e6 6f5a6b8c8b 35606d7b05 3ad257cfea b39f960cd1 c297d46899 d6a100028c 35d4e03f2a
.gitattributes (vendored, new file: 6 lines)
@@ -0,0 +1,6 @@
* text=auto eol=lf

crates/ruff/resources/test/fixtures/isort/line_ending_crlf.py text eol=crlf
crates/ruff/resources/test/fixtures/pycodestyle/W605_1.py text eol=crlf

ruff.schema.json linguist-generated=true text=auto eol=lf
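A quick way to sanity-check that these attributes take effect once the file lands (illustrative commands, not part of the change):

```shell
# Show which attributes apply to a given path.
git check-attr text eol linguist-generated -- ruff.schema.json
# Re-apply line-ending normalization to files that were already tracked.
git add --renormalize .
```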
.github/workflows/ci.yaml (vendored: 2 changed lines)
@@ -29,7 +29,7 @@ jobs:
- run: ./target/debug/ruff_dev generate-all
- run: git diff --quiet README.md || echo "::error file=README.md::This file is outdated. Run 'cargo dev generate-all'."
- run: git diff --quiet ruff.schema.json || echo "::error file=ruff.schema.json::This file is outdated. Run 'cargo dev generate-all'."
- run: git diff --exit-code -- README.md ruff.schema.json
- run: git diff --exit-code -- README.md ruff.schema.json docs

cargo-fmt:
name: "cargo fmt"
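The updated step makes CI fail whenever a generated file drifts. A local equivalent of the check, using the same `cargo dev generate-all` alias referenced elsewhere in this change, would be roughly:

```shell
cargo dev generate-all
# Non-zero exit status if README.md, ruff.schema.json, or docs are out of date.
git diff --exit-code -- README.md ruff.schema.json docs
```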
.pre-commit-config.yaml
@@ -5,6 +5,14 @@ repos:
hooks:
- id: validate-pyproject

- repo: https://github.com/executablebooks/mdformat
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
- mdformat-black
- black==23.1.0 # Must be the latest version of Black

- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.33.0
hooks:
@@ -13,7 +21,6 @@ repos:
- --disable
- MD013 # line-length
- MD033 # no-inline-html
- MD041 # first-line-h1
- --

- repo: local
@@ -30,7 +37,7 @@ repos:
pass_filenames: false
- id: ruff
name: ruff
entry: cargo run -- --no-cache --fix
entry: cargo run -p ruff_cli -- check --no-cache --force-exclude --fix --exit-non-zero-on-fix
language: rust
types_or: [python, pyi]
require_serial: true
@@ -46,5 +53,16 @@ repos:
pass_filenames: false
exclude: target

# Black
- repo: https://github.com/psf/black
rev: 23.1.0
hooks:
- id: black
exclude: |
(?x)^(
crates/ruff/resources/.*|
crates/ruff_python_formatter/resources/.*
)$

ci:
skip: [cargo-fmt, clippy, dev-generate-all]
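To exercise the updated hooks locally before pushing, the standard pre-commit invocations apply (a sketch; hook IDs as defined above):

```shell
pre-commit install                    # run hooks automatically on `git commit`
pre-commit run --all-files            # run every hook across the repository
pre-commit run mdformat --all-files   # run a single hook, e.g. the new mdformat hook
```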
BREAKING_CHANGES.md
@@ -20,8 +20,7 @@ the intention of adding a stable public API in the future.
### `select`, `extend-select`, `ignore`, and `extend-ignore` have new semantics ([#2312](https://github.com/charliermarsh/ruff/pull/2312))

Previously, the interplay between `select` and its related options could lead to unexpected
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore
ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
behavior. For example, `ruff --select E501 --ignore ALL` and `ruff --select E501 --extend-ignore ALL` behaved differently. (See [#2312](https://github.com/charliermarsh/ruff/pull/2312) for more
examples.)

When Ruff determines the enabled rule set, it has to reconcile `select` and `ignore` from a variety
@@ -74,7 +73,7 @@ ruff rule E402 --format json # Works! (And preferred.)
This change is largely backwards compatible -- most users should experience
no change in behavior. However, please note the following exceptions:

* Subcommands will now fail when invoked with unsupported arguments, instead
- Subcommands will now fail when invoked with unsupported arguments, instead
of silently ignoring them. For example, the following will now fail:

```console
@@ -83,16 +82,16 @@ no change in behavior. However, please note the following exceptions:

(the `clean` command doesn't support `--respect-gitignore`.)

* The semantics of `ruff <arg>` have changed slightly when `<arg>` is a valid subcommand.
- The semantics of `ruff <arg>` have changed slightly when `<arg>` is a valid subcommand.
For example, prior to this release, running `ruff rule` would run `ruff` over a file or
directory called `rule`. Now, `ruff rule` would invoke the `rule` subcommand. This should
only impact projects with files or directories named `rule`, `check`, `explain`, `clean`,
or `generate-shell-completion`.

* Scripts that invoke ruff should supply `--` before any positional arguments.
- Scripts that invoke ruff should supply `--` before any positional arguments.
(The semantics of `ruff -- <arg>` have not changed.)

* `--explain` previously treated `--format grouped` as a synonym for `--format text`.
- `--explain` previously treated `--format grouped` as a synonym for `--format text`.
This is no longer supported; instead, use `--format text`.

## 0.0.226
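For the positional-argument change, the distinction looks roughly like this (with a hypothetical file named `rule`, following the statements above):

```shell
ruff rule         # now invokes the `rule` subcommand
ruff -- rule      # unchanged: lints the file or directory named `rule`
ruff check rule   # preferred: explicit subcommand, so `rule` is treated as a path
```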
@@ -1,16 +1,16 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
* [Our Pledge](#our-pledge)
|
||||
* [Our Standards](#our-standards)
|
||||
* [Enforcement Responsibilities](#enforcement-responsibilities)
|
||||
* [Scope](#scope)
|
||||
* [Enforcement](#enforcement)
|
||||
* [Enforcement Guidelines](#enforcement-guidelines)
|
||||
* [1. Correction](#1-correction)
|
||||
* [2. Warning](#2-warning)
|
||||
* [3. Temporary Ban](#3-temporary-ban)
|
||||
* [4. Permanent Ban](#4-permanent-ban)
|
||||
* [Attribution](#attribution)
|
||||
- [Our Pledge](#our-pledge)
|
||||
- [Our Standards](#our-standards)
|
||||
- [Enforcement Responsibilities](#enforcement-responsibilities)
|
||||
- [Scope](#scope)
|
||||
- [Enforcement](#enforcement)
|
||||
- [Enforcement Guidelines](#enforcement-guidelines)
|
||||
- [1. Correction](#1-correction)
|
||||
- [2. Warning](#2-warning)
|
||||
- [3. Temporary Ban](#3-temporary-ban)
|
||||
- [4. Permanent Ban](#4-permanent-ban)
|
||||
- [Attribution](#attribution)
|
||||
|
||||
## Our Pledge
|
||||
|
||||
@@ -29,23 +29,23 @@ diverse, inclusive, and healthy community.
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
- Demonstrating empathy and kindness toward other people
|
||||
- Being respectful of differing opinions, viewpoints, and experiences
|
||||
- Giving and gracefully accepting constructive feedback
|
||||
- Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
- Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
- The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
- Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
- Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
@@ -132,7 +132,7 @@ version 2.0, available [here](https://www.contributor-covenant.org/version/2/0/c
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the [FAQ](https://www.contributor-covenant.org/faq).
|
||||
Translations are available [here](https://www.contributor-covenant.org/translations).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
CONTRIBUTING.md
@@ -2,16 +2,16 @@

Welcome! We're happy to have you here. Thank you in advance for your contribution to Ruff.

* [The Basics](#the-basics)
* [Prerequisites](#prerequisites)
* [Development](#development)
* [Project Structure](#project-structure)
* [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
* [Rule naming convention](#rule-naming-convention)
* [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
* [MkDocs](#mkdocs)
* [Release Process](#release-process)
* [Benchmarks](#benchmarks)
- [The Basics](#the-basics)
- [Prerequisites](#prerequisites)
- [Development](#development)
- [Project Structure](#project-structure)
- [Example: Adding a new lint rule](#example-adding-a-new-lint-rule)
- [Rule naming convention](#rule-naming-convention)
- [Example: Adding a new configuration option](#example-adding-a-new-configuration-option)
- [MkDocs](#mkdocs)
- [Release Process](#release-process)
- [Benchmarks](#benchmarks)

## The Basics

@@ -29,13 +29,10 @@ If you're looking for a place to start, we recommend implementing a new lint rul
pattern-match against the examples in the existing codebase. Many lint rules are inspired by
existing Python plugins, which can be used as a reference implementation.

As a concrete example: consider taking on one of the rules from the [`tryceratops`](https://github.com/charliermarsh/ruff/issues/2056)
plugin, and looking to the originating [Python source](https://github.com/guilatrova/tryceratops)
As a concrete example: consider taking on one of the rules from the [`flake8-pyi`](https://github.com/charliermarsh/ruff/issues/848)
plugin, and looking to the originating [Python source](https://github.com/PyCQA/flake8-pyi)
for guidance.

Alternatively, we've started work on the [`flake8-pyi`](https://github.com/charliermarsh/ruff/issues/848)
plugin (see the [Python source](https://github.com/PyCQA/flake8-pyi)) -- another good place to start.

### Prerequisites

Ruff is written in Rust. You'll need to install the
@@ -52,16 +49,16 @@ cargo install cargo-insta
After cloning the repository, run Ruff locally with:

```shell
cargo run check /path/to/file.py --no-cache
cargo run -p ruff_cli -- check /path/to/file.py --no-cache
```

Prior to opening a pull request, ensure that your code has been auto-formatted,
and that it passes both the lint and test validation checks:

```shell
cargo fmt --all # Auto-formatting...
cargo fmt # Auto-formatting...
cargo clippy --fix --workspace --all-targets --all-features # Linting...
cargo test --all # Testing...
cargo test # Testing...
```

These checks will run on GitHub Actions when you open your Pull Request, but running them locally
@@ -94,27 +91,27 @@ The vast majority of the code, including all lint rules, lives in the `ruff` cra

At time of writing, the repository includes the following crates:

* `crates/ruff`: library crate containing all lint rules and the core logic for running them.
* `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
* `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g., `cargo dev generate-all`).
* `crates/ruff_macros`: library crate containing macros used by Ruff.
* `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of standard library modules by version).
* `crates/flake8_to_ruff`: binary crate for generating Ruff configuration from Flake8 configuration.
- `crates/ruff`: library crate containing all lint rules and the core logic for running them.
- `crates/ruff_cli`: binary crate containing Ruff's command-line interface.
- `crates/ruff_dev`: binary crate containing utilities used in the development of Ruff itself (e.g., `cargo dev generate-all`).
- `crates/ruff_macros`: library crate containing macros used by Ruff.
- `crates/ruff_python`: library crate implementing Python-specific functionality (e.g., lists of standard library modules by version).
- `crates/flake8_to_ruff`: binary crate for generating Ruff configuration from Flake8 configuration.

### Example: Adding a new lint rule

At a high level, the steps involved in adding a new lint rule are as follows:

1. Determine a name for the new rule as per our [rule naming convention](#rule-naming-convention).
2. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`).
3. In that file, define a violation struct. You can grep for `define_violation!` to see examples.
4. Map the violation struct to a rule code in `crates/ruff/src/registry.rs` (e.g., `E402`).
5. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast.rs` (for AST-based
1. Create a file for your rule (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`).
1. In that file, define a violation struct. You can grep for `define_violation!` to see examples.
1. Map the violation struct to a rule code in `crates/ruff/src/registry.rs` (e.g., `E402`).
1. Define the logic for triggering the violation in `crates/ruff/src/checkers/ast.rs` (for AST-based
checks), `crates/ruff/src/checkers/tokens.rs` (for token-based checks), `crates/ruff/src/checkers/lines.rs`
(for text-based checks), or `crates/ruff/src/checkers/filesystem.rs` (for filesystem-based
checks).
6. Add a test fixture.
7. Update the generated files (documentation and generated code).
1. Add a test fixture.
1. Update the generated files (documentation and generated code).

To define the violation, start by creating a dedicated file for your rule under the appropriate
rule linter (e.g., `crates/ruff/src/rules/flake8_bugbear/rules/abstract_base_class.rs`). That file should
@@ -135,10 +132,10 @@ contain a variety of violations and non-violations designed to evaluate and demo
of your lint rule.

Run `cargo dev generate-all` to generate the code for your new fixture. Then run Ruff
locally with (e.g.) `cargo run check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.
locally with (e.g.) `cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402`.

Once you're satisfied with the output, codify the behavior as a snapshot test by adding a new
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test --all`.
`test_case` macro in the relevant `crates/ruff/src/[linter]/mod.rs` file. Then, run `cargo test`.
Your test will fail, but you'll be prompted to follow up with `cargo insta review`. Accept the
generated snapshot, then commit the snapshot file alongside the rest of your changes.

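Pulled together, the fixture-to-snapshot loop described above looks roughly like this (using the rule code `E402` as the running example from the text):

```shell
cargo dev generate-all
cargo run -p ruff_cli -- check crates/ruff/resources/test/fixtures/pycodestyle/E402.py --no-cache --select E402
cargo test            # the new `test_case` snapshot will fail on the first run
cargo insta review    # accept the generated snapshot, then commit it
```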
@@ -150,11 +147,14 @@ The rule name should make sense when read as "allow _rule-name_" or "allow _rule

This implies that rule names:

* should state the bad thing being checked for
- should state the bad thing being checked for

* should not contain instructions on what you should use instead
- should not contain instructions on what you should use instead
  (these belong in the rule documentation and the `autofix_title` for rules that have autofix)

When re-implementing rules from other linters, this convention is given more importance than
preserving the original rule name.

### Example: Adding a new configuration option

Ruff's user-facing settings live in a few different places.
@@ -191,13 +191,13 @@ To preview any changes to the documentation locally:
pip install -r docs/requirements.txt
```

2. Generate the MkDocs site with:
1. Generate the MkDocs site with:

```shell
python scripts/generate_mkdocs.py
```

3. Run the development server with:
1. Run the development server with:

```shell
mkdocs serve
Cargo.lock (generated: 204 changed lines)
@@ -2,6 +2,12 @@
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "Inflector"
|
||||
version = "0.11.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3"
|
||||
|
||||
[[package]]
|
||||
name = "adler"
|
||||
version = "1.0.2"
|
||||
@@ -86,7 +92,7 @@ version = "2.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9834fcc22e0874394a010230586367d4a3e9f11b560f469262678547e1d2575e"
|
||||
dependencies = [
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"doc-comment",
|
||||
"predicates",
|
||||
"predicates-core",
|
||||
@@ -175,9 +181,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bstr"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832"
|
||||
checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"once_cell",
|
||||
@@ -298,9 +304,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.1.1"
|
||||
version = "4.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3d6540eedc41f8a5a76cf3d8d458057dcdf817be4158a55b5f861f7a5483de75"
|
||||
checksum = "bd125be87bf4c255ebc50de0b7f4d2a6201e8ac3dc86e39c0ad081dc5e7236fe"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
]
|
||||
@@ -318,9 +324,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete_fig"
|
||||
version = "4.1.0"
|
||||
version = "4.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf0c76d8fcf782a1102ccfcd10ca8246e7fdd609c1cd6deddbb96cb638e9bb5c"
|
||||
checksum = "63a06158a72dbb088f864887b4409fd22600ba379f3cee3040f842234cc5c2d0"
|
||||
dependencies = [
|
||||
"clap 4.1.6",
|
||||
"clap_complete",
|
||||
@@ -550,9 +556,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8"
|
||||
checksum = "86d3488e7665a7a483b57e25bdd90d0aeb2bc7608c8d0346acf2ad3f1caf1d62"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"cxxbridge-flags",
|
||||
@@ -562,9 +568,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxx-build"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38"
|
||||
checksum = "48fcaf066a053a41a81dfb14d57d99738b767febb8b735c3016e469fac5da690"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"codespan-reporting",
|
||||
@@ -577,15 +583,26 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-flags"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03"
|
||||
checksum = "a2ef98b8b717a829ca5603af80e1f9e2e48013ab227b68ef37872ef84ee479bf"
|
||||
|
||||
[[package]]
|
||||
name = "cxxbridge-macro"
|
||||
version = "1.0.90"
|
||||
version = "1.0.91"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263"
|
||||
checksum = "086c685979a698443656e5cf7856c95c642295a38599f12fb1ff76fb28d19892"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derivative"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -753,7 +770,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
||||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.248"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
@@ -840,7 +857,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr 1.2.0",
|
||||
"bstr 1.3.0",
|
||||
"fnv",
|
||||
"log",
|
||||
"regex",
|
||||
@@ -990,9 +1007,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "insta"
|
||||
version = "1.26.0"
|
||||
version = "1.28.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff"
|
||||
checksum = "fea5b3894afe466b4bcf0388630fc15e11938a6074af0cd637c825ba2ec8a099"
|
||||
dependencies = [
|
||||
"console",
|
||||
"lazy_static",
|
||||
@@ -1024,10 +1041,23 @@ dependencies = [
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.3"
|
||||
name = "is-macro"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef"
|
||||
checksum = "8a7d079e129b77477a49c5c4f1cfe9ce6c2c909ef52520693e8e811a714c7b20"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is-terminal"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857"
|
||||
dependencies = [
|
||||
"hermit-abi 0.3.1",
|
||||
"io-lifetimes",
|
||||
@@ -1035,15 +1065,6 @@ dependencies = [
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is_executable"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa9acdc6d67b75e626ad644734e8bc6df893d9cd2a834129065d3dd6158ea9c8"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.10.5"
|
||||
@@ -1171,7 +1192,7 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"chic",
|
||||
"itertools",
|
||||
@@ -1186,7 +1207,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=f2f0b7a487a8725d161fe8b3ed73a6758b21e177#f2f0b7a487a8725d161fe8b3ed73a6758b21e177"
|
||||
source = "git+https://github.com/charliermarsh/LibCST?rev=80e4c1399f95e5beb532fdd1e209ad2dbb470438#80e4c1399f95e5beb532fdd1e209ad2dbb470438"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
@@ -1292,14 +1313,14 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
|
||||
checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||
"windows-sys 0.42.0",
|
||||
"windows-sys 0.45.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1428,18 +1449,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num_enum"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d829733185c1ca374f17e52b762f24f535ec625d2cc1f070e34c8a9068f341b"
|
||||
checksum = "3e0072973714303aa6e3631c7e8e777970cf4bdd25dc4932e41031027b8bcc4e"
|
||||
dependencies = [
|
||||
"num_enum_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_enum_derive"
|
||||
version = "0.5.9"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2be1598bf1c313dcdd12092e3f1920f463462525a21b7b4e11b4168353d0123e"
|
||||
checksum = "0629cbd6b897944899b1f10496d9c4a7ac5878d45fd61bc22e9e79bfbbc29597"
|
||||
dependencies = [
|
||||
"proc-macro-crate",
|
||||
"proc-macro2",
|
||||
@@ -1449,9 +1470,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.17.0"
|
||||
version = "1.17.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
|
||||
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
|
||||
|
||||
[[package]]
|
||||
name = "oorandom"
|
||||
@@ -1686,6 +1707,17 @@ dependencies = [
|
||||
"plotters-backend",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pmutil"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ppv-lite86"
|
||||
version = "0.2.17"
|
||||
@@ -1910,6 +1942,28 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccc7ce6435c33898517a30e85578cd204cbb696875efb93dec19a2d31294f810"
|
||||
dependencies = [
|
||||
"result-like-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "result-like-derive"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1fabf0a2e54f711c68c50d49f648a1a8a37adcb57353f518ac4df374f0788f42"
|
||||
dependencies = [
|
||||
"pmutil",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
"syn-ext",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ring"
|
||||
version = "0.16.20"
|
||||
@@ -1927,7 +1981,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.248"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bisection",
|
||||
@@ -1939,6 +1993,7 @@ dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"criterion",
|
||||
"derivative",
|
||||
"dirs",
|
||||
"fern",
|
||||
"getrandom",
|
||||
@@ -1947,7 +2002,7 @@ dependencies = [
|
||||
"ignore",
|
||||
"imperative",
|
||||
"insta",
|
||||
"is_executable",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
"js-sys",
|
||||
"libcst",
|
||||
@@ -1959,8 +2014,10 @@ dependencies = [
|
||||
"once_cell",
|
||||
"path-absolutize",
|
||||
"regex",
|
||||
"result-like",
|
||||
"ruff_macros",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
@@ -1983,7 +2040,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.0.248"
|
||||
version = "0.0.253"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.1",
|
||||
"anyhow",
|
||||
@@ -2012,6 +2069,7 @@ dependencies = [
|
||||
"rustc-hash",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"similar",
|
||||
"strum",
|
||||
"textwrap",
|
||||
@@ -2082,15 +2140,42 @@ dependencies = [
|
||||
"anyhow",
|
||||
"clap 4.1.6",
|
||||
"insta",
|
||||
"is-macro",
|
||||
"itertools",
|
||||
"once_cell",
|
||||
"ruff_formatter",
|
||||
"ruff_python",
|
||||
"ruff_rustpython",
|
||||
"ruff_testing_macros",
|
||||
"ruff_text_size",
|
||||
"rustc-hash",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
"similar",
|
||||
"test-case",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_rustpython"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"once_cell",
|
||||
"rustpython-common",
|
||||
"rustpython-parser",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_testing_macros"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff_text_size"
|
||||
version = "0.0.0"
|
||||
@@ -2146,7 +2231,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-ast"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=61b48f108982d865524f86624a9d5bc2ae3bccef#61b48f108982d865524f86624a9d5bc2ae3bccef"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"rustpython-compiler-core",
|
||||
@@ -2155,7 +2240,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-common"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=61b48f108982d865524f86624a9d5bc2ae3bccef#61b48f108982d865524f86624a9d5bc2ae3bccef"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"ascii",
|
||||
"bitflags",
|
||||
@@ -2180,7 +2265,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-compiler-core"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=61b48f108982d865524f86624a9d5bc2ae3bccef#61b48f108982d865524f86624a9d5bc2ae3bccef"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"bincode",
|
||||
"bitflags",
|
||||
@@ -2197,7 +2282,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rustpython-parser"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=61b48f108982d865524f86624a9d5bc2ae3bccef#61b48f108982d865524f86624a9d5bc2ae3bccef"
|
||||
source = "git+https://github.com/RustPython/RustPython.git?rev=aa8336ee94492b52458ed8e1517238e5c6c2914c#aa8336ee94492b52458ed8e1517238e5c6c2914c"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"anyhow",
|
||||
@@ -2468,15 +2553,24 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.107"
|
||||
version = "1.0.108"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
|
||||
checksum = "d56e159d99e6c2b93995d171050271edb50ecc5288fbc7cc17de8fdce4e58c14"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn-ext"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b86cb2b68c5b3c078cac02588bc23f3c04bb828c5d3aedd17980876ec6a7be6"
|
||||
dependencies = [
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tempfile"
|
||||
version = "3.3.0"
|
||||
@@ -2854,9 +2948,11 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
||||
|
||||
[[package]]
|
||||
name = "unicode_names2"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "029df4cc8238cefc911704ff8fa210853a0f3bce2694d8f51181dd41ee0f3301"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/youknowone/unicode_names2.git?tag=v0.6.0+character-alias#4ce16aa85cbcdd9cc830410f1a72ef9a235f2fde"
|
||||
dependencies = [
|
||||
"phf",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
|
||||
Cargo.toml
@@ -3,18 +3,19 @@ members = ["crates/*"]

[workspace.package]
edition = "2021"
rust-version = "1.65.0"
rust-version = "1.67.0"

[workspace.dependencies]
anyhow = { version = "1.0.66" }
clap = { version = "4.0.1", features = ["derive"] }
itertools = { version = "0.10.5" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a8725d161fe8b3ed73a6758b21e177" }
is-macro = { version = "0.2.2" }
libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "80e4c1399f95e5beb532fdd1e209ad2dbb470438" }
once_cell = { version = "1.16.0" }
regex = { version = "1.6.0" }
rustc-hash = { version = "1.1.0" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "61b48f108982d865524f86624a9d5bc2ae3bccef" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "61b48f108982d865524f86624a9d5bc2ae3bccef" }
rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "aa8336ee94492b52458ed8e1517238e5c6c2914c" }
schemars = { version = "0.8.11" }
serde = { version = "1.0.147", features = ["derive"] }
serde_json = { version = "1.0.87" }
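These `[workspace.dependencies]` entries are shared version and revision pins; each member crate opts in per dependency. A minimal sketch of how a member crate's `Cargo.toml` would reference them (illustrative, not part of the diff):

```toml
[dependencies]
# Inherit the version/revision pinned at the workspace root.
anyhow = { workspace = true }
is-macro = { workspace = true }
rustpython-parser = { workspace = true }
```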
README.md (606 changed lines)
@@ -24,16 +24,17 @@ An extremely fast Python linter, written in Rust.
<i>Linting the CPython codebase from scratch.</i>
</p>

* ⚡️ 10-100x faster than existing linters
* 🐍 Installable via `pip`
* 🛠️ `pyproject.toml` support
* 📦 Built-in caching, to avoid re-analyzing unchanged files
* 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
* 📏 Over [400 built-in rules](https://beta.ruff.rs/docs/rules/) (and growing)
* ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
* 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
* ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
* 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](#pyprojecttoml-discovery)
- ⚡️ 10-100x faster than existing linters
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.11 compatibility
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Autofix support, for automatic error correction (e.g., automatically remove unused imports)
- 📏 Over [500 built-in rules](https://beta.ruff.rs/docs/rules/)
- ⚖️ [Near-parity](https://beta.ruff.rs/docs/faq/#how-does-ruff-compare-to-flake8) with the built-in Flake8 rule set
- 🔌 Native re-implementations of dozens of Flake8 plugins, like flake8-bugbear
- ⌨️ First-party editor integrations for [VS Code](https://github.com/charliermarsh/ruff-vscode) and [more](https://github.com/charliermarsh/ruff-lsp)
- 🌎 Monorepo-friendly, with [hierarchical and cascading configuration](https://beta.ruff.rs/docs/configuration/#pyprojecttoml-discovery)

Ruff aims to be orders of magnitude faster than alternative tools while integrating more
functionality behind a single, common interface.
@@ -46,11 +47,11 @@ all while executing tens or hundreds of times faster than any individual tool.

Ruff is extremely actively developed and used in major open-source projects like:

* [pandas](https://github.com/pandas-dev/pandas)
* [FastAPI](https://github.com/tiangolo/fastapi)
* [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
* [Apache Airflow](https://github.com/apache/airflow)
* [SciPy](https://github.com/scipy/scipy)
- [pandas](https://github.com/pandas-dev/pandas)
- [FastAPI](https://github.com/tiangolo/fastapi)
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
- [Apache Airflow](https://github.com/apache/airflow)
- [SciPy](https://github.com/scipy/scipy)

...and many more.

@@ -96,21 +97,19 @@ developer of [Zulip](https://github.com/zulip/zulip):
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
1. [Installation and Usage](#installation-and-usage)
|
||||
2. [Configuration](#configuration)
|
||||
3. [Supported Rules](#supported-rules)
|
||||
4. [Contributing](#contributing)
|
||||
5. [Support](#support)
|
||||
6. [Acknowledgements](#acknowledgements)
|
||||
7. [Who's Using Ruff?](#whos-using-ruff)
|
||||
8. [License](#license)
|
||||
1. [Getting Started](#getting-started)
|
||||
1. [Configuration](#configuration)
|
||||
1. [Rules](#rules)
|
||||
1. [Contributing](#contributing)
|
||||
1. [Support](#support)
|
||||
1. [Acknowledgements](#acknowledgements)
|
||||
1. [Who's Using Ruff?](#whos-using-ruff)
|
||||
1. [License](#license)
|
||||
|
||||
## Installation and Usage
|
||||
## Getting Started
|
||||
|
||||
For more, see the [documentation](https://beta.ruff.rs/docs/).
|
||||
|
||||
<!-- Begin section: Installation and Usage -->
|
||||
|
||||
### Installation
|
||||
|
||||
Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
@@ -119,31 +118,8 @@ Ruff is available as [`ruff`](https://pypi.org/project/ruff/) on PyPI:
|
||||
pip install ruff
|
||||
```
|
||||
|
||||
For **macOS Homebrew** and **Linuxbrew** users, Ruff is also available as [`ruff`](https://formulae.brew.sh/formula/ruff) on Homebrew:
|
||||
|
||||
```shell
|
||||
brew install ruff
|
||||
```
|
||||
|
||||
For **Conda** users, Ruff is also available as [`ruff`](https://anaconda.org/conda-forge/ruff) on `conda-forge`:
|
||||
|
||||
```shell
|
||||
conda install -c conda-forge ruff
|
||||
```
|
||||
|
||||
For **Arch Linux** users, Ruff is also available as [`ruff`](https://archlinux.org/packages/community/x86_64/ruff/) on the official repositories:
|
||||
|
||||
```shell
|
||||
pacman -S ruff
|
||||
```
|
||||
|
||||
For **Alpine** users, Ruff is also available as [`ruff`](https://pkgs.alpinelinux.org/package/edge/testing/x86_64/ruff) on the testing repositories:
|
||||
|
||||
```shell
|
||||
apk add ruff
|
||||
```
|
||||
|
||||
[](https://repology.org/project/ruff-python-linter/versions)
|
||||
You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
|
||||
and with [a variety of other package managers](https://beta.ruff.rs/docs/installation/).
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -156,51 +132,30 @@ ruff check path/to/code/*.py # Lint all `.py` files in `/path/to/code`
ruff check path/to/code/to/file.py # Lint `file.py`
```

You can run Ruff in `--watch` mode to automatically re-run on-change:

```shell
ruff check path/to/code/ --watch
```

Ruff also works with [pre-commit](https://pre-commit.com):
Ruff can also be used as a [pre-commit](https://pre-commit.com) hook:

```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
  # Ruff version.
  rev: 'v0.0.248'
  rev: 'v0.0.253'
  hooks:
    - id: ruff
```

Or, to enable autofix:
Ruff can also be used as a [VS Code extension](https://github.com/charliermarsh/ruff-vscode) or
alongside any other editor through the [Ruff LSP](https://github.com/charliermarsh/ruff-lsp).

```yaml
- repo: https://github.com/charliermarsh/ruff-pre-commit
  # Ruff version.
  rev: 'v0.0.248'
  hooks:
    - id: ruff
      args: [--fix, --exit-non-zero-on-fix]
```
### Configuration

<!-- End section: Installation and Usage -->

## Configuration

<!-- Begin section: Configuration -->

Ruff can be configured via a `pyproject.toml` file, a `ruff.toml` file, or through the command line.

For a complete enumeration of the available configuration options, see the
[documentation](https://beta.ruff.rs/docs/settings/).

### Configure via `pyproject.toml`
Ruff can be configured through a `pyproject.toml`, `ruff.toml`, or `.ruff.toml` file (see:
[_Configuration_](https://beta.ruff.rs/docs/configuration/), or [_Settings_](https://beta.ruff.rs/docs/settings/)
for a complete list of all configuration options).

If left unspecified, the default configuration is equivalent to:

```toml
[tool.ruff]
# Enable Pyflakes `E` and `F` codes by default.
# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default.
select = ["E", "F"]
ignore = []

@@ -231,7 +186,6 @@ exclude = [
    "node_modules",
    "venv",
]
per-file-ignores = {}

# Same as Black.
line-length = 88
@@ -247,428 +201,42 @@ target-version = "py310"
max-complexity = 10
```

As an example, the following would configure Ruff to: (1) enforce flake8-bugbear rules, in addition
to the defaults; (2) avoid enforcing line-length violations (`E501`); (3) avoid attempting to fix
flake8-bugbear (`B`) violations; and (4) ignore import-at-top-of-file violations (`E402`) in
`__init__.py` files:

```toml
[tool.ruff]
# Enable flake8-bugbear (`B`) rules.
select = ["E", "F", "B"]

# Never enforce `E501` (line length violations).
ignore = ["E501"]

# Avoid trying to fix flake8-bugbear (`B`) violations.
unfixable = ["B"]

# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
[tool.ruff.per-file-ignores]
"__init__.py" = ["E402"]
"path/to/file.py" = ["E402"]
```

Plugin configurations should be expressed as subsections, e.g.:

```toml
[tool.ruff]
# Add "Q" to the list of enabled codes.
select = ["E", "F", "Q"]

[tool.ruff.flake8-quotes]
docstring-quotes = "double"
```

Ruff mirrors Flake8's rule code system, in which each rule code consists of a one-to-three letter
prefix, followed by three digits (e.g., `F401`). The prefix indicates the "source" of the rule
(e.g., `F` for Pyflakes, `E` for pycodestyle, `ANN` for flake8-annotations). The set of enabled
rules is determined by the `select` and `ignore` options, which support both the full code (e.g.,
`F401`) and the prefix (e.g., `F`).

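In other words, a prefix selects an entire family of rules while a full code targets a single rule; for example (illustrative):

```toml
[tool.ruff]
# "E" enables every pycodestyle error rule; "F401" enables a single Pyflakes rule.
select = ["E", "F401"]
# Prefixes work in `ignore` too: disable all flake8-annotations rules.
ignore = ["ANN"]
```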
As a special-case, Ruff also supports the `ALL` code, which enables all rules. Note that some of the
pydocstyle rules conflict (e.g., `D203` and `D211`) as they represent alternative docstring
formats. Enabling `ALL` without further configuration may result in suboptimal behavior, especially
for the pydocstyle plugin.

If you're wondering how to configure Ruff, here are some **recommended guidelines**:

* Prefer `select` and `ignore` over `extend-select` and `extend-ignore`, to make your rule set
  explicit.
* Use `ALL` with discretion. Enabling `ALL` will implicitly enable new rules whenever you upgrade.
* Start with a small set of rules (`select = ["E", "F"]`) and add a category at-a-time. For example,
  you might consider expanding to `select = ["E", "F", "B"]` to enable the popular flake8-bugbear
  extension.
* By default, Ruff's autofix is aggressive. If you find that it's too aggressive for your liking,
  consider turning off autofix for specific rules or categories (see: [FAQ](https://beta.ruff.rs/docs/faq/#ruff-tried-to-fix-something-but-it-broke-my-code-what-should-i-do)).

### Configure via `ruff.toml`

As an alternative to `pyproject.toml`, Ruff will also respect a `ruff.toml` file, which implements
an equivalent schema (though the `[tool.ruff]` hierarchy can be omitted). For example, the
`pyproject.toml` described above would be represented via the following `ruff.toml`:

```toml
# Enable flake8-bugbear (`B`) rules.
select = ["E", "F", "B"]

# Never enforce `E501` (line length violations).
ignore = ["E501"]

# Avoid trying to fix flake8-bugbear (`B`) violations.
unfixable = ["B"]

# Ignore `E402` (import violations) in all `__init__.py` files, and in `path/to/file.py`.
[per-file-ignores]
"__init__.py" = ["E402"]
"path/to/file.py" = ["E402"]
```

For a full list of configurable options, see the [list of all options](https://beta.ruff.rs/docs/settings/).

### Command-line interface

Some configuration settings can be provided via the command-line, such as those related to
Some configuration options can be provided via the command-line, such as those related to
rule enablement and disablement, file discovery, logging level, and more:

```shell
ruff check path/to/code/ --select F401 --select F403 --quiet
```

See `ruff help` for more on Ruff's top-level commands:
See `ruff help` for more on Ruff's top-level commands, or `ruff help check` for more on the
linting command.

<!-- Begin auto-generated command help. -->
|
||||
|
||||
```text
|
||||
Ruff: An extremely fast Python linter.
|
||||
|
||||
Usage: ruff [OPTIONS] <COMMAND>
|
||||
|
||||
Commands:
|
||||
check Run Ruff on the given files or directories (default)
|
||||
rule Explain a rule
|
||||
config List or describe the available configuration options
|
||||
linter List all supported upstream linters
|
||||
clean Clear any caches in the current directory and any subdirectories
|
||||
help Print this message or the help of the given subcommand(s)
|
||||
|
||||
Options:
|
||||
-h, --help Print help
|
||||
-V, --version Print version
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
|
||||
For help with a specific command, see: `ruff help <command>`.
|
||||
```
|
||||
|
||||
<!-- End auto-generated command help. -->
|
||||
|
||||
Or `ruff help check` for more on the linting command:
|
||||
|
||||
<!-- Begin auto-generated subcommand help. -->
|
||||
|
||||
```text
|
||||
Run Ruff on the given files or directories (default)
|
||||
|
||||
Usage: ruff check [OPTIONS] [FILES]...
|
||||
|
||||
Arguments:
|
||||
[FILES]... List of files or directories to check
|
||||
|
||||
Options:
|
||||
--fix
|
||||
Attempt to automatically fix lint violations
|
||||
--show-source
|
||||
Show violations with source code
|
||||
--show-fixes
|
||||
Show an enumeration of all autofixed lint violations
|
||||
--diff
|
||||
Avoid writing any fixed files back; instead, output a diff for each changed file to stdout
|
||||
-w, --watch
|
||||
Run in watch mode by re-running whenever files change
|
||||
--fix-only
|
||||
Fix any fixable lint violations, but don't report on leftover violations. Implies `--fix`
|
||||
--format <FORMAT>
|
||||
Output serialization format for violations [env: RUFF_FORMAT=] [possible values: text, json, junit, grouped, github, gitlab, pylint]
|
||||
--target-version <TARGET_VERSION>
|
||||
The minimum Python version that should be supported
|
||||
--config <CONFIG>
|
||||
Path to the `pyproject.toml` or `ruff.toml` file to use for configuration
|
||||
--statistics
|
||||
Show counts for every rule with at least one violation
|
||||
--add-noqa
|
||||
Enable automatic additions of `noqa` directives to failing lines
|
||||
--show-files
|
||||
See the files Ruff will be run against with the current settings
|
||||
--show-settings
|
||||
See the settings Ruff will use to lint a given Python file
|
||||
-h, --help
|
||||
Print help
|
||||
|
||||
Rule selection:
|
||||
--select <RULE_CODE>
|
||||
Comma-separated list of rule codes to enable (or ALL, to enable all rules)
|
||||
--ignore <RULE_CODE>
|
||||
Comma-separated list of rule codes to disable
|
||||
--extend-select <RULE_CODE>
|
||||
Like --select, but adds additional rule codes on top of the selected ones
|
||||
--per-file-ignores <PER_FILE_IGNORES>
|
||||
List of mappings from file pattern to code to exclude
|
||||
--fixable <RULE_CODE>
|
||||
List of rule codes to treat as eligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
--unfixable <RULE_CODE>
|
||||
List of rule codes to treat as ineligible for autofix. Only applicable when autofix itself is enabled (e.g., via `--fix`)
|
||||
|
||||
File selection:
|
||||
--exclude <FILE_PATTERN> List of paths, used to omit files and/or directories from analysis
|
||||
--extend-exclude <FILE_PATTERN> Like --exclude, but adds additional files and directories on top of those already excluded
|
||||
--respect-gitignore Respect file exclusions via `.gitignore` and other standard ignore files
|
||||
--force-exclude Enforce exclusions, even for paths passed to Ruff directly on the command-line
|
||||
|
||||
Miscellaneous:
|
||||
-n, --no-cache
|
||||
Disable cache reads
|
||||
--isolated
|
||||
Ignore all configuration files
|
||||
--cache-dir <CACHE_DIR>
|
||||
Path to the cache directory [env: RUFF_CACHE_DIR=]
|
||||
--stdin-filename <STDIN_FILENAME>
|
||||
The name of the file when passing it through stdin
|
||||
-e, --exit-zero
|
||||
Exit with status code "0", even upon detecting lint violations
|
||||
--exit-non-zero-on-fix
|
||||
Exit with a non-zero status code if any files were modified via autofix, even if no lint violations remain
|
||||
|
||||
Log levels:
|
||||
-v, --verbose Enable verbose logging
|
||||
-q, --quiet Print lint violations, but nothing else
|
||||
-s, --silent Disable all logging (but still exit with status code "1" upon detecting lint violations)
|
||||
```
|
||||
|
||||
<!-- End auto-generated subcommand help. -->
|
||||
|
||||
### `pyproject.toml` discovery
|
||||
|
||||
Similar to [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff supports hierarchical configuration, such that the "closest" `pyproject.toml` file in the
|
||||
directory hierarchy is used for every individual file, with all paths in the `pyproject.toml` file
|
||||
(e.g., `exclude` globs, `src` paths) being resolved relative to the directory containing the
|
||||
`pyproject.toml` file.
|
||||
|
||||
There are a few exceptions to these rules:
|
||||
|
||||
1. In locating the "closest" `pyproject.toml` file for a given path, Ruff ignores any
|
||||
`pyproject.toml` files that lack a `[tool.ruff]` section.
|
||||
2. If a configuration file is passed directly via `--config`, those settings are used for across
|
||||
files. Any relative paths in that configuration file (like `exclude` globs or `src` paths) are
|
||||
resolved relative to the _current working directory_.
|
||||
3. If no `pyproject.toml` file is found in the filesystem hierarchy, Ruff will fall back to using
|
||||
a default configuration. If a user-specific configuration file exists
|
||||
at `${config_dir}/ruff/pyproject.toml`, that file will be used instead of the default
|
||||
configuration, with `${config_dir}` being determined via the [`dirs`](https://docs.rs/dirs/4.0.0/dirs/fn.config_dir.html)
|
||||
crate, and all relative paths being again resolved relative to the _current working directory_.
|
||||
4. Any `pyproject.toml`-supported settings that are provided on the command-line (e.g., via
|
||||
`--select`) will override the settings in _every_ resolved configuration file.
|
||||
|
||||
Unlike [ESLint](https://eslint.org/docs/latest/user-guide/configuring/configuration-files#cascading-and-hierarchy),
|
||||
Ruff does not merge settings across configuration files; instead, the "closest" configuration file
|
||||
is used, and any parent configuration files are ignored. In lieu of this implicit cascade, Ruff
|
||||
supports an [`extend`](https://beta.ruff.rs/docs/settings#extend) field, which allows you to inherit the settings from another
|
||||
`pyproject.toml` file, like so:
|
||||
|
||||
```toml
|
||||
# Extend the `pyproject.toml` file in the parent directory.
|
||||
extend = "../pyproject.toml"
|
||||
# But use a different line length.
|
||||
line-length = 100
|
||||
```
|
||||
|
||||
All of the above rules apply equivalently to `ruff.toml` files. If Ruff detects both a `ruff.toml`
|
||||
and `pyproject.toml` file, it will give precedence to the `ruff.toml`.
|
||||
|
||||
### Python file discovery
|
||||
|
||||
When passed a path on the command-line, Ruff will automatically discover all Python files in that
|
||||
path, taking into account the [`exclude`](https://beta.ruff.rs/docs/settings#exclude) and
|
||||
[`extend-exclude`](https://beta.ruff.rs/docs/settings#extend-exclude) settings in each directory's
|
||||
`pyproject.toml` file.
|
||||
|
||||
By default, Ruff will also skip any files that are omitted via `.ignore`, `.gitignore`,
|
||||
`.git/info/exclude`, and global `gitignore` files (see: [`respect-gitignore`](https://beta.ruff.rs/docs/settings#respect-gitignore)).
|
||||
|
||||
Files that are passed to `ruff` directly are always linted, regardless of the above criteria.
|
||||
For example, `ruff check /path/to/excluded/file.py` will always lint `file.py`.
|
||||
|
||||
### Rule resolution
|
||||
|
||||
The set of enabled rules is controlled via the [`select`](https://beta.ruff.rs/docs/settings#select)
|
||||
and [`ignore`](https://beta.ruff.rs/docs/settings#ignore) settings, along with the
|
||||
[`extend-select`](https://beta.ruff.rs/docs/settings#extend-select) and
|
||||
[`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore) modifiers.
|
||||
|
||||
To resolve the enabled rule set, Ruff may need to reconcile `select` and `ignore` from a variety
|
||||
of sources, including the current `pyproject.toml`, any inherited `pyproject.toml` files, and the
|
||||
CLI (e.g., `--select`).
|
||||
|
||||
In those scenarios, Ruff uses the "highest-priority" `select` as the basis for the rule set, and
|
||||
then applies any `extend-select`, `ignore`, and `extend-ignore` adjustments. CLI options are given
|
||||
higher priority than `pyproject.toml` options, and the current `pyproject.toml` file is given higher
|
||||
priority than any inherited `pyproject.toml` files.
|
||||
|
||||
For example, given the following `pyproject.toml` file:
|
||||
|
||||
```toml
|
||||
[tool.ruff]
|
||||
select = ["E", "F"]
|
||||
ignore = ["F401"]
|
||||
```
|
||||
|
||||
Running `ruff check --select F401` would result in Ruff enforcing `F401`, and no other rules.
|
||||
|
||||
Running `ruff check --extend-select B` would result in Ruff enforcing the `E`, `F`, and `B` rules, with
|
||||
the exception of `F401`.
|
||||
|
||||
### Suppressing errors
|
||||
|
||||
To omit a lint rule entirely, add it to the "ignore" list via [`ignore`](https://beta.ruff.rs/docs/settings#ignore)
|
||||
or [`extend-ignore`](https://beta.ruff.rs/docs/settings#extend-ignore), either on the command-line
|
||||
or in your `pyproject.toml` file.
|
||||
|
||||
To ignore a violation inline, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html).
|
||||
To ignore an individual violation, add `# noqa: {code}` to the end of the line, like so:
|
||||
|
||||
```python
|
||||
# Ignore F841.
|
||||
x = 1 # noqa: F841
|
||||
|
||||
# Ignore E741 and F841.
|
||||
i = 1 # noqa: E741, F841
|
||||
|
||||
# Ignore _all_ violations.
|
||||
x = 1 # noqa
|
||||
```
|
||||
|
||||
Note that, for multi-line strings, the `noqa` directive should come at the end of the string, and
|
||||
will apply to the entire string, like so:
|
||||
|
||||
```python
|
||||
"""Lorem ipsum dolor sit amet.
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor.
|
||||
""" # noqa: E501
|
||||
```
|
||||
|
||||
To ignore all violations across an entire file, add `# ruff: noqa` to any line in the file, like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa
|
||||
```
|
||||
|
||||
To ignore a specific rule across an entire file, add `# ruff: noqa: {code}` to any line in the file,
|
||||
like so:
|
||||
|
||||
```python
|
||||
# ruff: noqa: F841
|
||||
```
|
||||
|
||||
Or see the [`per-file-ignores`](https://beta.ruff.rs/docs/settings#per-file-ignores) configuration
|
||||
setting, which enables the same functionality via a `pyproject.toml` file.
|
||||
|
||||
Note that Ruff will also respect Flake8's `# flake8: noqa` directive, and will treat it as
|
||||
equivalent to `# ruff: noqa`.
|
||||
|
||||
#### Automatic error suppression
|
||||
|
||||
Ruff supports several workflows to aid in `noqa` management.
|
||||
|
||||
First, Ruff provides a special rule code, `RUF100`, to enforce that your `noqa` directives are
|
||||
"valid", in that the violations they _say_ they ignore are actually being triggered on that line (and
|
||||
thus suppressed). You can run `ruff check /path/to/file.py --extend-select RUF100` to flag unused `noqa`
|
||||
directives.
|
||||
|
||||
Second, Ruff can _automatically remove_ unused `noqa` directives via its autofix functionality.
|
||||
You can run `ruff check /path/to/file.py --extend-select RUF100 --fix` to automatically remove unused
|
||||
`noqa` directives.
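As a minimal, hypothetical illustration (the file and violations below are invented for this example, not drawn from the Ruff test suite), the first `noqa` directive suppresses a real violation, while the second suppresses nothing, so `RUF100` would flag it and `--fix` would remove it:

```python
# example.py

# `os` is imported but never used, so F401 fires on this line and the directive is "valid".
import os  # noqa: F401

# Nothing on this line triggers E741, so running
# `ruff check example.py --extend-select RUF100 --fix` would flag and remove this directive.
x = 1  # noqa: E741
```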
|
||||
|
||||
Third, Ruff can _automatically add_ `noqa` directives to all failing lines. This is useful when
|
||||
migrating an existing codebase to Ruff. You can run `ruff check /path/to/file.py --add-noqa` to automatically
|
||||
add `noqa` directives to all failing lines, with the appropriate rule codes.
|
||||
|
||||
#### Action comments
|
||||
|
||||
Ruff respects `isort`'s [action comments](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
(`# isort: skip_file`, `# isort: on`, `# isort: off`, `# isort: skip`, and `# isort: split`), which
|
||||
enable selectively enabling and disabling import sorting for blocks of code and other inline
|
||||
configuration.
|
||||
|
||||
See the [`isort` documentation](https://pycqa.github.io/isort/docs/configuration/action_comments.html)
|
||||
for more.
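As a rough sketch (a hypothetical module, not taken from the isort or Ruff documentation), the action comments below disable import sorting for one block, for instance when import order carries side effects, and re-enable it afterwards:

```python
import collections
import os

# isort: off
# Import order in this block is significant (e.g., due to import-time side effects),
# so sorting is disabled here and these lines are left untouched.
import sys
import argparse
# isort: on

import json  # sorting applies again from this point onward
```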
|
||||
|
||||
### Exit codes
|
||||
|
||||
By default, Ruff exits with the following status codes:
|
||||
|
||||
* `0` if no violations were found, or if all present violations were fixed automatically.
|
||||
* `1` if violations were found.
|
||||
* `2` if Ruff terminates abnormally due to invalid configuration, invalid CLI options, or an internal error.
|
||||
|
||||
This convention mirrors that of tools like ESLint, Prettier, and RuboCop.
|
||||
|
||||
Ruff supports two command-line flags that alter its exit code behavior:
|
||||
|
||||
* `--exit-zero` will cause Ruff to exit with a status code of `0` even if violations were found.
|
||||
Note that Ruff will still exit with a status code of `2` if it terminates abnormally.
|
||||
* `--exit-non-zero-on-fix` will cause Ruff to exit with a status code of `1` if violations were
|
||||
found, _even if_ all such violations were fixed automatically. Note that the use of
|
||||
`--exit-non-zero-on-fix` can result in a non-zero exit code even if no violations remain after
|
||||
autofixing.
|
||||
|
||||
### Autocompletion
|
||||
|
||||
Ruff supports autocompletion for most shells. A shell-specific completion script can be generated
|
||||
by `ruff generate-shell-completion <SHELL>`, where `<SHELL>` is one of `bash`, `elvish`, `fig`, `fish`,
|
||||
`powershell`, or `zsh`.
|
||||
|
||||
The exact steps required to enable autocompletion will vary by shell. For example instructions,
|
||||
see the [Poetry](https://python-poetry.org/docs/#enable-tab-completion-for-bash-fish-or-zsh) or
|
||||
[ripgrep](https://github.com/BurntSushi/ripgrep/blob/master/FAQ.md#complete) documentation.
|
||||
|
||||
As an example: to enable autocompletion for Zsh, run
|
||||
`ruff generate-shell-completion zsh > ~/.zfunc/_ruff`. Then add the following lines to your
|
||||
`~/.zshrc` file, if they're not already present:
|
||||
|
||||
```zsh
|
||||
fpath+=~/.zfunc
|
||||
autoload -Uz compinit && compinit
|
||||
```
|
||||
|
||||
<!-- End section: Configuration -->
|
||||
|
||||
## Supported Rules
|
||||
## Rules
|
||||
|
||||
<!-- Begin section: Rules -->
|
||||
|
||||
Ruff supports over 400 lint rules, many of which are inspired by popular tools like Flake8, isort,
|
||||
pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
**Ruff supports over 500 lint rules**, many of which are inspired by popular tools like Flake8,
|
||||
isort, pyupgrade, and others. Regardless of the rule's origin, Ruff re-implements every rule in
|
||||
Rust as a first-party feature.
|
||||
|
||||
By default, Ruff enables Flake8's `E` and `F` rules. Ruff supports all rules from the `F` category,
|
||||
and a [subset](https://beta.ruff.rs/docs/rules/#error-e) of the `E` category, omitting those
|
||||
stylistic rules made obsolete by the use of an autoformatter, like [Black](https://github.com/psf/black).
|
||||
stylistic rules made obsolete by the use of an autoformatter, like
|
||||
[Black](https://github.com/psf/black).
|
||||
|
||||
If you're just getting started with Ruff, **the default rule set is a great place to start**: it
|
||||
catches a wide variety of common errors (like unused imports) with zero configuration.
|
||||
|
||||
<!-- End section: Rules -->
|
||||
|
||||
For a complete enumeration, see the [list of rules](https://beta.ruff.rs/docs/rules/) in the
|
||||
Ruff documentation.
|
||||
For a complete enumeration of the supported rules, see [_Rules_](https://beta.ruff.rs/docs/rules/).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome and highly appreciated. To get started, check out the
|
||||
[**contributing guidelines**](https://github.com/charliermarsh/ruff/blob/main/CONTRIBUTING.md). You
|
||||
can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
[**contributing guidelines**](https://beta.ruff.rs/docs/contributing/).
|
||||
|
||||
You can also join us on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
## Support
|
||||
|
||||
@@ -677,8 +245,6 @@ or feel free to [**open a new one**](https://github.com/charliermarsh/ruff/issue
|
||||
|
||||
You can also ask for help on [**Discord**](https://discord.gg/c9MhzV8aU5).
|
||||
|
||||
<!-- Begin section: Acknowledgements -->
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
Ruff's linter draws on both the APIs and implementation details of many other
|
||||
@@ -701,45 +267,45 @@ Ruff is the beneficiary of a large number of [contributors](https://github.com/c
|
||||
|
||||
Ruff is released under the MIT license.
|
||||
|
||||
<!-- End section: Acknowledgements -->
|
||||
|
||||
## Who's Using Ruff?
|
||||
|
||||
Ruff is used in a number of major open-source projects, including:
|
||||
|
||||
* [pandas](https://github.com/pandas-dev/pandas)
|
||||
* [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
* [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
* [Diffusers (Hugging Face)](https://github.com/huggingface/diffusers)
|
||||
* [Apache Airflow](https://github.com/apache/airflow)
|
||||
* [SciPy](https://github.com/scipy/scipy)
|
||||
* [Zulip](https://github.com/zulip/zulip)
|
||||
* [Bokeh](https://github.com/bokeh/bokeh)
|
||||
* [Pydantic](https://github.com/pydantic/pydantic)
|
||||
* [Dagster](https://github.com/dagster-io/dagster)
|
||||
* [Dagger](https://github.com/dagger/dagger)
|
||||
* [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
* [Hatch](https://github.com/pypa/hatch)
|
||||
* [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
* [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
* [ONNX](https://github.com/onnx/onnx)
|
||||
* [Polars](https://github.com/pola-rs/polars)
|
||||
* [Ibis](https://github.com/ibis-project/ibis)
|
||||
* [Synapse (Matrix)](https://github.com/matrix-org/synapse)
|
||||
* [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
|
||||
* [Dispatch (Netflix)](https://github.com/Netflix/dispatch)
|
||||
* [Saleor](https://github.com/saleor/saleor)
|
||||
* [Pynecone](https://github.com/pynecone-io/pynecone)
|
||||
* [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
* [Home Assistant](https://github.com/home-assistant/core)
|
||||
* [Pylint](https://github.com/PyCQA/pylint)
|
||||
* [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
* [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
|
||||
* [build (PyPA)](https://github.com/pypa/build)
|
||||
* [Babel](https://github.com/python-babel/babel)
|
||||
* [featuretools](https://github.com/alteryx/featuretools)
|
||||
* [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
* [ZenML](https://github.com/zenml-io/zenml)
|
||||
- [pandas](https://github.com/pandas-dev/pandas)
|
||||
- [FastAPI](https://github.com/tiangolo/fastapi)
|
||||
- [Transformers (Hugging Face)](https://github.com/huggingface/transformers)
|
||||
- [Diffusers (Hugging Face)](https://github.com/huggingface/diffusers)
|
||||
- [Apache Airflow](https://github.com/apache/airflow)
|
||||
- [SciPy](https://github.com/scipy/scipy)
|
||||
- [Zulip](https://github.com/zulip/zulip)
|
||||
- [Bokeh](https://github.com/bokeh/bokeh)
|
||||
- [Pydantic](https://github.com/pydantic/pydantic)
|
||||
- [Dagster](https://github.com/dagster-io/dagster)
|
||||
- [Dagger](https://github.com/dagger/dagger)
|
||||
- [Sphinx](https://github.com/sphinx-doc/sphinx)
|
||||
- [Hatch](https://github.com/pypa/hatch)
|
||||
- [PDM](https://github.com/pdm-project/pdm)
|
||||
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
|
||||
- [Great Expectations](https://github.com/great-expectations/great_expectations)
|
||||
- [ONNX](https://github.com/onnx/onnx)
|
||||
- [Polars](https://github.com/pola-rs/polars)
|
||||
- [Ibis](https://github.com/ibis-project/ibis)
|
||||
- [Synapse (Matrix)](https://github.com/matrix-org/synapse)
|
||||
- [SnowCLI (Snowflake)](https://github.com/Snowflake-Labs/snowcli)
|
||||
- [Dispatch (Netflix)](https://github.com/Netflix/dispatch)
|
||||
- [Saleor](https://github.com/saleor/saleor)
|
||||
- [Pynecone](https://github.com/pynecone-io/pynecone)
|
||||
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)
|
||||
- [Home Assistant](https://github.com/home-assistant/core)
|
||||
- [Pylint](https://github.com/PyCQA/pylint)
|
||||
- [Cryptography (PyCA)](https://github.com/pyca/cryptography)
|
||||
- [cibuildwheel (PyPA)](https://github.com/pypa/cibuildwheel)
|
||||
- [build (PyPA)](https://github.com/pypa/build)
|
||||
- [Babel](https://github.com/python-babel/babel)
|
||||
- [featuretools](https://github.com/alteryx/featuretools)
|
||||
- [meson-python](https://github.com/mesonbuild/meson-python)
|
||||
- [ZenML](https://github.com/zenml-io/zenml)
|
||||
- [delta-rs](https://github.com/delta-io/delta-rs)
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.0.248"
|
||||
version = "0.0.253"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
|
||||
|
||||
@@ -84,7 +84,7 @@ flake8-to-ruff path/to/.flake8 --plugin flake8-builtins --plugin flake8-quotes
|
||||
1. Ruff only supports a subset of the Flake8 configuration options. `flake8-to-ruff` will warn on and
|
||||
ignore unsupported options in the `.flake8` file (or equivalent). (Similarly, Ruff has a few
|
||||
configuration options that don't exist in Flake8.)
|
||||
2. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||
1. Ruff will omit any rule codes that are unimplemented or unsupported by Ruff, including rule
|
||||
codes from unsupported plugins. (See the [Ruff README](https://github.com/charliermarsh/ruff#user-content-how-does-ruff-compare-to-flake8)
|
||||
for the complete list of supported plugins.)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "ruff"
|
||||
version = "0.0.248"
|
||||
version = "0.0.253"
|
||||
authors = ["Charlie Marsh <charlie.r.marsh@gmail.com>"]
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
@@ -16,6 +16,10 @@ crate-type = ["cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
ruff_rustpython = { path = "../ruff_rustpython" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
bisection = { version = "0.1.0" }
|
||||
bitflags = { version = "1.3.2" }
|
||||
@@ -23,12 +27,14 @@ cfg-if = { version = "1.0.0" }
|
||||
chrono = { version = "0.4.21", default-features = false, features = ["clock"] }
|
||||
clap = { workspace = true, features = ["derive", "env", "string"] }
|
||||
colored = { version = "2.0.0" }
|
||||
derivative = { version = "2.2.0" }
|
||||
dirs = { version = "4.0.0" }
|
||||
fern = { version = "0.6.1" }
|
||||
glob = { version = "0.3.0" }
|
||||
globset = { version = "0.4.9" }
|
||||
ignore = { version = "0.4.18" }
|
||||
imperative = { version = "1.0.3" }
|
||||
is-macro = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
libcst = { workspace = true }
|
||||
log = { version = "0.4.17" }
|
||||
@@ -39,8 +45,7 @@ num-traits = "0.2.15"
|
||||
once_cell = { workspace = true }
|
||||
path-absolutize = { version = "3.0.14", features = ["once_cell_cache", "use_unix_paths_on_wasm"] }
|
||||
regex = { workspace = true }
|
||||
ruff_macros = { path = "../ruff_macros" }
|
||||
ruff_python = { path = "../ruff_python" }
|
||||
result-like = "0.4.6"
|
||||
rustc-hash = { workspace = true }
|
||||
rustpython-common = { workspace = true }
|
||||
rustpython-parser = { workspace = true }
|
||||
@@ -57,7 +62,6 @@ titlecase = { version = "2.2.1" }
|
||||
toml = { workspace = true }
|
||||
|
||||
# https://docs.rs/getrandom/0.2.7/getrandom/#webassembly-support
|
||||
# For (future) wasm-pack support
|
||||
[target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]
|
||||
getrandom = { version = "0.2.7", features = ["js"] }
|
||||
console_error_panic_hook = { version = "0.1.7" }
|
||||
@@ -66,9 +70,6 @@ serde-wasm-bindgen = { version = "0.4" }
|
||||
js-sys = { version = "0.3.60" }
|
||||
wasm-bindgen = { version = "0.2.83" }
|
||||
|
||||
[target.'cfg(not(target_family = "wasm"))'.dependencies]
|
||||
is_executable = "1.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { version = "1.19.0", features = ["yaml", "redactions"] }
|
||||
test-case = { version = "2.2.2" }
|
||||
|
||||
@@ -107,3 +107,7 @@ class Foo:
|
||||
# ANN101
|
||||
def foo(self, /, a: int, b: int) -> int:
|
||||
pass
|
||||
|
||||
|
||||
# OK
|
||||
def f(*args: *tuple[int]) -> None: ...
|
||||
|
||||
@@ -19,6 +19,8 @@ token = "s3cr3t"
|
||||
secrete = "s3cr3t"
|
||||
safe = password = "s3cr3t"
|
||||
password = safe = "s3cr3t"
|
||||
PASSWORD = "s3cr3t"
|
||||
PassWord = "s3cr3t"
|
||||
|
||||
d["password"] = "s3cr3t"
|
||||
d["pass"] = "s3cr3t"
|
||||
@@ -61,3 +63,15 @@ if token == "3\t4":
|
||||
|
||||
if token == "5\r6":
|
||||
pass
|
||||
|
||||
|
||||
# These should not be flagged
|
||||
passed_msg = "You have passed!"
|
||||
compassion = "Please don't match!"
|
||||
impassable = "You shall not pass!"
|
||||
passwords = ""
|
||||
PASSWORDS = ""
|
||||
passphrases = ""
|
||||
PassPhrases = ""
|
||||
tokens = ""
|
||||
secrets = ""
|
||||
|
||||
@@ -57,6 +57,12 @@ dict.fromkeys(("world",), True)
|
||||
{}.deploy(True, False)
|
||||
getattr(someobj, attrname, False)
|
||||
mylist.index(True)
|
||||
int(True)
|
||||
str(int(False))
|
||||
cfg.get("hello", True)
|
||||
cfg.getint("hello", True)
|
||||
cfg.getfloat("hello", True)
|
||||
cfg.getboolean("hello", True)
|
||||
|
||||
|
||||
class Registry:
|
||||
@@ -66,3 +72,11 @@ class Registry:
|
||||
# FBT001: Boolean positional arg in function definition
|
||||
def __setitem__(self, switch: Switch, value: bool) -> None:
|
||||
self._switches[switch.value] = value
|
||||
|
||||
@foo.setter
|
||||
def foo(self, value: bool) -> None:
|
||||
pass
|
||||
|
||||
# FBT001: Boolean positional arg in function definition
|
||||
def foo(self, value: bool) -> None:
|
||||
pass
|
||||
|
||||
@@ -105,3 +105,25 @@ while True:
|
||||
pass
|
||||
finally:
|
||||
break # warning
|
||||
|
||||
|
||||
while True:
|
||||
try:
|
||||
pass
|
||||
finally:
|
||||
match *0, 1, *2:
|
||||
case 0,:
|
||||
y = 0
|
||||
case 0, *x:
|
||||
break # warning
|
||||
|
||||
|
||||
while True:
|
||||
try:
|
||||
pass
|
||||
finally:
|
||||
match *0, 1, *2:
|
||||
case 0,:
|
||||
y = 0
|
||||
case 0, *x:
|
||||
pass # no warning
|
||||
|
||||
14
crates/ruff/resources/test/fixtures/flake8_bugbear/B029.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_bugbear/B029.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""
|
||||
Should emit:
|
||||
B029 - on lines 8 and 13
|
||||
"""
|
||||
|
||||
try:
|
||||
pass
|
||||
except ():
|
||||
pass
|
||||
|
||||
try:
|
||||
pass
|
||||
except () as e:
|
||||
pass
|
||||
29
crates/ruff/resources/test/fixtures/flake8_bugbear/B032.py
vendored
Normal file
29
crates/ruff/resources/test/fixtures/flake8_bugbear/B032.py
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
"""
|
||||
Should emit:
|
||||
B032 - on lines 9, 10, 12, 13, 16-19
|
||||
"""
|
||||
|
||||
# Flag these
|
||||
dct = {"a": 1}
|
||||
|
||||
dct["b"]: 2
|
||||
dct.b: 2
|
||||
|
||||
dct["b"]: "test"
|
||||
dct.b: "test"
|
||||
|
||||
test = "test"
|
||||
dct["b"]: test
|
||||
dct["b"]: test.lower()
|
||||
dct.b: test
|
||||
dct.b: test.lower()
|
||||
|
||||
# Do not flag below
|
||||
typed_dct: dict[str, int] = {"a": 1}
|
||||
typed_dct["b"] = 2
|
||||
typed_dct.b = 2
|
||||
|
||||
|
||||
class TestClass:
|
||||
def test_self(self):
|
||||
self.test: int
|
||||
@@ -62,3 +62,11 @@ except Exception as e:
|
||||
raise RuntimeError("boom!")
|
||||
else:
|
||||
raise RuntimeError("bang!")
|
||||
|
||||
|
||||
try:
|
||||
...
|
||||
except Exception as e:
|
||||
match 0:
|
||||
case 0:
|
||||
raise RuntimeError("boom!")
|
||||
|
||||
@@ -626,3 +626,8 @@ result = function(
|
||||
bar,
|
||||
**{'ham': spam}
|
||||
)
|
||||
|
||||
# Make sure the COM812 and UP034 rules don't autofix simultaneously and cause a syntax error.
|
||||
the_first_one = next(
|
||||
(i for i in range(10) if i // 2 == 0) # COM812 fix should include the final bracket
|
||||
)
|
||||
|
||||
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
21
crates/ruff/resources/test/fixtures/flake8_django/DJ003.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
from django.shortcuts import render
|
||||
|
||||
|
||||
def test_view1(request):
|
||||
return render(request, "index.html", locals())
|
||||
|
||||
|
||||
def test_view2(request):
|
||||
return render(request, "index.html", context=locals())
|
||||
|
||||
|
||||
def test_view3(request):
|
||||
return render(request, "index.html")
|
||||
|
||||
|
||||
def test_view4(request):
|
||||
return render(request, "index.html", {})
|
||||
|
||||
|
||||
def test_view5(request):
|
||||
return render(request, "index.html", context={})
|
||||
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_django/DJ006.py
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
exclude = ["bar"]
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
16
crates/ruff/resources/test/fixtures/flake8_django/DJ007.py
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
from django import forms
|
||||
|
||||
|
||||
class TestModelForm1(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class TestModelForm2(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = b"__all__"
|
||||
|
||||
|
||||
class TestModelForm3(forms.ModelForm):
|
||||
class Meta:
|
||||
fields = ["foo"]
|
||||
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
10
crates/ruff/resources/test/fixtures/flake8_pie/PIE802.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
# no error
|
||||
all((x.id for x in bar))
|
||||
all(x.id for x in bar)
|
||||
all(x.id for x in bar)
|
||||
any(x.id for x in bar)
|
||||
any({x.id for x in bar})
|
||||
|
||||
# PIE 802
|
||||
any([x.id for x in bar])
|
||||
all([x.id for x in bar])
|
||||
@@ -18,3 +18,10 @@ class Foo:
|
||||
|
||||
class FooTable(BaseTable):
|
||||
bar = fields.ListField(list)
|
||||
|
||||
|
||||
lambda *args, **kwargs: []
|
||||
|
||||
lambda *args: []
|
||||
|
||||
lambda **kwargs: []
|
||||
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import sys
|
||||
from sys import version_info as python_version
|
||||
|
||||
if sys.version_info < (3, 9): ... # OK
|
||||
|
||||
if sys.version_info >= (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # OK
|
||||
|
||||
if sys.version_info <= (3, 10): ... # OK
|
||||
|
||||
if sys.version_info > (3, 10): ... # OK
|
||||
|
||||
if python_version > (3, 10): ... # OK
|
||||
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.pyi
vendored
Normal file
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI006.pyi
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import sys
|
||||
from sys import version_info as python_version
|
||||
|
||||
if sys.version_info < (3, 9): ... # OK
|
||||
|
||||
if sys.version_info >= (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # OK
|
||||
|
||||
if sys.version_info == (3, 9): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info <= (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info <= (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if sys.version_info > (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
|
||||
if python_version > (3, 10): ... # Error: PYI006 Use only `<` and `>=` for version info comparisons
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
pass # OK, since we're not in a stub file
|
||||
|
||||
|
||||
class Bar:
|
||||
... # OK
|
||||
|
||||
|
||||
class Foo:
|
||||
pass # OK, since we're not in a stub file
|
||||
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
8
crates/ruff/resources/test/fixtures/flake8_pyi/PYI009.pyi
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
|
||||
class Bar: ... # OK
|
||||
|
||||
class Foo:
|
||||
pass # ERROR PYI009, since we're in a stub file
|
||||
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
18
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
def bar():
|
||||
... # OK
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK
|
||||
|
||||
|
||||
def buzz():
|
||||
print("buzz") # OK, not in stub file
|
||||
|
||||
|
||||
def foo2():
|
||||
123 # OK, not in a stub file
|
||||
|
||||
|
||||
def bizz():
|
||||
x = 123 # OK, not in a stub file
|
||||
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
12
crates/ruff/resources/test/fixtures/flake8_pyi/PYI010.pyi
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
def bar(): ... # OK
|
||||
def foo():
|
||||
"""foo""" # OK, strings are handled by another rule
|
||||
|
||||
def buzz():
|
||||
print("buzz") # ERROR PYI010
|
||||
|
||||
def foo2():
|
||||
123 # ERROR PYI010
|
||||
|
||||
def bizz():
|
||||
x = 123 # ERROR PYI010
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x: str = "x") -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f13(
|
||||
x: list[str] = [
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
] # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x: tuple[str, ...] = (
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
) # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x: set[str] = {
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
} # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x: frozenset[bytes] = frozenset({b"foo", b"bar", b"baz"})) -> None: # OK
|
||||
...
|
||||
|
||||
|
||||
def f17(
|
||||
x: str = "foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x: str = b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x: object = "foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x: int = 5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x: complex = 3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x: complex = -42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
63
crates/ruff/resources/test/fixtures/flake8_pyi/PYI011.pyi
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
def f12(
|
||||
x,
|
||||
y: str = os.pathsep, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f11(*, x: str = "x") -> None: ... # OK
|
||||
def f13(
|
||||
x: list[
|
||||
str
|
||||
] = [ # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x: tuple[
|
||||
str, ...
|
||||
] = ( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x: set[
|
||||
str
|
||||
] = { # Error PYI011 Only simple default values allowed for typed arguments
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(
|
||||
x: frozenset[
|
||||
bytes
|
||||
] = frozenset( # Error PYI011 Only simple default values allowed for typed arguments
|
||||
{b"foo", b"bar", b"baz"}
|
||||
)
|
||||
) -> None: ...
|
||||
def f17(
|
||||
x: str = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ "bar",
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x: str = b"foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ b"bar",
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x: object = "foo" # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4,
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x: int = 5
|
||||
+ 5, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x: complex = 3j
|
||||
- 3j, # Error PYI011 Only simple default values allowed for typed arguments
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x: complex = -42.5j # Error PYI011 Only simple default values allowed for typed arguments
|
||||
+ 4.3j,
|
||||
) -> None: ...
|
||||
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
79
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f11(*, x="x") -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f13(
|
||||
x=[ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f14(
|
||||
x=( # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f15(
|
||||
x={ # OK
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None:
|
||||
... # OK
|
||||
|
||||
|
||||
def f17(
|
||||
x="foo" + "bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f19(
|
||||
x="foo" + 4, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f20(
|
||||
x=5 + 5, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f21(
|
||||
x=3j - 3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # OK
|
||||
) -> None:
|
||||
...
|
||||
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
45
crates/ruff/resources/test/fixtures/flake8_pyi/PYI014.pyi
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
def f12(
|
||||
x,
|
||||
y=os.pathsep, # Error PYI014
|
||||
) -> None: ...
|
||||
def f11(*, x="x") -> None: ... # OK
|
||||
def f13(
|
||||
x=[ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
]
|
||||
) -> None: ...
|
||||
def f14(
|
||||
x=( # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
)
|
||||
) -> None: ...
|
||||
def f15(
|
||||
x={ # Error PYI014
|
||||
"foo",
|
||||
"bar",
|
||||
"baz",
|
||||
}
|
||||
) -> None: ...
|
||||
def f16(x=frozenset({b"foo", b"bar", b"baz"})) -> None: ... # Error PYI014
|
||||
def f17(
|
||||
x="foo" + "bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f18(
|
||||
x=b"foo" + b"bar", # Error PYI014
|
||||
) -> None: ...
|
||||
def f19(
|
||||
x="foo" + 4, # Error PYI014
|
||||
) -> None: ...
|
||||
def f20(
|
||||
x=5 + 5, # Error PYI014
|
||||
) -> None: ...
|
||||
def f21(
|
||||
x=3j - 3j, # Error PYI014
|
||||
) -> None: ...
|
||||
def f22(
|
||||
x=-42.5j + 4.3j, # Error PYI014
|
||||
) -> None: ...
|
||||
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
14
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"""foo""" # OK, not in stub
|
||||
|
||||
|
||||
def foo():
|
||||
"""foo""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
class Bar:
|
||||
"""bar""" # OK, doc strings are allowed in non-stubs
|
||||
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
11
crates/ruff/resources/test/fixtures/flake8_pyi/PYI021.pyi
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
def foo():
|
||||
"""foo""" # ERROR PYI021
|
||||
|
||||
class Bar:
|
||||
"""bar""" # ERROR PYI021
|
||||
|
||||
def bar():
|
||||
x = 1
|
||||
"""foo""" # OK, not a doc string
|
||||
@@ -17,6 +17,12 @@ class Test(unittest.TestCase):
|
||||
self.assertTrue(**{"expr": expr, "msg": msg}) # Error, unfixable
|
||||
self.assertTrue(msg=msg, expr=expr, unexpected_arg=False) # Error, unfixable
|
||||
self.assertTrue(msg=msg) # Error, unfixable
|
||||
(
|
||||
self.assertIsNotNone(value) # Error, unfixable
|
||||
if expect_condition
|
||||
else self.assertIsNone(value) # Error, unfixable
|
||||
)
|
||||
return self.assertEqual(True, False) # Error, unfixable
|
||||
|
||||
def test_assert_false(self):
|
||||
self.assertFalse(True) # Error
|
||||
|
||||
@@ -9,6 +9,7 @@ def test_ok():
|
||||
assert something, "something message"
|
||||
assert something or something_else and something_third, "another message"
|
||||
|
||||
|
||||
def test_error():
|
||||
assert something and something_else
|
||||
assert something and something_else and something_third
|
||||
@@ -17,13 +18,24 @@ def test_error():
|
||||
assert not something and something_else
|
||||
assert not (something or something_else)
|
||||
assert not (something or something_else or something_third)
|
||||
assert something and something_else == """error
|
||||
message
|
||||
"""
|
||||
|
||||
# recursive case
|
||||
assert not (a or not (b or c))
|
||||
assert not (a or not (b and c)) # note that we only reduce once here
|
||||
assert not (a or not (b and c))
|
||||
|
||||
# detected, but no autofix for messages
|
||||
assert something and something_else, "error message"
|
||||
assert not (something or something_else and something_third), "with message"
|
||||
# detected, but no autofix for mixed conditions (e.g. `a or b and c`)
|
||||
assert not (something or something_else and something_third)
|
||||
# detected, but no autofix for parenthesized conditions
|
||||
assert (
|
||||
something
|
||||
and something_else
|
||||
== """error
|
||||
message
|
||||
"""
|
||||
)
|
||||
|
||||
@@ -3,6 +3,8 @@ import os
|
||||
import posix
|
||||
from posix import abort
|
||||
import sys as std_sys
|
||||
import typing
|
||||
import typing_extensions
|
||||
import _thread
|
||||
import _winapi
|
||||
|
||||
@@ -77,7 +79,7 @@ def x(y):
|
||||
|
||||
# last line in while loop
|
||||
def x(y):
|
||||
while True:
|
||||
while i > 0:
|
||||
if y > 0:
|
||||
return 1
|
||||
y += 1
|
||||
@@ -211,6 +213,18 @@ def noreturn_sys_exit():
|
||||
std_sys.exit(0)
|
||||
|
||||
|
||||
def noreturn_typing_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing.assert_never(0)
|
||||
|
||||
|
||||
def noreturn_typing_extensions_assert_never():
|
||||
if x > 0:
|
||||
return 1
|
||||
typing_extensions.assert_never(0)
|
||||
|
||||
|
||||
def noreturn__thread_exit():
|
||||
if x > 0:
|
||||
return 1
|
||||
@@ -259,3 +273,23 @@ def nested(values):
|
||||
|
||||
for value in values:
|
||||
print(value)
|
||||
|
||||
|
||||
def while_true():
|
||||
while True:
|
||||
if y > 0:
|
||||
return 1
|
||||
y += 1
|
||||
|
||||
|
||||
# match
|
||||
def x(y):
|
||||
match y:
|
||||
case 0:
|
||||
return 1
|
||||
case 1:
|
||||
print() # error
|
||||
|
||||
|
||||
def foo(baz: str) -> str:
|
||||
return baz
|
||||
|
||||
86
crates/ruff/resources/test/fixtures/flake8_simplify/SIM116.py
vendored
Normal file
86
crates/ruff/resources/test/fixtures/flake8_simplify/SIM116.py
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
# Errors
|
||||
a = "hello"
|
||||
|
||||
# SIM116
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return "baz"
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
else:
|
||||
return 42
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == 1:
|
||||
return (1, 2, 3)
|
||||
elif a == 2:
|
||||
return (4, 5, 6)
|
||||
elif a == 3:
|
||||
return (7, 8, 9)
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return (1, 2, 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# SIM116
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif a == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if a == "hello 'sir'":
|
||||
return ("hello'", 'hi"', 3)
|
||||
elif a == 'goodbye "mam"':
|
||||
return (4, 5, 6)
|
||||
elif a == """Fairwell 'mister'""":
|
||||
return (7, 8, 9)
|
||||
else:
|
||||
return (10, 11, 12)
|
||||
|
||||
# OK
|
||||
if a == "foo":
|
||||
return "bar"
|
||||
elif a == "bar":
|
||||
return baz()
|
||||
elif a == "boo":
|
||||
return "ooh"
|
||||
else:
|
||||
return 42
|
||||
|
||||
# OK
|
||||
if a == b"one":
|
||||
return 1
|
||||
elif b == b"two":
|
||||
return 2
|
||||
elif a == b"three":
|
||||
return 3
|
||||
|
||||
# SIM116
|
||||
if func_name == "create":
|
||||
return "A"
|
||||
elif func_name == "modify":
|
||||
return "M"
|
||||
elif func_name == "remove":
|
||||
return "D"
|
||||
elif func_name == "move":
|
||||
return "MV"
|
||||
@@ -11,6 +11,7 @@ YODA > age # SIM300
|
||||
YODA >= age # SIM300
|
||||
JediOrder.YODA == age # SIM300
|
||||
0 < (number - 100) # SIM300
|
||||
SomeClass().settings.SOME_CONSTANT_VALUE > (60 * 60) # SIM300
|
||||
|
||||
# OK
|
||||
compare == "yoda"
|
||||
|
||||
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
15
crates/ruff/resources/test/fixtures/isort/as_imports_comments.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
from foo import ( # Comment on `foo`
|
||||
Member as Alias, # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bar import ( # Comment on `bar`
|
||||
Member, # Comment on `Member`
|
||||
)
|
||||
|
||||
from baz import ( # Comment on `baz`
|
||||
Member as Alias # Comment on `Alias`
|
||||
)
|
||||
|
||||
from bop import ( # Comment on `bop`
|
||||
Member # Comment on `Member`
|
||||
)
|
||||
@@ -1,2 +1,2 @@
|
||||
from long_module_name import member_one, member_two, member_three, member_four, member_five
|
||||
from long_module_name import member_one, member_two, member_three, member_four, member_five
|
||||
|
||||
|
||||
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
3
crates/ruff/resources/test/fixtures/isort/no_lines_before_with_empty_sections.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
from __future__ import annotations
|
||||
from typing import Any
|
||||
from . import my_local_folder_object
|
||||
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/0001_initial.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/0001_initial.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/__main__.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/__main__.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/__setup__.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/__setup__.py
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
0
crates/ruff/resources/test/fixtures/pep8_naming/N999/module/valid_name/file-with-dashes
vendored
Normal file
50
crates/ruff/resources/test/fixtures/pycodestyle/E25.py
vendored
Normal file
50
crates/ruff/resources/test/fixtures/pycodestyle/E25.py
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
#: E251 E251
|
||||
def foo(bar = False):
|
||||
'''Test function with an error in declaration'''
|
||||
pass
|
||||
#: E251
|
||||
foo(bar= True)
|
||||
#: E251
|
||||
foo(bar =True)
|
||||
#: E251 E251
|
||||
foo(bar = True)
|
||||
#: E251
|
||||
y = bar(root= "sdasd")
|
||||
#: E251:2:29
|
||||
parser.add_argument('--long-option',
|
||||
default=
|
||||
"/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
#: E251:1:45
|
||||
parser.add_argument('--long-option', default
|
||||
="/rather/long/filesystem/path/here/blah/blah/blah")
|
||||
#: E251:3:8 E251:3:10
|
||||
foo(True,
|
||||
baz=(1, 2),
|
||||
biz = 'foo'
|
||||
)
|
||||
#: Okay
|
||||
foo(bar=(1 == 1))
|
||||
foo(bar=(1 != 1))
|
||||
foo(bar=(1 >= 1))
|
||||
foo(bar=(1 <= 1))
|
||||
(options, args) = parser.parse_args()
|
||||
d[type(None)] = _deepcopy_atomic
|
||||
|
||||
# Annotated Function Definitions
|
||||
#: Okay
|
||||
def munge(input: AnyStr, sep: AnyStr = None, limit=1000,
|
||||
extra: Union[str, dict] = None) -> AnyStr:
|
||||
pass
|
||||
#: Okay
|
||||
async def add(a: int = 0, b: int = 0) -> int:
|
||||
return a + b
|
||||
# Previously E251 four times
|
||||
#: E271:1:6
|
||||
async def add(a: int = 0, b: int = 0) -> int:
|
||||
return a + b
|
||||
#: E252:1:15 E252:1:16 E252:1:27 E252:1:36
|
||||
def add(a: int=0, b: int =0, c: int= 0) -> int:
|
||||
return a + b + c
|
||||
#: Okay
|
||||
def add(a: int = _default(name='f')):
|
||||
return a
|
||||
@@ -54,3 +54,9 @@ def f(): ...
|
||||
class C: ...; x = 1
|
||||
#: E701:1:8 E702:1:13
|
||||
class C: ...; ...
|
||||
#: E701:2:12
|
||||
match *0, 1, *2:
|
||||
case 0,: y = 0
|
||||
#:
|
||||
class Foo:
|
||||
match: Optional[Match] = None
|
||||
|
||||
@@ -9,6 +9,9 @@ while False:
|
||||
f = lambda: (yield 1)
|
||||
#: E731
|
||||
f = lambda: (yield from g())
|
||||
#: E731
|
||||
class F:
|
||||
f = lambda x: 2 * x
|
||||
|
||||
f = object()
|
||||
f.method = lambda: "Method"
|
||||
|
||||
145
crates/ruff/resources/test/fixtures/pycodestyle/W19.py
vendored
Normal file
145
crates/ruff/resources/test/fixtures/pycodestyle/W19.py
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
#: W191
|
||||
if False:
|
||||
print # indented with 1 tab
|
||||
#:
|
||||
|
||||
|
||||
#: W191
|
||||
y = x == 2 \
|
||||
or x == 3
|
||||
#: E101 W191 W504
|
||||
if (
|
||||
x == (
|
||||
3
|
||||
) or
|
||||
y == 4):
|
||||
pass
|
||||
#: E101 W191
|
||||
if x == 2 \
|
||||
or y > 1 \
|
||||
or x == 3:
|
||||
pass
|
||||
#: E101 W191
|
||||
if x == 2 \
|
||||
or y > 1 \
|
||||
or x == 3:
|
||||
pass
|
||||
#:
|
||||
|
||||
#: E101 W191 W504
|
||||
if (foo == bar and
|
||||
baz == bop):
|
||||
pass
|
||||
#: E101 W191 W504
|
||||
if (
|
||||
foo == bar and
|
||||
baz == bop
|
||||
):
|
||||
pass
|
||||
#:
|
||||
|
||||
#: E101 E101 W191 W191
|
||||
if start[1] > end_col and not (
|
||||
over_indent == 4 and indent_next):
|
||||
return (0, "E121 continuation line over-"
|
||||
"indented for visual indent")
|
||||
#:
|
||||
|
||||
#: E101 W191
|
||||
|
||||
|
||||
def long_function_name(
|
||||
var_one, var_two, var_three,
|
||||
var_four):
|
||||
print(var_one)
|
||||
#: E101 W191 W504
|
||||
if ((row < 0 or self.moduleCount <= row or
|
||||
col < 0 or self.moduleCount <= col)):
|
||||
raise Exception("%s,%s - %s" % (row, col, self.moduleCount))
|
||||
#: E101 E101 E101 E101 W191 W191 W191 W191 W191 W191
|
||||
if bar:
|
||||
return (
|
||||
start, 'E121 lines starting with a '
|
||||
'closing bracket should be indented '
|
||||
"to match that of the opening "
|
||||
"bracket's line"
|
||||
)
|
||||
#
|
||||
#: E101 W191 W504
|
||||
# you want vertical alignment, so use a parens
|
||||
if ((foo.bar("baz") and
|
||||
foo.bar("bop")
|
||||
)):
|
||||
print "yes"
|
||||
#: E101 W191 W504
|
||||
# also ok, but starting to look like LISP
|
||||
if ((foo.bar("baz") and
|
||||
foo.bar("bop"))):
|
||||
print "yes"
|
||||
#: E101 W191 W504
|
||||
if (a == 2 or
|
||||
b == "abc def ghi"
|
||||
"jkl mno"):
|
||||
return True
|
||||
#: E101 W191 W504
|
||||
if (a == 2 or
|
||||
b == """abc def ghi
|
||||
jkl mno"""):
|
||||
return True
|
||||
#: W191:2:1 W191:3:1 E101:3:2
|
||||
if length > options.max_line_length:
|
||||
return options.max_line_length, \
|
||||
"E501 line too long (%d characters)" % length
|
||||
|
||||
|
||||
#
|
||||
#: E101 W191 W191 W504
|
||||
if os.path.exists(os.path.join(path, PEP8_BIN)):
|
||||
cmd = ([os.path.join(path, PEP8_BIN)] +
|
||||
self._pep8_options(targetfile))
|
||||
#: W191
|
||||
'''
|
||||
multiline string with tab in it'''
|
||||
#: E101 W191
|
||||
'''multiline string
|
||||
with tabs
|
||||
and spaces
|
||||
'''
|
||||
#: Okay
|
||||
'''sometimes, you just need to go nuts in a multiline string
|
||||
and allow all sorts of crap
|
||||
like mixed tabs and spaces
|
||||
|
||||
or trailing whitespace
|
||||
or long long long long long long long long long long long long long long long long long lines
|
||||
''' # nopep8
|
||||
#: Okay
|
||||
'''this one
|
||||
will get no warning
|
||||
even though the noqa comment is not immediately after the string
|
||||
''' + foo # noqa
|
||||
#
|
||||
#: E101 W191
|
||||
if foo is None and bar is "bop" and \
|
||||
blah == 'yeah':
|
||||
blah = 'yeahnah'
|
||||
|
||||
|
||||
#
|
||||
#: W191 W191 W191
|
||||
if True:
|
||||
foo(
|
||||
1,
|
||||
2)
|
||||
#: W191 W191 W191 W191 W191
|
||||
def test_keys(self):
|
||||
"""areas.json - All regions are accounted for."""
|
||||
expected = set([
|
||||
u'Norrbotten',
|
||||
u'V\xe4sterbotten',
|
||||
])
|
||||
#: W191
|
||||
x = [
|
||||
'abc'
|
||||
]
|
||||
#:
|
||||
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
26
crates/ruff/resources/test/fixtures/pycodestyle/W29.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
#: Okay
|
||||
# 情
|
||||
#: W291:1:6
|
||||
print
|
||||
#: W293:2:1
|
||||
class Foo(object):
|
||||
|
||||
bang = 12
|
||||
#: W291:2:35
|
||||
'''multiline
|
||||
string with trailing whitespace'''
|
||||
#: W291 W292 noeol
|
||||
x = 1
|
||||
#: W191 W292 noeol
|
||||
if False:
|
||||
pass # indented with tabs
|
||||
#: W292:1:36 noeol
|
||||
# This line doesn't have a linefeed
|
||||
#: W292:1:5 E225:1:2 noeol
|
||||
1+ 1
|
||||
#: W292:1:27 E261:1:12 noeol
|
||||
import this # no line feed
|
||||
#: W292:3:22 noeol
|
||||
class Test(object):
|
||||
def __repr__(self):
|
||||
return 'test'
|
||||
@@ -1,35 +1,35 @@
|
||||
#: W605:1:10
|
||||
regex = '\.png$'
|
||||
|
||||
#: W605:2:1
|
||||
regex = '''
|
||||
\.png$
|
||||
'''
|
||||
|
||||
#: W605:2:6
|
||||
f(
|
||||
'\_'
|
||||
)
|
||||
|
||||
#: W605:4:6
|
||||
"""
|
||||
multi-line
|
||||
literal
|
||||
with \_ somewhere
|
||||
in the middle
|
||||
"""
|
||||
|
||||
#: Okay
|
||||
regex = r'\.png$'
|
||||
regex = '\\.png$'
|
||||
regex = r'''
|
||||
\.png$
|
||||
'''
|
||||
regex = r'''
|
||||
\\.png$
|
||||
'''
|
||||
s = '\\'
|
||||
regex = '\w' # noqa
|
||||
regex = '''
|
||||
\w
|
||||
''' # noqa
|
||||
#: W605:1:10
|
||||
regex = '\.png$'
|
||||
|
||||
#: W605:2:1
|
||||
regex = '''
|
||||
\.png$
|
||||
'''
|
||||
|
||||
#: W605:2:6
|
||||
f(
|
||||
'\_'
|
||||
)
|
||||
|
||||
#: W605:4:6
|
||||
"""
|
||||
multi-line
|
||||
literal
|
||||
with \_ somewhere
|
||||
in the middle
|
||||
"""
|
||||
|
||||
#: Okay
|
||||
regex = r'\.png$'
|
||||
regex = '\\.png$'
|
||||
regex = r'''
|
||||
\.png$
|
||||
'''
|
||||
regex = r'''
|
||||
\\.png$
|
||||
'''
|
||||
s = '\\'
|
||||
regex = '\w' # noqa
|
||||
regex = '''
|
||||
\w
|
||||
''' # noqa
|
||||
|
||||
@@ -16,21 +16,17 @@ if False == None: # E711, E712 (fix)
|
||||
if None == False: # E711, E712 (fix)
|
||||
pass
|
||||
|
||||
###
|
||||
# Unfixable errors
|
||||
###
|
||||
if "abc" == None: # E711
|
||||
pass
|
||||
if None == "abc": # E711
|
||||
pass
|
||||
if "abc" == False: # E712
|
||||
pass
|
||||
if False == "abc": # E712
|
||||
pass
|
||||
|
||||
###
|
||||
# Non-errors
|
||||
###
|
||||
if "abc" == None:
|
||||
pass
|
||||
if None == "abc":
|
||||
pass
|
||||
if "abc" == False:
|
||||
pass
|
||||
if False == "abc":
|
||||
pass
|
||||
if "def" == "abc":
|
||||
pass
|
||||
if False is None:
|
||||
|
||||
@@ -85,3 +85,10 @@ else:
|
||||
|
||||
|
||||
CustomInt: TypeAlias = "np.int8 | np.int16"
|
||||
|
||||
|
||||
# Test: match statements.
|
||||
match *0, 1, *2:
|
||||
case 0,:
|
||||
import x
|
||||
import y
|
||||
|
||||
10
crates/ruff/resources/test/fixtures/pyflakes/F401_10.py
vendored
Normal file
10
crates/ruff/resources/test/fixtures/pyflakes/F401_10.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
"""Test: imports within `ModuleNotFoundError` handlers."""
|
||||
|
||||
|
||||
def check_orjson():
|
||||
try:
|
||||
import orjson
|
||||
|
||||
return True
|
||||
except ModuleNotFoundError:
|
||||
return False
|
||||
@@ -1,7 +1,7 @@
|
||||
"""
|
||||
Test that shadowing a global with a class attribute does not produce a
|
||||
warning.
|
||||
"""
|
||||
Test that shadowing a global with a class attribute does not produce a
|
||||
warning.
|
||||
"""
|
||||
|
||||
import fu
|
||||
|
||||
|
||||
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
4
crates/ruff/resources/test/fixtures/pyflakes/F821_10.py
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
try:
|
||||
pass
|
||||
except ExceptionGroup:
|
||||
pass
|
||||
46
crates/ruff/resources/test/fixtures/pyflakes/F821_9.py
vendored
Normal file
46
crates/ruff/resources/test/fixtures/pyflakes/F821_9.py
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Test: match statements."""
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@dataclass
|
||||
class Car:
|
||||
make: str
|
||||
model: str
|
||||
|
||||
|
||||
def f():
|
||||
match Car("Toyota", "Corolla"):
|
||||
case Car("Toyota", model):
|
||||
print(model)
|
||||
case Car(make, "Corolla"):
|
||||
print(make)
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case True:
|
||||
return captured # F821
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case captured:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [captured, *_]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case [*captured]:
|
||||
return captured
|
||||
|
||||
|
||||
def f(provided: int) -> int:
|
||||
match provided:
|
||||
case {**captured}:
|
||||
return captured
|
||||
@@ -71,7 +71,7 @@ def f():
|
||||
def connect():
|
||||
return None, None
|
||||
|
||||
with (connect() as (connection, cursor)):
|
||||
with connect() as (connection, cursor):
|
||||
cursor.execute("SELECT * FROM users")
|
||||
|
||||
|
||||
@@ -94,3 +94,30 @@ def f():
|
||||
(exponential := (exponential * base_multiplier) % 3): i + 1 for i in range(2)
|
||||
}
|
||||
return hash_map
|
||||
|
||||
|
||||
def f(x: int):
|
||||
msg1 = "Hello, world!"
|
||||
msg2 = "Hello, world!"
|
||||
msg3 = "Hello, world!"
|
||||
match x:
|
||||
case 1:
|
||||
print(msg1)
|
||||
case 2:
|
||||
print(msg2)
|
||||
|
||||
|
||||
def f(x: int):
|
||||
import enum
|
||||
|
||||
Foo = enum.Enum("Foo", "A B")
|
||||
Bar = enum.Enum("Bar", "A B")
|
||||
Baz = enum.Enum("Baz", "A B")
|
||||
|
||||
match x:
|
||||
case (Foo.A):
|
||||
print("A")
|
||||
case [Bar.A, *_]:
|
||||
print("A")
|
||||
case y:
|
||||
pass
|
||||
|
||||
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
49
crates/ruff/resources/test/fixtures/pylint/collapsible_else_if.py
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
Test for else-if-used
|
||||
"""
|
||||
|
||||
|
||||
def ok0():
|
||||
"""Should not trigger on elif"""
|
||||
if 1:
|
||||
pass
|
||||
elif 2:
|
||||
pass
|
||||
|
||||
|
||||
def ok1():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
print()
|
||||
if 1:
|
||||
pass
|
||||
|
||||
|
||||
def ok2():
|
||||
"""If the orelse has more than 1 item in it, shouldn't trigger"""
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 1:
|
||||
pass
|
||||
print()
|
||||
|
||||
|
||||
def not_ok0():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
|
||||
|
||||
def not_ok1():
|
||||
if 1:
|
||||
pass
|
||||
else:
|
||||
if 2:
|
||||
pass
|
||||
else:
|
||||
pass
|
||||
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
75
crates/ruff/resources/test/fixtures/pylint/global_statement.py
vendored
Normal file
@@ -0,0 +1,75 @@
# Adapted from:
# https://github.com/PyCQA/pylint/blob/b70d2abd7fabe9bfd735a30b593b9cd5eaa36194/tests/functional/g/globals.py

CONSTANT = 1


def FUNC():
    pass


class CLASS:
    pass


def fix_constant(value):
    """All this is ok, but try not to use `global` ;)"""
    global CONSTANT # [global-statement]
    print(CONSTANT)
    CONSTANT = value


def global_with_import():
    """Should only warn for global-statement when using `Import` node"""
    global sys # [global-statement]
    import sys


def global_with_import_from():
    """Should only warn for global-statement when using `ImportFrom` node"""
    global namedtuple # [global-statement]
    from collections import namedtuple


def global_del():
    """Deleting the global name prevents `global-variable-not-assigned`"""
    global CONSTANT # [global-statement]
    print(CONSTANT)
    del CONSTANT


def global_operator_assign():
    """Operator assigns should only throw a global statement error"""
    global CONSTANT # [global-statement]
    print(CONSTANT)
    CONSTANT += 1


def global_function_assign():
    """Function assigns should only throw a global statement error"""
    global CONSTANT # [global-statement]

    def CONSTANT():
        pass

    CONSTANT()


def override_func():
    """Overriding a function should only throw a global statement error"""
    global FUNC # [global-statement]

    def FUNC():
        pass

    FUNC()


def override_class():
    """Overriding a class should only throw a global statement error"""
    global CLASS # [global-statement]

    class CLASS:
        pass

    CLASS()
24 crates/ruff/resources/test/fixtures/pylint/logging_too_few_args.py vendored Normal file
@@ -0,0 +1,24 @@
import logging

logging.warning("Hello %s %s", "World!") # [logging-too-few-args]

# do not handle calls with kwargs (like pylint)
logging.warning("Hello %s", "World!", "again", something="else")

logging.warning("Hello %s", "World!")

# do not handle calls without any args
logging.info("100% dynamic")

# do not handle calls with *args
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)

# do not handle calls with **kwargs
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)

# do not handle keyword arguments
logging.error("%(objects)d modifications: %(modifications)d errors: %(errors)d")

import warning

warning.warning("Hello %s %s", "World!")
20 crates/ruff/resources/test/fixtures/pylint/logging_too_many_args.py vendored Normal file
@@ -0,0 +1,20 @@
import logging

logging.warning("Hello %s", "World!", "again") # [logging-too-many-args]

logging.warning("Hello %s", "World!", "again", something="else")

logging.warning("Hello %s", "World!")

# do not handle calls with *args
logging.error("Example log %s, %s", "foo", "bar", "baz", *args)

# do not handle calls with **kwargs
logging.error("Example log %s, %s", "foo", "bar", "baz", **kwargs)

# do not handle keyword arguments
logging.error("%(objects)d modifications: %(modifications)d errors: %(errors)d", {"objects": 1, "modifications": 1, "errors": 1})

import warning

warning.warning("Hello %s", "World!", "again")
155 crates/ruff/resources/test/fixtures/pylint/redefined_loop_name.py vendored Normal file
@@ -0,0 +1,155 @@
|
||||
# For -> for, variable reused
|
||||
for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# With -> for, variable reused
|
||||
with None as i:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> with, variable reused
|
||||
for i in []:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# With -> with, variable reused
|
||||
with None as i:
|
||||
with None as i: # error
|
||||
pass
|
||||
|
||||
# For -> for, different variable
|
||||
for i in []:
|
||||
for j in []: # ok
|
||||
pass
|
||||
|
||||
# With -> with, different variable
|
||||
with None as i:
|
||||
with None as j: # ok
|
||||
pass
|
||||
|
||||
# For -> for -> for, doubly nested variable reuse
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for -> for -> for, doubly nested variable reuse x2
|
||||
for i in []:
|
||||
for j in []:
|
||||
for i in []: # error
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> assignment
|
||||
for i in []:
|
||||
i = 5 # error
|
||||
|
||||
# For -> augmented assignment
|
||||
for i in []:
|
||||
i += 5 # error
|
||||
|
||||
# For -> annotated assignment
|
||||
for i in []:
|
||||
i: int = 5 # error
|
||||
|
||||
# Async for -> for, variable reused
|
||||
async for i in []:
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> async for, variable reused
|
||||
for i in []:
|
||||
async for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks tuple
|
||||
for i, j in enumerate([]):
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> for, inner loop unpacks tuple
|
||||
for i in []:
|
||||
for i, j in enumerate([]): # error
|
||||
pass
|
||||
|
||||
# For -> for, both loops unpack tuple
|
||||
for (i, (j, k)) in []:
|
||||
for i, j in enumerate([]): # two errors
|
||||
pass
|
||||
|
||||
# For else -> for, variable reused in else
|
||||
for i in []:
|
||||
pass
|
||||
else:
|
||||
for i in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, ignore dummy variables
|
||||
for _ in []:
|
||||
for _ in []: # no error
|
||||
pass
|
||||
|
||||
# For -> for, outer loop unpacks with asterisk
|
||||
for i, *j in []:
|
||||
for j in []: # error
|
||||
pass
|
||||
|
||||
# For -> function definition
|
||||
for i in []:
|
||||
def f():
|
||||
i = 2 # no error
|
||||
|
||||
# For -> class definition
|
||||
for i in []:
|
||||
class A:
|
||||
i = 2 # no error
|
||||
|
||||
# For -> function definition -> for -> assignment
|
||||
for i in []:
|
||||
def f():
|
||||
for i in []: # no error
|
||||
i = 2 # error
|
||||
|
||||
# For -> class definition -> for -> for
|
||||
for i in []:
|
||||
class A:
|
||||
for i in []: # no error
|
||||
for i in []: # error
|
||||
pass
|
||||
|
||||
# For -> use in assignment target without actual assignment; subscript
|
||||
for i in []:
|
||||
a[i] = 2 # no error
|
||||
i[a] = 2 # no error
|
||||
|
||||
# For -> use in assignment target without actual assignment; attribute
|
||||
for i in []:
|
||||
a.i = 2 # no error
|
||||
i.a = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a[0] in []:
|
||||
a[0] = 2 # error
|
||||
a[1] = 2 # no error
|
||||
|
||||
# For target with subscript -> assignment
|
||||
for a['i'] in []:
|
||||
a['i'] = 2 # error
|
||||
a['j'] = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment
|
||||
for a.i in []:
|
||||
a.i = 2 # error
|
||||
a.j = 2 # no error
|
||||
|
||||
# For target with double nested attribute -> assignment
|
||||
for a.i.j in []:
|
||||
a.i.j = 2 # error
|
||||
a.j.i = 2 # no error
|
||||
|
||||
# For target with attribute -> assignment with different spacing
|
||||
for a.i in []:
|
||||
a. i = 2 # error
|
||||
for a. i in []:
|
||||
a.i = 2 # error
|
||||
@@ -124,3 +124,14 @@ def test_break_in_with():
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def test_break_in_match():
|
||||
"""no false positive for break in match"""
|
||||
for name in ["demo"]:
|
||||
match name:
|
||||
case "demo":
|
||||
break
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -28,3 +28,10 @@ def f(x: IList[str]) -> None:
|
||||
|
||||
def f(x: "List[str]") -> None:
|
||||
...
|
||||
|
||||
|
||||
list = "abc"
|
||||
|
||||
|
||||
def f(x: List[str]) -> None:
|
||||
...
|
||||
|
||||
@@ -41,3 +41,8 @@ def f(x: Union["str", int]) -> None:
|
||||
|
||||
def f(x: Union[("str", "int"), float]) -> None:
|
||||
...
|
||||
|
||||
|
||||
def f() -> None:
|
||||
x: Optional[str]
|
||||
x = Optional[str]
|
||||
|
||||
@@ -2,36 +2,42 @@ from typing import TypedDict, NotRequired, Literal
|
||||
import typing
|
||||
|
||||
# dict literal
|
||||
MyType1 = TypedDict("MyType1", {"a": int, "b": str})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str})
|
||||
|
||||
# dict call
|
||||
MyType2 = TypedDict("MyType2", dict(a=int, b=str))
|
||||
MyType = TypedDict("MyType", dict(a=int, b=str))
|
||||
|
||||
# kwargs
|
||||
MyType3 = TypedDict("MyType3", a=int, b=str)
|
||||
MyType = TypedDict("MyType", a=int, b=str)
|
||||
|
||||
# Empty TypedDict
|
||||
MyType4 = TypedDict("MyType4")
|
||||
MyType = TypedDict("MyType")
|
||||
|
||||
# Literal values
|
||||
MyType5 = TypedDict("MyType5", {"a": "hello"})
|
||||
MyType6 = TypedDict("MyType6", a="hello")
|
||||
MyType = TypedDict("MyType", {"a": "hello"})
|
||||
MyType = TypedDict("MyType", a="hello")
|
||||
|
||||
# NotRequired
|
||||
MyType7 = TypedDict("MyType7", {"a": NotRequired[dict]})
|
||||
MyType = TypedDict("MyType", {"a": NotRequired[dict]})
|
||||
|
||||
# total
|
||||
MyType8 = TypedDict("MyType8", {"x": int, "y": int}, total=False)
|
||||
|
||||
# invalid identifiers
|
||||
MyType9 = TypedDict("MyType9", {"in": int, "x-y": int})
|
||||
MyType = TypedDict("MyType", {"x": int, "y": int}, total=False)
|
||||
|
||||
# using Literal type
|
||||
MyType10 = TypedDict("MyType10", {"key": Literal["value"]})
|
||||
MyType = TypedDict("MyType", {"key": Literal["value"]})
|
||||
|
||||
# using namespace TypedDict
|
||||
MyType11 = typing.TypedDict("MyType11", {"key": int})
|
||||
MyType = typing.TypedDict("MyType", {"key": int})
|
||||
|
||||
# unpacking
|
||||
# invalid identifiers (OK)
|
||||
MyType = TypedDict("MyType", {"in": int, "x-y": int})
|
||||
|
||||
# unpacking (OK)
|
||||
c = {"c": float}
|
||||
MyType12 = TypedDict("MyType1", {"a": int, "b": str, **c})
|
||||
MyType = TypedDict("MyType", {"a": int, "b": str, **c})
|
||||
|
||||
# Empty dict literal
|
||||
MyType = TypedDict("MyType", {})
|
||||
|
||||
# Empty dict call
|
||||
MyType = TypedDict("MyType", dict())
|
||||
|
||||
@@ -2,21 +2,30 @@ from typing import NamedTuple
|
||||
import typing
|
||||
|
||||
# with complex annotations
|
||||
NT1 = NamedTuple("NT1", [("a", int), ("b", tuple[str, ...])])
|
||||
MyType = NamedTuple("MyType", [("a", int), ("b", tuple[str, ...])])
|
||||
|
||||
# with default values as list
|
||||
NT2 = NamedTuple(
|
||||
"NT2",
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str), ("c", list[bool])],
|
||||
defaults=["foo", [True]],
|
||||
)
|
||||
|
||||
# with namespace
|
||||
NT3 = typing.NamedTuple("NT3", [("a", int), ("b", str)])
|
||||
MyType = typing.NamedTuple("MyType", [("a", int), ("b", str)])
|
||||
|
||||
# with too many default values
|
||||
NT4 = NamedTuple(
|
||||
"NT4",
|
||||
# too many default values (OK)
|
||||
MyType = NamedTuple(
|
||||
"MyType",
|
||||
[("a", int), ("b", str)],
|
||||
defaults=[1, "bar", "baz"],
|
||||
)
|
||||
|
||||
# invalid identifiers (OK)
|
||||
MyType = NamedTuple("MyType", [("x-y", int), ("b", tuple[str, ...])])
|
||||
|
||||
# no fields
|
||||
MyType = typing.NamedTuple("MyType")
|
||||
|
||||
# empty fields
|
||||
MyType = typing.NamedTuple("MyType", [])
|
||||
|
||||
7 crates/ruff/resources/test/fixtures/pyupgrade/UP038.py vendored Normal file
@@ -0,0 +1,7 @@
isinstance(1, (int, float))  # UP038
issubclass("yes", (int, float, str))  # UP038

isinstance(1, int)  # OK
issubclass("yes", int)  # OK
isinstance(1, int | float)  # OK
issubclass("yes", int | str)  # OK
@@ -1,15 +0,0 @@
|
||||
def f(*args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
a = (1, 2)
|
||||
b = (3, 4)
|
||||
c = (5, 6)
|
||||
d = (7, 8)
|
||||
|
||||
f(a, b)
|
||||
f(a, kw=b)
|
||||
f(*a, kw=b)
|
||||
f(kw=a, *b)
|
||||
f(kw=a, *b, *c)
|
||||
f(*a, kw=b, *c, kw1=d)
|
||||
@@ -30,3 +30,11 @@ def good():
|
||||
raise e # This is verbose violation, shouldn't trigger no cause
|
||||
except Exception:
|
||||
raise # Just re-raising don't need 'from'
|
||||
|
||||
|
||||
def good():
|
||||
try:
|
||||
from mod import f
|
||||
except ImportError:
|
||||
def f():
|
||||
raise MyException() # Raising within a new scope is fine
|
||||
|
||||
@@ -20,6 +20,24 @@ def bad():
|
||||
logger.exception("something failed")
|
||||
|
||||
|
||||
def bad():
|
||||
try:
|
||||
a = process()
|
||||
if not a:
|
||||
raise MyException(a)
|
||||
|
||||
raise MyException(a)
|
||||
|
||||
try:
|
||||
b = process()
|
||||
if not b:
|
||||
raise MyException(b)
|
||||
except* Exception:
|
||||
logger.exception("something failed")
|
||||
except* Exception:
|
||||
logger.exception("something failed")
|
||||
|
||||
|
||||
def good():
|
||||
try:
|
||||
a = process() # This throws the exception now
|
||||
|
||||
64 crates/ruff/resources/test/fixtures/tryceratops/TRY401.py vendored Normal file
@@ -0,0 +1,64 @@
|
||||
# Errors
|
||||
def main_function():
|
||||
try:
|
||||
process()
|
||||
handle()
|
||||
finish()
|
||||
except Exception as ex:
|
||||
logger.exception(f"Found an error: {ex}") # TRY401
|
||||
|
||||
|
||||
def main_function():
|
||||
try:
|
||||
process()
|
||||
handle()
|
||||
finish()
|
||||
except ValueError as bad:
|
||||
if True is False:
|
||||
for i in range(10):
|
||||
logger.exception(f"Found an error: {bad} {good}") # TRY401
|
||||
except IndexError as bad:
|
||||
logger.exception(f"Found an error: {bad} {bad}") # TRY401
|
||||
except Exception as bad:
|
||||
logger.exception(f"Found an error: {bad}") # TRY401
|
||||
logger.exception(f"Found an error: {bad}") # TRY401
|
||||
|
||||
if True:
|
||||
logger.exception(f"Found an error: {bad}") # TRY401
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def func_fstr():
|
||||
try:
|
||||
...
|
||||
except Exception as ex:
|
||||
logger.exception(f"Logging an error: {ex}") # TRY401
|
||||
|
||||
|
||||
def func_concat():
|
||||
try:
|
||||
...
|
||||
except Exception as ex:
|
||||
logger.exception("Logging an error: " + str(ex)) # TRY401
|
||||
|
||||
|
||||
def func_comma():
|
||||
try:
|
||||
...
|
||||
except Exception as ex:
|
||||
logger.exception("Logging an error:", ex) # TRY401
|
||||
|
||||
|
||||
# OK
|
||||
def main_function():
|
||||
try:
|
||||
process()
|
||||
handle()
|
||||
finish()
|
||||
except Exception as ex:
|
||||
logger.exception(f"Found an error: {er}")
|
||||
logger.exception(f"Found an error: {ex.field}")
|
||||
@@ -8,14 +8,14 @@ behaviors.
Running from the repo root should pick up and enforce the appropriate settings for each package:

```console
∴ cargo run resources/test/project/
resources/test/project/examples/.dotfiles/script.py:1:1: I001 Import block is un-sorted or un-formatted
resources/test/project/examples/.dotfiles/script.py:1:8: F401 `numpy` imported but unused
resources/test/project/examples/.dotfiles/script.py:2:17: F401 `app.app_file` imported but unused
resources/test/project/examples/docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
resources/test/project/examples/docs/docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
resources/test/project/project/file.py:1:8: F401 `os` imported but unused
resources/test/project/project/import_file.py:1:1: I001 Import block is un-sorted or un-formatted
∴ cargo run -p ruff_cli -- check crates/ruff/resources/test/project/
crates/ruff/resources/test/project/examples/.dotfiles/script.py:1:1: I001 Import block is un-sorted or un-formatted
crates/ruff/resources/test/project/examples/.dotfiles/script.py:1:8: F401 `numpy` imported but unused
crates/ruff/resources/test/project/examples/.dotfiles/script.py:2:17: F401 `app.app_file` imported but unused
crates/ruff/resources/test/project/examples/docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
crates/ruff/resources/test/project/examples/docs/docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
crates/ruff/resources/test/project/project/file.py:1:8: F401 `os` imported but unused
crates/ruff/resources/test/project/project/import_file.py:1:1: I001 Import block is un-sorted or un-formatted
Found 7 errors.
7 potentially fixable with the --fix option.
```
@@ -23,7 +23,7 @@ Found 7 errors.
Running from the project directory itself should exhibit the same behavior:

```console
∴ (cd resources/test/project/ && cargo run .)
∴ (cd crates/ruff/resources/test/project/ && cargo run -p ruff_cli -- check .)
examples/.dotfiles/script.py:1:1: I001 Import block is un-sorted or un-formatted
examples/.dotfiles/script.py:1:8: F401 `numpy` imported but unused
examples/.dotfiles/script.py:2:17: F401 `app.app_file` imported but unused
@@ -39,7 +39,7 @@ Running from the sub-package directory should exhibit the same behavior, but omi
files:

```console
∴ (cd resources/test/project/examples/docs && cargo run .)
∴ (cd crates/ruff/resources/test/project/examples/docs && cargo run -p ruff_cli -- check .)
docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
Found 2 errors.
@@ -50,16 +50,16 @@ Found 2 errors.
file paths from the current working directory:

```console
∴ (cargo run -- --config=resources/test/project/pyproject.toml resources/test/project/)
resources/test/project/examples/.dotfiles/script.py:1:8: F401 `numpy` imported but unused
resources/test/project/examples/.dotfiles/script.py:2:17: F401 `app.app_file` imported but unused
resources/test/project/examples/docs/docs/concepts/file.py:1:8: F401 `os` imported but unused
resources/test/project/examples/docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
resources/test/project/examples/docs/docs/file.py:1:8: F401 `os` imported but unused
resources/test/project/examples/docs/docs/file.py:3:8: F401 `numpy` imported but unused
resources/test/project/examples/docs/docs/file.py:4:27: F401 `docs.concepts.file` imported but unused
resources/test/project/examples/excluded/script.py:1:8: F401 `os` imported but unused
resources/test/project/project/file.py:1:8: F401 `os` imported but unused
∴ (cargo run -p ruff_cli -- check --config=crates/ruff/resources/test/project/pyproject.toml crates/ruff/resources/test/project/)
crates/ruff/resources/test/project/examples/.dotfiles/script.py:1:8: F401 `numpy` imported but unused
crates/ruff/resources/test/project/examples/.dotfiles/script.py:2:17: F401 `app.app_file` imported but unused
crates/ruff/resources/test/project/examples/docs/docs/concepts/file.py:1:8: F401 `os` imported but unused
crates/ruff/resources/test/project/examples/docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
crates/ruff/resources/test/project/examples/docs/docs/file.py:1:8: F401 `os` imported but unused
crates/ruff/resources/test/project/examples/docs/docs/file.py:3:8: F401 `numpy` imported but unused
crates/ruff/resources/test/project/examples/docs/docs/file.py:4:27: F401 `docs.concepts.file` imported but unused
crates/ruff/resources/test/project/examples/excluded/script.py:1:8: F401 `os` imported but unused
crates/ruff/resources/test/project/project/file.py:1:8: F401 `os` imported but unused
Found 9 errors.
9 potentially fixable with the --fix option.
```
@@ -68,7 +68,7 @@ Running from a parent directory should "ignore" the `exclude` (hence, `concepts/
included in the output):

```console
∴ (cd resources/test/project/examples && cargo run -- --config=docs/ruff.toml .)
∴ (cd crates/ruff/resources/test/project/examples && cargo run -p ruff_cli -- check --config=docs/ruff.toml .)
docs/docs/concepts/file.py:5:5: F841 Local variable `x` is assigned to but never used
docs/docs/file.py:1:1: I001 Import block is un-sorted or un-formatted
docs/docs/file.py:8:5: F841 Local variable `x` is assigned to but never used
@@ -80,8 +80,8 @@ Found 4 errors.
Passing an excluded directory directly should report errors in the contained files:

```console
∴ cargo run resources/test/project/examples/excluded/
resources/test/project/examples/excluded/script.py:1:8: F401 `os` imported but unused
∴ cargo run -p ruff_cli -- check crates/ruff/resources/test/project/examples/excluded/
crates/ruff/resources/test/project/examples/excluded/script.py:1:8: F401 `os` imported but unused
Found 1 error.
1 potentially fixable with the --fix option.
```
@@ -89,8 +89,8 @@ Found 1 error.
Unless we `--force-exclude`:

```console
∴ cargo run resources/test/project/examples/excluded/ --force-exclude
∴ cargo run -p ruff_cli -- check crates/ruff/resources/test/project/examples/excluded/ --force-exclude
warning: No Python files found under the given path(s)
∴ cargo run resources/test/project/examples/excluded/script.py --force-exclude
∴ cargo run -p ruff_cli -- check crates/ruff/resources/test/project/examples/excluded/script.py --force-exclude
warning: No Python files found under the given path(s)
```
@@ -1,18 +0,0 @@
|
||||
/// Platform-independent snapshot assertion
|
||||
#[macro_export]
|
||||
macro_rules! assert_yaml_snapshot {
|
||||
( $($args: expr),+) => {
|
||||
let line_sep = if cfg!(windows) { "\r\n" } else { "\n" };
|
||||
|
||||
// adjust snapshot file for platform
|
||||
let mut settings = insta::Settings::clone_current();
|
||||
settings.add_redaction("[].fix.content", insta::dynamic_redaction(move |value, _path| {
|
||||
insta::internals::Content::Seq(
|
||||
value.as_str().unwrap().split(line_sep).map(|line| line.into()).collect()
|
||||
)
|
||||
}));
|
||||
settings.bind(|| {
|
||||
insta::assert_yaml_snapshot!($($args),+);
|
||||
});
|
||||
};
|
||||
}
|
||||
@@ -59,6 +59,12 @@ fn alternatives<'a>(stmt: &'a RefEquality<'a, Stmt>) -> Vec<Vec<RefEquality<'a,
|
||||
handlers,
|
||||
orelse,
|
||||
..
|
||||
}
|
||||
| StmtKind::TryStar {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
..
|
||||
} => vec![body.iter().chain(orelse.iter()).map(RefEquality).collect()]
|
||||
.into_iter()
|
||||
.chain(handlers.iter().map(|handler| {
|
||||
|
||||
@@ -4,8 +4,8 @@
|
||||
use num_bigint::BigInt;
|
||||
use rustpython_parser::ast::{
|
||||
Alias, Arg, Arguments, Boolop, Cmpop, Comprehension, Constant, Excepthandler,
|
||||
ExcepthandlerKind, Expr, ExprContext, ExprKind, Keyword, Operator, Stmt, StmtKind, Unaryop,
|
||||
Withitem,
|
||||
ExcepthandlerKind, Expr, ExprContext, ExprKind, Keyword, MatchCase, Operator, Pattern,
|
||||
PatternKind, Stmt, StmtKind, Unaryop, Withitem,
|
||||
};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
@@ -157,6 +157,110 @@ impl<'a> From<&'a Withitem> for ComparableWithitem<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub enum ComparablePattern<'a> {
|
||||
MatchValue {
|
||||
value: ComparableExpr<'a>,
|
||||
},
|
||||
MatchSingleton {
|
||||
value: ComparableConstant<'a>,
|
||||
},
|
||||
MatchSequence {
|
||||
patterns: Vec<ComparablePattern<'a>>,
|
||||
},
|
||||
MatchMapping {
|
||||
keys: Vec<ComparableExpr<'a>>,
|
||||
patterns: Vec<ComparablePattern<'a>>,
|
||||
rest: Option<&'a str>,
|
||||
},
|
||||
MatchClass {
|
||||
cls: ComparableExpr<'a>,
|
||||
patterns: Vec<ComparablePattern<'a>>,
|
||||
kwd_attrs: Vec<&'a str>,
|
||||
kwd_patterns: Vec<ComparablePattern<'a>>,
|
||||
},
|
||||
MatchStar {
|
||||
name: Option<&'a str>,
|
||||
},
|
||||
MatchAs {
|
||||
pattern: Option<Box<ComparablePattern<'a>>>,
|
||||
name: Option<&'a str>,
|
||||
},
|
||||
MatchOr {
|
||||
patterns: Vec<ComparablePattern<'a>>,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Pattern> for ComparablePattern<'a> {
|
||||
fn from(pattern: &'a Pattern) -> Self {
|
||||
match &pattern.node {
|
||||
PatternKind::MatchValue { value } => Self::MatchValue {
|
||||
value: value.into(),
|
||||
},
|
||||
PatternKind::MatchSingleton { value } => Self::MatchSingleton {
|
||||
value: value.into(),
|
||||
},
|
||||
PatternKind::MatchSequence { patterns } => Self::MatchSequence {
|
||||
patterns: patterns.iter().map(Into::into).collect(),
|
||||
},
|
||||
PatternKind::MatchMapping {
|
||||
keys,
|
||||
patterns,
|
||||
rest,
|
||||
} => Self::MatchMapping {
|
||||
keys: keys.iter().map(Into::into).collect(),
|
||||
patterns: patterns.iter().map(Into::into).collect(),
|
||||
rest: rest.as_deref(),
|
||||
},
|
||||
PatternKind::MatchClass {
|
||||
cls,
|
||||
patterns,
|
||||
kwd_attrs,
|
||||
kwd_patterns,
|
||||
} => Self::MatchClass {
|
||||
cls: cls.into(),
|
||||
patterns: patterns.iter().map(Into::into).collect(),
|
||||
kwd_attrs: kwd_attrs.iter().map(String::as_str).collect(),
|
||||
kwd_patterns: kwd_patterns.iter().map(Into::into).collect(),
|
||||
},
|
||||
PatternKind::MatchStar { name } => Self::MatchStar {
|
||||
name: name.as_deref(),
|
||||
},
|
||||
PatternKind::MatchAs { pattern, name } => Self::MatchAs {
|
||||
pattern: pattern.as_ref().map(Into::into),
|
||||
name: name.as_deref(),
|
||||
},
|
||||
PatternKind::MatchOr { patterns } => Self::MatchOr {
|
||||
patterns: patterns.iter().map(Into::into).collect(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Box<Pattern>> for Box<ComparablePattern<'a>> {
|
||||
fn from(pattern: &'a Box<Pattern>) -> Self {
|
||||
Box::new((&**pattern).into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub struct ComparableMatchCase<'a> {
|
||||
pub pattern: ComparablePattern<'a>,
|
||||
pub guard: Option<ComparableExpr<'a>>,
|
||||
pub body: Vec<ComparableStmt<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> From<&'a MatchCase> for ComparableMatchCase<'a> {
|
||||
fn from(match_case: &'a MatchCase) -> Self {
|
||||
Self {
|
||||
pattern: (&match_case.pattern).into(),
|
||||
guard: match_case.guard.as_ref().map(Into::into),
|
||||
body: match_case.body.iter().map(Into::into).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash)]
|
||||
pub enum ComparableConstant<'a> {
|
||||
None,
|
||||
@@ -644,6 +748,10 @@ pub enum ComparableStmt<'a> {
|
||||
body: Vec<ComparableStmt<'a>>,
|
||||
type_comment: Option<&'a str>,
|
||||
},
|
||||
Match {
|
||||
subject: ComparableExpr<'a>,
|
||||
cases: Vec<ComparableMatchCase<'a>>,
|
||||
},
|
||||
Raise {
|
||||
exc: Option<ComparableExpr<'a>>,
|
||||
cause: Option<ComparableExpr<'a>>,
|
||||
@@ -654,6 +762,12 @@ pub enum ComparableStmt<'a> {
|
||||
orelse: Vec<ComparableStmt<'a>>,
|
||||
finalbody: Vec<ComparableStmt<'a>>,
|
||||
},
|
||||
TryStar {
|
||||
body: Vec<ComparableStmt<'a>>,
|
||||
handlers: Vec<ComparableExcepthandler<'a>>,
|
||||
orelse: Vec<ComparableStmt<'a>>,
|
||||
finalbody: Vec<ComparableStmt<'a>>,
|
||||
},
|
||||
Assert {
|
||||
test: ComparableExpr<'a>,
|
||||
msg: Option<ComparableExpr<'a>>,
|
||||
@@ -811,7 +925,10 @@ impl<'a> From<&'a Stmt> for ComparableStmt<'a> {
|
||||
body: body.iter().map(Into::into).collect(),
|
||||
type_comment: type_comment.as_ref().map(String::as_str),
|
||||
},
|
||||
StmtKind::Match { .. } => unreachable!("StmtKind::Match is not supported"),
|
||||
StmtKind::Match { subject, cases } => Self::Match {
|
||||
subject: subject.into(),
|
||||
cases: cases.iter().map(Into::into).collect(),
|
||||
},
|
||||
StmtKind::Raise { exc, cause } => Self::Raise {
|
||||
exc: exc.as_ref().map(Into::into),
|
||||
cause: cause.as_ref().map(Into::into),
|
||||
@@ -827,6 +944,17 @@ impl<'a> From<&'a Stmt> for ComparableStmt<'a> {
|
||||
orelse: orelse.iter().map(Into::into).collect(),
|
||||
finalbody: finalbody.iter().map(Into::into).collect(),
|
||||
},
|
||||
StmtKind::TryStar {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
} => Self::TryStar {
|
||||
body: body.iter().map(Into::into).collect(),
|
||||
handlers: handlers.iter().map(Into::into).collect(),
|
||||
orelse: orelse.iter().map(Into::into).collect(),
|
||||
finalbody: finalbody.iter().map(Into::into).collect(),
|
||||
},
|
||||
StmtKind::Assert { test, msg } => Self::Assert {
|
||||
test: test.into(),
|
||||
msg: msg.as_ref().map(Into::into),
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::path::Path;
|
||||
|
||||
use bitflags::bitflags;
|
||||
use itertools::Itertools;
|
||||
use log::error;
|
||||
use once_cell::sync::Lazy;
|
||||
@@ -7,11 +8,9 @@ use regex::Regex;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use rustpython_parser::ast::{
|
||||
Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword, KeywordData,
|
||||
Located, Location, Stmt, StmtKind,
|
||||
Located, Location, MatchCase, Pattern, PatternKind, Stmt, StmtKind,
|
||||
};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::token::StringKind;
|
||||
use rustpython_parser::{lexer, Mode, StringKind, Tok};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
|
||||
use crate::ast::types::{Binding, BindingKind, CallPath, Range};
|
||||
@@ -249,6 +248,46 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
pub fn any_over_pattern<F>(pattern: &Pattern, func: &F) -> bool
|
||||
where
|
||||
F: Fn(&Expr) -> bool,
|
||||
{
|
||||
match &pattern.node {
|
||||
PatternKind::MatchValue { value } => any_over_expr(value, func),
|
||||
PatternKind::MatchSingleton { .. } => false,
|
||||
PatternKind::MatchSequence { patterns } => patterns
|
||||
.iter()
|
||||
.any(|pattern| any_over_pattern(pattern, func)),
|
||||
PatternKind::MatchMapping { keys, patterns, .. } => {
|
||||
keys.iter().any(|key| any_over_expr(key, func))
|
||||
|| patterns
|
||||
.iter()
|
||||
.any(|pattern| any_over_pattern(pattern, func))
|
||||
}
|
||||
PatternKind::MatchClass {
|
||||
cls,
|
||||
patterns,
|
||||
kwd_patterns,
|
||||
..
|
||||
} => {
|
||||
any_over_expr(cls, func)
|
||||
|| patterns
|
||||
.iter()
|
||||
.any(|pattern| any_over_pattern(pattern, func))
|
||||
|| kwd_patterns
|
||||
.iter()
|
||||
.any(|pattern| any_over_pattern(pattern, func))
|
||||
}
|
||||
PatternKind::MatchStar { .. } => false,
|
||||
PatternKind::MatchAs { pattern, .. } => pattern
|
||||
.as_ref()
|
||||
.map_or(false, |pattern| any_over_pattern(pattern, func)),
|
||||
PatternKind::MatchOr { patterns } => patterns
|
||||
.iter()
|
||||
.any(|pattern| any_over_pattern(pattern, func)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn any_over_stmt<F>(stmt: &Stmt, func: &F) -> bool
|
||||
where
|
||||
F: Fn(&Expr) -> bool,
|
||||
@@ -391,6 +430,12 @@ where
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
}
|
||||
| StmtKind::TryStar {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
} => {
|
||||
any_over_body(body, func)
|
||||
|| handlers.iter().any(|handler| {
|
||||
@@ -409,8 +454,21 @@ where
|
||||
.as_ref()
|
||||
.map_or(false, |value| any_over_expr(value, func))
|
||||
}
|
||||
// TODO(charlie): Handle match statements.
|
||||
StmtKind::Match { .. } => false,
|
||||
StmtKind::Match { subject, cases } => {
|
||||
any_over_expr(subject, func)
|
||||
|| cases.iter().any(|case| {
|
||||
let MatchCase {
|
||||
pattern,
|
||||
guard,
|
||||
body,
|
||||
} = case;
|
||||
any_over_pattern(pattern, func)
|
||||
|| guard
|
||||
.as_ref()
|
||||
.map_or(false, |expr| any_over_expr(expr, func))
|
||||
|| any_over_body(body, func)
|
||||
})
|
||||
}
|
||||
StmtKind::Import { .. } => false,
|
||||
StmtKind::ImportFrom { .. } => false,
|
||||
StmtKind::Global { .. } => false,
|
||||
@@ -518,33 +576,32 @@ pub fn has_non_none_keyword(keywords: &[Keyword], keyword: &str) -> bool {
|
||||
})
|
||||
}
|
||||
|
||||
bitflags! {
|
||||
pub struct Exceptions: u32 {
|
||||
const NAME_ERROR = 0b0000_0001;
|
||||
const MODULE_NOT_FOUND_ERROR = 0b0000_0010;
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract the names of all handled exceptions.
|
||||
pub fn extract_handler_names(handlers: &[Excepthandler]) -> Vec<CallPath> {
|
||||
// TODO(charlie): Use `resolve_call_path` to avoid false positives for
|
||||
// overridden builtins.
|
||||
let mut handler_names = vec![];
|
||||
pub fn extract_handled_exceptions(handlers: &[Excepthandler]) -> Vec<&Expr> {
|
||||
let mut handled_exceptions = Vec::new();
|
||||
for handler in handlers {
|
||||
match &handler.node {
|
||||
ExcepthandlerKind::ExceptHandler { type_, .. } => {
|
||||
if let Some(type_) = type_ {
|
||||
if let ExprKind::Tuple { elts, .. } = &type_.node {
|
||||
for type_ in elts {
|
||||
let call_path = collect_call_path(type_);
|
||||
if !call_path.is_empty() {
|
||||
handler_names.push(call_path);
|
||||
}
|
||||
handled_exceptions.push(type_);
|
||||
}
|
||||
} else {
|
||||
let call_path = collect_call_path(type_);
|
||||
if !call_path.is_empty() {
|
||||
handler_names.push(call_path);
|
||||
}
|
||||
handled_exceptions.push(type_);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
handler_names
|
||||
handled_exceptions
|
||||
}
|
||||
|
||||
/// Return the set of all bound argument names.
|
||||
@@ -596,7 +653,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
|
||||
|
||||
/// Returns `true` if a [`Range`] includes at least one comment.
|
||||
pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
|
||||
for tok in lexer::make_tokenizer(locator.slice(&range)) {
|
||||
for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
|
||||
match tok {
|
||||
Ok((_, tok, _)) => {
|
||||
if matches!(tok, Tok::Comment(..)) {
|
||||
@@ -708,7 +765,7 @@ pub fn from_relative_import<'a>(module: &'a [String], name: &'a str) -> CallPath
|
||||
call_path
|
||||
}
|
||||
|
||||
/// A [`Visitor`] that collects all return statements in a function or method.
|
||||
/// A [`Visitor`] that collects all `return` statements in a function or method.
|
||||
#[derive(Default)]
|
||||
pub struct ReturnStatementVisitor<'a> {
|
||||
pub returns: Vec<Option<&'a Expr>>,
|
||||
@@ -729,6 +786,48 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
/// A [`Visitor`] that collects all `raise` statements in a function or method.
|
||||
#[derive(Default)]
|
||||
pub struct RaiseStatementVisitor<'a> {
|
||||
pub raises: Vec<(Range, Option<&'a Expr>, Option<&'a Expr>)>,
|
||||
}
|
||||
|
||||
impl<'a, 'b> Visitor<'b> for RaiseStatementVisitor<'b>
|
||||
where
|
||||
'b: 'a,
|
||||
{
|
||||
fn visit_stmt(&mut self, stmt: &'b Stmt) {
|
||||
match &stmt.node {
|
||||
StmtKind::Raise { exc, cause } => {
|
||||
self.raises
|
||||
.push((Range::from_located(stmt), exc.as_deref(), cause.as_deref()));
|
||||
}
|
||||
StmtKind::ClassDef { .. }
|
||||
| StmtKind::FunctionDef { .. }
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
| StmtKind::Try { .. }
|
||||
| StmtKind::TryStar { .. } => {}
|
||||
StmtKind::If { body, orelse, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
visitor::walk_body(self, orelse);
|
||||
}
|
||||
StmtKind::While { body, .. }
|
||||
| StmtKind::With { body, .. }
|
||||
| StmtKind::AsyncWith { body, .. }
|
||||
| StmtKind::For { body, .. }
|
||||
| StmtKind::AsyncFor { body, .. } => {
|
||||
visitor::walk_body(self, body);
|
||||
}
|
||||
StmtKind::Match { cases, .. } => {
|
||||
for case in cases {
|
||||
visitor::walk_body(self, &case.body);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a location within a file (relative to `base`) to an absolute
|
||||
/// position.
|
||||
pub fn to_absolute(relative: Location, base: Location) -> Location {
|
||||
@@ -811,7 +910,7 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
|
||||
let mut fix_start = None;
|
||||
let mut fix_end = None;
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, start).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, start).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(start);
|
||||
@@ -843,7 +942,8 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
|
||||
| StmtKind::AsyncFunctionDef { .. }
|
||||
) {
|
||||
let contents = locator.slice(&Range::from_located(stmt));
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt.location).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
|
||||
{
|
||||
if matches!(tok, Tok::Name { .. }) {
|
||||
return Range::new(start, end);
|
||||
}
|
||||
@@ -869,16 +969,18 @@ pub fn binding_range(binding: &Binding, locator: &Locator) -> Range {
|
||||
}
|
||||
|
||||
// Return the ranges of `Name` tokens within a specified node.
|
||||
pub fn find_names<T>(located: &Located<T>, locator: &Locator) -> Vec<Range> {
|
||||
pub fn find_names<'a, T, U>(
|
||||
located: &'a Located<T, U>,
|
||||
locator: &'a Locator,
|
||||
) -> impl Iterator<Item = Range> + 'a {
|
||||
let contents = locator.slice(&Range::from_located(located));
|
||||
lexer::make_tokenizer_located(contents, located.location)
|
||||
lexer::lex_located(contents, Mode::Module, located.location)
|
||||
.flatten()
|
||||
.filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
|
||||
.map(|(start, _, end)| Range {
|
||||
location: start,
|
||||
end_location: end,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Return the `Range` of `name` in `Excepthandler`.
|
||||
@@ -890,7 +992,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
|
||||
(Some(_), Some(type_)) => {
|
||||
let type_end_location = type_.end_location.unwrap();
|
||||
let contents = locator.slice(&Range::new(type_end_location, body[0].location));
|
||||
let range = lexer::make_tokenizer_located(contents, type_end_location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, type_end_location)
|
||||
.flatten()
|
||||
.tuple_windows()
|
||||
.find(|(tok, next_tok)| {
|
||||
@@ -917,7 +1019,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
location: handler.location,
|
||||
end_location: end,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, handler.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, handler.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -931,7 +1033,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
|
||||
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
|
||||
pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
|
||||
let contents = locator.slice(&Range::from_located(expr));
|
||||
lexer::make_tokenizer_located(contents, expr.location)
|
||||
lexer::lex_located(contents, Mode::Module, expr.location)
|
||||
.flatten()
|
||||
.filter_map(|(location, tok, end_location)| match tok {
|
||||
Tok::String {
|
||||
@@ -985,7 +1087,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
.expect("Expected orelse to be non-empty")
|
||||
.location,
|
||||
});
|
||||
let range = lexer::make_tokenizer_located(contents, body_end)
|
||||
let range = lexer::lex_located(contents, Mode::Module, body_end)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1001,7 +1103,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
|
||||
pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
|
||||
let contents = locator.slice(&range);
|
||||
let range = lexer::make_tokenizer_located(contents, range.location)
|
||||
let range = lexer::lex_located(contents, Mode::Module, range.location)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Colon))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1031,7 +1133,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
|
||||
_ => return None,
|
||||
};
|
||||
let contents = locator.slice(&Range::new(start, end));
|
||||
let range = lexer::make_tokenizer_located(contents, start)
|
||||
let range = lexer::lex_located(contents, Mode::Module, start)
|
||||
.flatten()
|
||||
.find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
|
||||
.map(|(location, _, end_location)| Range {
|
||||
@@ -1147,8 +1249,8 @@ pub fn is_logger_candidate(func: &Expr) -> bool {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::ast::helpers::{
|
||||
elif_else_range, else_range, first_colon_range, identifier_range, match_trailing_content,
|
||||
|
||||
24 crates/ruff/src/ast/logging.rs Normal file
@@ -0,0 +1,24 @@
pub enum LoggingLevel {
    Debug,
    Critical,
    Error,
    Exception,
    Info,
    Warn,
    Warning,
}

impl LoggingLevel {
    pub fn from_str(level: &str) -> Option<Self> {
        match level {
            "debug" => Some(LoggingLevel::Debug),
            "critical" => Some(LoggingLevel::Critical),
            "error" => Some(LoggingLevel::Error),
            "exception" => Some(LoggingLevel::Exception),
            "info" => Some(LoggingLevel::Info),
            "warn" => Some(LoggingLevel::Warn),
            "warning" => Some(LoggingLevel::Warning),
            _ => None,
        }
    }
}
@@ -4,6 +4,7 @@ pub mod comparable;
|
||||
pub mod function_type;
|
||||
pub mod hashable;
|
||||
pub mod helpers;
|
||||
pub mod logging;
|
||||
pub mod operations;
|
||||
pub mod relocate;
|
||||
pub mod types;
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use bitflags::bitflags;
|
||||
use rustc_hash::FxHashMap;
|
||||
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers::any_over_expr;
|
||||
use crate::ast::types::{BindingKind, Scope};
|
||||
@@ -181,7 +180,10 @@ pub fn extract_globals(body: &[Stmt]) -> FxHashMap<&str, &Stmt> {
|
||||
/// Check if a node is parent of a conditional branch.
|
||||
pub fn on_conditional_branch<'a>(parents: &mut impl Iterator<Item = &'a Stmt>) -> bool {
|
||||
parents.any(|parent| {
|
||||
if matches!(parent.node, StmtKind::If { .. } | StmtKind::While { .. }) {
|
||||
if matches!(
|
||||
parent.node,
|
||||
StmtKind::If { .. } | StmtKind::While { .. } | StmtKind::Match { .. }
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
if let StmtKind::Expr { value } = &parent.node {
|
||||
@@ -198,7 +200,11 @@ pub fn in_nested_block<'a>(mut parents: impl Iterator<Item = &'a Stmt>) -> bool
|
||||
parents.any(|parent| {
|
||||
matches!(
|
||||
parent.node,
|
||||
StmtKind::Try { .. } | StmtKind::If { .. } | StmtKind::With { .. }
|
||||
StmtKind::Try { .. }
|
||||
| StmtKind::TryStar { .. }
|
||||
| StmtKind::If { .. }
|
||||
| StmtKind::With { .. }
|
||||
| StmtKind::Match { .. }
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -277,7 +283,7 @@ pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
|
||||
/// `CPython` doesn't either. This method iterates over the token stream and
|
||||
/// re-identifies [`Cmpop`] nodes, annotating them with valid ranges.
|
||||
pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
|
||||
let mut tok_iter = lexer::make_tokenizer(contents).flatten().peekable();
|
||||
let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
|
||||
let mut ops: Vec<LocatedCmpop> = vec![];
|
||||
let mut count: usize = 0;
|
||||
loop {
|
||||
@@ -304,13 +310,14 @@ pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
|
||||
ops.push(LocatedCmpop::new(start, end, Cmpop::In));
|
||||
}
|
||||
Tok::Is => {
|
||||
if let Some((_, _, end)) =
|
||||
let op = if let Some((_, _, end)) =
|
||||
tok_iter.next_if(|(_, tok, _)| matches!(tok, Tok::Not))
|
||||
{
|
||||
ops.push(LocatedCmpop::new(start, end, Cmpop::IsNot));
|
||||
LocatedCmpop::new(start, end, Cmpop::IsNot)
|
||||
} else {
|
||||
ops.push(LocatedCmpop::new(start, end, Cmpop::Is));
|
||||
}
|
||||
LocatedCmpop::new(start, end, Cmpop::Is)
|
||||
};
|
||||
ops.push(op);
|
||||
}
|
||||
Tok::NotEqual => {
|
||||
ops.push(LocatedCmpop::new(start, end, Cmpop::NotEq));
|
||||
|
||||
@@ -29,7 +29,7 @@ impl Range {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_located<T>(located: &Located<T>) -> Self {
|
||||
pub fn from_located<T, U>(located: &Located<T, U>) -> Self {
|
||||
Range::new(located.location, located.end_location.unwrap())
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ impl<'a> Scope<'a> {
|
||||
// StarImportation
|
||||
// FutureImportation
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, is_macro::Is)]
|
||||
pub enum BindingKind<'a> {
|
||||
Annotation,
|
||||
Argument,
|
||||
|
||||
@@ -205,7 +205,6 @@ pub fn walk_stmt<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, stmt: &'a Stmt) {
|
||||
visitor.visit_body(body);
|
||||
}
|
||||
StmtKind::Match { subject, cases } => {
|
||||
// TODO(charlie): Handle `cases`.
|
||||
visitor.visit_expr(subject);
|
||||
for match_case in cases {
|
||||
visitor.visit_match_case(match_case);
|
||||
@@ -232,6 +231,19 @@ pub fn walk_stmt<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, stmt: &'a Stmt) {
|
||||
visitor.visit_body(orelse);
|
||||
visitor.visit_body(finalbody);
|
||||
}
|
||||
StmtKind::TryStar {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
} => {
|
||||
visitor.visit_body(body);
|
||||
for excepthandler in handlers {
|
||||
visitor.visit_excepthandler(excepthandler);
|
||||
}
|
||||
visitor.visit_body(orelse);
|
||||
visitor.visit_body(finalbody);
|
||||
}
|
||||
StmtKind::Assert { test, msg } => {
|
||||
visitor.visit_expr(test);
|
||||
if let Some(expr) = msg {
|
||||
|
||||
@@ -4,8 +4,7 @@ use libcst_native::{
|
||||
Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
|
||||
};
|
||||
use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, StmtKind};
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::Tok;
|
||||
use rustpython_parser::{lexer, Mode, Tok};
|
||||
|
||||
use crate::ast::helpers;
|
||||
use crate::ast::helpers::to_absolute;
|
||||
@@ -53,6 +52,12 @@ fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool>
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
}
|
||||
| StmtKind::TryStar {
|
||||
body,
|
||||
handlers,
|
||||
orelse,
|
||||
finalbody,
|
||||
} => {
|
||||
if body.iter().contains(child) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
@@ -74,6 +79,19 @@ fn is_lone_child(child: &Stmt, parent: &Stmt, deleted: &[&Stmt]) -> Result<bool>
|
||||
bail!("Unable to find child in parent body")
|
||||
}
|
||||
}
|
||||
StmtKind::Match { cases, .. } => {
|
||||
if let Some(body) = cases.iter().find_map(|case| {
|
||||
if case.body.iter().contains(child) {
|
||||
Some(&case.body)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}) {
|
||||
Ok(has_single_child(body, deleted))
|
||||
} else {
|
||||
bail!("Unable to find child in parent body")
|
||||
}
|
||||
}
|
||||
_ => bail!("Unable to find child in parent body"),
|
||||
}
|
||||
}
|
||||
@@ -352,7 +370,7 @@ pub fn remove_argument(
|
||||
if n_arguments == 1 {
|
||||
// Case 1: there is only one argument.
|
||||
let mut count: usize = 0;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if matches!(tok, Tok::Lpar) {
|
||||
if count == 0 {
|
||||
fix_start = Some(if remove_parentheses {
|
||||
@@ -384,7 +402,7 @@ pub fn remove_argument(
|
||||
{
|
||||
// Case 2: argument or keyword is _not_ the last node.
|
||||
let mut seen_comma = false;
|
||||
for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if seen_comma {
|
||||
if matches!(tok, Tok::NonLogicalNewline) {
|
||||
// Also delete any non-logical newlines after the comma.
|
||||
@@ -407,7 +425,7 @@ pub fn remove_argument(
|
||||
} else {
|
||||
// Case 3: argument or keyword is the last node, so we have to find the last
|
||||
// comma in the stmt.
|
||||
for (start, tok, _) in lexer::make_tokenizer_located(contents, stmt_at).flatten() {
|
||||
for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
|
||||
if start == expr_at {
|
||||
fix_end = Some(expr_end);
|
||||
break;
|
||||
@@ -429,8 +447,8 @@ pub fn remove_argument(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use rustpython_parser as parser;
|
||||
use rustpython_parser::ast::Location;
|
||||
use rustpython_parser::parser;
|
||||
|
||||
use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
|
||||
use crate::source_code::Locator;
|
||||
|
||||
File diff suppressed because it is too large
@@ -7,7 +7,8 @@ use crate::ast::types::Range;
|
||||
use crate::registry::Diagnostic;
|
||||
use crate::rules::pycodestyle::logical_lines::{iter_logical_lines, TokenFlags};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
extraneous_whitespace, indentation, space_around_operator, whitespace_around_keywords,
|
||||
extraneous_whitespace, indentation, missing_whitespace_after_keyword, space_around_operator,
|
||||
whitespace_around_keywords, whitespace_around_named_parameter_equals,
|
||||
whitespace_before_comment,
|
||||
};
|
||||
use crate::settings::Settings;
|
||||
@@ -106,6 +107,18 @@ pub fn check_logical_lines(
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (location, kind) in missing_whitespace_after_keyword(&line.tokens) {
|
||||
if settings.rules.enabled(kind.rule()) {
|
||||
diagnostics.push(Diagnostic {
|
||||
kind,
|
||||
location,
|
||||
end_location: location,
|
||||
fix: None,
|
||||
parent: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
if line.flags.contains(TokenFlags::COMMENT) {
|
||||
for (range, kind) in whitespace_before_comment(&line.tokens, locator) {
|
||||
@@ -120,6 +133,21 @@ pub fn check_logical_lines(
|
||||
}
|
||||
}
|
||||
}
|
||||
if line.flags.contains(TokenFlags::OPERATOR) {
|
||||
for (location, kind) in
|
||||
whitespace_around_named_parameter_equals(&line.tokens, &line.text)
|
||||
{
|
||||
if settings.rules.enabled(kind.rule()) {
|
||||
diagnostics.push(Diagnostic {
|
||||
kind,
|
||||
location,
|
||||
end_location: location,
|
||||
fix: None,
|
||||
parent: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (index, kind) in indentation(
|
||||
&line,
|
||||
@@ -152,8 +180,8 @@ pub fn check_logical_lines(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rustpython_parser::lexer;
|
||||
use rustpython_parser::lexer::LexResult;
|
||||
use rustpython_parser::{lexer, Mode};
|
||||
|
||||
use crate::checkers::logical_lines::iter_logical_lines;
|
||||
use crate::source_code::Locator;
|
||||
@@ -164,7 +192,7 @@ mod tests {
|
||||
x = 1
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -185,7 +213,7 @@ x = [
|
||||
]
|
||||
y = 2
|
||||
z = x + 1"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -199,7 +227,7 @@ z = x + 1"#;
|
||||
assert_eq!(actual, expected);
|
||||
|
||||
let contents = "x = 'abc'";
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -212,7 +240,7 @@ z = x + 1"#;
|
||||
def f():
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
@@ -227,7 +255,7 @@ def f():
|
||||
# Comment goes here.
|
||||
x = 1
|
||||
f()"#;
|
||||
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
|
||||
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
|
||||
let locator = Locator::new(contents);
|
||||
let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
|
||||
.into_iter()
|
||||
|
||||
@@ -148,9 +148,7 @@ pub fn check_noqa(
|
||||
UnusedNOQA { codes: None },
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
Location::new(row + 1, lines[row].chars().count()),
|
||||
@@ -217,9 +215,7 @@ pub fn check_noqa(
|
||||
},
|
||||
Range::new(Location::new(row + 1, start), Location::new(row + 1, end)),
|
||||
);
|
||||
if matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(diagnostic.kind.rule())
|
||||
{
|
||||
if autofix.into() && settings.rules.should_fix(diagnostic.kind.rule()) {
|
||||
if valid_codes.is_empty() {
|
||||
diagnostic.amend(Fix::deletion(
|
||||
Location::new(row + 1, start - spaces),
|
||||
|
||||
@@ -8,7 +8,8 @@ use crate::rules::flake8_executable::rules::{
|
||||
shebang_missing, shebang_newline, shebang_not_executable, shebang_python, shebang_whitespace,
|
||||
};
|
||||
use crate::rules::pycodestyle::rules::{
|
||||
doc_line_too_long, line_too_long, mixed_spaces_and_tabs, no_newline_at_end_of_file,
|
||||
doc_line_too_long, indentation_contains_tabs, line_too_long, mixed_spaces_and_tabs,
|
||||
no_newline_at_end_of_file, trailing_whitespace,
|
||||
};
|
||||
use crate::rules::pygrep_hooks::rules::{blanket_noqa, blanket_type_ignore};
|
||||
use crate::rules::pylint;
|
||||
@@ -41,11 +42,15 @@ pub fn check_physical_lines(
|
||||
let enforce_unnecessary_coding_comment = settings.rules.enabled(&Rule::UTF8EncodingDeclaration);
|
||||
let enforce_mixed_spaces_and_tabs = settings.rules.enabled(&Rule::MixedSpacesAndTabs);
|
||||
let enforce_bidirectional_unicode = settings.rules.enabled(&Rule::BidirectionalUnicode);
|
||||
let enforce_trailing_whitespace = settings.rules.enabled(&Rule::TrailingWhitespace);
|
||||
let enforce_blank_line_contains_whitespace =
|
||||
settings.rules.enabled(&Rule::BlankLineContainsWhitespace);
|
||||
let enforce_indentation_contains_tabs = settings.rules.enabled(&Rule::IndentationContainsTabs);
|
||||
|
||||
let fix_unnecessary_coding_comment = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace = matches!(autofix, flags::Autofix::Enabled)
|
||||
&& settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
let fix_unnecessary_coding_comment =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::UTF8EncodingDeclaration);
|
||||
let fix_shebang_whitespace =
|
||||
autofix.into() && settings.rules.should_fix(&Rule::ShebangWhitespace);
|
||||
|
||||
let mut commented_lines_iter = commented_lines.iter().peekable();
|
||||
let mut doc_lines_iter = doc_lines.iter().peekable();
@@ -139,14 +144,25 @@ pub fn check_physical_lines(
if enforce_bidirectional_unicode {
diagnostics.extend(pylint::rules::bidirectional_unicode(index, line));
}

if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace {
if let Some(diagnostic) = trailing_whitespace(index, line, settings, autofix) {
diagnostics.push(diagnostic);
}
}

if enforce_indentation_contains_tabs {
if let Some(diagnostic) = indentation_contains_tabs(index, line) {
diagnostics.push(diagnostic);
}
}
}

if enforce_no_newline_at_end_of_file {
if let Some(diagnostic) = no_newline_at_end_of_file(
stylist,
contents,
matches!(autofix, flags::Autofix::Enabled)
&& settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
autofix.into() && settings.rules.should_fix(&Rule::NoNewLineAtEndOfFile),
) {
diagnostics.push(diagnostic);
}
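The new enforce_* branches above call per-line rule functions that return Option<Diagnostic>. Those rule bodies are not included in the hunks shown here; below is only a rough sketch of the shape implied by the indentation_contains_tabs(index, line) call site, reusing the Diagnostic, Range, and Location constructors seen elsewhere in this diff. The real pycodestyle W191 implementation may differ.

// Sketch only: signature inferred from the call site above; the actual rule
// lives in crate::rules::pycodestyle::rules and is not shown in this diff.
// Diagnostic::new(kind, range) is assumed here; adjust to the crate's real API.
pub fn indentation_contains_tabs(lineno: usize, line: &str) -> Option<Diagnostic> {
    // Look at the leading whitespace of the physical line.
    let indent: String = line.chars().take_while(|c| c.is_whitespace()).collect();
    if indent.contains('\t') {
        Some(Diagnostic::new(
            IndentationContainsTabs,
            Range::new(
                Location::new(lineno + 1, 0),
                Location::new(lineno + 1, indent.chars().count()),
            ),
        ))
    } else {
        None
    }
}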

@@ -1,6 +1,7 @@
//! Lint rules based on token traversal.

use rustpython_parser::lexer::{LexResult, Tok};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

use crate::lex::docstring_detection::StateMachine;
use crate::registry::{Diagnostic, Rule};
@@ -107,8 +108,7 @@ pub fn check_tokens(
locator,
*start,
*end,
matches!(autofix, flags::Autofix::Enabled)
&& settings.rules.should_fix(&Rule::InvalidEscapeSequence),
autofix.into() && settings.rules.should_fix(&Rule::InvalidEscapeSequence),
));
}
}
@@ -147,7 +147,7 @@ pub fn check_tokens(
// COM812, COM818, COM819
if enforce_trailing_comma {
diagnostics.extend(
flake8_commas::rules::trailing_commas(tokens, settings, autofix)
flake8_commas::rules::trailing_commas(tokens, locator, settings, autofix)
.into_iter()
.filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
);

@@ -37,6 +37,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E224") => Rule::TabAfterOperator,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E251") => Rule::UnexpectedSpacesAroundKeywordParameterEquals,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E252") => Rule::MissingWhitespaceAroundParameterEquals,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E261") => Rule::TooFewSpacesBeforeInlineComment,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E262") => Rule::NoSpaceAfterInlineComment,
@@ -52,6 +56,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pycodestyle, "E273") => Rule::TabAfterKeyword,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E274") => Rule::TabBeforeKeyword,
#[cfg(feature = "logical_lines")]
(Pycodestyle, "E275") => Rule::MissingWhitespaceAfterKeyword,
(Pycodestyle, "E401") => Rule::MultipleImportsOnOneLine,
(Pycodestyle, "E402") => Rule::ModuleImportNotAtTopOfFile,
(Pycodestyle, "E501") => Rule::LineTooLong,
@@ -72,7 +78,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pycodestyle, "E999") => Rule::SyntaxError,

// pycodestyle warnings
(Pycodestyle, "W191") => Rule::IndentationContainsTabs,
(Pycodestyle, "W291") => Rule::TrailingWhitespace,
(Pycodestyle, "W292") => Rule::NoNewLineAtEndOfFile,
(Pycodestyle, "W293") => Rule::BlankLineContainsWhitespace,
(Pycodestyle, "W505") => Rule::DocLineTooLong,
(Pycodestyle, "W605") => Rule::InvalidEscapeSequence,

@@ -126,6 +135,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pylint, "E0101") => Rule::ReturnInInit,
(Pylint, "E0604") => Rule::InvalidAllObject,
(Pylint, "E0605") => Rule::InvalidAllFormat,
(Pylint, "E1205") => Rule::LoggingTooManyArgs,
(Pylint, "E1206") => Rule::LoggingTooFewArgs,
(Pylint, "E1307") => Rule::BadStringFormatType,
(Pylint, "E2502") => Rule::BidirectionalUnicode,
(Pylint, "E1310") => Rule::BadStrStripCall,
@@ -139,13 +150,16 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pylint, "R0133") => Rule::ComparisonOfConstant,
(Pylint, "R1701") => Rule::ConsiderMergingIsinstance,
(Pylint, "R1722") => Rule::ConsiderUsingSysExit,
(Pylint, "R5501") => Rule::CollapsibleElseIf,
(Pylint, "R2004") => Rule::MagicValueComparison,
(Pylint, "W0120") => Rule::UselessElseOnLoop,
(Pylint, "W0602") => Rule::GlobalVariableNotAssigned,
(Pylint, "W0603") => Rule::GlobalStatement,
(Pylint, "R0911") => Rule::TooManyReturnStatements,
(Pylint, "R0913") => Rule::TooManyArguments,
(Pylint, "R0912") => Rule::TooManyBranches,
(Pylint, "R0915") => Rule::TooManyStatements,
(Pylint, "W2901") => Rule::RedefinedLoopName,

// flake8-builtins
(Flake8Builtins, "001") => Rule::BuiltinVariableShadowing,
@@ -179,6 +193,8 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Bugbear, "025") => Rule::DuplicateTryBlockException,
(Flake8Bugbear, "026") => Rule::StarArgUnpackingAfterKeywordArg,
(Flake8Bugbear, "027") => Rule::EmptyMethodWithoutAbstractDecorator,
(Flake8Bugbear, "029") => Rule::ExceptWithEmptyTuple,
(Flake8Bugbear, "032") => Rule::UnintentionalTypeAnnotation,
(Flake8Bugbear, "904") => Rule::RaiseWithoutFromInsideExcept,
(Flake8Bugbear, "905") => Rule::ZipWithoutExplicitStrict,

@@ -276,6 +292,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Simplify, "112") => Rule::UseCapitalEnvironmentVariables,
(Flake8Simplify, "114") => Rule::IfWithSameArms,
(Flake8Simplify, "115") => Rule::OpenFileWithContextHandler,
(Flake8Simplify, "116") => Rule::ManualDictLookup,
(Flake8Simplify, "117") => Rule::MultipleWithStatements,
(Flake8Simplify, "118") => Rule::KeyInDict,
(Flake8Simplify, "201") => Rule::NegateEqualOp,
@@ -284,10 +301,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Simplify, "210") => Rule::IfExprWithTrueFalse,
(Flake8Simplify, "211") => Rule::IfExprWithFalseTrue,
(Flake8Simplify, "212") => Rule::IfExprWithTwistedArms,
(Flake8Simplify, "220") => Rule::AAndNotA,
(Flake8Simplify, "221") => Rule::AOrNotA,
(Flake8Simplify, "222") => Rule::OrTrue,
(Flake8Simplify, "223") => Rule::AndFalse,
(Flake8Simplify, "220") => Rule::ExprAndNotExpr,
(Flake8Simplify, "221") => Rule::ExprOrNotExpr,
(Flake8Simplify, "222") => Rule::ExprOrTrue,
(Flake8Simplify, "223") => Rule::ExprAndFalse,
(Flake8Simplify, "300") => Rule::YodaConditions,
(Flake8Simplify, "401") => Rule::DictGetWithDefault,

@@ -327,6 +344,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Pyupgrade, "035") => Rule::ImportReplacements,
(Pyupgrade, "036") => Rule::OutdatedVersionBlock,
(Pyupgrade, "037") => Rule::QuotedAnnotation,
(Pyupgrade, "038") => Rule::IsinstanceWithTuple,

// pydocstyle
(Pydocstyle, "100") => Rule::PublicModule,
@@ -475,8 +493,14 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {

// flake8-pyi
(Flake8Pyi, "001") => Rule::PrefixTypeParams,
(Flake8Pyi, "006") => Rule::BadVersionInfoComparison,
(Flake8Pyi, "007") => Rule::UnrecognizedPlatformCheck,
(Flake8Pyi, "008") => Rule::UnrecognizedPlatformName,
(Flake8Pyi, "009") => Rule::PassStatementStubBody,
(Flake8Pyi, "010") => Rule::NonEmptyStubBody,
(Flake8Pyi, "011") => Rule::TypedArgumentSimpleDefaults,
(Flake8Pyi, "014") => Rule::ArgumentSimpleDefaults,
(Flake8Pyi, "021") => Rule::DocstringInStub,

// flake8-pytest-style
(Flake8PytestStyle, "001") => Rule::IncorrectFixtureParenthesesStyle,
@@ -510,6 +534,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Flake8Pie, "794") => Rule::DupeClassFieldDefinitions,
(Flake8Pie, "796") => Rule::PreferUniqueEnums,
(Flake8Pie, "800") => Rule::UnnecessarySpread,
(Flake8Pie, "802") => Rule::UnnecessaryComprehensionAnyAll,
(Flake8Pie, "804") => Rule::UnnecessaryDictKwargs,
(Flake8Pie, "807") => Rule::PreferListBuiltin,
(Flake8Pie, "810") => Rule::SingleStartsEndsWith,
@@ -545,6 +570,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Tryceratops, "300") => Rule::TryConsiderElse,
(Tryceratops, "301") => Rule::RaiseWithinTry,
(Tryceratops, "400") => Rule::ErrorInsteadOfException,
(Tryceratops, "401") => Rule::VerboseLogMessage,

// flake8-use-pathlib
(Flake8UsePathlib, "100") => Rule::PathlibAbspath,
@@ -597,13 +623,15 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<Rule> {
(Ruff, "001") => Rule::AmbiguousUnicodeCharacterString,
(Ruff, "002") => Rule::AmbiguousUnicodeCharacterDocstring,
(Ruff, "003") => Rule::AmbiguousUnicodeCharacterComment,
(Ruff, "004") => Rule::KeywordArgumentBeforeStarArgument,
(Ruff, "005") => Rule::UnpackInsteadOfConcatenatingToCollectionLiteral,
(Ruff, "006") => Rule::AsyncioDanglingTask,
(Ruff, "100") => Rule::UnusedNOQA,

// flake8-django
(Flake8Django, "001") => Rule::NullableModelStringField,
(Flake8Django, "003") => Rule::LocalsInRenderFunction,
(Flake8Django, "006") => Rule::ExcludeWithModelForm,
(Flake8Django, "007") => Rule::AllWithModelForm,
(Flake8Django, "008") => Rule::ModelWithoutDunderStr,
(Flake8Django, "013") => Rule::NonLeadingReceiverDecorator,

@@ -3,9 +3,9 @@
use bitflags::bitflags;
use nohash_hasher::{IntMap, IntSet};
use rustpython_parser::ast::Location;
use rustpython_parser::lexer::{LexResult, Tok};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

use crate::registry::LintSource;
use crate::settings::Settings;

bitflags! {
@@ -20,7 +20,7 @@ impl Flags {
if settings
.rules
.iter_enabled()
.any(|rule_code| matches!(rule_code.lint_source(), LintSource::Imports))
.any(|rule_code| rule_code.lint_source().is_imports())
{
Self::NOQA | Self::ISORT
} else {
@@ -150,56 +150,61 @@ pub fn extract_isort_directives(lxr: &[LexResult]) -> IsortDirectives {
#[cfg(test)]
mod tests {
use nohash_hasher::{IntMap, IntSet};
use rustpython_parser::lexer;
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, Mode};

use crate::directives::{extract_isort_directives, extract_noqa_line_for};

#[test]
fn noqa_extraction() {
let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = 1
y = 2
z = x + 1",
Mode::Module,
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"
x = 1
y = 2
z = x + 1",
Mode::Module,
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = 1
y = 2
z = x + 1
",
Mode::Module,
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = 1

y = 2
z = x + 1
",
Mode::Module,
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), IntMap::default());

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = '''abc
def
ghi
'''
y = 2
z = x + 1",
Mode::Module,
)
.collect();
assert_eq!(
@@ -207,13 +212,14 @@ z = x + 1",
IntMap::from_iter([(1, 4), (2, 4), (3, 4)])
);

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = 1
y = '''abc
def
ghi
'''
z = 2",
Mode::Module,
)
.collect();
assert_eq!(
@@ -221,12 +227,13 @@ z = 2",
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
);

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
"x = 1
y = '''abc
def
ghi
'''",
Mode::Module,
)
.collect();
assert_eq!(
@@ -234,17 +241,19 @@ ghi
IntMap::from_iter([(2, 5), (3, 5), (4, 5)])
);

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
r#"x = \
1"#,
Mode::Module,
)
.collect();
assert_eq!(extract_noqa_line_for(&lxr), IntMap::from_iter([(1, 2)]));

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
r#"from foo import \
bar as baz, \
qux as quux"#,
Mode::Module,
)
.collect();
assert_eq!(
@@ -252,7 +261,7 @@ ghi
IntMap::from_iter([(1, 3), (2, 3)])
);

let lxr: Vec<LexResult> = lexer::make_tokenizer(
let lxr: Vec<LexResult> = lexer::lex(
r#"
# Foo
from foo import \
@@ -262,6 +271,7 @@ x = \
1
y = \
2"#,
Mode::Module,
)
.collect();
assert_eq!(
@@ -275,7 +285,7 @@ y = \
let contents = "x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());

let contents = "# isort: off
@@ -283,7 +293,7 @@ x = 1
y = 2
# isort: on
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([2, 3, 4])
@@ -296,7 +306,7 @@ y = 2
# isort: on
z = x + 1
# isort: on";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([2, 3, 4, 5])
@@ -306,7 +316,7 @@ z = x + 1
x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(
extract_isort_directives(&lxr).exclusions,
IntSet::from_iter([2, 3, 4])
@@ -316,7 +326,7 @@ z = x + 1";
x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());

let contents = "# isort: off
@@ -325,7 +335,7 @@ x = 1
y = 2
# isort: skip_file
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).exclusions, IntSet::default());
}

@@ -334,20 +344,20 @@ z = x + 1";
let contents = "x = 1
y = 2
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).splits, Vec::<usize>::new());

let contents = "x = 1
y = 2
# isort: split
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).splits, vec![3]);

let contents = "x = 1
y = 2 # isort: split
z = x + 1";
let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
assert_eq!(extract_isort_directives(&lxr).splits, vec![2]);
}
}
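These tests also record the rustpython-parser API migration that runs through this whole diff: lexer::make_tokenizer(source) becomes lexer::lex(source, Mode::Module), with Mode and Tok now imported from the crate root rather than from the lexer module. A minimal sketch of the new call shape, mirroring the test code above:

// Sketch mirroring the migrated lexer calls in the tests above.
use rustpython_parser::lexer::LexResult;
use rustpython_parser::{lexer, Mode};

fn tokenize(contents: &str) -> Vec<LexResult> {
    // Mode::Module lexes the source as a whole module, as in the tests above.
    lexer::lex(contents, Mode::Module).collect()
}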

@@ -1,34 +1,62 @@
//! Doc line extraction. In this context, a doc line is a line consisting of a
//! standalone comment or a constant string statement.

use std::iter::FusedIterator;

use rustpython_parser::ast::{Constant, ExprKind, Stmt, StmtKind, Suite};
use rustpython_parser::lexer::{LexResult, Tok};
use rustpython_parser::lexer::LexResult;
use rustpython_parser::Tok;

use crate::ast::visitor;
use crate::ast::visitor::Visitor;

/// Extract doc lines (standalone comments) from a token sequence.
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> Vec<usize> {
let mut doc_lines: Vec<usize> = Vec::default();
let mut prev: Option<usize> = None;
for (start, tok, end) in lxr.iter().flatten() {
if matches!(tok, Tok::Indent | Tok::Dedent | Tok::Newline) {
continue;
}
if matches!(tok, Tok::Comment(..)) {
if let Some(prev) = prev {
if start.row() > prev {
doc_lines.push(start.row());
}
} else {
doc_lines.push(start.row());
}
}
prev = Some(end.row());
}
doc_lines
pub fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
DocLines::new(lxr)
}

pub struct DocLines<'a> {
inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
prev: Option<usize>,
}

impl<'a> DocLines<'a> {
fn new(lxr: &'a [LexResult]) -> Self {
Self {
inner: lxr.iter().flatten(),
prev: None,
}
}
}

impl Iterator for DocLines<'_> {
type Item = usize;

fn next(&mut self) -> Option<Self::Item> {
loop {
let (start, tok, end) = self.inner.next()?;

match tok {
Tok::Indent | Tok::Dedent | Tok::Newline => continue,
Tok::Comment(..) => {
if let Some(prev) = self.prev {
if start.row() > prev {
break Some(start.row());
}
} else {
break Some(start.row());
}
}
_ => {}
}

self.prev = Some(end.row());
}
}
}

impl FusedIterator for DocLines<'_> {}

#[derive(Default)]
struct StringLinesVisitor {
string_lines: Vec<usize>,
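In this hunk, doc_lines_from_tokens changes from eagerly building a Vec<usize> to returning the lazy DocLines iterator. Callers that want the old behavior can simply collect it; a small hedged usage sketch (the calling checker code is not part of the hunks shown here):

// Sketch: recovering the old Vec<usize> result from the new lazy iterator.
fn collect_doc_lines(lxr: &[LexResult]) -> Vec<usize> {
    doc_lines_from_tokens(lxr).collect()
}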

@@ -1,13 +0,0 @@
/// See: <https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals>

pub const TRIPLE_QUOTE_PREFIXES: &[&str] = &[
"u\"\"\"", "u'''", "r\"\"\"", "r'''", "U\"\"\"", "U'''", "R\"\"\"", "R'''", "\"\"\"", "'''",
];

pub const SINGLE_QUOTE_PREFIXES: &[&str] = &[
"u\"", "u'", "r\"", "r'", "u\"", "u'", "r\"", "r'", "U\"", "U'", "R\"", "R'", "\"", "'",
];

pub const TRIPLE_QUOTE_SUFFIXES: &[&str] = &["\"\"\"", "'''"];

pub const SINGLE_QUOTE_SUFFIXES: &[&str] = &["\"", "'"];
@@ -1,4 +1,3 @@
pub mod constants;
pub mod definition;
pub mod extraction;
pub mod google;

@@ -576,6 +576,7 @@ mod tests {
let expected = Pyproject::new(Options {
pydocstyle: Some(pydocstyle::settings::Options {
convention: Some(Convention::Numpy),
ignore_decorators: None,
}),
..default_options([Linter::Pydocstyle.into()])
});

@@ -1,5 +1,3 @@
use std::fs::File;
use std::io::{BufReader, Read};
use std::ops::Deref;
use std::path::{Path, PathBuf};

@@ -88,12 +86,3 @@ pub fn relativize_path(path: impl AsRef<Path>) -> String {
}
format!("{}", path.display())
}

/// Read a file's contents from disk.
pub fn read_file<P: AsRef<Path>>(path: P) -> Result<String> {
let file = File::open(path)?;
let mut buf_reader = BufReader::new(file);
let mut contents = String::new();
buf_reader.read_to_string(&mut contents)?;
Ok(contents)
}
Some files were not shown because too many files have changed in this diff.